From 8d7eccad5dd7d2e8b807dd4af56ccf7b43649c1c Mon Sep 17 00:00:00 2001
From: Ludovic Fernandez
Date: Mon, 17 Jun 2019 11:48:05 +0200
Subject: [PATCH] New static configuration loading system.

Co-authored-by: Mathieu Lonjaret
---
 .gitignore | 1 +
 .golangci.toml | 4 +-
 Gopkg.lock | 39 +-
 Gopkg.toml | 22 +-
 cmd/configuration.go | 190 +--
 cmd/healthcheck/healthcheck.go | 35 +-
 cmd/storeconfig/storeconfig.go | 150 --
 cmd/traefik/traefik.go | 157 +--
 cmd/version/version.go | 16 +-
 .../getting-started/configuration-overview.md | 40 +-
 docs/content/operations/cli.md | 60 +-
 docs/content/providers/docker.md | 2 +-
 docs/content/providers/file.md | 2 +-
 docs/content/providers/marathon.md | 2 +-
 .../content/reference/static-configuration.md | 13 -
 .../reference/static-configuration.txt | 202 ---
 .../reference/static-configuration/cli.md | 5 +
 .../reference/static-configuration/cli.txt | 635 +++++++++
 .../reference/static-configuration/env.md | 616 ++++++++
 .../reference/static-configuration/file.md | 7 +
 .../file.toml} | 0
 .../static-configuration/overview.md | 5 +
 docs/content/routing/entrypoints.md | 61 +-
 .../user-guides/crd-acme/03-deployments.yml | 4 +-
 docs/mkdocs.yml | 6 +-
 integration/fixtures/docker/simple.toml | 2 +-
 integration/fixtures/simple_whitelist.toml | 2 -
 integration/integration_test.go | 3 +-
 integration/simple_test.go | 21 +-
 pkg/anonymize/anonymize_config_test.go | 41 +-
 pkg/cli/commands.go | 115 ++
 pkg/cli/commands_test.go | 681 +++++++++
 pkg/cli/file_finder.go | 50 +
 pkg/cli/file_finder_test.go | 162 +++
 pkg/cli/fixtures/config.toml | 3 +
 pkg/cli/fixtures_test.go | 25 +
 pkg/cli/help.go | 89 ++
 pkg/cli/help_test.go | 211 +++
 pkg/cli/loader.go | 21 +
 pkg/cli/loader_env.go | 40 +
 pkg/cli/loader_file.go | 78 +
 pkg/cli/loader_flag.go | 22 +
 pkg/collector/collector.go | 6 +-
 pkg/config/dyn_config.go | 4 +-
 pkg/config/env/env.go | 50 +
 pkg/config/env/env_test.go | 498 +++++++
 pkg/config/file/file.go | 31 +
 pkg/config/file/file_node.go | 86 ++
 pkg/config/file/file_node_test.go | 599 ++++++++
 pkg/config/file/file_test.go | 76 +
 pkg/config/file/fixtures/sample.toml | 539 +++++++
 pkg/config/file/fixtures/sample.yml | 257 ++++
 pkg/config/file/fixtures_test.go | 34 +
 pkg/config/file/raw_node.go | 128 ++
 pkg/config/file/raw_node_test.go | 540 +++++++
 pkg/config/flag/flag.go | 44 +
 pkg/config/flag/flag_test.go | 926 ++++++++++++
 pkg/config/flag/flagparser.go | 108 ++
 pkg/config/flag/flagparser_test.go | 255 ++++
 pkg/config/flag/flagtype.go | 60 +
 pkg/config/flag/flagtype_test.go | 226 +++
 pkg/config/generator/generator.go | 97 ++
 pkg/config/generator/generator_test.go | 439 ++++++
 pkg/config/label/label.go | 33 +
 .../label/label_test.go} | 10 +-
 pkg/config/middlewares.go | 8 +-
 .../parser}/element_fill.go | 43 +-
 .../parser}/element_fill_test.go | 273 +++-
 .../parser}/element_nodes.go | 90 +-
 .../parser}/element_nodes_test.go | 154 +-
 pkg/config/parser/flat_encode.go | 166 +++
 pkg/config/parser/flat_encode_test.go | 1250 +++++++++++++++++
 .../parser}/labels_decode.go | 63 +-
 .../parser}/labels_decode_test.go | 36 +-
 .../parser}/labels_encode.go | 10 +-
 .../parser}/labels_encode_test.go | 26 +-
 pkg/config/parser/node.go | 18 +
 .../parser}/nodes_metadata.go | 88 +-
 .../parser}/nodes_metadata_test.go | 245 +++-
 pkg/config/parser/parser.go | 38 +
 pkg/config/parser/tags.go | 18 +
 pkg/config/static/entrypoints.go | 135 +-
 pkg/config/static/entrypoints_test.go | 257 ----
 pkg/config/static/static_config.go | 151 +-
 pkg/metrics/datadog.go | 7 +-
 pkg/metrics/datadog_test.go | 2 +-
 pkg/metrics/influxdb.go | 12 +-
 pkg/metrics/influxdb_test.go | 4 +-
 pkg/metrics/statsd.go | 7 +-
 pkg/metrics/statsd_test.go | 2 +-
 pkg/middlewares/accesslog/logger.go | 3 +-
 pkg/middlewares/accesslog/logger_test.go | 21 +-
 pkg/ping/ping.go | 9 +-
 pkg/provider/acme/provider.go | 32 +-
 pkg/provider/constrainer.go | 2 +-
 pkg/provider/docker/config.go | 2 +-
 pkg/provider/docker/config_test.go | 14 +-
 pkg/provider/docker/docker.go | 32 +-
 pkg/provider/docker/label.go | 2 +-
 pkg/provider/file/file.go | 12 +-
 pkg/provider/file/file_test.go | 2 +-
 pkg/provider/kubernetes/crd/client.go | 9 +-
 .../kubernetes/crd/client_mock_test.go | 2 +-
 pkg/provider/kubernetes/crd/kubernetes.go | 15 +-
 pkg/provider/kubernetes/ingress/client.go | 9 +-
 .../kubernetes/ingress/client_mock_test.go | 2 +-
 pkg/provider/kubernetes/ingress/kubernetes.go | 23 +-
 pkg/provider/kubernetes/k8s/namespace.go | 32 -
 pkg/provider/label/internal/node.go | 13 -
 pkg/provider/label/internal/tags.go | 12 -
 pkg/provider/label/parser.go | 58 -
 pkg/provider/marathon/config.go | 2 +-
 pkg/provider/marathon/config_test.go | 28 +-
 pkg/provider/marathon/label.go | 2 +-
 pkg/provider/marathon/marathon.go | 46 +-
 pkg/provider/rancher/config.go | 2 +-
 pkg/provider/rancher/config_test.go | 14 +-
 pkg/provider/rancher/label.go | 2 +-
 pkg/provider/rancher/rancher.go | 27 +-
 pkg/provider/rest/rest.go | 8 +-
 pkg/server/roundtripper.go | 2 +-
 pkg/server/server_entrypoint_tcp_test.go | 8 +-
 pkg/server/server_test.go | 4 +-
 pkg/server/service/proxy.go | 6 +-
 pkg/tls/tls.go | 53 +-
 pkg/tracing/datadog/datadog.go | 12 +-
 pkg/tracing/haystack/haystack.go | 12 +-
 pkg/tracing/instana/instana.go | 7 +
 pkg/tracing/jaeger/jaeger.go | 26 +-
 pkg/tracing/zipkin/zipkin.go | 9 +
 pkg/types/constraints.go | 53 +-
 pkg/types/dns_resolvers.go | 44 -
 pkg/types/domains.go | 4 +-
 pkg/types/duration.go | 69 +
 pkg/types/host_resolver.go | 7 +
 pkg/types/logs.go | 168 +--
 pkg/types/logs_test.go | 419 ------
 pkg/types/metrics.go | 101 +-
 .../github.com/abronan/valkeyrie/valkeyrie.go | 40 -
 vendor/github.com/containous/flaeg/LICENSE.md | 21 -
 vendor/github.com/containous/flaeg/flaeg.go | 742 ----------
 .../containous/flaeg/flaeg_types.go | 7 -
 .../containous/flaeg/parse/parse.go | 313 -----
 .../github.com/containous/staert/LICENSE.md | 21 -
 vendor/github.com/containous/staert/kv.go | 395 ------
 vendor/github.com/containous/staert/staert.go | 80 --
 vendor/github.com/containous/staert/toml.go | 118 --
 vendor/github.com/ogier/pflag/LICENSE | 28 -
 vendor/github.com/ogier/pflag/bool.go | 79 --
 vendor/github.com/ogier/pflag/duration.go | 74 -
 vendor/github.com/ogier/pflag/flag.go | 624 --------
 vendor/github.com/ogier/pflag/float32.go | 70 -
 vendor/github.com/ogier/pflag/float64.go | 70 -
 vendor/github.com/ogier/pflag/int.go | 70 -
 vendor/github.com/ogier/pflag/int32.go | 70 -
 vendor/github.com/ogier/pflag/int64.go | 70 -
 vendor/github.com/ogier/pflag/int8.go | 70 -
 vendor/github.com/ogier/pflag/ip.go | 75 -
 vendor/github.com/ogier/pflag/ipmask.go | 85 --
 vendor/github.com/ogier/pflag/string.go | 66 -
 vendor/github.com/ogier/pflag/uint.go | 70 -
 vendor/github.com/ogier/pflag/uint16.go | 71 -
 vendor/github.com/ogier/pflag/uint32.go | 71 -
 vendor/github.com/ogier/pflag/uint64.go | 70 -
 vendor/github.com/ogier/pflag/uint8.go | 70 -
 165 files changed, 10894 insertions(+), 6076 deletions(-)
 delete mode 100644 cmd/storeconfig/storeconfig.go
 delete mode 100644 docs/content/reference/static-configuration.md
 delete mode 100644 docs/content/reference/static-configuration.txt
 create mode 100644 docs/content/reference/static-configuration/cli.md
 create mode 100644 docs/content/reference/static-configuration/cli.txt
 create mode 100644 docs/content/reference/static-configuration/env.md
 create mode 100644 docs/content/reference/static-configuration/file.md
 rename docs/content/reference/{static-configuration.toml => static-configuration/file.toml} (100%)
 create mode 100644 docs/content/reference/static-configuration/overview.md
 create mode 100644 pkg/cli/commands.go
 create mode 100644 pkg/cli/commands_test.go
 create mode 100644 pkg/cli/file_finder.go
 create mode 100644 pkg/cli/file_finder_test.go
 create mode 100644 pkg/cli/fixtures/config.toml
 create mode 100644 pkg/cli/fixtures_test.go
 create mode 100644 pkg/cli/help.go
 create mode 100644 pkg/cli/help_test.go
 create mode 100644 pkg/cli/loader.go
 create mode 100644 pkg/cli/loader_env.go
 create mode 100644 pkg/cli/loader_file.go
 create mode 100644 pkg/cli/loader_flag.go
 create mode 100644 pkg/config/env/env.go
 create mode 100644 pkg/config/env/env_test.go
 create mode 100644 pkg/config/file/file.go
 create mode 100644 pkg/config/file/file_node.go
 create mode 100644 pkg/config/file/file_node_test.go
 create mode 100644 pkg/config/file/file_test.go
 create mode 100644 pkg/config/file/fixtures/sample.toml
 create mode 100644 pkg/config/file/fixtures/sample.yml
 create mode 100644 pkg/config/file/fixtures_test.go
 create mode 100644 pkg/config/file/raw_node.go
 create mode 100644 pkg/config/file/raw_node_test.go
 create mode 100644 pkg/config/flag/flag.go
 create mode 100644 pkg/config/flag/flag_test.go
 create mode 100644 pkg/config/flag/flagparser.go
 create mode 100644 pkg/config/flag/flagparser_test.go
 create mode 100644 pkg/config/flag/flagtype.go
 create mode 100644 pkg/config/flag/flagtype_test.go
 create mode 100644 pkg/config/generator/generator.go
 create mode 100644 pkg/config/generator/generator_test.go
 create mode 100644 pkg/config/label/label.go
 rename pkg/{provider/label/parser_test.go => config/label/label_test.go} (99%)
 rename pkg/{provider/label/internal => config/parser}/element_fill.go (86%)
 rename pkg/{provider/label/internal => config/parser}/element_fill_test.go (81%)
 rename pkg/{provider/label/internal => config/parser}/element_nodes.go (55%)
 rename pkg/{provider/label/internal => config/parser}/element_nodes_test.go (78%)
 create mode 100644 pkg/config/parser/flat_encode.go
 create mode 100644 pkg/config/parser/flat_encode_test.go
 rename pkg/{provider/label/internal => config/parser}/labels_decode.go (64%)
 rename pkg/{provider/label/internal => config/parser}/labels_decode_test.go (82%)
 rename pkg/{provider/label/internal => config/parser}/labels_encode.go (80%)
 rename pkg/{provider/label/internal => config/parser}/labels_encode_test.go (83%)
 create mode 100644 pkg/config/parser/node.go
 rename pkg/{provider/label/internal => config/parser}/nodes_metadata.go (70%)
 rename pkg/{provider/label/internal => config/parser}/nodes_metadata_test.go (72%)
 create mode 100644 pkg/config/parser/parser.go
 create mode 100644 pkg/config/parser/tags.go
 delete mode 100644 pkg/config/static/entrypoints_test.go
 delete mode 100644 pkg/provider/kubernetes/k8s/namespace.go
 delete mode 100644 pkg/provider/label/internal/node.go
 delete mode 100644 pkg/provider/label/internal/tags.go
 delete mode 100644 pkg/provider/label/parser.go
 delete mode 100644 pkg/types/dns_resolvers.go
 create mode 100644 pkg/types/duration.go
 delete mode 100644 pkg/types/logs_test.go
 delete mode 100644 vendor/github.com/abronan/valkeyrie/valkeyrie.go
 delete mode 100644
vendor/github.com/containous/flaeg/LICENSE.md delete mode 100644 vendor/github.com/containous/flaeg/flaeg.go delete mode 100644 vendor/github.com/containous/flaeg/flaeg_types.go delete mode 100644 vendor/github.com/containous/flaeg/parse/parse.go delete mode 100644 vendor/github.com/containous/staert/LICENSE.md delete mode 100644 vendor/github.com/containous/staert/kv.go delete mode 100644 vendor/github.com/containous/staert/staert.go delete mode 100644 vendor/github.com/containous/staert/toml.go delete mode 100644 vendor/github.com/ogier/pflag/LICENSE delete mode 100644 vendor/github.com/ogier/pflag/bool.go delete mode 100644 vendor/github.com/ogier/pflag/duration.go delete mode 100644 vendor/github.com/ogier/pflag/flag.go delete mode 100644 vendor/github.com/ogier/pflag/float32.go delete mode 100644 vendor/github.com/ogier/pflag/float64.go delete mode 100644 vendor/github.com/ogier/pflag/int.go delete mode 100644 vendor/github.com/ogier/pflag/int32.go delete mode 100644 vendor/github.com/ogier/pflag/int64.go delete mode 100644 vendor/github.com/ogier/pflag/int8.go delete mode 100644 vendor/github.com/ogier/pflag/ip.go delete mode 100644 vendor/github.com/ogier/pflag/ipmask.go delete mode 100644 vendor/github.com/ogier/pflag/string.go delete mode 100644 vendor/github.com/ogier/pflag/uint.go delete mode 100644 vendor/github.com/ogier/pflag/uint16.go delete mode 100644 vendor/github.com/ogier/pflag/uint32.go delete mode 100644 vendor/github.com/ogier/pflag/uint64.go delete mode 100644 vendor/github.com/ogier/pflag/uint8.go diff --git a/.gitignore b/.gitignore index e9aa4a717..896009d10 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ /autogen/ /traefik /traefik.toml +/traefik.yml *.log *.exe cover.out diff --git a/.golangci.toml b/.golangci.toml index cefb36603..eeb123a15 100644 --- a/.golangci.toml +++ b/.golangci.toml @@ -36,6 +36,8 @@ "scopelint", "gochecknoinits", "gochecknoglobals", + # uncomment when the CI will be updated + # "bodyclose", # Too many false-positive and panics. 
] [issues] @@ -72,7 +74,7 @@ path = "pkg/provider/kubernetes/builder_(endpoint|service)_test.go" text = "(U1000: func )?`(.+)` is unused" [[issues.exclude-rules]] - path = "pkg/provider/label/internal/.+_test.go" + path = "pkg/config/parser/.+_test.go" text = "U1000: field `(foo|fuu)` is unused" [[issues.exclude-rules]] path = "pkg/server/service/bufferpool.go" diff --git a/Gopkg.lock b/Gopkg.lock index e2d68289f..ad88768f6 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -151,12 +151,9 @@ [[projects]] branch = "master" - digest = "1:28be1959f81e9a6dec3058768a4c4535cf73fcd6e171d21688ad0a7fdf49d43a" + digest = "1:d37d0fa13c422a0b85981fe42ff8f176885921294cf0c3ce585c160669cc32bb" name = "github.com/abronan/valkeyrie" - packages = [ - ".", - "store", - ] + packages = ["store"] pruneopts = "NUT" revision = "063d875e3c5fd734fa2aa12fac83829f62acfc70" @@ -295,17 +292,6 @@ pruneopts = "NUT" revision = "d83ebdd94cbdbcd9c6c6a22e1a0cde05e55d9d90" -[[projects]] - digest = "1:3cd675d508d3f9067704d36011c7a262fc0a0bf1ad0361a7d1e60a405d12941e" - name = "github.com/containous/flaeg" - packages = [ - ".", - "parse", - ] - pruneopts = "NUT" - revision = "c93d194b807ef171c43344d60adad8b58217390a" - version = "v1.4.1" - [[projects]] branch = "master" digest = "1:3a789aa5487458c1fc913b47be763e5906e1524f1143acb8617287866184f9a7" @@ -314,14 +300,6 @@ pruneopts = "NUT" revision = "c33f32e268983f989290677351b871b65da75ba5" -[[projects]] - digest = "1:a4f16a1b72349621b887bde88f458da518160abcb600eae3d591d8a2afa78bda" - name = "github.com/containous/staert" - packages = ["."] - pruneopts = "NUT" - revision = "7a9987c3a6d46be84e141a5c3191347ec10af17d" - version = "v3.1.2" - [[projects]] digest = "1:fa91847d50d3f656fc2d2d608b9749b97d77528e8988ad8001f957640545e91e" name = "github.com/coreos/go-systemd" @@ -1228,14 +1206,6 @@ revision = "d8152159450570012552f924a0ae6ab3d8c617e0" version = "v0.6.0" -[[projects]] - branch = "master" - digest = "1:95d27e49401b61dd203a4cf8237037bd6cd49599651f855ac1988c4ae27b090e" - name = "github.com/ogier/pflag" - packages = ["."] - pruneopts = "NUT" - revision = "45c278ab3607870051a2ea9040bb85fcb8557481" - [[projects]] digest = "1:69e47979091e47a10e5ff0e2776ca71aa3e884238ce446bd71e246878ba0858d" name = "github.com/opencontainers/go-digest" @@ -2262,10 +2232,7 @@ "github.com/armon/go-proxyproto", "github.com/cenkalti/backoff", "github.com/containous/alice", - "github.com/containous/flaeg", - "github.com/containous/flaeg/parse", "github.com/containous/mux", - "github.com/containous/staert", "github.com/coreos/go-systemd/daemon", "github.com/davecgh/go-spew/spew", "github.com/docker/docker/api/types", @@ -2312,7 +2279,6 @@ "github.com/mitchellh/copystructure", "github.com/mitchellh/hashstructure", "github.com/mvdan/xurls", - "github.com/ogier/pflag", "github.com/opentracing/opentracing-go", "github.com/opentracing/opentracing-go/ext", "github.com/opentracing/opentracing-go/log", @@ -2353,6 +2319,7 @@ "gopkg.in/DataDog/dd-trace-go.v1/ddtrace/opentracer", "gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer", "gopkg.in/fsnotify.v1", + "gopkg.in/yaml.v2", "k8s.io/api/core/v1", "k8s.io/api/extensions/v1beta1", "k8s.io/apimachinery/pkg/api/errors", diff --git a/Gopkg.toml b/Gopkg.toml index 7499f3763..7f7fd3244 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -69,10 +69,6 @@ required = [ name = "github.com/cenkalti/backoff" version = "2.1.1" -[[constraint]] - name = "github.com/containous/flaeg" - version = "1.4.1" - [[constraint]] branch = "master" name = "github.com/containous/mux" @@ -81,14 +77,10 @@ required = 
[ branch = "containous-fork" name = "github.com/containous/alice" -[[constraint]] - name = "github.com/containous/staert" - version = "3.1.2" - -[[constraint]] - name = "github.com/thoas/stats" - # related to https://github.com/thoas/stats/pull/32 - revision = "4975baf6a358ed3ddaa42133996e1959f96c9300" +#[[constraint]] +# name = "github.com/thoas/stats" +# # related to https://github.com/thoas/stats/pull/32 +# revision = "4975baf6a358ed3ddaa42133996e1959f96c9300" [[constraint]] name = "github.com/coreos/go-systemd" @@ -137,9 +129,9 @@ required = [ # branch = "master" # name = "github.com/jjcollinge/servicefabric" -[[constraint]] - branch = "master" - name = "github.com/abronan/valkeyrie" +#[[constraint]] +# branch = "master" +# name = "github.com/abronan/valkeyrie" #[[constraint]] # name = "github.com/mesosphere/mesos-dns" diff --git a/cmd/configuration.go b/cmd/configuration.go index 643003112..795026e7a 100644 --- a/cmd/configuration.go +++ b/cmd/configuration.go @@ -3,40 +3,27 @@ package cmd import ( "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config/static" - "github.com/containous/traefik/pkg/middlewares/accesslog" - "github.com/containous/traefik/pkg/ping" - "github.com/containous/traefik/pkg/provider/docker" - "github.com/containous/traefik/pkg/provider/file" - "github.com/containous/traefik/pkg/provider/kubernetes/ingress" - "github.com/containous/traefik/pkg/provider/marathon" - "github.com/containous/traefik/pkg/provider/rancher" - "github.com/containous/traefik/pkg/provider/rest" - "github.com/containous/traefik/pkg/tracing/datadog" - "github.com/containous/traefik/pkg/tracing/instana" - "github.com/containous/traefik/pkg/tracing/jaeger" - "github.com/containous/traefik/pkg/tracing/zipkin" "github.com/containous/traefik/pkg/types" - jaegercli "github.com/uber/jaeger-client-go" ) -// TraefikConfiguration holds GlobalConfiguration and other stuff -type TraefikConfiguration struct { - static.Configuration `mapstructure:",squash" export:"true"` - ConfigFile string `short:"c" description:"Configuration file to use (TOML)." export:"true"` +// TraefikCmdConfiguration wraps the static configuration and extra parameters. +type TraefikCmdConfiguration struct { + static.Configuration `export:"true"` + // ConfigFile is the path to the configuration file. + ConfigFile string `description:"Configuration file to use. If specified all other flags are ignored." export:"true"` } -// NewTraefikConfiguration creates a TraefikConfiguration with default values -func NewTraefikConfiguration() *TraefikConfiguration { - return &TraefikConfiguration{ +// NewTraefikConfiguration creates a TraefikCmdConfiguration with default values. 
+func NewTraefikConfiguration() *TraefikCmdConfiguration { + return &TraefikCmdConfiguration{ Configuration: static.Configuration{ Global: &static.Global{ CheckNewVersion: true, }, EntryPoints: make(static.EntryPoints), Providers: &static.Providers{ - ProvidersThrottleDuration: parse.Duration(2 * time.Second), + ProvidersThrottleDuration: types.Duration(2 * time.Second), }, ServersTransport: &static.ServersTransport{ MaxIdleConnsPerHost: 200, @@ -45,162 +32,3 @@ func NewTraefikConfiguration() *TraefikConfiguration { ConfigFile: "", } } - -// NewTraefikDefaultPointersConfiguration creates a TraefikConfiguration with pointers default values -func NewTraefikDefaultPointersConfiguration() *TraefikConfiguration { - // default File - var defaultFile file.Provider - defaultFile.Watch = true - defaultFile.Filename = "" // needs equivalent to viper.ConfigFileUsed() - - // default Ping - var defaultPing = ping.Handler{ - EntryPoint: "traefik", - } - - // default TraefikLog - defaultTraefikLog := types.TraefikLog{ - Format: "common", - FilePath: "", - } - - // default AccessLog - defaultAccessLog := types.AccessLog{ - Format: accesslog.CommonFormat, - FilePath: "", - Filters: &types.AccessLogFilters{}, - Fields: &types.AccessLogFields{ - DefaultMode: types.AccessLogKeep, - Headers: &types.FieldHeaders{ - DefaultMode: types.AccessLogKeep, - }, - }, - } - - // default Tracing - defaultTracing := static.Tracing{ - Backend: "jaeger", - ServiceName: "traefik", - SpanNameLimit: 0, - Jaeger: &jaeger.Config{ - SamplingServerURL: "http://localhost:5778/sampling", - SamplingType: "const", - SamplingParam: 1.0, - LocalAgentHostPort: "127.0.0.1:6831", - Propagation: "jaeger", - Gen128Bit: false, - TraceContextHeaderName: jaegercli.TraceContextHeaderName, - }, - Zipkin: &zipkin.Config{ - HTTPEndpoint: "http://localhost:9411/api/v1/spans", - SameSpan: false, - ID128Bit: true, - Debug: false, - SampleRate: 1.0, - }, - DataDog: &datadog.Config{ - LocalAgentHostPort: "localhost:8126", - GlobalTag: "", - Debug: false, - PrioritySampling: false, - }, - Instana: &instana.Config{ - LocalAgentHost: "localhost", - LocalAgentPort: 42699, - LogLevel: "info", - }, - } - - // default ApiConfiguration - defaultAPI := static.API{ - EntryPoint: "traefik", - Dashboard: true, - } - defaultAPI.Statistics = &types.Statistics{ - RecentErrors: 10, - } - - // default Metrics - defaultMetrics := types.Metrics{ - Prometheus: &types.Prometheus{ - Buckets: types.Buckets{0.1, 0.3, 1.2, 5}, - EntryPoint: static.DefaultInternalEntryPointName, - }, - Datadog: &types.Datadog{ - Address: "localhost:8125", - PushInterval: "10s", - }, - StatsD: &types.Statsd{ - Address: "localhost:8125", - PushInterval: "10s", - }, - InfluxDB: &types.InfluxDB{ - Address: "localhost:8089", - Protocol: "udp", - PushInterval: "10s", - }, - } - - defaultResolver := types.HostResolverConfig{ - CnameFlattening: false, - ResolvConfig: "/etc/resolv.conf", - ResolvDepth: 5, - } - - var defaultDocker docker.Provider - defaultDocker.Watch = true - defaultDocker.ExposedByDefault = true - defaultDocker.Endpoint = "unix:///var/run/docker.sock" - defaultDocker.SwarmMode = false - defaultDocker.SwarmModeRefreshSeconds = 15 - defaultDocker.DefaultRule = docker.DefaultTemplateRule - - // default Rest - var defaultRest rest.Provider - defaultRest.EntryPoint = static.DefaultInternalEntryPointName - - // default Marathon - var defaultMarathon marathon.Provider - defaultMarathon.Watch = true - defaultMarathon.Endpoint = "http://127.0.0.1:8080" - defaultMarathon.ExposedByDefault = true - 
defaultMarathon.DialerTimeout = parse.Duration(5 * time.Second) - defaultMarathon.ResponseHeaderTimeout = parse.Duration(60 * time.Second) - defaultMarathon.TLSHandshakeTimeout = parse.Duration(5 * time.Second) - defaultMarathon.KeepAlive = parse.Duration(10 * time.Second) - defaultMarathon.DefaultRule = marathon.DefaultTemplateRule - - // default Kubernetes - var defaultKubernetes ingress.Provider - - // default Rancher - var defaultRancher rancher.Provider - defaultRancher.Watch = true - defaultRancher.ExposedByDefault = true - defaultRancher.EnableServiceHealthFilter = true - defaultRancher.RefreshSeconds = 15 - defaultRancher.DefaultRule = rancher.DefaultTemplateRule - defaultRancher.Prefix = "latest" - - defaultProviders := static.Providers{ - File: &defaultFile, - Docker: &defaultDocker, - Rest: &defaultRest, - Marathon: &defaultMarathon, - Kubernetes: &defaultKubernetes, - Rancher: &defaultRancher, - } - - return &TraefikConfiguration{ - Configuration: static.Configuration{ - Providers: &defaultProviders, - Log: &defaultTraefikLog, - AccessLog: &defaultAccessLog, - Ping: &defaultPing, - API: &defaultAPI, - Metrics: &defaultMetrics, - Tracing: &defaultTracing, - HostResolver: &defaultResolver, - }, - } -} diff --git a/cmd/healthcheck/healthcheck.go b/cmd/healthcheck/healthcheck.go index bfa0a6ca6..8784f5d60 100644 --- a/cmd/healthcheck/healthcheck.go +++ b/cmd/healthcheck/healthcheck.go @@ -7,34 +7,34 @@ import ( "os" "time" - "github.com/containous/flaeg" - "github.com/containous/traefik/cmd" + "github.com/containous/traefik/pkg/cli" "github.com/containous/traefik/pkg/config/static" ) -// NewCmd builds a new HealthCheck command -func NewCmd(traefikConfiguration *cmd.TraefikConfiguration, traefikPointersConfiguration *cmd.TraefikConfiguration) *flaeg.Command { - return &flaeg.Command{ - Name: "healthcheck", - Description: `Calls traefik /ping to check health (web provider must be enabled)`, - Config: traefikConfiguration, - DefaultPointersConfig: traefikPointersConfiguration, - Run: runCmd(traefikConfiguration), - Metadata: map[string]string{ - "parseAllSources": "true", - }, +// NewCmd builds a new HealthCheck command. 
+func NewCmd(traefikConfiguration *static.Configuration, loaders []cli.ResourceLoader) *cli.Command { + return &cli.Command{ + Name: "healthcheck", + Description: `Calls Traefik /ping to check the health of Traefik (the API must be enabled).`, + Configuration: traefikConfiguration, + Run: runCmd(traefikConfiguration), + Resources: loaders, } } -func runCmd(traefikConfiguration *cmd.TraefikConfiguration) func() error { - return func() error { - traefikConfiguration.Configuration.SetEffectiveConfiguration(traefikConfiguration.ConfigFile) +func runCmd(traefikConfiguration *static.Configuration) func(_ []string) error { + return func(_ []string) error { + traefikConfiguration.SetEffectiveConfiguration("") - resp, errPing := Do(traefikConfiguration.Configuration) + resp, errPing := Do(*traefikConfiguration) + if resp != nil { + resp.Body.Close() + } if errPing != nil { fmt.Printf("Error calling healthcheck: %s\n", errPing) os.Exit(1) } + if resp.StatusCode != http.StatusOK { fmt.Printf("Bad healthcheck status: %s\n", resp.Status) os.Exit(1) @@ -50,6 +50,7 @@ func Do(staticConfiguration static.Configuration) (*http.Response, error) { if staticConfiguration.Ping == nil { return nil, errors.New("please enable `ping` to use health check") } + pingEntryPoint, ok := staticConfiguration.EntryPoints[staticConfiguration.Ping.EntryPoint] if !ok { return nil, errors.New("missing `ping` entrypoint") diff --git a/cmd/storeconfig/storeconfig.go b/cmd/storeconfig/storeconfig.go deleted file mode 100644 index 7cb4d8f91..000000000 --- a/cmd/storeconfig/storeconfig.go +++ /dev/null @@ -1,150 +0,0 @@ -package storeconfig - -import ( - "encoding/json" - "fmt" - stdlog "log" - - "github.com/containous/flaeg" - "github.com/containous/staert" - "github.com/containous/traefik/cmd" -) - -// NewCmd builds a new StoreConfig command -func NewCmd(traefikConfiguration *cmd.TraefikConfiguration, traefikPointersConfiguration *cmd.TraefikConfiguration) *flaeg.Command { - return &flaeg.Command{ - Name: "storeconfig", - Description: `Stores the static traefik configuration into a Key-value store. 
Traefik will not start.`, - Config: traefikConfiguration, - DefaultPointersConfig: traefikPointersConfiguration, - HideHelp: true, // TODO storeconfig - Metadata: map[string]string{ - "parseAllSources": "true", - }, - } -} - -// Run store config in KV -func Run(kv *staert.KvSource, traefikConfiguration *cmd.TraefikConfiguration) func() error { - return func() error { - if kv == nil { - return fmt.Errorf("error using command storeconfig, no Key-value store defined") - } - - fileConfig := traefikConfiguration.Providers.File - if fileConfig != nil { - traefikConfiguration.Providers.File = nil - if len(fileConfig.Filename) == 0 && len(fileConfig.Directory) == 0 { - fileConfig.Filename = traefikConfiguration.ConfigFile - } - } - - jsonConf, err := json.Marshal(traefikConfiguration.Configuration) - if err != nil { - return err - } - stdlog.Printf("Storing configuration: %s\n", jsonConf) - - err = kv.StoreConfig(traefikConfiguration.Configuration) - if err != nil { - return err - } - - if fileConfig != nil { - jsonConf, err = json.Marshal(fileConfig) - if err != nil { - return err - } - - stdlog.Printf("Storing file configuration: %s\n", jsonConf) - config, err := fileConfig.BuildConfiguration() - if err != nil { - return err - } - - stdlog.Print("Writing config to KV") - err = kv.StoreConfig(config) - if err != nil { - return err - } - } - - // if traefikConfiguration.Configuration.ACME != nil { - // account := &acme.Account{} - // - // accountInitialized, err := keyExists(kv, traefikConfiguration.Configuration.ACME.Storage) - // if err != nil && err != store.ErrKeyNotFound { - // return err - // } - // - // // Check to see if ACME account object is already in kv store - // if traefikConfiguration.Configuration.ACME.OverrideCertificates || !accountInitialized { - // - // // Stores the ACME Account into the KV Store - // // Certificates in KV Stores will be overridden - // meta := cluster.NewMetadata(account) - // err = meta.Marshall() - // if err != nil { - // return err - // } - // - // source := staert.KvSource{ - // Store: kv, - // Prefix: traefikConfiguration.Configuration.ACME.Storage, - // } - // - // err = source.StoreConfig(meta) - // if err != nil { - // return err - // } - // } - // } - return nil - } -} - -// func keyExists(source *staert.KvSource, key string) (bool, error) { -// list, err := source.List(key, nil) -// if err != nil { -// return false, err -// } -// -// return len(list) > 0, nil -// } - -// CreateKvSource creates KvSource -// TLS support is enable for Consul and Etcd backends -func CreateKvSource(traefikConfiguration *cmd.TraefikConfiguration) (*staert.KvSource, error) { - var kv *staert.KvSource - // var kvStore store.Store - var err error - - // TODO kv store - // switch { - // case traefikConfiguration.Providers.Consul != nil: - // kvStore, err = traefikConfiguration.Providers.Consul.CreateStore() - // kv = &staert.KvSource{ - // Store: kvStore, - // Prefix: traefikConfiguration.Providers.Consul.Prefix, - // } - // case traefikConfiguration.Providers.Etcd != nil: - // kvStore, err = traefikConfiguration.Providers.Etcd.CreateStore() - // kv = &staert.KvSource{ - // Store: kvStore, - // Prefix: traefikConfiguration.Providers.Etcd.Prefix, - // } - // case traefikConfiguration.Providers.Zookeeper != nil: - // kvStore, err = traefikConfiguration.Providers.Zookeeper.CreateStore() - // kv = &staert.KvSource{ - // Store: kvStore, - // Prefix: traefikConfiguration.Providers.Zookeeper.Prefix, - // } - // case traefikConfiguration.Providers.Boltdb != nil: - // kvStore, err = 
traefikConfiguration.Providers.Boltdb.CreateStore() - // kv = &staert.KvSource{ - // Store: kvStore, - // Prefix: traefikConfiguration.Providers.Boltdb.Prefix, - // } - // } - return kv, err -} diff --git a/cmd/traefik/traefik.go b/cmd/traefik/traefik.go index 0a6bacee3..0c0f768c5 100644 --- a/cmd/traefik/traefik.go +++ b/cmd/traefik/traefik.go @@ -4,38 +4,30 @@ import ( "context" "encoding/json" "fmt" - fmtlog "log" + stdlog "log" "net/http" "os" "path/filepath" - "reflect" "strings" "time" - "github.com/cenkalti/backoff" - "github.com/containous/flaeg" - "github.com/containous/staert" "github.com/containous/traefik/autogen/genstatic" "github.com/containous/traefik/cmd" "github.com/containous/traefik/cmd/healthcheck" - "github.com/containous/traefik/cmd/storeconfig" cmdVersion "github.com/containous/traefik/cmd/version" + "github.com/containous/traefik/pkg/cli" "github.com/containous/traefik/pkg/collector" "github.com/containous/traefik/pkg/config" "github.com/containous/traefik/pkg/config/static" - "github.com/containous/traefik/pkg/job" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/provider/aggregator" - "github.com/containous/traefik/pkg/provider/kubernetes/k8s" "github.com/containous/traefik/pkg/safe" "github.com/containous/traefik/pkg/server" "github.com/containous/traefik/pkg/server/router" traefiktls "github.com/containous/traefik/pkg/tls" - "github.com/containous/traefik/pkg/types" "github.com/containous/traefik/pkg/version" "github.com/coreos/go-systemd/daemon" assetfs "github.com/elazarl/go-bindata-assetfs" - "github.com/ogier/pflag" "github.com/sirupsen/logrus" "github.com/vulcand/oxy/roundrobin" ) @@ -48,141 +40,38 @@ func init() { os.Setenv("GODEBUG", goDebug+"tls13=1") } -// sliceOfStrings is the parser for []string -type sliceOfStrings []string - -// String is the method to format the flag's value, part of the flag.Value interface. -// The String method's output will be used in diagnostics. -func (s *sliceOfStrings) String() string { - return strings.Join(*s, ",") -} - -// Set is the method to set the flag value, part of the flag.Value interface. -// Set's argument is a string to be parsed to set the flag. -// It's a comma-separated list, so we split it. -func (s *sliceOfStrings) Set(value string) error { - parts := strings.Split(value, ",") - if len(parts) == 0 { - return fmt.Errorf("bad []string format: %s", value) - } - for _, entrypoint := range parts { - *s = append(*s, entrypoint) - } - return nil -} - -// Get return the []string -func (s *sliceOfStrings) Get() interface{} { - return *s -} - -// SetValue sets the []string with val -func (s *sliceOfStrings) SetValue(val interface{}) { - *s = val.([]string) -} - -// Type is type of the struct -func (s *sliceOfStrings) Type() string { - return "sliceOfStrings" -} - func main() { // traefik config inits - traefikConfiguration := cmd.NewTraefikConfiguration() - traefikPointersConfiguration := cmd.NewTraefikDefaultPointersConfiguration() + tConfig := cmd.NewTraefikConfiguration() - // traefik Command init - traefikCmd := &flaeg.Command{ + loaders := []cli.ResourceLoader{&cli.FileLoader{}, &cli.EnvLoader{}, &cli.FlagLoader{}} + + cmdTraefik := &cli.Command{ Name: "traefik", Description: `Traefik is a modern HTTP reverse proxy and load balancer made to deploy microservices with ease. 
Complete documentation is available at https://traefik.io`, - Config: traefikConfiguration, - DefaultPointersConfig: traefikPointersConfiguration, - Run: func() error { - return runCmd(&traefikConfiguration.Configuration, traefikConfiguration.ConfigFile) + Configuration: tConfig, + Resources: loaders, + Run: func(_ []string) error { + return runCmd(&tConfig.Configuration, cli.GetConfigFile(loaders)) }, } - // storeconfig Command init - storeConfigCmd := storeconfig.NewCmd(traefikConfiguration, traefikPointersConfiguration) - - // init flaeg source - f := flaeg.New(traefikCmd, os.Args[1:]) - // add custom parsers - f.AddParser(reflect.TypeOf(static.EntryPoints{}), &static.EntryPoints{}) - - f.AddParser(reflect.SliceOf(reflect.TypeOf("")), &sliceOfStrings{}) - f.AddParser(reflect.TypeOf(traefiktls.FilesOrContents{}), &traefiktls.FilesOrContents{}) - f.AddParser(reflect.TypeOf(types.Constraints{}), &types.Constraints{}) - f.AddParser(reflect.TypeOf(k8s.Namespaces{}), &k8s.Namespaces{}) - f.AddParser(reflect.TypeOf([]types.Domain{}), &types.Domains{}) - f.AddParser(reflect.TypeOf(types.DNSResolvers{}), &types.DNSResolvers{}) - f.AddParser(reflect.TypeOf(types.Buckets{}), &types.Buckets{}) - - f.AddParser(reflect.TypeOf(types.StatusCodes{}), &types.StatusCodes{}) - f.AddParser(reflect.TypeOf(types.FieldNames{}), &types.FieldNames{}) - f.AddParser(reflect.TypeOf(types.FieldHeaderNames{}), &types.FieldHeaderNames{}) - - // add commands - f.AddCommand(cmdVersion.NewCmd()) - f.AddCommand(storeConfigCmd) - f.AddCommand(healthcheck.NewCmd(traefikConfiguration, traefikPointersConfiguration)) - - usedCmd, err := f.GetCommand() + err := cmdTraefik.AddCommand(healthcheck.NewCmd(&tConfig.Configuration, loaders)) if err != nil { - fmtlog.Println(err) + stdlog.Println(err) os.Exit(1) } - if _, err := f.Parse(usedCmd); err != nil { - if err == pflag.ErrHelp { - os.Exit(0) - } - fmtlog.Printf("Error parsing command: %s\n", err) - os.Exit(1) - } - - // staert init - s := staert.NewStaert(traefikCmd) - // init TOML source - toml := staert.NewTomlSource("traefik", []string{traefikConfiguration.ConfigFile, "/etc/traefik/", "$HOME/.traefik/", "."}) - - // add sources to staert - s.AddSource(toml) - s.AddSource(f) - if _, err := s.LoadConfig(); err != nil { - fmtlog.Printf("Error reading TOML config file %s : %s\n", toml.ConfigFileUsed(), err) - os.Exit(1) - } - - traefikConfiguration.ConfigFile = toml.ConfigFileUsed() - - kv, err := storeconfig.CreateKvSource(traefikConfiguration) + err = cmdTraefik.AddCommand(cmdVersion.NewCmd()) if err != nil { - fmtlog.Printf("Error creating kv store: %s\n", err) + stdlog.Println(err) os.Exit(1) } - storeConfigCmd.Run = storeconfig.Run(kv, traefikConfiguration) - // if a KV Store is enable and no sub-command called in args - if kv != nil && usedCmd == traefikCmd { - s.AddSource(kv) - operation := func() error { - _, err := s.LoadConfig() - return err - } - notify := func(err error, time time.Duration) { - log.WithoutContext().Errorf("Load config error: %+v, retrying in %s", err, time) - } - err := backoff.RetryNotify(safe.OperationWithRecover(operation), job.NewBackOff(backoff.NewExponentialBackOff()), notify) - if err != nil { - fmtlog.Printf("Error loading configuration: %s\n", err) - os.Exit(1) - } - } - - if err := s.Run(); err != nil { - fmtlog.Printf("Error running traefik: %s\n", err) + err = cli.Execute(cmdTraefik) + if err != nil { + stdlog.Println(err) os.Exit(1) } @@ -192,10 +81,6 @@ Complete documentation is available at https://traefik.io`, func 
runCmd(staticConfiguration *static.Configuration, configFile string) error { configureLogging(staticConfiguration) - if len(configFile) > 0 { - log.WithoutContext().Infof("Using TOML configuration file %s", configFile) - } - http.DefaultTransport.(*http.Transport).Proxy = http.ProxyFromEnvironment if err := roundrobin.SetDefaultWeight(0); err != nil { @@ -289,7 +174,11 @@ func runCmd(staticConfiguration *static.Configuration, configFile string) error safe.Go(func() { tick := time.Tick(t) for range tick { - _, errHealthCheck := healthcheck.Do(*staticConfiguration) + resp, errHealthCheck := healthcheck.Do(*staticConfiguration) + if resp != nil { + resp.Body.Close() + } + if staticConfiguration.Ping == nil || errHealthCheck == nil { if ok, _ := daemon.SdNotify(false, "WATCHDOG=1"); !ok { log.WithoutContext().Error("Fail to tick watchdog") @@ -309,7 +198,7 @@ func runCmd(staticConfiguration *static.Configuration, configFile string) error func configureLogging(staticConfiguration *static.Configuration) { // configure default log flags - fmtlog.SetFlags(fmtlog.Lshortfile | fmtlog.LstdFlags) + stdlog.SetFlags(stdlog.Lshortfile | stdlog.LstdFlags) // configure log level // an explicitly defined log level always has precedence. if none is diff --git a/cmd/version/version.go b/cmd/version/version.go index 4d0e9c779..cf1f9bcf7 100644 --- a/cmd/version/version.go +++ b/cmd/version/version.go @@ -7,7 +7,7 @@ import ( "runtime" "text/template" - "github.com/containous/flaeg" + "github.com/containous/traefik/pkg/cli" "github.com/containous/traefik/pkg/version" ) @@ -18,19 +18,17 @@ Built: {{.BuildTime}} OS/Arch: {{.Os}}/{{.Arch}}` // NewCmd builds a new Version command -func NewCmd() *flaeg.Command { - return &flaeg.Command{ - Name: "version", - Description: `Print version`, - Config: struct{}{}, - DefaultPointersConfig: struct{}{}, - Run: func() error { +func NewCmd() *cli.Command { + return &cli.Command{ + Name: "version", + Description: `Shows the current Traefik version.`, + Configuration: nil, + Run: func(_ []string) error { if err := GetPrint(os.Stdout); err != nil { return err } fmt.Print("\n") return nil - }, } } diff --git a/docs/content/getting-started/configuration-overview.md b/docs/content/getting-started/configuration-overview.md index 3624961b9..3620123b4 100644 --- a/docs/content/getting-started/configuration-overview.md +++ b/docs/content/getting-started/configuration-overview.md @@ -33,31 +33,23 @@ Traefik gets its _dynamic configuration_ from [providers](../providers/overview. ## The Static Configuration -There are three different locations where you can define static configuration options in Traefik: +There are three different, mutually exclusive, ways to define static configuration options in Traefik: -- In a key-value store -- In the command-line arguments - In a configuration file +- As environment variables +- In the command-line arguments -If you don't provide a value for a given option, default values apply. +These ways are evaluated in the order listed above. -!!! important "Precedence Order" - - The following precedence order applies for configuration options: key-value > command-line > configuration file. +If no value was provided for a given option, a default value applies. +Moreover, if an option has sub-options, and any of these sub-options is not specified, a default value will apply as well. - It means that arguments override configuration file, and key-value store overrides arguments. - -!!! 
important "Default Values" - - Some root options are enablers: they set default values for all their children. - - For example, the `--providers.docker` option enables the docker provider. - Once positioned, this option sets (and resets) all the default values under the root `providers.docker`. - If you define child options using a lesser precedence configuration source, they will be overwritten by the default values. +For example, the `--providers.docker` option is enough by itself to enable the docker provider, even though sub-options like `--providers.docker.endpoint` exist. +Once positioned, this option sets (and resets) all the default values of the sub-options of `--providers.docker`. ### Configuration File -At startup, Traefik searches for a file named `traefik.toml` in `/etc/traefik/`, `$HOME/.traefik/`, and `.` (_the working directory_). +At startup, Traefik searches for a file named `traefik.toml` in `/etc/traefik/`, `$XDG_CONFIG_HOME/`, `$HOME/.config/`, and `.` (_the working directory_). You can override this using the `configFile` argument. @@ -67,16 +59,16 @@ traefik --configFile=foo/bar/myconfigfile.toml ### Arguments -Use `traefik --help` to get the list of the available arguments. +To get the list of all available arguments: -### Key-Value Stores +```bash +traefik --help -Traefik supports several Key-value stores: +# or -- [Consul](https://consul.io) -- [etcd](https://coreos.com/etcd/) -- [ZooKeeper](https://zookeeper.apache.org/) -- [boltdb](https://github.com/boltdb/bolt) +docker run traefik[:version] --help +# ex: docker run traefik:2.0 --help +``` ## Available Configuration Options diff --git a/docs/content/operations/cli.md b/docs/content/operations/cli.md index 8cd3fc39b..bef4a8510 100644 --- a/docs/content/operations/cli.md +++ b/docs/content/operations/cli.md @@ -6,48 +6,56 @@ The Traefik Command Line ## General ```bash -traefik [command] [--flag=flag_argument] +traefik [command] [flags] [arguments] ``` -Available commands: +Use `traefik [command] --help` for help on any command. -- `version` : Print version -- `storeconfig` : Store the static Traefik configuration into a Key-value store. Please refer to the `Store Traefik configuration`(TODO: add doc and link) section to get documentation on it. -- `healthcheck`: Calls Traefik `/ping` to check health. +Commands: -Each command can have additional flags. +- `healthcheck` Calls Traefik `/ping` to check the health of Traefik (the API must be enabled). +- `version` Shows the current Traefik version. -All those flags will be displayed with: +Flag's usage: ```bash -traefik [command] --help +# set flag_argument to flag(s) +traefik [--flag=flag_argument] [-f [flag_argument]] + +# set true/false to boolean flag(s) +traefik [--flag[=true|false| ]] [-f [true|false| ]] ``` -Each command is described at the beginning of the help section: +### healthcheck -```bash -traefik --help +Calls Traefik `/ping` to check the health of Traefik. +Its exit status is `0` if Traefik is healthy and `1` otherwise. -# or - -docker run traefik[:version] --help -# ex: docker run traefik:1.5 --help -``` - -### Command: healthcheck - -Checks the health of Traefik. -Its exit status is `0` if Traefik is healthy and `1` if it is unhealthy. - -This can be used with Docker [HEALTHCHECK](https://docs.docker.com/engine/reference/builder/#healthcheck) instruction or any other health check orchestration mechanism. 
+This can be used with Docker [HEALTHCHECK](https://docs.docker.com/engine/reference/builder/#healthcheck) instruction +or any other health check orchestration mechanism. !!! note The [`ping` endpoint](../ping/) must be enabled to allow the `healthcheck` command to call `/ping`. -```bash -traefik healthcheck -``` +Usage: ```bash +traefik healthcheck [command] [flags] [arguments] +``` + +Example: + +```bash +$ traefik healthcheck OK: http://:8082/ping ``` + +### version + +Shows the current Traefik version. + +Usage: + +```bash +traefik version [command] [flags] [arguments] +``` diff --git a/docs/content/providers/docker.md b/docs/content/providers/docker.md index 81fae7b66..4888f472e 100644 --- a/docs/content/providers/docker.md +++ b/docs/content/providers/docker.md @@ -63,7 +63,7 @@ Attach labels to your containers and let Traefik do the rest! ## Provider Configuration Options !!! tip "Browse the Reference" - If you're in a hurry, maybe you'd rather go through the [static](../reference/static-configuration.md) and the [dynamic](../reference/dynamic-configuration/docker.md) configuration references. + If you're in a hurry, maybe you'd rather go through the [static](../reference/static-configuration/overview.md) and the [dynamic](../reference/dynamic-configuration/docker.md) configuration references. ### `endpoint` diff --git a/docs/content/providers/file.md b/docs/content/providers/file.md index 26284a074..3740f5ba6 100644 --- a/docs/content/providers/file.md +++ b/docs/content/providers/file.md @@ -53,7 +53,7 @@ You can write these configuration elements: ## Provider Configuration Options !!! tip "Browse the Reference" - If you're in a hurry, maybe you'd rather go through the [static](../reference/static-configuration.md) and the [dynamic](../reference/dynamic-configuration/file.md) configuration references. + If you're in a hurry, maybe you'd rather go through the [static](../reference/static-configuration/overview.md) and the [dynamic](../reference/dynamic-configuration/file.md) configuration references. ### `filename` diff --git a/docs/content/providers/marathon.md b/docs/content/providers/marathon.md index 0b9b42274..8bec1027b 100644 --- a/docs/content/providers/marathon.md +++ b/docs/content/providers/marathon.md @@ -49,7 +49,7 @@ See also [Marathon user guide](../user-guides/marathon.md). ## Provider Configuration Options !!! tip "Browse the Reference" - If you're in a hurry, maybe you'd rather go through the [static](../reference/static-configuration.md) and the [dynamic](../reference/dynamic-configuration/marathon.md) configuration references. + If you're in a hurry, maybe you'd rather go through the [static](../reference/static-configuration/overview.md) and the [dynamic](../reference/dynamic-configuration/marathon.md) configuration references. 
### `basic` diff --git a/docs/content/reference/static-configuration.md b/docs/content/reference/static-configuration.md deleted file mode 100644 index 0101746b3..000000000 --- a/docs/content/reference/static-configuration.md +++ /dev/null @@ -1,13 +0,0 @@ -# Static Configuration - -## File - -```toml ---8<-- "content/reference/static-configuration.toml" -``` - -## CLI - -```txt ---8<-- "content/reference/static-configuration.txt" -``` diff --git a/docs/content/reference/static-configuration.txt b/docs/content/reference/static-configuration.txt deleted file mode 100644 index ea9c1afe9..000000000 --- a/docs/content/reference/static-configuration.txt +++ /dev/null @@ -1,202 +0,0 @@ ---accesslog Access log settings (default "false") ---accesslog.bufferingsize Number of access log lines to process in a buffered way. Default 0. (default "0") ---accesslog.fields AccessLogFields (default "false") ---accesslog.fields.defaultmode Default mode for fields: keep | drop (default "keep") ---accesslog.fields.headers Headers to keep, drop or redact (default "false") ---accesslog.fields.headers.defaultmode Default mode for fields: keep | drop | redact (default "keep") ---accesslog.fields.headers.names Override mode for headers (default "map[]") ---accesslog.fields.names Override mode for fields (default "map[]") ---accesslog.filepath Access log file path. Stdout is used when omitted or empty ---accesslog.filters Access log filters, used to keep only specific access logs (default "false") ---accesslog.filters.minduration Keep access logs when request took longer than the specified duration (default "0s") ---accesslog.filters.retryattempts Keep access logs when at least one retry happened (default "false") ---accesslog.filters.statuscodes Keep access logs with status codes in the specified range (default "[]") ---accesslog.format Access log format: json | common (default "common") ---acme Enable ACME (Let's Encrypt): automatic SSL (default "false") ---acme.acmelogging Enable debug logging of ACME actions. (default "false") ---acme.caserver CA server to use. ---acme.dnschallenge Activate DNS-01 Challenge (default "false") ---acme.dnschallenge.delaybeforecheck Assume DNS propagates after a delay in seconds rather than finding and querying (default "0s") - nameservers. ---acme.dnschallenge.disablepropagationcheck Disable the DNS propagation checks before notifying ACME that the DNS challenge (default "false") - is ready. [not recommended] ---acme.dnschallenge.provider Use a DNS-01 based challenge provider rather than HTTPS. ---acme.dnschallenge.resolvers Use following DNS servers to resolve the FQDN authority. ---acme.domains CN and SANs (alternative domains) to each main domain using format: (default "[]") - --acme.domains='main.com,san1.com,san2.com' --acme.domains='*.main.net'. No - SANs for wildcards domain. Wildcard domains only accepted with DNSChallenge ---acme.email Email address used for registration ---acme.entrypoint EntryPoint to use. ---acme.httpchallenge Activate HTTP-01 Challenge (default "false") ---acme.httpchallenge.entrypoint HTTP challenge EntryPoint ---acme.keytype KeyType used for generating certificate private key. Allow value 'EC256', - 'EC384', 'RSA2048', 'RSA4096', 'RSA8192'. Default to 'RSA4096' ---acme.onhostrule Enable certificate generation on frontends Host rules. (default "false") ---acme.storage Storage to use. 
---acme.tlschallenge Activate TLS-ALPN-01 Challenge (default "false") ---api Enable api/dashboard (default "false") ---api.dashboard Activate dashboard (default "true") ---api.entrypoint EntryPoint (default "traefik") ---api.middlewares Middleware list ---api.statistics Enable more detailed statistics (default "true") ---api.statistics.recenterrors Number of recent errors logged (default "10") --c, --configfile Configuration file to use (TOML). ---entrypoints Entrypoints definition using format: --entryPoints='Name:http Address::8000' (default "map[]") - --entryPoints='Name:https Address::4442' ---global Global configuration options (default "true") ---global.checknewversion Periodically check if a new version has been released (default "true") --d, --global.debug Enable debug mode (default "false") ---global.sendanonymoususage send periodically anonymous usage statistics (default "false") ---hostresolver Enable CNAME Flattening (default "false") ---hostresolver.cnameflattening A flag to enable/disable CNAME flattening (default "false") ---hostresolver.resolvconfig resolv.conf used for DNS resolving (default "/etc/resolv.conf") ---hostresolver.resolvdepth The maximal depth of DNS recursive resolving (default "5") ---log Traefik log settings (default "false") ---log.filepath Traefik log file path. Stdout is used when omitted or empty ---log.format Traefik log format: json | common (default "common") ---log.level Log level set to traefik logs. ---metrics Enable a metrics exporter (default "false") ---metrics.datadog DataDog metrics exporter type (default "false") ---metrics.datadog.address DataDog's address (default "localhost:8125") ---metrics.datadog.pushinterval DataDog push interval (default "10s") ---metrics.influxdb InfluxDB metrics exporter type (default "false") ---metrics.influxdb.address InfluxDB address (default "localhost:8089") ---metrics.influxdb.database InfluxDB database used when protocol is http ---metrics.influxdb.password InfluxDB password (only with http) ---metrics.influxdb.protocol InfluxDB address protocol (udp or http) (default "udp") ---metrics.influxdb.pushinterval InfluxDB push interval (default "10s") ---metrics.influxdb.retentionpolicy InfluxDB retention policy used when protocol is http ---metrics.influxdb.username InfluxDB username (only with http) ---metrics.prometheus Prometheus metrics exporter type (default "false") ---metrics.prometheus.buckets Buckets for latency metrics (default "[0.1 0.3 1.2 5]") ---metrics.prometheus.entrypoint EntryPoint (default "traefik") ---metrics.prometheus.middlewares Middlewares ---metrics.statsd StatsD metrics exporter type (default "false") ---metrics.statsd.address StatsD address (default "localhost:8125") ---metrics.statsd.pushinterval StatsD push interval (default "10s") ---ping Enable ping (default "false") ---ping.entrypoint Ping entryPoint (default "traefik") ---ping.middlewares Middleware list ---providers Providers configuration (default "false") ---providers.docker Enable Docker backend with default settings (default "false") ---providers.docker.constraints Filter services by constraint, matching with Traefik tags. (default "[]") ---providers.docker.defaultrule Default rule (default "Host(`{{ normalize .Name }}`)") ---providers.docker.endpoint Docker server endpoint. 
Can be a tcp or a unix socket endpoint (default "unix:///var/run/docker.sock") ---providers.docker.exposedbydefault Expose containers by default (default "true") ---providers.docker.network Default Docker network used ---providers.docker.swarmmode Use Docker on Swarm Mode (default "false") ---providers.docker.swarmmoderefreshseconds Polling interval for swarm mode (in seconds) (default "15") ---providers.docker.tls Enable Docker TLS support (default "false") ---providers.docker.tls.ca TLS CA ---providers.docker.tls.caoptional TLS CA.Optional (default "false") ---providers.docker.tls.cert TLS cert ---providers.docker.tls.insecureskipverify TLS insecure skip verify (default "false") ---providers.docker.tls.key TLS key ---providers.docker.usebindportip Use the ip address from the bound port, rather than from the inner network (default "false") ---providers.docker.watch Watch provider (default "true") ---providers.file Enable File backend with default settings (default "true") ---providers.file.debugloggeneratedtemplate Enable debug logging of generated configuration template. (default "false") ---providers.file.directory Load configuration from one or more .toml files in a directory ---providers.file.filename Override default configuration template. For advanced users :) ---providers.file.watch Watch provider (default "true") ---providers.kubernetes Enable Kubernetes backend with default settings (default "true") ---providers.kubernetes.certauthfilepath Kubernetes certificate authority file path (not needed for in-cluster client) ---providers.kubernetes.disablepasshostheaders Kubernetes disable PassHost Headers (default "false") ---providers.kubernetes.endpoint Kubernetes server endpoint (required for external cluster client) ---providers.kubernetes.ingressclass Value of kubernetes.io/ingress.class annotation to watch for ---providers.kubernetes.ingressendpoint Kubernetes Ingress Endpoint (default "false") ---providers.kubernetes.ingressendpoint.hostname Hostname used for Kubernetes Ingress endpoints ---providers.kubernetes.ingressendpoint.ip IP used for Kubernetes Ingress endpoints ---providers.kubernetes.ingressendpoint.publishedservice Published Kubernetes Service to copy status from ---providers.kubernetes.labelselector Kubernetes Ingress label selector to use ---providers.kubernetes.namespaces Kubernetes namespaces (default "[]") ---providers.kubernetes.token Kubernetes bearer token (not needed for in-cluster client) ---providers.kubernetescrd Enable Kubernetes backend with default settings (default "false") ---providers.kubernetescrd.certauthfilepath Kubernetes certificate authority file path (not needed for in-cluster client) ---providers.kubernetescrd.disablepasshostheaders Kubernetes disable PassHost Headers (default "false") ---providers.kubernetescrd.endpoint Kubernetes server endpoint (required for external cluster client) ---providers.kubernetescrd.ingressclass Value of kubernetes.io/ingress.class annotation to watch for ---providers.kubernetescrd.labelselector Kubernetes label selector to use ---providers.kubernetescrd.namespaces Kubernetes namespaces (default "[]") ---providers.kubernetescrd.token Kubernetes bearer token (not needed for in-cluster client) ---providers.marathon Enable Marathon backend with default settings (default "false") ---providers.marathon.basic Enable basic authentication (default "false") ---providers.marathon.basic.httpbasicauthuser Basic authentication User ---providers.marathon.basic.httpbasicpassword Basic authentication Password 
---providers.marathon.constraints Filter services by constraint, matching with Traefik tags. (default "[]") ---providers.marathon.dcostoken DCOSToken for DCOS environment, This will override the Authorization header ---providers.marathon.defaultrule Default rule (default "Host(`{{ normalize .Name }}`)") ---providers.marathon.dialertimeout Set a dialer timeout for Marathon (default "5s") ---providers.marathon.endpoint Marathon server endpoint. You can also specify multiple endpoint for Marathon (default "http://127.0.0.1:8080") ---providers.marathon.exposedbydefault Expose Marathon apps by default (default "true") ---providers.marathon.filtermarathonconstraints Enable use of Marathon constraints in constraint filtering (default "false") ---providers.marathon.forcetaskhostname Force to use the task's hostname. (default "false") ---providers.marathon.keepalive Set a TCP Keep Alive time in seconds (default "10s") ---providers.marathon.respectreadinesschecks Filter out tasks with non-successful readiness checks during deployments (default "false") ---providers.marathon.responseheadertimeout Set a response header timeout for Marathon (default "1m0s") ---providers.marathon.tls Enable TLS support (default "false") ---providers.marathon.tls.ca TLS CA ---providers.marathon.tls.caoptional TLS CA.Optional (default "false") ---providers.marathon.tls.cert TLS cert ---providers.marathon.tls.insecureskipverify TLS insecure skip verify (default "false") ---providers.marathon.tls.key TLS key ---providers.marathon.tlshandshaketimeout Set a TLS handhsake timeout for Marathon (default "5s") ---providers.marathon.trace Display additional provider logs. (default "false") ---providers.marathon.watch Watch provider (default "true") ---providers.providersthrottleduration Backends throttle duration: minimum duration between 2 events from providers (default "2s") - before applying a new configuration. It avoids unnecessary reloads if multiples - events are sent in a short amount of time. ---providers.rancher Enable Rancher backend with default settings (default "true") ---providers.rancher.constraints Filter services by constraint, matching with Traefik tags. (default "[]") ---providers.rancher.defaultrule Default rule (default "Host(`{{ normalize .Name }}`)") ---providers.rancher.exposedbydefault Expose containers by default (default "true") ---providers.rancher.intervalpoll Poll the Rancher metadata service every 'rancher.refreshseconds' (less accurate) (default "false") ---providers.rancher.prefix Prefix used for accessing the Rancher metadata service (default "latest") ---providers.rancher.watch Watch provider (default "true") ---providers.rest Enable Rest backend with default settings (default "true") ---providers.rest.entrypoint EntryPoint (default "traefik") ---serverstransport Servers default transport (default "true") ---serverstransport.forwardingtimeouts Timeouts for requests forwarded to the backend servers (default "true") ---serverstransport.forwardingtimeouts.dialtimeout The amount of time to wait until a connection to a backend server can be (default "0s") - established. Defaults to 30 seconds. If zero, no timeout exists ---serverstransport.forwardingtimeouts.responseheadertimeout The amount of time to wait for a server's response headers after fully writing (default "0s") - the request (including its body, if any). 
If zero, no timeout exists ---serverstransport.insecureskipverify Disable SSL certificate verification (default "false") ---serverstransport.maxidleconnsperhost If non-zero, controls the maximum idle (keep-alive) to keep per-host. If zero, (default "200") - DefaultMaxIdleConnsPerHost is used ---serverstransport.rootcas Add cert file for self-signed certificate ---tracing OpenTracing configuration (default "false") ---tracing.backend Selects the tracking backend ('jaeger','zipkin','datadog','instana'). (default "jaeger") ---tracing.datadog Settings for DataDog (default "false") ---tracing.datadog.bagageprefixheadername specifies the header name prefix that will be used to store baggage items in a - map. ---tracing.datadog.debug Enable DataDog debug. (default "false") ---tracing.datadog.globaltag Key:Value tag to be set on all the spans. ---tracing.datadog.localagenthostport Set datadog-agent's host:port that the reporter will used. Defaults to (default "localhost:8126") - localhost:8126 ---tracing.datadog.parentidheadername Specifies the header name that will be used to store the parent ID. ---tracing.datadog.prioritysampling Enable priority sampling. When using distributed tracing, this option must be (default "false") - enabled in order to get all the parts of a distributed trace sampled. ---tracing.datadog.samplingpriorityheadername Specifies the header name that will be used to store the sampling priority. ---tracing.datadog.traceidheadername Specifies the header name that will be used to store the trace ID. ---tracing.instana Settings for Instana (default "false") ---tracing.instana.localagenthost Set instana-agent's host that the reporter will used. (default "localhost") ---tracing.instana.localagentport Set instana-agent's port that the reporter will used. (default "42699") ---tracing.instana.loglevel Set instana-agent's log level. ('error','warn','info','debug') (default "info") ---tracing.jaeger Settings for jaeger (default "false") ---tracing.jaeger.gen128bit generate 128 bit span IDs. (default "false") ---tracing.jaeger.localagenthostport set jaeger-agent's host:port that the reporter will used. (default "127.0.0.1:6831") ---tracing.jaeger.propagation which propgation format to use (jaeger/b3). (default "jaeger") ---tracing.jaeger.samplingparam set the sampling parameter. (default "1") ---tracing.jaeger.samplingserverurl set the sampling server url. (default "http://localhost:5778/sampling") ---tracing.jaeger.samplingtype set the sampling type. (default "const") ---tracing.jaeger.tracecontextheadername set the header to use for the trace-id. (default "uber-trace-id") ---tracing.servicename Set the name for this service (default "traefik") ---tracing.spannamelimit Set the maximum character limit for Span names (default 0 = no limit) (default "0") ---tracing.zipkin Settings for zipkin (default "false") ---tracing.zipkin.debug Enable Zipkin debug. (default "false") ---tracing.zipkin.httpendpoint HTTP Endpoint to report traces to. (default "http://localhost:9411/api/v1/spans") ---tracing.zipkin.id128bit Use Zipkin 128 bit root span IDs. (default "true") ---tracing.zipkin.samespan Use Zipkin SameSpan RPC style traces. (default "false") ---tracing.zipkin.samplerate The rate between 0.0 and 1.0 of requests to trace. 
(default "1") --h, --help Print Help (this message) and exit \ No newline at end of file diff --git a/docs/content/reference/static-configuration/cli.md b/docs/content/reference/static-configuration/cli.md new file mode 100644 index 000000000..1111439df --- /dev/null +++ b/docs/content/reference/static-configuration/cli.md @@ -0,0 +1,5 @@ +# Static Configuration: CLI + +```txt +--8<-- "content/reference/static-configuration/cli.txt" +``` diff --git a/docs/content/reference/static-configuration/cli.txt b/docs/content/reference/static-configuration/cli.txt new file mode 100644 index 000000000..2d9d2cfb9 --- /dev/null +++ b/docs/content/reference/static-configuration/cli.txt @@ -0,0 +1,635 @@ +--accesslog (Default: "false") + Access log settings. + +--accesslog.bufferingsize (Default: "0") + Number of access log lines to process in a buffered way. + +--accesslog.fields.defaultmode (Default: "keep") + Default mode for fields: keep | drop + +--accesslog.fields.headers.defaultmode (Default: "keep") + Default mode for fields: keep | drop | redact + +--accesslog.fields.headers.names. (Default: "") + Override mode for headers + +--accesslog.fields.names. (Default: "") + Override mode for fields + +--accesslog.filepath (Default: "") + Access log file path. Stdout is used when omitted or empty. + +--accesslog.filters.minduration (Default: "0") + Keep access logs when request took longer than the specified duration. + +--accesslog.filters.retryattempts (Default: "false") + Keep access logs when at least one retry happened. + +--accesslog.filters.statuscodes (Default: "") + Keep access logs with status codes in the specified range. + +--accesslog.format (Default: "common") + Access log format: json | common + +--acme.acmelogging (Default: "false") + Enable debug logging of ACME actions. + +--acme.caserver (Default: "https://acme-v02.api.letsencrypt.org/directory") + CA server to use. + +--acme.dnschallenge (Default: "false") + Activate DNS-01 Challenge. + +--acme.dnschallenge.delaybeforecheck (Default: "0") + Assume DNS propagates after a delay in seconds rather than finding and querying + nameservers. + +--acme.dnschallenge.disablepropagationcheck (Default: "false") + Disable the DNS propagation checks before notifying ACME that the DNS challenge + is ready. [not recommended] + +--acme.dnschallenge.provider (Default: "") + Use a DNS-01 based challenge provider rather than HTTPS. + +--acme.dnschallenge.resolvers (Default: "") + Use following DNS servers to resolve the FQDN authority. + +--acme.domains (Default: "") + The list of domains for which certificates are generated on startup. Wildcard + domains only accepted with DNSChallenge. + +--acme.domains[n].main (Default: "") + Default subject name. + +--acme.domains[n].sans (Default: "") + Subject alternative names. + +--acme.email (Default: "") + Email address used for registration. + +--acme.entrypoint (Default: "") + EntryPoint to use. + +--acme.httpchallenge (Default: "false") + Activate HTTP-01 Challenge. + +--acme.httpchallenge.entrypoint (Default: "") + HTTP challenge EntryPoint + +--acme.keytype (Default: "RSA4096") + KeyType used for generating certificate private key. Allow value 'EC256', + 'EC384', 'RSA2048', 'RSA4096', 'RSA8192'. + +--acme.onhostrule (Default: "false") + Enable certificate generation on router Host rules. + +--acme.storage (Default: "acme.json") + Storage to use. + +--acme.tlschallenge (Default: "true") + Activate TLS-ALPN-01 Challenge. + +--api (Default: "false") + Enable api/dashboard. 
+ +--api.dashboard (Default: "true") + Activate dashboard. + +--api.entrypoint (Default: "traefik") + EntryPoint. + +--api.middlewares (Default: "") + Middleware list. + +--api.statistics (Default: "false") + Enable more detailed statistics. + +--api.statistics.recenterrors (Default: "10") + Number of recent errors logged. + +--configfile (Default: "") + Configuration file to use. If specified all other flags are ignored. + +--entrypoints. (Default: "false") + Entry points definition. + +--entrypoints..address (Default: "") + Entry point address. + +--entrypoints..forwardedheaders.insecure (Default: "false") + Trust all forwarded headers. + +--entrypoints..forwardedheaders.trustedips (Default: "") + Trust only forwarded headers from selected IPs. + +--entrypoints..proxyprotocol (Default: "false") + Proxy-Protocol configuration. + +--entrypoints..proxyprotocol.insecure (Default: "false") + Trust all. + +--entrypoints..proxyprotocol.trustedips (Default: "") + Trust only selected IPs. + +--entrypoints..transport.lifecycle.gracetimeout (Default: "10") + Duration to give active requests a chance to finish before Traefik stops. + +--entrypoints..transport.lifecycle.requestacceptgracetimeout (Default: "0") + Duration to keep accepting requests before Traefik initiates the graceful + shutdown procedure. + +--entrypoints..transport.respondingtimeouts.idletimeout (Default: "180") + IdleTimeout is the maximum amount duration an idle (keep-alive) connection will + remain idle before closing itself. If zero, no timeout is set. + +--entrypoints..transport.respondingtimeouts.readtimeout (Default: "0") + ReadTimeout is the maximum duration for reading the entire request, including + the body. If zero, no timeout is set. + +--entrypoints..transport.respondingtimeouts.writetimeout (Default: "0") + WriteTimeout is the maximum duration before timing out writes of the response. + If zero, no timeout is set. + +--global.checknewversion (Default: "true") + Periodically check if a new version has been released. + +--global.debug (Default: "false") + Enable debug mode. + +--global.sendanonymoususage + Periodically send anonymous usage statistics. If the option is not specified, it + will be enabled by default. + +--hostresolver (Default: "false") + Enable CNAME Flattening. + +--hostresolver.cnameflattening (Default: "false") + A flag to enable/disable CNAME flattening + +--hostresolver.resolvconfig (Default: "/etc/resolv.conf") + resolv.conf used for DNS resolving + +--hostresolver.resolvdepth (Default: "5") + The maximal depth of DNS recursive resolving + +--log.filepath (Default: "") + Traefik log file path. Stdout is used when omitted or empty. + +--log.format (Default: "common") + Traefik log format: json | common + +--log.level (Default: "ERROR") + Log level set to traefik logs. + +--metrics.datadog (Default: "false") + DataDog metrics exporter type. + +--metrics.datadog.address (Default: "localhost:8125") + DataDog's address. + +--metrics.datadog.pushinterval (Default: "10") + DataDog push interval. + +--metrics.influxdb (Default: "false") + InfluxDB metrics exporter type. + +--metrics.influxdb.address (Default: "localhost:8089") + InfluxDB address. + +--metrics.influxdb.database (Default: "") + InfluxDB database used when protocol is http. + +--metrics.influxdb.password (Default: "") + InfluxDB password (only with http). + +--metrics.influxdb.protocol (Default: "udp") + InfluxDB address protocol (udp or http). + +--metrics.influxdb.pushinterval (Default: "10") + InfluxDB push interval. 
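The `--entrypoints..*` options above use the new dotted key syntax, where the segment between the dots is the entry point name. As a rough illustration (the names "web" and "websecure", the ports, and the timeout value are placeholders), defining two entry points could look like:

```sh
# Hedged sketch: entry point names, ports, and the timeout value are placeholders.
traefik \
  --entrypoints.web.address=:80 \
  --entrypoints.websecure.address=:443 \
  --entrypoints.web.forwardedheaders.insecure=false \
  --entrypoints.web.transport.respondingtimeouts.readtimeout=42
```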
+ +--metrics.influxdb.retentionpolicy (Default: "") + InfluxDB retention policy used when protocol is http. + +--metrics.influxdb.username (Default: "") + InfluxDB username (only with http). + +--metrics.prometheus (Default: "false") + Prometheus metrics exporter type. + +--metrics.prometheus.buckets (Default: "0.100000, 0.300000, 1.200000, 5.000000") + Buckets for latency metrics. + +--metrics.prometheus.entrypoint (Default: "traefik") + EntryPoint. + +--metrics.prometheus.middlewares (Default: "") + Middlewares. + +--metrics.statsd (Default: "false") + StatsD metrics exporter type. + +--metrics.statsd.address (Default: "localhost:8125") + StatsD address. + +--metrics.statsd.pushinterval (Default: "10") + StatsD push interval. + +--ping (Default: "false") + Enable ping. + +--ping.entrypoint (Default: "traefik") + Ping entryPoint. + +--ping.middlewares (Default: "") + Middleware list. + +--providers.docker (Default: "false") + Enable Docker backend with default settings. + +--providers.docker.constraints (Default: "") + Filter services by constraint, matching with Traefik tags. + +--providers.docker.constraints[n].key (Default: "") + The provider label that will be matched against. In practice, it is always + 'tag'. + +--providers.docker.constraints[n].mustmatch (Default: "false") + Whether the matching operator is equals or not equals. + +--providers.docker.constraints[n].value (Default: "") + The value that will be matched against. + +--providers.docker.defaultrule (Default: "Host(`{{ normalize .Name }}`)") + Default rule. + +--providers.docker.endpoint (Default: "unix:///var/run/docker.sock") + Docker server endpoint. Can be a tcp or a unix socket endpoint. + +--providers.docker.exposedbydefault (Default: "true") + Expose containers by default. + +--providers.docker.network (Default: "") + Default Docker network used. + +--providers.docker.swarmmode (Default: "false") + Use Docker on Swarm Mode. + +--providers.docker.swarmmoderefreshseconds (Default: "15") + Polling interval for swarm mode. + +--providers.docker.tls.ca (Default: "") + TLS CA + +--providers.docker.tls.caoptional (Default: "false") + TLS CA.Optional + +--providers.docker.tls.cert (Default: "") + TLS cert + +--providers.docker.tls.insecureskipverify (Default: "false") + TLS insecure skip verify + +--providers.docker.tls.key (Default: "") + TLS key + +--providers.docker.usebindportip (Default: "false") + Use the ip address from the bound port, rather than from the inner network. + +--providers.docker.watch (Default: "true") + Watch provider. + +--providers.file (Default: "false") + Enable File backend with default settings. + +--providers.file.debugloggeneratedtemplate (Default: "false") + Enable debug logging of generated configuration template. + +--providers.file.directory (Default: "") + Load configuration from one or more .toml files in a directory. + +--providers.file.filename (Default: "") + Override default configuration template. For advanced users :) + +--providers.file.watch (Default: "true") + Watch provider. + +--providers.kubernetes (Default: "false") + Enable Kubernetes backend with default settings. + +--providers.kubernetes.certauthfilepath (Default: "") + Kubernetes certificate authority file path (not needed for in-cluster client). + +--providers.kubernetes.disablepasshostheaders (Default: "false") + Kubernetes disable PassHost Headers. + +--providers.kubernetes.endpoint (Default: "") + Kubernetes server endpoint (required for external cluster client). 
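As a quick sketch of the Docker provider options above used together with ping and Prometheus metrics: the endpoint shown is simply the documented default, while the modified rule template and the choice to disable exposure by default are illustrative assumptions.

```sh
# Hedged sketch: the endpoint is the documented default,
# the rule template and exposedbydefault value are illustrative.
traefik \
  --providers.docker \
  --providers.docker.endpoint=unix:///var/run/docker.sock \
  --providers.docker.exposedbydefault=false \
  --providers.docker.defaultrule='Host(`{{ normalize .Name }}.docker.localhost`)' \
  --ping \
  --metrics.prometheus
```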
+ +--providers.kubernetes.ingressclass (Default: "") + Value of kubernetes.io/ingress.class annotation to watch for. + +--providers.kubernetes.ingressendpoint.hostname (Default: "") + Hostname used for Kubernetes Ingress endpoints. + +--providers.kubernetes.ingressendpoint.ip (Default: "") + IP used for Kubernetes Ingress endpoints. + +--providers.kubernetes.ingressendpoint.publishedservice (Default: "") + Published Kubernetes Service to copy status from. + +--providers.kubernetes.labelselector (Default: "") + Kubernetes Ingress label selector to use. + +--providers.kubernetes.namespaces (Default: "") + Kubernetes namespaces. + +--providers.kubernetes.token (Default: "") + Kubernetes bearer token (not needed for in-cluster client). + +--providers.kubernetescrd (Default: "false") + Enable Kubernetes backend with default settings. + +--providers.kubernetescrd.certauthfilepath (Default: "") + Kubernetes certificate authority file path (not needed for in-cluster client). + +--providers.kubernetescrd.disablepasshostheaders (Default: "false") + Kubernetes disable PassHost Headers. + +--providers.kubernetescrd.endpoint (Default: "") + Kubernetes server endpoint (required for external cluster client). + +--providers.kubernetescrd.ingressclass (Default: "") + Value of kubernetes.io/ingress.class annotation to watch for. + +--providers.kubernetescrd.labelselector (Default: "") + Kubernetes label selector to use. + +--providers.kubernetescrd.namespaces (Default: "") + Kubernetes namespaces. + +--providers.kubernetescrd.token (Default: "") + Kubernetes bearer token (not needed for in-cluster client). + +--providers.marathon (Default: "false") + Enable Marathon backend with default settings. + +--providers.marathon.basic.httpbasicauthuser (Default: "") + Basic authentication User. + +--providers.marathon.basic.httpbasicpassword (Default: "") + Basic authentication Password. + +--providers.marathon.constraints (Default: "") + Filter services by constraint, matching with Traefik tags. + +--providers.marathon.constraints[n].key (Default: "") + The provider label that will be matched against. In practice, it is always + 'tag'. + +--providers.marathon.constraints[n].mustmatch (Default: "false") + Whether the matching operator is equals or not equals. + +--providers.marathon.constraints[n].value (Default: "") + The value that will be matched against. + +--providers.marathon.dcostoken (Default: "") + DCOSToken for DCOS environment, This will override the Authorization header. + +--providers.marathon.defaultrule (Default: "Host(`{{ normalize .Name }}`)") + Default rule. + +--providers.marathon.dialertimeout (Default: "5") + Set a dialer timeout for Marathon. + +--providers.marathon.endpoint (Default: "http://127.0.0.1:8080") + Marathon server endpoint. You can also specify multiple endpoint for Marathon. + +--providers.marathon.exposedbydefault (Default: "true") + Expose Marathon apps by default. + +--providers.marathon.filtermarathonconstraints (Default: "false") + Enable use of Marathon constraints in constraint filtering. + +--providers.marathon.forcetaskhostname (Default: "false") + Force to use the task's hostname. + +--providers.marathon.keepalive (Default: "10") + Set a TCP Keep Alive time. + +--providers.marathon.respectreadinesschecks (Default: "false") + Filter out tasks with non-successful readiness checks during deployments. + +--providers.marathon.responseheadertimeout (Default: "60") + Set a response header timeout for Marathon. 
+ +--providers.marathon.tls.ca (Default: "") + TLS CA + +--providers.marathon.tls.caoptional (Default: "false") + TLS CA.Optional + +--providers.marathon.tls.cert (Default: "") + TLS cert + +--providers.marathon.tls.insecureskipverify (Default: "false") + TLS insecure skip verify + +--providers.marathon.tls.key (Default: "") + TLS key + +--providers.marathon.tlshandshaketimeout (Default: "5") + Set a TLS handshake timeout for Marathon. + +--providers.marathon.trace (Default: "false") + Display additional provider logs. + +--providers.marathon.watch (Default: "true") + Watch provider. + +--providers.providersthrottleduration (Default: "2") + Backends throttle duration: minimum duration between 2 events from providers + before applying a new configuration. It avoids unnecessary reloads if multiples + events are sent in a short amount of time. + +--providers.rancher (Default: "false") + Enable Rancher backend with default settings. + +--providers.rancher.constraints (Default: "") + Filter services by constraint, matching with Traefik tags. + +--providers.rancher.constraints[n].key (Default: "") + The provider label that will be matched against. In practice, it is always + 'tag'. + +--providers.rancher.constraints[n].mustmatch (Default: "false") + Whether the matching operator is equals or not equals. + +--providers.rancher.constraints[n].value (Default: "") + The value that will be matched against. + +--providers.rancher.defaultrule (Default: "Host(`{{ normalize .Name }}`)") + Default rule. + +--providers.rancher.enableservicehealthfilter (Default: "true") + Filter services with unhealthy states and inactive states. + +--providers.rancher.exposedbydefault (Default: "true") + Expose containers by default. + +--providers.rancher.intervalpoll (Default: "false") + Poll the Rancher metadata service every 'rancher.refreshseconds' (less + accurate). + +--providers.rancher.prefix (Default: "latest") + Prefix used for accessing the Rancher metadata service. + +--providers.rancher.refreshseconds (Default: "15") + Defines the polling interval in seconds. + +--providers.rancher.watch (Default: "true") + Watch provider. + +--providers.rest (Default: "false") + Enable Rest backend with default settings. + +--providers.rest.entrypoint (Default: "traefik") + EntryPoint. + +--serverstransport.forwardingtimeouts.dialtimeout (Default: "30") + The amount of time to wait until a connection to a backend server can be + established. If zero, no timeout exists. + +--serverstransport.forwardingtimeouts.responseheadertimeout (Default: "0") + The amount of time to wait for a server's response headers after fully writing + the request (including its body, if any). If zero, no timeout exists. + +--serverstransport.insecureskipverify (Default: "false") + Disable SSL certificate verification. + +--serverstransport.maxidleconnsperhost (Default: "200") + If non-zero, controls the maximum idle (keep-alive) to keep per-host. If zero, + DefaultMaxIdleConnsPerHost is used + +--serverstransport.rootcas (Default: "") + Add cert file for self-signed certificate. + +--tracing (Default: "false") + OpenTracing configuration. + +--tracing.backend (Default: "jaeger") + Selects the tracking backend ('jaeger','zipkin','datadog','instana'). + +--tracing.datadog (Default: "false") + Settings for DataDog. + +--tracing.datadog.bagageprefixheadername (Default: "") + Specifies the header name prefix that will be used to store baggage items in a + map. + +--tracing.datadog.debug (Default: "false") + Enable DataDog debug. 
+ +--tracing.datadog.globaltag (Default: "") + Key:Value tag to be set on all the spans. + +--tracing.datadog.localagenthostport (Default: "localhost:8126") + Set datadog-agent's host:port that the reporter will used. + +--tracing.datadog.parentidheadername (Default: "") + Specifies the header name that will be used to store the parent ID. + +--tracing.datadog.prioritysampling (Default: "false") + Enable priority sampling. When using distributed tracing, this option must be + enabled in order to get all the parts of a distributed trace sampled. + +--tracing.datadog.samplingpriorityheadername (Default: "") + Specifies the header name that will be used to store the sampling priority. + +--tracing.datadog.traceidheadername (Default: "") + Specifies the header name that will be used to store the trace ID. + +--tracing.haystack (Default: "false") + Settings for Haystack. + +--tracing.haystack.baggageprefixheadername (Default: "") + specifies the header name prefix that will be used to store baggage items in a + map. + +--tracing.haystack.globaltag (Default: "") + Key:Value tag to be set on all the spans. + +--tracing.haystack.localagenthost (Default: "LocalAgentHost") + Set haystack-agent's host that the reporter will used. + +--tracing.haystack.localagentport (Default: "35000") + Set haystack-agent's port that the reporter will used. + +--tracing.haystack.parentidheadername (Default: "") + Specifies the header name that will be used to store the parent ID. + +--tracing.haystack.spanidheadername (Default: "") + Specifies the header name that will be used to store the span ID. + +--tracing.haystack.traceidheadername (Default: "") + Specifies the header name that will be used to store the trace ID. + +--tracing.instana (Default: "false") + Settings for Instana. + +--tracing.instana.localagenthost (Default: "localhost") + Set instana-agent's host that the reporter will used. + +--tracing.instana.localagentport (Default: "42699") + Set instana-agent's port that the reporter will used. + +--tracing.instana.loglevel (Default: "info") + Set instana-agent's log level. ('error','warn','info','debug') + +--tracing.jaeger (Default: "false") + Settings for jaeger. + +--tracing.jaeger.gen128bit (Default: "false") + Generate 128 bit span IDs. + +--tracing.jaeger.localagenthostport (Default: "127.0.0.1:6831") + Set jaeger-agent's host:port that the reporter will used. + +--tracing.jaeger.propagation (Default: "jaeger") + Which propgation format to use (jaeger/b3). + +--tracing.jaeger.samplingparam (Default: "1.000000") + Set the sampling parameter. + +--tracing.jaeger.samplingserverurl (Default: "http://localhost:5778/sampling") + Set the sampling server url. + +--tracing.jaeger.samplingtype (Default: "const") + Set the sampling type. + +--tracing.jaeger.tracecontextheadername (Default: "uber-trace-id") + Set the header to use for the trace-id. + +--tracing.servicename (Default: "traefik") + Set the name for this service. + +--tracing.spannamelimit (Default: "0") + Set the maximum character limit for Span names (default 0 = no limit). + +--tracing.zipkin (Default: "false") + Settings for zipkin. + +--tracing.zipkin.debug (Default: "false") + Enable Zipkin debug. + +--tracing.zipkin.httpendpoint (Default: "http://localhost:9411/api/v1/spans") + HTTP Endpoint to report traces to. + +--tracing.zipkin.id128bit (Default: "true") + Use Zipkin 128 bit root span IDs. + +--tracing.zipkin.samespan (Default: "false") + Use Zipkin SameSpan RPC style traces. 
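For the tracing options, a minimal sketch of enabling the Jaeger backend follows. The agent address, sampling settings, and service name shown here are placeholders (they happen to match the documented defaults), not a recommended configuration.

```sh
# Hedged sketch: agent address, sampling values, and service name are placeholders.
traefik \
  --tracing \
  --tracing.backend=jaeger \
  --tracing.servicename=traefik \
  --tracing.jaeger.localagenthostport=127.0.0.1:6831 \
  --tracing.jaeger.samplingtype=const \
  --tracing.jaeger.samplingparam=1.0
```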
+ +--tracing.zipkin.samplerate (Default: "1.000000") + The rate between 0.0 and 1.0 of requests to trace. diff --git a/docs/content/reference/static-configuration/env.md b/docs/content/reference/static-configuration/env.md new file mode 100644 index 000000000..c61eff1ed --- /dev/null +++ b/docs/content/reference/static-configuration/env.md @@ -0,0 +1,616 @@ +# Static Configuration: Environment variables + +`TRAEFIK_ACCESSLOG`: +Access log settings. (Default: ```false```) + +`TRAEFIK_ACCESSLOG_BUFFERINGSIZE`: +Number of access log lines to process in a buffered way. (Default: ```0```) + +`TRAEFIK_ACCESSLOG_FIELDS_DEFAULTMODE`: +Default mode for fields: keep | drop (Default: ```keep```) + +`TRAEFIK_ACCESSLOG_FIELDS_HEADERS_DEFAULTMODE`: +Default mode for fields: keep | drop | redact (Default: ```keep```) + +`TRAEFIK_ACCESSLOG_FIELDS_HEADERS_NAMES_`: +Override mode for headers + +`TRAEFIK_ACCESSLOG_FIELDS_NAMES_`: +Override mode for fields + +`TRAEFIK_ACCESSLOG_FILEPATH`: +Access log file path. Stdout is used when omitted or empty. + +`TRAEFIK_ACCESSLOG_FILTERS_MINDURATION`: +Keep access logs when request took longer than the specified duration. (Default: ```0```) + +`TRAEFIK_ACCESSLOG_FILTERS_RETRYATTEMPTS`: +Keep access logs when at least one retry happened. (Default: ```false```) + +`TRAEFIK_ACCESSLOG_FILTERS_STATUSCODES`: +Keep access logs with status codes in the specified range. + +`TRAEFIK_ACCESSLOG_FORMAT`: +Access log format: json | common (Default: ```common```) + +`TRAEFIK_ACME_ACMELOGGING`: +Enable debug logging of ACME actions. (Default: ```false```) + +`TRAEFIK_ACME_CASERVER`: +CA server to use. (Default: ```https://acme-v02.api.letsencrypt.org/directory```) + +`TRAEFIK_ACME_DNSCHALLENGE`: +Activate DNS-01 Challenge. (Default: ```false```) + +`TRAEFIK_ACME_DNSCHALLENGE_DELAYBEFORECHECK`: +Assume DNS propagates after a delay in seconds rather than finding and querying nameservers. (Default: ```0```) + +`TRAEFIK_ACME_DNSCHALLENGE_DISABLEPROPAGATIONCHECK`: +Disable the DNS propagation checks before notifying ACME that the DNS challenge is ready. [not recommended] (Default: ```false```) + +`TRAEFIK_ACME_DNSCHALLENGE_PROVIDER`: +Use a DNS-01 based challenge provider rather than HTTPS. + +`TRAEFIK_ACME_DNSCHALLENGE_RESOLVERS`: +Use following DNS servers to resolve the FQDN authority. + +`TRAEFIK_ACME_DOMAINS`: +The list of domains for which certificates are generated on startup. Wildcard domains only accepted with DNSChallenge. + +`TRAEFIK_ACME_DOMAINS[n]_MAIN`: +Default subject name. + +`TRAEFIK_ACME_DOMAINS[n]_SANS`: +Subject alternative names. + +`TRAEFIK_ACME_EMAIL`: +Email address used for registration. + +`TRAEFIK_ACME_ENTRYPOINT`: +EntryPoint to use. + +`TRAEFIK_ACME_HTTPCHALLENGE`: +Activate HTTP-01 Challenge. (Default: ```false```) + +`TRAEFIK_ACME_HTTPCHALLENGE_ENTRYPOINT`: +HTTP challenge EntryPoint + +`TRAEFIK_ACME_KEYTYPE`: +KeyType used for generating certificate private key. Allow value 'EC256', 'EC384', 'RSA2048', 'RSA4096', 'RSA8192'. (Default: ```RSA4096```) + +`TRAEFIK_ACME_ONHOSTRULE`: +Enable certificate generation on router Host rules. (Default: ```false```) + +`TRAEFIK_ACME_STORAGE`: +Storage to use. (Default: ```acme.json```) + +`TRAEFIK_ACME_TLSCHALLENGE`: +Activate TLS-ALPN-01 Challenge. (Default: ```true```) + +`TRAEFIK_API`: +Enable api/dashboard. (Default: ```false```) + +`TRAEFIK_API_DASHBOARD`: +Activate dashboard. (Default: ```true```) + +`TRAEFIK_API_ENTRYPOINT`: +EntryPoint. (Default: ```traefik```) + +`TRAEFIK_API_MIDDLEWARES`: +Middleware list. 
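The environment variables above mirror the CLI flags: a `TRAEFIK_` prefix with dots replaced by underscores. A minimal sketch, with placeholder values and only variables listed in this reference, could look like:

```sh
# Hedged sketch: each variable mirrors a CLI flag with a TRAEFIK_ prefix
# and underscores instead of dots; the values are placeholders.
export TRAEFIK_ACCESSLOG=true
export TRAEFIK_API=true
export TRAEFIK_ACME_EMAIL=admin@example.com
export TRAEFIK_ACME_STORAGE=/etc/traefik/acme.json
export TRAEFIK_ACME_TLSCHALLENGE=true
traefik
```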
+ +`TRAEFIK_API_STATISTICS`: +Enable more detailed statistics. (Default: ```false```) + +`TRAEFIK_API_STATISTICS_RECENTERRORS`: +Number of recent errors logged. (Default: ```10```) + +`TRAEFIK_CONFIGFILE`: +Configuration file to use. If specified all other flags are ignored. (Default: "") + +`TRAEFIK_ENTRYPOINTS_`: +Entry points definition. (Default: ```false```) + +`TRAEFIK_ENTRYPOINTS__ADDRESS`: +Entry point address. + +`TRAEFIK_ENTRYPOINTS__FORWARDEDHEADERS_INSECURE`: +Trust all forwarded headers. (Default: ```false```) + +`TRAEFIK_ENTRYPOINTS__FORWARDEDHEADERS_TRUSTEDIPS`: +Trust only forwarded headers from selected IPs. + +`TRAEFIK_ENTRYPOINTS__PROXYPROTOCOL`: +Proxy-Protocol configuration. (Default: ```false```) + +`TRAEFIK_ENTRYPOINTS__PROXYPROTOCOL_INSECURE`: +Trust all. (Default: ```false```) + +`TRAEFIK_ENTRYPOINTS__PROXYPROTOCOL_TRUSTEDIPS`: +Trust only selected IPs. + +`TRAEFIK_ENTRYPOINTS__TRANSPORT_LIFECYCLE_GRACETIMEOUT`: +Duration to give active requests a chance to finish before Traefik stops. (Default: ```10```) + +`TRAEFIK_ENTRYPOINTS__TRANSPORT_LIFECYCLE_REQUESTACCEPTGRACETIMEOUT`: +Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure. (Default: ```0```) + +`TRAEFIK_ENTRYPOINTS__TRANSPORT_RESPONDINGTIMEOUTS_IDLETIMEOUT`: +IdleTimeout is the maximum amount duration an idle (keep-alive) connection will remain idle before closing itself. If zero, no timeout is set. (Default: ```180```) + +`TRAEFIK_ENTRYPOINTS__TRANSPORT_RESPONDINGTIMEOUTS_READTIMEOUT`: +ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set. (Default: ```0```) + +`TRAEFIK_ENTRYPOINTS__TRANSPORT_RESPONDINGTIMEOUTS_WRITETIMEOUT`: +WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set. (Default: ```0```) + +`TRAEFIK_GLOBAL_CHECKNEWVERSION`: +Periodically check if a new version has been released. (Default: ```false```) + +`TRAEFIK_GLOBAL_DEBUG`: +Enable debug mode. (Default: ```false```) + +`TRAEFIK_GLOBAL_SENDANONYMOUSUSAGE`: +Periodically send anonymous usage statistics. If the option is not specified, it will be enabled by default. + +`TRAEFIK_HOSTRESOLVER`: +Enable CNAME Flattening. (Default: ```false```) + +`TRAEFIK_HOSTRESOLVER_CNAMEFLATTENING`: +A flag to enable/disable CNAME flattening (Default: ```false```) + +`TRAEFIK_HOSTRESOLVER_RESOLVCONFIG`: +resolv.conf used for DNS resolving (Default: ```/etc/resolv.conf```) + +`TRAEFIK_HOSTRESOLVER_RESOLVDEPTH`: +The maximal depth of DNS recursive resolving (Default: ```5```) + +`TRAEFIK_LOG_FILEPATH`: +Traefik log file path. Stdout is used when omitted or empty. + +`TRAEFIK_LOG_FORMAT`: +Traefik log format: json | common (Default: ```common```) + +`TRAEFIK_LOG_LEVEL`: +Log level set to traefik logs. (Default: ```ERROR```) + +`TRAEFIK_METRICS_DATADOG`: +DataDog metrics exporter type. (Default: ```false```) + +`TRAEFIK_METRICS_DATADOG_ADDRESS`: +DataDog's address. (Default: ```localhost:8125```) + +`TRAEFIK_METRICS_DATADOG_PUSHINTERVAL`: +DataDog push interval. (Default: ```10```) + +`TRAEFIK_METRICS_INFLUXDB`: +InfluxDB metrics exporter type. (Default: ```false```) + +`TRAEFIK_METRICS_INFLUXDB_ADDRESS`: +InfluxDB address. (Default: ```localhost:8089```) + +`TRAEFIK_METRICS_INFLUXDB_DATABASE`: +InfluxDB database used when protocol is http. + +`TRAEFIK_METRICS_INFLUXDB_PASSWORD`: +InfluxDB password (only with http). + +`TRAEFIK_METRICS_INFLUXDB_PROTOCOL`: +InfluxDB address protocol (udp or http). 
(Default: ```udp```) + +`TRAEFIK_METRICS_INFLUXDB_PUSHINTERVAL`: +InfluxDB push interval. (Default: ```10```) + +`TRAEFIK_METRICS_INFLUXDB_RETENTIONPOLICY`: +InfluxDB retention policy used when protocol is http. + +`TRAEFIK_METRICS_INFLUXDB_USERNAME`: +InfluxDB username (only with http). + +`TRAEFIK_METRICS_PROMETHEUS`: +Prometheus metrics exporter type. (Default: ```false```) + +`TRAEFIK_METRICS_PROMETHEUS_BUCKETS`: +Buckets for latency metrics. (Default: ```0.100000, 0.300000, 1.200000, 5.000000```) + +`TRAEFIK_METRICS_PROMETHEUS_ENTRYPOINT`: +EntryPoint. (Default: ```traefik```) + +`TRAEFIK_METRICS_PROMETHEUS_MIDDLEWARES`: +Middlewares. + +`TRAEFIK_METRICS_STATSD`: +StatsD metrics exporter type. (Default: ```false```) + +`TRAEFIK_METRICS_STATSD_ADDRESS`: +StatsD address. (Default: ```localhost:8125```) + +`TRAEFIK_METRICS_STATSD_PUSHINTERVAL`: +StatsD push interval. (Default: ```10```) + +`TRAEFIK_PING`: +Enable ping. (Default: ```false```) + +`TRAEFIK_PING_ENTRYPOINT`: +Ping entryPoint. (Default: ```traefik```) + +`TRAEFIK_PING_MIDDLEWARES`: +Middleware list. + +`TRAEFIK_PROVIDERS_DOCKER`: +Enable Docker backend with default settings. (Default: ```false```) + +`TRAEFIK_PROVIDERS_DOCKER_CONSTRAINTS`: +Filter services by constraint, matching with Traefik tags. + +`TRAEFIK_PROVIDERS_DOCKER_CONSTRAINTS[n]_KEY`: +The provider label that will be matched against. In practice, it is always 'tag'. + +`TRAEFIK_PROVIDERS_DOCKER_CONSTRAINTS[n]_MUSTMATCH`: +Whether the matching operator is equals or not equals. (Default: ```false```) + +`TRAEFIK_PROVIDERS_DOCKER_CONSTRAINTS[n]_VALUE`: +The value that will be matched against. + +`TRAEFIK_PROVIDERS_DOCKER_DEFAULTRULE`: +Default rule. (Default: ```Host(`{{ normalize .Name }}`)```) + +`TRAEFIK_PROVIDERS_DOCKER_ENDPOINT`: +Docker server endpoint. Can be a tcp or a unix socket endpoint. (Default: ```unix:///var/run/docker.sock```) + +`TRAEFIK_PROVIDERS_DOCKER_EXPOSEDBYDEFAULT`: +Expose containers by default. (Default: ```true```) + +`TRAEFIK_PROVIDERS_DOCKER_NETWORK`: +Default Docker network used. + +`TRAEFIK_PROVIDERS_DOCKER_SWARMMODE`: +Use Docker on Swarm Mode. (Default: ```false```) + +`TRAEFIK_PROVIDERS_DOCKER_SWARMMODEREFRESHSECONDS`: +Polling interval for swarm mode. (Default: ```15```) + +`TRAEFIK_PROVIDERS_DOCKER_TLS_CA`: +TLS CA + +`TRAEFIK_PROVIDERS_DOCKER_TLS_CAOPTIONAL`: +TLS CA.Optional (Default: ```false```) + +`TRAEFIK_PROVIDERS_DOCKER_TLS_CERT`: +TLS cert + +`TRAEFIK_PROVIDERS_DOCKER_TLS_INSECURESKIPVERIFY`: +TLS insecure skip verify (Default: ```false```) + +`TRAEFIK_PROVIDERS_DOCKER_TLS_KEY`: +TLS key + +`TRAEFIK_PROVIDERS_DOCKER_USEBINDPORTIP`: +Use the ip address from the bound port, rather than from the inner network. (Default: ```false```) + +`TRAEFIK_PROVIDERS_DOCKER_WATCH`: +Watch provider. (Default: ```true```) + +`TRAEFIK_PROVIDERS_FILE`: +Enable File backend with default settings. (Default: ```false```) + +`TRAEFIK_PROVIDERS_FILE_DEBUGLOGGENERATEDTEMPLATE`: +Enable debug logging of generated configuration template. (Default: ```false```) + +`TRAEFIK_PROVIDERS_FILE_DIRECTORY`: +Load configuration from one or more .toml files in a directory. + +`TRAEFIK_PROVIDERS_FILE_FILENAME`: +Override default configuration template. For advanced users :) + +`TRAEFIK_PROVIDERS_FILE_WATCH`: +Watch provider. (Default: ```true```) + +`TRAEFIK_PROVIDERS_KUBERNETES`: +Enable Kubernetes backend with default settings. (Default: ```false```) + +`TRAEFIK_PROVIDERS_KUBERNETESCRD`: +Enable Kubernetes backend with default settings. 
(Default: ```false```) + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_CERTAUTHFILEPATH`: +Kubernetes certificate authority file path (not needed for in-cluster client). + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_DISABLEPASSHOSTHEADERS`: +Kubernetes disable PassHost Headers. (Default: ```false```) + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_ENDPOINT`: +Kubernetes server endpoint (required for external cluster client). + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_INGRESSCLASS`: +Value of kubernetes.io/ingress.class annotation to watch for. + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_LABELSELECTOR`: +Kubernetes label selector to use. + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_NAMESPACES`: +Kubernetes namespaces. + +`TRAEFIK_PROVIDERS_KUBERNETESCRD_TOKEN`: +Kubernetes bearer token (not needed for in-cluster client). + +`TRAEFIK_PROVIDERS_KUBERNETES_CERTAUTHFILEPATH`: +Kubernetes certificate authority file path (not needed for in-cluster client). + +`TRAEFIK_PROVIDERS_KUBERNETES_DISABLEPASSHOSTHEADERS`: +Kubernetes disable PassHost Headers. (Default: ```false```) + +`TRAEFIK_PROVIDERS_KUBERNETES_ENDPOINT`: +Kubernetes server endpoint (required for external cluster client). + +`TRAEFIK_PROVIDERS_KUBERNETES_INGRESSCLASS`: +Value of kubernetes.io/ingress.class annotation to watch for. + +`TRAEFIK_PROVIDERS_KUBERNETES_INGRESSENDPOINT_HOSTNAME`: +Hostname used for Kubernetes Ingress endpoints. + +`TRAEFIK_PROVIDERS_KUBERNETES_INGRESSENDPOINT_IP`: +IP used for Kubernetes Ingress endpoints. + +`TRAEFIK_PROVIDERS_KUBERNETES_INGRESSENDPOINT_PUBLISHEDSERVICE`: +Published Kubernetes Service to copy status from. + +`TRAEFIK_PROVIDERS_KUBERNETES_LABELSELECTOR`: +Kubernetes Ingress label selector to use. + +`TRAEFIK_PROVIDERS_KUBERNETES_NAMESPACES`: +Kubernetes namespaces. + +`TRAEFIK_PROVIDERS_KUBERNETES_TOKEN`: +Kubernetes bearer token (not needed for in-cluster client). + +`TRAEFIK_PROVIDERS_MARATHON`: +Enable Marathon backend with default settings. (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_BASIC_HTTPBASICAUTHUSER`: +Basic authentication User. + +`TRAEFIK_PROVIDERS_MARATHON_BASIC_HTTPBASICPASSWORD`: +Basic authentication Password. + +`TRAEFIK_PROVIDERS_MARATHON_CONSTRAINTS`: +Filter services by constraint, matching with Traefik tags. + +`TRAEFIK_PROVIDERS_MARATHON_CONSTRAINTS[n]_KEY`: +The provider label that will be matched against. In practice, it is always 'tag'. + +`TRAEFIK_PROVIDERS_MARATHON_CONSTRAINTS[n]_MUSTMATCH`: +Whether the matching operator is equals or not equals. (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_CONSTRAINTS[n]_VALUE`: +The value that will be matched against. + +`TRAEFIK_PROVIDERS_MARATHON_DCOSTOKEN`: +DCOSToken for DCOS environment, This will override the Authorization header. + +`TRAEFIK_PROVIDERS_MARATHON_DEFAULTRULE`: +Default rule. (Default: ```Host(`{{ normalize .Name }}`)```) + +`TRAEFIK_PROVIDERS_MARATHON_DIALERTIMEOUT`: +Set a dialer timeout for Marathon. (Default: ```5```) + +`TRAEFIK_PROVIDERS_MARATHON_ENDPOINT`: +Marathon server endpoint. You can also specify multiple endpoint for Marathon. (Default: ```http://127.0.0.1:8080```) + +`TRAEFIK_PROVIDERS_MARATHON_EXPOSEDBYDEFAULT`: +Expose Marathon apps by default. (Default: ```true```) + +`TRAEFIK_PROVIDERS_MARATHON_FILTERMARATHONCONSTRAINTS`: +Enable use of Marathon constraints in constraint filtering. (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_FORCETASKHOSTNAME`: +Force to use the task's hostname. (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_KEEPALIVE`: +Set a TCP Keep Alive time. 
(Default: ```10```) + +`TRAEFIK_PROVIDERS_MARATHON_RESPECTREADINESSCHECKS`: +Filter out tasks with non-successful readiness checks during deployments. (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_RESPONSEHEADERTIMEOUT`: +Set a response header timeout for Marathon. (Default: ```60```) + +`TRAEFIK_PROVIDERS_MARATHON_TLSHANDSHAKETIMEOUT`: +Set a TLS handshake timeout for Marathon. (Default: ```5```) + +`TRAEFIK_PROVIDERS_MARATHON_TLS_CA`: +TLS CA + +`TRAEFIK_PROVIDERS_MARATHON_TLS_CAOPTIONAL`: +TLS CA.Optional (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_TLS_CERT`: +TLS cert + +`TRAEFIK_PROVIDERS_MARATHON_TLS_INSECURESKIPVERIFY`: +TLS insecure skip verify (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_TLS_KEY`: +TLS key + +`TRAEFIK_PROVIDERS_MARATHON_TRACE`: +Display additional provider logs. (Default: ```false```) + +`TRAEFIK_PROVIDERS_MARATHON_WATCH`: +Watch provider. (Default: ```true```) + +`TRAEFIK_PROVIDERS_PROVIDERSTHROTTLEDURATION`: +Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiples events are sent in a short amount of time. (Default: ```0```) + +`TRAEFIK_PROVIDERS_RANCHER`: +Enable Rancher backend with default settings. (Default: ```false```) + +`TRAEFIK_PROVIDERS_RANCHER_CONSTRAINTS`: +Filter services by constraint, matching with Traefik tags. + +`TRAEFIK_PROVIDERS_RANCHER_CONSTRAINTS[n]_KEY`: +The provider label that will be matched against. In practice, it is always 'tag'. + +`TRAEFIK_PROVIDERS_RANCHER_CONSTRAINTS[n]_MUSTMATCH`: +Whether the matching operator is equals or not equals. (Default: ```false```) + +`TRAEFIK_PROVIDERS_RANCHER_CONSTRAINTS[n]_VALUE`: +The value that will be matched against. + +`TRAEFIK_PROVIDERS_RANCHER_DEFAULTRULE`: +Default rule. (Default: ```Host(`{{ normalize .Name }}`)```) + +`TRAEFIK_PROVIDERS_RANCHER_ENABLESERVICEHEALTHFILTER`: +Filter services with unhealthy states and inactive states. (Default: ```true```) + +`TRAEFIK_PROVIDERS_RANCHER_EXPOSEDBYDEFAULT`: +Expose containers by default. (Default: ```true```) + +`TRAEFIK_PROVIDERS_RANCHER_INTERVALPOLL`: +Poll the Rancher metadata service every 'rancher.refreshseconds' (less accurate). (Default: ```false```) + +`TRAEFIK_PROVIDERS_RANCHER_PREFIX`: +Prefix used for accessing the Rancher metadata service. (Default: ```latest```) + +`TRAEFIK_PROVIDERS_RANCHER_REFRESHSECONDS`: +Defines the polling interval in seconds. (Default: ```15```) + +`TRAEFIK_PROVIDERS_RANCHER_WATCH`: +Watch provider. (Default: ```true```) + +`TRAEFIK_PROVIDERS_REST`: +Enable Rest backend with default settings. (Default: ```false```) + +`TRAEFIK_PROVIDERS_REST_ENTRYPOINT`: +EntryPoint. (Default: ```traefik```) + +`TRAEFIK_SERVERSTRANSPORT_FORWARDINGTIMEOUTS_DIALTIMEOUT`: +The amount of time to wait until a connection to a backend server can be established. If zero, no timeout exists. (Default: ```30```) + +`TRAEFIK_SERVERSTRANSPORT_FORWARDINGTIMEOUTS_RESPONSEHEADERTIMEOUT`: +The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists. (Default: ```0```) + +`TRAEFIK_SERVERSTRANSPORT_INSECURESKIPVERIFY`: +Disable SSL certificate verification. (Default: ```false```) + +`TRAEFIK_SERVERSTRANSPORT_MAXIDLECONNSPERHOST`: +If non-zero, controls the maximum idle (keep-alive) to keep per-host. 
If zero, DefaultMaxIdleConnsPerHost is used (Default: ```0```) + +`TRAEFIK_SERVERSTRANSPORT_ROOTCAS`: +Add cert file for self-signed certificate. + +`TRAEFIK_TRACING`: +OpenTracing configuration. (Default: ```false```) + +`TRAEFIK_TRACING_BACKEND`: +Selects the tracking backend ('jaeger','zipkin','datadog','instana'). (Default: ```jaeger```) + +`TRAEFIK_TRACING_DATADOG`: +Settings for DataDog. (Default: ```false```) + +`TRAEFIK_TRACING_DATADOG_BAGAGEPREFIXHEADERNAME`: +Specifies the header name prefix that will be used to store baggage items in a map. + +`TRAEFIK_TRACING_DATADOG_DEBUG`: +Enable DataDog debug. (Default: ```false```) + +`TRAEFIK_TRACING_DATADOG_GLOBALTAG`: +Key:Value tag to be set on all the spans. + +`TRAEFIK_TRACING_DATADOG_LOCALAGENTHOSTPORT`: +Set datadog-agent's host:port that the reporter will used. (Default: ```localhost:8126```) + +`TRAEFIK_TRACING_DATADOG_PARENTIDHEADERNAME`: +Specifies the header name that will be used to store the parent ID. + +`TRAEFIK_TRACING_DATADOG_PRIORITYSAMPLING`: +Enable priority sampling. When using distributed tracing, this option must be enabled in order to get all the parts of a distributed trace sampled. (Default: ```false```) + +`TRAEFIK_TRACING_DATADOG_SAMPLINGPRIORITYHEADERNAME`: +Specifies the header name that will be used to store the sampling priority. + +`TRAEFIK_TRACING_DATADOG_TRACEIDHEADERNAME`: +Specifies the header name that will be used to store the trace ID. + +`TRAEFIK_TRACING_HAYSTACK`: +Settings for Haystack. (Default: ```false```) + +`TRAEFIK_TRACING_HAYSTACK_BAGGAGEPREFIXHEADERNAME`: +specifies the header name prefix that will be used to store baggage items in a map. + +`TRAEFIK_TRACING_HAYSTACK_GLOBALTAG`: +Key:Value tag to be set on all the spans. + +`TRAEFIK_TRACING_HAYSTACK_LOCALAGENTHOST`: +Set haystack-agent's host that the reporter will used. (Default: ```LocalAgentHost```) + +`TRAEFIK_TRACING_HAYSTACK_LOCALAGENTPORT`: +Set haystack-agent's port that the reporter will used. (Default: ```35000```) + +`TRAEFIK_TRACING_HAYSTACK_PARENTIDHEADERNAME`: +Specifies the header name that will be used to store the parent ID. + +`TRAEFIK_TRACING_HAYSTACK_SPANIDHEADERNAME`: +Specifies the header name that will be used to store the span ID. + +`TRAEFIK_TRACING_HAYSTACK_TRACEIDHEADERNAME`: +Specifies the header name that will be used to store the trace ID. + +`TRAEFIK_TRACING_INSTANA`: +Settings for Instana. (Default: ```false```) + +`TRAEFIK_TRACING_INSTANA_LOCALAGENTHOST`: +Set instana-agent's host that the reporter will used. (Default: ```localhost```) + +`TRAEFIK_TRACING_INSTANA_LOCALAGENTPORT`: +Set instana-agent's port that the reporter will used. (Default: ```42699```) + +`TRAEFIK_TRACING_INSTANA_LOGLEVEL`: +Set instana-agent's log level. ('error','warn','info','debug') (Default: ```info```) + +`TRAEFIK_TRACING_JAEGER`: +Settings for jaeger. (Default: ```false```) + +`TRAEFIK_TRACING_JAEGER_GEN128BIT`: +Generate 128 bit span IDs. (Default: ```false```) + +`TRAEFIK_TRACING_JAEGER_LOCALAGENTHOSTPORT`: +Set jaeger-agent's host:port that the reporter will used. (Default: ```127.0.0.1:6831```) + +`TRAEFIK_TRACING_JAEGER_PROPAGATION`: +Which propgation format to use (jaeger/b3). (Default: ```jaeger```) + +`TRAEFIK_TRACING_JAEGER_SAMPLINGPARAM`: +Set the sampling parameter. (Default: ```1.000000```) + +`TRAEFIK_TRACING_JAEGER_SAMPLINGSERVERURL`: +Set the sampling server url. (Default: ```http://localhost:5778/sampling```) + +`TRAEFIK_TRACING_JAEGER_SAMPLINGTYPE`: +Set the sampling type. 
(Default: ```const```) + +`TRAEFIK_TRACING_JAEGER_TRACECONTEXTHEADERNAME`: +Set the header to use for the trace-id. (Default: ```uber-trace-id```) + +`TRAEFIK_TRACING_SERVICENAME`: +Set the name for this service. (Default: ```traefik```) + +`TRAEFIK_TRACING_SPANNAMELIMIT`: +Set the maximum character limit for Span names (default 0 = no limit). (Default: ```0```) + +`TRAEFIK_TRACING_ZIPKIN`: +Settings for zipkin. (Default: ```false```) + +`TRAEFIK_TRACING_ZIPKIN_DEBUG`: +Enable Zipkin debug. (Default: ```false```) + +`TRAEFIK_TRACING_ZIPKIN_HTTPENDPOINT`: +HTTP Endpoint to report traces to. (Default: ```http://localhost:9411/api/v1/spans```) + +`TRAEFIK_TRACING_ZIPKIN_ID128BIT`: +Use Zipkin 128 bit root span IDs. (Default: ```true```) + +`TRAEFIK_TRACING_ZIPKIN_SAMESPAN`: +Use Zipkin SameSpan RPC style traces. (Default: ```false```) + +`TRAEFIK_TRACING_ZIPKIN_SAMPLERATE`: +The rate between 0.0 and 1.0 of requests to trace. (Default: ```1.000000```) diff --git a/docs/content/reference/static-configuration/file.md b/docs/content/reference/static-configuration/file.md new file mode 100644 index 000000000..c103de7f7 --- /dev/null +++ b/docs/content/reference/static-configuration/file.md @@ -0,0 +1,7 @@ +# Static Configuration: File + +## TOML + +```toml +--8<-- "content/reference/static-configuration/file.toml" +``` diff --git a/docs/content/reference/static-configuration.toml b/docs/content/reference/static-configuration/file.toml similarity index 100% rename from docs/content/reference/static-configuration.toml rename to docs/content/reference/static-configuration/file.toml diff --git a/docs/content/reference/static-configuration/overview.md b/docs/content/reference/static-configuration/overview.md new file mode 100644 index 000000000..57bf649d0 --- /dev/null +++ b/docs/content/reference/static-configuration/overview.md @@ -0,0 +1,5 @@ +# Static Configuration + +- [File](./file.md) +- [CLI](./cli.md) +- [Environment Variables](./env.md) diff --git a/docs/content/routing/entrypoints.md b/docs/content/routing/entrypoints.md index 8a72b7876..2734bf54c 100644 --- a/docs/content/routing/entrypoints.md +++ b/docs/content/routing/entrypoints.md @@ -47,7 +47,7 @@ See the complete reference for the list of available options: [EntryPoints] [EntryPoints.EntryPoint0] - Address = "foobar" + Address = ":8888" [EntryPoints.EntryPoint0.Transport] [EntryPoints.EntryPoint0.Transport.LifeCycle] RequestAcceptGraceTimeout = 42 @@ -65,52 +65,18 @@ See the complete reference for the list of available options: ``` ```ini tab="CLI" -Name:EntryPoint0 -Address:foobar -Transport.LifeCycle.RequestAcceptGraceTimeout:42 -Transport.LifeCycle.GraceTimeOut:42 -Transport.RespondingTimeouts.ReadTimeout:42 -Transport.RespondingTimeouts.WriteTimeout:42 -Transport.RespondingTimeouts.IdleTimeout:42 -ProxyProtocol.Insecure:true -ProxyProtocol.TrustedIPs:foobar,foobar -ForwardedHeaders.Insecure:true -ForwardedHeaders.TrustedIPs:foobar,foobar +--entryPoints.EntryPoint0.Address=:8888 +--entryPoints.EntryPoint0.Transport.LifeCycle.RequestAcceptGraceTimeout=42 +--entryPoints.EntryPoint0.Transport.LifeCycle.GraceTimeOut=42 +--entryPoints.EntryPoint0.Transport.RespondingTimeouts.ReadTimeout=42 +--entryPoints.EntryPoint0.Transport.RespondingTimeouts.WriteTimeout=42 +--entryPoints.EntryPoint0.Transport.RespondingTimeouts.IdleTimeout=42 +--entryPoints.EntryPoint0.ProxyProtocol.Insecure=true +--entryPoints.EntryPoint0.ProxyProtocol.TrustedIPs=foobar,foobar +--entryPoints.EntryPoint0.ForwardedHeaders.Insecure=true 
+--entryPoints.EntryPoint0.ForwardedHeaders.TrustedIPs=foobar,foobar ``` -??? example "Using the CLI" - - Here is an example of using the CLI to define `entrypoints`: - - ```shell - --entryPoints='Name:http Address::80' - --entryPoints='Name:https Address::443' - ``` - - !!! note - The whitespace character (` `) is the option separator, and the comma (`,`) is the value separator for lists inside an option. - The option names are case-insensitive. - - !!! warning "Using Docker Compose Files" - - The syntax for passing arguments inside a docker compose file is a little different. Here are two examples. - - ```yaml - traefik: - image: traefik:v2.0 # The official v2.0 Traefik docker image - command: - - --defaultentrypoints=powpow - - "--entryPoints=Name:powpow Address::42 Compress:true" - ``` - - or - - ```yaml - traefik: - image: traefik:v2.0 # The official v2.0 Traefik docker image - command: --defaultentrypoints=powpow --entryPoints='Name:powpow Address::42 Compress:true' - ``` - ## ProxyProtocol Traefik supports [ProxyProtocol](https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt). @@ -128,9 +94,10 @@ Traefik supports [ProxyProtocol](https://www.haproxy.org/download/1.8/doc/proxy- IPs in `trustedIPs` only will lead to remote client address replacement: Declare load-balancer IPs or CIDR range here. -??? example "Insecure Mode -- Testing Environnement Only" +??? example "Insecure Mode -- Testing Environment Only" - In a test environments, you can configure Traefik to trust every incoming connection. Doing so, every remote client address will be replaced (`trustedIPs` won't have any effect) + In a test environments, you can configure Traefik to trust every incoming connection. + Doing so, every remote client address will be replaced (`trustedIPs` won't have any effect) ```toml [entryPoints] diff --git a/docs/content/user-guides/crd-acme/03-deployments.yml b/docs/content/user-guides/crd-acme/03-deployments.yml index 712503a53..52b70d47c 100644 --- a/docs/content/user-guides/crd-acme/03-deployments.yml +++ b/docs/content/user-guides/crd-acme/03-deployments.yml @@ -30,8 +30,8 @@ spec: args: - --api - --accesslog - - --entrypoints=Name:web Address::8000 - - --entrypoints=Name:websecure Address::4443 + - --entrypoints.web.Address=:8000 + - --entrypoints.websecure.Address=:4443 - --providers.kubernetescrd - --providers.kubernetescrd.trace - --acme diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 7eb33dba7..03c97ce66 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -133,7 +133,11 @@ nav: - 'Maintainers': 'contributing/maintainers.md' - 'Glossary': 'glossary.md' - 'References': - - 'Static Configuration': 'reference/static-configuration.md' + - 'Static Configuration': + - 'Overview': 'reference/static-configuration/overview.md' + - 'File': 'reference/static-configuration/file.md' + - 'CLI': 'reference/static-configuration/cli.md' + - 'Environment variables': 'reference/static-configuration/env.md' - 'Dynamic Configuration': - 'Docker': 'reference/dynamic-configuration/docker.md' - 'Kubernetes CRD': 'reference/dynamic-configuration/kubernetes-crd.md' diff --git a/integration/fixtures/docker/simple.toml b/integration/fixtures/docker/simple.toml index 365533562..767abfd82 100644 --- a/integration/fixtures/docker/simple.toml +++ b/integration/fixtures/docker/simple.toml @@ -15,4 +15,4 @@ level = "DEBUG" [providers.docker] endpoint = "{{ .DockerHost }}" defaultRule = "{{ .DefaultRule }}" - exposedByDefault = true + exposedByDefault = true diff --git 
a/integration/fixtures/simple_whitelist.toml b/integration/fixtures/simple_whitelist.toml index 272e634a2..a6a456664 100644 --- a/integration/fixtures/simple_whitelist.toml +++ b/integration/fixtures/simple_whitelist.toml @@ -10,8 +10,6 @@ level = "DEBUG" address = ":8000" [entryPoints.web.ForwardedHeaders] insecure=true - [entryPoints.web.ClientIPStrategy] - depth=2 [api] diff --git a/integration/integration_test.go b/integration/integration_test.go index 9429a092f..1355b7767 100644 --- a/integration/integration_test.go +++ b/integration/integration_test.go @@ -10,6 +10,7 @@ import ( "os" "os/exec" "path/filepath" + "strings" "testing" "text/template" @@ -144,7 +145,7 @@ func (s *BaseSuite) adaptFile(c *check.C, path string, tempObjects interface{}) c.Assert(err, checker.IsNil) folder, prefix := filepath.Split(path) - tmpFile, err := ioutil.TempFile(folder, prefix) + tmpFile, err := ioutil.TempFile(folder, strings.TrimSuffix(prefix, filepath.Ext(prefix))+"_*"+filepath.Ext(prefix)) c.Assert(err, checker.IsNil) defer tmpFile.Close() diff --git a/integration/simple_test.go b/integration/simple_test.go index 5e9048554..d02c9c621 100644 --- a/integration/simple_test.go +++ b/integration/simple_test.go @@ -161,7 +161,7 @@ func (s *SimpleSuite) TestApiOnSameEntryPoint(c *check.C) { s.createComposeProject(c, "base") s.composeProject.Start(c) - cmd, output := s.traefikCmd("--entryPoints=Name:http Address::8000", "--api.entryPoint=http", "--global.debug", "--providers.docker") + cmd, output := s.traefikCmd("--entryPoints.http.Address=:8000", "--api.entryPoint=http", "--global.debug", "--providers.docker") defer output(c) err := cmd.Start() @@ -241,7 +241,7 @@ func (s *SimpleSuite) TestDefaultEntrypointHTTP(c *check.C) { s.createComposeProject(c, "base") s.composeProject.Start(c) - cmd, output := s.traefikCmd("--entryPoints=Name:http Address::8000", "--global.debug", "--providers.docker", "--api") + cmd, output := s.traefikCmd("--entryPoints.http.Address=:8000", "--global.debug", "--providers.docker", "--api") defer output(c) err := cmd.Start() @@ -259,7 +259,7 @@ func (s *SimpleSuite) TestWithUnexistingEntrypoint(c *check.C) { s.createComposeProject(c, "base") s.composeProject.Start(c) - cmd, output := s.traefikCmd("--entryPoints=Name:http Address::8000", "--global.debug", "--providers.docker", "--api") + cmd, output := s.traefikCmd("--entryPoints.http.Address=:8000", "--global.debug", "--providers.docker", "--api") defer output(c) err := cmd.Start() @@ -277,7 +277,7 @@ func (s *SimpleSuite) TestMetricsPrometheusDefaultEntrypoint(c *check.C) { s.createComposeProject(c, "base") s.composeProject.Start(c) - cmd, output := s.traefikCmd("--entryPoints=Name:http Address::8000", "--api", "--metrics.prometheus.buckets=0.1,0.3,1.2,5.0", "--providers.docker", "--global.debug") + cmd, output := s.traefikCmd("--entryPoints.http.Address=:8000", "--api", "--metrics.prometheus.buckets=0.1,0.3,1.2,5.0", "--providers.docker", "--global.debug") defer output(c) err := cmd.Start() @@ -346,19 +346,6 @@ func (s *SimpleSuite) TestIPStrategyWhitelist(c *check.C) { host string expectedStatusCode int }{ - // { - // desc: "default client ip strategy accept", - // xForwardedFor: "8.8.8.8,127.0.0.1", - // host: "no.override.whitelist.docker.local", - // expectedStatusCode: 200, - // }, - // FIXME add clientipstrategy and forwarded headers on entrypoint - // { - // desc: "default client ip strategy reject", - // xForwardedFor: "8.8.8.10,127.0.0.1", - // host: "no.override.whitelist.docker.local", - // expectedStatusCode: 403, - 
// }, { desc: "override remote addr reject", xForwardedFor: "8.8.8.8,8.8.8.8", diff --git a/pkg/anonymize/anonymize_config_test.go b/pkg/anonymize/anonymize_config_test.go index 0108bdb69..0f77f660a 100644 --- a/pkg/anonymize/anonymize_config_test.go +++ b/pkg/anonymize/anonymize_config_test.go @@ -5,7 +5,6 @@ import ( "testing" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config/static" "github.com/containous/traefik/pkg/ping" "github.com/containous/traefik/pkg/provider" @@ -38,18 +37,18 @@ func TestDo_globalConfiguration(t *testing.T) { FilePath: "AccessLog FilePath", Format: "AccessLog Format", Filters: &types.AccessLogFilters{ - StatusCodes: types.StatusCodes{"200", "500"}, + StatusCodes: []string{"200", "500"}, RetryAttempts: true, MinDuration: 10, }, Fields: &types.AccessLogFields{ DefaultMode: "drop", - Names: types.FieldNames{ + Names: map[string]string{ "RequestHost": "keep", }, Headers: &types.FieldHeaders{ DefaultMode: "drop", - Names: types.FieldHeaderNames{ + Names: map[string]string{ "Referer": "keep", }, }, @@ -68,9 +67,9 @@ func TestDo_globalConfiguration(t *testing.T) { Address: "foo Address", Transport: &static.EntryPointsTransport{ RespondingTimeouts: &static.RespondingTimeouts{ - ReadTimeout: parse.Duration(111 * time.Second), - WriteTimeout: parse.Duration(111 * time.Second), - IdleTimeout: parse.Duration(111 * time.Second), + ReadTimeout: types.Duration(111 * time.Second), + WriteTimeout: types.Duration(111 * time.Second), + IdleTimeout: types.Duration(111 * time.Second), }, }, ProxyProtocol: &static.ProxyProtocol{ @@ -81,9 +80,9 @@ func TestDo_globalConfiguration(t *testing.T) { Address: "fii Address", Transport: &static.EntryPointsTransport{ RespondingTimeouts: &static.RespondingTimeouts{ - ReadTimeout: parse.Duration(111 * time.Second), - WriteTimeout: parse.Duration(111 * time.Second), - IdleTimeout: parse.Duration(111 * time.Second), + ReadTimeout: types.Duration(111 * time.Second), + WriteTimeout: types.Duration(111 * time.Second), + IdleTimeout: types.Duration(111 * time.Second), }, }, ProxyProtocol: &static.ProxyProtocol{ @@ -112,16 +111,16 @@ func TestDo_globalConfiguration(t *testing.T) { }, } config.Providers = &static.Providers{ - ProvidersThrottleDuration: parse.Duration(111 * time.Second), + ProvidersThrottleDuration: types.Duration(111 * time.Second), } config.ServersTransport = &static.ServersTransport{ InsecureSkipVerify: true, - RootCAs: traefiktls.FilesOrContents{"RootCAs 1", "RootCAs 2", "RootCAs 3"}, + RootCAs: []traefiktls.FileOrContent{"RootCAs 1", "RootCAs 2", "RootCAs 3"}, MaxIdleConnsPerHost: 111, ForwardingTimeouts: &static.ForwardingTimeouts{ - DialTimeout: parse.Duration(111 * time.Second), - ResponseHeaderTimeout: parse.Duration(111 * time.Second), + DialTimeout: types.Duration(111 * time.Second), + ResponseHeaderTimeout: types.Duration(111 * time.Second), }, } @@ -156,15 +155,15 @@ func TestDo_globalConfiguration(t *testing.T) { config.Providers.Docker = &docker.Provider{ Constrainer: provider.Constrainer{ - Constraints: types.Constraints{ + Constraints: []*types.Constraint{ { Key: "file Constraints Key 1", - Regex: "file Constraints Regex 2", + Value: "file Constraints Regex 2", MustMatch: true, }, { Key: "file Constraints Key 1", - Regex: "file Constraints Regex 2", + Value: "file Constraints Regex 2", MustMatch: true, }, }, @@ -210,22 +209,22 @@ func TestDo_globalConfiguration(t *testing.T) { config.Metrics = &types.Metrics{ Prometheus: &types.Prometheus{ - Buckets: types.Buckets{0.1, 0.3, 1.2, 5}, + 
Buckets: []float64{0.1, 0.3, 1.2, 5}, EntryPoint: "MyEntryPoint", Middlewares: []string{"m1", "m2"}, }, Datadog: &types.Datadog{ Address: "localhost:8181", - PushInterval: "12", + PushInterval: 12, }, StatsD: &types.Statsd{ Address: "localhost:8182", - PushInterval: "42", + PushInterval: 42, }, InfluxDB: &types.InfluxDB{ Address: "localhost:8183", Protocol: "http", - PushInterval: "22", + PushInterval: 22, Database: "myDB", RetentionPolicy: "12", Username: "a", diff --git a/pkg/cli/commands.go b/pkg/cli/commands.go new file mode 100644 index 000000000..eaf8f73a7 --- /dev/null +++ b/pkg/cli/commands.go @@ -0,0 +1,115 @@ +// Package cli provides tools to create commands that support advanced configuration features, +// sub-commands, and allowing configuration from command-line flags, configuration files, and environment variables. +package cli + +import ( + "errors" + "fmt" + "os" + "path/filepath" +) + +// Command structure contains program/command information (command name and description). +type Command struct { + Name string + Description string + Configuration interface{} + Resources []ResourceLoader + Run func([]string) error + Hidden bool + subCommands []*Command +} + +// AddCommand Adds a sub command. +func (c *Command) AddCommand(cmd *Command) error { + if c == nil || cmd == nil { + return nil + } + + if c.Name == cmd.Name { + return fmt.Errorf("child command cannot have the same name as their parent: %s", cmd.Name) + } + + c.subCommands = append(c.subCommands, cmd) + return nil +} + +// Execute Executes a command. +func Execute(cmd *Command) error { + return execute(cmd, os.Args, true) +} + +func execute(cmd *Command, args []string, root bool) error { + if len(args) == 1 { + if err := run(cmd, args); err != nil { + return fmt.Errorf("command %s error: %v", args[0], err) + } + return nil + } + + if root && cmd.Name != args[1] && !contains(cmd.subCommands, args[1]) { + if err := run(cmd, args[1:]); err != nil { + return fmt.Errorf("command %s error: %v", filepath.Base(args[0]), err) + } + return nil + } + + if len(args) >= 2 && cmd.Name == args[1] { + if err := run(cmd, args[2:]); err != nil { + return fmt.Errorf("command %s error: %v", cmd.Name, err) + } + return nil + } + + if len(cmd.subCommands) == 0 { + if err := run(cmd, args[1:]); err != nil { + return fmt.Errorf("command %s error: %v", cmd.Name, err) + } + return nil + } + + for _, subCmd := range cmd.subCommands { + if len(args) >= 2 && subCmd.Name == args[1] { + return execute(subCmd, args[1:], false) + } + } + + return fmt.Errorf("command not found: %v", args) +} + +func run(cmd *Command, args []string) error { + if isHelp(args) { + return PrintHelp(os.Stdout, cmd) + } + + if cmd.Run == nil { + _ = PrintHelp(os.Stdout, cmd) + return errors.New("command not found") + } + + if cmd.Configuration == nil { + return cmd.Run(args) + } + + for _, resource := range cmd.Resources { + done, err := resource.Load(args, cmd) + if err != nil { + return err + } + if done { + break + } + } + + return cmd.Run(args) +} + +func contains(cmds []*Command, name string) bool { + for _, cmd := range cmds { + if cmd.Name == name { + return true + } + } + + return false +} diff --git a/pkg/cli/commands_test.go b/pkg/cli/commands_test.go new file mode 100644 index 000000000..b635af3f2 --- /dev/null +++ b/pkg/cli/commands_test.go @@ -0,0 +1,681 @@ +package cli + +import ( + "io/ioutil" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCommand_AddCommand(t *testing.T) { + 
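// A minimal usage sketch of the cli.Command API defined above: a root command with
// one sub-command, dispatched through cli.Execute. The command names and behaviour
// here are assumptions for illustration only.
package main

import (
	"fmt"
	"os"

	"github.com/containous/traefik/pkg/cli"
)

func main() {
	rootCmd := &cli.Command{
		Name:        "app",
		Description: "Example root command",
		Run: func(args []string) error {
			// When "app <something-else>" is invoked, the root Run receives the remaining arguments.
			fmt.Println("root called with", args)
			return nil
		},
	}

	// "app version" dispatches to this sub-command; any other first argument falls back to the root Run.
	if err := rootCmd.AddCommand(&cli.Command{
		Name:        "version",
		Description: "Print the version",
		Run: func(_ []string) error {
			fmt.Println("dev")
			return nil
		},
	}); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	if err := cli.Execute(rootCmd); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}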
testCases := []struct { + desc string + subCommand *Command + expectedError bool + }{ + { + desc: "sub command nil", + subCommand: nil, + }, + { + desc: "add a simple command", + subCommand: &Command{ + Name: "sub", + }, + }, + { + desc: "add a sub command with the same name as their parent", + subCommand: &Command{ + Name: "root", + }, + expectedError: true, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + rootCmd := &Command{ + Name: "root", + } + + err := rootCmd.AddCommand(test.subCommand) + + if test.expectedError { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } +} + +func Test_execute(t *testing.T) { + var called string + + type expected struct { + result string + error bool + } + + testCases := []struct { + desc string + args []string + command func() *Command + expected expected + }{ + { + desc: "root command", + args: []string{""}, + command: func() *Command { + return &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called = "root" + return nil + }, + } + + }, + expected: expected{result: "root"}, + }, + { + desc: "one sub command", + args: []string{"", "sub1"}, + command: func() *Command { + rootCmd := &Command{ + Name: "test", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "sub1"}, + }, + { + desc: "two sub commands", + args: []string{"", "sub2"}, + command: func() *Command { + rootCmd := &Command{ + Name: "test", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + }) + + _ = rootCmd.AddCommand(&Command{ + Name: "sub2", + Description: "sub2", + Configuration: nil, + Run: func(_ []string) error { + called += "sub2" + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "sub2"}, + }, + { + desc: "command with sub sub command, call sub command", + args: []string{"", "sub1"}, + command: func() *Command { + rootCmd := &Command{ + Name: "test", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + sub1 := &Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + } + _ = rootCmd.AddCommand(sub1) + + _ = sub1.AddCommand(&Command{ + Name: "sub2", + Description: "sub2", + Configuration: nil, + Run: func(_ []string) error { + called += "sub2" + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "sub1"}, + }, + { + desc: "command with sub sub command, call sub sub command", + args: []string{"", "sub1", "sub2"}, + command: func() *Command { + rootCmd := &Command{ + Name: "test", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + sub1 := &Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + } + _ = rootCmd.AddCommand(sub1) 
+ + _ = sub1.AddCommand(&Command{ + Name: "sub2", + Description: "sub2", + Configuration: nil, + Run: func(_ []string) error { + called += "sub2" + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "sub2"}, + }, + { + desc: "command with sub command, call root command explicitly", + args: []string{"", "root"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "root"}, + }, + { + desc: "command with sub command, call root command implicitly", + args: []string{""}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "root"}, + }, + { + desc: "command with sub command, call sub command which has no run", + args: []string{"", "sub1"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called += "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + }) + + return rootCmd + }, + expected: expected{error: true}, + }, + { + desc: "command with sub command, call root command which has no run", + args: []string{"", "root"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + }) + + return rootCmd + }, + expected: expected{error: true}, + }, + { + desc: "command with sub command, call implicitly root command which has no run", + args: []string{""}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(_ []string) error { + called += "sub1" + return nil + }, + }) + + return rootCmd + }, + expected: expected{error: true}, + }, + { + desc: "command with sub command, call sub command with arguments", + args: []string{"", "sub1", "foobar.txt"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called = "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(args []string) error { + called += "sub1-" + strings.Join(args, "-") + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "sub1-foobar.txt"}, + }, + { + desc: "command with sub command, call root command with arguments", + args: []string{"", "foobar.txt"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(args 
[]string) error { + called += "root-" + strings.Join(args, "-") + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(args []string) error { + called += "sub1-" + strings.Join(args, "-") + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "root-foobar.txt"}, + }, + { + desc: "command with sub command, call sub command with flags", + args: []string{"", "sub1", "--foo=bar", "--fii=bir"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + called = "root" + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(args []string) error { + called += "sub1-" + strings.Join(args, "") + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "sub1---foo=bar--fii=bir"}, + }, + { + desc: "command with sub command, call explicitly root command with flags", + args: []string{"", "root", "--foo=bar", "--fii=bir"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(args []string) error { + called += "root-" + strings.Join(args, "") + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(args []string) error { + called += "sub1-" + strings.Join(args, "") + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "root---foo=bar--fii=bir"}, + }, + { + desc: "command with sub command, call implicitly root command with flags", + args: []string{"", "--foo=bar", "--fii=bir"}, + command: func() *Command { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(args []string) error { + called += "root-" + strings.Join(args, "") + return nil + }, + } + + _ = rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "sub1", + Configuration: nil, + Run: func(args []string) error { + called += "sub1-" + strings.Join(args, "") + return nil + }, + }) + + return rootCmd + }, + expected: expected{result: "root---foo=bar--fii=bir"}, + }, + } + + for _, test := range testCases { + t.Run(test.desc, func(t *testing.T) { + defer func() { + called = "" + }() + + err := execute(test.command(), test.args, true) + + if test.expected.error { + require.Error(t, err) + } else { + require.NoError(t, err) + assert.Equal(t, test.expected.result, called) + } + }) + } +} + +func Test_execute_configuration(t *testing.T) { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + return nil + }, + } + + element := &Yo{ + Fuu: "test", + } + + sub1 := &Command{ + Name: "sub1", + Description: "sub1", + Configuration: element, + Resources: []ResourceLoader{&FlagLoader{}}, + Run: func(args []string) error { + return nil + }, + } + err := rootCmd.AddCommand(sub1) + require.NoError(t, err) + + args := []string{"", "sub1", "--foo=bar", "--fii=bir", "--yi"} + + err = execute(rootCmd, args, true) + require.NoError(t, err) + + expected := &Yo{ + Foo: "bar", + Fii: "bir", + Fuu: "test", + Yi: &Yi{ + Foo: "foo", + Fii: "fii", + }, + } + assert.Equal(t, expected, element) +} + +func Test_execute_configuration_file(t *testing.T) { + rootCmd := &Command{ + Name: "root", + Description: "This is a test", + Configuration: nil, + Run: func(_ []string) error { + return nil 
+ }, + } + + element := &Yo{ + Fuu: "test", + } + + sub1 := &Command{ + Name: "sub1", + Description: "sub1", + Configuration: element, + Resources: []ResourceLoader{&FileLoader{}, &FlagLoader{}}, + Run: func(args []string) error { + return nil + }, + } + err := rootCmd.AddCommand(sub1) + require.NoError(t, err) + + args := []string{"", "sub1", "--configFile=./fixtures/config.toml"} + + err = execute(rootCmd, args, true) + require.NoError(t, err) + + expected := &Yo{ + Foo: "bar", + Fii: "bir", + Fuu: "test", + Yi: &Yi{ + Foo: "foo", + Fii: "fii", + }, + } + assert.Equal(t, expected, element) +} + +func Test_execute_help(t *testing.T) { + element := &Yo{ + Fuu: "test", + } + + rooCmd := &Command{ + Name: "root", + Description: "Description for root", + Configuration: element, + Run: func(args []string) error { + return nil + }, + } + + args := []string{"", "--help", "--foo"} + + backupStdout := os.Stdout + defer func() { + os.Stdout = backupStdout + }() + + r, w, _ := os.Pipe() + os.Stdout = w + + err := execute(rooCmd, args, true) + if err != nil { + return + } + + // read and restore stdout + if err = w.Close(); err != nil { + t.Fatal(err) + } + out, err := ioutil.ReadAll(r) + if err != nil { + t.Fatal(err) + } + + os.Stdout = backupStdout + + assert.Equal(t, `root Description for root + +Usage: root [command] [flags] [arguments] + +Use "root [command] --help" for help on any command. + +Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s) + or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s) + +Flags: + --fii (Default: "fii") + Fii description + + --foo (Default: "foo") + Foo description + + --fuu (Default: "test") + Fuu description + + --yi (Default: "false") + + --yi.fii (Default: "fii") + + --yi.foo (Default: "foo") + + --yi.fuu (Default: "") + + --yu.fii (Default: "fii") + + --yu.foo (Default: "foo") + + --yu.fuu (Default: "") + +`, string(out)) +} diff --git a/pkg/cli/file_finder.go b/pkg/cli/file_finder.go new file mode 100644 index 000000000..b0b724aa3 --- /dev/null +++ b/pkg/cli/file_finder.go @@ -0,0 +1,50 @@ +package cli + +import ( + "os" + "path/filepath" + "strings" +) + +// Finder holds a list of file paths. +type Finder struct { + BasePaths []string + Extensions []string +} + +// Find returns the first valid existing file among configFile +// and the paths already registered with Finder. 
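// A minimal sketch of the Finder declared above: it is given base paths without
// extensions and tries each of them with every registered extension, after any
// explicitly passed configuration file. The paths used here are assumptions.
package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/cli"
)

func main() {
	finder := cli.Finder{
		BasePaths:  []string{"/etc/traefik/traefik", "$HOME/.config/traefik", "./traefik"},
		Extensions: []string{"toml", "yaml", "yml"},
	}

	// An empty configFile means only the base paths are candidates.
	// Environment variables such as $HOME are expanded before the lookup,
	// and the first path that exists on disk is returned as an absolute path.
	path, err := finder.Find("")
	if err != nil {
		panic(err)
	}

	if path == "" {
		fmt.Println("no configuration file found")
		return
	}
	fmt.Println("using configuration file:", path)
}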
+func (f Finder) Find(configFile string) (string, error) { + paths := f.getPaths(configFile) + + for _, filePath := range paths { + fp := os.ExpandEnv(filePath) + + _, err := os.Stat(fp) + if os.IsNotExist(err) { + continue + } + if err != nil { + return "", err + } + + return filepath.Abs(fp) + } + + return "", nil +} + +func (f Finder) getPaths(configFile string) []string { + var paths []string + if strings.TrimSpace(configFile) != "" { + paths = append(paths, configFile) + } + + for _, basePath := range f.BasePaths { + for _, ext := range f.Extensions { + paths = append(paths, basePath+"."+ext) + } + } + + return paths +} diff --git a/pkg/cli/file_finder_test.go b/pkg/cli/file_finder_test.go new file mode 100644 index 000000000..53acfe382 --- /dev/null +++ b/pkg/cli/file_finder_test.go @@ -0,0 +1,162 @@ +package cli + +import ( + "io/ioutil" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFinder_Find(t *testing.T) { + configFile, err := ioutil.TempFile("", "traefik-file-finder-test-*.toml") + require.NoError(t, err) + + defer func() { + _ = os.Remove(configFile.Name()) + }() + + dir, err := ioutil.TempDir("", "traefik-file-finder-test") + require.NoError(t, err) + + defer func() { + _ = os.RemoveAll(dir) + }() + + fooFile, err := os.Create(filepath.Join(dir, "foo.toml")) + require.NoError(t, err) + + _, err = os.Create(filepath.Join(dir, "bar.toml")) + require.NoError(t, err) + + type expected struct { + error bool + path string + } + + testCases := []struct { + desc string + basePaths []string + configFile string + expected expected + }{ + { + desc: "not found: no config file", + configFile: "", + expected: expected{path: ""}, + }, + { + desc: "not found: no config file, no other paths available", + configFile: "", + basePaths: []string{"/my/path/traefik", "$HOME/my/path/traefik", "./my-traefik"}, + expected: expected{path: ""}, + }, + { + desc: "not found: with non existing config file", + configFile: "/my/path/config.toml", + expected: expected{path: ""}, + }, + { + desc: "found: with config file", + configFile: configFile.Name(), + expected: expected{path: configFile.Name()}, + }, + { + desc: "found: no config file, first base path", + configFile: "", + basePaths: []string{filepath.Join(dir, "foo"), filepath.Join(dir, "bar")}, + expected: expected{path: fooFile.Name()}, + }, + { + desc: "found: no config file, base path", + configFile: "", + basePaths: []string{"/my/path/traefik", "$HOME/my/path/traefik", filepath.Join(dir, "foo")}, + expected: expected{path: fooFile.Name()}, + }, + { + desc: "found: config file over base path", + configFile: configFile.Name(), + basePaths: []string{filepath.Join(dir, "foo"), filepath.Join(dir, "bar")}, + expected: expected{path: configFile.Name()}, + }, + } + + for _, test := range testCases { + t.Run(test.desc, func(t *testing.T) { + + finder := Finder{ + BasePaths: test.basePaths, + Extensions: []string{"toml", "yaml", "yml"}, + } + + path, err := finder.Find(test.configFile) + + if test.expected.error { + require.Error(t, err) + } else { + require.NoError(t, err) + assert.Equal(t, test.expected.path, path) + } + }) + } +} + +func TestFinder_getPaths(t *testing.T) { + testCases := []struct { + desc string + basePaths []string + configFile string + expected []string + }{ + { + desc: "no config file", + basePaths: []string{"/etc/traefik/traefik", "$HOME/.config/traefik", "./traefik"}, + configFile: "", + expected: []string{ + "/etc/traefik/traefik.toml", + 
"/etc/traefik/traefik.yaml", + "/etc/traefik/traefik.yml", + "$HOME/.config/traefik.toml", + "$HOME/.config/traefik.yaml", + "$HOME/.config/traefik.yml", + "./traefik.toml", + "./traefik.yaml", + "./traefik.yml", + }, + }, + { + desc: "with config file", + basePaths: []string{"/etc/traefik/traefik", "$HOME/.config/traefik", "./traefik"}, + configFile: "/my/path/config.toml", + expected: []string{ + "/my/path/config.toml", + "/etc/traefik/traefik.toml", + "/etc/traefik/traefik.yaml", + "/etc/traefik/traefik.yml", + "$HOME/.config/traefik.toml", + "$HOME/.config/traefik.yaml", + "$HOME/.config/traefik.yml", + "./traefik.toml", + "./traefik.yaml", + "./traefik.yml", + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + finder := Finder{ + BasePaths: test.basePaths, + Extensions: []string{"toml", "yaml", "yml"}, + } + paths := finder.getPaths(test.configFile) + + assert.Equal(t, test.expected, paths) + }) + } +} diff --git a/pkg/cli/fixtures/config.toml b/pkg/cli/fixtures/config.toml new file mode 100644 index 000000000..72153e418 --- /dev/null +++ b/pkg/cli/fixtures/config.toml @@ -0,0 +1,3 @@ +foo = "bar" +fii = "bir" +[yi] diff --git a/pkg/cli/fixtures_test.go b/pkg/cli/fixtures_test.go new file mode 100644 index 000000000..3cdca3927 --- /dev/null +++ b/pkg/cli/fixtures_test.go @@ -0,0 +1,25 @@ +package cli + +type Yo struct { + Foo string `description:"Foo description"` + Fii string `description:"Fii description"` + Fuu string `description:"Fuu description"` + Yi *Yi `label:"allowEmpty"` + Yu *Yi +} + +func (y *Yo) SetDefaults() { + y.Foo = "foo" + y.Fii = "fii" +} + +type Yi struct { + Foo string + Fii string + Fuu string +} + +func (y *Yi) SetDefaults() { + y.Foo = "foo" + y.Fii = "fii" +} diff --git a/pkg/cli/help.go b/pkg/cli/help.go new file mode 100644 index 000000000..83f3a4935 --- /dev/null +++ b/pkg/cli/help.go @@ -0,0 +1,89 @@ +package cli + +import ( + "io" + "strings" + "text/tabwriter" + "text/template" + + "github.com/Masterminds/sprig" + "github.com/containous/traefik/pkg/config/flag" + "github.com/containous/traefik/pkg/config/generator" + "github.com/containous/traefik/pkg/config/parser" +) + +const tmplHelp = `{{ .Cmd.Name }} {{ .Cmd.Description }} + +Usage: {{ .Cmd.Name }} [command] [flags] [arguments] + +Use "{{ .Cmd.Name }} [command] --help" for help on any command. +{{if .SubCommands }} +Commands: +{{- range $i, $subCmd := .SubCommands }} +{{ if not $subCmd.Hidden }} {{ $subCmd.Name }} {{ $subCmd.Description }}{{end}}{{end}} +{{end}} +{{- if .Flags }} +Flag's usage: {{ .Cmd.Name }} [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s) + or: {{ .Cmd.Name }} [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s) + +Flags: +{{- range $i, $flag := .Flags }} + --{{ SliceIndexN $flag.Name }} {{if ne $flag.Name "global.sendanonymoususage"}}(Default: "{{ $flag.Default}}"){{end}} +{{if $flag.Description }} {{ wrapWith 80 "\n\t\t" $flag.Description }} +{{else}} +{{- end}} +{{- end}} +{{- end}} +` + +func isHelp(args []string) bool { + for _, name := range args { + if name == "--help" || name == "-help" || name == "-h" { + return true + } + } + return false +} + +// PrintHelp prints the help for the command given as argument. 
+func PrintHelp(w io.Writer, cmd *Command) error { + var flags []parser.Flat + if cmd.Configuration != nil { + generator.Generate(cmd.Configuration) + + var err error + flags, err = flag.Encode(cmd.Configuration) + if err != nil { + return err + } + } + + model := map[string]interface{}{ + "Cmd": cmd, + "Flags": flags, + "SubCommands": cmd.subCommands, + } + + funcs := sprig.TxtFuncMap() + funcs["SliceIndexN"] = sliceIndexN + + tmpl, err := template.New("flags"). + Funcs(funcs). + Parse(tmplHelp) + if err != nil { + return err + } + + tw := tabwriter.NewWriter(w, 4, 0, 4, ' ', 0) + + err = tmpl.Execute(tw, model) + if err != nil { + return err + } + + return tw.Flush() +} + +func sliceIndexN(flag string) string { + return strings.ReplaceAll(flag, "[0]", "[n]") +} diff --git a/pkg/cli/help_test.go b/pkg/cli/help_test.go new file mode 100644 index 000000000..768f74a2a --- /dev/null +++ b/pkg/cli/help_test.go @@ -0,0 +1,211 @@ +package cli + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPrintHelp(t *testing.T) { + testCases := []struct { + desc string + command *Command + expected string + }{ + { + desc: "no sub-command, with flags", + command: func() *Command { + element := &Yo{ + Fuu: "test", + } + + return &Command{ + Name: "root", + Description: "Description for root", + Configuration: element, + Run: func(args []string) error { + return nil + }, + } + }(), + expected: `root Description for root + +Usage: root [command] [flags] [arguments] + +Use "root [command] --help" for help on any command. + +Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s) + or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s) + +Flags: + --fii (Default: "fii") + Fii description + + --foo (Default: "foo") + Foo description + + --fuu (Default: "test") + Fuu description + + --yi (Default: "false") + + --yi.fii (Default: "fii") + + --yi.foo (Default: "foo") + + --yi.fuu (Default: "") + + --yu.fii (Default: "fii") + + --yu.foo (Default: "foo") + + --yu.fuu (Default: "") + +`, + }, + { + desc: "with sub-commands, with flags, call root help", + command: func() *Command { + element := &Yo{ + Fuu: "test", + } + + rootCmd := &Command{ + Name: "root", + Description: "Description for root", + Configuration: element, + Run: func(_ []string) error { + return nil + }, + } + + err := rootCmd.AddCommand(&Command{ + Name: "sub1", + Description: "Description for sub1", + Configuration: element, + Run: func(args []string) error { + return nil + }, + }) + require.NoError(t, err) + + err = rootCmd.AddCommand(&Command{ + Name: "sub2", + Description: "Description for sub2", + Configuration: element, + Run: func(args []string) error { + return nil + }, + }) + require.NoError(t, err) + + return rootCmd + }(), + expected: `root Description for root + +Usage: root [command] [flags] [arguments] + +Use "root [command] --help" for help on any command. 
+ +Commands: + sub1 Description for sub1 + sub2 Description for sub2 + +Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s) + or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s) + +Flags: + --fii (Default: "fii") + Fii description + + --foo (Default: "foo") + Foo description + + --fuu (Default: "test") + Fuu description + + --yi (Default: "false") + + --yi.fii (Default: "fii") + + --yi.foo (Default: "foo") + + --yi.fuu (Default: "") + + --yu.fii (Default: "fii") + + --yu.foo (Default: "foo") + + --yu.fuu (Default: "") + +`, + }, + { + desc: "no sub-command, no flags", + command: func() *Command { + return &Command{ + Name: "root", + Description: "Description for root", + Configuration: nil, + Run: func(args []string) error { + return nil + }, + } + }(), + expected: `root Description for root + +Usage: root [command] [flags] [arguments] + +Use "root [command] --help" for help on any command. + +`, + }, + { + desc: "no sub-command, slice flags", + command: func() *Command { + return &Command{ + Name: "root", + Description: "Description for root", + Configuration: &struct { + Foo []struct { + Field string + } + }{}, + Run: func(args []string) error { + return nil + }, + } + }(), + expected: `root Description for root + +Usage: root [command] [flags] [arguments] + +Use "root [command] --help" for help on any command. + +Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s) + or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s) + +Flags: + --foo (Default: "") + + --foo[n].field (Default: "") + +`, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + buffer := &bytes.Buffer{} + err := PrintHelp(buffer, test.command) + require.NoError(t, err) + + assert.Equal(t, test.expected, buffer.String()) + }) + } +} diff --git a/pkg/cli/loader.go b/pkg/cli/loader.go new file mode 100644 index 000000000..90065910f --- /dev/null +++ b/pkg/cli/loader.go @@ -0,0 +1,21 @@ +package cli + +// ResourceLoader is a configuration resource loader. +type ResourceLoader interface { + // Load populates cmd.Configuration, optionally using args to do so. + Load(args []string, cmd *Command) (bool, error) +} + +type filenameGetter interface { + GetFilename() string +} + +// GetConfigFile returns the configuration file corresponding to the first configuration file loader found in ResourceLoader, if any. +func GetConfigFile(loaders []ResourceLoader) string { + for _, loader := range loaders { + if v, ok := loader.(filenameGetter); ok { + return v.GetFilename() + } + } + return "" +} diff --git a/pkg/cli/loader_env.go b/pkg/cli/loader_env.go new file mode 100644 index 000000000..3bff0251b --- /dev/null +++ b/pkg/cli/loader_env.go @@ -0,0 +1,40 @@ +package cli + +import ( + "fmt" + "os" + "strings" + + "github.com/containous/traefik/pkg/config/env" + "github.com/containous/traefik/pkg/log" +) + +// EnvLoader loads a configuration from all the environment variables prefixed with "TRAEFIK_". +type EnvLoader struct{} + +// Load loads the command's configuration from the environment variables. 
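// A minimal sketch of EnvLoader: configuration is only loaded from the environment
// when at least one TRAEFIK_-prefixed variable is present, and variable names are
// mapped onto the configuration tree ("TRAEFIK_FOO" -> field Foo). The config
// struct and variable values below are assumptions for illustration only.
package main

import (
	"fmt"
	"os"

	"github.com/containous/traefik/pkg/cli"
)

type config struct {
	Foo string
	Bar bool
}

func main() {
	cfg := &config{}

	cmd := &cli.Command{
		Name:          "app",
		Description:   "Example using the environment loader",
		Configuration: cfg,
		Resources:     []cli.ResourceLoader{&cli.EnvLoader{}},
		Run: func(_ []string) error {
			fmt.Printf("foo=%q bar=%v\n", cfg.Foo, cfg.Bar)
			return nil
		},
	}

	// Equivalent to exporting these variables before running the program;
	// the Run function above then prints: foo="fuu" bar=true
	_ = os.Setenv("TRAEFIK_FOO", "fuu")
	_ = os.Setenv("TRAEFIK_BAR", "true")

	if err := cli.Execute(cmd); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}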
+func (e *EnvLoader) Load(_ []string, cmd *Command) (bool, error) { + return e.load(os.Environ(), cmd) +} + +func (*EnvLoader) load(environ []string, cmd *Command) (bool, error) { + var found bool + for _, value := range environ { + if strings.HasPrefix(value, "TRAEFIK_") { + found = true + break + } + } + + if !found { + return false, nil + } + + if err := env.Decode(environ, cmd.Configuration); err != nil { + return false, fmt.Errorf("failed to decode configuration from environment variables: %v", err) + } + + log.WithoutContext().Println("Configuration loaded from environment variables.") + + return true, nil +} diff --git a/pkg/cli/loader_file.go b/pkg/cli/loader_file.go new file mode 100644 index 000000000..86bbdc7be --- /dev/null +++ b/pkg/cli/loader_file.go @@ -0,0 +1,78 @@ +package cli + +import ( + "io/ioutil" + "os" + "strings" + + "github.com/containous/traefik/pkg/config/file" + "github.com/containous/traefik/pkg/config/flag" + "github.com/containous/traefik/pkg/log" +) + +// FileLoader loads a configuration from a file. +type FileLoader struct { + ConfigFileFlag string + filename string +} + +// GetFilename returns the configuration file if any. +func (f *FileLoader) GetFilename() string { + return f.filename +} + +// Load loads the command's configuration from a file either specified with the -traefik.configfile flag, or from default locations. +func (f *FileLoader) Load(args []string, cmd *Command) (bool, error) { + ref, err := flag.Parse(args, cmd.Configuration) + if err != nil { + _ = PrintHelp(os.Stdout, cmd) + return false, err + } + + configFileFlag := "traefik.configfile" + if f.ConfigFileFlag != "" { + configFileFlag = "traefik." + strings.ToLower(f.ConfigFileFlag) + } + + configFile, err := loadConfigFiles(ref[configFileFlag], cmd.Configuration) + if err != nil { + return false, err + } + + f.filename = configFile + + if configFile == "" { + return false, nil + } + + logger := log.WithoutContext() + logger.Printf("Configuration loaded from file: %s", configFile) + + content, _ := ioutil.ReadFile(configFile) + logger.Debug(string(content)) + + return true, nil +} + +// loadConfigFiles tries to decode the given configuration file and all default locations for the configuration file. +// It stops as soon as decoding one of them is successful. +func loadConfigFiles(configFile string, element interface{}) (string, error) { + finder := Finder{ + BasePaths: []string{"/etc/traefik/traefik", "$XDG_CONFIG_HOME/traefik", "$HOME/.config/traefik", "./traefik"}, + Extensions: []string{"toml", "yaml", "yml"}, + } + + filePath, err := finder.Find(configFile) + if err != nil { + return "", err + } + + if len(filePath) == 0 { + return "", nil + } + + if err = file.Decode(filePath, element); err != nil { + return "", err + } + return filePath, nil +} diff --git a/pkg/cli/loader_flag.go b/pkg/cli/loader_flag.go new file mode 100644 index 000000000..cb529a9f4 --- /dev/null +++ b/pkg/cli/loader_flag.go @@ -0,0 +1,22 @@ +package cli + +import ( + "fmt" + + "github.com/containous/traefik/pkg/config/flag" + "github.com/containous/traefik/pkg/log" +) + +// FlagLoader loads configuration from flags. +type FlagLoader struct{} + +// Load loads the command's configuration from flag arguments. 
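// A minimal sketch of the file/flag loader combination exercised by the tests above.
// Loaders are tried in order and the first one that reports "done" wins: a
// configuration file found by FileLoader short-circuits FlagLoader; otherwise the
// command-line flags are decoded. The cfg struct and the file contents mentioned in
// the comments are assumptions for illustration only.
package main

import (
	"fmt"
	"os"

	"github.com/containous/traefik/pkg/cli"
)

type cfg struct {
	Foo string
}

func main() {
	c := &cfg{}

	cmd := &cli.Command{
		Name:          "app",
		Description:   "Example using the file and flag loaders",
		Configuration: c,
		Resources:     []cli.ResourceLoader{&cli.FileLoader{}, &cli.FlagLoader{}},
		Run: func(_ []string) error {
			fmt.Println("foo =", c.Foo)
			return nil
		},
	}

	// "app --configFile=./config.toml" decodes the file (e.g. containing `foo = "bar"`)
	// and skips the flag loader; without --configFile, FileLoader also looks in
	// /etc/traefik/, $XDG_CONFIG_HOME/, $HOME/.config/ and the working directory for
	// traefik.{toml,yaml,yml}. "app --foo=bar" with no file found decodes the flag instead.
	if err := cli.Execute(cmd); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}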
+func (*FlagLoader) Load(args []string, cmd *Command) (bool, error) { + if err := flag.Decode(args, cmd.Configuration); err != nil { + return false, fmt.Errorf("failed to decode configuration from flags: %v", err) + } + + log.WithoutContext().Println("Configuration loaded from flags.") + + return true, nil +} diff --git a/pkg/collector/collector.go b/pkg/collector/collector.go index e69385fcd..0b41dd0ba 100644 --- a/pkg/collector/collector.go +++ b/pkg/collector/collector.go @@ -56,7 +56,11 @@ func Collect(staticConfiguration *static.Configuration) error { return err } - _, err = makeHTTPClient().Post(collectorURL, "application/json; charset=utf-8", buf) + resp, err := makeHTTPClient().Post(collectorURL, "application/json; charset=utf-8", buf) + if resp != nil { + resp.Body.Close() + } + return err } diff --git a/pkg/config/dyn_config.go b/pkg/config/dyn_config.go index 23d0b601b..1ccea24aa 100644 --- a/pkg/config/dyn_config.go +++ b/pkg/config/dyn_config.go @@ -125,9 +125,9 @@ type HealthCheck struct { Scheme string `json:"scheme,omitempty" toml:",omitempty"` Path string `json:"path,omitempty" toml:",omitempty"` Port int `json:"port,omitempty" toml:",omitempty,omitzero"` - // FIXME change string to parse.Duration + // FIXME change string to types.Duration Interval string `json:"interval,omitempty" toml:",omitempty"` - // FIXME change string to parse.Duration + // FIXME change string to types.Duration Timeout string `json:"timeout,omitempty" toml:",omitempty"` Hostname string `json:"hostname,omitempty" toml:",omitempty"` Headers map[string]string `json:"headers,omitempty" toml:",omitempty"` diff --git a/pkg/config/env/env.go b/pkg/config/env/env.go new file mode 100644 index 000000000..e71314b1d --- /dev/null +++ b/pkg/config/env/env.go @@ -0,0 +1,50 @@ +// Package env implements encoding and decoding between environment variable and a typed Configuration. +package env + +import ( + "strings" + + "github.com/containous/traefik/pkg/config/parser" +) + +// Decode decodes the given environment variables into the given element. +// The operation goes through four stages roughly summarized as: +// env vars -> map +// map -> tree of untyped nodes +// untyped nodes -> nodes augmented with metadata such as kind (inferred from element) +// "typed" nodes -> typed element +func Decode(environ []string, element interface{}) error { + vars := make(map[string]string) + for _, evr := range environ { + n := strings.SplitN(evr, "=", 2) + if strings.HasPrefix(strings.ToUpper(n[0]), "TRAEFIK_") { + key := strings.ReplaceAll(strings.ToLower(n[0]), "_", ".") + vars[key] = n[1] + } + } + + return parser.Decode(vars, element) +} + +// Encode encodes the configuration in element into the environment variables represented in the returned Flats. 
+// The operation goes through three stages roughly summarized as: +// typed configuration in element -> tree of untyped nodes +// untyped nodes -> nodes augmented with metadata such as kind (inferred from element) +// "typed" nodes -> environment variables with default values (determined by type/kind) +func Encode(element interface{}) ([]parser.Flat, error) { + if element == nil { + return nil, nil + } + + node, err := parser.EncodeToNode(element, false) + if err != nil { + return nil, err + } + + err = parser.AddMetadata(element, node) + if err != nil { + return nil, err + } + + return parser.EncodeToFlat(element, node, parser.FlatOpts{Case: "upper", Separator: "_"}) +} diff --git a/pkg/config/env/env_test.go b/pkg/config/env/env_test.go new file mode 100644 index 000000000..342a1f77a --- /dev/null +++ b/pkg/config/env/env_test.go @@ -0,0 +1,498 @@ +package env + +import ( + "testing" + + "github.com/containous/traefik/pkg/config/generator" + "github.com/containous/traefik/pkg/config/parser" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDecode(t *testing.T) { + testCases := []struct { + desc string + environ []string + element interface{} + expected interface{} + }{ + { + desc: "no env vars", + environ: nil, + expected: nil, + }, + { + desc: "bool value", + environ: []string{"TRAEFIK_FOO=true"}, + element: &struct { + Foo bool + }{}, + expected: &struct { + Foo bool + }{ + Foo: true, + }, + }, + { + desc: "equal", + environ: []string{"TRAEFIK_FOO=bar"}, + element: &struct { + Foo string + }{}, + expected: &struct { + Foo string + }{ + Foo: "bar", + }, + }, + { + desc: "multiple bool flags without value", + environ: []string{"TRAEFIK_FOO=true", "TRAEFIK_BAR=true"}, + element: &struct { + Foo bool + Bar bool + }{}, + expected: &struct { + Foo bool + Bar bool + }{ + Foo: true, + Bar: true, + }, + }, + { + desc: "map string", + environ: []string{"TRAEFIK_FOO_NAME=bar"}, + element: &struct { + Foo map[string]string + }{}, + expected: &struct { + Foo map[string]string + }{ + Foo: map[string]string{ + "name": "bar", + }, + }, + }, + { + desc: "map struct", + environ: []string{"TRAEFIK_FOO_NAME_VALUE=bar"}, + element: &struct { + Foo map[string]struct{ Value string } + }{}, + expected: &struct { + Foo map[string]struct{ Value string } + }{ + Foo: map[string]struct{ Value string }{ + "name": { + Value: "bar", + }, + }, + }, + }, + { + desc: "map struct with sub-struct", + environ: []string{"TRAEFIK_FOO_NAME_BAR_VALUE=bar"}, + element: &struct { + Foo map[string]struct { + Bar *struct{ Value string } + } + }{}, + expected: &struct { + Foo map[string]struct { + Bar *struct{ Value string } + } + }{ + Foo: map[string]struct { + Bar *struct{ Value string } + }{ + "name": { + Bar: &struct { + Value string + }{ + Value: "bar", + }, + }, + }, + }, + }, + { + desc: "map struct with sub-map", + environ: []string{"TRAEFIK_FOO_NAME1_BAR_NAME2_VALUE=bar"}, + element: &struct { + Foo map[string]struct { + Bar map[string]struct{ Value string } + } + }{}, + expected: &struct { + Foo map[string]struct { + Bar map[string]struct{ Value string } + } + }{ + Foo: map[string]struct { + Bar map[string]struct{ Value string } + }{ + "name1": { + Bar: map[string]struct{ Value string }{ + "name2": { + Value: "bar", + }, + }, + }, + }, + }, + }, + { + desc: "slice", + environ: []string{"TRAEFIK_FOO=bar,baz"}, + element: &struct { + Foo []string + }{}, + expected: &struct { + Foo []string + }{ + Foo: []string{"bar", "baz"}, + }, + }, + { + desc: "struct pointer value", + 
environ: []string{"TRAEFIK_FOO=true"}, + element: &struct { + Foo *struct{ Field string } `label:"allowEmpty"` + }{}, + expected: &struct { + Foo *struct{ Field string } `label:"allowEmpty"` + }{ + Foo: &struct{ Field string }{}, + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + err := Decode(test.environ, test.element) + require.NoError(t, err) + + assert.Equal(t, test.expected, test.element) + }) + } +} + +func TestEncode(t *testing.T) { + element := &Ya{ + Foo: &Yaa{ + FieldIn1: "bar", + FieldIn2: false, + FieldIn3: 1, + FieldIn4: map[string]string{ + parser.MapNamePlaceholder: "", + }, + FieldIn5: map[string]int{ + parser.MapNamePlaceholder: 0, + }, + FieldIn6: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + FieldIn7: map[string]struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + FieldIn8: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + FieldIn9: map[string]*struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + FieldIn10: struct{ Field string }{}, + FieldIn11: &struct{ Field string }{}, + FieldIn12: func(v string) *string { return &v }(""), + FieldIn13: func(v bool) *bool { return &v }(false), + FieldIn14: func(v int) *int { return &v }(0), + }, + Field1: "bir", + Field2: true, + Field3: 0, + Field4: map[string]string{ + parser.MapNamePlaceholder: "", + }, + Field5: map[string]int{ + parser.MapNamePlaceholder: 0, + }, + Field6: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + Field7: map[string]struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + Field8: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + Field9: map[string]*struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + Field10: struct{ Field string }{}, + Field11: &struct{ Field string }{}, + Field12: func(v string) *string { return &v }(""), + Field13: func(v bool) *bool { return &v }(false), + Field14: func(v int) *int { return &v }(0), + Field15: []int{7}, + } + generator.Generate(element) + + flats, err := Encode(element) + require.NoError(t, err) + + expected := []parser.Flat{ + { + Name: "TRAEFIK_FIELD1", + Description: "", + Default: "bir", + }, + { + Name: "TRAEFIK_FIELD10", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD10_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD11_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD12", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD13", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FIELD14", + Description: "", + Default: "0", + }, + { + Name: "TRAEFIK_FIELD15", + Description: "", + Default: "7", + }, + { + Name: "TRAEFIK_FIELD2", + Description: "", + Default: "true", + }, + { + Name: "TRAEFIK_FIELD3", + Description: "", + Default: "0", + }, + { + Name: "TRAEFIK_FIELD4_\u003cNAME\u003e", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD5_\u003cNAME\u003e", + Description: "", + Default: "0", + }, + { + Name: "TRAEFIK_FIELD6_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: 
"TRAEFIK_FIELD6_\u003cNAME\u003e_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD7_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FIELD7_\u003cNAME\u003e_FIELD_\u003cNAME\u003e", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD8_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FIELD8_\u003cNAME\u003e_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FIELD9_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FIELD9_\u003cNAME\u003e_FIELD_\u003cNAME\u003e", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN1", + Description: "", + Default: "bar", + }, + { + Name: "TRAEFIK_FOO_FIELDIN10", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN10_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN11_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN12", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN13", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FOO_FIELDIN14", + Description: "", + Default: "0", + }, + { + Name: "TRAEFIK_FOO_FIELDIN2", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FOO_FIELDIN3", + Description: "", + Default: "1", + }, + { + Name: "TRAEFIK_FOO_FIELDIN4_\u003cNAME\u003e", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN5_\u003cNAME\u003e", + Description: "", + Default: "0", + }, + { + Name: "TRAEFIK_FOO_FIELDIN6_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FOO_FIELDIN6_\u003cNAME\u003e_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN7_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FOO_FIELDIN7_\u003cNAME\u003e_FIELD_\u003cNAME\u003e", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN8_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FOO_FIELDIN8_\u003cNAME\u003e_FIELD", + Description: "", + Default: "", + }, + { + Name: "TRAEFIK_FOO_FIELDIN9_\u003cNAME\u003e", + Description: "", + Default: "false", + }, + { + Name: "TRAEFIK_FOO_FIELDIN9_\u003cNAME\u003e_FIELD_\u003cNAME\u003e", + Description: "", + Default: "", + }, + } + + assert.Equal(t, expected, flats) +} + +type Ya struct { + Foo *Yaa + Field1 string + Field2 bool + Field3 int + Field4 map[string]string + Field5 map[string]int + Field6 map[string]struct{ Field string } + Field7 map[string]struct{ Field map[string]string } + Field8 map[string]*struct{ Field string } + Field9 map[string]*struct{ Field map[string]string } + Field10 struct{ Field string } + Field11 *struct{ Field string } + Field12 *string + Field13 *bool + Field14 *int + Field15 []int +} + +type Yaa struct { + FieldIn1 string + FieldIn2 bool + FieldIn3 int + FieldIn4 map[string]string + FieldIn5 map[string]int + FieldIn6 map[string]struct{ Field string } + FieldIn7 map[string]struct{ Field map[string]string } + FieldIn8 map[string]*struct{ Field string } + FieldIn9 map[string]*struct{ Field map[string]string } + FieldIn10 struct{ Field string } + FieldIn11 *struct{ Field string } + FieldIn12 *string + FieldIn13 *bool + FieldIn14 *int +} diff --git a/pkg/config/file/file.go b/pkg/config/file/file.go new file mode 100644 index 000000000..6601669ce --- /dev/null +++ b/pkg/config/file/file.go @@ -0,0 +1,31 @@ +// Package file implements decoding between 
configuration in a file and a typed Configuration. +package file + +import ( + "github.com/containous/traefik/pkg/config/parser" +) + +// Decode decodes the given configuration file into the given element. +// The operation goes through three stages roughly summarized as: +// file contents -> tree of untyped nodes +// untyped nodes -> nodes augmented with metadata such as kind (inferred from element) +// "typed" nodes -> typed element +func Decode(filePath string, element interface{}) error { + if element == nil { + return nil + } + + filters := getRootFieldNames(element) + + root, err := decodeFileToNode(filePath, filters...) + if err != nil { + return err + } + + err = parser.AddMetadata(element, root) + if err != nil { + return err + } + + return parser.Fill(element, root) +} diff --git a/pkg/config/file/file_node.go b/pkg/config/file/file_node.go new file mode 100644 index 000000000..d23e2344b --- /dev/null +++ b/pkg/config/file/file_node.go @@ -0,0 +1,86 @@ +package file + +import ( + "fmt" + "io/ioutil" + "path/filepath" + "reflect" + + "github.com/BurntSushi/toml" + "github.com/containous/traefik/pkg/config/parser" + "gopkg.in/yaml.v2" +) + +// decodeFileToNode decodes the configuration in filePath in a tree of untyped nodes. +// If filters is not empty, it skips any configuration element whose name is +// not among filters. +func decodeFileToNode(filePath string, filters ...string) (*parser.Node, error) { + content, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, err + } + + data := make(map[string]interface{}) + + switch filepath.Ext(filePath) { + case ".toml": + err = toml.Unmarshal(content, &data) + if err != nil { + return nil, err + } + + case ".yml", ".yaml": + var err error + err = yaml.Unmarshal(content, data) + if err != nil { + return nil, err + } + + return decodeRawToNode(data, filters...) + + default: + return nil, fmt.Errorf("unsupported file extension: %s", filePath) + } + + return decodeRawToNode(data, filters...) +} + +func getRootFieldNames(element interface{}) []string { + if element == nil { + return nil + } + + rootType := reflect.TypeOf(element) + + return getFieldNames(rootType) +} + +func getFieldNames(rootType reflect.Type) []string { + var names []string + + if rootType.Kind() == reflect.Ptr { + rootType = rootType.Elem() + } + + if rootType.Kind() != reflect.Struct { + return nil + } + + for i := 0; i < rootType.NumField(); i++ { + field := rootType.Field(i) + + if !parser.IsExported(field) { + continue + } + + if field.Anonymous && + (field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct || field.Type.Kind() == reflect.Struct) { + names = append(names, getFieldNames(field.Type)...) 
+ continue + } + + names = append(names, field.Name) + } + + return names +} diff --git a/pkg/config/file/file_node_test.go b/pkg/config/file/file_node_test.go new file mode 100644 index 000000000..cb8217d12 --- /dev/null +++ b/pkg/config/file/file_node_test.go @@ -0,0 +1,599 @@ +package file + +import ( + "testing" + + "github.com/containous/traefik/pkg/config/parser" + "github.com/stretchr/testify/assert" +) + +func Test_getRootFieldNames(t *testing.T) { + testCases := []struct { + desc string + element interface{} + expected []string + }{ + { + desc: "simple fields", + element: &Yo{}, + expected: []string{"Foo", "Fii", "Fuu", "Yi"}, + }, + { + desc: "embedded struct", + element: &Yu{}, + expected: []string{"Foo", "Fii", "Fuu"}, + }, + { + desc: "embedded struct pointer", + element: &Ye{}, + expected: []string{"Foo", "Fii", "Fuu"}, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + names := getRootFieldNames(test.element) + + assert.Equal(t, test.expected, names) + }) + } +} + +func Test_decodeFileToNode_compare(t *testing.T) { + nodeToml, err := decodeFileToNode("./fixtures/sample.toml", + "Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "ACME") + if err != nil { + t.Fatal(err) + } + + nodeYaml, err := decodeFileToNode("./fixtures/sample.yml") + if err != nil { + t.Fatal(err) + } + + assert.Equal(t, nodeToml, nodeYaml) +} + +func Test_decodeFileToNode_Toml(t *testing.T) { + node, err := decodeFileToNode("./fixtures/sample.toml", + "Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "ACME") + if err != nil { + t.Fatal(err) + } + + expected := &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "ACME", + Children: []*parser.Node{ + {Name: "ACMELogging", Value: "true"}, + {Name: "CAServer", Value: "foobar"}, + {Name: "DNSChallenge", Children: []*parser.Node{ + {Name: "DelayBeforeCheck", Value: "42"}, + {Name: "DisablePropagationCheck", Value: "true"}, + {Name: "Provider", Value: "foobar"}, + {Name: "Resolvers", Value: "foobar,foobar"}, + }}, + {Name: "Domains", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Main", Value: "foobar"}, + {Name: "SANs", Value: "foobar,foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Main", Value: "foobar"}, + {Name: "SANs", Value: "foobar,foobar"}, + }}, + }}, + {Name: "Email", Value: "foobar"}, + {Name: "EntryPoint", Value: "foobar"}, + {Name: "HTTPChallenge", Children: []*parser.Node{ + {Name: "EntryPoint", Value: "foobar"}}}, + {Name: "KeyType", Value: "foobar"}, + {Name: "OnHostRule", Value: "true"}, + {Name: "Storage", Value: "foobar"}, + {Name: "TLSChallenge"}, + }, + }, + {Name: "API", Children: []*parser.Node{ + {Name: "Dashboard", Value: "true"}, + {Name: "EntryPoint", Value: "foobar"}, + {Name: "Middlewares", Value: "foobar,foobar"}, + {Name: "Statistics", Children: []*parser.Node{ + {Name: "RecentErrors", Value: "42"}}}}}, + {Name: "AccessLog", Children: []*parser.Node{ + {Name: "BufferingSize", Value: "42"}, + {Name: "Fields", Children: []*parser.Node{ + {Name: "DefaultMode", Value: "foobar"}, + {Name: "Headers", Children: []*parser.Node{ + {Name: "DefaultMode", Value: "foobar"}, + {Name: "Names", Children: []*parser.Node{ + {Name: "name0", Value: "foobar"}, + {Name: "name1", Value: "foobar"}}}}}, + {Name: "Names", Children: []*parser.Node{ + {Name: 
"name0", Value: "foobar"}, + {Name: "name1", Value: "foobar"}}}}}, + {Name: "FilePath", Value: "foobar"}, + {Name: "Filters", Children: []*parser.Node{ + {Name: "MinDuration", Value: "42"}, + {Name: "RetryAttempts", Value: "true"}, + {Name: "StatusCodes", Value: "foobar,foobar"}}}, + {Name: "Format", Value: "foobar"}}}, + {Name: "EntryPoints", Children: []*parser.Node{ + {Name: "EntryPoint0", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "ForwardedHeaders", Children: []*parser.Node{ + {Name: "Insecure", Value: "true"}, + {Name: "TrustedIPs", Value: "foobar,foobar"}}}, + {Name: "ProxyProtocol", Children: []*parser.Node{ + {Name: "Insecure", Value: "true"}, + {Name: "TrustedIPs", Value: "foobar,foobar"}}}, + {Name: "Transport", Children: []*parser.Node{ + {Name: "LifeCycle", Children: []*parser.Node{ + {Name: "GraceTimeOut", Value: "42"}, + {Name: "RequestAcceptGraceTimeout", Value: "42"}}}, + {Name: "RespondingTimeouts", Children: []*parser.Node{ + {Name: "IdleTimeout", Value: "42"}, + {Name: "ReadTimeout", Value: "42"}, + {Name: "WriteTimeout", Value: "42"}}}}}}}}}, + {Name: "Global", Children: []*parser.Node{ + {Name: "CheckNewVersion", Value: "true"}, + {Name: "Debug", Value: "true"}, + {Name: "SendAnonymousUsage", Value: "true"}}}, + {Name: "HostResolver", Children: []*parser.Node{ + {Name: "CnameFlattening", Value: "true"}, + {Name: "ResolvConfig", Value: "foobar"}, + {Name: "ResolvDepth", Value: "42"}}}, + {Name: "Log", Children: []*parser.Node{ + {Name: "FilePath", Value: "foobar"}, + {Name: "Format", Value: "foobar"}, + {Name: "Level", Value: "foobar"}}}, + {Name: "Metrics", Children: []*parser.Node{ + {Name: "Datadog", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "PushInterval", Value: "10s"}}}, + {Name: "InfluxDB", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "Database", Value: "foobar"}, + {Name: "Password", Value: "foobar"}, + {Name: "Protocol", Value: "foobar"}, + {Name: "PushInterval", Value: "10s"}, + {Name: "RetentionPolicy", Value: "foobar"}, + {Name: "Username", Value: "foobar"}}}, + {Name: "Prometheus", Children: []*parser.Node{ + {Name: "Buckets", Value: "42,42"}, + {Name: "EntryPoint", Value: "foobar"}, + {Name: "Middlewares", Value: "foobar,foobar"}}}, + {Name: "StatsD", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "PushInterval", Value: "10s"}}}}}, + {Name: "Ping", Children: []*parser.Node{ + {Name: "EntryPoint", Value: "foobar"}, + {Name: "Middlewares", Value: "foobar,foobar"}}}, + {Name: "Providers", Children: []*parser.Node{ + {Name: "Docker", Children: []*parser.Node{ + {Name: "Constraints", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + }}, + {Name: "DefaultRule", Value: "foobar"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "ExposedByDefault", Value: "true"}, + {Name: "Network", Value: "foobar"}, + {Name: "SwarmMode", Value: "true"}, + {Name: "SwarmModeRefreshSeconds", Value: "42"}, + {Name: "TLS", Children: []*parser.Node{ + {Name: "CA", Value: "foobar"}, + {Name: "CAOptional", Value: "true"}, + {Name: "Cert", Value: "foobar"}, + {Name: "InsecureSkipVerify", Value: "true"}, + {Name: "Key", Value: "foobar"}}}, + {Name: "UseBindPortIP", Value: "true"}, + {Name: 
"Watch", Value: "true"}}}, + {Name: "File", Children: []*parser.Node{ + {Name: "DebugLogGeneratedTemplate", Value: "true"}, + {Name: "Directory", Value: "foobar"}, + {Name: "Filename", Value: "foobar"}, + {Name: "TraefikFile", Value: "foobar"}, + {Name: "Watch", Value: "true"}}}, + {Name: "Kubernetes", Children: []*parser.Node{ + {Name: "CertAuthFilePath", Value: "foobar"}, + {Name: "DisablePassHostHeaders", Value: "true"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "IngressClass", Value: "foobar"}, + {Name: "IngressEndpoint", Children: []*parser.Node{ + {Name: "Hostname", Value: "foobar"}, + {Name: "IP", Value: "foobar"}, + {Name: "PublishedService", Value: "foobar"}}}, + {Name: "LabelSelector", Value: "foobar"}, + {Name: "Namespaces", Value: "foobar,foobar"}, + {Name: "Token", Value: "foobar"}}}, + {Name: "KubernetesCRD", + Children: []*parser.Node{ + {Name: "CertAuthFilePath", Value: "foobar"}, + {Name: "DisablePassHostHeaders", Value: "true"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "IngressClass", Value: "foobar"}, + {Name: "LabelSelector", Value: "foobar"}, + {Name: "Namespaces", Value: "foobar,foobar"}, + {Name: "Token", Value: "foobar"}}}, + {Name: "Marathon", Children: []*parser.Node{ + {Name: "Basic", Children: []*parser.Node{ + {Name: "HTTPBasicAuthUser", Value: "foobar"}, + {Name: "HTTPBasicPassword", Value: "foobar"}}}, + {Name: "Constraints", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + }}, + {Name: "DCOSToken", Value: "foobar"}, + {Name: "DefaultRule", Value: "foobar"}, + {Name: "DialerTimeout", Value: "42"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "ExposedByDefault", Value: "true"}, + {Name: "FilterMarathonConstraints", Value: "true"}, + {Name: "ForceTaskHostname", Value: "true"}, + {Name: "KeepAlive", Value: "42"}, + {Name: "RespectReadinessChecks", Value: "true"}, + {Name: "ResponseHeaderTimeout", Value: "42"}, + {Name: "TLS", Children: []*parser.Node{ + {Name: "CA", Value: "foobar"}, + {Name: "CAOptional", Value: "true"}, + {Name: "Cert", Value: "foobar"}, + {Name: "InsecureSkipVerify", Value: "true"}, + {Name: "Key", Value: "foobar"}}}, + {Name: "TLSHandshakeTimeout", Value: "42"}, + {Name: "Trace", Value: "true"}, + {Name: "Watch", Value: "true"}}}, + {Name: "ProvidersThrottleDuration", Value: "42"}, + {Name: "Rancher", Children: []*parser.Node{ + {Name: "Constraints", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + }}, + {Name: "DefaultRule", Value: "foobar"}, + {Name: "EnableServiceHealthFilter", Value: "true"}, + {Name: "ExposedByDefault", Value: "true"}, + {Name: "IntervalPoll", Value: "true"}, + {Name: "Prefix", Value: "foobar"}, + {Name: "RefreshSeconds", Value: "42"}, + {Name: "Watch", Value: "true"}}}, + {Name: "Rest", Children: []*parser.Node{ + {Name: "EntryPoint", Value: "foobar"}}}}}, + {Name: "ServersTransport", Children: []*parser.Node{ + {Name: "ForwardingTimeouts", Children: []*parser.Node{ + {Name: "DialTimeout", Value: "42"}, + {Name: 
"ResponseHeaderTimeout", Value: "42"}}}, + {Name: "InsecureSkipVerify", Value: "true"}, + {Name: "MaxIdleConnsPerHost", Value: "42"}, + {Name: "RootCAs", Value: "foobar,foobar"}}}, + {Name: "Tracing", Children: []*parser.Node{ + {Name: "Backend", Value: "foobar"}, + {Name: "DataDog", Children: []*parser.Node{ + {Name: "BagagePrefixHeaderName", Value: "foobar"}, + {Name: "Debug", Value: "true"}, + {Name: "GlobalTag", Value: "foobar"}, + {Name: "LocalAgentHostPort", Value: "foobar"}, + {Name: "ParentIDHeaderName", Value: "foobar"}, + {Name: "PrioritySampling", Value: "true"}, + {Name: "SamplingPriorityHeaderName", Value: "foobar"}, + {Name: "TraceIDHeaderName", Value: "foobar"}}}, + {Name: "Instana", Children: []*parser.Node{ + {Name: "LocalAgentHost", Value: "foobar"}, + {Name: "LocalAgentPort", Value: "42"}, + {Name: "LogLevel", Value: "foobar"}}}, + {Name: "Jaeger", Children: []*parser.Node{ + {Name: "Gen128Bit", Value: "true"}, + {Name: "LocalAgentHostPort", Value: "foobar"}, + {Name: "Propagation", Value: "foobar"}, + {Name: "SamplingParam", Value: "42"}, + {Name: "SamplingServerURL", Value: "foobar"}, + {Name: "SamplingType", Value: "foobar"}, + {Name: "TraceContextHeaderName", Value: "foobar"}}}, + {Name: "ServiceName", Value: "foobar"}, + {Name: "SpanNameLimit", Value: "42"}, + {Name: "Zipkin", Children: []*parser.Node{ + {Name: "Debug", Value: "true"}, + {Name: "HTTPEndpoint", Value: "foobar"}, + {Name: "ID128Bit", Value: "true"}, + {Name: "SameSpan", Value: "true"}, + {Name: "SampleRate", Value: "42"}}}}}}, + } + + assert.Equal(t, expected, node) +} + +func Test_decodeFileToNode_Yaml(t *testing.T) { + node, err := decodeFileToNode("./fixtures/sample.yml") + if err != nil { + t.Fatal(err) + } + + expected := &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "ACME", + Children: []*parser.Node{ + {Name: "ACMELogging", Value: "true"}, + {Name: "CAServer", Value: "foobar"}, + {Name: "DNSChallenge", Children: []*parser.Node{ + {Name: "DelayBeforeCheck", Value: "42"}, + {Name: "DisablePropagationCheck", Value: "true"}, + {Name: "Provider", Value: "foobar"}, + {Name: "Resolvers", Value: "foobar,foobar"}, + }}, + {Name: "Domains", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Main", Value: "foobar"}, + {Name: "SANs", Value: "foobar,foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Main", Value: "foobar"}, + {Name: "SANs", Value: "foobar,foobar"}, + }}, + }}, + {Name: "Email", Value: "foobar"}, + {Name: "EntryPoint", Value: "foobar"}, + {Name: "HTTPChallenge", Children: []*parser.Node{ + {Name: "EntryPoint", Value: "foobar"}}}, + {Name: "KeyType", Value: "foobar"}, + {Name: "OnHostRule", Value: "true"}, + {Name: "Storage", Value: "foobar"}, + {Name: "TLSChallenge"}, + }, + }, + {Name: "API", Children: []*parser.Node{ + {Name: "Dashboard", Value: "true"}, + {Name: "EntryPoint", Value: "foobar"}, + {Name: "Middlewares", Value: "foobar,foobar"}, + {Name: "Statistics", Children: []*parser.Node{ + {Name: "RecentErrors", Value: "42"}}}}}, + {Name: "AccessLog", Children: []*parser.Node{ + {Name: "BufferingSize", Value: "42"}, + {Name: "Fields", Children: []*parser.Node{ + {Name: "DefaultMode", Value: "foobar"}, + {Name: "Headers", Children: []*parser.Node{ + {Name: "DefaultMode", Value: "foobar"}, + {Name: "Names", Children: []*parser.Node{ + {Name: "name0", Value: "foobar"}, + {Name: "name1", Value: "foobar"}}}}}, + {Name: "Names", Children: []*parser.Node{ + {Name: "name0", Value: "foobar"}, + {Name: "name1", Value: 
"foobar"}}}}}, + {Name: "FilePath", Value: "foobar"}, + {Name: "Filters", Children: []*parser.Node{ + {Name: "MinDuration", Value: "42"}, + {Name: "RetryAttempts", Value: "true"}, + {Name: "StatusCodes", Value: "foobar,foobar"}}}, + {Name: "Format", Value: "foobar"}}}, + {Name: "EntryPoints", Children: []*parser.Node{ + {Name: "EntryPoint0", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "ForwardedHeaders", Children: []*parser.Node{ + {Name: "Insecure", Value: "true"}, + {Name: "TrustedIPs", Value: "foobar,foobar"}}}, + {Name: "ProxyProtocol", Children: []*parser.Node{ + {Name: "Insecure", Value: "true"}, + {Name: "TrustedIPs", Value: "foobar,foobar"}}}, + {Name: "Transport", Children: []*parser.Node{ + {Name: "LifeCycle", Children: []*parser.Node{ + {Name: "GraceTimeOut", Value: "42"}, + {Name: "RequestAcceptGraceTimeout", Value: "42"}}}, + {Name: "RespondingTimeouts", Children: []*parser.Node{ + {Name: "IdleTimeout", Value: "42"}, + {Name: "ReadTimeout", Value: "42"}, + {Name: "WriteTimeout", Value: "42"}}}}}}}}}, + {Name: "Global", Children: []*parser.Node{ + {Name: "CheckNewVersion", Value: "true"}, + {Name: "Debug", Value: "true"}, + {Name: "SendAnonymousUsage", Value: "true"}}}, + {Name: "HostResolver", Children: []*parser.Node{ + {Name: "CnameFlattening", Value: "true"}, + {Name: "ResolvConfig", Value: "foobar"}, + {Name: "ResolvDepth", Value: "42"}}}, + {Name: "Log", Children: []*parser.Node{ + {Name: "FilePath", Value: "foobar"}, + {Name: "Format", Value: "foobar"}, + {Name: "Level", Value: "foobar"}}}, + {Name: "Metrics", Children: []*parser.Node{ + {Name: "Datadog", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "PushInterval", Value: "10s"}}}, + {Name: "InfluxDB", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "Database", Value: "foobar"}, + {Name: "Password", Value: "foobar"}, + {Name: "Protocol", Value: "foobar"}, + {Name: "PushInterval", Value: "10s"}, + {Name: "RetentionPolicy", Value: "foobar"}, + {Name: "Username", Value: "foobar"}}}, + {Name: "Prometheus", Children: []*parser.Node{ + {Name: "Buckets", Value: "42,42"}, + {Name: "EntryPoint", Value: "foobar"}, + {Name: "Middlewares", Value: "foobar,foobar"}}}, + {Name: "StatsD", Children: []*parser.Node{ + {Name: "Address", Value: "foobar"}, + {Name: "PushInterval", Value: "10s"}}}}}, + {Name: "Ping", Children: []*parser.Node{ + {Name: "EntryPoint", Value: "foobar"}, + {Name: "Middlewares", Value: "foobar,foobar"}}}, + {Name: "Providers", Children: []*parser.Node{ + {Name: "Docker", Children: []*parser.Node{ + {Name: "Constraints", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + }}, + {Name: "DefaultRule", Value: "foobar"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "ExposedByDefault", Value: "true"}, + {Name: "Network", Value: "foobar"}, + {Name: "SwarmMode", Value: "true"}, + {Name: "SwarmModeRefreshSeconds", Value: "42"}, + {Name: "TLS", Children: []*parser.Node{ + {Name: "CA", Value: "foobar"}, + {Name: "CAOptional", Value: "true"}, + {Name: "Cert", Value: "foobar"}, + {Name: "InsecureSkipVerify", Value: "true"}, + {Name: "Key", Value: "foobar"}}}, + {Name: "UseBindPortIP", Value: "true"}, + {Name: "Watch", Value: "true"}}}, + {Name: "File", Children: 
[]*parser.Node{ + {Name: "DebugLogGeneratedTemplate", Value: "true"}, + {Name: "Directory", Value: "foobar"}, + {Name: "Filename", Value: "foobar"}, + {Name: "TraefikFile", Value: "foobar"}, + {Name: "Watch", Value: "true"}}}, + {Name: "Kubernetes", Children: []*parser.Node{ + {Name: "CertAuthFilePath", Value: "foobar"}, + {Name: "DisablePassHostHeaders", Value: "true"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "IngressClass", Value: "foobar"}, + {Name: "IngressEndpoint", Children: []*parser.Node{ + {Name: "Hostname", Value: "foobar"}, + {Name: "IP", Value: "foobar"}, + {Name: "PublishedService", Value: "foobar"}}}, + {Name: "LabelSelector", Value: "foobar"}, + {Name: "Namespaces", Value: "foobar,foobar"}, + {Name: "Token", Value: "foobar"}}}, + {Name: "KubernetesCRD", + Children: []*parser.Node{ + {Name: "CertAuthFilePath", Value: "foobar"}, + {Name: "DisablePassHostHeaders", Value: "true"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "IngressClass", Value: "foobar"}, + {Name: "LabelSelector", Value: "foobar"}, + {Name: "Namespaces", Value: "foobar,foobar"}, + {Name: "Token", Value: "foobar"}}}, + {Name: "Marathon", Children: []*parser.Node{ + {Name: "Basic", Children: []*parser.Node{ + {Name: "HTTPBasicAuthUser", Value: "foobar"}, + {Name: "HTTPBasicPassword", Value: "foobar"}}}, + {Name: "Constraints", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + }}, + {Name: "DCOSToken", Value: "foobar"}, + {Name: "DefaultRule", Value: "foobar"}, + {Name: "DialerTimeout", Value: "42"}, + {Name: "Endpoint", Value: "foobar"}, + {Name: "ExposedByDefault", Value: "true"}, + {Name: "FilterMarathonConstraints", Value: "true"}, + {Name: "ForceTaskHostname", Value: "true"}, + {Name: "KeepAlive", Value: "42"}, + {Name: "RespectReadinessChecks", Value: "true"}, + {Name: "ResponseHeaderTimeout", Value: "42"}, + {Name: "TLS", Children: []*parser.Node{ + {Name: "CA", Value: "foobar"}, + {Name: "CAOptional", Value: "true"}, + {Name: "Cert", Value: "foobar"}, + {Name: "InsecureSkipVerify", Value: "true"}, + {Name: "Key", Value: "foobar"}}}, + {Name: "TLSHandshakeTimeout", Value: "42"}, + {Name: "Trace", Value: "true"}, + {Name: "Watch", Value: "true"}}}, + {Name: "ProvidersThrottleDuration", Value: "42"}, + {Name: "Rancher", Children: []*parser.Node{ + {Name: "Constraints", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "Key", Value: "foobar"}, + {Name: "MustMatch", Value: "true"}, + {Name: "Value", Value: "foobar"}, + }}, + }}, + {Name: "DefaultRule", Value: "foobar"}, + {Name: "EnableServiceHealthFilter", Value: "true"}, + {Name: "ExposedByDefault", Value: "true"}, + {Name: "IntervalPoll", Value: "true"}, + {Name: "Prefix", Value: "foobar"}, + {Name: "RefreshSeconds", Value: "42"}, + {Name: "Watch", Value: "true"}}}, + {Name: "Rest", Children: []*parser.Node{ + {Name: "EntryPoint", Value: "foobar"}}}}}, + {Name: "ServersTransport", Children: []*parser.Node{ + {Name: "ForwardingTimeouts", Children: []*parser.Node{ + {Name: "DialTimeout", Value: "42"}, + {Name: "ResponseHeaderTimeout", Value: "42"}}}, + {Name: "InsecureSkipVerify", 
Value: "true"}, + {Name: "MaxIdleConnsPerHost", Value: "42"}, + {Name: "RootCAs", Value: "foobar,foobar"}}}, + {Name: "Tracing", Children: []*parser.Node{ + {Name: "Backend", Value: "foobar"}, + {Name: "DataDog", Children: []*parser.Node{ + {Name: "BagagePrefixHeaderName", Value: "foobar"}, + {Name: "Debug", Value: "true"}, + {Name: "GlobalTag", Value: "foobar"}, + {Name: "LocalAgentHostPort", Value: "foobar"}, + {Name: "ParentIDHeaderName", Value: "foobar"}, + {Name: "PrioritySampling", Value: "true"}, + {Name: "SamplingPriorityHeaderName", Value: "foobar"}, + {Name: "TraceIDHeaderName", Value: "foobar"}}}, + {Name: "Instana", Children: []*parser.Node{ + {Name: "LocalAgentHost", Value: "foobar"}, + {Name: "LocalAgentPort", Value: "42"}, + {Name: "LogLevel", Value: "foobar"}}}, + {Name: "Jaeger", Children: []*parser.Node{ + {Name: "Gen128Bit", Value: "true"}, + {Name: "LocalAgentHostPort", Value: "foobar"}, + {Name: "Propagation", Value: "foobar"}, + {Name: "SamplingParam", Value: "42"}, + {Name: "SamplingServerURL", Value: "foobar"}, + {Name: "SamplingType", Value: "foobar"}, + {Name: "TraceContextHeaderName", Value: "foobar"}}}, + {Name: "ServiceName", Value: "foobar"}, + {Name: "SpanNameLimit", Value: "42"}, + {Name: "Zipkin", Children: []*parser.Node{ + {Name: "Debug", Value: "true"}, + {Name: "HTTPEndpoint", Value: "foobar"}, + {Name: "ID128Bit", Value: "true"}, + {Name: "SameSpan", Value: "true"}, + {Name: "SampleRate", Value: "42"}}}}}}, + } + + assert.Equal(t, expected, node) +} diff --git a/pkg/config/file/file_test.go b/pkg/config/file/file_test.go new file mode 100644 index 000000000..c769514e3 --- /dev/null +++ b/pkg/config/file/file_test.go @@ -0,0 +1,76 @@ +package file + +import ( + "io/ioutil" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDecode_TOML(t *testing.T) { + f, err := ioutil.TempFile("", "traefik-config-*.toml") + require.NoError(t, err) + defer func() { + _ = os.Remove(f.Name()) + }() + + _, err = f.Write([]byte(` +foo = "bar" +fii = "bir" +[yi] +`)) + require.NoError(t, err) + + element := &Yo{ + Fuu: "test", + } + + err = Decode(f.Name(), element) + require.NoError(t, err) + + expected := &Yo{ + Foo: "bar", + Fii: "bir", + Fuu: "test", + Yi: &Yi{ + Foo: "foo", + Fii: "fii", + }, + } + assert.Equal(t, expected, element) +} + +func TestDecode_YAML(t *testing.T) { + f, err := ioutil.TempFile("", "traefik-config-*.yaml") + require.NoError(t, err) + defer func() { + _ = os.Remove(f.Name()) + }() + + _, err = f.Write([]byte(` +foo: bar +fii: bir +yi: {} +`)) + require.NoError(t, err) + + element := &Yo{ + Fuu: "test", + } + + err = Decode(f.Name(), element) + require.NoError(t, err) + + expected := &Yo{ + Foo: "bar", + Fii: "bir", + Fuu: "test", + Yi: &Yi{ + Foo: "foo", + Fii: "fii", + }, + } + assert.Equal(t, expected, element) +} diff --git a/pkg/config/file/fixtures/sample.toml b/pkg/config/file/fixtures/sample.toml new file mode 100644 index 000000000..a3a6373ba --- /dev/null +++ b/pkg/config/file/fixtures/sample.toml @@ -0,0 +1,539 @@ +[Global] + Debug = true + CheckNewVersion = true + SendAnonymousUsage = true + +[ServersTransport] + InsecureSkipVerify = true + RootCAs = ["foobar", "foobar"] + MaxIdleConnsPerHost = 42 + [ServersTransport.ForwardingTimeouts] + DialTimeout = 42 + ResponseHeaderTimeout = 42 + +[EntryPoints] + + [EntryPoints.EntryPoint0] + Address = "foobar" + [EntryPoints.EntryPoint0.Transport] + [EntryPoints.EntryPoint0.Transport.LifeCycle] + RequestAcceptGraceTimeout = 42 + 
GraceTimeOut = 42 + [EntryPoints.EntryPoint0.Transport.RespondingTimeouts] + ReadTimeout = 42 + WriteTimeout = 42 + IdleTimeout = 42 + [EntryPoints.EntryPoint0.ProxyProtocol] + Insecure = true + TrustedIPs = ["foobar", "foobar"] + [EntryPoints.EntryPoint0.ForwardedHeaders] + Insecure = true + TrustedIPs = ["foobar", "foobar"] + +[Providers] + ProvidersThrottleDuration = 42 + + [Providers.Docker] + Watch = true + Endpoint = "foobar" + DefaultRule = "foobar" + ExposedByDefault = true + UseBindPortIP = true + SwarmMode = true + Network = "foobar" + SwarmModeRefreshSeconds = 42 + + [[Providers.Docker.Constraints]] + Key = "foobar" + MustMatch = true + Value = "foobar" + + [[Providers.Docker.Constraints]] + Key = "foobar" + MustMatch = true + Value = "foobar" + + [Providers.Docker.TLS] + CA = "foobar" + CAOptional = true + Cert = "foobar" + Key = "foobar" + InsecureSkipVerify = true + + [Providers.File] + Directory = "foobar" + Watch = true + Filename = "foobar" + DebugLogGeneratedTemplate = true + TraefikFile = "foobar" + + [Providers.Marathon] + Trace = true + Watch = true + Endpoint = "foobar" + DefaultRule = "foobar" + ExposedByDefault = true + DCOSToken = "foobar" + FilterMarathonConstraints = true + DialerTimeout = 42 + ResponseHeaderTimeout = 42 + TLSHandshakeTimeout = 42 + KeepAlive = 42 + ForceTaskHostname = true + RespectReadinessChecks = true + + [[Providers.Marathon.Constraints]] + Key = "foobar" + MustMatch = true + Value = "foobar" + + [[Providers.Marathon.Constraints]] + Key = "foobar" + MustMatch = true + Value = "foobar" + + [Providers.Marathon.TLS] + CA = "foobar" + CAOptional = true + Cert = "foobar" + Key = "foobar" + InsecureSkipVerify = true + [Providers.Marathon.Basic] + HTTPBasicAuthUser = "foobar" + HTTPBasicPassword = "foobar" + + [Providers.Kubernetes] + Endpoint = "foobar" + Token = "foobar" + CertAuthFilePath = "foobar" + DisablePassHostHeaders = true + Namespaces = ["foobar", "foobar"] + LabelSelector = "foobar" + IngressClass = "foobar" + [Providers.Kubernetes.IngressEndpoint] + IP = "foobar" + Hostname = "foobar" + PublishedService = "foobar" + + [Providers.KubernetesCRD] + Endpoint = "foobar" + Token = "foobar" + CertAuthFilePath = "foobar" + DisablePassHostHeaders = true + Namespaces = ["foobar", "foobar"] + LabelSelector = "foobar" + IngressClass = "foobar" + + [Providers.Rest] + EntryPoint = "foobar" + + [Providers.Rancher] + Watch = true + DefaultRule = "foobar" + ExposedByDefault = true + EnableServiceHealthFilter = true + RefreshSeconds = 42 + IntervalPoll = true + Prefix = "foobar" + + [[Providers.Rancher.Constraints]] + Key = "foobar" + MustMatch = true + Value = "foobar" + + [[Providers.Rancher.Constraints]] + Key = "foobar" + MustMatch = true + Value = "foobar" + +[API] + EntryPoint = "foobar" + Dashboard = true + Middlewares = ["foobar", "foobar"] + [API.Statistics] + RecentErrors = 42 + +[Metrics] + + [Metrics.Prometheus] + Buckets = [42.0, 42.0] + EntryPoint = "foobar" + Middlewares = ["foobar", "foobar"] + + [Metrics.Datadog] + Address = "foobar" + PushInterval = "10s" + + [Metrics.StatsD] + Address = "foobar" + PushInterval = "10s" + + [Metrics.InfluxDB] + Address = "foobar" + Protocol = "foobar" + PushInterval = "10s" + Database = "foobar" + RetentionPolicy = "foobar" + Username = "foobar" + Password = "foobar" + +[Ping] + EntryPoint = "foobar" + Middlewares = ["foobar", "foobar"] + +[Log] + Level = "foobar" + FilePath = "foobar" + Format = "foobar" + +[AccessLog] + FilePath = "foobar" + Format = "foobar" + BufferingSize = 42 + 
[AccessLog.Filters] + StatusCodes = ["foobar", "foobar"] + RetryAttempts = true + MinDuration = 42 + [AccessLog.Fields] + DefaultMode = "foobar" + [AccessLog.Fields.Names] + name0 = "foobar" + name1 = "foobar" + [AccessLog.Fields.Headers] + DefaultMode = "foobar" + [AccessLog.Fields.Headers.Names] + name0 = "foobar" + name1 = "foobar" + +[Tracing] + Backend = "foobar" + ServiceName = "foobar" + SpanNameLimit = 42 + + [Tracing.Jaeger] + SamplingServerURL = "foobar" + SamplingType = "foobar" + SamplingParam = 42.0 + LocalAgentHostPort = "foobar" + Gen128Bit = true + Propagation = "foobar" + TraceContextHeaderName = "foobar" + + [Tracing.Zipkin] + HTTPEndpoint = "foobar" + SameSpan = true + ID128Bit = true + Debug = true + SampleRate = 42.0 + + [Tracing.DataDog] + LocalAgentHostPort = "foobar" + GlobalTag = "foobar" + Debug = true + PrioritySampling = true + TraceIDHeaderName = "foobar" + ParentIDHeaderName = "foobar" + SamplingPriorityHeaderName = "foobar" + BagagePrefixHeaderName = "foobar" + + [Tracing.Instana] + LocalAgentHost = "foobar" + LocalAgentPort = 42 + LogLevel = "foobar" + +[HostResolver] + CnameFlattening = true + ResolvConfig = "foobar" + ResolvDepth = 42 + +[ACME] + Email = "foobar" + ACMELogging = true + CAServer = "foobar" + Storage = "foobar" + EntryPoint = "foobar" + KeyType = "foobar" + OnHostRule = true + + [ACME.DNSChallenge] + Provider = "foobar" + DelayBeforeCheck = 42 + Resolvers = ["foobar", "foobar"] + DisablePropagationCheck = true + + [ACME.HTTPChallenge] + EntryPoint = "foobar" + + [ACME.TLSChallenge] + + [[ACME.Domains]] + Main = "foobar" + SANs = ["foobar", "foobar"] + + [[ACME.Domains]] + Main = "foobar" + SANs = ["foobar", "foobar"] + +#### Dynamic configuration + +[HTTP] + + [HTTP.Routers] + + [HTTP.Routers.Router0] + EntryPoints = ["foobar", "foobar"] + Middlewares = ["foobar", "foobar"] + Service = "foobar" + Rule = "foobar" + priority = 42 + [HTTP.Routers.Router0.tls] + + [HTTP.Middlewares] + + [HTTP.Middlewares.Middleware0.AddPrefix] + Prefix = "foobar" + + [HTTP.Middlewares.Middleware1.StripPrefix] + Prefixes = ["foobar", "foobar"] + + [HTTP.Middlewares.Middleware2.StripPrefixRegex] + Regex = ["foobar", "foobar"] + + [HTTP.Middlewares.Middleware3.ReplacePath] + Path = "foobar" + + [HTTP.Middlewares.Middleware4.ReplacePathRegex] + Regex = "foobar" + Replacement = "foobar" + + [HTTP.Middlewares.Middleware5.Chain] + Middlewares = ["foobar", "foobar"] + + [HTTP.Middlewares.Middleware6.IPWhiteList] + SourceRange = ["foobar", "foobar"] + + [HTTP.Middlewares.Middleware7.IPWhiteList.IPStrategy] + Depth = 42 + ExcludedIPs = ["foobar", "foobar"] + + [HTTP.Middlewares.Middleware8.Headers] + AccessControlAllowCredentials = true + AccessControlAllowHeaders = ["foobar", "foobar"] + AccessControlAllowMethods = ["foobar", "foobar"] + AccessControlAllowOrigin = "foobar" + AccessControlExposeHeaders = ["foobar", "foobar"] + AccessControlMaxAge = 42 + AddVaryHeader = true + AllowedHosts = ["foobar", "foobar"] + HostsProxyHeaders = ["foobar", "foobar"] + SSLRedirect = true + SSLTemporaryRedirect = true + SSLHost = "foobar" + SSLForceHost = true + STSSeconds = 42 + STSIncludeSubdomains = true + STSPreload = true + ForceSTSHeader = true + FrameDeny = true + CustomFrameOptionsValue = "foobar" + ContentTypeNosniff = true + BrowserXSSFilter = true + CustomBrowserXSSValue = "foobar" + ContentSecurityPolicy = "foobar" + PublicKey = "foobar" + ReferrerPolicy = "foobar" + IsDevelopment = true + [HTTP.Middlewares.Middleware8.Headers.CustomRequestHeaders] + name0 = "foobar" + name1 
= "foobar" + [HTTP.Middlewares.Middleware8.Headers.CustomResponseHeaders] + name0 = "foobar" + name1 = "foobar" + [HTTP.Middlewares.Middleware8.Headers.SSLProxyHeaders] + name0 = "foobar" + name1 = "foobar" + + [HTTP.Middlewares.Middleware9.Errors] + Status = ["foobar", "foobar"] + Service = "foobar" + Query = "foobar" + + [HTTP.Middlewares.Middleware10.RateLimit] + ExtractorFunc = "foobar" + [HTTP.Middlewares.Middleware10.RateLimit.RateSet] + [HTTP.Middlewares.Middleware10.RateLimit.RateSet.Rate0] + Period = 42 + Average = 42 + Burst = 42 + [HTTP.Middlewares.Middleware10.RateLimit.RateSet.Rate1] + Period = 42 + Average = 42 + Burst = 42 + + [HTTP.Middlewares.Middleware11.RedirectRegex] + Regex = "foobar" + Replacement = "foobar" + Permanent = true + + [HTTP.Middlewares.Middleware12.RedirectScheme] + Scheme = "foobar" + Port = "foobar" + Permanent = true + + [HTTP.Middlewares.Middleware13.BasicAuth] + Users = ["foobar", "foobar"] + UsersFile = "foobar" + Realm = "foobar" + RemoveHeader = true + HeaderField = "foobar" + + [HTTP.Middlewares.Middleware14.DigestAuth] + Users = ["foobar", "foobar"] + UsersFile = "foobar" + RemoveHeader = true + Realm = "foobar" + HeaderField = "foobar" + + [HTTP.Middlewares.Middleware15.ForwardAuth] + Address = "foobar" + TrustForwardHeader = true + AuthResponseHeaders = ["foobar", "foobar"] + [HTTP.Middlewares.Middleware15.ForwardAuth.TLS] + CA = "foobar" + CAOptional = true + Cert = "foobar" + Key = "foobar" + InsecureSkipVerify = true + + [HTTP.Middlewares.Middleware16.MaxConn] + Amount = 42 + ExtractorFunc = "foobar" + + [HTTP.Middlewares.Middleware17.Buffering] + MaxRequestBodyBytes = 42 + MemRequestBodyBytes = 42 + MaxResponseBodyBytes = 42 + MemResponseBodyBytes = 42 + RetryExpression = "foobar" + + [HTTP.Middlewares.Middleware18.CircuitBreaker] + Expression = "foobar" + + [HTTP.Middlewares.Middleware19.Compress] + + [HTTP.Middlewares.Middleware20.PassTLSClientCert] + PEM = true + [HTTP.Middlewares.Middleware20.PassTLSClientCert.Info] + NotAfter = true + NotBefore = true + Sans = true + [HTTP.Middlewares.Middleware20.PassTLSClientCert.Info.Subject] + Country = true + Province = true + Locality = true + Organization = true + CommonName = true + SerialNumber = true + DomainComponent = true + [HTTP.Middlewares.Middleware20.PassTLSClientCert.Info.Issuer] + Country = true + Province = true + Locality = true + Organization = true + CommonName = true + SerialNumber = true + DomainComponent = true + + [HTTP.Middlewares.Middleware21.Retry] + Attempts = 42 + + [HTTP.Services] + [HTTP.Services.Service0] + [HTTP.Services.Service0.LoadBalancer] + Method = "foobar" + PassHostHeader = true + + [[HTTP.Services.Service0.LoadBalancer.Servers]] + URL = "foobar" + + [HTTP.Services.Service0.LoadBalancer.Stickiness] + CookieName = "foobar" + + [[HTTP.Services.Service0.LoadBalancer.Servers]] + URL = "foobar" + + [HTTP.Services.Service0.LoadBalancer.HealthCheck] + Scheme = "foobar" + Path = "foobar" + Port = 42 + Interval = "foobar" + Timeout = "foobar" + Hostname = "foobar" + [HTTP.Services.Service0.LoadBalancer.HealthCheck.Headers] + name0 = "foobar" + name1 = "foobar" + [HTTP.Services.Service0.LoadBalancer.ResponseForwarding] + FlushInterval = "foobar" + +[TCP] + + [TCP.Routers] + + [TCP.Routers.TCPRouter0] + EntryPoints = ["foobar", "foobar"] + Service = "foobar" + Rule = "foobar" + [TCP.Routers.TCPRouter0.tls] + passthrough = true + + [TCP.Services] + + [TCP.Services.TCPService0] + [TCP.Services.TCPService0.LoadBalancer] + Method = "foobar" + + 
[[TCP.Services.TCPService0.LoadBalancer.Servers]] + Address = "foobar" + + [[TCP.Services.TCPService0.LoadBalancer.Servers]] + Address = "foobar" + +[[TLS]] + Stores = ["foobar", "foobar"] + [TLS.Certificate] + CertFile = "foobar" + KeyFile = "foobar" + +[[TLS]] + Stores = ["foobar", "foobar"] + [TLS.Certificate] + CertFile = "foobar" + KeyFile = "foobar" + +[TLSOptions] + + [TLSOptions.TLS0] + MinVersion = "foobar" + CipherSuites = ["foobar", "foobar"] + SniStrict = true + [TLSOptions.TLS0.ClientCA] + Files = ["foobar", "foobar"] + Optional = true + [TLSOptions.TLS1] + MinVersion = "foobar" + CipherSuites = ["foobar", "foobar"] + SniStrict = true + [TLSOptions.TLS1.ClientCA] + Files = ["foobar", "foobar"] + Optional = true + +[TLSStores] + + [TLSStores.Store0] + [TLSStores.Store0.DefaultCertificate] + CertFile = "foobar" + KeyFile = "foobar" + [TLSStores.Store1] + [TLSStores.Store1.DefaultCertificate] + CertFile = "foobar" + KeyFile = "foobar" diff --git a/pkg/config/file/fixtures/sample.yml b/pkg/config/file/fixtures/sample.yml new file mode 100644 index 000000000..adaf8c02b --- /dev/null +++ b/pkg/config/file/fixtures/sample.yml @@ -0,0 +1,257 @@ +Global: + Debug: true + CheckNewVersion: true + SendAnonymousUsage: true +ServersTransport: + InsecureSkipVerify: true + RootCAs: + - foobar + - foobar + MaxIdleConnsPerHost: 42 + ForwardingTimeouts: + DialTimeout: 42 + ResponseHeaderTimeout: 42 +EntryPoints: + EntryPoint0: + Address: foobar + Transport: + LifeCycle: + RequestAcceptGraceTimeout: 42 + GraceTimeOut: 42 + RespondingTimeouts: + ReadTimeout: 42 + WriteTimeout: 42 + IdleTimeout: 42 + ProxyProtocol: + Insecure: true + TrustedIPs: + - foobar + - foobar + ForwardedHeaders: + Insecure: true + TrustedIPs: + - foobar + - foobar +Providers: + ProvidersThrottleDuration: 42 + Docker: + Watch: true + Endpoint: foobar + DefaultRule: foobar + ExposedByDefault: true + UseBindPortIP: true + SwarmMode: true + Network: foobar + SwarmModeRefreshSeconds: 42 + Constraints: + - Key: foobar + MustMatch: true + Value: foobar + - Key: foobar + MustMatch: true + Value: foobar + TLS: + CA: foobar + CAOptional: true + Cert: foobar + Key: foobar + InsecureSkipVerify: true + File: + Directory: foobar + Watch: true + Filename: foobar + DebugLogGeneratedTemplate: true + TraefikFile: foobar + Marathon: + Trace: true + Watch: true + Endpoint: foobar + DefaultRule: foobar + ExposedByDefault: true + DCOSToken: foobar + FilterMarathonConstraints: true + DialerTimeout: 42 + ResponseHeaderTimeout: 42 + TLSHandshakeTimeout: 42 + KeepAlive: 42 + ForceTaskHostname: true + RespectReadinessChecks: true + Constraints: + - Key: foobar + MustMatch: true + Value: foobar + - Key: foobar + MustMatch: true + Value: foobar + TLS: + CA: foobar + CAOptional: true + Cert: foobar + Key: foobar + InsecureSkipVerify: true + Basic: + HTTPBasicAuthUser: foobar + HTTPBasicPassword: foobar + Kubernetes: + Endpoint: foobar + Token: foobar + CertAuthFilePath: foobar + DisablePassHostHeaders: true + Namespaces: + - foobar + - foobar + LabelSelector: foobar + IngressClass: foobar + IngressEndpoint: + IP: foobar + Hostname: foobar + PublishedService: foobar + KubernetesCRD: + Endpoint: foobar + Token: foobar + CertAuthFilePath: foobar + DisablePassHostHeaders: true + Namespaces: + - foobar + - foobar + LabelSelector: foobar + IngressClass: foobar + Rest: + EntryPoint: foobar + Rancher: + Watch: true + DefaultRule: foobar + ExposedByDefault: true + EnableServiceHealthFilter: true + RefreshSeconds: 42 + IntervalPoll: true + Prefix: foobar + 
Constraints: + - Key: foobar + MustMatch: true + Value: foobar + - Key: foobar + MustMatch: true + Value: foobar +API: + EntryPoint: foobar + Dashboard: true + Middlewares: + - foobar + - foobar + Statistics: + RecentErrors: 42 +Metrics: + Prometheus: + Buckets: + - 42 + - 42 + EntryPoint: foobar + Middlewares: + - foobar + - foobar + Datadog: + Address: foobar + PushInterval: 10s + StatsD: + Address: foobar + PushInterval: 10s + InfluxDB: + Address: foobar + Protocol: foobar + PushInterval: 10s + Database: foobar + RetentionPolicy: foobar + Username: foobar + Password: foobar +Ping: + EntryPoint: foobar + Middlewares: + - foobar + - foobar +Log: + Level: foobar + FilePath: foobar + Format: foobar +AccessLog: + FilePath: foobar + Format: foobar + BufferingSize: 42 + Filters: + StatusCodes: + - foobar + - foobar + RetryAttempts: true + MinDuration: 42 + Fields: + DefaultMode: foobar + Names: + name0: foobar + name1: foobar + Headers: + DefaultMode: foobar + Names: + name0: foobar + name1: foobar +Tracing: + Backend: foobar + ServiceName: foobar + SpanNameLimit: 42 + Jaeger: + SamplingServerURL: foobar + SamplingType: foobar + SamplingParam: 42 + LocalAgentHostPort: foobar + Gen128Bit: true + Propagation: foobar + TraceContextHeaderName: foobar + Zipkin: + HTTPEndpoint: foobar + SameSpan: true + ID128Bit: true + Debug: true + SampleRate: 42 + DataDog: + LocalAgentHostPort: foobar + GlobalTag: foobar + Debug: true + PrioritySampling: true + TraceIDHeaderName: foobar + ParentIDHeaderName: foobar + SamplingPriorityHeaderName: foobar + BagagePrefixHeaderName: foobar + Instana: + LocalAgentHost: foobar + LocalAgentPort: 42 + LogLevel: foobar +HostResolver: + CnameFlattening: true + ResolvConfig: foobar + ResolvDepth: 42 +ACME: + Email: foobar + ACMELogging: true + CAServer: foobar + Storage: foobar + EntryPoint: foobar + KeyType: foobar + OnHostRule: true + DNSChallenge: + Provider: foobar + DelayBeforeCheck: 42 + Resolvers: + - foobar + - foobar + DisablePropagationCheck: true + HTTPChallenge: + EntryPoint: foobar + TLSChallenge: {} + Domains: + - Main: foobar + SANs: + - foobar + - foobar + - Main: foobar + SANs: + - foobar + - foobar diff --git a/pkg/config/file/fixtures_test.go b/pkg/config/file/fixtures_test.go new file mode 100644 index 000000000..25521e8a3 --- /dev/null +++ b/pkg/config/file/fixtures_test.go @@ -0,0 +1,34 @@ +package file + +type bar string + +type Yo struct { + Foo string + Fii string + Fuu string + Yi *Yi `label:"allowEmpty"` +} + +func (y *Yo) SetDefaults() { + y.Foo = "foo" + y.Fii = "fii" +} + +type Yi struct { + Foo string + Fii string + Fuu string +} + +func (y *Yi) SetDefaults() { + y.Foo = "foo" + y.Fii = "fii" +} + +type Yu struct { + Yi +} + +type Ye struct { + *Yi +} diff --git a/pkg/config/file/raw_node.go b/pkg/config/file/raw_node.go new file mode 100644 index 000000000..8bcf776d2 --- /dev/null +++ b/pkg/config/file/raw_node.go @@ -0,0 +1,128 @@ +package file + +import ( + "reflect" + "sort" + "strconv" + "strings" + + "github.com/containous/traefik/pkg/config/parser" +) + +func decodeRawToNode(data map[string]interface{}, filters ...string) (*parser.Node, error) { + root := &parser.Node{ + Name: "traefik", + } + + vData := reflect.ValueOf(data) + decodeRaw(root, vData, filters...) 
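+	// At this point root holds the whole decoded tree: its children mirror the top-level keys
+	// of data, restricted to the given filters when any are provided.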
+ + return root, nil +} + +func decodeRaw(node *parser.Node, vData reflect.Value, filters ...string) { + sortedKeys := sortKeys(vData, filters) + + for _, key := range sortedKeys { + value := reflect.ValueOf(vData.MapIndex(key).Interface()) + + child := &parser.Node{Name: key.String()} + + switch value.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + fallthrough + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + fallthrough + case reflect.Float32, reflect.Float64: + fallthrough + case reflect.Bool: + fallthrough + case reflect.String: + child.Value = getSimpleValue(value) + case reflect.Slice: + var values []string + + for i := 0; i < value.Len(); i++ { + item := value.Index(i) + switch item.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + fallthrough + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + fallthrough + case reflect.Bool: + fallthrough + case reflect.String: + fallthrough + case reflect.Map: + fallthrough + case reflect.Interface: + sValue := reflect.ValueOf(item.Interface()) + if sValue.Kind() == reflect.Map { + ch := &parser.Node{ + Name: "[" + strconv.Itoa(i) + "]", + } + + child.Children = append(child.Children, ch) + decodeRaw(ch, sValue) + } else { + values = append(values, getSimpleValue(sValue)) + } + default: + panic("Unsupported slice type: " + item.Kind().String()) + } + } + + child.Value = strings.Join(values, ",") + case reflect.Map: + decodeRaw(child, value) + default: + panic("Unsupported type: " + value.Kind().String()) + } + + node.Children = append(node.Children, child) + } +} + +func getSimpleValue(item reflect.Value) string { + switch item.Kind() { + case reflect.String: + return item.String() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return strconv.FormatInt(item.Int(), 10) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return strconv.FormatUint(item.Uint(), 10) + case reflect.Float32, reflect.Float64: + return strings.TrimSuffix(strconv.FormatFloat(item.Float(), 'f', 6, 64), ".000000") + case reflect.Bool: + return strconv.FormatBool(item.Bool()) + default: + panic("Unsupported Simple value type: " + item.Kind().String()) + } +} + +func sortKeys(vData reflect.Value, filters []string) []reflect.Value { + var sortedKeys []reflect.Value + + for _, v := range vData.MapKeys() { + rValue := reflect.ValueOf(v.Interface()) + key := rValue.String() + + if len(filters) == 0 { + sortedKeys = append(sortedKeys, rValue) + continue + } + + for _, filter := range filters { + if strings.EqualFold(key, filter) { + sortedKeys = append(sortedKeys, rValue) + continue + } + } + } + + sort.Slice(sortedKeys, func(i, j int) bool { + return sortedKeys[i].String() < sortedKeys[j].String() + }) + + return sortedKeys +} diff --git a/pkg/config/file/raw_node_test.go b/pkg/config/file/raw_node_test.go new file mode 100644 index 000000000..15cab5957 --- /dev/null +++ b/pkg/config/file/raw_node_test.go @@ -0,0 +1,540 @@ +package file + +import ( + "testing" + + "github.com/containous/traefik/pkg/config/parser" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_decodeRawToNode(t *testing.T) { + testCases := []struct { + desc string + data map[string]interface{} + expected *parser.Node + }{ + { + desc: "empty", + data: map[string]interface{}{}, + expected: &parser.Node{ + Name: "traefik", + }, + }, + { + desc: "string", 
+ data: map[string]interface{}{ + "foo": "bar", + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "bar"}, + }, + }, + }, + { + desc: "string named type", + data: map[string]interface{}{ + "foo": bar("bar"), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "bar"}, + }, + }, + }, + { + desc: "bool", + data: map[string]interface{}{ + "foo": true, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "true"}, + }, + }, + }, + { + desc: "int", + data: map[string]interface{}{ + "foo": 1, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "int8", + data: map[string]interface{}{ + "foo": int8(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "int16", + data: map[string]interface{}{ + "foo": int16(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "int32", + data: map[string]interface{}{ + "foo": int32(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "int64", + data: map[string]interface{}{ + "foo": int64(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "uint", + data: map[string]interface{}{ + "foo": uint(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "uint8", + data: map[string]interface{}{ + "foo": uint8(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "uint16", + data: map[string]interface{}{ + "foo": uint16(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "uint32", + data: map[string]interface{}{ + "foo": uint32(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "uint64", + data: map[string]interface{}{ + "foo": uint64(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "float32", + data: map[string]interface{}{ + "foo": float32(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "float64", + data: map[string]interface{}{ + "foo": float64(1), + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1"}, + }, + }, + }, + { + desc: "string slice", + data: map[string]interface{}{ + "foo": []string{"A", "B"}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "A,B"}, + }, + }, + }, + { + desc: "int slice", + data: map[string]interface{}{ + "foo": []int{1, 2}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1,2"}, + }, + }, + }, + { + desc: "int8 slice", + data: map[string]interface{}{ + "foo": []int8{1, 2}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1,2"}, + }, + }, + }, + { + desc: "int16 slice", + 
data: map[string]interface{}{ + "foo": []int16{1, 2}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1,2"}, + }, + }, + }, + { + desc: "int32 slice", + data: map[string]interface{}{ + "foo": []int32{1, 2}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1,2"}, + }, + }, + }, + { + desc: "int64 slice", + data: map[string]interface{}{ + "foo": []int64{1, 2}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1,2"}, + }, + }, + }, + { + desc: "bool slice", + data: map[string]interface{}{ + "foo": []bool{true, false}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "true,false"}, + }, + }, + }, + { + desc: "interface (string) slice", + data: map[string]interface{}{ + "foo": []interface{}{"A", "B"}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "A,B"}, + }, + }, + }, + { + desc: "interface (int) slice", + data: map[string]interface{}{ + "foo": []interface{}{1, 2}, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Value: "1,2"}, + }, + }, + }, + { + desc: "2 strings", + data: map[string]interface{}{ + "foo": "bar", + "fii": "bir", + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Value: "bir"}, + {Name: "foo", Value: "bar"}, + }, + }, + }, + { + desc: "string, level 2", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": "bur", + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "bur"}}}, + }, + }, + }, + { + desc: "int, level 2", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": 1, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}}, + }, + }, + }, + { + desc: "uint, level 2", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": uint(1), + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}}, + }, + }, + }, + { + desc: "bool, level 2", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": true, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "true"}}}, + }, + }, + }, + { + desc: "string, level 3", + data: map[string]interface{}{ + "foo": map[interface{}]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": "bur", + }, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "bur"}}}, + }}, + }, + }, + }, + { + desc: "int, level 3", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": 1, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}}, + }, + }, + }, + { + desc: "uint, level 3", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": uint(1), + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", 
Value: "1"}}}, + }, + }, + }, + { + desc: "bool, level 3", + data: map[string]interface{}{ + "fii": map[interface{}]interface{}{ + "fuu": true, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "true"}}}, + }, + }, + }, + { + desc: "struct", + data: map[string]interface{}{ + "foo": map[interface{}]interface{}{ + "field1": "C", + "field2": "C", + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Children: []*parser.Node{ + {Name: "field1", Value: "C"}, + {Name: "field2", Value: "C"}, + }}, + }, + }, + }, + { + desc: "slice struct 1", + data: map[string]interface{}{ + "foo": []map[string]interface{}{ + {"field1": "A", "field2": "A"}, + {"field1": "B", "field2": "B"}, + {"field2": "C", "field1": "C"}, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "field1", Value: "A"}, + {Name: "field2", Value: "A"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "field1", Value: "B"}, + {Name: "field2", Value: "B"}, + }}, + {Name: "[2]", Children: []*parser.Node{ + {Name: "field1", Value: "C"}, + {Name: "field2", Value: "C"}, + }}, + }}, + }, + }, + }, + { + desc: "slice struct 2", + data: map[string]interface{}{ + "foo": []interface{}{ + map[interface{}]interface{}{ + "field2": "A", + "field1": "A", + }, + map[interface{}]interface{}{ + "field1": "B", + "field2": "B", + }, + map[interface{}]interface{}{ + "field1": "C", + "field2": "C", + }, + }, + }, + expected: &parser.Node{ + Name: "traefik", + Children: []*parser.Node{ + {Name: "foo", Children: []*parser.Node{ + {Name: "[0]", Children: []*parser.Node{ + {Name: "field1", Value: "A"}, + {Name: "field2", Value: "A"}, + }}, + {Name: "[1]", Children: []*parser.Node{ + {Name: "field1", Value: "B"}, + {Name: "field2", Value: "B"}, + }}, + {Name: "[2]", Children: []*parser.Node{ + {Name: "field1", Value: "C"}, + {Name: "field2", Value: "C"}, + }}, + }}, + }, + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + node, err := decodeRawToNode(test.data) + require.NoError(t, err) + + assert.Equal(t, test.expected, node) + }) + } +} diff --git a/pkg/config/flag/flag.go b/pkg/config/flag/flag.go new file mode 100644 index 000000000..e6e8f51e5 --- /dev/null +++ b/pkg/config/flag/flag.go @@ -0,0 +1,44 @@ +// Package flag implements encoding and decoding between flag arguments and a typed Configuration. +package flag + +import ( + "github.com/containous/traefik/pkg/config/parser" +) + +// Decode decodes the given flag arguments into the given element. +// The operation goes through four stages roughly summarized as: +// flag arguments -> parsed map of flags +// map -> tree of untyped nodes +// untyped nodes -> nodes augmented with metadata such as kind (inferred from element) +// "typed" nodes -> typed element +func Decode(args []string, element interface{}) error { + ref, err := Parse(args, element) + if err != nil { + return err + } + + return parser.Decode(ref, element) +} + +// Encode encodes the configuration in element into the flags represented in the returned Flats. 
+// The operation goes through three stages roughly summarized as: +// typed configuration in element -> tree of untyped nodes +// untyped nodes -> nodes augmented with metadata such as kind (inferred from element) +// "typed" nodes -> flags with default values (determined by type/kind) +func Encode(element interface{}) ([]parser.Flat, error) { + if element == nil { + return nil, nil + } + + node, err := parser.EncodeToNode(element, false) + if err != nil { + return nil, err + } + + err = parser.AddMetadata(element, node) + if err != nil { + return nil, err + } + + return parser.EncodeToFlat(element, node, parser.FlatOpts{Separator: ".", SkipRoot: true}) +} diff --git a/pkg/config/flag/flag_test.go b/pkg/config/flag/flag_test.go new file mode 100644 index 000000000..c4683c7e8 --- /dev/null +++ b/pkg/config/flag/flag_test.go @@ -0,0 +1,926 @@ +package flag + +import ( + "testing" + "time" + + "github.com/containous/traefik/pkg/config/generator" + "github.com/containous/traefik/pkg/config/parser" + "github.com/containous/traefik/pkg/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDecode(t *testing.T) { + testCases := []struct { + desc string + args []string + element interface{} + expected interface{} + }{ + { + desc: "no args", + args: nil, + expected: nil, + }, + { + desc: "types.Duration value", + args: []string{"--foo=1"}, + element: &struct { + Foo types.Duration + }{}, + expected: &struct { + Foo types.Duration + }{ + Foo: types.Duration(1 * time.Second), + }, + }, + { + desc: "time.Duration value", + args: []string{"--foo=1"}, + element: &struct { + Foo time.Duration + }{}, + expected: &struct { + Foo time.Duration + }{ + Foo: 1 * time.Nanosecond, + }, + }, + { + desc: "bool value", + args: []string{"--foo"}, + element: &struct { + Foo bool + }{}, + expected: &struct { + Foo bool + }{ + Foo: true, + }, + }, + { + desc: "equal", + args: []string{"--foo=bar"}, + element: &struct { + Foo string + }{}, + expected: &struct { + Foo string + }{ + Foo: "bar", + }, + }, + { + desc: "space separated", + args: []string{"--foo", "bar"}, + element: &struct { + Foo string + }{}, + expected: &struct { + Foo string + }{ + Foo: "bar", + }, + }, + { + desc: "space separated with end of parameter", + args: []string{"--foo=bir", "--", "--bar"}, + element: &struct { + Foo string + }{}, + expected: &struct { + Foo string + }{ + Foo: "bir", + }, + }, + { + desc: "multiple bool flags without value", + args: []string{"--foo", "--bar"}, + element: &struct { + Foo bool + Bar bool + }{}, + expected: &struct { + Foo bool + Bar bool + }{ + Foo: true, + Bar: true, + }, + }, + { + desc: "slice with several flags", + args: []string{"--foo=bar", "--foo=baz"}, + element: &struct { + Foo []string + }{}, + expected: &struct { + Foo []string + }{ + Foo: []string{"bar", "baz"}, + }, + }, + { + desc: "map string", + args: []string{"--foo.name=bar"}, + element: &struct { + Foo map[string]string + }{}, + expected: &struct { + Foo map[string]string + }{ + Foo: map[string]string{ + "name": "bar", + }, + }, + }, + { + desc: "map struct", + args: []string{"--foo.name.value=bar"}, + element: &struct { + Foo map[string]struct{ Value string } + }{}, + expected: &struct { + Foo map[string]struct{ Value string } + }{ + Foo: map[string]struct{ Value string }{ + "name": { + Value: "bar", + }, + }, + }, + }, + { + desc: "map struct with sub-struct", + args: []string{"--foo.name.bar.value=bar"}, + element: &struct { + Foo map[string]struct { + Bar *struct{ Value string } + } + }{}, + 
expected: &struct { + Foo map[string]struct { + Bar *struct{ Value string } + } + }{ + Foo: map[string]struct { + Bar *struct{ Value string } + }{ + "name": { + Bar: &struct { + Value string + }{ + Value: "bar", + }, + }, + }, + }, + }, + { + desc: "map struct with sub-map", + args: []string{"--foo.name1.bar.name2.value=bar"}, + element: &struct { + Foo map[string]struct { + Bar map[string]struct{ Value string } + } + }{}, + expected: &struct { + Foo map[string]struct { + Bar map[string]struct{ Value string } + } + }{ + Foo: map[string]struct { + Bar map[string]struct{ Value string } + }{ + "name1": { + Bar: map[string]struct{ Value string }{ + "name2": { + Value: "bar", + }, + }, + }, + }, + }, + }, + { + desc: "slice with several flags 2", + args: []string{"--foo", "bar", "--foo", "baz"}, + element: &struct { + Foo []string + }{}, + expected: &struct { + Foo []string + }{ + Foo: []string{"bar", "baz"}, + }, + }, + { + desc: "slice with several flags 3", + args: []string{"--foo", "bar", "--foo=", "--baz"}, + element: &struct { + Foo []string + Baz bool + }{}, + expected: &struct { + Foo []string + Baz bool + }{ + Foo: []string{"bar", ""}, + Baz: true, + }, + }, + { + desc: "slice with several flags 4", + args: []string{"--foo", "bar", "--foo", "--baz"}, + element: &struct { + Foo []string + Baz bool + }{}, + expected: &struct { + Foo []string + Baz bool + }{ + Foo: []string{"bar", "--baz"}, + }, + }, + { + desc: "slice of struct", + args: []string{ + "--foo[0].Field1", "bar", "--foo[0].Field2", "6", + "--foo[1].Field1", "bur", "--foo[1].Field2", "2", + }, + element: &struct { + Foo []struct { + Field1 string + Field2 int + } + }{}, + expected: &struct { + Foo []struct { + Field1 string + Field2 int + } + }{ + Foo: []struct { + Field1 string + Field2 int + }{ + { + Field1: "bar", + Field2: 6, + }, + { + Field1: "bur", + Field2: 2, + }, + }, + }, + }, + { + desc: "slice of pointer of struct", + args: []string{ + "--foo[0].Field1", "bar", "--foo[0].Field2", "6", + "--foo[1].Field1", "bur", "--foo[1].Field2", "2", + }, + element: &struct { + Foo []*struct { + Field1 string + Field2 int + } + }{}, + expected: &struct { + Foo []*struct { + Field1 string + Field2 int + } + }{ + Foo: []*struct { + Field1 string + Field2 int + }{ + { + Field1: "bar", + Field2: 6, + }, + { + Field1: "bur", + Field2: 2, + }, + }, + }, + }, + { + desc: "multiple string flag", + element: &struct { + Foo string + }{}, + args: []string{"--foo=bar", "--foo=baz"}, + expected: &struct { + Foo string + }{ + Foo: "baz", + }, + }, + { + desc: "multiple string flag 2", + element: &struct { + Foo string + }{}, + args: []string{"--foo", "bar", "--foo", "baz"}, + expected: &struct { + Foo string + }{ + Foo: "baz", + }, + }, + { + desc: "string without value", + element: &struct { + Foo string + Bar bool + }{}, + args: []string{"--foo", "--bar"}, + expected: &struct { + Foo string + Bar bool + }{ + Foo: "--bar", + }, + }, + { + desc: "struct pointer value", + args: []string{"--foo"}, + element: &struct { + Foo *struct{ Field string } `label:"allowEmpty"` + }{}, + expected: &struct { + Foo *struct{ Field string } `label:"allowEmpty"` + }{ + Foo: &struct{ Field string }{}, + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + err := Decode(test.args, test.element) + require.NoError(t, err) + + assert.Equal(t, test.expected, test.element) + }) + } +} + +func TestEncode(t *testing.T) { + testCases := []struct { + desc string + element interface{} + expected 
[]parser.Flat + }{ + { + desc: "string field", + element: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "test", + }}, + }, + { + desc: "int field", + element: &struct { + Field int `description:"field description"` + }{ + Field: 6, + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "6", + }}, + }, + { + desc: "bool field", + element: &struct { + Field bool `description:"field description"` + }{ + Field: true, + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "true", + }}, + }, + { + desc: "string pointer field", + element: &struct { + Field *string `description:"field description"` + }{ + Field: func(v string) *string { return &v }("test"), + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "test", + }}, + }, + { + desc: "int pointer field", + element: &struct { + Field *int `description:"field description"` + }{ + Field: func(v int) *int { return &v }(6), + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "6", + }}, + }, + { + desc: "bool pointer field", + element: &struct { + Field *bool `description:"field description"` + }{ + Field: func(v bool) *bool { return &v }(true), + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "true", + }}, + }, + { + desc: "slice of string field, no initial value", + element: &struct { + Field []string `description:"field description"` + }{}, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "", + }}, + }, + { + desc: "slice of string field, with initial value", + element: &struct { + Field []string `description:"field description"` + }{ + Field: []string{"foo", "bar"}, + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "foo, bar", + }}, + }, + { + desc: "slice of int field, no initial value", + element: &struct { + Field []int `description:"field description"` + }{}, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "", + }}, + }, + { + desc: "slice of int field, with initial value", + element: &struct { + Field []int `description:"field description"` + }{ + Field: []int{6, 3}, + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "6, 3", + }}, + }, + { + desc: "map string field", + element: &struct { + Field map[string]string `description:"field description"` + }{ + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + expected: []parser.Flat{{ + Name: "field.", + Description: "field description", + Default: "", + }}, + }, + { + desc: "struct pointer field", + element: &struct { + Foo *struct { + Field string `description:"field description"` + } `description:"foo description"` + }{ + Foo: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + expected: []parser.Flat{ + { + Name: "foo.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "struct pointer field, allow empty", + element: &struct { + Foo *struct { + Field string `description:"field description"` + } `description:"foo description" label:"allowEmpty"` + }{ + Foo: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + expected: []parser.Flat{ 
+ { + Name: "foo", + Description: "foo description", + Default: "false", + }, + { + Name: "foo.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "struct pointer field level 2", + element: &struct { + Foo *struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description"` + }{ + Fii: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + }, + expected: []parser.Flat{ + { + Name: "foo.fii.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "struct pointer field level 2, allow empty", + element: &struct { + Foo *struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description" label:"allowEmpty"` + } `description:"foo description" label:"allowEmpty"` + }{ + Foo: &struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description" label:"allowEmpty"` + }{ + Fii: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + }, + expected: []parser.Flat{ + { + Name: "foo", + Description: "foo description", + Default: "false", + }, + { + Name: "foo.fii", + Description: "fii description", + Default: "false", + }, + { + Name: "foo.fii.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "map string field level 2", + element: &struct { + Foo *struct { + Fii map[string]string `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii map[string]string `description:"fii description"` + }{ + Fii: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + expected: []parser.Flat{ + { + Name: "foo.fii.", + Description: "fii description", + Default: "", + }, + }, + }, + { + desc: "map string pointer field level 2", + element: &struct { + Foo *struct { + Fii map[string]*string `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii map[string]*string `description:"fii description"` + }{ + Fii: map[string]*string{ + parser.MapNamePlaceholder: func(v string) *string { return &v }(""), + }, + }, + }, + expected: []parser.Flat{ + { + Name: "foo.fii.", + Description: "fii description", + Default: "", + }, + }, + }, + { + desc: "map struct level 1", + element: &struct { + Foo map[string]struct { + Field string `description:"field description"` + Yo int `description:"yo description"` + } `description:"foo description"` + }{}, + expected: []parser.Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..field", + Description: "field description", + Default: "", + }, + { + Name: "foo..yo", + Description: "yo description", + Default: "0", + }, + }, + }, + { + desc: "map struct pointer level 1", + element: &struct { + Foo map[string]*struct { + Field string `description:"field description"` + Yo string `description:"yo description"` + } `description:"foo description"` + }{}, + expected: []parser.Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..field", + Description: "field description", + Default: "", + }, + { + Name: "foo..yo", + Description: "yo description", + Default: "", + }, + }, + }, + { + desc: "time duration field", + element: &struct { + Field time.Duration 
`description:"field description"` + }{ + Field: 1 * time.Second, + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "1s", + }}, + }, + { + desc: "time duration field map", + element: &struct { + Foo map[string]*struct { + Field time.Duration `description:"field description"` + } `description:"foo description"` + }{ + Foo: map[string]*struct { + Field time.Duration `description:"field description"` + }{}, + }, + expected: []parser.Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..field", + Description: "field description", + Default: "0s", + }, + }, + }, + { + desc: "time duration field map 2", + element: &struct { + Foo map[string]*struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + } `description:"foo description"` + }{ + Foo: map[string]*struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + }{}, + }, + expected: []parser.Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..fii.field", + Description: "field description", + Default: "0s", + }, + }, + }, + { + desc: "time duration field 2", + element: &struct { + Foo *struct { + Field time.Duration `description:"field description"` + } + }{ + Foo: &struct { + Field time.Duration `description:"field description"` + }{ + Field: 1 * time.Second, + }, + }, + expected: []parser.Flat{{ + Name: "foo.field", + Description: "field description", + Default: "1s", + }}, + }, + { + desc: "time duration field 3", + element: &struct { + Foo *struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + } + }{ + Foo: &struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + }{ + Fii: &struct { + Field time.Duration `description:"field description"` + }{ + Field: 1 * time.Second, + }, + }, + }, + expected: []parser.Flat{{ + Name: "foo.fii.field", + Description: "field description", + Default: "1s", + }}, + }, + { + desc: "time duration field", + element: &struct { + Field types.Duration `description:"field description"` + }{ + Field: types.Duration(180 * time.Second), + }, + expected: []parser.Flat{{ + Name: "field", + Description: "field description", + Default: "180", + }}, + }, + { + desc: "slice of struct", + element: &struct { + Foo *struct { + Fii []struct { + Field1 string `description:"field1 description"` + Field2 int `description:"field2 description"` + } `description:"fii description"` + } `description:"foo description"` + }{}, + expected: []parser.Flat{ + { + Name: "foo.fii", + Description: "fii description", + Default: "", + }, + { + Name: "foo.fii[0].field1", + Description: "field1 description", + Default: "", + }, + { + Name: "foo.fii[0].field2", + Description: "field2 description", + Default: "0", + }, + }, + }, + // Skipped: because realistically not needed in Traefik for now. 
+ // { + // desc: "map of map field level 2", + // element: &struct { + // Foo *struct { + // Fii map[string]map[string]string `description:"fii description"` + // } `description:"foo description"` + // }{ + // Foo: &struct { + // Fii map[string]map[string]string `description:"fii description"` + // }{ + // Fii: map[string]map[string]string{ + // parser.MapNamePlaceholder: { + // parser.MapNamePlaceholder: "test", + // }, + // }, + // }, + // }, + // expected: `XXX`, + // }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + generator.Generate(test.element) + + entries, err := Encode(test.element) + require.NoError(t, err) + + assert.Equal(t, test.expected, entries) + }) + } +} diff --git a/pkg/config/flag/flagparser.go b/pkg/config/flag/flagparser.go new file mode 100644 index 000000000..3520e00c1 --- /dev/null +++ b/pkg/config/flag/flagparser.go @@ -0,0 +1,108 @@ +package flag + +import ( + "fmt" + "reflect" + "strings" +) + +// Parse parses the command-line flag arguments into a map, +// using the type information in element to discriminate whether a flag is supposed to be a bool, +// and other such ambiguities. +func Parse(args []string, element interface{}) (map[string]string, error) { + f := flagSet{ + flagTypes: getFlagTypes(element), + args: args, + values: make(map[string]string), + } + + for { + seen, err := f.parseOne() + if seen { + continue + } + if err == nil { + break + } + return nil, err + } + return f.values, nil +} + +type flagSet struct { + flagTypes map[string]reflect.Kind + args []string + values map[string]string +} + +func (f *flagSet) parseOne() (bool, error) { + if len(f.args) == 0 { + return false, nil + } + + s := f.args[0] + if len(s) < 2 || s[0] != '-' { + return false, nil + } + numMinuses := 1 + if s[1] == '-' { + numMinuses++ + if len(s) == 2 { // "--" terminates the flags + f.args = f.args[1:] + return false, nil + } + } + + name := s[numMinuses:] + if len(name) == 0 || name[0] == '-' || name[0] == '=' { + return false, fmt.Errorf("bad flag syntax: %s", s) + } + + // it's a flag. does it have an argument? + f.args = f.args[1:] + hasValue := false + value := "" + for i := 1; i < len(name); i++ { // equals cannot be first + if name[i] == '=' { + value = name[i+1:] + hasValue = true + name = name[0:i] + break + } + } + + if hasValue { + f.setValue(name, value) + return true, nil + } + + if f.flagTypes[name] == reflect.Bool || f.flagTypes[name] == reflect.Ptr { + f.setValue(name, "true") + return true, nil + } + + if len(f.args) > 0 { + // value is the next arg + hasValue = true + value, f.args = f.args[0], f.args[1:] + } + + if !hasValue { + return false, fmt.Errorf("flag needs an argument: -%s", name) + } + + f.setValue(name, value) + return true, nil +} + +func (f *flagSet) setValue(name string, value string) { + n := strings.ToLower("traefik." 
+ name) + v, ok := f.values[n] + + if ok && f.flagTypes[name] == reflect.Slice { + f.values[n] = v + "," + value + return + } + + f.values[n] = value +} diff --git a/pkg/config/flag/flagparser_test.go b/pkg/config/flag/flagparser_test.go new file mode 100644 index 000000000..46e2b4a62 --- /dev/null +++ b/pkg/config/flag/flagparser_test.go @@ -0,0 +1,255 @@ +package flag + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParse(t *testing.T) { + testCases := []struct { + desc string + args []string + element interface{} + expected map[string]string + }{ + { + desc: "no args", + args: nil, + expected: map[string]string{}, + }, + { + desc: "bool value", + args: []string{"--foo"}, + element: &struct { + Foo bool + }{}, + expected: map[string]string{ + "traefik.foo": "true", + }, + }, + { + desc: "equal", + args: []string{"--foo=bar"}, + element: &struct { + Foo string + }{}, + expected: map[string]string{ + "traefik.foo": "bar", + }, + }, + { + desc: "space separated", + args: []string{"--foo", "bar"}, + element: &struct { + Foo string + }{}, + expected: map[string]string{ + "traefik.foo": "bar", + }, + }, + { + desc: "space separated with end of parameter", + args: []string{"--foo=bir", "--", "--bar"}, + element: &struct { + Foo string + }{}, + expected: map[string]string{ + "traefik.foo": "bir", + }, + }, + { + desc: "multiple bool flags without value", + args: []string{"--foo", "--bar"}, + element: &struct { + Foo bool + Bar bool + }{}, + expected: map[string]string{ + "traefik.foo": "true", + "traefik.bar": "true", + }, + }, + { + desc: "slice with several flags", + args: []string{"--foo=bar", "--foo=baz"}, + element: &struct { + Foo []string + }{}, + expected: map[string]string{ + "traefik.foo": "bar,baz", + }, + }, + { + desc: "map string", + args: []string{"--foo.name=bar"}, + element: &struct { + Foo map[string]string + }{}, + expected: map[string]string{ + "traefik.foo.name": "bar", + }, + }, + { + desc: "map struct", + args: []string{"--foo.name.value=bar"}, + element: &struct { + Foo map[string]struct{ Value string } + }{}, + expected: map[string]string{ + "traefik.foo.name.value": "bar", + }, + }, + { + desc: "map struct with sub-struct", + args: []string{"--foo.name.bar.value=bar"}, + element: &struct { + Foo map[string]struct { + Bar *struct{ Value string } + } + }{}, + expected: map[string]string{ + "traefik.foo.name.bar.value": "bar", + }, + }, + { + desc: "map struct with sub-map", + args: []string{"--foo.name1.bar.name2.value=bar"}, + element: &struct { + Foo map[string]struct { + Bar map[string]struct{ Value string } + } + }{}, + expected: map[string]string{ + "traefik.foo.name1.bar.name2.value": "bar", + }, + }, + { + desc: "slice with several flags 2", + args: []string{"--foo", "bar", "--foo", "baz"}, + element: &struct { + Foo []string + }{}, + expected: map[string]string{ + "traefik.foo": "bar,baz", + }, + }, + { + desc: "slice with several flags 3", + args: []string{"--foo", "bar", "--foo=", "--baz"}, + element: &struct { + Foo []string + Baz bool + }{}, + expected: map[string]string{ + "traefik.foo": "bar,", + "traefik.baz": "true", + }, + }, + { + desc: "slice with several flags 4", + args: []string{"--foo", "bar", "--foo", "--baz"}, + element: &struct { + Foo []string + Baz bool + }{}, + expected: map[string]string{ + "traefik.foo": "bar,--baz", + }, + }, + { + desc: "multiple string flag", + element: &struct { + Foo string + }{}, + args: []string{"--foo=bar", "--foo=baz"}, + expected: 
map[string]string{ + "traefik.foo": "baz", + }, + }, + { + desc: "multiple string flag 2", + element: &struct { + Foo string + }{}, + args: []string{"--foo", "bar", "--foo", "baz"}, + expected: map[string]string{ + "traefik.foo": "baz", + }, + }, + { + desc: "string without value", + element: &struct { + Foo string + Bar bool + }{}, + args: []string{"--foo", "--bar"}, + expected: map[string]string{ + "traefik.foo": "--bar", + }, + }, + { + desc: "struct pointer value", + args: []string{"--foo"}, + element: &struct { + Foo *struct{ Field string } + }{}, + expected: map[string]string{ + "traefik.foo": "true", + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + fl, err := Parse(test.args, test.element) + require.NoError(t, err) + assert.Equal(t, test.expected, fl) + }) + } +} + +func TestParse_Errors(t *testing.T) { + testCases := []struct { + desc string + args []string + element interface{} + }{ + { + desc: "triple hyphen", + args: []string{"---foo"}, + element: &struct { + Foo bool + }{}, + }, + { + desc: "equal", + args: []string{"--=foo"}, + element: &struct { + Foo bool + }{}, + }, + { + desc: "string without value", + element: &struct { + Foo string + Bar bool + }{}, + args: []string{"--foo"}, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + _, err := Parse(test.args, test.element) + require.Error(t, err) + }) + } +} diff --git a/pkg/config/flag/flagtype.go b/pkg/config/flag/flagtype.go new file mode 100644 index 000000000..93b4fd58d --- /dev/null +++ b/pkg/config/flag/flagtype.go @@ -0,0 +1,60 @@ +package flag + +import ( + "reflect" + "strings" + + "github.com/containous/traefik/pkg/config/parser" +) + +func getFlagTypes(element interface{}) map[string]reflect.Kind { + ref := map[string]reflect.Kind{} + + if element == nil { + return ref + } + + tp := reflect.TypeOf(element).Elem() + + addFlagType(ref, "", tp) + + return ref +} + +func addFlagType(ref map[string]reflect.Kind, name string, typ reflect.Type) { + switch typ.Kind() { + case reflect.Bool, reflect.Slice: + ref[name] = typ.Kind() + + case reflect.Map: + addFlagType(ref, getName(name, parser.MapNamePlaceholder), typ.Elem()) + + case reflect.Ptr: + if typ.Elem().Kind() == reflect.Struct { + ref[name] = typ.Kind() + } + addFlagType(ref, name, typ.Elem()) + + case reflect.Struct: + for j := 0; j < typ.NumField(); j++ { + subField := typ.Field(j) + + if !parser.IsExported(subField) { + continue + } + + if subField.Anonymous { + addFlagType(ref, getName(name), subField.Type) + } else { + addFlagType(ref, getName(name, subField.Name), subField.Type) + } + } + + default: + // noop + } +} + +func getName(names ...string) string { + return strings.TrimPrefix(strings.ToLower(strings.Join(names, ".")), ".") +} diff --git a/pkg/config/flag/flagtype_test.go b/pkg/config/flag/flagtype_test.go new file mode 100644 index 000000000..046d25ea4 --- /dev/null +++ b/pkg/config/flag/flagtype_test.go @@ -0,0 +1,226 @@ +package flag + +import ( + "reflect" + "testing" + + "github.com/containous/traefik/pkg/config/parser" + "github.com/stretchr/testify/assert" +) + +func Test_getFlagTypes(t *testing.T) { + testCases := []struct { + desc string + element interface{} + expected map[string]reflect.Kind + }{ + { + desc: "nil", + element: nil, + expected: map[string]reflect.Kind{}, + }, + { + desc: "no fields", + element: &struct { + }{}, + expected: map[string]reflect.Kind{}, + }, + { + desc: "string field", + 
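+			// a plain string flag always consumes a value, so Parse needs no type hint for it and getFlagTypes records nothing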
element: &struct { + Foo string + }{}, + expected: map[string]reflect.Kind{}, + }, + { + desc: "bool field level 0", + element: &struct { + Foo bool + fii bool + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Bool, + }, + }, + { + desc: "bool field level 1", + element: &struct { + Foo struct { + Field bool + } + }{}, + expected: map[string]reflect.Kind{ + "foo.field": reflect.Bool, + }, + }, + { + desc: "bool field level 2", + element: &struct { + Foo *struct { + Fii *struct { + Field bool + } + } + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Ptr, + "foo.fii": reflect.Ptr, + "foo.fii.field": reflect.Bool, + }, + }, + { + desc: "pointer field", + element: &struct { + Foo *struct { + Field string + } + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Ptr, + }, + }, + { + desc: "bool field level 3", + element: &struct { + Foo *struct { + Fii *struct { + Fuu *struct { + Field bool + } + } + } + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Ptr, + "foo.fii": reflect.Ptr, + "foo.fii.fuu": reflect.Ptr, + "foo.fii.fuu.field": reflect.Bool, + }, + }, + { + desc: "map string", + element: &struct { + Foo map[string]string + }{}, + expected: map[string]reflect.Kind{}, + }, + { + desc: "map bool", + element: &struct { + Foo map[string]bool + Fii struct{} + }{}, + expected: map[string]reflect.Kind{ + "foo." + parser.MapNamePlaceholder: reflect.Bool, + }, + }, + { + desc: "map struct", + element: &struct { + Foo map[string]struct { + Field bool + } + }{}, + expected: map[string]reflect.Kind{ + "foo." + parser.MapNamePlaceholder + ".field": reflect.Bool, + }, + }, + { + desc: "map map bool", + element: &struct { + Foo map[string]map[string]bool + }{}, + expected: map[string]reflect.Kind{ + "foo." + parser.MapNamePlaceholder + "." + parser.MapNamePlaceholder: reflect.Bool, + }, + }, + { + desc: "map struct map", + element: &struct { + Foo map[string]struct { + Fii map[string]bool + } + }{}, + expected: map[string]reflect.Kind{ + "foo." + parser.MapNamePlaceholder + ".fii." + parser.MapNamePlaceholder: reflect.Bool, + }, + }, + { + desc: "pointer bool field level 0", + element: &struct { + Foo *bool + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Bool, + }, + }, + { + desc: "pointer int field level 0", + element: &struct { + Foo *int + }{}, + expected: map[string]reflect.Kind{}, + }, + { + desc: "bool slice field level 0", + element: &struct { + Foo []bool + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Slice, + }, + }, + { + desc: "string slice field level 0", + element: &struct { + Foo []string + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Slice, + }, + }, + { + desc: "slice field level 1", + element: &struct { + Foo struct { + Field []string + } + }{}, + expected: map[string]reflect.Kind{ + "foo.field": reflect.Slice, + }, + }, + { + desc: "map slice string", + element: &struct { + Foo map[string][]string + }{}, + expected: map[string]reflect.Kind{ + "foo." 
+ parser.MapNamePlaceholder: reflect.Slice, + }, + }, + { + desc: "embedded struct", + element: &struct { + Yo + }{}, + expected: map[string]reflect.Kind{ + "foo": reflect.Bool, + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + actual := getFlagTypes(test.element) + assert.Equal(t, test.expected, actual) + }) + } +} + +type Yo struct { + Foo bool +} diff --git a/pkg/config/generator/generator.go b/pkg/config/generator/generator.go new file mode 100644 index 000000000..b8956a565 --- /dev/null +++ b/pkg/config/generator/generator.go @@ -0,0 +1,97 @@ +// Package generator implements the custom initialization of all the fields of an empty interface. +package generator + +import ( + "reflect" + + "github.com/containous/traefik/pkg/config/parser" +) + +type initializer interface { + SetDefaults() +} + +// Generate recursively initializes an empty structure, calling SetDefaults on each field, when it applies. +func Generate(element interface{}) { + if element == nil { + return + } + + generate(element) +} + +func generate(element interface{}) { + field := reflect.ValueOf(element) + + fill(field) +} + +func fill(field reflect.Value) { + switch field.Kind() { + case reflect.Ptr: + setPtr(field) + case reflect.Struct: + setStruct(field) + case reflect.Map: + setMap(field) + case reflect.Slice: + if field.Type().Elem().Kind() == reflect.Struct || + field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct { + slice := reflect.MakeSlice(field.Type(), 1, 1) + field.Set(slice) + + // use Ptr to allow "SetDefaults" + value := reflect.New(reflect.PtrTo(field.Type().Elem())) + setPtr(value) + + elem := value.Elem().Elem() + field.Index(0).Set(elem) + } else if field.Len() == 0 { + slice := reflect.MakeSlice(field.Type(), 0, 0) + field.Set(slice) + } + } +} + +func setPtr(field reflect.Value) { + if field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + + if field.Type().Implements(reflect.TypeOf((*initializer)(nil)).Elem()) { + method := field.MethodByName("SetDefaults") + if method.IsValid() { + method.Call([]reflect.Value{}) + } + } + + fill(field.Elem()) +} + +func setStruct(field reflect.Value) { + for i := 0; i < field.NumField(); i++ { + fd := field.Field(i) + structField := field.Type().Field(i) + + if structField.Tag.Get(parser.TagLabel) == "-" { + continue + } + + if parser.IsExported(structField) { + fill(fd) + } + } +} + +func setMap(field reflect.Value) { + if field.IsNil() { + field.Set(reflect.MakeMap(field.Type())) + } + + ptrValue := reflect.New(reflect.PtrTo(field.Type().Elem())) + fill(ptrValue) + + value := ptrValue.Elem().Elem() + key := reflect.ValueOf(parser.MapNamePlaceholder) + field.SetMapIndex(key, value) +} diff --git a/pkg/config/generator/generator_test.go b/pkg/config/generator/generator_test.go new file mode 100644 index 000000000..fbd64a94d --- /dev/null +++ b/pkg/config/generator/generator_test.go @@ -0,0 +1,439 @@ +package generator + +import ( + "testing" + + "github.com/containous/traefik/pkg/config/parser" + "github.com/stretchr/testify/assert" +) + +func TestGenerate(t *testing.T) { + testCases := []struct { + desc string + element interface{} + expected interface{} + }{ + { + desc: "nil", + }, + { + desc: "simple", + element: &Ya{}, + expected: &Ya{ + Foo: &Yaa{ + FieldIn1: "", + FieldIn2: false, + FieldIn3: 0, + FieldIn4: map[string]string{ + parser.MapNamePlaceholder: "", + }, + FieldIn5: map[string]int{ + parser.MapNamePlaceholder: 0, + }, 
+ FieldIn6: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + FieldIn7: map[string]struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + FieldIn8: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + FieldIn9: map[string]*struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + FieldIn10: struct{ Field string }{}, + FieldIn11: &struct{ Field string }{}, + FieldIn12: func(v string) *string { return &v }(""), + FieldIn13: func(v bool) *bool { return &v }(false), + FieldIn14: func(v int) *int { return &v }(0), + }, + Field1: "", + Field2: false, + Field3: 0, + Field4: map[string]string{ + parser.MapNamePlaceholder: "", + }, + Field5: map[string]int{ + parser.MapNamePlaceholder: 0, + }, + Field6: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + Field7: map[string]struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + Field8: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + Field9: map[string]*struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + Field10: struct{ Field string }{}, + Field11: &struct{ Field string }{}, + Field12: func(v string) *string { return &v }(""), + Field13: func(v bool) *bool { return &v }(false), + Field14: func(v int) *int { return &v }(0), + Field15: []int{}, + }, + }, + { + desc: "with initial state", + element: &Ya{ + Foo: &Yaa{ + FieldIn1: "bar", + FieldIn2: false, + FieldIn3: 1, + FieldIn4: nil, + FieldIn5: nil, + FieldIn6: nil, + FieldIn7: nil, + FieldIn8: nil, + FieldIn9: nil, + FieldIn10: struct{ Field string }{}, + FieldIn11: nil, + FieldIn12: nil, + FieldIn13: nil, + FieldIn14: nil, + }, + Field1: "bir", + Field2: true, + Field3: 0, + Field4: nil, + Field5: nil, + Field6: nil, + Field7: nil, + Field8: nil, + Field9: nil, + Field10: struct{ Field string }{}, + Field11: nil, + Field12: nil, + Field13: nil, + Field14: nil, + Field15: []int{7}, + }, + expected: &Ya{ + Foo: &Yaa{ + FieldIn1: "bar", + FieldIn2: false, + FieldIn3: 1, + FieldIn4: map[string]string{ + parser.MapNamePlaceholder: "", + }, + FieldIn5: map[string]int{ + parser.MapNamePlaceholder: 0, + }, + FieldIn6: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + FieldIn7: map[string]struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + FieldIn8: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + FieldIn9: map[string]*struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + FieldIn10: struct{ Field string }{}, + FieldIn11: &struct{ Field string }{}, + FieldIn12: func(v string) *string { return &v }(""), + FieldIn13: func(v bool) *bool { return &v }(false), + FieldIn14: func(v int) *int { return &v }(0), + }, + Field1: "bir", + Field2: true, + Field3: 0, + Field4: map[string]string{ + parser.MapNamePlaceholder: "", + }, + Field5: map[string]int{ + parser.MapNamePlaceholder: 0, + }, + Field6: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + Field7: map[string]struct{ Field map[string]string }{ + 
parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + Field8: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + Field9: map[string]*struct{ Field map[string]string }{ + parser.MapNamePlaceholder: { + Field: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + Field10: struct{ Field string }{}, + Field11: &struct{ Field string }{}, + Field12: func(v string) *string { return &v }(""), + Field13: func(v bool) *bool { return &v }(false), + Field14: func(v int) *int { return &v }(0), + Field15: []int{7}, + }, + }, + { + desc: "setDefault", + element: &Hu{}, + expected: &Hu{ + Foo: "hu", + Fii: &Hi{ + Field: "hi", + }, + Fuu: map[string]string{"": ""}, + Fee: map[string]Hi{"": {Field: "hi"}}, + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + Generate(test.element) + + assert.Equal(t, test.expected, test.element) + }) + } +} + +func Test_generate(t *testing.T) { + testCases := []struct { + desc string + element interface{} + expected interface{} + }{ + { + desc: "struct pointer", + element: &struct { + Foo string + Fii *struct{ Field string } + }{}, + expected: &struct { + Foo string + Fii *struct{ Field string } + }{ + Foo: "", + Fii: &struct{ Field string }{ + Field: "", + }, + }, + }, + { + desc: "string slice", + element: &struct { + Foo []string + }{}, + expected: &struct { + Foo []string + }{ + Foo: []string{}, + }, + }, + { + desc: "int slice", + element: &struct { + Foo []int + }{}, + expected: &struct { + Foo []int + }{ + Foo: []int{}, + }, + }, + { + desc: "struct slice", + element: &struct { + Foo []struct { + Field string + } + }{}, + expected: &struct { + Foo []struct { + Field string + } + }{ + Foo: []struct { + Field string + }{ + {Field: ""}, + }, + }, + }, + { + desc: "map string", + element: &struct { + Foo string + Fii map[string]string + }{}, + expected: &struct { + Foo string + Fii map[string]string + }{ + Foo: "", + Fii: map[string]string{ + parser.MapNamePlaceholder: "", + }, + }, + }, + { + desc: "map struct", + element: &struct { + Foo string + Fii map[string]struct{ Field string } + }{}, + expected: &struct { + Foo string + Fii map[string]struct{ Field string } + }{ + Foo: "", + Fii: map[string]struct{ Field string }{ + parser.MapNamePlaceholder: {}, + }, + }, + }, + { + desc: "map struct pointer level 2", + element: &struct { + Foo string + Fuu *struct { + Fii map[string]*struct{ Field string } + } + }{}, + expected: &struct { + Foo string + Fuu *struct { + Fii map[string]*struct{ Field string } + } + }{ + Foo: "", + Fuu: &struct { + Fii map[string]*struct { + Field string + } + }{ + Fii: map[string]*struct{ Field string }{ + parser.MapNamePlaceholder: { + Field: "", + }, + }, + }, + }, + }, + { + desc: "SetDefaults", + element: &Hu{}, + expected: &Hu{ + Foo: "hu", + Fii: &Hi{ + Field: "hi", + }, + Fuu: map[string]string{ + parser.MapNamePlaceholder: "", + }, + Fee: map[string]Hi{ + parser.MapNamePlaceholder: { + Field: "hi", + }, + }, + }, + }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + generate(test.element) + + assert.Equal(t, test.expected, test.element) + }) + } +} + +type Hu struct { + Foo string + Fii *Hi + Fuu map[string]string + Fee map[string]Hi +} + +func (h *Hu) SetDefaults() { + h.Foo = "hu" +} + +type Hi struct { + Field string +} + +func (h *Hi) SetDefaults() { + h.Field = "hi" +} + +type Ya struct { + Foo *Yaa + 
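+	// Field1 through Field15 repeat the field shapes of Yaa at the top level, plus a plain int slice.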
Field1 string + Field2 bool + Field3 int + Field4 map[string]string + Field5 map[string]int + Field6 map[string]struct{ Field string } + Field7 map[string]struct{ Field map[string]string } + Field8 map[string]*struct{ Field string } + Field9 map[string]*struct{ Field map[string]string } + Field10 struct{ Field string } + Field11 *struct{ Field string } + Field12 *string + Field13 *bool + Field14 *int + Field15 []int +} + +type Yaa struct { + FieldIn1 string + FieldIn2 bool + FieldIn3 int + FieldIn4 map[string]string + FieldIn5 map[string]int + FieldIn6 map[string]struct{ Field string } + FieldIn7 map[string]struct{ Field map[string]string } + FieldIn8 map[string]*struct{ Field string } + FieldIn9 map[string]*struct{ Field map[string]string } + FieldIn10 struct{ Field string } + FieldIn11 *struct{ Field string } + FieldIn12 *string + FieldIn13 *bool + FieldIn14 *int +} diff --git a/pkg/config/label/label.go b/pkg/config/label/label.go new file mode 100644 index 000000000..e821e21aa --- /dev/null +++ b/pkg/config/label/label.go @@ -0,0 +1,33 @@ +// Package label implements the decoding and encoding between flat labels and a typed Configuration. +package label + +import ( + "github.com/containous/traefik/pkg/config" + "github.com/containous/traefik/pkg/config/parser" +) + +// DecodeConfiguration converts the labels to a configuration. +func DecodeConfiguration(labels map[string]string) (*config.Configuration, error) { + conf := &config.Configuration{ + HTTP: &config.HTTPConfiguration{}, + TCP: &config.TCPConfiguration{}, + } + + err := parser.Decode(labels, conf, "traefik.http", "traefik.tcp") + if err != nil { + return nil, err + } + + return conf, nil +} + +// EncodeConfiguration converts a configuration to labels. +func EncodeConfiguration(conf *config.Configuration) (map[string]string, error) { + return parser.Encode(conf) +} + +// Decode converts the labels to an element. +// labels -> [ node -> node + metadata (type) ] -> element (node) +func Decode(labels map[string]string, element interface{}, filters ...string) error { + return parser.Decode(labels, element, filters...) 
+} diff --git a/pkg/provider/label/parser_test.go b/pkg/config/label/label_test.go similarity index 99% rename from pkg/provider/label/parser_test.go rename to pkg/config/label/label_test.go index 5d03b2a14..78ee957ff 100644 --- a/pkg/provider/label/parser_test.go +++ b/pkg/config/label/label_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config" + "github.com/containous/traefik/pkg/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -306,12 +306,12 @@ func TestDecodeConfiguration(t *testing.T) { RateLimit: &config.RateLimit{ RateSet: map[string]*config.Rate{ "Rate0": { - Period: parse.Duration(42 * time.Second), + Period: types.Duration(42 * time.Second), Average: 42, Burst: 42, }, "Rate1": { - Period: parse.Duration(42 * time.Second), + Period: types.Duration(42 * time.Second), Average: 42, Burst: 42, }, @@ -700,12 +700,12 @@ func TestEncodeConfiguration(t *testing.T) { RateLimit: &config.RateLimit{ RateSet: map[string]*config.Rate{ "Rate0": { - Period: parse.Duration(42 * time.Nanosecond), + Period: types.Duration(42 * time.Nanosecond), Average: 42, Burst: 42, }, "Rate1": { - Period: parse.Duration(42 * time.Nanosecond), + Period: types.Duration(42 * time.Nanosecond), Average: 42, Burst: 42, }, diff --git a/pkg/config/middlewares.go b/pkg/config/middlewares.go index a76b5b744..aaa031752 100644 --- a/pkg/config/middlewares.go +++ b/pkg/config/middlewares.go @@ -1,8 +1,8 @@ package config import ( - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/ip" + "github.com/containous/traefik/pkg/types" ) // +k8s:deepcopy-gen=true @@ -52,7 +52,7 @@ type Auth struct { // BasicAuth holds the HTTP basic authentication configuration. type BasicAuth struct { - Users `json:"users,omitempty" mapstructure:","` + Users Users `json:"users,omitempty"` UsersFile string `json:"usersFile,omitempty"` Realm string `json:"realm,omitempty"` RemoveHeader bool `json:"removeHeader,omitempty"` @@ -93,7 +93,7 @@ type Compress struct{} // DigestAuth holds the Digest HTTP authentication configuration. type DigestAuth struct { - Users `json:"users,omitempty" mapstructure:","` + Users Users `json:"users,omitempty"` UsersFile string `json:"usersFile,omitempty"` RemoveHeader bool `json:"removeHeader,omitempty"` Realm string `json:"realm,omitempty" mapstructure:","` @@ -273,7 +273,7 @@ type PassTLSClientCert struct { // Rate holds the rate limiting configuration for a specific time period. type Rate struct { - Period parse.Duration `json:"period,omitempty"` + Period types.Duration `json:"period,omitempty"` Average int64 `json:"average,omitempty"` Burst int64 `json:"burst,omitempty"` } diff --git a/pkg/provider/label/internal/element_fill.go b/pkg/config/parser/element_fill.go similarity index 86% rename from pkg/provider/label/internal/element_fill.go rename to pkg/config/parser/element_fill.go index 7ad53abc4..8b6119b73 100644 --- a/pkg/provider/label/internal/element_fill.go +++ b/pkg/config/parser/element_fill.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "fmt" @@ -7,15 +7,14 @@ import ( "strings" "time" - "github.com/containous/flaeg/parse" + "github.com/containous/traefik/pkg/types" ) type initializer interface { SetDefaults() } -// Fill the fields of the element. -// nodes -> element +// Fill populates the fields of the element using the information in node. 
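+//
+// For illustration only (this follows the same shape as the cases in element_fill_test.go;
+// the value "bar" is arbitrary), a decoded node tree is applied to a typed element as:
+//
+//	element := &struct{ Foo string }{}
+//	node := &Node{Name: "traefik", Kind: reflect.Struct, Children: []*Node{
+//		{Name: "Foo", FieldName: "Foo", Value: "bar", Kind: reflect.String},
+//	}}
+//	_ = Fill(element, node) // element.Foo == "bar"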
func Fill(element interface{}, node *Node) error { if element == nil || node == nil { return nil @@ -25,12 +24,12 @@ func Fill(element interface{}, node *Node) error { return fmt.Errorf("missing node type: %s", node.Name) } - elem := reflect.ValueOf(element) - if elem.Kind() == reflect.Struct { + root := reflect.ValueOf(element) + if root.Kind() == reflect.Struct { return fmt.Errorf("struct are not supported, use pointer instead") } - return fill(elem.Elem(), node) + return fill(root.Elem(), node) } func fill(field reflect.Value, node *Node) error { @@ -117,8 +116,9 @@ func setStruct(field reflect.Value, node *Node) error { } func setSlice(field reflect.Value, node *Node) error { - if field.Type().Elem().Kind() == reflect.Struct { - return setSliceAsStruct(field, node) + if field.Type().Elem().Kind() == reflect.Struct || + field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct { + return setSliceStruct(field, node) } if len(node.Value) == 0 { @@ -135,7 +135,7 @@ func setSlice(field reflect.Value, node *Node) error { switch field.Type().Elem().Kind() { case reflect.String: - field.Index(i).Set(reflect.ValueOf(value)) + field.Index(i).SetString(value) case reflect.Int: val, err := strconv.ParseInt(value, 10, 64) if err != nil { @@ -211,6 +211,27 @@ func setSlice(field reflect.Value, node *Node) error { return nil } +func setSliceStruct(field reflect.Value, node *Node) error { + if node.Tag.Get(TagLabelSliceAsStruct) != "" { + return setSliceAsStruct(field, node) + } + + field.Set(reflect.MakeSlice(field.Type(), len(node.Children), len(node.Children))) + + for i, child := range node.Children { + // use Ptr to allow "SetDefaults" + value := reflect.New(reflect.PtrTo(field.Type().Elem())) + err := setPtr(value, child) + if err != nil { + return err + } + + field.Index(i).Set(value.Elem().Elem()) + } + + return nil +} + func setSliceAsStruct(field reflect.Value, node *Node) error { if len(node.Children) == 0 { return fmt.Errorf("invalid slice: node %s", node.Name) @@ -254,7 +275,7 @@ func setMap(field reflect.Value, node *Node) error { func setInt(field reflect.Value, value string, bitSize int) error { switch field.Type() { - case reflect.TypeOf(parse.Duration(0)): + case reflect.TypeOf(types.Duration(0)): return setDuration(field, value, bitSize, time.Second) case reflect.TypeOf(time.Duration(0)): return setDuration(field, value, bitSize, time.Nanosecond) diff --git a/pkg/provider/label/internal/element_fill_test.go b/pkg/config/parser/element_fill_test.go similarity index 81% rename from pkg/provider/label/internal/element_fill_test.go rename to pkg/config/parser/element_fill_test.go index 4465a7062..db0726c45 100644 --- a/pkg/provider/label/internal/element_fill_test.go +++ b/pkg/config/parser/element_fill_test.go @@ -1,11 +1,11 @@ -package internal +package parser import ( "reflect" "testing" "time" - "github.com/containous/flaeg/parse" + "github.com/containous/traefik/pkg/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -390,7 +390,7 @@ func TestFill(t *testing.T) { expected: expected{element: &struct{ Foo time.Duration }{Foo: 4 * time.Nanosecond}}, }, { - desc: "parse.Duration with unit", + desc: "types.Duration with unit", node: &Node{ Name: "traefik", Kind: reflect.Struct, @@ -398,11 +398,11 @@ func TestFill(t *testing.T) { {Name: "Foo", FieldName: "Foo", Value: "4s", Kind: reflect.Int64}, }, }, - element: &struct{ Foo parse.Duration }{}, - expected: expected{element: &struct{ Foo parse.Duration }{Foo: 
parse.Duration(4 * time.Second)}}, + element: &struct{ Foo types.Duration }{}, + expected: expected{element: &struct{ Foo types.Duration }{Foo: types.Duration(4 * time.Second)}}, }, { - desc: "parse.Duration without unit", + desc: "types.Duration without unit", node: &Node{ Name: "traefik", Kind: reflect.Struct, @@ -410,8 +410,8 @@ func TestFill(t *testing.T) { {Name: "Foo", FieldName: "Foo", Value: "4", Kind: reflect.Int64}, }, }, - element: &struct{ Foo parse.Duration }{}, - expected: expected{element: &struct{ Foo parse.Duration }{Foo: parse.Duration(4 * time.Second)}}, + element: &struct{ Foo types.Duration }{}, + expected: expected{element: &struct{ Foo types.Duration }{Foo: types.Duration(4 * time.Second)}}, }, { desc: "bool", @@ -722,6 +722,30 @@ func TestFill(t *testing.T) { element: &struct{ Foo []string }{}, expected: expected{element: &struct{ Foo []string }{Foo: []string{"huu", "hii", "hoo"}}}, }, + { + desc: "slice named type", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "huu,hii,hoo", Kind: reflect.Slice}, + }, + }, + element: &struct{ Foo []NamedType }{}, + expected: expected{element: &struct{ Foo []NamedType }{Foo: []NamedType{"huu", "hii", "hoo"}}}, + }, + { + desc: "slice named type int", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "1,2,3", Kind: reflect.Slice}, + }, + }, + element: &struct{ Foo []NamedTypeInt }{}, + expected: expected{element: &struct{ Foo []NamedTypeInt }{Foo: []NamedTypeInt{1, 2, 3}}}, + }, { desc: "empty slice", node: &Node{ @@ -1046,18 +1070,6 @@ func TestFill(t *testing.T) { element: &struct{ Foo []bool }{}, expected: expected{error: true}, }, - { - desc: "slice struct", - node: &Node{ - Name: "traefik", - Kind: reflect.Struct, - Children: []*Node{ - {Name: "Foo", FieldName: "Foo", Value: "huu", Kind: reflect.Slice}, - }, - }, - element: &struct{ Foo []struct{ Fii string } }{}, - expected: expected{error: true}, - }, { desc: "slice slice-as-struct", node: &Node{ @@ -1068,6 +1080,7 @@ func TestFill(t *testing.T) { Name: "Fii", FieldName: "Foo", Kind: reflect.Slice, + Tag: `label-slice-as-struct:"Fii"`, Children: []*Node{ {Name: "bar", FieldName: "Bar", Kind: reflect.String, Value: "haa"}, {Name: "bir", FieldName: "Bir", Kind: reflect.String, Value: "hii"}, @@ -1098,6 +1111,47 @@ func TestFill(t *testing.T) { }, }}, }, + { + desc: "slice slice-as-struct pointer", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + { + Name: "Fii", + FieldName: "Foo", + Kind: reflect.Slice, + Tag: `label-slice-as-struct:"Fii"`, + Children: []*Node{ + {Name: "bar", FieldName: "Bar", Kind: reflect.String, Value: "haa"}, + {Name: "bir", FieldName: "Bir", Kind: reflect.String, Value: "hii"}, + }, + }, + }, + }, + element: &struct { + Foo []*struct { + Bar string + Bir string + } `label-slice-as-struct:"Fii"` + }{}, + expected: expected{element: &struct { + Foo []*struct { + Bar string + Bir string + } `label-slice-as-struct:"Fii"` + }{ + Foo: []*struct { + Bar string + Bir string + }{ + { + Bar: "haa", + Bir: "hii", + }, + }, + }}, + }, { desc: "slice slice-as-struct without children", node: &Node{ @@ -1107,6 +1161,7 @@ func TestFill(t *testing.T) { { Name: "Fii", FieldName: "Foo", + Tag: `label-slice-as-struct:"Fii"`, Kind: reflect.Slice, }, }, @@ -1134,12 +1189,12 @@ func TestFill(t *testing.T) { }}, }}, element: &struct { - Foo *initialledFoo + Foo *InitializedFoo }{}, expected: 
expected{element: &struct { - Foo *initialledFoo + Foo *InitializedFoo }{ - Foo: &initialledFoo{ + Foo: &InitializedFoo{ Fii: "default", Fuu: "huu", }, @@ -1170,6 +1225,164 @@ func TestFill(t *testing.T) { }, }}, }, + { + desc: "int pointer", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "4", Kind: reflect.Ptr}, + }, + }, + element: &struct{ Foo *int }{}, + expected: expected{element: &struct{ Foo *int }{Foo: func(v int) *int { return &v }(4)}}, + }, + { + desc: "bool pointer", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "true", Kind: reflect.Ptr}, + }, + }, + element: &struct{ Foo *bool }{}, + expected: expected{element: &struct{ Foo *bool }{Foo: func(v bool) *bool { return &v }(true)}}, + }, + { + desc: "string pointer", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "bar", Kind: reflect.Ptr}, + }, + }, + element: &struct{ Foo *string }{}, + expected: expected{element: &struct{ Foo *string }{Foo: func(v string) *string { return &v }("bar")}}, + }, + { + desc: "embedded", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + { + Name: "Foo", + FieldName: "Foo", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String}, + }}, + }}, + element: &struct { + Foo struct { + FiiFoo + } + }{}, + expected: expected{element: &struct { + Foo struct { + FiiFoo + } + }{ + Foo: struct { + FiiFoo + }{ + FiiFoo: FiiFoo{ + Fii: "", + Fuu: "huu", + }, + }, + }}, + }, + { + desc: "slice struct", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Kind: reflect.Slice, Children: []*Node{ + {Name: "[0]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "A", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "A", Kind: reflect.String}, + }}, + {Name: "[1]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "B", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "B", Kind: reflect.String}, + }}, + {Name: "[2]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "C", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "C", Kind: reflect.String}, + }}, + }}, + }, + }, + element: &struct { + Foo []struct { + Field1 string + Field2 string + } + }{}, + expected: expected{element: &struct { + Foo []struct { + Field1 string + Field2 string + } + }{ + Foo: []struct { + Field1 string + Field2 string + }{ + {Field1: "A", Field2: "A"}, + {Field1: "B", Field2: "B"}, + {Field1: "C", Field2: "C"}, + }, + }}, + }, + { + desc: "slice pointer struct", + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Kind: reflect.Slice, Children: []*Node{ + {Name: "[0]", Kind: reflect.Ptr, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "A", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "A", Kind: reflect.String}, + }}, + {Name: "[1]", Kind: reflect.Ptr, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "B", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "B", Kind: reflect.String}, + }}, + {Name: "[2]", Kind: reflect.Ptr, Children: []*Node{ + {Name: "Field1", FieldName: 
"Field1", Value: "C", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "C", Kind: reflect.String}, + }}, + }}, + }, + }, + element: &struct { + Foo []*struct { + Field1 string + Field2 string + } + }{}, + expected: expected{element: &struct { + Foo []*struct { + Field1 string + Field2 string + } + }{ + Foo: []*struct { + Field1 string + Field2 string + }{ + {Field1: "A", Field2: "A"}, + {Field1: "B", Field2: "B"}, + {Field1: "C", Field2: "C"}, + }, + }}, + }, } for _, test := range testCases { @@ -1188,12 +1401,15 @@ func TestFill(t *testing.T) { } } -type initialledFoo struct { +type NamedType string +type NamedTypeInt int + +type InitializedFoo struct { Fii string Fuu string } -func (t *initialledFoo) SetDefaults() { +func (t *InitializedFoo) SetDefaults() { t.Fii = "default" } @@ -1206,3 +1422,10 @@ func (t *wrongInitialledFoo) SetDefaults() error { t.Fii = "default" return nil } + +type Bouya string + +type FiiFoo struct { + Fii string + Fuu Bouya +} diff --git a/pkg/provider/label/internal/element_nodes.go b/pkg/config/parser/element_nodes.go similarity index 55% rename from pkg/provider/label/internal/element_nodes.go rename to pkg/config/parser/element_nodes.go index 4436a00f8..3caabcce2 100644 --- a/pkg/provider/label/internal/element_nodes.go +++ b/pkg/config/parser/element_nodes.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "fmt" @@ -7,13 +7,15 @@ import ( "strings" ) -// EncodeToNode Converts an element to a node. +// EncodeToNode converts an element to a node. // element -> nodes -func EncodeToNode(element interface{}) (*Node, error) { +func EncodeToNode(element interface{}, omitEmpty bool) (*Node, error) { rValue := reflect.ValueOf(element) node := &Node{Name: "traefik"} - err := setNodeValue(node, rValue) + encoder := encoderToNode{omitEmpty: omitEmpty} + + err := encoder.setNodeValue(node, rValue) if err != nil { return nil, err } @@ -21,7 +23,11 @@ func EncodeToNode(element interface{}) (*Node, error) { return node, nil } -func setNodeValue(node *Node, rValue reflect.Value) error { +type encoderToNode struct { + omitEmpty bool +} + +func (e encoderToNode) setNodeValue(node *Node, rValue reflect.Value) error { switch rValue.Kind() { case reflect.String: node.Value = rValue.String() @@ -34,13 +40,13 @@ func setNodeValue(node *Node, rValue reflect.Value) error { case reflect.Bool: node.Value = strconv.FormatBool(rValue.Bool()) case reflect.Struct: - return setStructValue(node, rValue) + return e.setStructValue(node, rValue) case reflect.Ptr: - return setNodeValue(node, rValue.Elem()) + return e.setNodeValue(node, rValue.Elem()) case reflect.Map: - return setMapValue(node, rValue) + return e.setMapValue(node, rValue) case reflect.Slice: - return setSliceValue(node, rValue) + return e.setSliceValue(node, rValue) default: // noop } @@ -48,14 +54,14 @@ func setNodeValue(node *Node, rValue reflect.Value) error { return nil } -func setStructValue(node *Node, rValue reflect.Value) error { +func (e encoderToNode) setStructValue(node *Node, rValue reflect.Value) error { rType := rValue.Type() for i := 0; i < rValue.NumField(); i++ { field := rType.Field(i) fieldValue := rValue.Field(i) - if !isExported(field) { + if !IsExported(field) { continue } @@ -67,7 +73,7 @@ func setStructValue(node *Node, rValue reflect.Value) error { return err } - if isSkippedField(field, fieldValue) { + if e.isSkippedField(field, fieldValue) { continue } @@ -76,18 +82,31 @@ func setStructValue(node *Node, rValue reflect.Value) error { nodeName = 
field.Tag.Get(TagLabelSliceAsStruct) } - child := &Node{Name: nodeName, FieldName: field.Name} + if field.Anonymous { + if err := e.setNodeValue(node, fieldValue); err != nil { + return err + } + continue + } - if err := setNodeValue(child, fieldValue); err != nil { + child := &Node{Name: nodeName, FieldName: field.Name, Description: field.Tag.Get(TagDescription)} + + if err := e.setNodeValue(child, fieldValue); err != nil { return err } - if field.Type.Kind() == reflect.Ptr && len(child.Children) == 0 { - if field.Tag.Get(TagLabel) != "allowEmpty" { + if field.Type.Kind() == reflect.Ptr { + if field.Type.Elem().Kind() != reflect.Struct && fieldValue.IsNil() { continue } - child.Value = "true" + if field.Type.Elem().Kind() == reflect.Struct && len(child.Children) == 0 { + if field.Tag.Get(TagLabel) != TagLabelAllowEmpty { + continue + } + + child.Value = "true" + } } node.Children = append(node.Children, child) @@ -96,28 +115,44 @@ func setStructValue(node *Node, rValue reflect.Value) error { return nil } -func setMapValue(node *Node, rValue reflect.Value) error { +func (e encoderToNode) setMapValue(node *Node, rValue reflect.Value) error { for _, key := range rValue.MapKeys() { child := &Node{Name: key.String(), FieldName: key.String()} node.Children = append(node.Children, child) - if err := setNodeValue(child, rValue.MapIndex(key)); err != nil { + if err := e.setNodeValue(child, rValue.MapIndex(key)); err != nil { return err } } return nil } -func setSliceValue(node *Node, rValue reflect.Value) error { +func (e encoderToNode) setSliceValue(node *Node, rValue reflect.Value) error { // label-slice-as-struct if rValue.Type().Elem().Kind() == reflect.Struct && !strings.EqualFold(node.Name, node.FieldName) { if rValue.Len() > 1 { return fmt.Errorf("node %s has too many slice entries: %d", node.Name, rValue.Len()) } - if err := setNodeValue(node, rValue.Index(0)); err != nil { - return err + return e.setNodeValue(node, rValue.Index(0)) + } + + if rValue.Type().Elem().Kind() == reflect.Struct || + rValue.Type().Elem().Kind() == reflect.Ptr && rValue.Type().Elem().Elem().Kind() == reflect.Struct { + for i := 0; i < rValue.Len(); i++ { + child := &Node{Name: "[" + strconv.Itoa(i) + "]"} + + eValue := rValue.Index(i) + + err := e.setNodeValue(child, eValue) + if err != nil { + return err + } + + node.Children = append(node.Children, child) } + + return nil } var values []string @@ -145,8 +180,8 @@ func setSliceValue(node *Node, rValue reflect.Value) error { return nil } -func isSkippedField(field reflect.StructField, fieldValue reflect.Value) bool { - if field.Type.Kind() == reflect.String && fieldValue.Len() == 0 { +func (e encoderToNode) isSkippedField(field reflect.StructField, fieldValue reflect.Value) bool { + if e.omitEmpty && field.Type.Kind() == reflect.String && fieldValue.Len() == 0 { return true } @@ -154,7 +189,12 @@ func isSkippedField(field reflect.StructField, fieldValue reflect.Value) bool { return true } - if (field.Type.Kind() == reflect.Slice || field.Type.Kind() == reflect.Map) && + if e.omitEmpty && (field.Type.Kind() == reflect.Slice) && + (fieldValue.IsNil() || fieldValue.Len() == 0) { + return true + } + + if (field.Type.Kind() == reflect.Map) && (fieldValue.IsNil() || fieldValue.Len() == 0) { return true } diff --git a/pkg/provider/label/internal/element_nodes_test.go b/pkg/config/parser/element_nodes_test.go similarity index 78% rename from pkg/provider/label/internal/element_nodes_test.go rename to pkg/config/parser/element_nodes_test.go index 4f811affb..854c50fa7 
100644 --- a/pkg/provider/label/internal/element_nodes_test.go +++ b/pkg/config/parser/element_nodes_test.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "testing" @@ -18,6 +18,16 @@ func TestEncodeToNode(t *testing.T) { element interface{} expected expected }{ + { + desc: "Description", + element: struct { + Foo string `description:"text"` + }{Foo: "bar"}, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "bar", Description: "text"}, + }}, + }, + }, { desc: "string", element: struct { @@ -257,10 +267,16 @@ func TestEncodeToNode(t *testing.T) { Fuu: "huu", }, }, - expected: expected{error: true}, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "Fii", FieldName: "Fii", Value: "hii"}, + {Name: "Fuu", FieldName: "Fuu", Value: "huu"}, + }}, + }}, + }, }, { - desc: "struct nil pointer", + desc: "string nil pointer", element: struct { Foo *struct { Fii *string @@ -271,10 +287,64 @@ func TestEncodeToNode(t *testing.T) { Fii *string Fuu string }{ + Fii: nil, Fuu: "huu", }, }, - expected: expected{error: true}, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "Fuu", FieldName: "Fuu", Value: "huu"}, + }}, + }}, + }, + }, + { + desc: "int pointer", + element: struct { + Foo *struct { + Fii *int + Fuu int + } + }{ + Foo: &struct { + Fii *int + Fuu int + }{ + Fii: func(v int) *int { return &v }(6), + Fuu: 4, + }, + }, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "Fii", FieldName: "Fii", Value: "6"}, + {Name: "Fuu", FieldName: "Fuu", Value: "4"}, + }}, + }}, + }, + }, + { + desc: "bool pointer", + element: struct { + Foo *struct { + Fii *bool + Fuu bool + } + }{ + Foo: &struct { + Fii *bool + Fuu bool + }{ + Fii: func(v bool) *bool { return &v }(true), + Fuu: true, + }, + }, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "Fii", FieldName: "Fii", Value: "true"}, + {Name: "Fuu", FieldName: "Fuu", Value: "true"}, + }}, + }}, + }, }, { desc: "struct nil struct pointer", @@ -545,6 +615,60 @@ func TestEncodeToNode(t *testing.T) { }, expected: expected{error: true}, }, + { + desc: "slice of struct", + element: struct { + Foo []struct { + Field string + } + }{ + Foo: []struct { + Field string + }{ + { + Field: "bar", + }, + { + Field: "bir", + }, + }, + }, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "[0]", Children: []*Node{ + {Name: "Field", FieldName: "Field", Value: "bar"}, + }}, + {Name: "[1]", Children: []*Node{ + {Name: "Field", FieldName: "Field", Value: "bir"}, + }}, + }}, + }}}, + }, + { + desc: "slice of pointer of struct", + element: struct { + Foo []*struct { + Field string + } + }{ + Foo: []*struct { + Field string + }{ + {Field: "bar"}, + {Field: "bir"}, + }, + }, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "[0]", Children: []*Node{ + {Name: "Field", FieldName: "Field", Value: "bar"}, + }}, + {Name: "[1]", Children: []*Node{ + {Name: "Field", FieldName: "Field", Value: "bir"}, + }}, + }}, + }}}, + }, { desc: "empty slice", element: struct { @@ -572,6 +696,26 @@ func TestEncodeToNode(t *testing.T) { }, expected: expected{node: 
&Node{Name: "traefik"}}, }, + { + desc: "embedded", + element: struct { + Foo struct{ FiiFoo } + }{ + Foo: struct{ FiiFoo }{ + FiiFoo: FiiFoo{ + Fii: "hii", + Fuu: "huu", + }, + }, + }, + expected: expected{node: &Node{Name: "traefik", Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Children: []*Node{ + {Name: "Fii", FieldName: "Fii", Value: "hii"}, + {Name: "Fuu", FieldName: "Fuu", Value: "huu"}, + }}, + }}, + }, + }, } for _, test := range testCases { @@ -579,7 +723,7 @@ func TestEncodeToNode(t *testing.T) { t.Run(test.desc, func(t *testing.T) { t.Parallel() - node, err := EncodeToNode(test.element) + node, err := EncodeToNode(test.element, true) if test.expected.error { require.Error(t, err) diff --git a/pkg/config/parser/flat_encode.go b/pkg/config/parser/flat_encode.go new file mode 100644 index 000000000..a18e6daf7 --- /dev/null +++ b/pkg/config/parser/flat_encode.go @@ -0,0 +1,166 @@ +package parser + +import ( + "fmt" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/containous/traefik/pkg/types" +) + +const defaultPtrValue = "false" + +// FlatOpts holds options used when encoding to Flat. +type FlatOpts struct { + Case string // "lower" or "upper", defaults to "lower". + Separator string + SkipRoot bool +} + +// Flat is a configuration item representation. +type Flat struct { + Name string + Description string + Default string +} + +// EncodeToFlat encodes a node to a Flat representation. +// Even though the given node argument should have already been augmented with metadata such as kind, +// the element (and its type information) is still needed to treat remaining edge cases. +func EncodeToFlat(element interface{}, node *Node, opts FlatOpts) ([]Flat, error) { + if element == nil || node == nil { + return nil, nil + } + + if node.Kind == 0 { + return nil, fmt.Errorf("missing node type: %s", node.Name) + } + + elem := reflect.ValueOf(element) + if elem.Kind() == reflect.Struct { + return nil, fmt.Errorf("structs are not supported, use pointer instead") + } + + encoder := encoderToFlat{FlatOpts: opts} + + var entries []Flat + if encoder.SkipRoot { + for _, child := range node.Children { + field := encoder.getField(elem.Elem(), child) + entries = append(entries, encoder.createFlat(field, child.Name, child)...) 
+ } + } else { + entries = encoder.createFlat(elem, strings.ToLower(node.Name), node) + } + + sort.Slice(entries, func(i, j int) bool { return entries[i].Name < entries[j].Name }) + + return entries, nil +} + +type encoderToFlat struct { + FlatOpts +} + +func (e encoderToFlat) createFlat(field reflect.Value, name string, node *Node) []Flat { + var entries []Flat + if node.Kind != reflect.Map && node.Description != "-" { + if !(node.Kind == reflect.Ptr && len(node.Children) > 0) || + (node.Kind == reflect.Ptr && node.Tag.Get("label") == TagLabelAllowEmpty) { + if node.Name[0] != '[' { + entries = append(entries, Flat{ + Name: e.getName(name), + Description: node.Description, + Default: e.getNodeValue(e.getField(field, node), node), + }) + } + } + } + + for _, child := range node.Children { + if node.Kind == reflect.Map { + fChild := e.getField(field, child) + + var v string + if child.Kind == reflect.Struct { + v = defaultPtrValue + } else { + v = e.getNodeValue(fChild, child) + } + + if node.Description != "-" { + entries = append(entries, Flat{ + Name: e.getName(name, child.Name), + Description: node.Description, + Default: v, + }) + } + + if child.Kind == reflect.Struct || child.Kind == reflect.Ptr { + for _, ch := range child.Children { + f := e.getField(fChild, ch) + n := e.getName(name, child.Name, ch.Name) + entries = append(entries, e.createFlat(f, n, ch)...) + } + } + } else { + f := e.getField(field, child) + n := e.getName(name, child.Name) + entries = append(entries, e.createFlat(f, n, child)...) + } + } + + return entries +} + +func (e encoderToFlat) getField(field reflect.Value, node *Node) reflect.Value { + switch field.Kind() { + case reflect.Struct: + return field.FieldByName(node.FieldName) + case reflect.Ptr: + if field.Elem().Kind() == reflect.Struct { + return field.Elem().FieldByName(node.FieldName) + } + return field.Elem() + case reflect.Map: + return field.MapIndex(reflect.ValueOf(node.FieldName)) + default: + return field + } +} + +func (e encoderToFlat) getNodeValue(field reflect.Value, node *Node) string { + if node.Kind == reflect.Ptr && len(node.Children) > 0 { + return defaultPtrValue + } + + if field.Kind() == reflect.Int64 { + i, _ := strconv.ParseInt(node.Value, 10, 64) + + switch field.Type() { + case reflect.TypeOf(types.Duration(time.Second)): + return strconv.Itoa(int(i) / int(time.Second)) + case reflect.TypeOf(time.Second): + return time.Duration(i).String() + } + } + + return node.Value +} + +func (e encoderToFlat) getName(names ...string) string { + var name string + if names[len(names)-1][0] == '[' { + name = strings.Join(names, "") + } else { + name = strings.Join(names, e.Separator) + } + + if strings.EqualFold(e.Case, "upper") { + return strings.ToUpper(name) + } + return strings.ToLower(name) +} diff --git a/pkg/config/parser/flat_encode_test.go b/pkg/config/parser/flat_encode_test.go new file mode 100644 index 000000000..665bd894b --- /dev/null +++ b/pkg/config/parser/flat_encode_test.go @@ -0,0 +1,1250 @@ +package parser + +import ( + "reflect" + "testing" + "time" + + "github.com/containous/traefik/pkg/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEncodeToFlat(t *testing.T) { + testCases := []struct { + desc string + element interface{} + node *Node + opts *FlatOpts + expected []Flat + }{ + { + desc: "string field", + element: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + 
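A minimal usage sketch of the flat encoder, assuming a node tree that already carries kind metadata (here built by hand, mirroring the test cases below); the sample struct, values, and printing are illustrative only:

package main

import (
	"fmt"
	"reflect"

	"github.com/containous/traefik/pkg/config/parser"
)

func main() {
	// The element must be a pointer; only its type information is inspected here.
	element := &struct {
		Field string `description:"field description"`
	}{Field: "test"}

	// Hand-built node tree with kinds already set, as the tests below do.
	node := &parser.Node{
		Name: "traefik",
		Kind: reflect.Ptr,
		Children: []*parser.Node{
			{Name: "Field", FieldName: "Field", Description: "field description", Value: "test", Kind: reflect.String},
		},
	}

	// Default-style options: lower case, "." separator, root skipped.
	flats, err := parser.EncodeToFlat(element, node, parser.FlatOpts{Separator: ".", SkipRoot: true})
	if err != nil {
		panic(err)
	}
	for _, f := range flats {
		fmt.Println(f.Name, "=", f.Default) // field = test
	}

	// Per the test cases below, Case: "upper" with Separator: "_" and
	// SkipRoot: false renders the same entry as TRAEFIK_FIELD instead.
}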
Children: []*Node{ + { + Name: "Field", + FieldName: "Field", + Description: "field description", + Value: "test", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "test", + }}, + }, + { + desc: "int field", + element: &struct { + Field int `description:"field description"` + }{ + Field: 6, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "6", + Kind: reflect.Int, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "6", + }}, + }, + { + desc: "bool field", + element: &struct { + Field bool `description:"field description"` + }{ + Field: true, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "true", + Kind: reflect.Bool, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "true", + }}, + }, + { + desc: "string pointer field", + element: &struct { + Field *string `description:"field description"` + }{ + Field: func(v string) *string { return &v }("test"), + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "test", + Kind: reflect.Ptr, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "test", + }}, + }, + { + desc: "string pointer field, custom option", + element: &struct { + Field *string `description:"field description"` + }{ + Field: func(v string) *string { return &v }("test"), + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "test", + Kind: reflect.Ptr, + Tag: `description:"field description"`, + }, + }, + }, + opts: &FlatOpts{ + Case: "upper", + Separator: "_", + SkipRoot: false, + }, + expected: []Flat{{ + Name: "TRAEFIK_FIELD", + Description: "field description", + Default: "test", + }}, + }, + { + desc: "int pointer field", + element: &struct { + Field *int `description:"field description"` + }{ + Field: func(v int) *int { return &v }(6), + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "6", + Kind: reflect.Ptr, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "6", + }}, + }, + { + desc: "bool pointer field", + element: &struct { + Field *bool `description:"field description"` + }{ + Field: func(v bool) *bool { return &v }(true), + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "true", + Kind: reflect.Ptr, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "true", + }}, + }, + { + desc: "slice of string field, no 
initial value", + element: &struct { + Field []string `description:"field description"` + }{}, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Kind: reflect.Slice, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "", + }}, + }, + { + desc: "slice of string field, with initial value", + element: &struct { + Field []string `description:"field description"` + }{ + Field: []string{"foo", "bar"}, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "foo, bar", + Kind: reflect.Slice, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "foo, bar", + }}, + }, + { + desc: "slice of int field, no initial value", + element: &struct { + Field []int `description:"field description"` + }{}, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Kind: reflect.Slice, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "", + }}, + }, + { + desc: "slice of int field, with initial value", + element: &struct { + Field []int `description:"field description"` + }{ + Field: []int{6, 3}, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "6, 3", + Kind: reflect.Slice, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "6, 3", + }}, + }, + { + desc: "map string field", + element: &struct { + Field map[string]string `description:"field description"` + }{ + Field: map[string]string{ + MapNamePlaceholder: "", + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Kind: reflect.Map, + Tag: `description:"field description"`, + Children: []*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.String, + }, + }, + }, + }, + }, + expected: []Flat{{ + Name: "field.", + Description: "field description", + Default: "", + }}, + }, + { + desc: "struct pointer field", + element: &struct { + Foo *struct { + Field string `description:"field description"` + } `description:"foo description"` + }{ + Foo: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "test", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "struct pointer field, hide field", + element: &struct { + Foo *struct 
{ + Field string `description:"-"` + } `description:"foo description"` + }{ + Foo: &struct { + Field string `description:"-"` + }{ + Field: "test", + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "Field", + Description: "-", + FieldName: "Field", + Value: "test", + Kind: reflect.String, + Tag: `description:"-"`, + }, + }, + }, + }, + }, + expected: nil, + }, + { + desc: "struct pointer field, allow empty", + element: &struct { + Foo *struct { + Field string `description:"field description"` + } `description:"foo description" label:"allowEmpty"` + }{ + Foo: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description" label:"allowEmpty"`, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "test", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo", + Description: "foo description", + Default: "false", + }, + { + Name: "foo.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "struct pointer field level 2", + element: &struct { + Foo *struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description"` + }{ + Fii: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "Fii", + Description: "fii description", + FieldName: "Fii", + Kind: reflect.Ptr, + Tag: `description:"fii description"`, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "test", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.fii.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "struct pointer field level 2, allow empty", + element: &struct { + Foo *struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description" label:"allowEmpty"` + } `description:"foo description" label:"allowEmpty"` + }{ + Foo: &struct { + Fii *struct { + Field string `description:"field description"` + } `description:"fii description" label:"allowEmpty"` + }{ + Fii: &struct { + Field string `description:"field description"` + }{ + Field: "test", + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description" label:"allowEmpty"`, + Children: []*Node{ + { + Name: "Fii", + Description: "fii description", + FieldName: "Fii", + 
Kind: reflect.Ptr, + Tag: `description:"fii description" label:"allowEmpty"`, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "test", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo", + Description: "foo description", + Default: "false", + }, + { + Name: "foo.fii", + Description: "fii description", + Default: "false", + }, + { + Name: "foo.fii.field", + Description: "field description", + Default: "test", + }, + }, + }, + { + desc: "map string field level 2", + element: &struct { + Foo *struct { + Fii map[string]string `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii map[string]string `description:"fii description"` + }{ + Fii: map[string]string{ + MapNamePlaceholder: "", + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "Fii", + Description: "fii description", + FieldName: "Fii", + Kind: reflect.Map, + Tag: `description:"fii description"`, + Children: []*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.String, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.fii.", + Description: "fii description", + Default: "", + }, + }, + }, + { + desc: "map string pointer field level 2", + element: &struct { + Foo *struct { + Fii map[string]*string `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii map[string]*string `description:"fii description"` + }{ + Fii: map[string]*string{ + MapNamePlaceholder: func(v string) *string { return &v }(""), + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Ptr, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "Fii", + Description: "fii description", + FieldName: "Fii", + Kind: reflect.Map, + Tag: `description:"fii description"`, + Children: []*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.Ptr, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.fii.", + Description: "fii description", + Default: "", + }, + }, + }, + { + desc: "map struct level 1", + element: &struct { + Foo map[string]struct { + Field string `description:"field description"` + Yo int `description:"yo description"` + } `description:"foo description"` + }{}, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Map, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.Struct, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + { + Name: "Yo", + Description: "yo description", + FieldName: "Yo", + Value: "0", + Kind: reflect.Int, + Tag: `description:"yo description"`, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..field", 
+ Description: "field description", + Default: "", + }, + { + Name: "foo..yo", + Description: "yo description", + Default: "0", + }, + }, + }, + { + desc: "map struct pointer level 1", + element: &struct { + Foo map[string]*struct { + Field string `description:"field description"` + Yo string `description:"yo description"` + } `description:"foo description"` + }{}, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Map, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Kind: reflect.String, + Tag: `description:"field description"`, + }, + { + Name: "Yo", + Description: "yo description", + FieldName: "Yo", + Kind: reflect.String, + Tag: `description:"yo description"`, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..field", + Description: "field description", + Default: "", + }, + { + Name: "foo..yo", + Description: "yo description", + Default: "", + }, + }, + }, + { + desc: "time duration field", + element: &struct { + Field time.Duration `description:"field description"` + }{ + Field: 1 * time.Second, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "1000000000", + Kind: reflect.Int64, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "1s", + }}, + }, + { + desc: "time duration field map", + element: &struct { + Foo map[string]*struct { + Field time.Duration `description:"field description"` + } `description:"foo description"` + }{ + Foo: map[string]*struct { + Field time.Duration `description:"field description"` + }{ + "": { + Field: 0, + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Map, + Tag: `description:"foo description"`, + Children: []*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "0", + Kind: reflect.Int64, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..field", + Description: "field description", + Default: "0s", + }, + }, + }, + { + desc: "time duration field map 2", + element: &struct { + Foo map[string]*struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + } `description:"foo description"` + }{ + Foo: map[string]*struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + }{ + "": { + Fii: &struct { + Field time.Duration `description:"field description"` + }{ + Field: 0, + }, + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + Description: "foo description", + FieldName: "Foo", + Kind: reflect.Map, + Tag: `description:"foo description"`, + Children: 
[]*Node{ + { + Name: "\u003cname\u003e", + FieldName: "\u003cname\u003e", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Fii", + FieldName: "Fii", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "0", + Kind: reflect.Int64, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{ + { + Name: "foo.", + Description: "foo description", + Default: "false", + }, + { + Name: "foo..fii.field", + Description: "field description", + Default: "0s", + }, + }, + }, + { + desc: "time duration field 2", + element: &struct { + Foo *struct { + Field time.Duration `description:"field description"` + } + }{ + Foo: &struct { + Field time.Duration `description:"field description"` + }{ + Field: 1 * time.Second, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + FieldName: "Foo", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "1000000000", + Kind: reflect.Int64, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + expected: []Flat{{ + Name: "foo.field", + Description: "field description", + Default: "1s", + }}, + }, + { + desc: "time duration field 3", + element: &struct { + Foo *struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + } + }{ + Foo: &struct { + Fii *struct { + Field time.Duration `description:"field description"` + } + }{ + Fii: &struct { + Field time.Duration `description:"field description"` + }{ + Field: 1 * time.Second, + }, + }, + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Foo", + FieldName: "Foo", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Fii", + FieldName: "Fii", + Kind: reflect.Ptr, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "1000000000", + Kind: reflect.Int64, + Tag: `description:"field description"`, + }, + }, + }, + }, + }, + }, + }, + expected: []Flat{{ + Name: "foo.fii.field", + Description: "field description", + Default: "1s", + }}, + }, + { + desc: "time duration field", + element: &struct { + Field types.Duration `description:"field description"` + }{ + Field: types.Duration(180 * time.Second), + }, + node: &Node{ + Name: "traefik", + FieldName: "", + Kind: reflect.Struct, + Children: []*Node{ + { + Name: "Field", + Description: "field description", + FieldName: "Field", + Value: "180000000000", + Kind: reflect.Int64, + Tag: `description:"field description"`, + }, + }, + }, + expected: []Flat{{ + Name: "field", + Description: "field description", + Default: "180", + }}, + }, + { + desc: "slice of struct", + element: &struct { + Foo *struct { + Fii []struct { + Field1 string `description:"field1 description"` + Field2 int `description:"field2 description"` + } `description:"fii description"` + } `description:"foo description"` + }{ + Foo: &struct { + Fii []struct { + Field1 string `description:"field1 description"` + Field2 int `description:"field2 description"` + } `description:"fii description"` + }{ + Fii: []struct { + Field1 string `description:"field1 description"` + Field2 int `description:"field2 description"` + }{ + { + Field1: "", + Field2: 0, + }, + }, + }, + }, + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", Kind: reflect.Ptr, Description: "foo 
description", Children: []*Node{ + {Name: "Fii", Kind: reflect.Slice, Description: "fii description", Children: []*Node{ + {Name: "[0]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", Value: "", Kind: reflect.String, Description: "field1 description"}, + {Name: "Field2", Value: "0", Kind: reflect.Int, Description: "field2 description"}, + }}, + }}, + }}, + }, + }, + expected: []Flat{ + { + Name: "foo.fii", + Description: "fii description", + Default: "", + }, + { + Name: "foo.fii[0].field1", + Description: "field1 description", + Default: "", + }, + { + Name: "foo.fii[0].field2", + Description: "field2 description", + Default: "0", + }, + }, + }, + // Skipped: because realistically not needed in Traefik for now. + // { + // desc: "map of map field level 2", + // element: &struct { + // Foo *struct { + // Fii map[string]map[string]string `description:"fii description"` + // } `description:"foo description"` + // }{ + // Foo: &struct { + // Fii map[string]map[string]string `description:"fii description"` + // }{ + // Fii: map[string]map[string]string{ + // MapNamePlaceholder: { + // MapNamePlaceholder: "test", + // }, + // }, + // }, + // }, + // expected: `XXX`, + // }, + } + + for _, test := range testCases { + test := test + t.Run(test.desc, func(t *testing.T) { + t.Parallel() + + var opts FlatOpts + if test.opts == nil { + opts = FlatOpts{Separator: ".", SkipRoot: true} + } else { + opts = *test.opts + } + + entries, err := EncodeToFlat(test.element, test.node, opts) + require.NoError(t, err) + + assert.Equal(t, test.expected, entries) + }) + } +} diff --git a/pkg/provider/label/internal/labels_decode.go b/pkg/config/parser/labels_decode.go similarity index 64% rename from pkg/provider/label/internal/labels_decode.go rename to pkg/config/parser/labels_decode.go index a6e6c414d..13e560314 100644 --- a/pkg/provider/label/internal/labels_decode.go +++ b/pkg/config/parser/labels_decode.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "fmt" @@ -6,42 +6,39 @@ import ( "strings" ) -// DecodeToNode Converts the labels to a node. -// labels -> nodes +const labelRoot = "traefik" + +// DecodeToNode converts the labels to a tree of nodes. +// If any filters are present, labels which do not match the filters are skipped. 
func DecodeToNode(labels map[string]string, filters ...string) (*Node, error) { - var sortedKeys []string - for key := range labels { - if len(filters) == 0 { - sortedKeys = append(sortedKeys, key) - continue - } - - for _, filter := range filters { - if len(key) >= len(filter) && strings.EqualFold(key[:len(filter)], filter) { - sortedKeys = append(sortedKeys, key) - continue - } - } - } - sort.Strings(sortedKeys) - - labelRoot := "traefik" + sortedKeys := sortKeys(labels, filters) var node *Node for i, key := range sortedKeys { split := strings.Split(key, ".") if split[0] != labelRoot { - // TODO (@ldez): error or continue return nil, fmt.Errorf("invalid label root %s", split[0]) } - labelRoot = split[0] + var parts []string + for _, v := range split { + if v[0] == '[' { + return nil, fmt.Errorf("invalid leading character '[' in field name (bracket is a slice delimiter): %s", v) + } + + if strings.HasSuffix(v, "]") && v[0] != '[' { + indexLeft := strings.Index(v, "[") + parts = append(parts, v[:indexLeft], v[indexLeft:]) + } else { + parts = append(parts, v) + } + } if i == 0 { node = &Node{} } - decodeToNode(node, split, labels[key]) + decodeToNode(node, parts, labels[key]) } return node, nil @@ -76,3 +73,23 @@ func containsNode(nodes []*Node, name string) *Node { } return nil } + +func sortKeys(labels map[string]string, filters []string) []string { + var sortedKeys []string + for key := range labels { + if len(filters) == 0 { + sortedKeys = append(sortedKeys, key) + continue + } + + for _, filter := range filters { + if len(key) >= len(filter) && strings.EqualFold(key[:len(filter)], filter) { + sortedKeys = append(sortedKeys, key) + continue + } + } + } + sort.Strings(sortedKeys) + + return sortedKeys +} diff --git a/pkg/provider/label/internal/labels_decode_test.go b/pkg/config/parser/labels_decode_test.go similarity index 82% rename from pkg/provider/label/internal/labels_decode_test.go rename to pkg/config/parser/labels_decode_test.go index fc339b1cf..a6442b2a3 100644 --- a/pkg/provider/label/internal/labels_decode_test.go +++ b/pkg/config/parser/labels_decode_test.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "encoding/json" @@ -177,6 +177,40 @@ func TestDecodeToNode(t *testing.T) { }, }}, }, + { + desc: "several entries, slice syntax", + in: map[string]string{ + "traefik.foo[0].aaa": "bar0", + "traefik.foo[0].bbb": "bur0", + "traefik.foo[1].aaa": "bar1", + "traefik.foo[1].bbb": "bur1", + }, + expected: expected{node: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "foo", Children: []*Node{ + {Name: "[0]", Children: []*Node{ + {Name: "aaa", Value: "bar0"}, + {Name: "bbb", Value: "bur0"}, + }}, + {Name: "[1]", Children: []*Node{ + {Name: "aaa", Value: "bar1"}, + {Name: "bbb", Value: "bur1"}, + }}, + }}, + }, + }}, + }, + { + desc: "several entries, invalid slice syntax", + in: map[string]string{ + "traefik.foo.[0].aaa": "bar0", + "traefik.foo.[0].bbb": "bur0", + "traefik.foo.[1].aaa": "bar1", + "traefik.foo.[1].bbb": "bur1", + }, + expected: expected{error: true}, + }, } for _, test := range testCases { diff --git a/pkg/provider/label/internal/labels_encode.go b/pkg/config/parser/labels_encode.go similarity index 80% rename from pkg/provider/label/internal/labels_encode.go rename to pkg/config/parser/labels_encode.go index c55cf130c..e2b353119 100644 --- a/pkg/provider/label/internal/labels_encode.go +++ b/pkg/config/parser/labels_encode.go @@ -1,4 +1,4 @@ -package internal +package parser // EncodeNode Converts a node to labels. 
// nodes -> labels @@ -14,7 +14,13 @@ func encodeNode(labels map[string]string, root string, node *Node) { continue } - childName := root + "." + child.Name + var sep string + if child.Name[0] != '[' { + sep = "." + } + + childName := root + sep + child.Name + if len(child.Children) > 0 { encodeNode(labels, childName, child) } else if len(child.Name) > 0 { diff --git a/pkg/provider/label/internal/labels_encode_test.go b/pkg/config/parser/labels_encode_test.go similarity index 83% rename from pkg/provider/label/internal/labels_encode_test.go rename to pkg/config/parser/labels_encode_test.go index 6961c808b..cc8fa6930 100644 --- a/pkg/provider/label/internal/labels_encode_test.go +++ b/pkg/config/parser/labels_encode_test.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "testing" @@ -141,6 +141,30 @@ func TestEncodeNode(t *testing.T) { "traefik.bar.ccc": "bir", }, }, + { + desc: "slice of struct syntax", + node: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "foo", Children: []*Node{ + {Name: "[0]", Children: []*Node{ + {Name: "aaa", Value: "bar0"}, + {Name: "bbb", Value: "bur0"}, + }}, + {Name: "[1]", Children: []*Node{ + {Name: "aaa", Value: "bar1"}, + {Name: "bbb", Value: "bur1"}, + }}, + }}, + }, + }, + expected: map[string]string{ + "traefik.foo[0].aaa": "bar0", + "traefik.foo[0].bbb": "bur0", + "traefik.foo[1].aaa": "bar1", + "traefik.foo[1].bbb": "bur1", + }, + }, } for _, test := range testCases { diff --git a/pkg/config/parser/node.go b/pkg/config/parser/node.go new file mode 100644 index 000000000..f756a0f07 --- /dev/null +++ b/pkg/config/parser/node.go @@ -0,0 +1,18 @@ +package parser + +import "reflect" + +// MapNamePlaceholder is the placeholder for the map name. +const MapNamePlaceholder = "" + +// Node is a label node. +type Node struct { + Name string `json:"name"` + Description string `json:"description,omitempty"` + FieldName string `json:"fieldName"` + Value string `json:"value,omitempty"` + Disabled bool `json:"disabled,omitempty"` + Kind reflect.Kind `json:"kind,omitempty"` + Tag reflect.StructTag `json:"tag,omitempty"` + Children []*Node `json:"children,omitempty"` +} diff --git a/pkg/provider/label/internal/nodes_metadata.go b/pkg/config/parser/nodes_metadata.go similarity index 70% rename from pkg/provider/label/internal/nodes_metadata.go rename to pkg/config/parser/nodes_metadata.go index 5770c2ffd..170145caf 100644 --- a/pkg/provider/label/internal/nodes_metadata.go +++ b/pkg/config/parser/nodes_metadata.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "errors" @@ -7,9 +7,8 @@ import ( "strings" ) -// AddMetadata Adds metadata to a node. -// nodes + element -> nodes -func AddMetadata(structure interface{}, node *Node) error { +// AddMetadata adds metadata such as type, inferred from element, to a node. 
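A short sketch of the metadata step, assuming the AddMetadata signature below: a raw tree (names and string values only) is matched against a typed element so that Kind, FieldName, and Tag get filled in; the element type is illustrative:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

func main() {
	// Raw tree, as DecodeToNode would produce it: no type information yet.
	node := &parser.Node{
		Name: "traefik",
		Children: []*parser.Node{
			{Name: "foo", Value: "6"},
		},
	}

	// Typed element the tree is matched against (field matching is case-insensitive).
	element := struct {
		Foo *int `description:"foo description"`
	}{}

	if err := parser.AddMetadata(element, node); err != nil {
		panic(err)
	}

	child := node.Children[0]
	// child.Kind is now reflect.Ptr, child.FieldName is "Foo", and the struct tag is attached.
	fmt.Println(child.Kind, child.FieldName, child.Tag.Get("description"))
}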
+func AddMetadata(element interface{}, node *Node) error { if node == nil { return nil } @@ -18,16 +17,25 @@ func AddMetadata(structure interface{}, node *Node) error { return fmt.Errorf("invalid node %s: no child", node.Name) } - if structure == nil { + if element == nil { return errors.New("nil structure") } - rootType := reflect.TypeOf(structure) + rootType := reflect.TypeOf(element) node.Kind = rootType.Kind() return browseChildren(rootType, node) } +func browseChildren(fType reflect.Type, node *Node) error { + for _, child := range node.Children { + if err := addMetadata(fType, child); err != nil { + return err + } + } + return nil +} + func addMetadata(rootType reflect.Type, node *Node) error { rType := rootType if rootType.Kind() == reflect.Ptr { @@ -45,14 +53,15 @@ func addMetadata(rootType reflect.Type, node *Node) error { fType := field.Type node.Kind = fType.Kind() + node.Tag = field.Tag if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct || fType.Kind() == reflect.Map { - if len(node.Children) == 0 && field.Tag.Get(TagLabel) != "allowEmpty" { - return fmt.Errorf("node %s (type %s) must have children", node.Name, fType) + if len(node.Children) == 0 && field.Tag.Get(TagLabel) != TagLabelAllowEmpty { + return fmt.Errorf("%s cannot be a standalone element (type %s)", node.Name, fType) } - node.Disabled = len(node.Value) > 0 && !strings.EqualFold(node.Value, "true") && field.Tag.Get(TagLabel) == "allowEmpty" + node.Disabled = len(node.Value) > 0 && !strings.EqualFold(node.Value, "true") && field.Tag.Get(TagLabel) == TagLabelAllowEmpty } if len(node.Children) == 0 { @@ -79,9 +88,18 @@ func addMetadata(rootType reflect.Type, node *Node) error { return nil } - // only for struct/Ptr with label-slice-as-struct tag if fType.Kind() == reflect.Slice { - return browseChildren(fType.Elem(), node) + if field.Tag.Get(TagLabelSliceAsStruct) != "" { + return browseChildren(fType.Elem(), node) + } + + for _, ch := range node.Children { + ch.Kind = fType.Elem().Kind() + if err = browseChildren(fType.Elem(), ch); err != nil { + return err + } + } + return nil } return fmt.Errorf("invalid node %s: %v", node.Name, fType.Kind()) @@ -96,31 +114,32 @@ func findTypedField(rType reflect.Type, node *Node) (reflect.StructField, error) fieldName = cField.Name } - if isExported(cField) && strings.EqualFold(fieldName, node.Name) { - node.FieldName = cField.Name - return cField, nil + if IsExported(cField) { + if cField.Anonymous { + if cField.Type.Kind() == reflect.Struct { + structField, err := findTypedField(cField.Type, node) + if err != nil { + continue + } + return structField, nil + } + } + + if strings.EqualFold(fieldName, node.Name) { + node.FieldName = cField.Name + return cField, nil + } } + } return reflect.StructField{}, fmt.Errorf("field not found, node: %s", node.Name) } -func browseChildren(fType reflect.Type, node *Node) error { - for _, child := range node.Children { - if err := addMetadata(fType, child); err != nil { - return err - } - } - return nil -} - -// isExported return true is a struct field is exported, else false +// IsExported reports whether f is exported. 
// https://golang.org/pkg/reflect/#StructField -func isExported(f reflect.StructField) bool { - if f.PkgPath != "" && !f.Anonymous { - return false - } - return true +func IsExported(f reflect.StructField) bool { + return f.PkgPath == "" } func isSupportedType(field reflect.StructField) error { @@ -142,20 +161,15 @@ func isSupportedType(field reflect.StructField) error { reflect.Uint64, reflect.Uintptr, reflect.Float32, - reflect.Float64: + reflect.Float64, + reflect.Struct, + reflect.Ptr: return nil default: - if len(field.Tag.Get(TagLabelSliceAsStruct)) > 0 { - return nil - } return fmt.Errorf("unsupported slice type: %v", fType) } } - if fType.Kind() == reflect.Ptr && fType.Elem().Kind() != reflect.Struct { - return fmt.Errorf("unsupported pointer type: %v", fType.Elem()) - } - if fType.Kind() == reflect.Map && fType.Key().Kind() != reflect.String { return fmt.Errorf("unsupported map key type: %v", fType.Key()) } diff --git a/pkg/provider/label/internal/nodes_metadata_test.go b/pkg/config/parser/nodes_metadata_test.go similarity index 72% rename from pkg/provider/label/internal/nodes_metadata_test.go rename to pkg/config/parser/nodes_metadata_test.go index d1b0f2ff5..74c09cb74 100644 --- a/pkg/provider/label/internal/nodes_metadata_test.go +++ b/pkg/config/parser/nodes_metadata_test.go @@ -1,4 +1,4 @@ -package internal +package parser import ( "encoding/json" @@ -122,19 +122,6 @@ func TestAddMetadata(t *testing.T) { structure: struct{ Foo interf }{}, expected: expected{error: true}, }, - { - desc: "level 1, slice struct", - tree: &Node{ - Name: "traefik", - Children: []*Node{ - {Name: "Foo", Value: "1,2"}, - }, - }, - structure: struct { - Foo []struct{ Foo string } - }{}, - expected: expected{error: true}, - }, { desc: "level 1, map string", tree: &Node{ @@ -217,7 +204,57 @@ func TestAddMetadata(t *testing.T) { structure: struct { Foo *int }{}, - expected: expected{error: true}, + expected: expected{ + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "0", Kind: reflect.Ptr}, + }, + }, + }, + }, + { + desc: "level 1, bool pointer", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "Foo", Value: "0"}, + }, + }, + structure: struct { + Foo *bool + }{}, + expected: expected{ + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "0", Kind: reflect.Ptr}, + }, + }, + }, + }, + { + desc: "level 1, string pointer", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "Foo", Value: "0"}, + }, + }, + structure: struct { + Foo *string + }{}, + expected: expected{ + node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Value: "0", Kind: reflect.Ptr}, + }, + }, + }, }, { desc: "level 1, 2 children with different types", @@ -385,6 +422,7 @@ func TestAddMetadata(t *testing.T) { Name: "Fii", FieldName: "Foo", Kind: reflect.Slice, + Tag: reflect.StructTag(`label-slice-as-struct:"Fii"`), Children: []*Node{ {Name: "bar", FieldName: "Bar", Kind: reflect.String, Value: "haa"}, {Name: "bir", FieldName: "Bir", Kind: reflect.String, Value: "hii"}, @@ -420,6 +458,7 @@ func TestAddMetadata(t *testing.T) { Name: "Fii", FieldName: "Foo", Kind: reflect.Slice, + Tag: reflect.StructTag(`label-slice-as-struct:"Fii"`), }, }, }}, @@ -446,7 +485,7 @@ func TestAddMetadata(t *testing.T) { Name: "traefik", Kind: reflect.Struct, Children: []*Node{ - {Name: "Foo", FieldName: "Foo", Value: "true", Kind: 
reflect.Struct}, + {Name: "Foo", FieldName: "Foo", Value: "true", Kind: reflect.Struct, Tag: reflect.StructTag(`label:"allowEmpty"`)}, }, }, }, @@ -473,7 +512,7 @@ func TestAddMetadata(t *testing.T) { Name: "traefik", Kind: reflect.Struct, Children: []*Node{ - {Name: "Foo", FieldName: "Foo", Value: "TruE", Kind: reflect.Struct}, + {Name: "Foo", FieldName: "Foo", Value: "TruE", Kind: reflect.Struct, Tag: reflect.StructTag(`label:"allowEmpty"`)}, }, }, }, @@ -500,7 +539,7 @@ func TestAddMetadata(t *testing.T) { Name: "traefik", Kind: reflect.Struct, Children: []*Node{ - {Name: "Foo", FieldName: "Foo", Value: "false", Disabled: true, Kind: reflect.Struct}, + {Name: "Foo", FieldName: "Foo", Value: "false", Disabled: true, Kind: reflect.Struct, Tag: reflect.StructTag(`label:"allowEmpty"`)}, }, }, }, @@ -535,6 +574,7 @@ func TestAddMetadata(t *testing.T) { Value: "false", Disabled: true, Kind: reflect.Struct, + Tag: reflect.StructTag(`label:"allowEmpty"`), Children: []*Node{ {Name: "Bar", FieldName: "Bar", Value: "hii", Kind: reflect.String}, }, @@ -777,6 +817,173 @@ func TestAddMetadata(t *testing.T) { }, }, }, + { + desc: "Slice struct", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "Foo", Children: []*Node{ + {Name: "[0]", Children: []*Node{ + {Name: "Field1", Value: "A"}, + {Name: "Field2", Value: "A"}, + }}, + {Name: "[1]", Children: []*Node{ + {Name: "Field1", Value: "B"}, + {Name: "Field2", Value: "B"}, + }}, + {Name: "[2]", Children: []*Node{ + {Name: "Field1", Value: "C"}, + {Name: "Field2", Value: "C"}, + }}, + }}, + }, + }, + structure: struct { + Foo []struct { + Field1 string + Field2 string + } + }{}, + expected: expected{node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Kind: reflect.Slice, Children: []*Node{ + {Name: "[0]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "A", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "A", Kind: reflect.String}, + }}, + {Name: "[1]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "B", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "B", Kind: reflect.String}, + }}, + {Name: "[2]", Kind: reflect.Struct, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "C", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "C", Kind: reflect.String}, + }}, + }}, + }, + }}, + }, + { + desc: "Slice pointer struct", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "Foo", Children: []*Node{ + {Name: "[0]", Children: []*Node{ + {Name: "Field1", Value: "A"}, + {Name: "Field2", Value: "A"}, + }}, + {Name: "[1]", Children: []*Node{ + {Name: "Field1", Value: "B"}, + {Name: "Field2", Value: "B"}, + }}, + {Name: "[2]", Children: []*Node{ + {Name: "Field1", Value: "C"}, + {Name: "Field2", Value: "C"}, + }}, + }}, + }, + }, + structure: struct { + Foo []*struct { + Field1 string + Field2 string + } + }{}, + expected: expected{node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Kind: reflect.Slice, Children: []*Node{ + {Name: "[0]", Kind: reflect.Ptr, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "A", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "A", Kind: reflect.String}, + }}, + {Name: "[1]", Kind: reflect.Ptr, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "B", Kind: reflect.String}, + {Name: 
"Field2", FieldName: "Field2", Value: "B", Kind: reflect.String}, + }}, + {Name: "[2]", Kind: reflect.Ptr, Children: []*Node{ + {Name: "Field1", FieldName: "Field1", Value: "C", Kind: reflect.String}, + {Name: "Field2", FieldName: "Field2", Value: "C", Kind: reflect.String}, + }}, + }}, + }, + }}, + }, + { + desc: "embedded", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "Foo", Children: []*Node{ + {Name: "Fii", Value: "bir"}, + {Name: "Fuu", Value: "bur"}, + }}, + }, + }, + structure: struct { + Foo struct { + FiiFoo + } + }{}, + expected: expected{node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Kind: reflect.Struct, Children: []*Node{ + {Name: "Fii", FieldName: "Fii", Value: "bir", Kind: reflect.String}, + {Name: "Fuu", FieldName: "Fuu", Value: "bur", Kind: reflect.String}, + }}, + }, + }}, + }, + { + desc: "embedded slice", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "MySliceType", Value: "foo,fii"}, + }, + }, + structure: struct { + MySliceType + }{}, + expected: expected{node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "MySliceType", FieldName: "MySliceType", Value: "foo,fii", Kind: reflect.Slice}, + }, + }}, + }, + { + desc: "embedded slice 2", + tree: &Node{ + Name: "traefik", + Children: []*Node{ + {Name: "Foo", Children: []*Node{ + {Name: "MySliceType", Value: "foo,fii"}, + }}, + }, + }, + structure: struct { + Foo struct { + MySliceType + } + }{}, + expected: expected{node: &Node{ + Name: "traefik", + Kind: reflect.Struct, + Children: []*Node{ + {Name: "Foo", FieldName: "Foo", Kind: reflect.Struct, Children: []*Node{ + {Name: "MySliceType", FieldName: "MySliceType", Value: "foo,fii", Kind: reflect.Slice}, + }}, + }, + }}, + }, } for _, test := range testCases { @@ -800,3 +1007,5 @@ func TestAddMetadata(t *testing.T) { }) } } + +type MySliceType []string diff --git a/pkg/config/parser/parser.go b/pkg/config/parser/parser.go new file mode 100644 index 000000000..e806ecc89 --- /dev/null +++ b/pkg/config/parser/parser.go @@ -0,0 +1,38 @@ +// Package parser implements decoding and encoding between a flat map of labels and a typed Configuration. +package parser + +// Decode decodes the given map of labels into the given element. +// If any filters are present, labels which do not match the filters are skipped. +// The operation goes through three stages roughly summarized as: +// labels -> tree of untyped nodes +// untyped nodes -> nodes augmented with metadata such as kind (inferred from element) +// "typed" nodes -> typed element +func Decode(labels map[string]string, element interface{}, filters ...string) error { + node, err := DecodeToNode(labels, filters...) + if err != nil { + return err + } + + err = AddMetadata(element, node) + if err != nil { + return err + } + + err = Fill(element, node) + if err != nil { + return err + } + + return nil +} + +// Encode converts an element to labels. +// element -> node (value) -> label (node) +func Encode(element interface{}) (map[string]string, error) { + node, err := EncodeToNode(element, true) + if err != nil { + return nil, err + } + + return EncodeNode(node), nil +} diff --git a/pkg/config/parser/tags.go b/pkg/config/parser/tags.go new file mode 100644 index 000000000..3860b9665 --- /dev/null +++ b/pkg/config/parser/tags.go @@ -0,0 +1,18 @@ +package parser + +const ( + // TagLabel allows to apply a custom behavior. + // - "allowEmpty": allows to create an empty struct. + // - "-": ignore the field. 
+ TagLabel = "label" + + // TagLabelSliceAsStruct allows to use a slice of struct by creating one entry into the slice. + // The value is the substitution name used in the label to access the slice. + TagLabelSliceAsStruct = "label-slice-as-struct" + + // TagDescription is the documentation for the field. + TagDescription = "description" + + // TagLabelAllowEmpty is related to TagLabel. + TagLabelAllowEmpty = "allowEmpty" +) diff --git a/pkg/config/static/entrypoints.go b/pkg/config/static/entrypoints.go index cbc2d7fba..3cfaac2dd 100644 --- a/pkg/config/static/entrypoints.go +++ b/pkg/config/static/entrypoints.go @@ -1,30 +1,30 @@ package static -import ( - "fmt" - "strings" - - "github.com/containous/traefik/pkg/log" -) - // EntryPoint holds the entry point configuration. type EntryPoint struct { - Address string - Transport *EntryPointsTransport - ProxyProtocol *ProxyProtocol - ForwardedHeaders *ForwardedHeaders + Address string `description:"Entry point address."` + Transport *EntryPointsTransport `description:"Configures communication between clients and Traefik."` + ProxyProtocol *ProxyProtocol `description:"Proxy-Protocol configuration." label:"allowEmpty"` + ForwardedHeaders *ForwardedHeaders `description:"Trust client forwarding headers."` +} + +// SetDefaults sets the default values. +func (e *EntryPoint) SetDefaults() { + e.Transport = &EntryPointsTransport{} + e.Transport.SetDefaults() + e.ForwardedHeaders = &ForwardedHeaders{} } // ForwardedHeaders Trust client forwarding headers. type ForwardedHeaders struct { - Insecure bool - TrustedIPs []string + Insecure bool `description:"Trust all forwarded headers." export:"true"` + TrustedIPs []string `description:"Trust only forwarded headers from selected IPs."` } // ProxyProtocol contains Proxy-Protocol configuration. type ProxyProtocol struct { - Insecure bool `export:"true"` - TrustedIPs []string + Insecure bool `description:"Trust all." export:"true"` + TrustedIPs []string `description:"Trust only selected IPs."` } // EntryPoints holds the HTTP entry point list. @@ -32,103 +32,14 @@ type EntryPoints map[string]*EntryPoint // EntryPointsTransport configures communication between clients and Traefik. type EntryPointsTransport struct { - LifeCycle *LifeCycle `description:"Timeouts influencing the server life cycle" export:"true"` - RespondingTimeouts *RespondingTimeouts `description:"Timeouts for incoming requests to the Traefik instance" export:"true"` + LifeCycle *LifeCycle `description:"Timeouts influencing the server life cycle." export:"true"` + RespondingTimeouts *RespondingTimeouts `description:"Timeouts for incoming requests to the Traefik instance." export:"true"` } -// String is the method to format the flag's value, part of the flag.Value interface. -// The String method's output will be used in diagnostics. -func (ep EntryPoints) String() string { - return fmt.Sprintf("%+v", map[string]*EntryPoint(ep)) -} - -// Get return the EntryPoints map. -func (ep *EntryPoints) Get() interface{} { - return *ep -} - -// SetValue sets the EntryPoints map with val. -func (ep *EntryPoints) SetValue(val interface{}) { - *ep = val.(EntryPoints) -} - -// Type is type of the struct. -func (ep *EntryPoints) Type() string { - return "entrypoints" -} - -// Set is the method to set the flag value, part of the flag.Value interface. -// Set's argument is a string to be parsed to set the flag. -// It's a comma-separated list, so we split it. 
-func (ep *EntryPoints) Set(value string) error { - result := parseEntryPointsConfiguration(value) - - (*ep)[result["name"]] = &EntryPoint{ - Address: result["address"], - ProxyProtocol: makeEntryPointProxyProtocol(result), - ForwardedHeaders: makeEntryPointForwardedHeaders(result), - } - - return nil -} - -func makeEntryPointProxyProtocol(result map[string]string) *ProxyProtocol { - var proxyProtocol *ProxyProtocol - - ppTrustedIPs := result["proxyprotocol_trustedips"] - if len(result["proxyprotocol_insecure"]) > 0 || len(ppTrustedIPs) > 0 { - proxyProtocol = &ProxyProtocol{ - Insecure: toBool(result, "proxyprotocol_insecure"), - } - if len(ppTrustedIPs) > 0 { - proxyProtocol.TrustedIPs = strings.Split(ppTrustedIPs, ",") - } - } - - if proxyProtocol != nil && proxyProtocol.Insecure { - log.Warn("ProxyProtocol.insecure:true is dangerous. Please use 'ProxyProtocol.TrustedIPs:IPs' and remove 'ProxyProtocol.insecure:true'") - } - - return proxyProtocol -} - -func parseEntryPointsConfiguration(raw string) map[string]string { - sections := strings.Fields(raw) - - config := make(map[string]string) - for _, part := range sections { - field := strings.SplitN(part, ":", 2) - name := strings.ToLower(strings.Replace(field[0], ".", "_", -1)) - if len(field) > 1 { - config[name] = field[1] - } else { - if strings.EqualFold(name, "TLS") { - config["tls_acme"] = "TLS" - } else { - config[name] = "" - } - } - } - return config -} - -func toBool(conf map[string]string, key string) bool { - if val, ok := conf[key]; ok { - return strings.EqualFold(val, "true") || - strings.EqualFold(val, "enable") || - strings.EqualFold(val, "on") - } - return false -} - -func makeEntryPointForwardedHeaders(result map[string]string) *ForwardedHeaders { - forwardedHeaders := &ForwardedHeaders{} - forwardedHeaders.Insecure = toBool(result, "forwardedheaders_insecure") - - fhTrustedIPs := result["forwardedheaders_trustedips"] - if len(fhTrustedIPs) > 0 { - forwardedHeaders.TrustedIPs = strings.Split(fhTrustedIPs, ",") - } - - return forwardedHeaders +// SetDefaults sets the default values. 
+func (t *EntryPointsTransport) SetDefaults() { + t.LifeCycle = &LifeCycle{} + t.LifeCycle.SetDefaults() + t.RespondingTimeouts = &RespondingTimeouts{} + t.RespondingTimeouts.SetDefaults() } diff --git a/pkg/config/static/entrypoints_test.go b/pkg/config/static/entrypoints_test.go deleted file mode 100644 index 5901f86d4..000000000 --- a/pkg/config/static/entrypoints_test.go +++ /dev/null @@ -1,257 +0,0 @@ -package static - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func Test_parseEntryPointsConfiguration(t *testing.T) { - testCases := []struct { - name string - value string - expectedResult map[string]string - }{ - { - name: "all parameters", - value: "Name:foo " + - "Address::8000 " + - "CA:car " + - "CA.Optional:true " + - "Redirect.EntryPoint:https " + - "Redirect.Regex:http://localhost/(.*) " + - "Redirect.Replacement:http://mydomain/$1 " + - "Redirect.Permanent:true " + - "Compress:true " + - "ProxyProtocol.TrustedIPs:192.168.0.1 " + - "ForwardedHeaders.TrustedIPs:10.0.0.3/24,20.0.0.3/24 " + - "Auth.Basic.Realm:myRealm " + - "Auth.Basic.Users:test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/,test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0 " + - "Auth.Basic.RemoveHeader:true " + - "Auth.Digest.Users:test:traefik:a2688e031edb4be6a3797f3882655c05,test2:traefik:518845800f9e2bfb1f1f740ec24f074e " + - "Auth.Digest.RemoveHeader:true " + - "Auth.HeaderField:X-WebAuth-User " + - "Auth.Forward.Address:https://authserver.com/auth " + - "Auth.Forward.AuthResponseHeaders:X-Auth,X-Test,X-Secret " + - "Auth.Forward.TrustForwardHeader:true " + - "Auth.Forward.TLS.CA:path/to/local.crt " + - "Auth.Forward.TLS.CAOptional:true " + - "Auth.Forward.TLS.Cert:path/to/foo.cert " + - "Auth.Forward.TLS.Key:path/to/foo.key " + - "Auth.Forward.TLS.InsecureSkipVerify:true " + - "WhiteList.SourceRange:10.42.0.0/16,152.89.1.33/32,afed:be44::/16 " + - "WhiteList.IPStrategy.depth:3 " + - "WhiteList.IPStrategy.ExcludedIPs:10.0.0.3/24,20.0.0.3/24 " + - "ClientIPStrategy.depth:3 " + - "ClientIPStrategy.ExcludedIPs:10.0.0.3/24,20.0.0.3/24 ", - expectedResult: map[string]string{ - "address": ":8000", - "auth_basic_realm": "myRealm", - "auth_basic_users": "test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/,test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0", - "auth_basic_removeheader": "true", - "auth_digest_users": "test:traefik:a2688e031edb4be6a3797f3882655c05,test2:traefik:518845800f9e2bfb1f1f740ec24f074e", - "auth_digest_removeheader": "true", - "auth_forward_address": "https://authserver.com/auth", - "auth_forward_authresponseheaders": "X-Auth,X-Test,X-Secret", - "auth_forward_tls_ca": "path/to/local.crt", - "auth_forward_tls_caoptional": "true", - "auth_forward_tls_cert": "path/to/foo.cert", - "auth_forward_tls_insecureskipverify": "true", - "auth_forward_tls_key": "path/to/foo.key", - "auth_forward_trustforwardheader": "true", - "auth_headerfield": "X-WebAuth-User", - "ca": "car", - "ca_optional": "true", - "compress": "true", - "forwardedheaders_trustedips": "10.0.0.3/24,20.0.0.3/24", - "name": "foo", - "proxyprotocol_trustedips": "192.168.0.1", - "redirect_entrypoint": "https", - "redirect_permanent": "true", - "redirect_regex": "http://localhost/(.*)", - "redirect_replacement": "http://mydomain/$1", - "whitelist_sourcerange": "10.42.0.0/16,152.89.1.33/32,afed:be44::/16", - "whitelist_ipstrategy_depth": "3", - "whitelist_ipstrategy_excludedips": "10.0.0.3/24,20.0.0.3/24", - "clientipstrategy_depth": "3", - "clientipstrategy_excludedips": "10.0.0.3/24,20.0.0.3/24", - }, - }, 
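For contrast with the removed expression-based parser, a small sketch of how an entry point is now built and defaulted programmatically; the names and addresses are illustrative:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/static"
)

func main() {
	// An entry point is now a plain struct; defaults are applied explicitly.
	ep := &static.EntryPoint{Address: ":8000"}
	ep.SetDefaults() // fills Transport (LifeCycle, RespondingTimeouts) and ForwardedHeaders

	// Optional sections are regular nested structs rather than "key:value" tokens.
	ep.ProxyProtocol = &static.ProxyProtocol{TrustedIPs: []string{"192.168.0.1"}}

	eps := static.EntryPoints{"web": ep}
	fmt.Println(eps["web"].Address, eps["web"].Transport != nil) // :8000 true
}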
- { - name: "compress on", - value: "name:foo Compress:on", - expectedResult: map[string]string{ - "name": "foo", - "compress": "on", - }, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.name, func(t *testing.T) { - t.Parallel() - - conf := parseEntryPointsConfiguration(test.value) - - assert.Len(t, conf, len(test.expectedResult)) - assert.Equal(t, test.expectedResult, conf) - }) - } -} - -func Test_toBool(t *testing.T) { - testCases := []struct { - name string - value string - key string - expectedBool bool - }{ - { - name: "on", - value: "on", - key: "foo", - expectedBool: true, - }, - { - name: "true", - value: "true", - key: "foo", - expectedBool: true, - }, - { - name: "enable", - value: "enable", - key: "foo", - expectedBool: true, - }, - { - name: "arbitrary string", - value: "bar", - key: "foo", - expectedBool: false, - }, - { - name: "no existing entry", - value: "bar", - key: "fii", - expectedBool: false, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.name, func(t *testing.T) { - t.Parallel() - - conf := map[string]string{ - "foo": test.value, - } - - result := toBool(conf, test.key) - - assert.Equal(t, test.expectedBool, result) - }) - } -} - -func TestEntryPoints_Set(t *testing.T) { - testCases := []struct { - name string - expression string - expectedEntryPointName string - expectedEntryPoint *EntryPoint - }{ - { - name: "all parameters camelcase", - expression: "Name:foo " + - "Address::8000 " + - "CA:car " + - "CA.Optional:true " + - "ProxyProtocol.TrustedIPs:192.168.0.1 ", - expectedEntryPointName: "foo", - expectedEntryPoint: &EntryPoint{ - Address: ":8000", - ProxyProtocol: &ProxyProtocol{ - Insecure: false, - TrustedIPs: []string{"192.168.0.1"}, - }, - ForwardedHeaders: &ForwardedHeaders{}, - // FIXME Test ServersTransport - }, - }, - { - name: "all parameters lowercase", - expression: "Name:foo " + - "address::8000 " + - "tls " + - "tls.minversion:VersionTLS11 " + - "tls.ciphersuites:TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA " + - "ca:car " + - "ca.Optional:true " + - "proxyProtocol.TrustedIPs:192.168.0.1 ", - expectedEntryPointName: "foo", - expectedEntryPoint: &EntryPoint{ - Address: ":8000", - ProxyProtocol: &ProxyProtocol{ - Insecure: false, - TrustedIPs: []string{"192.168.0.1"}, - }, - ForwardedHeaders: &ForwardedHeaders{}, - // FIXME Test ServersTransport - }, - }, - { - name: "default", - expression: "Name:foo", - expectedEntryPointName: "foo", - expectedEntryPoint: &EntryPoint{ - ForwardedHeaders: &ForwardedHeaders{}, - }, - }, - { - name: "ProxyProtocol insecure true", - expression: "Name:foo ProxyProtocol.insecure:true", - expectedEntryPointName: "foo", - expectedEntryPoint: &EntryPoint{ - ProxyProtocol: &ProxyProtocol{Insecure: true}, - ForwardedHeaders: &ForwardedHeaders{}, - }, - }, - { - name: "ProxyProtocol insecure false", - expression: "Name:foo ProxyProtocol.insecure:false", - expectedEntryPointName: "foo", - expectedEntryPoint: &EntryPoint{ - ProxyProtocol: &ProxyProtocol{}, - ForwardedHeaders: &ForwardedHeaders{}, - }, - }, - { - name: "ProxyProtocol TrustedIPs", - expression: "Name:foo ProxyProtocol.TrustedIPs:10.0.0.3/24,20.0.0.3/24", - expectedEntryPointName: "foo", - expectedEntryPoint: &EntryPoint{ - ProxyProtocol: &ProxyProtocol{ - TrustedIPs: []string{"10.0.0.3/24", "20.0.0.3/24"}, - }, - ForwardedHeaders: &ForwardedHeaders{}, - }, - }, - } - - for _, 
test := range testCases { - test := test - t.Run(test.name, func(t *testing.T) { - t.Parallel() - - eps := EntryPoints{} - err := eps.Set(test.expression) - require.NoError(t, err) - - ep := eps[test.expectedEntryPointName] - assert.EqualValues(t, test.expectedEntryPoint, ep) - }) - } -} diff --git a/pkg/config/static/static_config.go b/pkg/config/static/static_config.go index 0d0cdc640..9f7b08476 100644 --- a/pkg/config/static/static_config.go +++ b/pkg/config/static/static_config.go @@ -5,7 +5,6 @@ import ( "strings" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/ping" acmeprovider "github.com/containous/traefik/pkg/provider/acme" @@ -47,99 +46,127 @@ const ( type Configuration struct { Global *Global `description:"Global configuration options" export:"true"` - ServersTransport *ServersTransport `description:"Servers default transport" export:"true"` - EntryPoints EntryPoints `description:"Entry points definition using format: --entryPoints='Name:http Address::8000' --entryPoints='Name:https Address::4442'" export:"true"` - Providers *Providers `description:"Providers configuration" export:"true"` + ServersTransport *ServersTransport `description:"Servers default transport." export:"true"` + EntryPoints EntryPoints `description:"Entry points definition." export:"true"` + Providers *Providers `description:"Providers configuration." export:"true"` - API *API `description:"Enable api/dashboard" export:"true"` - Metrics *types.Metrics `description:"Enable a metrics exporter" export:"true"` - Ping *ping.Handler `description:"Enable ping" export:"true"` + API *API `description:"Enable api/dashboard." export:"true" label:"allowEmpty"` + Metrics *types.Metrics `description:"Enable a metrics exporter." export:"true"` + Ping *ping.Handler `description:"Enable ping." export:"true" label:"allowEmpty"` // Rest *rest.Provider `description:"Enable Rest backend with default settings" export:"true"` - Log *types.TraefikLog `description:"Traefik log settings" export:"true"` - AccessLog *types.AccessLog `description:"Access log settings" export:"true"` - Tracing *Tracing `description:"OpenTracing configuration" export:"true"` + Log *types.TraefikLog `description:"Traefik log settings." export:"true"` + AccessLog *types.AccessLog `description:"Access log settings." export:"true" label:"allowEmpty"` + Tracing *Tracing `description:"OpenTracing configuration." export:"true" label:"allowEmpty"` - HostResolver *types.HostResolverConfig `description:"Enable CNAME Flattening" export:"true"` + HostResolver *types.HostResolverConfig `description:"Enable CNAME Flattening." export:"true" label:"allowEmpty"` - ACME *acmeprovider.Configuration `description:"Enable ACME (Let's Encrypt): automatic SSL" export:"true"` + ACME *acmeprovider.Configuration `description:"Enable ACME (Let's Encrypt): automatic SSL." export:"true"` } // Global holds the global configuration. type Global struct { - Debug bool `short:"d" description:"Enable debug mode" export:"true"` - CheckNewVersion bool `description:"Periodically check if a new version has been released" export:"true"` - SendAnonymousUsage *bool `description:"send periodically anonymous usage statistics" export:"true"` + Debug bool `description:"Enable debug mode." export:"true"` + CheckNewVersion bool `description:"Periodically check if a new version has been released." export:"true"` + SendAnonymousUsage *bool `description:"Periodically send anonymous usage statistics. 
If the option is not specified, it will be enabled by default." export:"true"` } // ServersTransport options to configure communication between Traefik and the servers type ServersTransport struct { - InsecureSkipVerify bool `description:"Disable SSL certificate verification" export:"true"` - RootCAs tls.FilesOrContents `description:"Add cert file for self-signed certificate"` - MaxIdleConnsPerHost int `description:"If non-zero, controls the maximum idle (keep-alive) to keep per-host. If zero, DefaultMaxIdleConnsPerHost is used" export:"true"` - ForwardingTimeouts *ForwardingTimeouts `description:"Timeouts for requests forwarded to the backend servers" export:"true"` + InsecureSkipVerify bool `description:"Disable SSL certificate verification." export:"true"` + RootCAs []tls.FileOrContent `description:"Add cert file for self-signed certificate."` + MaxIdleConnsPerHost int `description:"If non-zero, controls the maximum idle (keep-alive) connections to keep per-host. If zero, DefaultMaxIdleConnsPerHost is used." export:"true"` + ForwardingTimeouts *ForwardingTimeouts `description:"Timeouts for requests forwarded to the backend servers." export:"true"` } // API holds the API configuration type API struct { - EntryPoint string `description:"EntryPoint" export:"true"` - Dashboard bool `description:"Activate dashboard" export:"true"` - Statistics *types.Statistics `description:"Enable more detailed statistics" export:"true"` - Middlewares []string `description:"Middleware list" export:"true"` - DashboardAssets *assetfs.AssetFS `json:"-"` + EntryPoint string `description:"EntryPoint." export:"true"` + Dashboard bool `description:"Activate dashboard." export:"true"` + Statistics *types.Statistics `description:"Enable more detailed statistics." export:"true" label:"allowEmpty"` + Middlewares []string `description:"Middleware list." export:"true"` + DashboardAssets *assetfs.AssetFS `json:"-" label:"-"` +} + +// SetDefaults sets the default values. +func (a *API) SetDefaults() { + a.EntryPoint = "traefik" + a.Dashboard = true } // RespondingTimeouts contains timeout configurations for incoming requests to the Traefik instance. type RespondingTimeouts struct { - ReadTimeout parse.Duration `description:"ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set" export:"true"` - WriteTimeout parse.Duration `description:"WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set" export:"true"` - IdleTimeout parse.Duration `description:"IdleTimeout is the maximum amount duration an idle (keep-alive) connection will remain idle before closing itself. Defaults to 180 seconds. If zero, no timeout is set" export:"true"` + ReadTimeout types.Duration `description:"ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set." export:"true"` + WriteTimeout types.Duration `description:"WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set." export:"true"` + IdleTimeout types.Duration `description:"IdleTimeout is the maximum duration an idle (keep-alive) connection will remain idle before closing itself. If zero, no timeout is set." export:"true"` +} + +// SetDefaults sets the default values. +func (a *RespondingTimeouts) SetDefaults() { + a.IdleTimeout = types.Duration(DefaultIdleTimeout) } // ForwardingTimeouts contains timeout configurations for forwarding requests to the backend servers.
type ForwardingTimeouts struct { - DialTimeout parse.Duration `description:"The amount of time to wait until a connection to a backend server can be established. Defaults to 30 seconds. If zero, no timeout exists" export:"true"` - ResponseHeaderTimeout parse.Duration `description:"The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists" export:"true"` + DialTimeout types.Duration `description:"The amount of time to wait until a connection to a backend server can be established. If zero, no timeout exists." export:"true"` + ResponseHeaderTimeout types.Duration `description:"The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists." export:"true"` +} + +// SetDefaults sets the default values. +func (f *ForwardingTimeouts) SetDefaults() { + f.DialTimeout = types.Duration(30 * time.Second) } // LifeCycle contains configurations relevant to the lifecycle (such as the shutdown phase) of Traefik. type LifeCycle struct { - RequestAcceptGraceTimeout parse.Duration `description:"Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure"` - GraceTimeOut parse.Duration `description:"Duration to give active requests a chance to finish before Traefik stops"` + RequestAcceptGraceTimeout types.Duration `description:"Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure."` + GraceTimeOut types.Duration `description:"Duration to give active requests a chance to finish before Traefik stops."` +} + +// SetDefaults sets the default values. +func (a *LifeCycle) SetDefaults() { + a.GraceTimeOut = types.Duration(DefaultGraceTimeout) } // Tracing holds the tracing configuration. type Tracing struct { Backend string `description:"Selects the tracking backend ('jaeger','zipkin','datadog','instana')." export:"true"` - ServiceName string `description:"Set the name for this service" export:"true"` - SpanNameLimit int `description:"Set the maximum character limit for Span names (default 0 = no limit)" export:"true"` - Jaeger *jaeger.Config `description:"Settings for jaeger"` - Zipkin *zipkin.Config `description:"Settings for zipkin"` - DataDog *datadog.Config `description:"Settings for DataDog"` - Instana *instana.Config `description:"Settings for Instana"` - Haystack *haystack.Config `description:"Settings for Haystack"` + ServiceName string `description:"Set the name for this service." export:"true"` + SpanNameLimit int `description:"Set the maximum character limit for Span names (default 0 = no limit)." export:"true"` + Jaeger *jaeger.Config `description:"Settings for jaeger." label:"allowEmpty"` + Zipkin *zipkin.Config `description:"Settings for zipkin." label:"allowEmpty"` + DataDog *datadog.Config `description:"Settings for DataDog." label:"allowEmpty"` + Instana *instana.Config `description:"Settings for Instana." label:"allowEmpty"` + Haystack *haystack.Config `description:"Settings for Haystack." label:"allowEmpty"` +} + +// SetDefaults sets the default values. +func (t *Tracing) SetDefaults() { + t.Backend = "jaeger" + t.ServiceName = "traefik" + t.SpanNameLimit = 0 } // Providers contains providers configuration type Providers struct { - ProvidersThrottleDuration parse.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. 
It avoids unnecessary reloads if multiples events are sent in a short amount of time." export:"true"` - Docker *docker.Provider `description:"Enable Docker backend with default settings" export:"true"` - File *file.Provider `description:"Enable File backend with default settings" export:"true"` - Marathon *marathon.Provider `description:"Enable Marathon backend with default settings" export:"true"` - Kubernetes *ingress.Provider `description:"Enable Kubernetes backend with default settings" export:"true"` - KubernetesCRD *crd.Provider `description:"Enable Kubernetes backend with default settings" export:"true"` - Rest *rest.Provider `description:"Enable Rest backend with default settings" export:"true"` - Rancher *rancher.Provider `description:"Enable Rancher backend with default settings" export:"true"` + ProvidersThrottleDuration types.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiple events are sent in a short amount of time." export:"true"` + Docker *docker.Provider `description:"Enable Docker backend with default settings." export:"true" label:"allowEmpty"` + File *file.Provider `description:"Enable File backend with default settings." export:"true" label:"allowEmpty"` + Marathon *marathon.Provider `description:"Enable Marathon backend with default settings." export:"true" label:"allowEmpty"` + Kubernetes *ingress.Provider `description:"Enable Kubernetes backend with default settings." export:"true" label:"allowEmpty"` + KubernetesCRD *crd.Provider `description:"Enable Kubernetes CRD backend with default settings." export:"true" label:"allowEmpty"` + Rest *rest.Provider `description:"Enable Rest backend with default settings." export:"true" label:"allowEmpty"` + Rancher *rancher.Provider `description:"Enable Rancher backend with default settings." export:"true" label:"allowEmpty"` } // SetEffectiveConfiguration adds missing configuration parameters derived from existing ones. // It also takes care of maintaining backwards compatibility. func (c *Configuration) SetEffectiveConfiguration(configFile string) { if len(c.EntryPoints) == 0 { + ep := &EntryPoint{Address: ":80"} + ep.SetDefaults() c.EntryPoints = EntryPoints{ - "http": &EntryPoint{ - Address: ":80", - }, + "http": ep, } } @@ -148,33 +175,15 @@ func (c *Configuration) SetEffectiveConfiguration(configFile string) { (c.Metrics != nil && c.Metrics.Prometheus != nil && c.Metrics.Prometheus.EntryPoint == DefaultInternalEntryPointName) || (c.Providers.Rest != nil && c.Providers.Rest.EntryPoint == DefaultInternalEntryPointName) { if _, ok := c.EntryPoints[DefaultInternalEntryPointName]; !ok { - c.EntryPoints[DefaultInternalEntryPointName] = &EntryPoint{Address: ":8080"} - } - } - - for _, entryPoint := range c.EntryPoints { - if entryPoint.Transport == nil { - entryPoint.Transport = &EntryPointsTransport{} - } - - // Make sure LifeCycle isn't nil to spare nil checks elsewhere.
- if entryPoint.Transport.LifeCycle == nil { - entryPoint.Transport.LifeCycle = &LifeCycle{ - GraceTimeOut: parse.Duration(DefaultGraceTimeout), - } - entryPoint.Transport.RespondingTimeouts = &RespondingTimeouts{ - IdleTimeout: parse.Duration(DefaultIdleTimeout), - } - } - - if entryPoint.ForwardedHeaders == nil { - entryPoint.ForwardedHeaders = &ForwardedHeaders{} + ep := &EntryPoint{Address: ":8080"} + ep.SetDefaults() + c.EntryPoints[DefaultInternalEntryPointName] = ep } } if c.Providers.Docker != nil { if c.Providers.Docker.SwarmModeRefreshSeconds <= 0 { - c.Providers.Docker.SwarmModeRefreshSeconds = 15 + c.Providers.Docker.SwarmModeRefreshSeconds = types.Duration(15 * time.Second) } } diff --git a/pkg/metrics/datadog.go b/pkg/metrics/datadog.go index 8642f95a4..e2118cead 100644 --- a/pkg/metrics/datadog.go +++ b/pkg/metrics/datadog.go @@ -64,13 +64,8 @@ func initDatadogClient(ctx context.Context, config *types.Datadog) *time.Ticker if len(address) == 0 { address = "localhost:8125" } - pushInterval, err := time.ParseDuration(config.PushInterval) - if err != nil { - log.FromContext(ctx).Warnf("Unable to parse %s from config.PushInterval: using 10s as the default value", config.PushInterval) - pushInterval = 10 * time.Second - } - report := time.NewTicker(pushInterval) + report := time.NewTicker(time.Duration(config.PushInterval)) safe.Go(func() { datadogClient.SendLoop(report.C, "udp", address) diff --git a/pkg/metrics/datadog_test.go b/pkg/metrics/datadog_test.go index dde19d035..e45976c13 100644 --- a/pkg/metrics/datadog_test.go +++ b/pkg/metrics/datadog_test.go @@ -16,7 +16,7 @@ func TestDatadog(t *testing.T) { // This is needed to make sure that UDP Listener listens for data a bit longer, otherwise it will quit after a millisecond udp.Timeout = 5 * time.Second - datadogRegistry := RegisterDatadog(context.Background(), &types.Datadog{Address: ":18125", PushInterval: "1s"}) + datadogRegistry := RegisterDatadog(context.Background(), &types.Datadog{Address: ":18125", PushInterval: types.Duration(time.Second)}) defer StopDatadog() if !datadogRegistry.IsEnabled() { diff --git a/pkg/metrics/influxdb.go b/pkg/metrics/influxdb.go index dfda872f8..795158033 100644 --- a/pkg/metrics/influxdb.go +++ b/pkg/metrics/influxdb.go @@ -51,7 +51,7 @@ func RegisterInfluxDB(ctx context.Context, config *types.InfluxDB) Registry { influxDBClient = initInfluxDBClient(ctx, config) } if influxDBTicker == nil { - influxDBTicker = initInfluxDBTicker(ctx, config) + influxDBTicker = initInfluxDBTicker(config) } return &standardRegistry{ @@ -115,14 +115,8 @@ func initInfluxDBClient(ctx context.Context, config *types.InfluxDB) *influx.Inf } // initInfluxDBTicker initializes metrics pusher -func initInfluxDBTicker(ctx context.Context, config *types.InfluxDB) *time.Ticker { - pushInterval, err := time.ParseDuration(config.PushInterval) - if err != nil { - log.FromContext(ctx).Warnf("Unable to parse %s from config.PushInterval: using 10s as the default value", config.PushInterval) - pushInterval = 10 * time.Second - } - - report := time.NewTicker(pushInterval) +func initInfluxDBTicker(config *types.InfluxDB) *time.Ticker { + report := time.NewTicker(time.Duration(config.PushInterval)) safe.Go(func() { var buf bytes.Buffer diff --git a/pkg/metrics/influxdb_test.go b/pkg/metrics/influxdb_test.go index 2eda36036..7c2fb1bed 100644 --- a/pkg/metrics/influxdb_test.go +++ b/pkg/metrics/influxdb_test.go @@ -20,7 +20,7 @@ func TestInfluxDB(t *testing.T) { // This is needed to make sure that UDP Listener listens for data a bit 
longer, otherwise it will quit after a millisecond udp.Timeout = 5 * time.Second - influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ":8089", PushInterval: "1s"}) + influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ":8089", PushInterval: types.Duration(time.Second)}) defer StopInfluxDB() if !influxDBRegistry.IsEnabled() { @@ -80,7 +80,7 @@ func TestInfluxDBHTTP(t *testing.T) { })) defer ts.Close() - influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ts.URL, Protocol: "http", PushInterval: "1s", Database: "test", RetentionPolicy: "autogen"}) + influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ts.URL, Protocol: "http", PushInterval: types.Duration(time.Second), Database: "test", RetentionPolicy: "autogen"}) defer StopInfluxDB() if !influxDBRegistry.IsEnabled() { diff --git a/pkg/metrics/statsd.go b/pkg/metrics/statsd.go index 4da010cca..5d1647edc 100644 --- a/pkg/metrics/statsd.go +++ b/pkg/metrics/statsd.go @@ -62,13 +62,8 @@ func initStatsdTicker(ctx context.Context, config *types.Statsd) *time.Ticker { if len(address) == 0 { address = "localhost:8125" } - pushInterval, err := time.ParseDuration(config.PushInterval) - if err != nil { - log.FromContext(ctx).Warnf("Unable to parse %s from config.PushInterval: using 10s as the default value", config.PushInterval) - pushInterval = 10 * time.Second - } - report := time.NewTicker(pushInterval) + report := time.NewTicker(time.Duration(config.PushInterval)) safe.Go(func() { statsdClient.SendLoop(report.C, "udp", address) diff --git a/pkg/metrics/statsd_test.go b/pkg/metrics/statsd_test.go index 0158f2b44..4b4552b26 100644 --- a/pkg/metrics/statsd_test.go +++ b/pkg/metrics/statsd_test.go @@ -15,7 +15,7 @@ func TestStatsD(t *testing.T) { // This is needed to make sure that UDP Listener listens for data a bit longer, otherwise it will quit after a millisecond udp.Timeout = 5 * time.Second - statsdRegistry := RegisterStatsd(context.Background(), &types.Statsd{Address: ":18125", PushInterval: "1s"}) + statsdRegistry := RegisterStatsd(context.Background(), &types.Statsd{Address: ":18125", PushInterval: types.Duration(time.Second)}) defer StopStatsd() if !statsdRegistry.IsEnabled() { diff --git a/pkg/middlewares/accesslog/logger.go b/pkg/middlewares/accesslog/logger.go index de184d1f8..a07945ed7 100644 --- a/pkg/middlewares/accesslog/logger.go +++ b/pkg/middlewares/accesslog/logger.go @@ -13,7 +13,6 @@ import ( "time" "github.com/containous/alice" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/types" "github.com/sirupsen/logrus" @@ -330,7 +329,7 @@ func (h *Handler) keepAccessLog(statusCode, retryAttempts int, duration time.Dur return true } - if h.config.Filters.MinDuration > 0 && (parse.Duration(duration) > h.config.Filters.MinDuration) { + if h.config.Filters.MinDuration > 0 && (types.Duration(duration) > h.config.Filters.MinDuration) { return true } diff --git a/pkg/middlewares/accesslog/logger_test.go b/pkg/middlewares/accesslog/logger_test.go index 2bfbd9224..db11adb2a 100644 --- a/pkg/middlewares/accesslog/logger_test.go +++ b/pkg/middlewares/accesslog/logger_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -287,12 +286,12 @@ func TestLoggerJSON(t *testing.T) { Format: JSONFormat, Fields: 
&types.AccessLogFields{ DefaultMode: "drop", - Names: types.FieldNames{ + Names: map[string]string{ RequestHost: "keep", }, Headers: &types.FieldHeaders{ DefaultMode: "drop", - Names: types.FieldHeaderNames{ + Names: map[string]string{ "Referer": "keep", }, }, @@ -388,7 +387,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{ - MinDuration: parse.Duration(1 * time.Hour), + MinDuration: types.Duration(1 * time.Hour), }, }, expectedLog: ``, @@ -399,7 +398,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{ - MinDuration: parse.Duration(1 * time.Millisecond), + MinDuration: types.Duration(1 * time.Millisecond), }, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testRouter" "http://127.0.0.1/testService" 1ms`, @@ -433,7 +432,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "keep", - Names: types.FieldNames{ + Names: map[string]string{ ClientHost: "drop", }, }, @@ -458,7 +457,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", - Names: types.FieldNames{ + Names: map[string]string{ ClientHost: "drop", ClientUsername: "keep", }, @@ -473,7 +472,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", - Names: types.FieldNames{ + Names: map[string]string{ ClientHost: "drop", ClientUsername: "keep", }, @@ -491,7 +490,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", - Names: types.FieldNames{ + Names: map[string]string{ ClientHost: "drop", ClientUsername: "keep", }, @@ -509,13 +508,13 @@ func TestNewLogHandlerOutputStdout(t *testing.T) { Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", - Names: types.FieldNames{ + Names: map[string]string{ ClientHost: "drop", ClientUsername: "keep", }, Headers: &types.FieldHeaders{ DefaultMode: "keep", - Names: types.FieldHeaderNames{ + Names: map[string]string{ "Referer": "redact", }, }, diff --git a/pkg/ping/ping.go b/pkg/ping/ping.go index c36a0446a..0daf65499 100644 --- a/pkg/ping/ping.go +++ b/pkg/ping/ping.go @@ -10,11 +10,16 @@ import ( // Handler expose ping routes. type Handler struct { - EntryPoint string `description:"Ping entryPoint" export:"true"` - Middlewares []string `description:"Middleware list" export:"true"` + EntryPoint string `description:"Ping entryPoint." export:"true"` + Middlewares []string `description:"Middleware list." export:"true"` terminating bool } +// SetDefaults sets the default values. +func (h *Handler) SetDefaults() { + h.EntryPoint = "traefik" +} + // WithContext causes the ping endpoint to serve non 200 responses. 
func (h *Handler) WithContext(ctx context.Context) { go func() { diff --git a/pkg/provider/acme/provider.go b/pkg/provider/acme/provider.go index a0284b56f..8cdfd58b9 100644 --- a/pkg/provider/acme/provider.go +++ b/pkg/provider/acme/provider.go @@ -14,7 +14,6 @@ import ( "sync" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/rules" @@ -39,17 +38,24 @@ var ( // Configuration holds ACME configuration provided by users type Configuration struct { - Email string `description:"Email address used for registration"` + Email string `description:"Email address used for registration."` ACMELogging bool `description:"Enable debug logging of ACME actions."` CAServer string `description:"CA server to use."` Storage string `description:"Storage to use."` EntryPoint string `description:"EntryPoint to use."` - KeyType string `description:"KeyType used for generating certificate private key. Allow value 'EC256', 'EC384', 'RSA2048', 'RSA4096', 'RSA8192'. Default to 'RSA4096'"` - OnHostRule bool `description:"Enable certificate generation on frontends Host rules."` - DNSChallenge *DNSChallenge `description:"Activate DNS-01 Challenge"` - HTTPChallenge *HTTPChallenge `description:"Activate HTTP-01 Challenge"` - TLSChallenge *TLSChallenge `description:"Activate TLS-ALPN-01 Challenge"` - Domains []types.Domain `description:"CN and SANs (alternative domains) to each main domain using format: --acme.domains='main.com,san1.com,san2.com' --acme.domains='*.main.net'. Wildcard domains only accepted with DNSChallenge"` + KeyType string `description:"KeyType used for generating certificate private key. Allowed values: 'EC256', 'EC384', 'RSA2048', 'RSA4096', 'RSA8192'."` + OnHostRule bool `description:"Enable certificate generation on router Host rules."` + DNSChallenge *DNSChallenge `description:"Activate DNS-01 Challenge." label:"allowEmpty"` + HTTPChallenge *HTTPChallenge `description:"Activate HTTP-01 Challenge." label:"allowEmpty"` + TLSChallenge *TLSChallenge `description:"Activate TLS-ALPN-01 Challenge." label:"allowEmpty"` + Domains []types.Domain `description:"The list of domains for which certificates are generated on startup. Wildcard domains are only accepted with DNSChallenge."` +} + +// SetDefaults sets the default values. +func (a *Configuration) SetDefaults() { + a.CAServer = lego.LEDirectoryProduction + a.Storage = "acme.json" + a.KeyType = "RSA4096" } // Certificate is a struct which contains all data needed from an ACME certificate @@ -61,10 +67,10 @@ type Certificate struct { // DNSChallenge contains DNS challenge Configuration type DNSChallenge struct { - Provider string `description:"Use a DNS-01 based challenge provider rather than HTTPS."` - DelayBeforeCheck parse.Duration `description:"Assume DNS propagates after a delay in seconds rather than finding and querying nameservers."` - Resolvers types.DNSResolvers `description:"Use following DNS servers to resolve the FQDN authority."` - DisablePropagationCheck bool `description:"Disable the DNS propagation checks before notifying ACME that the DNS challenge is ready.
[not recommended]"` + Provider string `description:"Use a DNS-01 based challenge provider rather than HTTPS."` + DelayBeforeCheck types.Duration `description:"Assume DNS propagates after a delay in seconds rather than finding and querying nameservers."` + Resolvers []string `description:"Use following DNS servers to resolve the FQDN authority."` + DisablePropagationCheck bool `description:"Disable the DNS propagation checks before notifying ACME that the DNS challenge is ready. [not recommended]"` } // HTTPChallenge contains HTTP challenge Configuration @@ -239,7 +245,7 @@ func (p *Provider) getClient() (*lego.Client, error) { logger.Debug("Building ACME client...") - caServer := "https://acme-v02.api.letsencrypt.org/directory" + caServer := lego.LEDirectoryProduction if len(p.CAServer) > 0 { caServer = p.CAServer } diff --git a/pkg/provider/constrainer.go b/pkg/provider/constrainer.go index 44a276c51..2afe1abcd 100644 --- a/pkg/provider/constrainer.go +++ b/pkg/provider/constrainer.go @@ -4,7 +4,7 @@ import "github.com/containous/traefik/pkg/types" // Constrainer Filter services by constraint, matching with Traefik tags. type Constrainer struct { - Constraints types.Constraints `description:"Filter services by constraint, matching with Traefik tags." export:"true"` + Constraints []*types.Constraint `description:"Filter services by constraint, matching with Traefik tags." export:"true"` } // MatchConstraints must match with EVERY single constraint diff --git a/pkg/provider/docker/config.go b/pkg/provider/docker/config.go index 4967c3d04..c8cf95d3f 100644 --- a/pkg/provider/docker/config.go +++ b/pkg/provider/docker/config.go @@ -8,9 +8,9 @@ import ( "strings" "github.com/containous/traefik/pkg/config" + "github.com/containous/traefik/pkg/config/label" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/provider" - "github.com/containous/traefik/pkg/provider/label" "github.com/docker/go-connections/nat" ) diff --git a/pkg/provider/docker/config_test.go b/pkg/provider/docker/config_test.go index 945b79dbb..484aa510b 100644 --- a/pkg/provider/docker/config_test.go +++ b/pkg/provider/docker/config_test.go @@ -339,7 +339,7 @@ func Test_buildConfiguration(t *testing.T) { testCases := []struct { desc string containers []dockerData - constraints types.Constraints + constraints []*types.Constraint expected *config.Configuration }{ { @@ -1924,11 +1924,11 @@ func Test_buildConfiguration(t *testing.T) { }, }, }, - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "bar", + Value: "bar", }, }, expected: &config.Configuration{ @@ -1965,11 +1965,11 @@ func Test_buildConfiguration(t *testing.T) { }, }, }, - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "foo", + Value: "foo", }, }, expected: &config.Configuration{ diff --git a/pkg/provider/docker/docker.go b/pkg/provider/docker/docker.go index d2bd3e684..09bea10ce 100644 --- a/pkg/provider/docker/docker.go +++ b/pkg/provider/docker/docker.go @@ -45,19 +45,29 @@ var _ provider.Provider = (*Provider)(nil) // Provider holds configurations of the provider. type Provider struct { - provider.Constrainer `mapstructure:",squash" export:"true"` - Watch bool `description:"Watch provider" export:"true"` - Endpoint string `description:"Docker server endpoint. 
Can be a tcp or a unix socket endpoint"` - DefaultRule string `description:"Default rule"` - TLS *types.ClientTLS `description:"Enable Docker TLS support" export:"true"` - ExposedByDefault bool `description:"Expose containers by default" export:"true"` - UseBindPortIP bool `description:"Use the ip address from the bound port, rather than from the inner network" export:"true"` - SwarmMode bool `description:"Use Docker on Swarm Mode" export:"true"` - Network string `description:"Default Docker network used" export:"true"` - SwarmModeRefreshSeconds int `description:"Polling interval for swarm mode (in seconds)" export:"true"` + provider.Constrainer `description:"List of constraints used to filter out some containers." export:"true"` + Watch bool `description:"Watch provider." export:"true"` + Endpoint string `description:"Docker server endpoint. Can be a tcp or a unix socket endpoint."` + DefaultRule string `description:"Default rule."` + TLS *types.ClientTLS `description:"Enable Docker TLS support." export:"true"` + ExposedByDefault bool `description:"Expose containers by default." export:"true"` + UseBindPortIP bool `description:"Use the ip address from the bound port, rather than from the inner network." export:"true"` + SwarmMode bool `description:"Use Docker on Swarm Mode." export:"true"` + Network string `description:"Default Docker network used." export:"true"` + SwarmModeRefreshSeconds types.Duration `description:"Polling interval for swarm mode." export:"true"` defaultRuleTpl *template.Template } +// SetDefaults sets the default values. +func (p *Provider) SetDefaults() { + p.Watch = true + p.ExposedByDefault = true + p.Endpoint = "unix:///var/run/docker.sock" + p.SwarmMode = false + p.SwarmModeRefreshSeconds = types.Duration(15 * time.Second) + p.DefaultRule = DefaultTemplateRule +} + // Init the provider. func (p *Provider) Init() error { defaultRuleTpl, err := provider.MakeDefaultRuleTemplate(p.DefaultRule, nil) @@ -184,7 +194,7 @@ func (p *Provider) Provide(configurationChan chan<- config.Message, pool *safe.P if p.SwarmMode { errChan := make(chan error) // TODO: This need to be change. Linked to Swarm events docker/docker#23827 - ticker := time.NewTicker(time.Second * time.Duration(p.SwarmModeRefreshSeconds)) + ticker := time.NewTicker(time.Duration(p.SwarmModeRefreshSeconds)) pool.GoCtx(func(ctx context.Context) { ctx = log.With(ctx, log.Str(log.ProviderName, "docker")) diff --git a/pkg/provider/docker/label.go b/pkg/provider/docker/label.go index 061139163..99d8fb349 100644 --- a/pkg/provider/docker/label.go +++ b/pkg/provider/docker/label.go @@ -3,7 +3,7 @@ package docker import ( "fmt" - "github.com/containous/traefik/pkg/provider/label" + "github.com/containous/traefik/pkg/config/label" ) const ( diff --git a/pkg/provider/file/file.go b/pkg/provider/file/file.go index c226e43d3..48c525032 100644 --- a/pkg/provider/file/file.go +++ b/pkg/provider/file/file.go @@ -28,11 +28,17 @@ var _ provider.Provider = (*Provider)(nil) // Provider holds configurations of the provider. type Provider struct { - Directory string `description:"Load configuration from one or more .toml files in a directory" export:"true"` - Watch bool `description:"Watch provider" export:"true"` + Directory string `description:"Load configuration from one or more .toml files in a directory." export:"true"` + Watch bool `description:"Watch provider." export:"true"` Filename string `description:"Override default configuration template. 
For advanced users :)" export:"true"` DebugLogGeneratedTemplate bool `description:"Enable debug logging of generated configuration template." export:"true"` - TraefikFile string + TraefikFile string `description:"-"` +} + +// SetDefaults sets the default values. +func (p *Provider) SetDefaults() { + p.Watch = true + p.Filename = "" } // Init the provider diff --git a/pkg/provider/file/file_test.go b/pkg/provider/file/file_test.go index 96d70f8a0..95bbb26eb 100644 --- a/pkg/provider/file/file_test.go +++ b/pkg/provider/file/file_test.go @@ -224,7 +224,7 @@ func createProvider(t *testing.T, test ProvideTestCase, watch bool) (*Provider, tempDir := createTempDir(t, "testdir") provider := &Provider{} - provider.Watch = watch + provider.Watch = true if len(test.directoryContent) > 0 { if !watch { diff --git a/pkg/provider/kubernetes/crd/client.go b/pkg/provider/kubernetes/crd/client.go index 3e5f393e7..de37b2446 100644 --- a/pkg/provider/kubernetes/crd/client.go +++ b/pkg/provider/kubernetes/crd/client.go @@ -10,7 +10,6 @@ import ( "github.com/containous/traefik/pkg/provider/kubernetes/crd/generated/clientset/versioned" "github.com/containous/traefik/pkg/provider/kubernetes/crd/generated/informers/externalversions" "github.com/containous/traefik/pkg/provider/kubernetes/crd/traefik/v1alpha1" - "github.com/containous/traefik/pkg/provider/kubernetes/k8s" corev1 "k8s.io/api/core/v1" extensionsv1beta1 "k8s.io/api/extensions/v1beta1" kubeerror "k8s.io/apimachinery/pkg/api/errors" @@ -45,7 +44,7 @@ func (reh *resourceEventHandler) OnDelete(obj interface{}) { // WatchAll starts the watch of the Provider resources and updates the stores. // The stores can then be accessed via the Get* functions. type Client interface { - WatchAll(namespaces k8s.Namespaces, stopCh <-chan struct{}) (<-chan interface{}, error) + WatchAll(namespaces []string, stopCh <-chan struct{}) (<-chan interface{}, error) GetIngressRoutes() []*v1alpha1.IngressRoute GetIngressRouteTCPs() []*v1alpha1.IngressRouteTCP @@ -69,7 +68,7 @@ type clientWrapper struct { labelSelector labels.Selector isNamespaceAll bool - watchedNamespaces k8s.Namespaces + watchedNamespaces []string } func createClientFromConfig(c *rest.Config) (*clientWrapper, error) { @@ -144,12 +143,12 @@ func newExternalClusterClient(endpoint, token, caFilePath string) (*clientWrappe } // WatchAll starts namespace-specific controllers for all relevant kinds. 
-func (c *clientWrapper) WatchAll(namespaces k8s.Namespaces, stopCh <-chan struct{}) (<-chan interface{}, error) { +func (c *clientWrapper) WatchAll(namespaces []string, stopCh <-chan struct{}) (<-chan interface{}, error) { eventCh := make(chan interface{}, 1) eventHandler := c.newResourceEventHandler(eventCh) if len(namespaces) == 0 { - namespaces = k8s.Namespaces{metav1.NamespaceAll} + namespaces = []string{metav1.NamespaceAll} c.isNamespaceAll = true } c.watchedNamespaces = namespaces diff --git a/pkg/provider/kubernetes/crd/client_mock_test.go b/pkg/provider/kubernetes/crd/client_mock_test.go index 625975702..70ff381fe 100644 --- a/pkg/provider/kubernetes/crd/client_mock_test.go +++ b/pkg/provider/kubernetes/crd/client_mock_test.go @@ -132,7 +132,7 @@ func (c clientMock) GetSecret(namespace, name string) (*corev1.Secret, bool, err return nil, false, nil } -func (c clientMock) WatchAll(namespaces k8s.Namespaces, stopCh <-chan struct{}) (<-chan interface{}, error) { +func (c clientMock) WatchAll(namespaces []string, stopCh <-chan struct{}) (<-chan interface{}, error) { return c.watchChan, nil } diff --git a/pkg/provider/kubernetes/crd/kubernetes.go b/pkg/provider/kubernetes/crd/kubernetes.go index 9e9d8bd08..389a0485e 100644 --- a/pkg/provider/kubernetes/crd/kubernetes.go +++ b/pkg/provider/kubernetes/crd/kubernetes.go @@ -18,7 +18,6 @@ import ( "github.com/containous/traefik/pkg/job" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/provider/kubernetes/crd/traefik/v1alpha1" - "github.com/containous/traefik/pkg/provider/kubernetes/k8s" "github.com/containous/traefik/pkg/safe" "github.com/containous/traefik/pkg/tls" corev1 "k8s.io/api/core/v1" @@ -32,13 +31,13 @@ const ( // Provider holds configurations of the provider. type Provider struct { - Endpoint string `description:"Kubernetes server endpoint (required for external cluster client)"` - Token string `description:"Kubernetes bearer token (not needed for in-cluster client)"` - CertAuthFilePath string `description:"Kubernetes certificate authority file path (not needed for in-cluster client)"` - DisablePassHostHeaders bool `description:"Kubernetes disable PassHost Headers" export:"true"` - Namespaces k8s.Namespaces `description:"Kubernetes namespaces" export:"true"` - LabelSelector string `description:"Kubernetes label selector to use" export:"true"` - IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for" export:"true"` + Endpoint string `description:"Kubernetes server endpoint (required for external cluster client)."` + Token string `description:"Kubernetes bearer token (not needed for in-cluster client)."` + CertAuthFilePath string `description:"Kubernetes certificate authority file path (not needed for in-cluster client)."` + DisablePassHostHeaders bool `description:"Kubernetes disable PassHost Headers." export:"true"` + Namespaces []string `description:"Kubernetes namespaces." export:"true"` + LabelSelector string `description:"Kubernetes label selector to use." export:"true"` + IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for." 
export:"true"` lastConfiguration safe.Safe } diff --git a/pkg/provider/kubernetes/ingress/client.go b/pkg/provider/kubernetes/ingress/client.go index 61475bb1c..18b7321f1 100644 --- a/pkg/provider/kubernetes/ingress/client.go +++ b/pkg/provider/kubernetes/ingress/client.go @@ -7,7 +7,6 @@ import ( "time" "github.com/containous/traefik/pkg/log" - "github.com/containous/traefik/pkg/provider/kubernetes/k8s" corev1 "k8s.io/api/core/v1" extensionsv1beta1 "k8s.io/api/extensions/v1beta1" kubeerror "k8s.io/apimachinery/pkg/api/errors" @@ -42,7 +41,7 @@ func (reh *resourceEventHandler) OnDelete(obj interface{}) { // WatchAll starts the watch of the Provider resources and updates the stores. // The stores can then be accessed via the Get* functions. type Client interface { - WatchAll(namespaces k8s.Namespaces, stopCh <-chan struct{}) (<-chan interface{}, error) + WatchAll(namespaces []string, stopCh <-chan struct{}) (<-chan interface{}, error) GetIngresses() []*extensionsv1beta1.Ingress GetService(namespace, name string) (*corev1.Service, bool, error) GetSecret(namespace, name string) (*corev1.Secret, bool, error) @@ -55,7 +54,7 @@ type clientWrapper struct { factories map[string]informers.SharedInformerFactory ingressLabelSelector labels.Selector isNamespaceAll bool - watchedNamespaces k8s.Namespaces + watchedNamespaces []string } // newInClusterClient returns a new Provider client that is expected to run @@ -122,12 +121,12 @@ func newClientImpl(clientset *kubernetes.Clientset) *clientWrapper { } // WatchAll starts namespace-specific controllers for all relevant kinds. -func (c *clientWrapper) WatchAll(namespaces k8s.Namespaces, stopCh <-chan struct{}) (<-chan interface{}, error) { +func (c *clientWrapper) WatchAll(namespaces []string, stopCh <-chan struct{}) (<-chan interface{}, error) { eventCh := make(chan interface{}, 1) eventHandler := c.newResourceEventHandler(eventCh) if len(namespaces) == 0 { - namespaces = k8s.Namespaces{metav1.NamespaceAll} + namespaces = []string{metav1.NamespaceAll} c.isNamespaceAll = true } diff --git a/pkg/provider/kubernetes/ingress/client_mock_test.go b/pkg/provider/kubernetes/ingress/client_mock_test.go index a9c9091ca..1b5cc49cb 100644 --- a/pkg/provider/kubernetes/ingress/client_mock_test.go +++ b/pkg/provider/kubernetes/ingress/client_mock_test.go @@ -99,7 +99,7 @@ func (c clientMock) GetSecret(namespace, name string) (*corev1.Secret, bool, err return nil, false, nil } -func (c clientMock) WatchAll(namespaces k8s.Namespaces, stopCh <-chan struct{}) (<-chan interface{}, error) { +func (c clientMock) WatchAll(namespaces []string, stopCh <-chan struct{}) (<-chan interface{}, error) { return c.watchChan, nil } diff --git a/pkg/provider/kubernetes/ingress/kubernetes.go b/pkg/provider/kubernetes/ingress/kubernetes.go index 10b818ea8..b2a0095ba 100644 --- a/pkg/provider/kubernetes/ingress/kubernetes.go +++ b/pkg/provider/kubernetes/ingress/kubernetes.go @@ -17,7 +17,6 @@ import ( "github.com/containous/traefik/pkg/config" "github.com/containous/traefik/pkg/job" "github.com/containous/traefik/pkg/log" - "github.com/containous/traefik/pkg/provider/kubernetes/k8s" "github.com/containous/traefik/pkg/safe" "github.com/containous/traefik/pkg/tls" corev1 "k8s.io/api/core/v1" @@ -33,22 +32,22 @@ const ( // Provider holds configurations of the provider. 
type Provider struct { - Endpoint string `description:"Kubernetes server endpoint (required for external cluster client)"` - Token string `description:"Kubernetes bearer token (not needed for in-cluster client)"` - CertAuthFilePath string `description:"Kubernetes certificate authority file path (not needed for in-cluster client)"` - DisablePassHostHeaders bool `description:"Kubernetes disable PassHost Headers" export:"true"` - Namespaces k8s.Namespaces `description:"Kubernetes namespaces" export:"true"` - LabelSelector string `description:"Kubernetes Ingress label selector to use" export:"true"` - IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for" export:"true"` - IngressEndpoint *EndpointIngress `description:"Kubernetes Ingress Endpoint"` + Endpoint string `description:"Kubernetes server endpoint (required for external cluster client)."` + Token string `description:"Kubernetes bearer token (not needed for in-cluster client)."` + CertAuthFilePath string `description:"Kubernetes certificate authority file path (not needed for in-cluster client)."` + DisablePassHostHeaders bool `description:"Kubernetes disable PassHost Headers." export:"true"` + Namespaces []string `description:"Kubernetes namespaces." export:"true"` + LabelSelector string `description:"Kubernetes Ingress label selector to use." export:"true"` + IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for." export:"true"` + IngressEndpoint *EndpointIngress `description:"Kubernetes Ingress Endpoint."` lastConfiguration safe.Safe } // EndpointIngress holds the endpoint information for the Kubernetes provider type EndpointIngress struct { - IP string `description:"IP used for Kubernetes Ingress endpoints"` - Hostname string `description:"Hostname used for Kubernetes Ingress endpoints"` - PublishedService string `description:"Published Kubernetes Service to copy status from"` + IP string `description:"IP used for Kubernetes Ingress endpoints."` + Hostname string `description:"Hostname used for Kubernetes Ingress endpoints."` + PublishedService string `description:"Published Kubernetes Service to copy status from."` } func (p *Provider) newK8sClient(ctx context.Context, ingressLabelSelector string) (*clientWrapper, error) { diff --git a/pkg/provider/kubernetes/k8s/namespace.go b/pkg/provider/kubernetes/k8s/namespace.go deleted file mode 100644 index b06fd9eff..000000000 --- a/pkg/provider/kubernetes/k8s/namespace.go +++ /dev/null @@ -1,32 +0,0 @@ -package k8s - -import ( - "fmt" - "strings" -) - -// Namespaces holds kubernetes namespaces. -type Namespaces []string - -// Set adds strings elem into the the parser -// it splits str on , and ;. -func (ns *Namespaces) Set(str string) error { - fargs := func(c rune) bool { - return c == ',' || c == ';' - } - // get function - slice := strings.FieldsFunc(str, fargs) - *ns = append(*ns, slice...) - return nil -} - -// Get []string. -func (ns *Namespaces) Get() interface{} { return *ns } - -// String return slice in a string. -func (ns *Namespaces) String() string { return fmt.Sprintf("%v", *ns) } - -// SetValue sets []string into the parser. -func (ns *Namespaces) SetValue(val interface{}) { - *ns = val.(Namespaces) -} diff --git a/pkg/provider/label/internal/node.go b/pkg/provider/label/internal/node.go deleted file mode 100644 index fab2473a1..000000000 --- a/pkg/provider/label/internal/node.go +++ /dev/null @@ -1,13 +0,0 @@ -package internal - -import "reflect" - -// Node a label node. 
-type Node struct { - Name string `json:"name"` - FieldName string `json:"fieldName"` - Value string `json:"value,omitempty"` - Disabled bool `json:"disabled,omitempty"` - Kind reflect.Kind `json:"kind,omitempty"` - Children []*Node `json:"children,omitempty"` -} diff --git a/pkg/provider/label/internal/tags.go b/pkg/provider/label/internal/tags.go deleted file mode 100644 index 43d232375..000000000 --- a/pkg/provider/label/internal/tags.go +++ /dev/null @@ -1,12 +0,0 @@ -package internal - -const ( - // TagLabel allow to apply a custom behavior. - // - "allowEmpty": allow to create an empty struct. - // - "-": ignore the field. - TagLabel = "label" - - // TagLabelSliceAsStruct allow to use a slice of struct by creating one entry into the slice. - // The value is the substitution name use in the label to access the slice. - TagLabelSliceAsStruct = "label-slice-as-struct" -) diff --git a/pkg/provider/label/parser.go b/pkg/provider/label/parser.go deleted file mode 100644 index a31893728..000000000 --- a/pkg/provider/label/parser.go +++ /dev/null @@ -1,58 +0,0 @@ -package label - -import ( - "github.com/containous/traefik/pkg/config" - "github.com/containous/traefik/pkg/provider/label/internal" -) - -// DecodeConfiguration Converts the labels to a configuration. -func DecodeConfiguration(labels map[string]string) (*config.Configuration, error) { - conf := &config.Configuration{ - HTTP: &config.HTTPConfiguration{}, - TCP: &config.TCPConfiguration{}, - } - - err := Decode(labels, conf, "traefik.http", "traefik.tcp") - if err != nil { - return nil, err - } - - return conf, nil -} - -// EncodeConfiguration Converts a configuration to labels. -func EncodeConfiguration(conf *config.Configuration) (map[string]string, error) { - return Encode(conf) -} - -// Decode Converts the labels to an element. -// labels -> [ node -> node + metadata (type) ] -> element (node) -func Decode(labels map[string]string, element interface{}, filters ...string) error { - node, err := internal.DecodeToNode(labels, filters...) - if err != nil { - return err - } - - err = internal.AddMetadata(element, node) - if err != nil { - return err - } - - err = internal.Fill(element, node) - if err != nil { - return err - } - - return nil -} - -// Encode Converts an element to labels. 
-// element -> node (value) -> label (node) -func Encode(element interface{}) (map[string]string, error) { - node, err := internal.EncodeToNode(element) - if err != nil { - return nil, err - } - - return internal.EncodeNode(node), nil -} diff --git a/pkg/provider/marathon/config.go b/pkg/provider/marathon/config.go index 81bbcc3cd..166775bad 100644 --- a/pkg/provider/marathon/config.go +++ b/pkg/provider/marathon/config.go @@ -10,9 +10,9 @@ import ( "strings" "github.com/containous/traefik/pkg/config" + "github.com/containous/traefik/pkg/config/label" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/provider" - "github.com/containous/traefik/pkg/provider/label" "github.com/gambol99/go-marathon" ) diff --git a/pkg/provider/marathon/config_test.go b/pkg/provider/marathon/config_test.go index bd0337baf..4e8529de8 100644 --- a/pkg/provider/marathon/config_test.go +++ b/pkg/provider/marathon/config_test.go @@ -31,7 +31,7 @@ func TestBuildConfiguration(t *testing.T) { testCases := []struct { desc string applications *marathon.Applications - constraints types.Constraints + constraints []*types.Constraint filterMarathonConstraints bool defaultRule string expected *config.Configuration @@ -1065,11 +1065,11 @@ func TestBuildConfiguration(t *testing.T) { withTasks(localhostTask(taskPorts(80, 81))), withLabel("traefik.tags", "foo"), )), - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "bar", + Value: "bar", }, }, expected: &config.Configuration{ @@ -1094,11 +1094,11 @@ func TestBuildConfiguration(t *testing.T) { constraint("rack_id:CLUSTER:rack-1"), )), filterMarathonConstraints: true, - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "rack_id:CLUSTER:rack-2", + Value: "rack_id:CLUSTER:rack-2", }, }, expected: &config.Configuration{ @@ -1123,11 +1123,11 @@ func TestBuildConfiguration(t *testing.T) { constraint("rack_id:CLUSTER:rack-1"), )), filterMarathonConstraints: true, - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "rack_id:CLUSTER:rack-1", + Value: "rack_id:CLUSTER:rack-1", }, }, expected: &config.Configuration{ @@ -1168,11 +1168,11 @@ func TestBuildConfiguration(t *testing.T) { withLabel("traefik.tags", "bar"), )), - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "bar", + Value: "bar", }, }, expected: &config.Configuration{ @@ -1466,7 +1466,7 @@ func TestApplicationFilterEnabled(t *testing.T) { t.Run(test.desc, func(t *testing.T) { t.Parallel() - provider := &Provider{ExposedByDefault: test.exposedByDefault} + provider := &Provider{ExposedByDefault: true} app := application(withLabel("traefik.enable", test.enabledLabel)) diff --git a/pkg/provider/marathon/label.go b/pkg/provider/marathon/label.go index 38a319227..42a8a9bbe 100644 --- a/pkg/provider/marathon/label.go +++ b/pkg/provider/marathon/label.go @@ -4,7 +4,7 @@ import ( "math" "strings" - "github.com/containous/traefik/pkg/provider/label" + "github.com/containous/traefik/pkg/config/label" "github.com/gambol99/go-marathon" ) diff --git a/pkg/provider/marathon/marathon.go b/pkg/provider/marathon/marathon.go index 2a4ad9c73..3c7112c7d 100644 --- a/pkg/provider/marathon/marathon.go +++ b/pkg/provider/marathon/marathon.go @@ -10,7 +10,6 @@ import ( "time" "github.com/cenkalti/backoff" - 
"github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config" "github.com/containous/traefik/pkg/job" "github.com/containous/traefik/pkg/log" @@ -46,31 +45,44 @@ var _ provider.Provider = (*Provider)(nil) // Provider holds configuration of the provider. type Provider struct { - provider.Constrainer `mapstructure:",squash" export:"true"` + provider.Constrainer `description:"List of constraints used to filter out some containers." export:"true"` + Trace bool `description:"Display additional provider logs." export:"true"` - Watch bool `description:"Watch provider" export:"true"` - Endpoint string `description:"Marathon server endpoint. You can also specify multiple endpoint for Marathon" export:"true"` - DefaultRule string `description:"Default rule"` - ExposedByDefault bool `description:"Expose Marathon apps by default" export:"true"` - DCOSToken string `description:"DCOSToken for DCOS environment, This will override the Authorization header" export:"true"` - FilterMarathonConstraints bool `description:"Enable use of Marathon constraints in constraint filtering" export:"true"` - TLS *types.ClientTLS `description:"Enable TLS support" export:"true"` - DialerTimeout parse.Duration `description:"Set a dialer timeout for Marathon" export:"true"` - ResponseHeaderTimeout parse.Duration `description:"Set a response header timeout for Marathon" export:"true"` - TLSHandshakeTimeout parse.Duration `description:"Set a TLS handhsake timeout for Marathon" export:"true"` - KeepAlive parse.Duration `description:"Set a TCP Keep Alive time in seconds" export:"true"` + Watch bool `description:"Watch provider." export:"true"` + Endpoint string `description:"Marathon server endpoint. You can also specify multiple endpoint for Marathon." export:"true"` + DefaultRule string `description:"Default rule."` + ExposedByDefault bool `description:"Expose Marathon apps by default." export:"true"` + DCOSToken string `description:"DCOSToken for DCOS environment, This will override the Authorization header." export:"true"` + FilterMarathonConstraints bool `description:"Enable use of Marathon constraints in constraint filtering." export:"true"` + TLS *types.ClientTLS `description:"Enable TLS support." export:"true"` + DialerTimeout types.Duration `description:"Set a dialer timeout for Marathon." export:"true"` + ResponseHeaderTimeout types.Duration `description:"Set a response header timeout for Marathon." export:"true"` + TLSHandshakeTimeout types.Duration `description:"Set a TLS handshake timeout for Marathon." export:"true"` + KeepAlive types.Duration `description:"Set a TCP Keep Alive time." export:"true"` ForceTaskHostname bool `description:"Force to use the task's hostname." export:"true"` - Basic *Basic `description:"Enable basic authentication" export:"true"` - RespectReadinessChecks bool `description:"Filter out tasks with non-successful readiness checks during deployments" export:"true"` + Basic *Basic `description:"Enable basic authentication." export:"true"` + RespectReadinessChecks bool `description:"Filter out tasks with non-successful readiness checks during deployments." export:"true"` readyChecker *readinessChecker marathonClient marathon.Marathon defaultRuleTpl *template.Template } +// SetDefaults sets the default values. 
+func (p *Provider) SetDefaults() { + p.Watch = true + p.Endpoint = "http://127.0.0.1:8080" + p.ExposedByDefault = true + p.DialerTimeout = types.Duration(5 * time.Second) + p.ResponseHeaderTimeout = types.Duration(60 * time.Second) + p.TLSHandshakeTimeout = types.Duration(5 * time.Second) + p.KeepAlive = types.Duration(10 * time.Second) + p.DefaultRule = DefaultTemplateRule +} + // Basic holds basic authentication specific configurations type Basic struct { - HTTPBasicAuthUser string `description:"Basic authentication User"` - HTTPBasicPassword string `description:"Basic authentication Password"` + HTTPBasicAuthUser string `description:"Basic authentication User."` + HTTPBasicPassword string `description:"Basic authentication Password."` } // Init the provider diff --git a/pkg/provider/rancher/config.go b/pkg/provider/rancher/config.go index 76a6313d4..e6bf6a044 100644 --- a/pkg/provider/rancher/config.go +++ b/pkg/provider/rancher/config.go @@ -8,9 +8,9 @@ import ( "strings" "github.com/containous/traefik/pkg/config" + "github.com/containous/traefik/pkg/config/label" "github.com/containous/traefik/pkg/log" "github.com/containous/traefik/pkg/provider" - "github.com/containous/traefik/pkg/provider/label" ) func (p *Provider) buildConfiguration(ctx context.Context, services []rancherData) *config.Configuration { diff --git a/pkg/provider/rancher/config_test.go b/pkg/provider/rancher/config_test.go index 88e60220a..3aa8b65d1 100644 --- a/pkg/provider/rancher/config_test.go +++ b/pkg/provider/rancher/config_test.go @@ -14,7 +14,7 @@ func Test_buildConfiguration(t *testing.T) { testCases := []struct { desc string containers []rancherData - constraints types.Constraints + constraints []*types.Constraint expected *config.Configuration }{ { @@ -330,11 +330,11 @@ func Test_buildConfiguration(t *testing.T) { State: "", }, }, - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "bar", + Value: "bar", }, }, expected: &config.Configuration{ @@ -363,11 +363,11 @@ func Test_buildConfiguration(t *testing.T) { State: "", }, }, - constraints: types.Constraints{ - &types.Constraint{ + constraints: []*types.Constraint{ + { Key: "tag", MustMatch: true, - Regex: "foo", + Value: "foo", }, }, expected: &config.Configuration{ diff --git a/pkg/provider/rancher/label.go b/pkg/provider/rancher/label.go index 36104b53e..64c36842f 100644 --- a/pkg/provider/rancher/label.go +++ b/pkg/provider/rancher/label.go @@ -1,7 +1,7 @@ package rancher import ( - "github.com/containous/traefik/pkg/provider/label" + "github.com/containous/traefik/pkg/config/label" ) type configuration struct { diff --git a/pkg/provider/rancher/rancher.go b/pkg/provider/rancher/rancher.go index 9cc74957e..1d120af36 100644 --- a/pkg/provider/rancher/rancher.go +++ b/pkg/provider/rancher/rancher.go @@ -40,15 +40,26 @@ var _ provider.Provider = (*Provider)(nil) // Provider holds configurations of the provider. type Provider struct { - provider.Constrainer `mapstructure:",squash" export:"true"` - Watch bool `description:"Watch provider" export:"true"` - DefaultRule string `description:"Default rule"` - ExposedByDefault bool `description:"Expose containers by default" export:"true"` - EnableServiceHealthFilter bool - RefreshSeconds int + provider.Constrainer `description:"List of constraints used to filter out some containers." export:"true"` + + Watch bool `description:"Watch provider." 
export:"true"` + DefaultRule string `description:"Default rule."` + ExposedByDefault bool `description:"Expose containers by default." export:"true"` + EnableServiceHealthFilter bool `description:"Filter services with unhealthy states and inactive states." export:"true"` + RefreshSeconds int `description:"Defines the polling interval in seconds." export:"true"` + IntervalPoll bool `description:"Poll the Rancher metadata service every 'rancher.refreshseconds' (less accurate)."` + Prefix string `description:"Prefix used for accessing the Rancher metadata service."` defaultRuleTpl *template.Template - IntervalPoll bool `description:"Poll the Rancher metadata service every 'rancher.refreshseconds' (less accurate)"` - Prefix string `description:"Prefix used for accessing the Rancher metadata service"` +} + +// SetDefaults sets the default values. +func (p *Provider) SetDefaults() { + p.Watch = true + p.ExposedByDefault = true + p.EnableServiceHealthFilter = true + p.RefreshSeconds = 15 + p.DefaultRule = DefaultTemplateRule + p.Prefix = "latest" } type rancherData struct { diff --git a/pkg/provider/rest/rest.go b/pkg/provider/rest/rest.go index 071aef178..60db12d8b 100644 --- a/pkg/provider/rest/rest.go +++ b/pkg/provider/rest/rest.go @@ -19,7 +19,13 @@ var _ provider.Provider = (*Provider)(nil) // Provider is a provider.Provider implementation that provides a Rest API. type Provider struct { configurationChan chan<- config.Message - EntryPoint string `description:"EntryPoint" export:"true"` + EntryPoint string `description:"EntryPoint." export:"true"` +} + +// SetDefaults sets the default values. +func (p *Provider) SetDefaults() { + p.EntryPoint = "traefik" + // FIXME p.EntryPoint = static.DefaultInternalEntryPointName } var templatesRenderer = render.New(render.Options{Directory: "nowhere"}) diff --git a/pkg/server/roundtripper.go b/pkg/server/roundtripper.go index 2de09411c..90b640e06 100644 --- a/pkg/server/roundtripper.go +++ b/pkg/server/roundtripper.go @@ -83,7 +83,7 @@ func createHTTPTransport(transportConfiguration *static.ServersTransport) (*http return transport, nil } -func createRootCACertPool(rootCAs traefiktls.FilesOrContents) *x509.CertPool { +func createRootCACertPool(rootCAs []traefiktls.FileOrContent) *x509.CertPool { roots := x509.NewCertPool() for _, cert := range rootCAs { diff --git a/pkg/server/server_entrypoint_tcp_test.go b/pkg/server/server_entrypoint_tcp_test.go index c6b590ede..041ecdcfd 100644 --- a/pkg/server/server_entrypoint_tcp_test.go +++ b/pkg/server/server_entrypoint_tcp_test.go @@ -8,9 +8,9 @@ import ( "testing" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config/static" "github.com/containous/traefik/pkg/tcp" + "github.com/containous/traefik/pkg/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -21,7 +21,7 @@ func TestShutdownHTTP(t *testing.T) { Transport: &static.EntryPointsTransport{ LifeCycle: &static.LifeCycle{ RequestAcceptGraceTimeout: 0, - GraceTimeOut: parse.Duration(5 * time.Second), + GraceTimeOut: types.Duration(5 * time.Second), }, }, ForwardedHeaders: &static.ForwardedHeaders{}, @@ -59,7 +59,7 @@ func TestShutdownHTTPHijacked(t *testing.T) { Transport: &static.EntryPointsTransport{ LifeCycle: &static.LifeCycle{ RequestAcceptGraceTimeout: 0, - GraceTimeOut: parse.Duration(5 * time.Second), + GraceTimeOut: types.Duration(5 * time.Second), }, }, ForwardedHeaders: &static.ForwardedHeaders{}, @@ -103,7 +103,7 @@ func TestShutdownTCPConn(t *testing.T) { Transport: 
&static.EntryPointsTransport{ LifeCycle: &static.LifeCycle{ RequestAcceptGraceTimeout: 0, - GraceTimeOut: parse.Duration(5 * time.Second), + GraceTimeOut: types.Duration(5 * time.Second), }, }, ForwardedHeaders: &static.ForwardedHeaders{}, diff --git a/pkg/server/server_test.go b/pkg/server/server_test.go index 75ea489df..255b8aa5a 100644 --- a/pkg/server/server_test.go +++ b/pkg/server/server_test.go @@ -7,10 +7,10 @@ import ( "testing" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config" "github.com/containous/traefik/pkg/config/static" th "github.com/containous/traefik/pkg/testhelpers" + "github.com/containous/traefik/pkg/types" "github.com/stretchr/testify/assert" ) @@ -132,7 +132,7 @@ func setupListenProvider(throttleDuration time.Duration) (server *Server, stop c staticConfiguration := static.Configuration{ Providers: &static.Providers{ - ProvidersThrottleDuration: parse.Duration(throttleDuration), + ProvidersThrottleDuration: types.Duration(throttleDuration), }, } diff --git a/pkg/server/service/proxy.go b/pkg/server/service/proxy.go index 3e15c5a05..b4f970748 100644 --- a/pkg/server/service/proxy.go +++ b/pkg/server/service/proxy.go @@ -10,9 +10,9 @@ import ( "net/url" "time" - "github.com/containous/flaeg/parse" "github.com/containous/traefik/pkg/config" "github.com/containous/traefik/pkg/log" + "github.com/containous/traefik/pkg/types" ) // StatusClientClosedRequest non-standard HTTP status code for client disconnection @@ -22,7 +22,7 @@ const StatusClientClosedRequest = 499 const StatusClientClosedRequestText = "Client Closed Request" func buildProxy(passHostHeader bool, responseForwarding *config.ResponseForwarding, defaultRoundTripper http.RoundTripper, bufferPool httputil.BufferPool, responseModifier func(*http.Response) error) (http.Handler, error) { - var flushInterval parse.Duration + var flushInterval types.Duration if responseForwarding != nil { err := flushInterval.Set(responseForwarding.FlushInterval) if err != nil { @@ -30,7 +30,7 @@ func buildProxy(passHostHeader bool, responseForwarding *config.ResponseForwardi } } if flushInterval == 0 { - flushInterval = parse.Duration(100 * time.Millisecond) + flushInterval = types.Duration(100 * time.Millisecond) } proxy := &httputil.ReverseProxy{ diff --git a/pkg/tls/tls.go b/pkg/tls/tls.go index 0a725100a..c2d09a5bb 100644 --- a/pkg/tls/tls.go +++ b/pkg/tls/tls.go @@ -1,16 +1,11 @@ package tls -import ( - "fmt" - "strings" -) - const certificateHeader = "-----BEGIN CERTIFICATE-----\n" // ClientCA defines traefik CA files for a entryPoint -// and it indicates if they are mandatory or have just to be analyzed if provided +// and it indicates if they are mandatory or have just to be analyzed if provided. type ClientCA struct { - Files FilesOrContents + Files []FileOrContent Optional bool } @@ -27,50 +22,8 @@ type Store struct { DefaultCertificate *Certificate } -// FilesOrContents hold the CA we want to have in root -type FilesOrContents []FileOrContent - -// Configuration allows mapping a TLS certificate to a list of entrypoints +// Configuration allows mapping a TLS certificate to a list of entry points. type Configuration struct { Stores []string Certificate *Certificate } - -// String is the method to format the flag's value, part of the flag.Value interface. -// The String method's output will be used in diagnostics. 
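[editor's illustration] The FilesOrContents plumbing removed below (String, Set, Get, SetValue, Type) only existed to satisfy the old flag parser; with the new loader, callers such as createRootCACertPool simply receive a plain []FileOrContent slice and the configuration parser does the splitting. A minimal sketch, not part of the patch (the CA path and PEM snippet are placeholders), using only the FileOrContent type and its String method that remain:

package main

import (
	"fmt"

	traefiktls "github.com/containous/traefik/pkg/tls"
)

func main() {
	// RootCAs-style values are now an ordinary slice; splitting a single
	// comma-separated flag value is the parser's job, not the type's.
	rootCAs := []traefiktls.FileOrContent{
		"/etc/ssl/certs/my-ca.pem",         // a file path...
		"-----BEGIN CERTIFICATE-----\n...", // ...or inline PEM content
	}

	for _, ca := range rootCAs {
		fmt.Println(ca.String())
	}
}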
-func (r *FilesOrContents) String() string { - sliceOfString := make([]string, len([]FileOrContent(*r))) - for key, value := range *r { - sliceOfString[key] = value.String() - } - return strings.Join(sliceOfString, ",") -} - -// Set is the method to set the flag value, part of the flag.Value interface. -// Set's argument is a string to be parsed to set the flag. -// It's a comma-separated list, so we split it. -func (r *FilesOrContents) Set(value string) error { - filesOrContents := strings.Split(value, ",") - if len(filesOrContents) == 0 { - return fmt.Errorf("bad FilesOrContents format: %s", value) - } - for _, fileOrContent := range filesOrContents { - *r = append(*r, FileOrContent(fileOrContent)) - } - return nil -} - -// Get return the FilesOrContents list -func (r *FilesOrContents) Get() interface{} { - return *r -} - -// SetValue sets the FilesOrContents with val -func (r *FilesOrContents) SetValue(val interface{}) { - *r = val.(FilesOrContents) -} - -// Type is type of the struct -func (r *FilesOrContents) Type() string { - return "filesorcontents" -} diff --git a/pkg/tracing/datadog/datadog.go b/pkg/tracing/datadog/datadog.go index 42c0d3558..70af78cdd 100644 --- a/pkg/tracing/datadog/datadog.go +++ b/pkg/tracing/datadog/datadog.go @@ -15,14 +15,22 @@ const Name = "datadog" // Config provides configuration settings for a datadog tracer type Config struct { - LocalAgentHostPort string `description:"Set datadog-agent's host:port that the reporter will used. Defaults to localhost:8126" export:"false"` + LocalAgentHostPort string `description:"Set datadog-agent's host:port that the reporter will used." export:"false"` GlobalTag string `description:"Key:Value tag to be set on all the spans." export:"true"` Debug bool `description:"Enable DataDog debug." export:"true"` PrioritySampling bool `description:"Enable priority sampling. When using distributed tracing, this option must be enabled in order to get all the parts of a distributed trace sampled."` TraceIDHeaderName string `description:"Specifies the header name that will be used to store the trace ID." export:"true"` ParentIDHeaderName string `description:"Specifies the header name that will be used to store the parent ID." export:"true"` SamplingPriorityHeaderName string `description:"Specifies the header name that will be used to store the sampling priority." export:"true"` - BagagePrefixHeaderName string `description:"specifies the header name prefix that will be used to store baggage items in a map." export:"true"` + BagagePrefixHeaderName string `description:"Specifies the header name prefix that will be used to store baggage items in a map." export:"true"` +} + +// SetDefaults sets the default values. +func (c *Config) SetDefaults() { + c.LocalAgentHostPort = "localhost:8126" + c.GlobalTag = "" + c.Debug = false + c.PrioritySampling = false } // Setup sets up the tracer diff --git a/pkg/tracing/haystack/haystack.go b/pkg/tracing/haystack/haystack.go index ed73e4709..b37940895 100644 --- a/pkg/tracing/haystack/haystack.go +++ b/pkg/tracing/haystack/haystack.go @@ -15,15 +15,21 @@ const Name = "haystack" // Config provides configuration settings for a haystack tracer type Config struct { - LocalAgentHost string `description:"Set haystack-agent's host that the reporter will used. Defaults to localhost" export:"false"` - LocalAgentPort int `description:"Set haystack-agent's port that the reporter will used. 
Defaults to 35000" export:"false"` + LocalAgentHost string `description:"Set haystack-agent's host that the reporter will used." export:"false"` + LocalAgentPort int `description:"Set haystack-agent's port that the reporter will used." export:"false"` GlobalTag string `description:"Key:Value tag to be set on all the spans." export:"true"` - TraceIDHeaderName string `description:"Specifies the header name that will be used to store the trace ID.." export:"true"` + TraceIDHeaderName string `description:"Specifies the header name that will be used to store the trace ID." export:"true"` ParentIDHeaderName string `description:"Specifies the header name that will be used to store the parent ID." export:"true"` SpanIDHeaderName string `description:"Specifies the header name that will be used to store the span ID." export:"true"` BaggagePrefixHeaderName string `description:"specifies the header name prefix that will be used to store baggage items in a map." export:"true"` } +// SetDefaults sets the default values. +func (c *Config) SetDefaults() { + c.LocalAgentHost = "LocalAgentHost" + c.LocalAgentPort = 35000 +} + // Setup sets up the tracer func (c *Config) Setup(serviceName string) (opentracing.Tracer, io.Closer, error) { tag := strings.SplitN(c.GlobalTag, ":", 2) diff --git a/pkg/tracing/instana/instana.go b/pkg/tracing/instana/instana.go index 5302281d6..da66ea916 100644 --- a/pkg/tracing/instana/instana.go +++ b/pkg/tracing/instana/instana.go @@ -18,6 +18,13 @@ type Config struct { LogLevel string `description:"Set instana-agent's log level. ('error','warn','info','debug')" export:"false"` } +// SetDefaults sets the default values. +func (c *Config) SetDefaults() { + c.LocalAgentHost = "localhost" + c.LocalAgentPort = 42699 + c.LogLevel = "info" +} + // Setup sets up the tracer func (c *Config) Setup(serviceName string) (opentracing.Tracer, io.Closer, error) { // set default logLevel diff --git a/pkg/tracing/jaeger/jaeger.go b/pkg/tracing/jaeger/jaeger.go index 048bf8849..1aaf61ba5 100644 --- a/pkg/tracing/jaeger/jaeger.go +++ b/pkg/tracing/jaeger/jaeger.go @@ -7,6 +7,7 @@ import ( "github.com/containous/traefik/pkg/log" "github.com/opentracing/opentracing-go" jaeger "github.com/uber/jaeger-client-go" + jaegercli "github.com/uber/jaeger-client-go" jaegercfg "github.com/uber/jaeger-client-go/config" "github.com/uber/jaeger-client-go/zipkin" jaegermet "github.com/uber/jaeger-lib/metrics" @@ -17,13 +18,24 @@ const Name = "jaeger" // Config provides configuration settings for a jaeger tracer type Config struct { - SamplingServerURL string `description:"set the sampling server url." export:"false"` - SamplingType string `description:"set the sampling type." export:"true"` - SamplingParam float64 `description:"set the sampling parameter." export:"true"` - LocalAgentHostPort string `description:"set jaeger-agent's host:port that the reporter will used." export:"false"` - Gen128Bit bool `description:"generate 128 bit span IDs." export:"true"` - Propagation string `description:"which propgation format to use (jaeger/b3)." export:"true"` - TraceContextHeaderName string `description:"set the header to use for the trace-id." export:"true"` + SamplingServerURL string `description:"Set the sampling server url." export:"false"` + SamplingType string `description:"Set the sampling type." export:"true"` + SamplingParam float64 `description:"Set the sampling parameter." export:"true"` + LocalAgentHostPort string `description:"Set jaeger-agent's host:port that the reporter will used." 
export:"false"` + Gen128Bit bool `description:"Generate 128 bit span IDs." export:"true"` + Propagation string `description:"Which propgation format to use (jaeger/b3)." export:"true"` + TraceContextHeaderName string `description:"Set the header to use for the trace-id." export:"true"` +} + +// SetDefaults sets the default values. +func (c *Config) SetDefaults() { + c.SamplingServerURL = "http://localhost:5778/sampling" + c.SamplingType = "const" + c.SamplingParam = 1.0 + c.LocalAgentHostPort = "127.0.0.1:6831" + c.Propagation = "jaeger" + c.Gen128Bit = false + c.TraceContextHeaderName = jaegercli.TraceContextHeaderName } // Setup sets up the tracer diff --git a/pkg/tracing/zipkin/zipkin.go b/pkg/tracing/zipkin/zipkin.go index 91da920ea..e2bc34c0f 100644 --- a/pkg/tracing/zipkin/zipkin.go +++ b/pkg/tracing/zipkin/zipkin.go @@ -21,6 +21,15 @@ type Config struct { SampleRate float64 `description:"The rate between 0.0 and 1.0 of requests to trace." export:"true"` } +// SetDefaults sets the default values. +func (c *Config) SetDefaults() { + c.HTTPEndpoint = "http://localhost:9411/api/v1/spans" + c.SameSpan = false + c.ID128Bit = true + c.Debug = false + c.SampleRate = 1.0 +} + // Setup sets up the tracer func (c *Config) Setup(serviceName string) (opentracing.Tracer, io.Closer, error) { collector, err := zipkin.NewHTTPCollector(c.HTTPEndpoint) diff --git a/pkg/types/constraints.go b/pkg/types/constraints.go index dc658ce03..2328e795e 100644 --- a/pkg/types/constraints.go +++ b/pkg/types/constraints.go @@ -10,12 +10,12 @@ import ( ) // Constraint holds a parsed constraint expression. +// FIXME replace by a string. type Constraint struct { - Key string `export:"true"` + Key string `description:"The provider label that will be matched against. In practice, it is always 'tag'." export:"true"` // MustMatch is true if operator is "==" or false if operator is "!=" - MustMatch bool `export:"true"` - // TODO: support regex - Regex string `export:"true"` + MustMatch bool `description:"Whether the matching operator is equals or not equals." export:"true"` + Value string `description:"The value that will be matched against." export:"true"` // TODO: support regex } // NewConstraint receives a string and return a *Constraint, after checking syntax and parsing the constraint expression. @@ -42,7 +42,7 @@ func NewConstraint(exp string) (*Constraint, error) { } constraint.Key = kv[0] - constraint.Regex = kv[1] + constraint.Value = kv[1] return constraint, nil } @@ -51,9 +51,9 @@ func NewConstraint(exp string) (*Constraint, error) { func (c *Constraint) String() string { if c.MustMatch { - return c.Key + "==" + c.Regex + return c.Key + "==" + c.Value } - return c.Key + "!=" + c.Regex + return c.Key + "!=" + c.Value } var _ encoding.TextUnmarshaler = (*Constraint)(nil) @@ -66,7 +66,7 @@ func (c *Constraint) UnmarshalText(text []byte) error { } c.Key = constraint.Key c.MustMatch = constraint.MustMatch - c.Regex = constraint.Regex + c.Value = constraint.Value return nil } @@ -80,44 +80,9 @@ func (c *Constraint) MarshalText() (text []byte, err error) { // MatchConstraintWithAtLeastOneTag tests a constraint for one single service. func (c *Constraint) MatchConstraintWithAtLeastOneTag(tags []string) bool { for _, tag := range tags { - if glob.Glob(c.Regex, tag) { + if glob.Glob(c.Value, tag) { return true } } return false } - -// Set []*Constraint. 
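[editor's illustration] For reference, a self-contained sketch of how the renamed Key/MustMatch/Value triple above is meant to drive provider filtering. This is not patch code: the local constraint type is a copy kept only to make the sketch standalone, and path.Match stands in for the glob helper used by MatchConstraintWithAtLeastOneTag.

package main

import (
	"fmt"
	"path"
)

// constraint mirrors the renamed fields of types.Constraint.
type constraint struct {
	Key       string
	MustMatch bool
	Value     string
}

// matchAtLeastOneTag follows the semantics of MatchConstraintWithAtLeastOneTag:
// it reports whether any tag matches the constraint value.
func (c constraint) matchAtLeastOneTag(tags []string) bool {
	for _, tag := range tags {
		if ok, _ := path.Match(c.Value, tag); ok {
			return true
		}
	}
	return false
}

func main() {
	c := constraint{Key: "tag", MustMatch: true, Value: "rack_id:CLUSTER:rack-1"}

	matched := c.matchAtLeastOneTag([]string{"rack_id:CLUSTER:rack-1", "foo"})
	// A service is kept only when the match result agrees with MustMatch
	// ("==" keeps matches, "!=" keeps non-matches).
	fmt.Println(matched == c.MustMatch) // true
}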
-func (cs *Constraints) Set(str string) error { - exps := strings.Split(str, ",") - if len(exps) == 0 { - return fmt.Errorf("bad Constraint format: %s", str) - } - for _, exp := range exps { - constraint, err := NewConstraint(exp) - if err != nil { - return err - } - *cs = append(*cs, constraint) - } - return nil -} - -// Constraints holds a Constraint parser. -type Constraints []*Constraint - -// Get []*Constraint -func (cs *Constraints) Get() interface{} { return []*Constraint(*cs) } - -// String returns []*Constraint in string. -func (cs *Constraints) String() string { return fmt.Sprintf("%+v", *cs) } - -// SetValue sets []*Constraint into the parser. -func (cs *Constraints) SetValue(val interface{}) { - *cs = val.(Constraints) -} - -// Type exports the Constraints type as a string. -func (cs *Constraints) Type() string { - return "constraint" -} diff --git a/pkg/types/dns_resolvers.go b/pkg/types/dns_resolvers.go deleted file mode 100644 index dd96f7895..000000000 --- a/pkg/types/dns_resolvers.go +++ /dev/null @@ -1,44 +0,0 @@ -package types - -import ( - "fmt" - "strings" -) - -// DNSResolvers is a list of DNSes that we will try to resolve the challenged FQDN against -type DNSResolvers []string - -// String is the method to format the flag's value, part of the flag.Value interface. -// The String method's output will be used in diagnostics. -func (r *DNSResolvers) String() string { - return strings.Join(*r, ",") -} - -// Set is the method to set the flag value, part of the flag.Value interface. -// Set's argument is a string to be parsed to set the flag. -// It's a comma-separated list, so we split it. -func (r *DNSResolvers) Set(value string) error { - entryPoints := strings.Split(value, ",") - if len(entryPoints) == 0 { - return fmt.Errorf("wrong DNSResolvers format: %s", value) - } - for _, entryPoint := range entryPoints { - *r = append(*r, entryPoint) - } - return nil -} - -// Get return the DNSResolvers list -func (r *DNSResolvers) Get() interface{} { - return *r -} - -// SetValue sets the DNSResolvers list -func (r *DNSResolvers) SetValue(val interface{}) { - *r = val.(DNSResolvers) -} - -// Type is type of the struct -func (r *DNSResolvers) Type() string { - return "dnsresolvers" -} diff --git a/pkg/types/domains.go b/pkg/types/domains.go index 55d85c25e..70b3a3904 100644 --- a/pkg/types/domains.go +++ b/pkg/types/domains.go @@ -7,8 +7,8 @@ import ( // Domain holds a domain name with SANs. type Domain struct { - Main string - SANs []string + Main string `description:"Default subject name."` + SANs []string `description:"Subject alternative names."` } // ToStrArray convert a domain into an array of strings. diff --git a/pkg/types/duration.go b/pkg/types/duration.go new file mode 100644 index 000000000..87bc09f09 --- /dev/null +++ b/pkg/types/duration.go @@ -0,0 +1,69 @@ +package types + +import ( + "encoding/json" + "strconv" + "time" +) + +// Duration is a custom type suitable for parsing duration values. +// It supports `time.ParseDuration`-compatible values and suffix-less digits; in +// the latter case, seconds are assumed. +type Duration time.Duration + +// Set sets the duration from the given string value. +func (d *Duration) Set(s string) error { + if v, err := strconv.ParseInt(s, 10, 64); err == nil { + *d = Duration(time.Duration(v) * time.Second) + return nil + } + + v, err := time.ParseDuration(s) + *d = Duration(v) + return err +} + +// Get returns the duration value. 
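[editor's illustration] Based on the Set implementation above (suffix-less digits are treated as seconds, anything else goes through time.ParseDuration), a short usage example, not part of the patch:

package main

import (
	"fmt"
	"time"

	"github.com/containous/traefik/pkg/types"
)

func main() {
	var d types.Duration

	// Plain digits: interpreted as seconds.
	if err := d.Set("90"); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(d)) // 1m30s

	// time.ParseDuration syntax is accepted as-is.
	if err := d.Set("250ms"); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(d)) // 250ms
}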
+func (d *Duration) Get() interface{} { return time.Duration(*d) } + +// String returns a string representation of the duration value. +func (d *Duration) String() string { return (*time.Duration)(d).String() } + +// SetValue sets the duration from the given Duration-asserted value. +func (d *Duration) SetValue(val interface{}) { + *d = val.(Duration) +} + +// MarshalText serialize the given duration value into a text. +func (d *Duration) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText deserializes the given text into a duration value. +// It is meant to support TOML decoding of durations. +func (d *Duration) UnmarshalText(text []byte) error { + return d.Set(string(text)) +} + +// MarshalJSON serializes the given duration value. +func (d *Duration) MarshalJSON() ([]byte, error) { + return json.Marshal(time.Duration(*d)) +} + +// UnmarshalJSON deserializes the given text into a duration value. +func (d *Duration) UnmarshalJSON(text []byte) error { + if v, err := strconv.ParseInt(string(text), 10, 64); err == nil { + *d = Duration(time.Duration(v)) + return nil + } + + // We use json unmarshal on value because we have the quoted version + var value string + err := json.Unmarshal(text, &value) + if err != nil { + return err + } + v, err := time.ParseDuration(value) + *d = Duration(v) + return err +} diff --git a/pkg/types/host_resolver.go b/pkg/types/host_resolver.go index d48b59884..0e5ad3b18 100644 --- a/pkg/types/host_resolver.go +++ b/pkg/types/host_resolver.go @@ -6,3 +6,10 @@ type HostResolverConfig struct { ResolvConfig string `description:"resolv.conf used for DNS resolving" export:"true"` ResolvDepth int `description:"The maximal depth of DNS recursive resolving" export:"true"` } + +// SetDefaults sets the default values. +func (h *HostResolverConfig) SetDefaults() { + h.CnameFlattening = false + h.ResolvConfig = "/etc/resolv.conf" + h.ResolvDepth = 5 +} diff --git a/pkg/types/logs.go b/pkg/types/logs.go index b0e32d90a..fd1526e3c 100644 --- a/pkg/types/logs.go +++ b/pkg/types/logs.go @@ -1,12 +1,5 @@ package types -import ( - "fmt" - "strings" - - "github.com/containous/flaeg/parse" -) - const ( // AccessLogKeep is the keep string value AccessLogKeep = "keep" @@ -16,144 +9,71 @@ const ( AccessLogRedact = "redact" ) +const ( + // CommonFormat is the common logging format (CLF). + CommonFormat string = "common" + + // JSONFormat is the JSON logging format. + JSONFormat string = "json" +) + // TraefikLog holds the configuration settings for the traefik logger. type TraefikLog struct { Level string `description:"Log level set to traefik logs." export:"true"` - FilePath string `json:"file,omitempty" description:"Traefik log file path. Stdout is used when omitted or empty"` + FilePath string `json:"file,omitempty" description:"Traefik log file path. Stdout is used when omitted or empty."` Format string `json:"format,omitempty" description:"Traefik log format: json | common"` } +// SetDefaults sets the default values. +func (l *TraefikLog) SetDefaults() { + l.Format = CommonFormat + l.Level = "ERROR" +} + // AccessLog holds the configuration settings for the access logger (middlewares/accesslog). type AccessLog struct { - FilePath string `json:"file,omitempty" description:"Access log file path. Stdout is used when omitted or empty" export:"true"` + FilePath string `json:"file,omitempty" description:"Access log file path. Stdout is used when omitted or empty." 
export:"true"` Format string `json:"format,omitempty" description:"Access log format: json | common" export:"true"` - Filters *AccessLogFilters `json:"filters,omitempty" description:"Access log filters, used to keep only specific access logs" export:"true"` - Fields *AccessLogFields `json:"fields,omitempty" description:"AccessLogFields" export:"true"` - BufferingSize int64 `json:"bufferingSize,omitempty" description:"Number of access log lines to process in a buffered way. Default 0." export:"true"` + Filters *AccessLogFilters `json:"filters,omitempty" description:"Access log filters, used to keep only specific access logs." export:"true"` + Fields *AccessLogFields `json:"fields,omitempty" description:"AccessLogFields." export:"true"` + BufferingSize int64 `json:"bufferingSize,omitempty" description:"Number of access log lines to process in a buffered way." export:"true"` +} + +// SetDefaults sets the default values. +func (l *AccessLog) SetDefaults() { + l.Format = CommonFormat + l.FilePath = "" + l.Filters = &AccessLogFilters{} + l.Fields = &AccessLogFields{} + l.Fields.SetDefaults() } // AccessLogFilters holds filters configuration type AccessLogFilters struct { - StatusCodes StatusCodes `json:"statusCodes,omitempty" description:"Keep access logs with status codes in the specified range" export:"true"` - RetryAttempts bool `json:"retryAttempts,omitempty" description:"Keep access logs when at least one retry happened" export:"true"` - MinDuration parse.Duration `json:"duration,omitempty" description:"Keep access logs when request took longer than the specified duration" export:"true"` + StatusCodes []string `json:"statusCodes,omitempty" description:"Keep access logs with status codes in the specified range." export:"true"` + RetryAttempts bool `json:"retryAttempts,omitempty" description:"Keep access logs when at least one retry happened." export:"true"` + MinDuration Duration `json:"duration,omitempty" description:"Keep access logs when request took longer than the specified duration." export:"true"` } // FieldHeaders holds configuration for access log headers type FieldHeaders struct { - DefaultMode string `json:"defaultMode,omitempty" description:"Default mode for fields: keep | drop | redact" export:"true"` - Names FieldHeaderNames `json:"names,omitempty" description:"Override mode for headers" export:"true"` -} - -// StatusCodes holds status codes ranges to filter access log -type StatusCodes []string - -// Set adds strings elem into the the parser -// it splits str on , and ; -func (s *StatusCodes) Set(str string) error { - fargs := func(c rune) bool { - return c == ',' || c == ';' - } - // get function - slice := strings.FieldsFunc(str, fargs) - *s = append(*s, slice...) - return nil -} - -// Get StatusCodes -func (s *StatusCodes) Get() interface{} { return *s } - -// String return slice in a string -func (s *StatusCodes) String() string { return fmt.Sprintf("%v", *s) } - -// SetValue sets StatusCodes into the parser -func (s *StatusCodes) SetValue(val interface{}) { - *s = val.(StatusCodes) -} - -// FieldNames holds maps of fields with specific mode -type FieldNames map[string]string - -// String is the method to format the flag's value, part of the flag.Value interface. -// The String method's output will be used in diagnostics. -func (f *FieldNames) String() string { - return fmt.Sprintf("%+v", *f) -} - -// Get return the FieldNames map -func (f *FieldNames) Get() interface{} { - return *f -} - -// Set is the method to set the flag value, part of the flag.Value interface. 
-// Set's argument is a string to be parsed to set the flag. -// It's a space-separated list, so we split it. -func (f *FieldNames) Set(value string) error { - // When arguments are passed through YAML, escaped double quotes - // might be added to this string, and they would break the last - // key/value pair. This ensures the string is clean. - value = strings.Trim(value, "\"") - - fields := strings.Fields(value) - - for _, field := range fields { - n := strings.SplitN(field, "=", 2) - if len(n) == 2 { - (*f)[n[0]] = n[1] - } - } - - return nil -} - -// SetValue sets the FieldNames map with val -func (f *FieldNames) SetValue(val interface{}) { - *f = val.(FieldNames) -} - -// FieldHeaderNames holds maps of fields with specific mode -type FieldHeaderNames map[string]string - -// String is the method to format the flag's value, part of the flag.Value interface. -// The String method's output will be used in diagnostics. -func (f *FieldHeaderNames) String() string { - return fmt.Sprintf("%+v", *f) -} - -// Get return the FieldHeaderNames map -func (f *FieldHeaderNames) Get() interface{} { - return *f -} - -// Set is the method to set the flag value, part of the flag.Value interface. -// Set's argument is a string to be parsed to set the flag. -// It's a space-separated list, so we split it. -func (f *FieldHeaderNames) Set(value string) error { - // When arguments are passed through YAML, escaped double quotes - // might be added to this string, and they would break the last - // key/value pair. This ensures the string is clean. - value = strings.Trim(value, "\"") - - fields := strings.Fields(value) - - for _, field := range fields { - n := strings.SplitN(field, "=", 2) - (*f)[n[0]] = n[1] - } - - return nil -} - -// SetValue sets the FieldHeaderNames map with val -func (f *FieldHeaderNames) SetValue(val interface{}) { - *f = val.(FieldHeaderNames) + DefaultMode string `json:"defaultMode,omitempty" description:"Default mode for fields: keep | drop | redact" export:"true"` + Names map[string]string `json:"names,omitempty" description:"Override mode for headers" export:"true"` } // AccessLogFields holds configuration for access log fields type AccessLogFields struct { - DefaultMode string `json:"defaultMode,omitempty" description:"Default mode for fields: keep | drop" export:"true"` - Names FieldNames `json:"names,omitempty" description:"Override mode for fields" export:"true"` - Headers *FieldHeaders `json:"headers,omitempty" description:"Headers to keep, drop or redact" export:"true"` + DefaultMode string `json:"defaultMode,omitempty" description:"Default mode for fields: keep | drop" export:"true"` + Names map[string]string `json:"names,omitempty" description:"Override mode for fields" export:"true"` + Headers *FieldHeaders `json:"headers,omitempty" description:"Headers to keep, drop or redact" export:"true"` +} + +// SetDefaults sets the default values. 
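[editor's illustration] With the flag.Value wrappers gone, access-log field configuration is plain maps plus the SetDefaults methods introduced here. A hedged sketch of the expected usage ("ClientUsername" is just an example field name, not something the patch prescribes):

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/types"
)

func main() {
	accessLog := &types.AccessLog{}
	accessLog.SetDefaults() // common format, keep-everything field defaults

	// Names is now an ordinary map[string]string; no custom Set/Get needed.
	accessLog.Fields.Names = map[string]string{"ClientUsername": "drop"}

	fmt.Println(accessLog.Format)             // common
	fmt.Println(accessLog.Fields.DefaultMode) // keep
}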
+func (f *AccessLogFields) SetDefaults() { + f.DefaultMode = AccessLogKeep + f.Headers = &FieldHeaders{ + DefaultMode: AccessLogKeep, + } } // Keep check if the field need to be kept or dropped diff --git a/pkg/types/logs_test.go b/pkg/types/logs_test.go deleted file mode 100644 index 0b1bf8ebc..000000000 --- a/pkg/types/logs_test.go +++ /dev/null @@ -1,419 +0,0 @@ -package types - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestStatusCodesSet(t *testing.T) { - testCases := []struct { - desc string - value string - expected StatusCodes - }{ - { - desc: "One value should return StatusCodes of size 1", - value: "200", - expected: StatusCodes{"200"}, - }, - { - desc: "Two values separated by comma should return StatusCodes of size 2", - value: "200,400", - expected: StatusCodes{"200", "400"}, - }, - { - desc: "Two values separated by semicolon should return StatusCodes of size 2", - value: "200;400", - expected: StatusCodes{"200", "400"}, - }, - { - desc: "Three values separated by comma and semicolon should return StatusCodes of size 3", - value: "200,400;500", - expected: StatusCodes{"200", "400", "500"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - var statusCodes StatusCodes - err := statusCodes.Set(test.value) - assert.Nil(t, err) - assert.Equal(t, test.expected, statusCodes) - }) - } -} - -func TestStatusCodesGet(t *testing.T) { - testCases := []struct { - desc string - values StatusCodes - expected StatusCodes - }{ - { - desc: "Should return 1 value", - values: StatusCodes{"200"}, - expected: StatusCodes{"200"}, - }, - { - desc: "Should return 2 values", - values: StatusCodes{"200", "400"}, - expected: StatusCodes{"200", "400"}, - }, - { - desc: "Should return 3 values", - values: StatusCodes{"200", "400", "500"}, - expected: StatusCodes{"200", "400", "500"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - actual := test.values.Get() - assert.Equal(t, test.expected, actual) - }) - } -} - -func TestStatusCodesString(t *testing.T) { - testCases := []struct { - desc string - values StatusCodes - expected string - }{ - { - desc: "Should return 1 value", - values: StatusCodes{"200"}, - expected: "[200]", - }, - { - desc: "Should return 2 values", - values: StatusCodes{"200", "400"}, - expected: "[200 400]", - }, - { - desc: "Should return 3 values", - values: StatusCodes{"200", "400", "500"}, - expected: "[200 400 500]", - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - actual := test.values.String() - assert.Equal(t, test.expected, actual) - }) - } -} - -func TestStatusCodesSetValue(t *testing.T) { - testCases := []struct { - desc string - values StatusCodes - expected StatusCodes - }{ - { - desc: "Should return 1 value", - values: StatusCodes{"200"}, - expected: StatusCodes{"200"}, - }, - { - desc: "Should return 2 values", - values: StatusCodes{"200", "400"}, - expected: StatusCodes{"200", "400"}, - }, - { - desc: "Should return 3 values", - values: StatusCodes{"200", "400", "500"}, - expected: StatusCodes{"200", "400", "500"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - var slice StatusCodes - slice.SetValue(test.values) - assert.Equal(t, test.expected, slice) - }) - } -} - -func TestFieldsNamesSet(t *testing.T) { - testCases := []struct { - desc string - value string - 
expected *FieldNames - }{ - { - desc: "One value should return FieldNames of size 1", - value: "field-1=foo", - expected: &FieldNames{ - "field-1": "foo", - }, - }, - { - desc: "Two values separated by space should return FieldNames of size 2", - value: "field-1=foo field-2=bar", - expected: &FieldNames{ - "field-1": "foo", - "field-2": "bar", - }, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - fieldsNames := &FieldNames{} - err := fieldsNames.Set(test.value) - assert.NoError(t, err) - - assert.Equal(t, test.expected, fieldsNames) - }) - } -} - -func TestFieldsNamesGet(t *testing.T) { - testCases := []struct { - desc string - values FieldNames - expected FieldNames - }{ - { - desc: "Should return 1 value", - values: FieldNames{"field-1": "foo"}, - expected: FieldNames{"field-1": "foo"}, - }, - { - desc: "Should return 2 values", - values: FieldNames{"field-1": "foo", "field-2": "bar"}, - expected: FieldNames{"field-1": "foo", "field-2": "bar"}, - }, - { - desc: "Should return 3 values", - values: FieldNames{"field-1": "foo", "field-2": "bar", "field-3": "powpow"}, - expected: FieldNames{"field-1": "foo", "field-2": "bar", "field-3": "powpow"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - actual := test.values.Get() - assert.Equal(t, test.expected, actual) - }) - } -} - -func TestFieldsNamesString(t *testing.T) { - testCases := []struct { - desc string - values FieldNames - expected string - }{ - { - desc: "Should return 1 value", - values: FieldNames{"field-1": "foo"}, - expected: "map[field-1:foo]", - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - actual := test.values.String() - assert.Equal(t, test.expected, actual) - }) - } -} - -func TestFieldsNamesSetValue(t *testing.T) { - testCases := []struct { - desc string - values FieldNames - expected *FieldNames - }{ - { - desc: "Should return 1 value", - values: FieldNames{"field-1": "foo"}, - expected: &FieldNames{"field-1": "foo"}, - }, - { - desc: "Should return 2 values", - values: FieldNames{"field-1": "foo", "field-2": "bar"}, - expected: &FieldNames{"field-1": "foo", "field-2": "bar"}, - }, - { - desc: "Should return 3 values", - values: FieldNames{"field-1": "foo", "field-2": "bar", "field-3": "powpow"}, - expected: &FieldNames{"field-1": "foo", "field-2": "bar", "field-3": "powpow"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - fieldsNames := &FieldNames{} - fieldsNames.SetValue(test.values) - assert.Equal(t, test.expected, fieldsNames) - }) - } -} - -func TestFieldsHeadersNamesSet(t *testing.T) { - testCases := []struct { - desc string - value string - expected *FieldHeaderNames - }{ - { - desc: "One value should return FieldNames of size 1", - value: "X-HEADER-1=foo", - expected: &FieldHeaderNames{ - "X-HEADER-1": "foo", - }, - }, - { - desc: "Two values separated by space should return FieldNames of size 2", - value: "X-HEADER-1=foo X-HEADER-2=bar", - expected: &FieldHeaderNames{ - "X-HEADER-1": "foo", - "X-HEADER-2": "bar", - }, - }, - { - desc: "Two values separated by space with escaped double quotes should return FieldNames of size 2", - value: "\"X-HEADER-1=foo X-HEADER-2=bar\"", - expected: &FieldHeaderNames{ - "X-HEADER-1": "foo", - "X-HEADER-2": "bar", - }, - }, - } - - for _, test := range testCases { - test := test - 
t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - headersNames := &FieldHeaderNames{} - err := headersNames.Set(test.value) - assert.NoError(t, err) - - assert.Equal(t, test.expected, headersNames) - }) - } -} - -func TestFieldsHeadersNamesGet(t *testing.T) { - testCases := []struct { - desc string - values FieldHeaderNames - expected FieldHeaderNames - }{ - { - desc: "Should return 1 value", - values: FieldHeaderNames{"X-HEADER-1": "foo"}, - expected: FieldHeaderNames{"X-HEADER-1": "foo"}, - }, - { - desc: "Should return 2 values", - values: FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar"}, - expected: FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar"}, - }, - { - desc: "Should return 3 values", - values: FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar", "X-HEADER-3": "powpow"}, - expected: FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar", "X-HEADER-3": "powpow"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - actual := test.values.Get() - assert.Equal(t, test.expected, actual) - }) - } -} - -func TestFieldsHeadersNamesString(t *testing.T) { - testCases := []struct { - desc string - values FieldHeaderNames - expected string - }{ - { - desc: "Should return 1 value", - values: FieldHeaderNames{"X-HEADER-1": "foo"}, - expected: "map[X-HEADER-1:foo]", - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - actual := test.values.String() - assert.Equal(t, test.expected, actual) - }) - } -} - -func TestFieldsHeadersNamesSetValue(t *testing.T) { - testCases := []struct { - desc string - values FieldHeaderNames - expected *FieldHeaderNames - }{ - { - desc: "Should return 1 value", - values: FieldHeaderNames{"X-HEADER-1": "foo"}, - expected: &FieldHeaderNames{"X-HEADER-1": "foo"}, - }, - { - desc: "Should return 2 values", - values: FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar"}, - expected: &FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar"}, - }, - { - desc: "Should return 3 values", - values: FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar", "X-HEADER-3": "powpow"}, - expected: &FieldHeaderNames{"X-HEADER-1": "foo", "X-HEADER-2": "bar", "X-HEADER-3": "powpow"}, - }, - } - - for _, test := range testCases { - test := test - t.Run(test.desc, func(t *testing.T) { - t.Parallel() - - headersNames := &FieldHeaderNames{} - headersNames.SetValue(test.values) - assert.Equal(t, test.expected, headersNames) - }) - } -} diff --git a/pkg/types/metrics.go b/pkg/types/metrics.go index 38a660825..736f4d306 100644 --- a/pkg/types/metrics.go +++ b/pkg/types/metrics.go @@ -1,82 +1,79 @@ package types import ( - "fmt" - "strconv" - "strings" + "time" ) // Metrics provides options to expose and send Traefik metrics to different third party monitoring systems type Metrics struct { - Prometheus *Prometheus `description:"Prometheus metrics exporter type" export:"true"` - Datadog *Datadog `description:"DataDog metrics exporter type" export:"true"` - StatsD *Statsd `description:"StatsD metrics exporter type" export:"true"` - InfluxDB *InfluxDB `description:"InfluxDB metrics exporter type"` + Prometheus *Prometheus `description:"Prometheus metrics exporter type." export:"true" label:"allowEmpty"` + Datadog *Datadog `description:"DataDog metrics exporter type." export:"true" label:"allowEmpty"` + StatsD *Statsd `description:"StatsD metrics exporter type." 
export:"true" label:"allowEmpty"` + InfluxDB *InfluxDB `description:"InfluxDB metrics exporter type." label:"allowEmpty"` } // Prometheus can contain specific configuration used by the Prometheus Metrics exporter type Prometheus struct { - Buckets Buckets `description:"Buckets for latency metrics" export:"true"` - EntryPoint string `description:"EntryPoint" export:"true"` - Middlewares []string `description:"Middlewares" export:"true"` + Buckets []float64 `description:"Buckets for latency metrics." export:"true"` + EntryPoint string `description:"EntryPoint." export:"true"` + Middlewares []string `description:"Middlewares." export:"true"` +} + +// SetDefaults sets the default values. +func (p *Prometheus) SetDefaults() { + p.Buckets = []float64{0.1, 0.3, 1.2, 5} + p.EntryPoint = "traefik" + // FIXME p.EntryPoint = static.DefaultInternalEntryPointName } // Datadog contains address and metrics pushing interval configuration type Datadog struct { - Address string `description:"DataDog's address"` - PushInterval string `description:"DataDog push interval" export:"true"` + Address string `description:"DataDog's address."` + PushInterval Duration `description:"DataDog push interval." export:"true"` +} + +// SetDefaults sets the default values. +func (d *Datadog) SetDefaults() { + d.Address = "localhost:8125" + d.PushInterval = Duration(10 * time.Second) } // Statsd contains address and metrics pushing interval configuration type Statsd struct { - Address string `description:"StatsD address"` - PushInterval string `description:"StatsD push interval" export:"true"` + Address string `description:"StatsD address."` + PushInterval Duration `description:"StatsD push interval." export:"true"` +} + +// SetDefaults sets the default values. +func (s *Statsd) SetDefaults() { + s.Address = "localhost:8125" + s.PushInterval = Duration(10 * time.Second) } // InfluxDB contains address, login and metrics pushing interval configuration type InfluxDB struct { - Address string `description:"InfluxDB address"` - Protocol string `description:"InfluxDB address protocol (udp or http)"` - PushInterval string `description:"InfluxDB push interval" export:"true"` - Database string `description:"InfluxDB database used when protocol is http" export:"true"` - RetentionPolicy string `description:"InfluxDB retention policy used when protocol is http" export:"true"` - Username string `description:"InfluxDB username (only with http)" export:"true"` - Password string `description:"InfluxDB password (only with http)" export:"true"` + Address string `description:"InfluxDB address."` + Protocol string `description:"InfluxDB address protocol (udp or http)."` + PushInterval Duration `description:"InfluxDB push interval." export:"true"` + Database string `description:"InfluxDB database used when protocol is http." export:"true"` + RetentionPolicy string `description:"InfluxDB retention policy used when protocol is http." export:"true"` + Username string `description:"InfluxDB username (only with http)." export:"true"` + Password string `description:"InfluxDB password (only with http)." export:"true"` +} + +// SetDefaults sets the default values. +func (i *InfluxDB) SetDefaults() { + i.Address = "localhost:8089" + i.Protocol = "udp" + i.PushInterval = Duration(10 * time.Second) } // Statistics provides options for monitoring request and response stats type Statistics struct { - RecentErrors int `description:"Number of recent errors logged" export:"true"` + RecentErrors int `description:"Number of recent errors logged." 
export:"true"` } -// Buckets holds Prometheus Buckets -type Buckets []float64 - -// Set adds strings elem into the the parser -// it splits str on "," and ";" and apply ParseFloat to string -func (b *Buckets) Set(str string) error { - fargs := func(c rune) bool { - return c == ',' || c == ';' - } - // get function - slice := strings.FieldsFunc(str, fargs) - for _, bucket := range slice { - bu, err := strconv.ParseFloat(bucket, 64) - if err != nil { - return err - } - *b = append(*b, bu) - } - return nil -} - -// Get []float64 -func (b *Buckets) Get() interface{} { return *b } - -// String return slice in a string -func (b *Buckets) String() string { return fmt.Sprintf("%v", *b) } - -// SetValue sets []float64 into the parser -func (b *Buckets) SetValue(val interface{}) { - *b = val.(Buckets) +// SetDefaults sets the default values. +func (s *Statistics) SetDefaults() { + s.RecentErrors = 10 } diff --git a/vendor/github.com/abronan/valkeyrie/valkeyrie.go b/vendor/github.com/abronan/valkeyrie/valkeyrie.go deleted file mode 100644 index 5bd7964b5..000000000 --- a/vendor/github.com/abronan/valkeyrie/valkeyrie.go +++ /dev/null @@ -1,40 +0,0 @@ -package valkeyrie - -import ( - "fmt" - "sort" - "strings" - - "github.com/abronan/valkeyrie/store" -) - -// Initialize creates a new Store object, initializing the client -type Initialize func(addrs []string, options *store.Config) (store.Store, error) - -var ( - // Backend initializers - initializers = make(map[store.Backend]Initialize) - - supportedBackend = func() string { - keys := make([]string, 0, len(initializers)) - for k := range initializers { - keys = append(keys, string(k)) - } - sort.Strings(keys) - return strings.Join(keys, ", ") - }() -) - -// NewStore creates an instance of store -func NewStore(backend store.Backend, addrs []string, options *store.Config) (store.Store, error) { - if init, exists := initializers[backend]; exists { - return init(addrs, options) - } - - return nil, fmt.Errorf("%s %s", store.ErrBackendNotSupported.Error(), supportedBackend) -} - -// AddStore adds a new store backend to valkeyrie -func AddStore(store store.Backend, init Initialize) { - initializers[store] = init -} diff --git a/vendor/github.com/containous/flaeg/LICENSE.md b/vendor/github.com/containous/flaeg/LICENSE.md deleted file mode 100644 index 14d0fd105..000000000 --- a/vendor/github.com/containous/flaeg/LICENSE.md +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Containous SAS, Emile Vauge, emile@vauge.com - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/containous/flaeg/flaeg.go b/vendor/github.com/containous/flaeg/flaeg.go deleted file mode 100644 index b643a5656..000000000 --- a/vendor/github.com/containous/flaeg/flaeg.go +++ /dev/null @@ -1,742 +0,0 @@ -package flaeg - -import ( - "errors" - "fmt" - "io" - "io/ioutil" - "os" - "path" - "reflect" - "sort" - "strings" - "text/tabwriter" - "text/template" - - "github.com/containous/flaeg/parse" - flag "github.com/ogier/pflag" -) - -// ErrParserNotFound is thrown when a field is flaged but not parser match its type -var ErrParserNotFound = errors.New("parser not found or custom parser missing") - -// GetTypesRecursive links in flagMap a flag with its reflect.StructField -// You can whether provide objValue on a structure or a pointer to structure as first argument -// Flags are generated from field name or from StructTag -func getTypesRecursive(objValue reflect.Value, flagMap map[string]reflect.StructField, key string) error { - name := key - switch objValue.Kind() { - case reflect.Struct: - for i := 0; i < objValue.NumField(); i++ { - if objValue.Type().Field(i).Anonymous { - if err := getTypesRecursive(objValue.Field(i), flagMap, name); err != nil { - return err - } - } else if len(objValue.Type().Field(i).Tag.Get("description")) > 0 { - fieldName := objValue.Type().Field(i).Name - if !isExported(fieldName) { - return fmt.Errorf("field %s is an unexported field", fieldName) - } - - if tag := objValue.Type().Field(i).Tag.Get("long"); len(tag) > 0 { - fieldName = tag - } - - if len(key) == 0 { - name = strings.ToLower(fieldName) - } else { - name = key + "." + strings.ToLower(fieldName) - } - - if _, ok := flagMap[name]; ok { - return fmt.Errorf("tag already exists: %s", name) - } - flagMap[name] = objValue.Type().Field(i) - - if err := getTypesRecursive(objValue.Field(i), flagMap, name); err != nil { - return err - } - } - } - case reflect.Ptr: - if len(key) > 0 { - field := flagMap[name] - field.Type = reflect.TypeOf(false) - flagMap[name] = field - } - - typ := objValue.Type().Elem() - inst := reflect.New(typ).Elem() - - if err := getTypesRecursive(inst, flagMap, name); err != nil { - return err - } - default: - return nil - } - return nil -} - -// GetBoolFlags returns flags on pointers -func GetBoolFlags(config interface{}) ([]string, error) { - flagMap := make(map[string]reflect.StructField) - if err := getTypesRecursive(reflect.ValueOf(config), flagMap, ""); err != nil { - return []string{}, err - } - - flags := make([]string, 0, len(flagMap)) - for f, structField := range flagMap { - if structField.Type.Kind() == reflect.Bool { - flags = append(flags, f) - } - } - return flags, nil -} - -// GetFlags returns flags -func GetFlags(config interface{}) ([]string, error) { - flagMap := make(map[string]reflect.StructField) - if err := getTypesRecursive(reflect.ValueOf(config), flagMap, ""); err != nil { - return []string{}, err - } - - flags := make([]string, 0, len(flagMap)) - for f := range flagMap { - flags = append(flags, f) - } - return flags, nil -} - -// ParseArgs : parses args return a map[flag]Getter, using parsers map[type]Getter -// args must be formatted as like as flag documentation. 
See https://golang.org/pkg/flag -func parseArgs(args []string, flagMap map[string]reflect.StructField, parsers map[reflect.Type]parse.Parser) (map[string]parse.Parser, error) { - newParsers := map[string]parse.Parser{} - flagSet := flag.NewFlagSet("flaeg.Load", flag.ContinueOnError) - - // Disable output - flagSet.SetOutput(ioutil.Discard) - - var err error - for flg, structField := range flagMap { - if parser, ok := parsers[structField.Type]; ok { - newParserValue := reflect.New(reflect.TypeOf(parser).Elem()) - newParserValue.Elem().Set(reflect.ValueOf(parser).Elem()) - newParser := newParserValue.Interface().(parse.Parser) - - if short := structField.Tag.Get("short"); len(short) == 1 { - flagSet.VarP(newParser, flg, short, structField.Tag.Get("description")) - } else { - flagSet.Var(newParser, flg, structField.Tag.Get("description")) - } - newParsers[flg] = newParser - } else { - err = ErrParserNotFound - } - } - - // prevents case sensitivity issue - args = argsToLower(args) - if errParse := flagSet.Parse(args); errParse != nil { - return nil, errParse - } - - // Visitor in flag.Parse - var flagList []*flag.Flag - visitor := func(fl *flag.Flag) { - flagList = append(flagList, fl) - } - - // Fill flagList with parsed flags - flagSet.Visit(visitor) - - // Return var - valMap := make(map[string]parse.Parser) - - // Return parsers on parsed flag - for _, flg := range flagList { - valMap[flg.Name] = newParsers[flg.Name] - } - - return valMap, err -} - -func getDefaultValue(defaultValue reflect.Value, defaultPointersValue reflect.Value, defaultValmap map[string]reflect.Value, key string) error { - if defaultValue.Type() != defaultPointersValue.Type() { - return fmt.Errorf("parameters defaultValue and defaultPointersValue must be the same struct. defaultValue type: %s is not defaultPointersValue type: %s", defaultValue.Type().String(), defaultPointersValue.Type().String()) - } - - name := key - switch defaultValue.Kind() { - case reflect.Struct: - for i := 0; i < defaultValue.NumField(); i++ { - if defaultValue.Type().Field(i).Anonymous { - if err := getDefaultValue(defaultValue.Field(i), defaultPointersValue.Field(i), defaultValmap, name); err != nil { - return err - } - } else if len(defaultValue.Type().Field(i).Tag.Get("description")) > 0 { - fieldName := defaultValue.Type().Field(i).Name - if tag := defaultValue.Type().Field(i).Tag.Get("long"); len(tag) > 0 { - fieldName = tag - } - - if len(key) == 0 { - name = strings.ToLower(fieldName) - } else { - name = key + "." 
+ strings.ToLower(fieldName) - } - - if defaultValue.Field(i).Kind() != reflect.Ptr { - defaultValmap[name] = defaultValue.Field(i) - } - if err := getDefaultValue(defaultValue.Field(i), defaultPointersValue.Field(i), defaultValmap, name); err != nil { - return err - } - } - } - case reflect.Ptr: - if !defaultPointersValue.IsNil() { - if len(key) != 0 { - // turn ptr fields to nil - defaultPointersNilValue, err := setPointersNil(defaultPointersValue) - if err != nil { - return err - } - defaultValmap[name] = defaultPointersNilValue - } - - if !defaultValue.IsNil() { - if err := getDefaultValue(defaultValue.Elem(), defaultPointersValue.Elem(), defaultValmap, name); err != nil { - return err - } - } else { - if err := getDefaultValue(defaultPointersValue.Elem(), defaultPointersValue.Elem(), defaultValmap, name); err != nil { - return err - } - } - } else { - instValue := reflect.New(defaultPointersValue.Type().Elem()) - if len(key) != 0 { - defaultValmap[name] = instValue - } - - if !defaultValue.IsNil() { - if err := getDefaultValue(defaultValue.Elem(), instValue.Elem(), defaultValmap, name); err != nil { - return err - } - } else { - if err := getDefaultValue(instValue.Elem(), instValue.Elem(), defaultValmap, name); err != nil { - return err - } - } - } - default: - return nil - } - return nil -} - -// objValue a reflect.Value of a not-nil pointer on a struct -func setPointersNil(objValue reflect.Value) (reflect.Value, error) { - if objValue.Kind() != reflect.Ptr { - return objValue, fmt.Errorf("parameters objValue must be a not-nil pointer on a struct, not a %s", objValue.Kind()) - } else if objValue.IsNil() { - return objValue, errors.New("parameters objValue must be a not-nil pointer") - } else if objValue.Elem().Kind() != reflect.Struct { - // fmt.Printf("Parameters objValue must be a not-nil pointer on a struct, not a pointer on a %s\n", objValue.Elem().Kind().String()) - return objValue, nil - } - - // Clone - starObjValue := objValue.Elem() - nilPointersObjVal := reflect.New(starObjValue.Type()) - starNilPointersObjVal := nilPointersObjVal.Elem() - starNilPointersObjVal.Set(starObjValue) - - for i := 0; i < nilPointersObjVal.Elem().NumField(); i++ { - if field := nilPointersObjVal.Elem().Field(i); field.Kind() == reflect.Ptr && field.CanSet() { - field.Set(reflect.Zero(field.Type())) - } - } - return nilPointersObjVal, nil -} - -// FillStructRecursive initialize a value of any tagged Struct given by reference -func fillStructRecursive(objValue reflect.Value, defaultPointerValMap map[string]reflect.Value, valMap map[string]parse.Parser, key string) error { - name := key - switch objValue.Kind() { - case reflect.Struct: - - for i := 0; i < objValue.Type().NumField(); i++ { - if objValue.Type().Field(i).Anonymous { - if err := fillStructRecursive(objValue.Field(i), defaultPointerValMap, valMap, name); err != nil { - return err - } - } else if len(objValue.Type().Field(i).Tag.Get("description")) > 0 { - fieldName := objValue.Type().Field(i).Name - if tag := objValue.Type().Field(i).Tag.Get("long"); len(tag) > 0 { - fieldName = tag - } - - if len(key) == 0 { - name = strings.ToLower(fieldName) - } else { - name = key + "." 
+ strings.ToLower(fieldName) - } - - if objValue.Field(i).Kind() != reflect.Ptr { - if val, ok := valMap[name]; ok { - if err := setFields(objValue.Field(i), val); err != nil { - return err - } - } - } - - if err := fillStructRecursive(objValue.Field(i), defaultPointerValMap, valMap, name); err != nil { - return err - } - } - } - - case reflect.Ptr: - if len(key) == 0 && !objValue.IsNil() { - return fillStructRecursive(objValue.Elem(), defaultPointerValMap, valMap, name) - } - - contains := false - for flg := range valMap { - // TODO replace by regexp - if strings.HasPrefix(flg, name+".") { - contains = true - break - } - } - - needDefault := false - if _, ok := valMap[name]; ok { - needDefault = valMap[name].Get().(bool) - } - if contains && objValue.IsNil() { - needDefault = true - } - - if needDefault { - if defVal, ok := defaultPointerValMap[name]; ok { - // set default pointer value - objValue.Set(defVal) - } else { - return fmt.Errorf("flag %s default value not provided", name) - } - } - - if !objValue.IsNil() && contains { - if objValue.Type().Elem().Kind() == reflect.Struct { - if err := fillStructRecursive(objValue.Elem(), defaultPointerValMap, valMap, name); err != nil { - return err - } - } - } - default: - return nil - } - return nil -} - -// SetFields sets value to fieldValue using tag as key in valMap -func setFields(fieldValue reflect.Value, val parse.Parser) error { - if fieldValue.CanSet() { - fieldValue.Set(reflect.ValueOf(val).Elem().Convert(fieldValue.Type())) - } else { - return fmt.Errorf("%s is not settable", fieldValue.Type().String()) - } - return nil -} - -// PrintHelp generates and prints command line help -func PrintHelp(flagMap map[string]reflect.StructField, defaultValmap map[string]reflect.Value, parsers map[reflect.Type]parse.Parser) error { - return PrintHelpWithCommand(flagMap, defaultValmap, parsers, nil, nil) -} - -// PrintError takes a not nil error and prints command line help -func PrintError(err error, flagMap map[string]reflect.StructField, defaultValmap map[string]reflect.Value, parsers map[reflect.Type]parse.Parser) error { - if err != flag.ErrHelp { - fmt.Printf("Error: %s\n", err) - } - if !strings.Contains(err.Error(), ":No parser for type") { - PrintHelp(flagMap, defaultValmap, parsers) - } - return err -} - -// LoadWithParsers initializes config : struct fields given by reference, with args : arguments. -// Some custom parsers may be given. -func LoadWithParsers(config interface{}, defaultValue interface{}, args []string, customParsers map[reflect.Type]parse.Parser) error { - cmd := &Command{ - Config: config, - DefaultPointersConfig: defaultValue, - } - _, cmd.Name = path.Split(os.Args[0]) - return LoadWithCommand(cmd, args, customParsers, nil) -} - -// Load initializes config : struct fields given by reference, with args : arguments. -// Some custom parsers may be given. 
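// Illustrative sketch (hypothetical Config/Logging names) of how the flaeg API
// deleted above was driven: flags are derived from struct fields carrying a
// `description` tag, names are the lowercased dotted field path (a `long` tag
// overrides the name, a one-letter `short` tag adds a shorthand), and pointer
// fields surface as boolean "enable" flags whose values come from the second
// argument to Load.
package main

import (
	"log"
	"os"

	"github.com/containous/flaeg"
)

type Logging struct {
	Level string `description:"Log level" short:"l"`
}

type Config struct {
	Debug   bool     `description:"Enable debug mode"`
	Logging *Logging `description:"Logging options"` // exposed as the boolean flag --logging
}

func main() {
	cfg := &Config{Logging: &Logging{Level: "ERROR"}}      // current values
	defaults := &Config{Logging: &Logging{Level: "DEBUG"}} // applied when --logging is set

	// Generated flags: --debug, --logging, --logging.level (-l), --help (-h).
	if err := flaeg.Load(cfg, defaults, os.Args[1:]); err != nil {
		log.Fatal(err)
	}
	log.Printf("%+v %+v", cfg, cfg.Logging)
}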
-func Load(config interface{}, defaultValue interface{}, args []string) error { - return LoadWithParsers(config, defaultValue, args, nil) -} - -// Command structure contains program/command information (command name and description) -// Config must be a pointer on the configuration struct to parse (it contains default values of field) -// DefaultPointersConfig contains default pointers values: those values are set on pointers fields if their flags are called -// It must be the same type(struct) as Config -// Run is the func which launch the program using initialized configuration structure -type Command struct { - Name string - Description string - Config interface{} - DefaultPointersConfig interface{} // TODO: case DefaultPointersConfig is nil - Run func() error - Metadata map[string]string - HideHelp bool -} - -// LoadWithCommand initializes config : struct fields given by reference, with args : arguments. -// Some custom parsers and some subCommand may be given. -func LoadWithCommand(cmd *Command, cmdArgs []string, customParsers map[reflect.Type]parse.Parser, subCommand []*Command) error { - parsers, err := parse.LoadParsers(customParsers) - if err != nil { - return err - } - - tagsMap := make(map[string]reflect.StructField) - if err := getTypesRecursive(reflect.ValueOf(cmd.Config), tagsMap, ""); err != nil { - return err - } - defaultValMap := make(map[string]reflect.Value) - if err := getDefaultValue(reflect.ValueOf(cmd.Config), reflect.ValueOf(cmd.DefaultPointersConfig), defaultValMap, ""); err != nil { - return err - } - - valMap, errParseArgs := parseArgs(cmdArgs, tagsMap, parsers) - if errParseArgs != nil && errParseArgs != ErrParserNotFound { - return PrintErrorWithCommand(errParseArgs, tagsMap, defaultValMap, parsers, cmd, subCommand) - } - - if err := fillStructRecursive(reflect.ValueOf(cmd.Config), defaultValMap, valMap, ""); err != nil { - return err - } - - if errParseArgs == ErrParserNotFound { - return errParseArgs - } - - return nil -} - -// PrintHelpWithCommand generates and prints command line help for a Command -func PrintHelpWithCommand(flagMap map[string]reflect.StructField, defaultValMap map[string]reflect.Value, parsers map[reflect.Type]parse.Parser, cmd *Command, subCmd []*Command) error { - // Hide command from help - if cmd != nil && cmd.HideHelp { - return fmt.Errorf("command %s not found", cmd.Name) - } - - // Define a templates - // Using POSXE STD : http://pubs.opengroup.org/onlinepubs/9699919799/ - const helper = `{{if .ProgDescription}}{{.ProgDescription}} - -{{end}}Usage: {{.ProgName}} [flags] [] - -Use "{{.ProgName}} --help" for help on any command. -{{if .SubCommands}} -Commands:{{range $subCmdName, $subCmdDesc := .SubCommands}} -{{printf "\t%-50s %s" $subCmdName $subCmdDesc}}{{end}} -{{end}} -Flag's usage: {{.ProgName}} [--flag=flag_argument] [-f[flag_argument]] ... set flag_argument to flag(s) - or: {{.ProgName}} [--flag[=true|false| ]] [-f[true|false| ]] ... 
set true/false to boolean flag(s) - -Flags: -` - // Use a struct to give data to template - type TempStruct struct { - ProgName string - ProgDescription string - SubCommands map[string]string - } - tempStruct := TempStruct{} - if cmd != nil { - tempStruct.ProgName = cmd.Name - tempStruct.ProgDescription = cmd.Description - tempStruct.SubCommands = map[string]string{} - if len(subCmd) > 1 && cmd == subCmd[0] { - for _, c := range subCmd[1:] { - if !c.HideHelp { - tempStruct.SubCommands[c.Name] = c.Description - } - } - } - } else { - _, tempStruct.ProgName = path.Split(os.Args[0]) - } - - // Run Template - tmplHelper, err := template.New("helper").Parse(helper) - if err != nil { - return err - } - err = tmplHelper.Execute(os.Stdout, tempStruct) - if err != nil { - return err - } - - return printFlagsDescriptionsDefaultValues(flagMap, defaultValMap, parsers, os.Stdout) -} - -func printFlagsDescriptionsDefaultValues(flagMap map[string]reflect.StructField, defaultValMap map[string]reflect.Value, parsers map[reflect.Type]parse.Parser, output io.Writer) error { - // Sort alphabetically & Delete unparsable flags in a slice - var flags []string - for flg, field := range flagMap { - if _, ok := parsers[field.Type]; ok { - flags = append(flags, flg) - } - } - sort.Strings(flags) - - // Process data - var descriptions []string - var defaultValues []string - var flagsWithDash []string - var shortFlagsWithDash []string - for _, flg := range flags { - field := flagMap[flg] - if short := field.Tag.Get("short"); len(short) == 1 { - shortFlagsWithDash = append(shortFlagsWithDash, "-"+short+",") - } else { - shortFlagsWithDash = append(shortFlagsWithDash, "") - } - flagsWithDash = append(flagsWithDash, "--"+flg) - - // flag on pointer ? - if defVal, ok := defaultValMap[flg]; ok { - if defVal.Kind() != reflect.Ptr { - // Set defaultValue on parsers - parsers[field.Type].SetValue(defaultValMap[flg].Interface()) - } - - if defVal := parsers[field.Type].String(); len(defVal) > 0 { - defaultValues = append(defaultValues, fmt.Sprintf("(default \"%s\")", defVal)) - } else { - defaultValues = append(defaultValues, "") - } - } - - splittedDescriptions := split(field.Tag.Get("description"), 80) - for i, description := range splittedDescriptions { - descriptions = append(descriptions, description) - if i != 0 { - defaultValues = append(defaultValues, "") - flagsWithDash = append(flagsWithDash, "") - shortFlagsWithDash = append(shortFlagsWithDash, "") - } - } - } - - // add help flag - shortFlagsWithDash = append(shortFlagsWithDash, "-h,") - flagsWithDash = append(flagsWithDash, "--help") - descriptions = append(descriptions, "Print Help (this message) and exit") - defaultValues = append(defaultValues, "") - - return displayTab(output, shortFlagsWithDash, flagsWithDash, descriptions, defaultValues) -} - -func split(str string, width int) []string { - if len(str) > width { - index := strings.LastIndex(str[:width], " ") - if index == -1 { - index = width - } - - return append([]string{strings.TrimSpace(str[:index])}, split(strings.TrimSpace(str[index:]), width)...) 
- } - return []string{str} -} - -func displayTab(output io.Writer, columns ...[]string) error { - w := new(tabwriter.Writer) - w.Init(output, 0, 4, 1, ' ', 0) - - nbRow := len(columns[0]) - nbCol := len(columns) - - for i := 0; i < nbRow; i++ { - row := "" - for j, col := range columns { - row += col[i] - if j != nbCol-1 { - row += "\t" - } - } - fmt.Fprintln(w, row) - } - - return w.Flush() -} - -// PrintErrorWithCommand takes a not nil error and prints command line help -func PrintErrorWithCommand(err error, flagMap map[string]reflect.StructField, defaultValMap map[string]reflect.Value, parsers map[reflect.Type]parse.Parser, cmd *Command, subCmd []*Command) error { - if err != flag.ErrHelp { - fmt.Printf("Error here : %s\n", err) - } - - if errHelp := PrintHelpWithCommand(flagMap, defaultValMap, parsers, cmd, subCmd); errHelp != nil { - return errHelp - } - - return err -} - -// Flaeg struct contains commands (at least the root one) -// and row arguments (command and/or flags) -// a map of custom parsers could be use -type Flaeg struct { - calledCommand *Command - commands []*Command // rootCommand is th fist one in this slice - args []string - commandArgs []string - customParsers map[reflect.Type]parse.Parser -} - -// New creates and initialize a pointer on Flaeg -func New(rootCommand *Command, args []string) *Flaeg { - var f Flaeg - f.commands = []*Command{rootCommand} - f.args = args - f.customParsers = map[reflect.Type]parse.Parser{} - return &f -} - -// AddCommand adds sub-command to the root command -func (f *Flaeg) AddCommand(command *Command) { - f.commands = append(f.commands, command) -} - -// AddParser adds custom parser for a type to the map of custom parsers -func (f *Flaeg) AddParser(typ reflect.Type, parser parse.Parser) { - f.customParsers[typ] = parser -} - -// Run calls the command with flags given as arguments -func (f *Flaeg) Run() error { - if f.calledCommand == nil { - if _, _, err := f.findCommandWithCommandArgs(); err != nil { - return err - } - } - - if _, err := f.Parse(f.calledCommand); err != nil { - return err - } - return f.calledCommand.Run() -} - -// Parse calls Flaeg Load Function end returns the parsed command structure (by reference) -// It returns nil and a not nil error if it fails -func (f *Flaeg) Parse(cmd *Command) (*Command, error) { - if f.calledCommand == nil { - f.commandArgs = f.args - } - - if err := LoadWithCommand(cmd, f.commandArgs, f.customParsers, f.commands); err != nil { - return cmd, err - } - return cmd, nil -} - -// splitArgs takes args (type []string) and return command ("" if rootCommand) and command's args -func splitArgs(args []string) (string, []string) { - if len(args) >= 1 && len(args[0]) >= 1 && string(args[0][0]) != "-" { - if len(args) == 1 { - return strings.ToLower(args[0]), []string{} - } - return strings.ToLower(args[0]), args[1:] - } - return "", args -} - -// findCommandWithCommandArgs returns the called command (by reference) and command's args -// the error returned is not nil if it fails -func (f *Flaeg) findCommandWithCommandArgs() (*Command, []string, error) { - var commandName string - commandName, f.commandArgs = splitArgs(f.args) - if len(commandName) > 0 { - for _, command := range f.commands { - if commandName == command.Name { - f.calledCommand = command - return f.calledCommand, f.commandArgs, nil - } - } - return nil, []string{}, fmt.Errorf("command %s not found", commandName) - } - - f.calledCommand = f.commands[0] - return f.calledCommand, f.commandArgs, nil -} - -// GetCommand splits args and returns 
the called command (by reference) -// It returns nil and a not nil error if it fails -func (f *Flaeg) GetCommand() (*Command, error) { - if f.calledCommand == nil { - _, _, err := f.findCommandWithCommandArgs() - return f.calledCommand, err - } - return f.calledCommand, nil -} - -// isExported return true is the field (from fieldName) is exported, -// else false -func isExported(fieldName string) bool { - if len(fieldName) < 1 { - return false - } - - if string(fieldName[0]) == strings.ToUpper(string(fieldName[0])) { - return true - } - - return false -} - -func argToLower(inArg string) string { - if len(inArg) < 2 { - return strings.ToLower(inArg) - } - - var outArg string - dashIndex := strings.Index(inArg, "--") - if dashIndex == -1 { - if dashIndex = strings.Index(inArg, "-"); dashIndex == -1 { - return inArg - } - // -fValue - outArg = strings.ToLower(inArg[dashIndex:dashIndex+2]) + inArg[dashIndex+2:] - return outArg - } - - // --flag - if equalIndex := strings.Index(inArg, "="); equalIndex != -1 { - // --flag=value - outArg = strings.ToLower(inArg[dashIndex:equalIndex]) + inArg[equalIndex:] - } else { - // --boolflag - outArg = strings.ToLower(inArg[dashIndex:]) - } - - return outArg -} - -func argsToLower(inArgs []string) []string { - outArgs := make([]string, len(inArgs)) - for i, inArg := range inArgs { - outArgs[i] = argToLower(inArg) - } - return outArgs -} diff --git a/vendor/github.com/containous/flaeg/flaeg_types.go b/vendor/github.com/containous/flaeg/flaeg_types.go deleted file mode 100644 index 54ee8f600..000000000 --- a/vendor/github.com/containous/flaeg/flaeg_types.go +++ /dev/null @@ -1,7 +0,0 @@ -package flaeg - -import "github.com/containous/flaeg/parse" - -// Duration is deprecated use parse.Duration instead -// Deprecated -type Duration = parse.Duration diff --git a/vendor/github.com/containous/flaeg/parse/parse.go b/vendor/github.com/containous/flaeg/parse/parse.go deleted file mode 100644 index dbfd1dce5..000000000 --- a/vendor/github.com/containous/flaeg/parse/parse.go +++ /dev/null @@ -1,313 +0,0 @@ -package parse - -import ( - "encoding/json" - "flag" - "fmt" - "reflect" - "strconv" - "strings" - "time" -) - -// Parser is an interface that allows the contents of a flag.Getter to be set. -type Parser interface { - flag.Getter - SetValue(interface{}) -} - -// BoolValue bool Value type -type BoolValue bool - -// Set sets bool value from the given string value. -func (b *BoolValue) Set(s string) error { - v, err := strconv.ParseBool(s) - *b = BoolValue(v) - return err -} - -// Get returns the bool value. -func (b *BoolValue) Get() interface{} { return bool(*b) } - -func (b *BoolValue) String() string { return fmt.Sprintf("%v", *b) } - -// IsBoolFlag return true -func (b *BoolValue) IsBoolFlag() bool { return true } - -// SetValue sets the duration from the given bool-asserted value. -func (b *BoolValue) SetValue(val interface{}) { - *b = BoolValue(val.(bool)) -} - -// BoolFlag optional interface to indicate boolean flags that can be -// supplied without "=value" text -type BoolFlag interface { - flag.Value - IsBoolFlag() bool -} - -// IntValue int Value -type IntValue int - -// Set sets int value from the given string value. -func (i *IntValue) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 64) - *i = IntValue(v) - return err -} - -// Get returns the int value. -func (i *IntValue) Get() interface{} { return int(*i) } - -func (i *IntValue) String() string { return fmt.Sprintf("%v", *i) } - -// SetValue sets the IntValue from the given int-asserted value. 
-func (i *IntValue) SetValue(val interface{}) { - *i = IntValue(val.(int)) -} - -// Int64Value int64 Value -type Int64Value int64 - -// Set sets int64 value from the given string value. -func (i *Int64Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 64) - *i = Int64Value(v) - return err -} - -// Get returns the int64 value. -func (i *Int64Value) Get() interface{} { return int64(*i) } - -func (i *Int64Value) String() string { return fmt.Sprintf("%v", *i) } - -// SetValue sets the Int64Value from the given int64-asserted value. -func (i *Int64Value) SetValue(val interface{}) { - *i = Int64Value(val.(int64)) -} - -// UintValue uint Value -type UintValue uint - -// Set sets uint value from the given string value. -func (i *UintValue) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 64) - *i = UintValue(v) - return err -} - -// Get returns the uint value. -func (i *UintValue) Get() interface{} { return uint(*i) } - -func (i *UintValue) String() string { return fmt.Sprintf("%v", *i) } - -// SetValue sets the UintValue from the given uint-asserted value. -func (i *UintValue) SetValue(val interface{}) { - *i = UintValue(val.(uint)) -} - -// Uint64Value uint64 Value -type Uint64Value uint64 - -// Set sets uint64 value from the given string value. -func (i *Uint64Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 64) - *i = Uint64Value(v) - return err -} - -// Get returns the uint64 value. -func (i *Uint64Value) Get() interface{} { return uint64(*i) } - -func (i *Uint64Value) String() string { return fmt.Sprintf("%v", *i) } - -// SetValue sets the Uint64Value from the given uint64-asserted value. -func (i *Uint64Value) SetValue(val interface{}) { - *i = Uint64Value(val.(uint64)) -} - -// StringValue string Value -type StringValue string - -// Set sets string value from the given string value. -func (s *StringValue) Set(val string) error { - *s = StringValue(val) - return nil -} - -// Get returns the string value. -func (s *StringValue) Get() interface{} { return string(*s) } - -func (s *StringValue) String() string { return string(*s) } - -// SetValue sets the StringValue from the given string-asserted value. -func (s *StringValue) SetValue(val interface{}) { - *s = StringValue(val.(string)) -} - -// Float64Value float64 Value -type Float64Value float64 - -// Set sets float64 value from the given string value. -func (f *Float64Value) Set(s string) error { - v, err := strconv.ParseFloat(s, 64) - *f = Float64Value(v) - return err -} - -// Get returns the float64 value. -func (f *Float64Value) Get() interface{} { return float64(*f) } - -func (f *Float64Value) String() string { return fmt.Sprintf("%v", *f) } - -// SetValue sets the Float64Value from the given float64-asserted value. -func (f *Float64Value) SetValue(val interface{}) { - *f = Float64Value(val.(float64)) -} - -// Duration is a custom type suitable for parsing duration values. -// It supports `time.ParseDuration`-compatible values and suffix-less digits; in -// the latter case, seconds are assumed. -type Duration time.Duration - -// Set sets the duration from the given string value. -func (d *Duration) Set(s string) error { - if v, err := strconv.ParseInt(s, 10, 64); err == nil { - *d = Duration(time.Duration(v) * time.Second) - return nil - } - - v, err := time.ParseDuration(s) - *d = Duration(v) - return err -} - -// Get returns the duration value. -func (d *Duration) Get() interface{} { return time.Duration(*d) } - -// String returns a string representation of the duration value. 
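// Illustrative sketch of the parse.Duration type defined above: Set accepts
// anything time.ParseDuration understands, and a bare integer is read as a
// number of seconds.
package main

import (
	"fmt"

	"github.com/containous/flaeg/parse"
)

func main() {
	var d parse.Duration

	_ = d.Set("90")      // suffix-less digits: 90 seconds
	fmt.Println(d.Get()) // 1m30s

	_ = d.Set("250ms")   // everything else goes through time.ParseDuration
	fmt.Println(d.Get()) // 250ms
}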
-func (d *Duration) String() string { return (*time.Duration)(d).String() } - -// SetValue sets the duration from the given Duration-asserted value. -func (d *Duration) SetValue(val interface{}) { - *d = val.(Duration) -} - -// MarshalText serialize the given duration value into a text. -func (d *Duration) MarshalText() ([]byte, error) { - return []byte(d.String()), nil -} - -// UnmarshalText deserializes the given text into a duration value. -// It is meant to support TOML decoding of durations. -func (d *Duration) UnmarshalText(text []byte) error { - return d.Set(string(text)) -} - -// MarshalJSON serializes the given duration value. -func (d *Duration) MarshalJSON() ([]byte, error) { - return json.Marshal(time.Duration(*d)) -} - -// UnmarshalJSON deserializes the given text into a duration value. -func (d *Duration) UnmarshalJSON(text []byte) error { - if v, err := strconv.ParseInt(string(text), 10, 64); err == nil { - *d = Duration(time.Duration(v)) - return nil - } - - // We use json unmarshal on value because we have the quoted version - var value string - err := json.Unmarshal(text, &value) - if err != nil { - return err - } - v, err := time.ParseDuration(value) - *d = Duration(v) - return err -} - -// TimeValue time.Time Value -type TimeValue time.Time - -// Set sets time.Time value from the given string value. -func (t *TimeValue) Set(s string) error { - v, err := time.Parse(time.RFC3339, s) - *t = TimeValue(v) - return err -} - -// Get returns the time.Time value. -func (t *TimeValue) Get() interface{} { return time.Time(*t) } - -func (t *TimeValue) String() string { return (*time.Time)(t).String() } - -// SetValue sets the TimeValue from the given time.Time-asserted value. -func (t *TimeValue) SetValue(val interface{}) { - *t = TimeValue(val.(time.Time)) -} - -// SliceStrings parse slice of strings -type SliceStrings []string - -// Set adds strings elem into the the parser. -// It splits str on , and ; -func (s *SliceStrings) Set(str string) error { - fargs := func(c rune) bool { - return c == ',' || c == ';' - } - // get function - slice := strings.FieldsFunc(str, fargs) - *s = append(*s, slice...) - return nil -} - -// Get []string -func (s *SliceStrings) Get() interface{} { return []string(*s) } - -// String return slice in a string -func (s *SliceStrings) String() string { return fmt.Sprintf("%v", *s) } - -// SetValue sets []string into the parser -func (s *SliceStrings) SetValue(val interface{}) { - *s = SliceStrings(val.([]string)) -} - -// LoadParsers loads default parsers and custom parsers given as parameter. 
-// Return a map [reflect.Type]parsers -// bool, int, int64, uint, uint64, float64, -func LoadParsers(customParsers map[reflect.Type]Parser) (map[reflect.Type]Parser, error) { - parsers := map[reflect.Type]Parser{} - - var boolParser BoolValue - parsers[reflect.TypeOf(true)] = &boolParser - - var intParser IntValue - parsers[reflect.TypeOf(1)] = &intParser - - var int64Parser Int64Value - parsers[reflect.TypeOf(int64(1))] = &int64Parser - - var uintParser UintValue - parsers[reflect.TypeOf(uint(1))] = &uintParser - - var uint64Parser Uint64Value - parsers[reflect.TypeOf(uint64(1))] = &uint64Parser - - var stringParser StringValue - parsers[reflect.TypeOf("")] = &stringParser - - var float64Parser Float64Value - parsers[reflect.TypeOf(float64(1.5))] = &float64Parser - - var durationParser Duration - parsers[reflect.TypeOf(Duration(time.Second))] = &durationParser - - var timeParser TimeValue - parsers[reflect.TypeOf(time.Now())] = &timeParser - - for rType, parser := range customParsers { - parsers[rType] = parser - } - return parsers, nil -} diff --git a/vendor/github.com/containous/staert/LICENSE.md b/vendor/github.com/containous/staert/LICENSE.md deleted file mode 100644 index 14d0fd105..000000000 --- a/vendor/github.com/containous/staert/LICENSE.md +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Containous SAS, Emile Vauge, emile@vauge.com - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/containous/staert/kv.go b/vendor/github.com/containous/staert/kv.go deleted file mode 100644 index 60ad3c2d6..000000000 --- a/vendor/github.com/containous/staert/kv.go +++ /dev/null @@ -1,395 +0,0 @@ -package staert - -import ( - "bytes" - "compress/gzip" - "encoding" - "encoding/base64" - "errors" - "fmt" - "io" - "io/ioutil" - "reflect" - "sort" - "strconv" - "strings" - - "github.com/abronan/valkeyrie" - "github.com/abronan/valkeyrie/store" - "github.com/containous/flaeg" - "github.com/mitchellh/mapstructure" -) - -// KvSource implements Source -// It handles all mapstructure features(Squashed Embedded Sub-Structures, Maps, Pointers) -// It supports Slices (and maybe Arrays). 
They must be sorted in the KvStore like this : -// Key : ".../[sliceIndex]" -> Value -type KvSource struct { - store.Store - Prefix string // like this "prefix" (without the /) -} - -// NewKvSource creates a new KvSource -func NewKvSource(backend store.Backend, addrs []string, options *store.Config, prefix string) (*KvSource, error) { - kvStore, err := valkeyrie.NewStore(backend, addrs, options) - return &KvSource{Store: kvStore, Prefix: prefix}, err -} - -// Parse uses valkeyrie and mapstructure to fill the structure -func (kv *KvSource) Parse(cmd *flaeg.Command) (*flaeg.Command, error) { - err := kv.LoadConfig(cmd.Config) - if err != nil { - return nil, err - } - return cmd, nil -} - -// LoadConfig loads data from the KV Store into the config structure (given by reference) -func (kv *KvSource) LoadConfig(config interface{}) error { - pairs, err := kv.ListValuedPairWithPrefix(kv.Prefix) - if err != nil { - return err - } - - mapStruct, err := generateMapstructure(convertPairs(pairs), kv.Prefix) - if err != nil { - return err - } - - configDecoder := &mapstructure.DecoderConfig{ - Metadata: nil, - Result: config, - WeaklyTypedInput: true, - DecodeHook: decodeHook, - } - decoder, err := mapstructure.NewDecoder(configDecoder) - if err != nil { - return err - } - if err := decoder.Decode(mapStruct); err != nil { - return err - } - return nil -} - -func generateMapstructure(pairs []*store.KVPair, prefix string) (map[string]interface{}, error) { - raw := make(map[string]interface{}) - for _, p := range pairs { - // Trim the prefix off our key first - key := strings.TrimPrefix(strings.Trim(p.Key, "/"), strings.Trim(prefix, "/")+"/") - var err error - raw, err = processKV(key, p.Value, raw) - if err != nil { - return raw, err - } - } - return raw, nil -} - -func processKV(key string, v []byte, raw map[string]interface{}) (map[string]interface{}, error) { - // Determine which map we're writing the value to. - // We split by '/' to determine any sub-maps that need to be created. 
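// Illustrative sketch of the key layout LoadConfig reads back, for a
// hypothetical Config struct under the hypothetical prefix "traefik": keys are
// lowercased field paths split on "/", and slice elements sit under their
// numeric index, as the KvSource comment above describes. Backend wiring is
// left out of this sketch.
package main

import (
	"log"

	"github.com/abronan/valkeyrie/store"
	"github.com/containous/staert"
)

type Config struct {
	LogLevel    string   // read from traefik/loglevel
	EntryPoints []string // read from traefik/entrypoints/0, traefik/entrypoints/1, ...
}

// newBackingStore stands in for any valkeyrie backend (Consul, etcd,
// ZooKeeper, BoltDB, ...); the actual connection setup is omitted here.
func newBackingStore() store.Store { return nil }

func main() {
	kv := &staert.KvSource{Store: newBackingStore(), Prefix: "traefik"}

	var cfg Config
	if err := kv.LoadConfig(&cfg); err != nil {
		log.Fatal(err)
	}
	log.Printf("%+v", cfg)
}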
- m := raw - children := strings.Split(key, "/") - if len(children) > 0 { - key = children[len(children)-1] - children = children[:len(children)-1] - for _, child := range children { - if m[child] == nil { - m[child] = make(map[string]interface{}) - } - subm, ok := m[child].(map[string]interface{}) - if !ok { - return nil, fmt.Errorf("child is both a data item and dir: %s", child) - } - m = subm - } - } - m[key] = string(v) - return raw, nil -} - -func decodeHook(fromType reflect.Type, toType reflect.Type, data interface{}) (interface{}, error) { - // TODO : Array support - - // custom unmarshaler - textUnmarshalerType := reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() - if toType.Implements(textUnmarshalerType) { - object := reflect.New(toType.Elem()).Interface() - err := object.(encoding.TextUnmarshaler).UnmarshalText([]byte(data.(string))) - if err != nil { - return nil, fmt.Errorf("error unmarshaling %v: %v", data, err) - } - return object, nil - } - switch toType.Kind() { - case reflect.Ptr: - if fromType.Kind() == reflect.String { - if data == "" { - // default value Pointer - return make(map[string]interface{}), nil - } - } - case reflect.Slice: - if fromType.Kind() == reflect.Map { - // Type assertion - dataMap, ok := data.(map[string]interface{}) - if !ok { - return data, fmt.Errorf("input data is not a map : %#v", data) - } - // Sorting map - indexes := make([]int, len(dataMap)) - i := 0 - for k := range dataMap { - ind, err := strconv.Atoi(k) - if err != nil { - return dataMap, err - } - indexes[i] = ind - i++ - } - sort.Ints(indexes) - // Building slice - dataOutput := make([]interface{}, i) - i = 0 - for _, k := range indexes { - dataOutput[i] = dataMap[strconv.Itoa(k)] - i++ - } - - return dataOutput, nil - } else if fromType.Kind() == reflect.String { - return readCompressedData(data.(string), gzipReader, base64Reader) - } - } - return data, nil -} - -func readCompressedData(data string, fs ...func(io.Reader) (io.Reader, error)) ([]byte, error) { - var err error - for _, f := range fs { - var reader io.Reader - reader, err = f(bytes.NewBufferString(data)) - if err == nil { - return ioutil.ReadAll(reader) - } - } - return nil, err -} - -func base64Reader(r io.Reader) (io.Reader, error) { - return base64.NewDecoder(base64.StdEncoding, r), nil -} - -func gzipReader(r io.Reader) (io.Reader, error) { - return gzip.NewReader(r) -} - -// StoreConfig stores the config into the KV Store -func (kv *KvSource) StoreConfig(config interface{}) error { - kvMap := map[string]string{} - if err := collateKvRecursive(reflect.ValueOf(config), kvMap, kv.Prefix); err != nil { - return err - } - var keys []string - for key := range kvMap { - keys = append(keys, key) - } - sort.Strings(keys) - for _, k := range keys { - var writeOptions *store.WriteOptions - // is it a directory ? 
- if strings.HasSuffix(k, "/") { - writeOptions = &store.WriteOptions{ - IsDir: true, - } - } - if err := kv.Put(k, []byte(kvMap[k]), writeOptions); err != nil { - return err - } - } - return nil -} - -func collateKvRecursive(objValue reflect.Value, kv map[string]string, key string) error { - name := key - kind := objValue.Kind() - - // custom marshaler - if marshaler, ok := objValue.Interface().(encoding.TextMarshaler); ok { - test, err := marshaler.MarshalText() - if err != nil { - return fmt.Errorf("error marshaling key %s: %v", name, err) - } - kv[name] = string(test) - return nil - } - switch kind { - case reflect.Struct: - for i := 0; i < objValue.NumField(); i++ { - objType := objValue.Type() - if objType.Field(i).Name[:1] != strings.ToUpper(objType.Field(i).Name[:1]) { - //if unexported field - continue - } - squashed := false - if objType.Field(i).Anonymous { - if objValue.Field(i).Kind() == reflect.Struct { - tags := objType.Field(i).Tag - if strings.Contains(string(tags), "squash") { - squashed = true - } - } - } - if squashed { - if err := collateKvRecursive(objValue.Field(i), kv, name); err != nil { - return err - } - } else { - fieldName := objType.Field(i).Name - //useless if not empty Prefix is required ? - if len(key) == 0 { - name = strings.ToLower(fieldName) - } else { - name = key + "/" + strings.ToLower(fieldName) - } - - if err := collateKvRecursive(objValue.Field(i), kv, name); err != nil { - return err - } - } - } - - case reflect.Ptr: - if !objValue.IsNil() { - // hack to avoid calling this at the beginning - if len(kv) > 0 { - kv[name+"/"] = "" - } - if err := collateKvRecursive(objValue.Elem(), kv, name); err != nil { - return err - } - } - case reflect.Map: - for _, k := range objValue.MapKeys() { - if k.Kind() == reflect.Struct { - return errors.New("struct as key not supported") - } - name = key + "/" + fmt.Sprint(k) - if err := collateKvRecursive(objValue.MapIndex(k), kv, name); err != nil { - return err - } - } - case reflect.Array, reflect.Slice: - // Byte slices get special treatment - if objValue.Type().Elem().Kind() == reflect.Uint8 { - compressedData, err := writeCompressedData(objValue.Bytes()) - if err != nil { - return err - } - kv[name] = compressedData - } else { - for i := 0; i < objValue.Len(); i++ { - name = key + "/" + strconv.Itoa(i) - if err := collateKvRecursive(objValue.Index(i), kv, name); err != nil { - return err - } - } - } - case reflect.Interface, reflect.String, reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, - reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, - reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64: - if _, ok := kv[name]; ok { - return errors.New("key already exists: " + name) - } - kv[name] = fmt.Sprint(objValue) - - default: - return fmt.Errorf("kind %s not supported", kind.String()) - } - return nil -} - -func writeCompressedData(data []byte) (string, error) { - var buffer bytes.Buffer - gzipWriter := gzip.NewWriter(&buffer) - - _, err := gzipWriter.Write(data) - if err != nil { - return "", err - } - - err = gzipWriter.Close() - if err != nil { - return "", err - } - - return buffer.String(), nil -} - -// ListRecursive lists all key value children under key -// Replaced by ListValuedPairWithPrefix -// Deprecated -func (kv *KvSource) ListRecursive(key string, pairs map[string][]byte) error { - pairsN1, err := kv.List(key, nil) - if err == store.ErrKeyNotFound { - return nil - } - if err != nil { - return err - } - if len(pairsN1) == 0 { - pairLeaf, err 
:= kv.Get(key, nil) - if err != nil { - return err - } - if pairLeaf == nil { - return nil - } - pairs[pairLeaf.Key] = pairLeaf.Value - return nil - } - for _, p := range pairsN1 { - if p.Key != key { - err := kv.ListRecursive(p.Key, pairs) - if err != nil { - return err - } - } - } - return nil -} - -// ListValuedPairWithPrefix lists all key value children under key -func (kv *KvSource) ListValuedPairWithPrefix(key string) (map[string][]byte, error) { - pairs := make(map[string][]byte) - - pairsN1, err := kv.List(key, nil) - if err == store.ErrKeyNotFound { - return pairs, nil - } - if err != nil { - return pairs, err - } - - for _, p := range pairsN1 { - if len(p.Value) > 0 { - pairs[p.Key] = p.Value - } - } - - return pairs, nil -} - -func convertPairs(pairs map[string][]byte) []*store.KVPair { - slicePairs := make([]*store.KVPair, len(pairs)) - i := 0 - for k, v := range pairs { - slicePairs[i] = &store.KVPair{ - Key: k, - Value: v, - } - i++ - } - return slicePairs -} diff --git a/vendor/github.com/containous/staert/staert.go b/vendor/github.com/containous/staert/staert.go deleted file mode 100644 index fa2fa6f14..000000000 --- a/vendor/github.com/containous/staert/staert.go +++ /dev/null @@ -1,80 +0,0 @@ -package staert - -import ( - "fmt" - "reflect" - - "github.com/containous/flaeg" -) - -// Source interface must be satisfy to Add any kink of Source to Staert as like as TomlFile or Flaeg -type Source interface { - Parse(cmd *flaeg.Command) (*flaeg.Command, error) -} - -// Staert contains the struct to configure, thee default values inside structs and the sources -type Staert struct { - command *flaeg.Command - sources []Source -} - -// NewStaert creates and return a pointer on Staert. Need defaultConfig and defaultPointersConfig given by references -func NewStaert(rootCommand *flaeg.Command) *Staert { - return &Staert{command: rootCommand} -} - -// AddSource adds new Source to Staert, give it by reference -func (s *Staert) AddSource(src Source) { - s.sources = append(s.sources, src) -} - -// LoadConfig check which command is called and parses config -// It returns the the parsed config or an error if it fails -func (s *Staert) LoadConfig() (interface{}, error) { - for _, src := range s.sources { - // Type assertion - if flg, ok := src.(*flaeg.Flaeg); ok { - fCmd, err := flg.GetCommand() - if err != nil { - return nil, err - } - - // if fleag sub-command - if s.command != fCmd { - // if parseAllSources - if fCmd.Metadata["parseAllSources"] == "true" { - fCmdConfigType := reflect.TypeOf(fCmd.Config) - sCmdConfigType := reflect.TypeOf(s.command.Config) - if fCmdConfigType != sCmdConfigType { - return nil, fmt.Errorf("command %s : Config type doesn't match with root command config type. 
Expected %s got %s", - fCmd.Name, sCmdConfigType.Name(), fCmdConfigType.Name()) - } - s.command = fCmd - } else { - // (not parseAllSources) - s.command, err = flg.Parse(fCmd) - return s.command.Config, err - } - } - } - } - err := s.parseConfigAllSources(s.command) - return s.command.Config, err -} - -// parseConfigAllSources getConfig for a flaeg.Command run sources Parse func in the raw -func (s *Staert) parseConfigAllSources(cmd *flaeg.Command) error { - for _, src := range s.sources { - _, err := src.Parse(cmd) - if err != nil { - return err - } - } - return nil -} - -// Run calls the Run func of the command -// Warning, Run doesn't parse the config -func (s *Staert) Run() error { - return s.command.Run() -} diff --git a/vendor/github.com/containous/staert/toml.go b/vendor/github.com/containous/staert/toml.go deleted file mode 100644 index e86374b87..000000000 --- a/vendor/github.com/containous/staert/toml.go +++ /dev/null @@ -1,118 +0,0 @@ -package staert - -import ( - "os" - "path/filepath" - "strings" - - "github.com/BurntSushi/toml" - "github.com/containous/flaeg" -) - -var _ Source = (*TomlSource)(nil) - -// TomlSource implement staert.Source -type TomlSource struct { - filename string - dirNFullPath []string - fullPath string -} - -// NewTomlSource creates and return a pointer on Source. -// Parameter filename is the file name (without extension type, ".toml" will be added) -// dirNFullPath may contain directories or fullPath to the file. -func NewTomlSource(filename string, dirNFullPath []string) *TomlSource { - return &TomlSource{filename, dirNFullPath, ""} -} - -// ConfigFileUsed return config file used -func (ts *TomlSource) ConfigFileUsed() string { - return ts.fullPath -} - -// Parse calls toml.DecodeFile() func -func (ts *TomlSource) Parse(cmd *flaeg.Command) (*flaeg.Command, error) { - ts.fullPath = findFile(ts.filename, ts.dirNFullPath) - if len(ts.fullPath) < 2 { - return cmd, nil - } - - metadata, err := toml.DecodeFile(ts.fullPath, cmd.Config) - if err != nil { - return nil, err - } - - boolFlags, err := flaeg.GetBoolFlags(cmd.Config) - if err != nil { - return nil, err - } - - flgArgs, hasUnderField := generateArgs(metadata, boolFlags) - - err = flaeg.Load(cmd.Config, cmd.DefaultPointersConfig, flgArgs) - if err != nil && err != flaeg.ErrParserNotFound { - return nil, err - } - - if hasUnderField { - _, err := toml.DecodeFile(ts.fullPath, cmd.Config) - if err != nil { - return nil, err - } - } - - return cmd, nil -} - -func preProcessDir(dirIn string) (string, error) { - expanded := os.ExpandEnv(dirIn) - return filepath.Abs(expanded) -} - -func findFile(filename string, dirNFile []string) string { - for _, df := range dirNFile { - if df != "" { - fullPath, _ := preProcessDir(df) - if fileInfo, err := os.Stat(fullPath); err == nil && !fileInfo.IsDir() { - return fullPath - } - - fullPath = filepath.Join(fullPath, filename+".toml") - if fileInfo, err := os.Stat(fullPath); err == nil && !fileInfo.IsDir() { - return fullPath - } - } - } - return "" -} - -func generateArgs(metadata toml.MetaData, flags []string) ([]string, bool) { - var flgArgs []string - keys := metadata.Keys() - hasUnderField := false - - for i, key := range keys { - if metadata.Type(key.String()) == "Hash" { - // TOML hashes correspond to Go structs or maps. 
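// Illustrative sketch (hypothetical names) of how TomlSource, Flaeg and Staert
// were combined: a single flaeg.Command drives both sources, staert parses
// them in the order they are added, then runs the command.
package main

import (
	"fmt"
	"os"

	"github.com/containous/flaeg"
	"github.com/containous/staert"
)

type Config struct {
	LogLevel string `description:"Log level"`
}

func main() {
	cfg := &Config{LogLevel: "ERROR"}

	rootCmd := &flaeg.Command{
		Name:                  "app",
		Description:           "example command",
		Config:                cfg,
		DefaultPointersConfig: &Config{},
		Run: func() error {
			fmt.Printf("running with %+v\n", cfg)
			return nil
		},
	}

	s := staert.NewStaert(rootCmd)
	s.AddSource(staert.NewTomlSource("app", []string{".", "/etc/app/"})) // app.toml, if present
	s.AddSource(flaeg.New(rootCmd, os.Args[1:]))                         // command-line flags

	if _, err := s.LoadConfig(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if err := s.Run(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}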
- for j := i; j < len(keys); j++ { - if strings.Contains(keys[j].String(), key.String()+".") { - hasUnderField = true - break - } - } - - match := false - for _, flag := range flags { - if flag == strings.ToLower(key.String()) { - match = true - break - } - } - if match { - flgArgs = append(flgArgs, "--"+strings.ToLower(key.String())) - } - } - } - - return flgArgs, hasUnderField -} diff --git a/vendor/github.com/ogier/pflag/LICENSE b/vendor/github.com/ogier/pflag/LICENSE deleted file mode 100644 index 63ed1cfea..000000000 --- a/vendor/github.com/ogier/pflag/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2012 Alex Ogier. All rights reserved. -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/ogier/pflag/bool.go b/vendor/github.com/ogier/pflag/bool.go deleted file mode 100644 index 617971a08..000000000 --- a/vendor/github.com/ogier/pflag/bool.go +++ /dev/null @@ -1,79 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// optional interface to indicate boolean flags that can be -// supplied without "=value" text -type boolFlag interface { - Value - IsBoolFlag() bool -} - -// -- bool Value -type boolValue bool - -func newBoolValue(val bool, p *bool) *boolValue { - *p = val - return (*boolValue)(p) -} - -func (b *boolValue) Set(s string) error { - v, err := strconv.ParseBool(s) - *b = boolValue(v) - return err -} - -func (b *boolValue) String() string { return fmt.Sprintf("%v", *b) } - -func (b *boolValue) IsBoolFlag() bool { return true } - -// BoolVar defines a bool flag with specified name, default value, and usage string. -// The argument p points to a bool variable in which to store the value of the flag. -func (f *FlagSet) BoolVar(p *bool, name string, value bool, usage string) { - f.VarP(newBoolValue(value, p), name, "", usage) -} - -// Like BoolVar, but accepts a shorthand letter that can be used after a single dash. 
-func (f *FlagSet) BoolVarP(p *bool, name, shorthand string, value bool, usage string) { - f.VarP(newBoolValue(value, p), name, shorthand, usage) -} - -// BoolVar defines a bool flag with specified name, default value, and usage string. -// The argument p points to a bool variable in which to store the value of the flag. -func BoolVar(p *bool, name string, value bool, usage string) { - CommandLine.VarP(newBoolValue(value, p), name, "", usage) -} - -// Like BoolVar, but accepts a shorthand letter that can be used after a single dash. -func BoolVarP(p *bool, name, shorthand string, value bool, usage string) { - CommandLine.VarP(newBoolValue(value, p), name, shorthand, usage) -} - -// Bool defines a bool flag with specified name, default value, and usage string. -// The return value is the address of a bool variable that stores the value of the flag. -func (f *FlagSet) Bool(name string, value bool, usage string) *bool { - p := new(bool) - f.BoolVarP(p, name, "", value, usage) - return p -} - -// Like Bool, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) BoolP(name, shorthand string, value bool, usage string) *bool { - p := new(bool) - f.BoolVarP(p, name, shorthand, value, usage) - return p -} - -// Bool defines a bool flag with specified name, default value, and usage string. -// The return value is the address of a bool variable that stores the value of the flag. -func Bool(name string, value bool, usage string) *bool { - return CommandLine.BoolP(name, "", value, usage) -} - -// Like Bool, but accepts a shorthand letter that can be used after a single dash. -func BoolP(name, shorthand string, value bool, usage string) *bool { - return CommandLine.BoolP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/duration.go b/vendor/github.com/ogier/pflag/duration.go deleted file mode 100644 index db594639e..000000000 --- a/vendor/github.com/ogier/pflag/duration.go +++ /dev/null @@ -1,74 +0,0 @@ -package pflag - -import "time" - -// -- time.Duration Value -type durationValue time.Duration - -func newDurationValue(val time.Duration, p *time.Duration) *durationValue { - *p = val - return (*durationValue)(p) -} - -func (d *durationValue) Set(s string) error { - v, err := time.ParseDuration(s) - *d = durationValue(v) - return err -} - -func (d *durationValue) String() string { return (*time.Duration)(d).String() } - -// Value is the interface to the dynamic value stored in a flag. -// (The default value is represented as a string.) -type Value interface { - String() string - Set(string) error -} - -// DurationVar defines a time.Duration flag with specified name, default value, and usage string. -// The argument p points to a time.Duration variable in which to store the value of the flag. -func (f *FlagSet) DurationVar(p *time.Duration, name string, value time.Duration, usage string) { - f.VarP(newDurationValue(value, p), name, "", usage) -} - -// Like DurationVar, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) DurationVarP(p *time.Duration, name, shorthand string, value time.Duration, usage string) { - f.VarP(newDurationValue(value, p), name, shorthand, usage) -} - -// DurationVar defines a time.Duration flag with specified name, default value, and usage string. -// The argument p points to a time.Duration variable in which to store the value of the flag. 
-func DurationVar(p *time.Duration, name string, value time.Duration, usage string) { - CommandLine.VarP(newDurationValue(value, p), name, "", usage) -} - -// Like DurationVar, but accepts a shorthand letter that can be used after a single dash. -func DurationVarP(p *time.Duration, name, shorthand string, value time.Duration, usage string) { - CommandLine.VarP(newDurationValue(value, p), name, shorthand, usage) -} - -// Duration defines a time.Duration flag with specified name, default value, and usage string. -// The return value is the address of a time.Duration variable that stores the value of the flag. -func (f *FlagSet) Duration(name string, value time.Duration, usage string) *time.Duration { - p := new(time.Duration) - f.DurationVarP(p, name, "", value, usage) - return p -} - -// Like Duration, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) DurationP(name, shorthand string, value time.Duration, usage string) *time.Duration { - p := new(time.Duration) - f.DurationVarP(p, name, shorthand, value, usage) - return p -} - -// Duration defines a time.Duration flag with specified name, default value, and usage string. -// The return value is the address of a time.Duration variable that stores the value of the flag. -func Duration(name string, value time.Duration, usage string) *time.Duration { - return CommandLine.DurationP(name, "", value, usage) -} - -// Like Duration, but accepts a shorthand letter that can be used after a single dash. -func DurationP(name, shorthand string, value time.Duration, usage string) *time.Duration { - return CommandLine.DurationP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/flag.go b/vendor/github.com/ogier/pflag/flag.go deleted file mode 100644 index 9d1e0ca53..000000000 --- a/vendor/github.com/ogier/pflag/flag.go +++ /dev/null @@ -1,624 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* - pflag is a drop-in replacement for Go's flag package, implementing - POSIX/GNU-style --flags. - - pflag is compatible with the GNU extensions to the POSIX recommendations - for command-line options. See - http://www.gnu.org/software/libc/manual/html_node/Argument-Syntax.html - - Usage: - - pflag is a drop-in replacement of Go's native flag package. If you import - pflag under the name "flag" then all code should continue to function - with no changes. - - import flag "github.com/ogier/pflag" - - There is one exception to this: if you directly instantiate the Flag struct - there is one more field "Shorthand" that you will need to set. - Most code never instantiates this struct directly, and instead uses - functions such as String(), BoolVar(), and Var(), and is therefore - unaffected. - - Define flags using flag.String(), Bool(), Int(), etc. - - This declares an integer flag, -flagname, stored in the pointer ip, with type *int. - var ip = flag.Int("flagname", 1234, "help message for flagname") - If you like, you can bind the flag to a variable using the Var() functions. - var flagvar int - func init() { - flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname") - } - Or you can create custom flags that satisfy the Value interface (with - pointer receivers) and couple them to flag parsing by - flag.Var(&flagVal, "name", "help message for flagname") - For such flags, the default value is just the initial value of the variable. 
- - After all flags are defined, call - flag.Parse() - to parse the command line into the defined flags. - - Flags may then be used directly. If you're using the flags themselves, - they are all pointers; if you bind to variables, they're values. - fmt.Println("ip has value ", *ip) - fmt.Println("flagvar has value ", flagvar) - - After parsing, the arguments after the flag are available as the - slice flag.Args() or individually as flag.Arg(i). - The arguments are indexed from 0 through flag.NArg()-1. - - The pflag package also defines some new functions that are not in flag, - that give one-letter shorthands for flags. You can use these by appending - 'P' to the name of any function that defines a flag. - var ip = flag.IntP("flagname", "f", 1234, "help message") - var flagvar bool - func init() { - flag.BoolVarP("boolname", "b", true, "help message") - } - flag.VarP(&flagVar, "varname", "v", 1234, "help message") - Shorthand letters can be used with single dashes on the command line. - Boolean shorthand flags can be combined with other shorthand flags. - - Command line flag syntax: - --flag // boolean flags only - --flag=x - - Unlike the flag package, a single dash before an option means something - different than a double dash. Single dashes signify a series of shorthand - letters for flags. All but the last shorthand letter must be boolean flags. - // boolean flags - -f - -abc - // non-boolean flags - -n 1234 - -Ifile - // mixed - -abcs "hello" - -abcn1234 - - Flag parsing stops after the terminator "--". Unlike the flag package, - flags can be interspersed with arguments anywhere on the command line - before this terminator. - - Integer flags accept 1234, 0664, 0x1234 and may be negative. - Boolean flags (in their long form) accept 1, 0, t, f, true, false, - TRUE, FALSE, True, False. - Duration flags accept any input valid for time.ParseDuration. - - The default set of command-line flags is controlled by - top-level functions. The FlagSet type allows one to define - independent sets of flags, such as to implement subcommands - in a command-line interface. The methods of FlagSet are - analogous to the top-level functions for the command-line - flag set. -*/ -package pflag - -import ( - "errors" - "fmt" - "io" - "os" - "sort" - "strings" -) - -// ErrHelp is the error returned if the flag -help is invoked but no such flag is defined. -var ErrHelp = errors.New("pflag: help requested") - -// ErrorHandling defines how to handle flag parsing errors. -type ErrorHandling int - -const ( - ContinueOnError ErrorHandling = iota - ExitOnError - PanicOnError -) - -// A FlagSet represents a set of defined flags. -type FlagSet struct { - // Usage is the function called when an error occurs while parsing flags. - // The field is a function (not a method) that may be changed to point to - // a custom error handler. - Usage func() - - name string - parsed bool - actual map[string]*Flag - formal map[string]*Flag - shorthands map[byte]*Flag - args []string // arguments after flags - exitOnError bool // does the program exit if there's an error? - errorHandling ErrorHandling - output io.Writer // nil means stderr; use out() accessor - interspersed bool // allow interspersed option/non-option args -} - -// A Flag represents the state of a flag. 
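// Illustrative sketch of the pflag usage documented above: POSIX/GNU-style
// long flags with optional one-letter shorthands, used as a drop-in
// replacement for the standard flag package. The flag names are hypothetical.
package main

import (
	"fmt"
	"time"

	flag "github.com/ogier/pflag"
)

func main() {
	var verbose bool
	var timeout time.Duration

	port := flag.IntP("port", "p", 8080, "port to listen on")
	flag.BoolVarP(&verbose, "verbose", "v", false, "enable verbose output")
	flag.DurationVar(&timeout, "timeout", 30*time.Second, "request timeout")

	// Accepts e.g.  --port=9000 --timeout=5s -v   or   -p 9000 -v
	flag.Parse()

	fmt.Println(*port, verbose, timeout, flag.Args())
}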
-type Flag struct { - Name string // name as it appears on command line - Shorthand string // one-letter abbreviated flag - Usage string // help message - Value Value // value as set - DefValue string // default value (as text); for usage message -} - -// sortFlags returns the flags as a slice in lexicographical sorted order. -func sortFlags(flags map[string]*Flag) []*Flag { - list := make(sort.StringSlice, len(flags)) - i := 0 - for _, f := range flags { - list[i] = f.Name - i++ - } - list.Sort() - result := make([]*Flag, len(list)) - for i, name := range list { - result[i] = flags[name] - } - return result -} - -func (f *FlagSet) out() io.Writer { - if f.output == nil { - return os.Stderr - } - return f.output -} - -// SetOutput sets the destination for usage and error messages. -// If output is nil, os.Stderr is used. -func (f *FlagSet) SetOutput(output io.Writer) { - f.output = output -} - -// VisitAll visits the flags in lexicographical order, calling fn for each. -// It visits all flags, even those not set. -func (f *FlagSet) VisitAll(fn func(*Flag)) { - for _, flag := range sortFlags(f.formal) { - fn(flag) - } -} - -// VisitAll visits the command-line flags in lexicographical order, calling -// fn for each. It visits all flags, even those not set. -func VisitAll(fn func(*Flag)) { - CommandLine.VisitAll(fn) -} - -// Visit visits the flags in lexicographical order, calling fn for each. -// It visits only those flags that have been set. -func (f *FlagSet) Visit(fn func(*Flag)) { - for _, flag := range sortFlags(f.actual) { - fn(flag) - } -} - -// Visit visits the command-line flags in lexicographical order, calling fn -// for each. It visits only those flags that have been set. -func Visit(fn func(*Flag)) { - CommandLine.Visit(fn) -} - -// Lookup returns the Flag structure of the named flag, returning nil if none exists. -func (f *FlagSet) Lookup(name string) *Flag { - return f.formal[name] -} - -// Lookup returns the Flag structure of the named command-line flag, -// returning nil if none exists. -func Lookup(name string) *Flag { - return CommandLine.formal[name] -} - -// Set sets the value of the named flag. -func (f *FlagSet) Set(name, value string) error { - flag, ok := f.formal[name] - if !ok { - return fmt.Errorf("no such flag -%v", name) - } - err := flag.Value.Set(value) - if err != nil { - return err - } - if f.actual == nil { - f.actual = make(map[string]*Flag) - } - f.actual[name] = flag - return nil -} - -// Set sets the value of the named command-line flag. -func Set(name, value string) error { - return CommandLine.Set(name, value) -} - -// isZeroValue guesses whether the string represents the zero -// value for a flag. It is not accurate but in practice works OK. -func isZeroValue(value string) bool { - switch value { - case "false": - return true - case "": - return true - case "0": - return true - } - return false -} - -// UnquoteUsage extracts a back-quoted name from the usage -// string for a flag and returns it and the un-quoted usage. -// Given "a `name` to show" it returns ("name", "a name to show"). -// If there are no back quotes, the name is an educated guess of the -// type of the flag's value, or the empty string if the flag is boolean. -func UnquoteUsage(flag *Flag) (name string, usage string) { - // Look for a back-quoted name, but avoid the strings package. 
- usage = flag.Usage - for i := 0; i < len(usage); i++ { - if usage[i] == '`' { - for j := i + 1; j < len(usage); j++ { - if usage[j] == '`' { - name = usage[i+1 : j] - usage = usage[:i] + name + usage[j+1:] - return name, usage - } - } - break // Only one back quote; use type name. - } - } - // No explicit name, so use type if we can find one. - name = "value" - switch flag.Value.(type) { - case boolFlag: - name = "" - case *durationValue: - name = "duration" - case *float64Value: - name = "float" - case *intValue, *int64Value: - name = "int" - case *stringValue: - name = "string" - case *uintValue, *uint64Value: - name = "uint" - } - return -} - -// PrintDefaults prints to standard error the default values of all -// defined command-line flags in the set. See the documentation for -// the global function PrintDefaults for more information. -func (f *FlagSet) PrintDefaults() { - f.VisitAll(func(flag *Flag) { - s := "" - if len(flag.Shorthand) > 0 { - s = fmt.Sprintf(" -%s, --%s", flag.Shorthand, flag.Name) - } else { - s = fmt.Sprintf(" --%s", flag.Name) - } - - name, usage := UnquoteUsage(flag) - if len(name) > 0 { - s += " " + name - } - - s += "\n \t" - s += usage - if !isZeroValue(flag.DefValue) { - if _, ok := flag.Value.(*stringValue); ok { - // put quotes on the value - s += fmt.Sprintf(" (default %q)", flag.DefValue) - } else { - s += fmt.Sprintf(" (default %v)", flag.DefValue) - } - } - fmt.Fprint(f.out(), s, "\n") - }) -} - -// PrintDefaults prints to standard error the default values of all defined command-line flags. -func PrintDefaults() { - CommandLine.PrintDefaults() -} - -// defaultUsage is the default function to print a usage message. -func defaultUsage(f *FlagSet) { - if f.name == "" { - fmt.Fprintf(f.out(), "Usage:\n") - } else { - fmt.Fprintf(f.out(), "Usage of %s:\n", f.name) - } - f.PrintDefaults() -} - -// NOTE: Usage is not just defaultUsage(CommandLine) -// because it serves (via godoc flag Usage) as the example -// for how to write your own usage function. - -// Usage prints to standard error a usage message documenting all defined command-line flags. -// The function is a variable that may be changed to point to a custom function. -var Usage = func() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) - PrintDefaults() -} - -// NFlag returns the number of flags that have been set. -func (f *FlagSet) NFlag() int { return len(f.actual) } - -// NFlag returns the number of command-line flags that have been set. -func NFlag() int { return len(CommandLine.actual) } - -// Arg returns the i'th argument. Arg(0) is the first remaining argument -// after flags have been processed. -func (f *FlagSet) Arg(i int) string { - if i < 0 || i >= len(f.args) { - return "" - } - return f.args[i] -} - -// Arg returns the i'th command-line argument. Arg(0) is the first remaining argument -// after flags have been processed. -func Arg(i int) string { - return CommandLine.Arg(i) -} - -// NArg is the number of arguments remaining after flags have been processed. -func (f *FlagSet) NArg() int { return len(f.args) } - -// NArg is the number of arguments remaining after flags have been processed. -func NArg() int { return len(CommandLine.args) } - -// Args returns the non-flag arguments. -func (f *FlagSet) Args() []string { return f.args } - -// Args returns the non-flag command-line arguments. -func Args() []string { return CommandLine.args } - -// Var defines a flag with the specified name and usage string. 
The type and -// value of the flag are represented by the first argument, of type Value, which -// typically holds a user-defined implementation of Value. For instance, the -// caller could create a flag that turns a comma-separated string into a slice -// of strings by giving the slice the methods of Value; in particular, Set would -// decompose the comma-separated string into the slice. -func (f *FlagSet) Var(value Value, name string, usage string) { - f.VarP(value, name, "", usage) -} - -// Like Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) VarP(value Value, name, shorthand, usage string) { - // Remember the default value as a string; it won't change. - flag := &Flag{name, shorthand, usage, value, value.String()} - _, alreadythere := f.formal[name] - if alreadythere { - msg := fmt.Sprintf("%s flag redefined: %s", f.name, name) - fmt.Fprintln(f.out(), msg) - panic(msg) // Happens only if flags are declared with identical names - } - if f.formal == nil { - f.formal = make(map[string]*Flag) - } - f.formal[name] = flag - - if len(shorthand) == 0 { - return - } - if len(shorthand) > 1 { - fmt.Fprintf(f.out(), "%s shorthand more than ASCII character: %s\n", f.name, shorthand) - panic("shorthand is more than one character") - } - if f.shorthands == nil { - f.shorthands = make(map[byte]*Flag) - } - c := shorthand[0] - old, alreadythere := f.shorthands[c] - if alreadythere { - fmt.Fprintf(f.out(), "%s shorthand reused: %q for %s already used for %s\n", f.name, c, name, old.Name) - panic("shorthand redefinition") - } - f.shorthands[c] = flag -} - -// Var defines a flag with the specified name and usage string. The type and -// value of the flag are represented by the first argument, of type Value, which -// typically holds a user-defined implementation of Value. For instance, the -// caller could create a flag that turns a comma-separated string into a slice -// of strings by giving the slice the methods of Value; in particular, Set would -// decompose the comma-separated string into the slice. -func Var(value Value, name string, usage string) { - CommandLine.VarP(value, name, "", usage) -} - -// Like Var, but accepts a shorthand letter that can be used after a single dash. -func VarP(value Value, name, shorthand, usage string) { - CommandLine.VarP(value, name, shorthand, usage) -} - -// failf prints to standard error a formatted error and usage message and -// returns the error. -func (f *FlagSet) failf(format string, a ...interface{}) error { - err := fmt.Errorf(format, a...) - fmt.Fprintln(f.out(), err) - f.usage() - return err -} - -// usage calls the Usage method for the flag set, or the usage function if -// the flag set is CommandLine. -func (f *FlagSet) usage() { - if f == CommandLine { - Usage() - } else if f.Usage == nil { - defaultUsage(f) - } else { - f.Usage() - } -} - -func (f *FlagSet) setFlag(flag *Flag, value string, origArg string) error { - if err := flag.Value.Set(value); err != nil { - return f.failf("invalid argument %q for %s: %v", value, origArg, err) - } - // mark as visited for Visit() - if f.actual == nil { - f.actual = make(map[string]*Flag) - } - f.actual[flag.Name] = flag - - return nil -} - -func (f *FlagSet) parseArgs(args []string) error { - for len(args) > 0 { - s := args[0] - args = args[1:] - if len(s) == 0 || s[0] != '-' || len(s) == 1 { - if !f.interspersed { - f.args = append(f.args, s) - f.args = append(f.args, args...) 
- return nil - } - f.args = append(f.args, s) - continue - } - - if s[1] == '-' { - if len(s) == 2 { // "--" terminates the flags - f.args = append(f.args, args...) - return nil - } - name := s[2:] - if len(name) == 0 || name[0] == '-' || name[0] == '=' { - return f.failf("bad flag syntax: %s", s) - } - split := strings.SplitN(name, "=", 2) - name = split[0] - m := f.formal - flag, alreadythere := m[name] // BUG - if !alreadythere { - if name == "help" { // special case for nice help message. - f.usage() - return ErrHelp - } - return f.failf("unknown flag: --%s", name) - } - if len(split) == 1 { - if bv, ok := flag.Value.(boolFlag); !ok || !bv.IsBoolFlag() { - return f.failf("flag needs an argument: %s", s) - } - f.setFlag(flag, "true", s) - } else { - if err := f.setFlag(flag, split[1], s); err != nil { - return err - } - } - } else { - shorthands := s[1:] - for i := 0; i < len(shorthands); i++ { - c := shorthands[i] - flag, alreadythere := f.shorthands[c] - if !alreadythere { - if c == 'h' { // special case for nice help message. - f.usage() - return ErrHelp - } - return f.failf("unknown shorthand flag: %q in -%s", c, shorthands) - } - if bv, ok := flag.Value.(boolFlag); ok && bv.IsBoolFlag() { - f.setFlag(flag, "true", s) - continue - } - if i < len(shorthands)-1 { - if err := f.setFlag(flag, shorthands[i+1:], s); err != nil { - return err - } - break - } - if len(args) == 0 { - return f.failf("flag needs an argument: %q in -%s", c, shorthands) - } - if err := f.setFlag(flag, args[0], s); err != nil { - return err - } - args = args[1:] - break // should be unnecessary - } - } - } - return nil -} - -// Parse parses flag definitions from the argument list, which should not -// include the command name. Must be called after all flags in the FlagSet -// are defined and before flags are accessed by the program. -// The return value will be ErrHelp if -help was set but not defined. -func (f *FlagSet) Parse(arguments []string) error { - f.parsed = true - f.args = make([]string, 0, len(arguments)) - err := f.parseArgs(arguments) - if err != nil { - switch f.errorHandling { - case ContinueOnError: - return err - case ExitOnError: - os.Exit(2) - case PanicOnError: - panic(err) - } - } - return nil -} - -// Parsed reports whether f.Parse has been called. -func (f *FlagSet) Parsed() bool { - return f.parsed -} - -// Parse parses the command-line flags from os.Args[1:]. Must be called -// after all flags are defined and before flags are accessed by the program. -func Parse() { - // Ignore errors; CommandLine is set for ExitOnError. - CommandLine.Parse(os.Args[1:]) -} - -// Whether to support interspersed option/non-option arguments. -func SetInterspersed(interspersed bool) { - CommandLine.SetInterspersed(interspersed) -} - -// Parsed returns true if the command-line flags have been parsed. -func Parsed() bool { - return CommandLine.Parsed() -} - -// The default set of command-line flags, parsed from os.Args. -var CommandLine = NewFlagSet(os.Args[0], ExitOnError) - -// NewFlagSet returns a new, empty flag set with the specified name and -// error handling property. -func NewFlagSet(name string, errorHandling ErrorHandling) *FlagSet { - f := &FlagSet{ - name: name, - errorHandling: errorHandling, - interspersed: true, - } - return f -} - -// Whether to support interspersed option/non-option arguments. -func (f *FlagSet) SetInterspersed(interspersed bool) { - f.interspersed = interspersed -} - -// Init sets the name and error handling property for a flag set. 
-// By default, the zero FlagSet uses an empty name and the -// ContinueOnError error handling policy. -func (f *FlagSet) Init(name string, errorHandling ErrorHandling) { - f.name = name - f.errorHandling = errorHandling -} diff --git a/vendor/github.com/ogier/pflag/float32.go b/vendor/github.com/ogier/pflag/float32.go deleted file mode 100644 index a0041e256..000000000 --- a/vendor/github.com/ogier/pflag/float32.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- float32 Value -type float32Value float32 - -func newFloat32Value(val float32, p *float32) *float32Value { - *p = val - return (*float32Value)(p) -} - -func (f *float32Value) Set(s string) error { - v, err := strconv.ParseFloat(s, 32) - *f = float32Value(v) - return err -} - -func (f *float32Value) String() string { return fmt.Sprintf("%v", *f) } - -// Float32Var defines a float32 flag with specified name, default value, and usage string. -// The argument p points to a float32 variable in which to store the value of the flag. -func (f *FlagSet) Float32Var(p *float32, name string, value float32, usage string) { - f.VarP(newFloat32Value(value, p), name, "", usage) -} - -// Like Float32Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Float32VarP(p *float32, name, shorthand string, value float32, usage string) { - f.VarP(newFloat32Value(value, p), name, shorthand, usage) -} - -// Float32Var defines a float32 flag with specified name, default value, and usage string. -// The argument p points to a float32 variable in which to store the value of the flag. -func Float32Var(p *float32, name string, value float32, usage string) { - CommandLine.VarP(newFloat32Value(value, p), name, "", usage) -} - -// Like Float32Var, but accepts a shorthand letter that can be used after a single dash. -func Float32VarP(p *float32, name, shorthand string, value float32, usage string) { - CommandLine.VarP(newFloat32Value(value, p), name, shorthand, usage) -} - -// Float32 defines a float32 flag with specified name, default value, and usage string. -// The return value is the address of a float32 variable that stores the value of the flag. -func (f *FlagSet) Float32(name string, value float32, usage string) *float32 { - p := new(float32) - f.Float32VarP(p, name, "", value, usage) - return p -} - -// Like Float32, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Float32P(name, shorthand string, value float32, usage string) *float32 { - p := new(float32) - f.Float32VarP(p, name, shorthand, value, usage) - return p -} - -// Float32 defines a float32 flag with specified name, default value, and usage string. -// The return value is the address of a float32 variable that stores the value of the flag. -func Float32(name string, value float32, usage string) *float32 { - return CommandLine.Float32P(name, "", value, usage) -} - -// Like Float32, but accepts a shorthand letter that can be used after a single dash. 
-func Float32P(name, shorthand string, value float32, usage string) *float32 { - return CommandLine.Float32P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/float64.go b/vendor/github.com/ogier/pflag/float64.go deleted file mode 100644 index 8d79be065..000000000 --- a/vendor/github.com/ogier/pflag/float64.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- float64 Value -type float64Value float64 - -func newFloat64Value(val float64, p *float64) *float64Value { - *p = val - return (*float64Value)(p) -} - -func (f *float64Value) Set(s string) error { - v, err := strconv.ParseFloat(s, 64) - *f = float64Value(v) - return err -} - -func (f *float64Value) String() string { return fmt.Sprintf("%v", *f) } - -// Float64Var defines a float64 flag with specified name, default value, and usage string. -// The argument p points to a float64 variable in which to store the value of the flag. -func (f *FlagSet) Float64Var(p *float64, name string, value float64, usage string) { - f.VarP(newFloat64Value(value, p), name, "", usage) -} - -// Like Float64Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Float64VarP(p *float64, name, shorthand string, value float64, usage string) { - f.VarP(newFloat64Value(value, p), name, shorthand, usage) -} - -// Float64Var defines a float64 flag with specified name, default value, and usage string. -// The argument p points to a float64 variable in which to store the value of the flag. -func Float64Var(p *float64, name string, value float64, usage string) { - CommandLine.VarP(newFloat64Value(value, p), name, "", usage) -} - -// Like Float64Var, but accepts a shorthand letter that can be used after a single dash. -func Float64VarP(p *float64, name, shorthand string, value float64, usage string) { - CommandLine.VarP(newFloat64Value(value, p), name, shorthand, usage) -} - -// Float64 defines a float64 flag with specified name, default value, and usage string. -// The return value is the address of a float64 variable that stores the value of the flag. -func (f *FlagSet) Float64(name string, value float64, usage string) *float64 { - p := new(float64) - f.Float64VarP(p, name, "", value, usage) - return p -} - -// Like Float64, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Float64P(name, shorthand string, value float64, usage string) *float64 { - p := new(float64) - f.Float64VarP(p, name, shorthand, value, usage) - return p -} - -// Float64 defines a float64 flag with specified name, default value, and usage string. -// The return value is the address of a float64 variable that stores the value of the flag. -func Float64(name string, value float64, usage string) *float64 { - return CommandLine.Float64P(name, "", value, usage) -} - -// Like Float64, but accepts a shorthand letter that can be used after a single dash. 
-func Float64P(name, shorthand string, value float64, usage string) *float64 { - return CommandLine.Float64P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/int.go b/vendor/github.com/ogier/pflag/int.go deleted file mode 100644 index cb85e1496..000000000 --- a/vendor/github.com/ogier/pflag/int.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- int Value -type intValue int - -func newIntValue(val int, p *int) *intValue { - *p = val - return (*intValue)(p) -} - -func (i *intValue) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 64) - *i = intValue(v) - return err -} - -func (i *intValue) String() string { return fmt.Sprintf("%v", *i) } - -// IntVar defines an int flag with specified name, default value, and usage string. -// The argument p points to an int variable in which to store the value of the flag. -func (f *FlagSet) IntVar(p *int, name string, value int, usage string) { - f.VarP(newIntValue(value, p), name, "", usage) -} - -// Like IntVar, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) IntVarP(p *int, name, shorthand string, value int, usage string) { - f.VarP(newIntValue(value, p), name, shorthand, usage) -} - -// IntVar defines an int flag with specified name, default value, and usage string. -// The argument p points to an int variable in which to store the value of the flag. -func IntVar(p *int, name string, value int, usage string) { - CommandLine.VarP(newIntValue(value, p), name, "", usage) -} - -// Like IntVar, but accepts a shorthand letter that can be used after a single dash. -func IntVarP(p *int, name, shorthand string, value int, usage string) { - CommandLine.VarP(newIntValue(value, p), name, shorthand, usage) -} - -// Int defines an int flag with specified name, default value, and usage string. -// The return value is the address of an int variable that stores the value of the flag. -func (f *FlagSet) Int(name string, value int, usage string) *int { - p := new(int) - f.IntVarP(p, name, "", value, usage) - return p -} - -// Like Int, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) IntP(name, shorthand string, value int, usage string) *int { - p := new(int) - f.IntVarP(p, name, shorthand, value, usage) - return p -} - -// Int defines an int flag with specified name, default value, and usage string. -// The return value is the address of an int variable that stores the value of the flag. -func Int(name string, value int, usage string) *int { - return CommandLine.IntP(name, "", value, usage) -} - -// Like Int, but accepts a shorthand letter that can be used after a single dash. -func IntP(name, shorthand string, value int, usage string) *int { - return CommandLine.IntP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/int32.go b/vendor/github.com/ogier/pflag/int32.go deleted file mode 100644 index 2e1a317f3..000000000 --- a/vendor/github.com/ogier/pflag/int32.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- int32 Value -type int32Value int32 - -func newInt32Value(val int32, p *int32) *int32Value { - *p = val - return (*int32Value)(p) -} - -func (i *int32Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 32) - *i = int32Value(v) - return err -} - -func (i *int32Value) String() string { return fmt.Sprintf("%v", *i) } - -// Int32Var defines an int32 flag with specified name, default value, and usage string. 
-// The argument p points to an int32 variable in which to store the value of the flag. -func (f *FlagSet) Int32Var(p *int32, name string, value int32, usage string) { - f.VarP(newInt32Value(value, p), name, "", usage) -} - -// Like Int32Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Int32VarP(p *int32, name, shorthand string, value int32, usage string) { - f.VarP(newInt32Value(value, p), name, shorthand, usage) -} - -// Int32Var defines an int32 flag with specified name, default value, and usage string. -// The argument p points to an int32 variable in which to store the value of the flag. -func Int32Var(p *int32, name string, value int32, usage string) { - CommandLine.VarP(newInt32Value(value, p), name, "", usage) -} - -// Like Int32Var, but accepts a shorthand letter that can be used after a single dash. -func Int32VarP(p *int32, name, shorthand string, value int32, usage string) { - CommandLine.VarP(newInt32Value(value, p), name, shorthand, usage) -} - -// Int32 defines an int32 flag with specified name, default value, and usage string. -// The return value is the address of an int32 variable that stores the value of the flag. -func (f *FlagSet) Int32(name string, value int32, usage string) *int32 { - p := new(int32) - f.Int32VarP(p, name, "", value, usage) - return p -} - -// Like Int32, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Int32P(name, shorthand string, value int32, usage string) *int32 { - p := new(int32) - f.Int32VarP(p, name, shorthand, value, usage) - return p -} - -// Int32 defines an int32 flag with specified name, default value, and usage string. -// The return value is the address of an int32 variable that stores the value of the flag. -func Int32(name string, value int32, usage string) *int32 { - return CommandLine.Int32P(name, "", value, usage) -} - -// Like Int32, but accepts a shorthand letter that can be used after a single dash. -func Int32P(name, shorthand string, value int32, usage string) *int32 { - return CommandLine.Int32P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/int64.go b/vendor/github.com/ogier/pflag/int64.go deleted file mode 100644 index 43aeced81..000000000 --- a/vendor/github.com/ogier/pflag/int64.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- int64 Value -type int64Value int64 - -func newInt64Value(val int64, p *int64) *int64Value { - *p = val - return (*int64Value)(p) -} - -func (i *int64Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 64) - *i = int64Value(v) - return err -} - -func (i *int64Value) String() string { return fmt.Sprintf("%v", *i) } - -// Int64Var defines an int64 flag with specified name, default value, and usage string. -// The argument p points to an int64 variable in which to store the value of the flag. -func (f *FlagSet) Int64Var(p *int64, name string, value int64, usage string) { - f.VarP(newInt64Value(value, p), name, "", usage) -} - -// Like Int64Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Int64VarP(p *int64, name, shorthand string, value int64, usage string) { - f.VarP(newInt64Value(value, p), name, shorthand, usage) -} - -// Int64Var defines an int64 flag with specified name, default value, and usage string. -// The argument p points to an int64 variable in which to store the value of the flag. 
-func Int64Var(p *int64, name string, value int64, usage string) { - CommandLine.VarP(newInt64Value(value, p), name, "", usage) -} - -// Like Int64Var, but accepts a shorthand letter that can be used after a single dash. -func Int64VarP(p *int64, name, shorthand string, value int64, usage string) { - CommandLine.VarP(newInt64Value(value, p), name, shorthand, usage) -} - -// Int64 defines an int64 flag with specified name, default value, and usage string. -// The return value is the address of an int64 variable that stores the value of the flag. -func (f *FlagSet) Int64(name string, value int64, usage string) *int64 { - p := new(int64) - f.Int64VarP(p, name, "", value, usage) - return p -} - -// Like Int64, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Int64P(name, shorthand string, value int64, usage string) *int64 { - p := new(int64) - f.Int64VarP(p, name, shorthand, value, usage) - return p -} - -// Int64 defines an int64 flag with specified name, default value, and usage string. -// The return value is the address of an int64 variable that stores the value of the flag. -func Int64(name string, value int64, usage string) *int64 { - return CommandLine.Int64P(name, "", value, usage) -} - -// Like Int64, but accepts a shorthand letter that can be used after a single dash. -func Int64P(name, shorthand string, value int64, usage string) *int64 { - return CommandLine.Int64P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/int8.go b/vendor/github.com/ogier/pflag/int8.go deleted file mode 100644 index 539c4eb3b..000000000 --- a/vendor/github.com/ogier/pflag/int8.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- int8 Value -type int8Value int8 - -func newInt8Value(val int8, p *int8) *int8Value { - *p = val - return (*int8Value)(p) -} - -func (i *int8Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 8) - *i = int8Value(v) - return err -} - -func (i *int8Value) String() string { return fmt.Sprintf("%v", *i) } - -// Int8Var defines an int8 flag with specified name, default value, and usage string. -// The argument p points to an int8 variable in which to store the value of the flag. -func (f *FlagSet) Int8Var(p *int8, name string, value int8, usage string) { - f.VarP(newInt8Value(value, p), name, "", usage) -} - -// Like Int8Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Int8VarP(p *int8, name, shorthand string, value int8, usage string) { - f.VarP(newInt8Value(value, p), name, shorthand, usage) -} - -// Int8Var defines an int8 flag with specified name, default value, and usage string. -// The argument p points to an int8 variable in which to store the value of the flag. -func Int8Var(p *int8, name string, value int8, usage string) { - CommandLine.VarP(newInt8Value(value, p), name, "", usage) -} - -// Like Int8Var, but accepts a shorthand letter that can be used after a single dash. -func Int8VarP(p *int8, name, shorthand string, value int8, usage string) { - CommandLine.VarP(newInt8Value(value, p), name, shorthand, usage) -} - -// Int8 defines an int8 flag with specified name, default value, and usage string. -// The return value is the address of an int8 variable that stores the value of the flag. -func (f *FlagSet) Int8(name string, value int8, usage string) *int8 { - p := new(int8) - f.Int8VarP(p, name, "", value, usage) - return p -} - -// Like Int8, but accepts a shorthand letter that can be used after a single dash. 
-func (f *FlagSet) Int8P(name, shorthand string, value int8, usage string) *int8 { - p := new(int8) - f.Int8VarP(p, name, shorthand, value, usage) - return p -} - -// Int8 defines an int8 flag with specified name, default value, and usage string. -// The return value is the address of an int8 variable that stores the value of the flag. -func Int8(name string, value int8, usage string) *int8 { - return CommandLine.Int8P(name, "", value, usage) -} - -// Like Int8, but accepts a shorthand letter that can be used after a single dash. -func Int8P(name, shorthand string, value int8, usage string) *int8 { - return CommandLine.Int8P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/ip.go b/vendor/github.com/ogier/pflag/ip.go deleted file mode 100644 index 3a411fcd4..000000000 --- a/vendor/github.com/ogier/pflag/ip.go +++ /dev/null @@ -1,75 +0,0 @@ -package pflag - -import ( - "fmt" - "net" -) - -// -- net.IP value -type ipValue net.IP - -func newIPValue(val net.IP, p *net.IP) *ipValue { - *p = val - return (*ipValue)(p) -} - -func (i *ipValue) String() string { return net.IP(*i).String() } -func (i *ipValue) Set(s string) error { - ip := net.ParseIP(s) - if ip == nil { - return fmt.Errorf("failed to parse IP: %q", s) - } - *i = ipValue(ip) - return nil -} -func (i *ipValue) Get() interface{} { - return net.IP(*i) -} - -// IPVar defines an net.IP flag with specified name, default value, and usage string. -// The argument p points to an net.IP variable in which to store the value of the flag. -func (f *FlagSet) IPVar(p *net.IP, name string, value net.IP, usage string) { - f.VarP(newIPValue(value, p), name, "", usage) -} - -// Like IPVar, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) IPVarP(p *net.IP, name, shorthand string, value net.IP, usage string) { - f.VarP(newIPValue(value, p), name, shorthand, usage) -} - -// IPVar defines an net.IP flag with specified name, default value, and usage string. -// The argument p points to an net.IP variable in which to store the value of the flag. -func IPVar(p *net.IP, name string, value net.IP, usage string) { - CommandLine.VarP(newIPValue(value, p), name, "", usage) -} - -// Like IPVar, but accepts a shorthand letter that can be used after a single dash. -func IPVarP(p *net.IP, name, shorthand string, value net.IP, usage string) { - CommandLine.VarP(newIPValue(value, p), name, shorthand, usage) -} - -// IP defines an net.IP flag with specified name, default value, and usage string. -// The return value is the address of an net.IP variable that stores the value of the flag. -func (f *FlagSet) IP(name string, value net.IP, usage string) *net.IP { - p := new(net.IP) - f.IPVarP(p, name, "", value, usage) - return p -} - -// Like IP, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) IPP(name, shorthand string, value net.IP, usage string) *net.IP { - p := new(net.IP) - f.IPVarP(p, name, shorthand, value, usage) - return p -} - -// IP defines an net.IP flag with specified name, default value, and usage string. -// The return value is the address of an net.IP variable that stores the value of the flag. -func IP(name string, value net.IP, usage string) *net.IP { - return CommandLine.IPP(name, "", value, usage) -} - -// Like IP, but accepts a shorthand letter that can be used after a single dash. 
-func IPP(name, shorthand string, value net.IP, usage string) *net.IP { - return CommandLine.IPP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/ipmask.go b/vendor/github.com/ogier/pflag/ipmask.go deleted file mode 100644 index b8a164ae2..000000000 --- a/vendor/github.com/ogier/pflag/ipmask.go +++ /dev/null @@ -1,85 +0,0 @@ -package pflag - -import ( - "fmt" - "net" -) - -// -- net.IPMask value -type ipMaskValue net.IPMask - -func newIPMaskValue(val net.IPMask, p *net.IPMask) *ipMaskValue { - *p = val - return (*ipMaskValue)(p) -} - -func (i *ipMaskValue) String() string { return net.IPMask(*i).String() } -func (i *ipMaskValue) Set(s string) error { - ip := ParseIPv4Mask(s) - if ip == nil { - return fmt.Errorf("failed to parse IP mask: %q", s) - } - *i = ipMaskValue(ip) - return nil -} -func (i *ipMaskValue) Get() interface{} { - return net.IPMask(*i) -} - -// Parse IPv4 netmask written in IP form (e.g. 255.255.255.0). -// This function should really belong to the net package. -func ParseIPv4Mask(s string) net.IPMask { - mask := net.ParseIP(s) - if mask == nil { - return nil - } - return net.IPv4Mask(mask[12], mask[13], mask[14], mask[15]) -} - -// IPMaskVar defines an net.IPMask flag with specified name, default value, and usage string. -// The argument p points to an net.IPMask variable in which to store the value of the flag. -func (f *FlagSet) IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) { - f.VarP(newIPMaskValue(value, p), name, "", usage) -} - -// Like IPMaskVar, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) IPMaskVarP(p *net.IPMask, name, shorthand string, value net.IPMask, usage string) { - f.VarP(newIPMaskValue(value, p), name, shorthand, usage) -} - -// IPMaskVar defines an net.IPMask flag with specified name, default value, and usage string. -// The argument p points to an net.IPMask variable in which to store the value of the flag. -func IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) { - CommandLine.VarP(newIPMaskValue(value, p), name, "", usage) -} - -// Like IPMaskVar, but accepts a shorthand letter that can be used after a single dash. -func IPMaskVarP(p *net.IPMask, name, shorthand string, value net.IPMask, usage string) { - CommandLine.VarP(newIPMaskValue(value, p), name, shorthand, usage) -} - -// IPMask defines an net.IPMask flag with specified name, default value, and usage string. -// The return value is the address of an net.IPMask variable that stores the value of the flag. -func (f *FlagSet) IPMask(name string, value net.IPMask, usage string) *net.IPMask { - p := new(net.IPMask) - f.IPMaskVarP(p, name, "", value, usage) - return p -} - -// Like IPMask, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) IPMaskP(name, shorthand string, value net.IPMask, usage string) *net.IPMask { - p := new(net.IPMask) - f.IPMaskVarP(p, name, shorthand, value, usage) - return p -} - -// IPMask defines an net.IPMask flag with specified name, default value, and usage string. -// The return value is the address of an net.IPMask variable that stores the value of the flag. -func IPMask(name string, value net.IPMask, usage string) *net.IPMask { - return CommandLine.IPMaskP(name, "", value, usage) -} - -// Like IP, but accepts a shorthand letter that can be used after a single dash. 
-func IPMaskP(name, shorthand string, value net.IPMask, usage string) *net.IPMask { - return CommandLine.IPMaskP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/string.go b/vendor/github.com/ogier/pflag/string.go deleted file mode 100644 index 65c0cb746..000000000 --- a/vendor/github.com/ogier/pflag/string.go +++ /dev/null @@ -1,66 +0,0 @@ -package pflag - -import "fmt" - -// -- string Value -type stringValue string - -func newStringValue(val string, p *string) *stringValue { - *p = val - return (*stringValue)(p) -} - -func (s *stringValue) Set(val string) error { - *s = stringValue(val) - return nil -} - -func (s *stringValue) String() string { return fmt.Sprintf("%s", *s) } - -// StringVar defines a string flag with specified name, default value, and usage string. -// The argument p points to a string variable in which to store the value of the flag. -func (f *FlagSet) StringVar(p *string, name string, value string, usage string) { - f.VarP(newStringValue(value, p), name, "", usage) -} - -// Like StringVar, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) StringVarP(p *string, name, shorthand string, value string, usage string) { - f.VarP(newStringValue(value, p), name, shorthand, usage) -} - -// StringVar defines a string flag with specified name, default value, and usage string. -// The argument p points to a string variable in which to store the value of the flag. -func StringVar(p *string, name string, value string, usage string) { - CommandLine.VarP(newStringValue(value, p), name, "", usage) -} - -// Like StringVar, but accepts a shorthand letter that can be used after a single dash. -func StringVarP(p *string, name, shorthand string, value string, usage string) { - CommandLine.VarP(newStringValue(value, p), name, shorthand, usage) -} - -// String defines a string flag with specified name, default value, and usage string. -// The return value is the address of a string variable that stores the value of the flag. -func (f *FlagSet) String(name string, value string, usage string) *string { - p := new(string) - f.StringVarP(p, name, "", value, usage) - return p -} - -// Like String, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) StringP(name, shorthand string, value string, usage string) *string { - p := new(string) - f.StringVarP(p, name, shorthand, value, usage) - return p -} - -// String defines a string flag with specified name, default value, and usage string. -// The return value is the address of a string variable that stores the value of the flag. -func String(name string, value string, usage string) *string { - return CommandLine.StringP(name, "", value, usage) -} - -// Like String, but accepts a shorthand letter that can be used after a single dash. 
-func StringP(name, shorthand string, value string, usage string) *string { - return CommandLine.StringP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/uint.go b/vendor/github.com/ogier/pflag/uint.go deleted file mode 100644 index 40b9ebbe1..000000000 --- a/vendor/github.com/ogier/pflag/uint.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- uint Value -type uintValue uint - -func newUintValue(val uint, p *uint) *uintValue { - *p = val - return (*uintValue)(p) -} - -func (i *uintValue) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 64) - *i = uintValue(v) - return err -} - -func (i *uintValue) String() string { return fmt.Sprintf("%v", *i) } - -// UintVar defines a uint flag with specified name, default value, and usage string. -// The argument p points to a uint variable in which to store the value of the flag. -func (f *FlagSet) UintVar(p *uint, name string, value uint, usage string) { - f.VarP(newUintValue(value, p), name, "", usage) -} - -// Like UintVar, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) UintVarP(p *uint, name, shorthand string, value uint, usage string) { - f.VarP(newUintValue(value, p), name, shorthand, usage) -} - -// UintVar defines a uint flag with specified name, default value, and usage string. -// The argument p points to a uint variable in which to store the value of the flag. -func UintVar(p *uint, name string, value uint, usage string) { - CommandLine.VarP(newUintValue(value, p), name, "", usage) -} - -// Like UintVar, but accepts a shorthand letter that can be used after a single dash. -func UintVarP(p *uint, name, shorthand string, value uint, usage string) { - CommandLine.VarP(newUintValue(value, p), name, shorthand, usage) -} - -// Uint defines a uint flag with specified name, default value, and usage string. -// The return value is the address of a uint variable that stores the value of the flag. -func (f *FlagSet) Uint(name string, value uint, usage string) *uint { - p := new(uint) - f.UintVarP(p, name, "", value, usage) - return p -} - -// Like Uint, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) UintP(name, shorthand string, value uint, usage string) *uint { - p := new(uint) - f.UintVarP(p, name, shorthand, value, usage) - return p -} - -// Uint defines a uint flag with specified name, default value, and usage string. -// The return value is the address of a uint variable that stores the value of the flag. -func Uint(name string, value uint, usage string) *uint { - return CommandLine.UintP(name, "", value, usage) -} - -// Like Uint, but accepts a shorthand letter that can be used after a single dash. 
-func UintP(name, shorthand string, value uint, usage string) *uint { - return CommandLine.UintP(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/uint16.go b/vendor/github.com/ogier/pflag/uint16.go deleted file mode 100644 index 182dc4095..000000000 --- a/vendor/github.com/ogier/pflag/uint16.go +++ /dev/null @@ -1,71 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- uint16 value -type uint16Value uint16 - -func newUint16Value(val uint16, p *uint16) *uint16Value { - *p = val - return (*uint16Value)(p) -} -func (i *uint16Value) String() string { return fmt.Sprintf("%d", *i) } -func (i *uint16Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 16) - *i = uint16Value(v) - return err -} -func (i *uint16Value) Get() interface{} { - return uint16(*i) -} - -// Uint16Var defines a uint flag with specified name, default value, and usage string. -// The argument p points to a uint variable in which to store the value of the flag. -func (f *FlagSet) Uint16Var(p *uint16, name string, value uint16, usage string) { - f.VarP(newUint16Value(value, p), name, "", usage) -} - -// Like Uint16Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint16VarP(p *uint16, name, shorthand string, value uint16, usage string) { - f.VarP(newUint16Value(value, p), name, shorthand, usage) -} - -// Uint16Var defines a uint flag with specified name, default value, and usage string. -// The argument p points to a uint variable in which to store the value of the flag. -func Uint16Var(p *uint16, name string, value uint16, usage string) { - CommandLine.VarP(newUint16Value(value, p), name, "", usage) -} - -// Like Uint16Var, but accepts a shorthand letter that can be used after a single dash. -func Uint16VarP(p *uint16, name, shorthand string, value uint16, usage string) { - CommandLine.VarP(newUint16Value(value, p), name, shorthand, usage) -} - -// Uint16 defines a uint flag with specified name, default value, and usage string. -// The return value is the address of a uint variable that stores the value of the flag. -func (f *FlagSet) Uint16(name string, value uint16, usage string) *uint16 { - p := new(uint16) - f.Uint16VarP(p, name, "", value, usage) - return p -} - -// Like Uint16, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint16P(name, shorthand string, value uint16, usage string) *uint16 { - p := new(uint16) - f.Uint16VarP(p, name, shorthand, value, usage) - return p -} - -// Uint16 defines a uint flag with specified name, default value, and usage string. -// The return value is the address of a uint variable that stores the value of the flag. -func Uint16(name string, value uint16, usage string) *uint16 { - return CommandLine.Uint16P(name, "", value, usage) -} - -// Like Uint16, but accepts a shorthand letter that can be used after a single dash. 
-func Uint16P(name, shorthand string, value uint16, usage string) *uint16 { - return CommandLine.Uint16P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/uint32.go b/vendor/github.com/ogier/pflag/uint32.go deleted file mode 100644 index 165c8b259..000000000 --- a/vendor/github.com/ogier/pflag/uint32.go +++ /dev/null @@ -1,71 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- uint16 value -type uint32Value uint32 - -func newUint32Value(val uint32, p *uint32) *uint32Value { - *p = val - return (*uint32Value)(p) -} -func (i *uint32Value) String() string { return fmt.Sprintf("%d", *i) } -func (i *uint32Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 32) - *i = uint32Value(v) - return err -} -func (i *uint32Value) Get() interface{} { - return uint32(*i) -} - -// Uint32Var defines a uint32 flag with specified name, default value, and usage string. -// The argument p points to a uint32 variable in which to store the value of the flag. -func (f *FlagSet) Uint32Var(p *uint32, name string, value uint32, usage string) { - f.VarP(newUint32Value(value, p), name, "", usage) -} - -// Like Uint32Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint32VarP(p *uint32, name, shorthand string, value uint32, usage string) { - f.VarP(newUint32Value(value, p), name, shorthand, usage) -} - -// Uint32Var defines a uint32 flag with specified name, default value, and usage string. -// The argument p points to a uint32 variable in which to store the value of the flag. -func Uint32Var(p *uint32, name string, value uint32, usage string) { - CommandLine.VarP(newUint32Value(value, p), name, "", usage) -} - -// Like Uint32Var, but accepts a shorthand letter that can be used after a single dash. -func Uint32VarP(p *uint32, name, shorthand string, value uint32, usage string) { - CommandLine.VarP(newUint32Value(value, p), name, shorthand, usage) -} - -// Uint32 defines a uint32 flag with specified name, default value, and usage string. -// The return value is the address of a uint32 variable that stores the value of the flag. -func (f *FlagSet) Uint32(name string, value uint32, usage string) *uint32 { - p := new(uint32) - f.Uint32VarP(p, name, "", value, usage) - return p -} - -// Like Uint32, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint32P(name, shorthand string, value uint32, usage string) *uint32 { - p := new(uint32) - f.Uint32VarP(p, name, shorthand, value, usage) - return p -} - -// Uint32 defines a uint32 flag with specified name, default value, and usage string. -// The return value is the address of a uint32 variable that stores the value of the flag. -func Uint32(name string, value uint32, usage string) *uint32 { - return CommandLine.Uint32P(name, "", value, usage) -} - -// Like Uint32, but accepts a shorthand letter that can be used after a single dash. 
-func Uint32P(name, shorthand string, value uint32, usage string) *uint32 { - return CommandLine.Uint32P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/uint64.go b/vendor/github.com/ogier/pflag/uint64.go deleted file mode 100644 index f41c5a293..000000000 --- a/vendor/github.com/ogier/pflag/uint64.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- uint64 Value -type uint64Value uint64 - -func newUint64Value(val uint64, p *uint64) *uint64Value { - *p = val - return (*uint64Value)(p) -} - -func (i *uint64Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 64) - *i = uint64Value(v) - return err -} - -func (i *uint64Value) String() string { return fmt.Sprintf("%v", *i) } - -// Uint64Var defines a uint64 flag with specified name, default value, and usage string. -// The argument p points to a uint64 variable in which to store the value of the flag. -func (f *FlagSet) Uint64Var(p *uint64, name string, value uint64, usage string) { - f.VarP(newUint64Value(value, p), name, "", usage) -} - -// Like Uint64Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint64VarP(p *uint64, name, shorthand string, value uint64, usage string) { - f.VarP(newUint64Value(value, p), name, shorthand, usage) -} - -// Uint64Var defines a uint64 flag with specified name, default value, and usage string. -// The argument p points to a uint64 variable in which to store the value of the flag. -func Uint64Var(p *uint64, name string, value uint64, usage string) { - CommandLine.VarP(newUint64Value(value, p), name, "", usage) -} - -// Like Uint64Var, but accepts a shorthand letter that can be used after a single dash. -func Uint64VarP(p *uint64, name, shorthand string, value uint64, usage string) { - CommandLine.VarP(newUint64Value(value, p), name, shorthand, usage) -} - -// Uint64 defines a uint64 flag with specified name, default value, and usage string. -// The return value is the address of a uint64 variable that stores the value of the flag. -func (f *FlagSet) Uint64(name string, value uint64, usage string) *uint64 { - p := new(uint64) - f.Uint64VarP(p, name, "", value, usage) - return p -} - -// Like Uint64, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint64P(name, shorthand string, value uint64, usage string) *uint64 { - p := new(uint64) - f.Uint64VarP(p, name, shorthand, value, usage) - return p -} - -// Uint64 defines a uint64 flag with specified name, default value, and usage string. -// The return value is the address of a uint64 variable that stores the value of the flag. -func Uint64(name string, value uint64, usage string) *uint64 { - return CommandLine.Uint64P(name, "", value, usage) -} - -// Like Uint64, but accepts a shorthand letter that can be used after a single dash. 
-func Uint64P(name, shorthand string, value uint64, usage string) *uint64 { - return CommandLine.Uint64P(name, shorthand, value, usage) -} diff --git a/vendor/github.com/ogier/pflag/uint8.go b/vendor/github.com/ogier/pflag/uint8.go deleted file mode 100644 index 174f99cad..000000000 --- a/vendor/github.com/ogier/pflag/uint8.go +++ /dev/null @@ -1,70 +0,0 @@ -package pflag - -import ( - "fmt" - "strconv" -) - -// -- uint8 Value -type uint8Value uint8 - -func newUint8Value(val uint8, p *uint8) *uint8Value { - *p = val - return (*uint8Value)(p) -} - -func (i *uint8Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 8) - *i = uint8Value(v) - return err -} - -func (i *uint8Value) String() string { return fmt.Sprintf("%v", *i) } - -// Uint8Var defines a uint8 flag with specified name, default value, and usage string. -// The argument p points to a uint8 variable in which to store the value of the flag. -func (f *FlagSet) Uint8Var(p *uint8, name string, value uint8, usage string) { - f.VarP(newUint8Value(value, p), name, "", usage) -} - -// Like Uint8Var, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint8VarP(p *uint8, name, shorthand string, value uint8, usage string) { - f.VarP(newUint8Value(value, p), name, shorthand, usage) -} - -// Uint8Var defines a uint8 flag with specified name, default value, and usage string. -// The argument p points to a uint8 variable in which to store the value of the flag. -func Uint8Var(p *uint8, name string, value uint8, usage string) { - CommandLine.VarP(newUint8Value(value, p), name, "", usage) -} - -// Like Uint8Var, but accepts a shorthand letter that can be used after a single dash. -func Uint8VarP(p *uint8, name, shorthand string, value uint8, usage string) { - CommandLine.VarP(newUint8Value(value, p), name, shorthand, usage) -} - -// Uint8 defines a uint8 flag with specified name, default value, and usage string. -// The return value is the address of a uint8 variable that stores the value of the flag. -func (f *FlagSet) Uint8(name string, value uint8, usage string) *uint8 { - p := new(uint8) - f.Uint8VarP(p, name, "", value, usage) - return p -} - -// Like Uint8, but accepts a shorthand letter that can be used after a single dash. -func (f *FlagSet) Uint8P(name, shorthand string, value uint8, usage string) *uint8 { - p := new(uint8) - f.Uint8VarP(p, name, shorthand, value, usage) - return p -} - -// Uint8 defines a uint8 flag with specified name, default value, and usage string. -// The return value is the address of a uint8 variable that stores the value of the flag. -func Uint8(name string, value uint8, usage string) *uint8 { - return CommandLine.Uint8P(name, "", value, usage) -} - -// Like Uint8, but accepts a shorthand letter that can be used after a single dash. -func Uint8P(name, shorthand string, value uint8, usage string) *uint8 { - return CommandLine.Uint8P(name, shorthand, value, usage) -}