chore: move the parser to a dedicated package.

parent eecc2f4dd7
commit 1502d20def

90 changed files with 191 additions and 14278 deletions
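In short: the generic configuration plumbing (the env/flag/file/parser/generator helpers and the cli command/loader code) now comes from the external github.com/traefik/paerser module, and every caller swaps its imports accordingly (for example types.Duration becomes ptypes.Duration from paerser/types). A minimal sketch of the resulting usage, assuming paerser v0.1.0 as pinned in go.mod below; the Config type and its field are illustrative, not taken from this diff:

package main

import (
	"fmt"
	"os"
	"time"

	"github.com/traefik/paerser/env"          // was github.com/containous/traefik/v2/pkg/config/env
	ptypes "github.com/traefik/paerser/types" // was github.com/containous/traefik/v2/pkg/types (for Duration)
)

// Config stands in for Traefik's static configuration struct (hypothetical).
type Config struct {
	ProvidersThrottleDuration ptypes.Duration
}

func main() {
	cfg := &Config{ProvidersThrottleDuration: ptypes.Duration(2 * time.Second)}

	// Same call the EnvLoader below uses: collect the prefixed environment
	// variables that map onto cfg's fields.
	vars := env.FindPrefixedEnvVars(os.Environ(), env.DefaultNamePrefix, cfg)

	fmt.Println(len(vars), time.Duration(cfg.ProvidersThrottleDuration))
}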
@@ -4,7 +4,7 @@ import (
"time"

"github.com/containous/traefik/v2/pkg/config/static"
"github.com/containous/traefik/v2/pkg/types"
ptypes "github.com/traefik/paerser/types"
)

// TraefikCmdConfiguration wraps the static configuration and extra parameters.
@@ -23,7 +23,7 @@ func NewTraefikConfiguration() *TraefikCmdConfiguration {
},
EntryPoints: make(static.EntryPoints),
Providers: &static.Providers{
ProvidersThrottleDuration: types.Duration(2 * time.Second),
ProvidersThrottleDuration: ptypes.Duration(2 * time.Second),
},
ServersTransport: &static.ServersTransport{
MaxIdleConnsPerHost: 200,

@@ -7,8 +7,8 @@ import (
"os"
"time"

"github.com/containous/traefik/v2/pkg/cli"
"github.com/containous/traefik/v2/pkg/config/static"
"github.com/traefik/paerser/cli"
)

// NewCmd builds a new HealthCheck command.

@@ -15,7 +15,7 @@ import (
"github.com/containous/traefik/v2/cmd"
"github.com/containous/traefik/v2/cmd/healthcheck"
cmdVersion "github.com/containous/traefik/v2/cmd/version"
"github.com/containous/traefik/v2/pkg/cli"
tcli "github.com/containous/traefik/v2/pkg/cli"
"github.com/containous/traefik/v2/pkg/collector"
"github.com/containous/traefik/v2/pkg/config/dynamic"
"github.com/containous/traefik/v2/pkg/config/runtime"
@@ -38,6 +38,7 @@ import (
"github.com/coreos/go-systemd/daemon"
assetfs "github.com/elazarl/go-bindata-assetfs"
"github.com/sirupsen/logrus"
"github.com/traefik/paerser/cli"
"github.com/vulcand/oxy/roundrobin"
)

@@ -45,7 +46,7 @@ func main() {
// traefik config inits
tConfig := cmd.NewTraefikConfiguration()

loaders := []cli.ResourceLoader{&cli.FileLoader{}, &cli.FlagLoader{}, &cli.EnvLoader{}}
loaders := []cli.ResourceLoader{&tcli.FileLoader{}, &tcli.FlagLoader{}, &tcli.EnvLoader{}}

cmdTraefik := &cli.Command{
Name: "traefik",

@@ -7,8 +7,8 @@ import (
"runtime"
"text/template"

"github.com/containous/traefik/v2/pkg/cli"
"github.com/containous/traefik/v2/pkg/version"
"github.com/traefik/paerser/cli"
)

var versionTemplate = `Version: {{.Version}}

go.mod (6 changes)

@@ -6,7 +6,6 @@ require (
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 // indirect
github.com/BurntSushi/toml v0.3.1
github.com/ExpediaDotCom/haystack-client-go v0.0.0-20190315171017-e7edbdf53a61
github.com/Masterminds/goutils v1.1.0 // indirect
github.com/Masterminds/semver v1.4.2 // indirect
github.com/Masterminds/sprig v2.22.0+incompatible
github.com/Microsoft/hcsshim v0.8.7 // indirect
@@ -47,7 +46,6 @@ require (
github.com/gorilla/websocket v1.4.2
github.com/hashicorp/consul/api v1.3.0
github.com/hashicorp/go-version v1.2.0
github.com/huandu/xstrings v1.2.0 // indirect
github.com/influxdata/influxdb1-client v0.0.0-20190809212627-fc22c7df067e
github.com/instana/go-sensor v1.5.1
github.com/libkermit/compose v0.0.0-20171122111507-c04e39c026ad
@@ -73,9 +71,10 @@ require (
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4
github.com/rancher/go-rancher-metadata v0.0.0-20200311180630-7f4c936a06ac
github.com/sirupsen/logrus v1.4.2
github.com/stretchr/testify v1.5.1
github.com/stretchr/testify v1.6.1
github.com/stvp/go-udp-testing v0.0.0-20191102171040-06b61409b154
github.com/tinylib/msgp v1.0.2 // indirect
github.com/traefik/paerser v0.1.0
github.com/uber/jaeger-client-go v2.22.1+incompatible
github.com/uber/jaeger-lib v2.2.0+incompatible
github.com/unrolled/render v1.0.2
@@ -92,7 +91,6 @@ require (
gopkg.in/DataDog/dd-trace-go.v1 v1.19.0
gopkg.in/fsnotify.v1 v1.4.7
gopkg.in/jcmturner/goidentity.v3 v3.0.0 // indirect
gopkg.in/yaml.v2 v2.2.8
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776
k8s.io/api v0.18.2
k8s.io/apimachinery v0.18.2

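The new module is declared like any other requirement; to reproduce the dependency bump locally one would typically run something along these lines (standard Go module tooling, not part of this diff):

	go get github.com/traefik/paerser@v0.1.0
	go mod tidy

which also accounts for the go.sum churn below (new paerser entries plus refreshed hashes for testify, sprig/v3, semver/v3, xstrings, mergo, yaml, and golang.org/x/crypto).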
go.sum (21 changes)

@@ -68,8 +68,12 @@ github.com/Masterminds/goutils v1.1.0 h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RP
github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver v1.4.2 h1:WBLTQ37jOCzSLtXNdoo8bNM8876KhNqOKvrlGITgsTc=
github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver/v3 v3.1.0 h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk=
github.com/Masterminds/semver/v3 v3.1.0/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60=
github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o=
github.com/Masterminds/sprig/v3 v3.1.0 h1:j7GpgZ7PdFqNsmncycTHsLmVPf5/3wJtlgW9TNDYD9Y=
github.com/Masterminds/sprig/v3 v3.1.0/go.mod h1:ONGMf7UfYGAbMXCZmQLy8x3lCDIPrEZE/rU8pmrbihA=
github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5 h1:ygIc8M6trr62pF5DucadTWGdEB4mEyvzi0e2nbcmcyA=
github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/hcsshim v0.8.7 h1:ptnOoufxGSzauVTsdE+wMYnCWA301PdoN4xg5oRdZpg=
@@ -418,13 +422,15 @@ github.com/hashicorp/serf v0.8.2 h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huandu/xstrings v1.2.0 h1:yPeWdRnmynF7p+lLYz0H2tthW9lqhMJrQV/U7yy4wX0=
github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4=
github.com/huandu/xstrings v1.3.1 h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs=
github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/iij/doapi v0.0.0-20190504054126-0bbf12d6d7df h1:MZf03xP9WdakyXhOWuAD5uPK3wHh96wCsqe3hCMKh8E=
github.com/iij/doapi v0.0.0-20190504054126-0bbf12d6d7df/go.mod h1:QMZY7/J/KSQEhKWFeDesPjMj+wCHReeknARU3wqlyN4=
github.com/imdario/mergo v0.3.5 h1:JboBksRwiiAJWvIYJVo46AfV+IAIKZpfrSzVKj42R4Q=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.8 h1:CGgOkSJeqMRmt0D9XLWExdT4m4F1vd3FV3VPt+0VxkQ=
github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/influxdata/influxdb1-client v0.0.0-20190809212627-fc22c7df067e h1:txQltCyjXAqVVSZDArPEhUTg35hKwVIuXwtQo7eAMNQ=
github.com/influxdata/influxdb1-client v0.0.0-20190809212627-fc22c7df067e/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
github.com/instana/go-sensor v1.5.1 h1:GLxYsYiDWD15RSXDHS70VvTVU/CbwUimWrK6/e4eBPQ=
@@ -665,6 +671,8 @@ github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9
github.com/soheilhy/cmux v0.1.4 h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
@@ -678,6 +686,8 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stvp/go-udp-testing v0.0.0-20191102171040-06b61409b154 h1:XGopsea1Dw7ecQ8JscCNQXDGYAKDiWjDeXnpN/+BY9g=
github.com/stvp/go-udp-testing v0.0.0-20191102171040-06b61409b154/go.mod h1:7jxmlfBCDBXRzr0eAQJ48XC1hBu1np4CS5+cHEYfwpc=
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
@@ -687,6 +697,8 @@ github.com/tinylib/msgp v1.0.2 h1:DfdQrzQa7Yh2es9SuLkixqxuXS2SxsdYn0KbdrOGWD8=
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5 h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/traefik/paerser v0.1.0 h1:B4v1tbvd8YnHsA7spwHKEWJoGrRP+2jYpIozsCMHhl0=
github.com/traefik/paerser v0.1.0/go.mod h1:yYnAgdEC2wJH5CgG75qGWC8SsFDEapg09o9RrA6FfrE=
github.com/transip/gotransip/v6 v6.0.2 h1:rOCMY607PYF+YvMHHtJt7eZRd0mx/uhyz6dsXWPmn+4=
github.com/transip/gotransip/v6 v6.0.2/go.mod h1:pQZ36hWWRahCUXkFWlx9Hs711gLd8J4qdgLdRzmtY+g=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
@@ -765,6 +777,8 @@ golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200317142112-1b76d66859c6 h1:TjszyFsQsyZNHwdVdZ5m7bjmreu0znc2kRYsEml9/Ww=
golang.org/x/crypto v0.0.0-20200317142112-1b76d66859c6/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -1023,6 +1037,9 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=

@@ -10,16 +10,18 @@ import (
"strings"

"github.com/BurntSushi/toml"
"github.com/containous/traefik/v2/pkg/config/env"
"github.com/containous/traefik/v2/pkg/config/flag"
"github.com/containous/traefik/v2/pkg/config/generator"
"github.com/containous/traefik/v2/pkg/config/parser"
"github.com/containous/traefik/v2/pkg/config/static"
"github.com/containous/traefik/v2/pkg/log"
"github.com/traefik/paerser/env"
"github.com/traefik/paerser/flag"
"github.com/traefik/paerser/generator"
"github.com/traefik/paerser/parser"
)

func main() {
genStaticConfDoc("./docs/content/reference/static-configuration/env-ref.md", "", env.Encode)
genStaticConfDoc("./docs/content/reference/static-configuration/env-ref.md", "", func(i interface{}) ([]parser.Flat, error) {
return env.Encode(env.DefaultNamePrefix, i)
})
genStaticConfDoc("./docs/content/reference/static-configuration/cli-ref.md", "--", flag.Encode)
genKVDynConfDoc("./docs/content/reference/dynamic-configuration/kv-ref.md")
}

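The documentation generator adapts to paerser's env.Encode, which takes the variable-name prefix explicitly and returns the flattened []parser.Flat entries the doc templates iterate over. A minimal sketch of that call, mirroring the closure above; demoConfig is made up for illustration:

package main

import (
	"fmt"
	"log"

	"github.com/traefik/paerser/env"
)

// demoConfig is illustrative; the real generator passes Traefik's static configuration.
type demoConfig struct {
	Foo string `description:"Foo description"`
}

func main() {
	// Same shape as the closure above: prefix first, element second.
	flats, err := env.Encode(env.DefaultNamePrefix, &demoConfig{Foo: "bar"})
	if err != nil {
		log.Fatal(err)
	}
	for _, flat := range flats {
		// Each entry carries the flattened name, default value, and description.
		fmt.Println(flat.Name, flat.Default, flat.Description)
	}
}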
@@ -21,6 +21,7 @@ import (
"github.com/containous/traefik/v2/pkg/tracing/zipkin"
"github.com/containous/traefik/v2/pkg/types"
assetfs "github.com/elazarl/go-bindata-assetfs"
ptypes "github.com/traefik/paerser/types"
)

func TestDo_globalConfiguration(t *testing.T) {
@@ -65,9 +66,9 @@ func TestDo_globalConfiguration(t *testing.T) {
Address: "foo Address",
Transport: &static.EntryPointsTransport{
RespondingTimeouts: &static.RespondingTimeouts{
ReadTimeout: types.Duration(111 * time.Second),
WriteTimeout: types.Duration(111 * time.Second),
IdleTimeout: types.Duration(111 * time.Second),
ReadTimeout: ptypes.Duration(111 * time.Second),
WriteTimeout: ptypes.Duration(111 * time.Second),
IdleTimeout: ptypes.Duration(111 * time.Second),
},
},
ProxyProtocol: &static.ProxyProtocol{
@@ -78,9 +79,9 @@ func TestDo_globalConfiguration(t *testing.T) {
Address: "fii Address",
Transport: &static.EntryPointsTransport{
RespondingTimeouts: &static.RespondingTimeouts{
ReadTimeout: types.Duration(111 * time.Second),
WriteTimeout: types.Duration(111 * time.Second),
IdleTimeout: types.Duration(111 * time.Second),
ReadTimeout: ptypes.Duration(111 * time.Second),
WriteTimeout: ptypes.Duration(111 * time.Second),
IdleTimeout: ptypes.Duration(111 * time.Second),
},
},
ProxyProtocol: &static.ProxyProtocol{
@@ -104,7 +105,7 @@ func TestDo_globalConfiguration(t *testing.T) {
},
}
config.Providers = &static.Providers{
ProvidersThrottleDuration: types.Duration(111 * time.Second),
ProvidersThrottleDuration: ptypes.Duration(111 * time.Second),
}

config.ServersTransport = &static.ServersTransport{
@@ -112,8 +113,8 @@ func TestDo_globalConfiguration(t *testing.T) {
RootCAs: []traefiktls.FileOrContent{"RootCAs 1", "RootCAs 2", "RootCAs 3"},
MaxIdleConnsPerHost: 111,
ForwardingTimeouts: &static.ForwardingTimeouts{
DialTimeout: types.Duration(111 * time.Second),
ResponseHeaderTimeout: types.Duration(111 * time.Second),
DialTimeout: ptypes.Duration(111 * time.Second),
ResponseHeaderTimeout: ptypes.Duration(111 * time.Second),
},
}

@ -1,148 +0,0 @@
|
|||
// Package cli provides tools to create commands that support advanced configuration features,
|
||||
// sub-commands, and allowing configuration from command-line flags, configuration files, and environment variables.
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// Command structure contains program/command information (command name and description).
|
||||
type Command struct {
|
||||
Name string
|
||||
Description string
|
||||
Configuration interface{}
|
||||
Resources []ResourceLoader
|
||||
Run func([]string) error
|
||||
CustomHelpFunc func(io.Writer, *Command) error
|
||||
Hidden bool
|
||||
// AllowArg if not set, disallows any argument that is not a known command or a sub-command.
|
||||
AllowArg bool
|
||||
subCommands []*Command
|
||||
}
|
||||
|
||||
// AddCommand Adds a sub command.
|
||||
func (c *Command) AddCommand(cmd *Command) error {
|
||||
if c == nil || cmd == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if c.Name == cmd.Name {
|
||||
return fmt.Errorf("child command cannot have the same name as their parent: %s", cmd.Name)
|
||||
}
|
||||
|
||||
c.subCommands = append(c.subCommands, cmd)
|
||||
return nil
|
||||
}
|
||||
|
||||
// PrintHelp calls the custom help function of the command if it's set.
|
||||
// Otherwise, it calls the default help function.
|
||||
func (c *Command) PrintHelp(w io.Writer) error {
|
||||
if c.CustomHelpFunc != nil {
|
||||
return c.CustomHelpFunc(w, c)
|
||||
}
|
||||
return PrintHelp(w, c)
|
||||
}
|
||||
|
||||
// Execute Executes a command.
|
||||
func Execute(cmd *Command) error {
|
||||
return execute(cmd, os.Args, true)
|
||||
}
|
||||
|
||||
func execute(cmd *Command, args []string, root bool) error {
|
||||
// Calls command without args.
|
||||
if len(args) == 1 {
|
||||
if err := run(cmd, args[1:]); err != nil {
|
||||
return fmt.Errorf("command %s error: %w", args[0], err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Special case: if the command is the top level one,
|
||||
// and the first arg (`args[1]`) is not the command name or a known sub-command,
|
||||
// then we run the top level command itself.
|
||||
if root && cmd.Name != args[1] && !contains(cmd.subCommands, args[1]) {
|
||||
if err := run(cmd, args[1:]); err != nil {
|
||||
return fmt.Errorf("command %s error: %w", filepath.Base(args[0]), err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Calls command by its name.
|
||||
if len(args) >= 2 && cmd.Name == args[1] {
|
||||
if len(args) < 3 || !contains(cmd.subCommands, args[2]) {
|
||||
if err := run(cmd, args[2:]); err != nil {
|
||||
return fmt.Errorf("command %s error: %w", cmd.Name, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// No sub-command, calls the current command.
|
||||
if len(cmd.subCommands) == 0 {
|
||||
if err := run(cmd, args[1:]); err != nil {
|
||||
return fmt.Errorf("command %s error: %w", cmd.Name, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Trying to find the sub-command.
|
||||
for _, subCmd := range cmd.subCommands {
|
||||
if len(args) >= 2 && subCmd.Name == args[1] {
|
||||
return execute(subCmd, args, false)
|
||||
}
|
||||
if len(args) >= 3 && subCmd.Name == args[2] {
|
||||
return execute(subCmd, args[1:], false)
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Errorf("command not found: %v", args)
|
||||
}
|
||||
|
||||
func run(cmd *Command, args []string) error {
|
||||
if len(args) > 0 && !isFlag(args[0]) && !cmd.AllowArg {
|
||||
_ = cmd.PrintHelp(os.Stdout)
|
||||
return fmt.Errorf("command not found: %s", args[0])
|
||||
}
|
||||
|
||||
if isHelp(args) {
|
||||
return cmd.PrintHelp(os.Stdout)
|
||||
}
|
||||
|
||||
if cmd.Run == nil {
|
||||
_ = cmd.PrintHelp(os.Stdout)
|
||||
return fmt.Errorf("command %s is not runnable", cmd.Name)
|
||||
}
|
||||
|
||||
if cmd.Configuration == nil {
|
||||
return cmd.Run(args)
|
||||
}
|
||||
|
||||
for _, resource := range cmd.Resources {
|
||||
done, err := resource.Load(args, cmd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if done {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return cmd.Run(args)
|
||||
}
|
||||
|
||||
func contains(cmds []*Command, name string) bool {
|
||||
for _, cmd := range cmds {
|
||||
if cmd.Name == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func isFlag(arg string) bool {
|
||||
return len(arg) > 0 && arg[0] == '-'
|
||||
}
|
|
@ -1,941 +0,0 @@
|
|||
package cli
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCommand_AddCommand(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
subCommand *Command
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
desc: "sub command nil",
|
||||
subCommand: nil,
|
||||
},
|
||||
{
|
||||
desc: "add a simple command",
|
||||
subCommand: &Command{
|
||||
Name: "sub",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "add a sub command with the same name as their parent",
|
||||
subCommand: &Command{
|
||||
Name: "root",
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
}
|
||||
|
||||
err := rootCmd.AddCommand(test.subCommand)
|
||||
|
||||
if test.expectedError {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommand_PrintHelp(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
command *Command
|
||||
expectedOutput string
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
desc: "print default help",
|
||||
command: &Command{},
|
||||
expectedOutput: " \n\nUsage: [command] [flags] [arguments]\n\nUse \" [command] --help\" for help on any command.\n\n",
|
||||
},
|
||||
{
|
||||
desc: "print custom help",
|
||||
command: &Command{
|
||||
Name: "root",
|
||||
Description: "Description for root",
|
||||
Configuration: &struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{},
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
CustomHelpFunc: func(w io.Writer, _ *Command) error {
|
||||
_, _ = fmt.Fprintln(w, "test")
|
||||
return nil
|
||||
},
|
||||
},
|
||||
expectedOutput: "test\n",
|
||||
},
|
||||
{
|
||||
desc: "error is returned from called help",
|
||||
command: &Command{
|
||||
CustomHelpFunc: func(_ io.Writer, _ *Command) error {
|
||||
return errors.New("test")
|
||||
},
|
||||
},
|
||||
expectedError: errors.New("test"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
buffer := &bytes.Buffer{}
|
||||
err := test.command.PrintHelp(buffer)
|
||||
|
||||
assert.Equal(t, test.expectedError, err)
|
||||
assert.Equal(t, test.expectedOutput, buffer.String())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_execute(t *testing.T) {
|
||||
var called string
|
||||
|
||||
type expected struct {
|
||||
result string
|
||||
error bool
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
command func() *Command
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "root command",
|
||||
args: []string{""},
|
||||
command: func() *Command {
|
||||
return &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called = "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
},
|
||||
expected: expected{result: "root"},
|
||||
},
|
||||
{
|
||||
desc: "root command, with argument, command not found",
|
||||
args: []string{"", "echo"},
|
||||
command: func() *Command {
|
||||
return &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called = "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "root command, call help, with argument, command not found",
|
||||
args: []string{"", "echo", "--help"},
|
||||
command: func() *Command {
|
||||
return &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called = "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "one sub command",
|
||||
args: []string{"", "sub1"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "sub1"},
|
||||
},
|
||||
{
|
||||
desc: "one sub command, with argument, command not found",
|
||||
args: []string{"", "sub1", "echo"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "two sub commands",
|
||||
args: []string{"", "sub2"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub2",
|
||||
Description: "sub2",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub2"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "sub2"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub sub command, call sub command",
|
||||
args: []string{"", "sub1"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
sub1 := &Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
_ = rootCmd.AddCommand(sub1)
|
||||
|
||||
_ = sub1.AddCommand(&Command{
|
||||
Name: "sub2",
|
||||
Description: "sub2",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub2"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "sub1"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub sub command, call sub sub command",
|
||||
args: []string{"", "sub1", "sub2"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
sub1 := &Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
_ = rootCmd.AddCommand(sub1)
|
||||
|
||||
_ = sub1.AddCommand(&Command{
|
||||
Name: "sub2",
|
||||
Description: "sub2",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub2"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "sub2"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call root command explicitly",
|
||||
args: []string{"", "root"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "root"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call root command implicitly",
|
||||
args: []string{""},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "root"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call sub command which has no run",
|
||||
args: []string{"", "sub1"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call root command which has no run",
|
||||
args: []string{"", "root"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call implicitly root command which has no run",
|
||||
args: []string{""},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called += "sub1"
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call sub command with arguments",
|
||||
args: []string{"", "sub1", "foobar.txt"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called = "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
AllowArg: true,
|
||||
Run: func(args []string) error {
|
||||
called += "sub1-" + strings.Join(args, "-")
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "sub1-foobar.txt"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call root command with arguments",
|
||||
args: []string{"", "foobar.txt"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
AllowArg: true,
|
||||
Run: func(args []string) error {
|
||||
called += "root-" + strings.Join(args, "-")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
called += "sub1-" + strings.Join(args, "-")
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "root-foobar.txt"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call sub command with flags",
|
||||
args: []string{"", "sub1", "--foo=bar", "--fii=bir"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
called = "root"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
called += "sub1-" + strings.Join(args, "")
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "sub1---foo=bar--fii=bir"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call explicitly root command with flags",
|
||||
args: []string{"", "root", "--foo=bar", "--fii=bir"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
called += "root-" + strings.Join(args, "")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
called += "sub1-" + strings.Join(args, "")
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "root---foo=bar--fii=bir"},
|
||||
},
|
||||
{
|
||||
desc: "command with sub command, call implicitly root command with flags",
|
||||
args: []string{"", "--foo=bar", "--fii=bir"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
called += "root-" + strings.Join(args, "")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
_ = rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
called += "sub1-" + strings.Join(args, "")
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{result: "root---foo=bar--fii=bir"},
|
||||
},
|
||||
{
|
||||
desc: "sub command help",
|
||||
args: []string{"", "test", "subtest", "--help"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
subCmd := &Command{
|
||||
Name: "subtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
err := rootCmd.AddCommand(subCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
subSubCmd := &Command{
|
||||
Name: "subsubtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
err = subCmd.AddCommand(subSubCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
subSubSubCmd := &Command{
|
||||
Name: "subsubsubtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
Run: func([]string) error {
|
||||
called = "subsubsubtest"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
err = subSubCmd.AddCommand(subSubSubCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{},
|
||||
},
|
||||
{
|
||||
desc: "sub sub command help",
|
||||
args: []string{"", "test", "subtest", "subsubtest", "--help"},
|
||||
command: func() *Command {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
subCmd := &Command{
|
||||
Name: "subtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
err := rootCmd.AddCommand(subCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
subSubCmd := &Command{
|
||||
Name: "subsubtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
err = subCmd.AddCommand(subSubCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
subSubSubCmd := &Command{
|
||||
Name: "subsubsubtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
Run: func([]string) error {
|
||||
called = "subsubsubtest"
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
err = subSubCmd.AddCommand(subSubSubCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
return rootCmd
|
||||
},
|
||||
expected: expected{},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
defer func() {
|
||||
called = ""
|
||||
}()
|
||||
|
||||
err := execute(test.command(), test.args, true)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.expected.result, called)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_execute_configuration(t *testing.T) {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
element := &Yo{
|
||||
Fuu: "test",
|
||||
}
|
||||
|
||||
sub1 := &Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: element,
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
err := rootCmd.AddCommand(sub1)
|
||||
require.NoError(t, err)
|
||||
|
||||
args := []string{"", "sub1", "--foo=bar", "--fii=bir", "--yi"}
|
||||
|
||||
err = execute(rootCmd, args, true)
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := &Yo{
|
||||
Foo: "bar",
|
||||
Fii: "bir",
|
||||
Fuu: "test",
|
||||
Yi: &Yi{
|
||||
Foo: "foo",
|
||||
Fii: "fii",
|
||||
},
|
||||
}
|
||||
assert.Equal(t, expected, element)
|
||||
}
|
||||
|
||||
func Test_execute_configuration_file(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
}{
|
||||
{
|
||||
desc: "configFile arg in camel case",
|
||||
args: []string{"", "sub1", "--configFile=./fixtures/config.toml"},
|
||||
},
|
||||
{
|
||||
desc: "configfile arg in lower case",
|
||||
args: []string{"", "sub1", "--configfile=./fixtures/config.toml"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "This is a test",
|
||||
Configuration: nil,
|
||||
Run: func(_ []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
element := &Yo{
|
||||
Fuu: "test",
|
||||
}
|
||||
|
||||
sub1 := &Command{
|
||||
Name: "sub1",
|
||||
Description: "sub1",
|
||||
Configuration: element,
|
||||
Resources: []ResourceLoader{&FileLoader{}, &FlagLoader{}},
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
err := rootCmd.AddCommand(sub1)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = execute(rootCmd, test.args, true)
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := &Yo{
|
||||
Foo: "bar",
|
||||
Fii: "bir",
|
||||
Fuu: "test",
|
||||
Yi: &Yi{
|
||||
Foo: "foo",
|
||||
Fii: "fii",
|
||||
},
|
||||
}
|
||||
assert.Equal(t, expected, element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_execute_help(t *testing.T) {
|
||||
element := &Yo{
|
||||
Fuu: "test",
|
||||
}
|
||||
|
||||
rooCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "Description for root",
|
||||
Configuration: element,
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
args := []string{"", "--help", "--foo"}
|
||||
|
||||
backupStdout := os.Stdout
|
||||
defer func() {
|
||||
os.Stdout = backupStdout
|
||||
}()
|
||||
|
||||
r, w, _ := os.Pipe()
|
||||
os.Stdout = w
|
||||
|
||||
err := execute(rooCmd, args, true)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// read and restore stdout
|
||||
if err = w.Close(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
out, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
os.Stdout = backupStdout
|
||||
|
||||
assert.Equal(t, `root Description for root
|
||||
|
||||
Usage: root [command] [flags] [arguments]
|
||||
|
||||
Use "root [command] --help" for help on any command.
|
||||
|
||||
Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s)
|
||||
or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s)
|
||||
|
||||
Flags:
|
||||
--fii (Default: "fii")
|
||||
Fii description
|
||||
|
||||
--foo (Default: "foo")
|
||||
Foo description
|
||||
|
||||
--fuu (Default: "test")
|
||||
Fuu description
|
||||
|
||||
--yi (Default: "false")
|
||||
|
||||
--yi.fii (Default: "fii")
|
||||
|
||||
--yi.foo (Default: "foo")
|
||||
|
||||
--yi.fuu (Default: "")
|
||||
|
||||
--yu.fii (Default: "fii")
|
||||
|
||||
--yu.foo (Default: "foo")
|
||||
|
||||
--yu.fuu (Default: "")
|
||||
|
||||
`, string(out))
|
||||
}
|
||||
|
||||
func TestName(t *testing.T) {
|
||||
rootCmd := &Command{
|
||||
Name: "test",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
subCmd := &Command{
|
||||
Name: "subtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
}
|
||||
|
||||
err := rootCmd.AddCommand(subCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
subSubCmd := &Command{
|
||||
Name: "subsubtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
Run: func([]string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
err = subCmd.AddCommand(subSubCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
subSubSubCmd := &Command{
|
||||
Name: "subsubsubtest",
|
||||
Resources: []ResourceLoader{&FlagLoader{}},
|
||||
Run: func([]string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
err = subSubCmd.AddCommand(subSubSubCmd)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = execute(rootCmd, []string{"", "test", "subtest", "subsubtest", "subsubsubtest", "--help"}, true)
|
||||
require.NoError(t, err)
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
package cli
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Finder holds a list of file paths.
|
||||
type Finder struct {
|
||||
BasePaths []string
|
||||
Extensions []string
|
||||
}
|
||||
|
||||
// Find returns the first valid existing file among configFile
|
||||
// and the paths already registered with Finder.
|
||||
func (f Finder) Find(configFile string) (string, error) {
|
||||
paths := f.getPaths(configFile)
|
||||
|
||||
for _, filePath := range paths {
|
||||
fp := os.ExpandEnv(filePath)
|
||||
|
||||
_, err := os.Stat(fp)
|
||||
if os.IsNotExist(err) {
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return filepath.Abs(fp)
|
||||
}
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (f Finder) getPaths(configFile string) []string {
|
||||
var paths []string
|
||||
if strings.TrimSpace(configFile) != "" {
|
||||
paths = append(paths, configFile)
|
||||
}
|
||||
|
||||
for _, basePath := range f.BasePaths {
|
||||
for _, ext := range f.Extensions {
|
||||
paths = append(paths, basePath+"."+ext)
|
||||
}
|
||||
}
|
||||
|
||||
return paths
|
||||
}
|
|
@ -1,161 +0,0 @@
|
|||
package cli
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestFinder_Find(t *testing.T) {
|
||||
configFile, err := ioutil.TempFile("", "traefik-file-finder-test-*.toml")
|
||||
require.NoError(t, err)
|
||||
|
||||
defer func() {
|
||||
_ = os.Remove(configFile.Name())
|
||||
}()
|
||||
|
||||
dir, err := ioutil.TempDir("", "traefik-file-finder-test")
|
||||
require.NoError(t, err)
|
||||
|
||||
defer func() {
|
||||
_ = os.RemoveAll(dir)
|
||||
}()
|
||||
|
||||
fooFile, err := os.Create(filepath.Join(dir, "foo.toml"))
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = os.Create(filepath.Join(dir, "bar.toml"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type expected struct {
|
||||
error bool
|
||||
path string
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
basePaths []string
|
||||
configFile string
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "not found: no config file",
|
||||
configFile: "",
|
||||
expected: expected{path: ""},
|
||||
},
|
||||
{
|
||||
desc: "not found: no config file, no other paths available",
|
||||
configFile: "",
|
||||
basePaths: []string{"/my/path/traefik", "$HOME/my/path/traefik", "./my-traefik"},
|
||||
expected: expected{path: ""},
|
||||
},
|
||||
{
|
||||
desc: "not found: with non existing config file",
|
||||
configFile: "/my/path/config.toml",
|
||||
expected: expected{path: ""},
|
||||
},
|
||||
{
|
||||
desc: "found: with config file",
|
||||
configFile: configFile.Name(),
|
||||
expected: expected{path: configFile.Name()},
|
||||
},
|
||||
{
|
||||
desc: "found: no config file, first base path",
|
||||
configFile: "",
|
||||
basePaths: []string{filepath.Join(dir, "foo"), filepath.Join(dir, "bar")},
|
||||
expected: expected{path: fooFile.Name()},
|
||||
},
|
||||
{
|
||||
desc: "found: no config file, base path",
|
||||
configFile: "",
|
||||
basePaths: []string{"/my/path/traefik", "$HOME/my/path/traefik", filepath.Join(dir, "foo")},
|
||||
expected: expected{path: fooFile.Name()},
|
||||
},
|
||||
{
|
||||
desc: "found: config file over base path",
|
||||
configFile: configFile.Name(),
|
||||
basePaths: []string{filepath.Join(dir, "foo"), filepath.Join(dir, "bar")},
|
||||
expected: expected{path: configFile.Name()},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
finder := Finder{
|
||||
BasePaths: test.basePaths,
|
||||
Extensions: []string{"toml", "yaml", "yml"},
|
||||
}
|
||||
|
||||
path, err := finder.Find(test.configFile)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.expected.path, path)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFinder_getPaths(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
basePaths []string
|
||||
configFile string
|
||||
expected []string
|
||||
}{
|
||||
{
|
||||
desc: "no config file",
|
||||
basePaths: []string{"/etc/traefik/traefik", "$HOME/.config/traefik", "./traefik"},
|
||||
configFile: "",
|
||||
expected: []string{
|
||||
"/etc/traefik/traefik.toml",
|
||||
"/etc/traefik/traefik.yaml",
|
||||
"/etc/traefik/traefik.yml",
|
||||
"$HOME/.config/traefik.toml",
|
||||
"$HOME/.config/traefik.yaml",
|
||||
"$HOME/.config/traefik.yml",
|
||||
"./traefik.toml",
|
||||
"./traefik.yaml",
|
||||
"./traefik.yml",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with config file",
|
||||
basePaths: []string{"/etc/traefik/traefik", "$HOME/.config/traefik", "./traefik"},
|
||||
configFile: "/my/path/config.toml",
|
||||
expected: []string{
|
||||
"/my/path/config.toml",
|
||||
"/etc/traefik/traefik.toml",
|
||||
"/etc/traefik/traefik.yaml",
|
||||
"/etc/traefik/traefik.yml",
|
||||
"$HOME/.config/traefik.toml",
|
||||
"$HOME/.config/traefik.yaml",
|
||||
"$HOME/.config/traefik.yml",
|
||||
"./traefik.toml",
|
||||
"./traefik.yaml",
|
||||
"./traefik.yml",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
finder := Finder{
|
||||
BasePaths: test.basePaths,
|
||||
Extensions: []string{"toml", "yaml", "yml"},
|
||||
}
|
||||
paths := finder.getPaths(test.configFile)
|
||||
|
||||
assert.Equal(t, test.expected, paths)
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -1,3 +0,0 @@
foo = "bar"
fii = "bir"
[yi]

@@ -1,25 +0,0 @@
package cli

type Yo struct {
Foo string `description:"Foo description"`
Fii string `description:"Fii description"`
Fuu string `description:"Fuu description"`
Yi *Yi `label:"allowEmpty" file:"allowEmpty"`
Yu *Yi
}

func (y *Yo) SetDefaults() {
y.Foo = "foo"
y.Fii = "fii"
}

type Yi struct {
Foo string
Fii string
Fuu string
}

func (y *Yi) SetDefaults() {
y.Foo = "foo"
y.Fii = "fii"
}

@ -1,89 +0,0 @@
|
|||
package cli
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"text/template"
|
||||
|
||||
"github.com/Masterminds/sprig"
|
||||
"github.com/containous/traefik/v2/pkg/config/flag"
|
||||
"github.com/containous/traefik/v2/pkg/config/generator"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
const tmplHelp = `{{ .Cmd.Name }} {{ .Cmd.Description }}
|
||||
|
||||
Usage: {{ .Cmd.Name }} [command] [flags] [arguments]
|
||||
|
||||
Use "{{ .Cmd.Name }} [command] --help" for help on any command.
|
||||
{{if .SubCommands }}
|
||||
Commands:
|
||||
{{- range $i, $subCmd := .SubCommands }}
|
||||
{{ if not $subCmd.Hidden }} {{ $subCmd.Name }} {{ $subCmd.Description }}{{end}}{{end}}
|
||||
{{end}}
|
||||
{{- if .Flags }}
|
||||
Flag's usage: {{ .Cmd.Name }} [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s)
|
||||
or: {{ .Cmd.Name }} [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s)
|
||||
|
||||
Flags:
|
||||
{{- range $i, $flag := .Flags }}
|
||||
--{{ SliceIndexN $flag.Name }} {{if ne $flag.Name "global.sendanonymoususage"}}(Default: "{{ $flag.Default}}"){{end}}
|
||||
{{if $flag.Description }} {{ wrapWith 80 "\n\t\t" $flag.Description }}
|
||||
{{else}}
|
||||
{{- end}}
|
||||
{{- end}}
|
||||
{{- end}}
|
||||
`
|
||||
|
||||
func isHelp(args []string) bool {
|
||||
for _, name := range args {
|
||||
if name == "--help" || name == "-help" || name == "-h" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// PrintHelp prints the help for the command given as argument.
|
||||
func PrintHelp(w io.Writer, cmd *Command) error {
|
||||
var flags []parser.Flat
|
||||
if cmd.Configuration != nil {
|
||||
generator.Generate(cmd.Configuration)
|
||||
|
||||
var err error
|
||||
flags, err = flag.Encode(cmd.Configuration)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
model := map[string]interface{}{
|
||||
"Cmd": cmd,
|
||||
"Flags": flags,
|
||||
"SubCommands": cmd.subCommands,
|
||||
}
|
||||
|
||||
funcs := sprig.TxtFuncMap()
|
||||
funcs["SliceIndexN"] = sliceIndexN
|
||||
|
||||
tmpl, err := template.New("flags").
|
||||
Funcs(funcs).
|
||||
Parse(tmplHelp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tw := tabwriter.NewWriter(w, 4, 0, 4, ' ', 0)
|
||||
|
||||
err = tmpl.Execute(tw, model)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return tw.Flush()
|
||||
}
|
||||
|
||||
func sliceIndexN(flag string) string {
|
||||
return strings.ReplaceAll(flag, "[0]", "[n]")
|
||||
}
|
|
@ -1,211 +0,0 @@
|
|||
package cli
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestPrintHelp(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
command *Command
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
desc: "no sub-command, with flags",
|
||||
command: func() *Command {
|
||||
element := &Yo{
|
||||
Fuu: "test",
|
||||
}
|
||||
|
||||
return &Command{
|
||||
Name: "root",
|
||||
Description: "Description for root",
|
||||
Configuration: element,
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}(),
|
||||
expected: `root Description for root
|
||||
|
||||
Usage: root [command] [flags] [arguments]
|
||||
|
||||
Use "root [command] --help" for help on any command.
|
||||
|
||||
Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s)
|
||||
or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s)
|
||||
|
||||
Flags:
|
||||
--fii (Default: "fii")
|
||||
Fii description
|
||||
|
||||
--foo (Default: "foo")
|
||||
Foo description
|
||||
|
||||
--fuu (Default: "test")
|
||||
Fuu description
|
||||
|
||||
--yi (Default: "false")
|
||||
|
||||
--yi.fii (Default: "fii")
|
||||
|
||||
--yi.foo (Default: "foo")
|
||||
|
||||
--yi.fuu (Default: "")
|
||||
|
||||
--yu.fii (Default: "fii")
|
||||
|
||||
--yu.foo (Default: "foo")
|
||||
|
||||
--yu.fuu (Default: "")
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
desc: "with sub-commands, with flags, call root help",
|
||||
command: func() *Command {
|
||||
element := &Yo{
|
||||
Fuu: "test",
|
||||
}
|
||||
|
||||
rootCmd := &Command{
|
||||
Name: "root",
|
||||
Description: "Description for root",
|
||||
Configuration: element,
|
||||
Run: func(_ []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
err := rootCmd.AddCommand(&Command{
|
||||
Name: "sub1",
|
||||
Description: "Description for sub1",
|
||||
Configuration: element,
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
err = rootCmd.AddCommand(&Command{
|
||||
Name: "sub2",
|
||||
Description: "Description for sub2",
|
||||
Configuration: element,
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
return rootCmd
|
||||
}(),
|
||||
expected: `root Description for root
|
||||
|
||||
Usage: root [command] [flags] [arguments]
|
||||
|
||||
Use "root [command] --help" for help on any command.
|
||||
|
||||
Commands:
|
||||
sub1 Description for sub1
|
||||
sub2 Description for sub2
|
||||
|
||||
Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s)
|
||||
or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s)
|
||||
|
||||
Flags:
|
||||
--fii (Default: "fii")
|
||||
Fii description
|
||||
|
||||
--foo (Default: "foo")
|
||||
Foo description
|
||||
|
||||
--fuu (Default: "test")
|
||||
Fuu description
|
||||
|
||||
--yi (Default: "false")
|
||||
|
||||
--yi.fii (Default: "fii")
|
||||
|
||||
--yi.foo (Default: "foo")
|
||||
|
||||
--yi.fuu (Default: "")
|
||||
|
||||
--yu.fii (Default: "fii")
|
||||
|
||||
--yu.foo (Default: "foo")
|
||||
|
||||
--yu.fuu (Default: "")
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
desc: "no sub-command, no flags",
|
||||
command: func() *Command {
|
||||
return &Command{
|
||||
Name: "root",
|
||||
Description: "Description for root",
|
||||
Configuration: nil,
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}(),
|
||||
expected: `root Description for root
|
||||
|
||||
Usage: root [command] [flags] [arguments]
|
||||
|
||||
Use "root [command] --help" for help on any command.
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
desc: "no sub-command, slice flags",
|
||||
command: func() *Command {
|
||||
return &Command{
|
||||
Name: "root",
|
||||
Description: "Description for root",
|
||||
Configuration: &struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{},
|
||||
Run: func(args []string) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}(),
|
||||
expected: `root Description for root
|
||||
|
||||
Usage: root [command] [flags] [arguments]
|
||||
|
||||
Use "root [command] --help" for help on any command.
|
||||
|
||||
Flag's usage: root [--flag=flag_argument] [-f [flag_argument]] # set flag_argument to flag(s)
|
||||
or: root [--flag[=true|false| ]] [-f [true|false| ]] # set true/false to boolean flag(s)
|
||||
|
||||
Flags:
|
||||
--foo (Default: "")
|
||||
|
||||
--foo[n].field (Default: "")
|
||||
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
buffer := &bytes.Buffer{}
|
||||
err := PrintHelp(buffer, test.command)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, buffer.String())
|
||||
})
|
||||
}
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
package cli
|
||||
|
||||
// ResourceLoader is a configuration resource loader.
|
||||
type ResourceLoader interface {
|
||||
// Load populates cmd.Configuration, optionally using args to do so.
|
||||
Load(args []string, cmd *Command) (bool, error)
|
||||
}
|
|
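Note: the interface deleted above now lives in paerser's cli package. A minimal sketch of a loader implementing it there; the noopLoader type and the loaders package name are illustrative only, and the Load signature is assumed to stay as shown in this diff.

package loaders

import "github.com/traefik/paerser/cli"

// noopLoader is a hypothetical loader that never contributes configuration.
type noopLoader struct{}

// Load reports that it did not populate cmd.Configuration.
func (noopLoader) Load(_ []string, _ *cli.Command) (bool, error) {
	return false, nil
}

// Compile-time check that noopLoader satisfies the relocated interface.
var _ cli.ResourceLoader = noopLoader{}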
@ -5,15 +5,16 @@ import (
|
|||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/env"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/traefik/paerser/cli"
|
||||
"github.com/traefik/paerser/env"
|
||||
)
|
||||
|
||||
// EnvLoader loads a configuration from all the environment variables prefixed with "TRAEFIK_".
|
||||
type EnvLoader struct{}
|
||||
|
||||
// Load loads the command's configuration from the environment variables.
|
||||
func (e *EnvLoader) Load(_ []string, cmd *Command) (bool, error) {
|
||||
func (e *EnvLoader) Load(_ []string, cmd *cli.Command) (bool, error) {
|
||||
vars := env.FindPrefixedEnvVars(os.Environ(), env.DefaultNamePrefix, cmd.Configuration)
|
||||
if len(vars) == 0 {
|
||||
return false, nil
|
||||
|
|
|
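For orientation, a rough sketch of what this loader now delegates to, using the relocated paerser/env package. The config struct is illustrative, and FindPrefixedEnvVars and Decode are assumed to keep the signatures shown in the deleted pkg/config/env further down.

package main

import (
	"fmt"
	"os"

	"github.com/traefik/paerser/env"
)

type config struct {
	Foo   string
	Debug bool
}

func main() {
	cfg := &config{}

	// Keep only the variables whose names match the configuration's root fields.
	vars := env.FindPrefixedEnvVars(os.Environ(), env.DefaultNamePrefix, cfg)
	if len(vars) == 0 {
		return
	}

	// e.g. TRAEFIK_FOO=bar TRAEFIK_DEBUG=true
	if err := env.Decode(vars, env.DefaultNamePrefix, cfg); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	fmt.Printf("%+v\n", cfg)
}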
@ -5,9 +5,10 @@ import (
|
|||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/file"
|
||||
"github.com/containous/traefik/v2/pkg/config/flag"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/traefik/paerser/cli"
|
||||
"github.com/traefik/paerser/file"
|
||||
"github.com/traefik/paerser/flag"
|
||||
)
|
||||
|
||||
// FileLoader loads a configuration from a file.
|
||||
|
@ -22,7 +23,7 @@ func (f *FileLoader) GetFilename() string {
|
|||
}
|
||||
|
||||
// Load loads the command's configuration from a file either specified with the -traefik.configfile flag, or from default locations.
|
||||
func (f *FileLoader) Load(args []string, cmd *Command) (bool, error) {
|
||||
func (f *FileLoader) Load(args []string, cmd *cli.Command) (bool, error) {
|
||||
ref, err := flag.Parse(args, cmd.Configuration)
|
||||
if err != nil {
|
||||
_ = cmd.PrintHelp(os.Stdout)
|
||||
|
@ -64,7 +65,7 @@ func (f *FileLoader) Load(args []string, cmd *Command) (bool, error) {
|
|||
// loadConfigFiles tries to decode the given configuration file and all default locations for the configuration file.
|
||||
// It stops as soon as decoding one of them is successful.
|
||||
func loadConfigFiles(configFile string, element interface{}) (string, error) {
|
||||
finder := Finder{
|
||||
finder := cli.Finder{
|
||||
BasePaths: []string{"/etc/traefik/traefik", "$XDG_CONFIG_HOME/traefik", "$HOME/.config/traefik", "./traefik"},
|
||||
Extensions: []string{"toml", "yaml", "yml"},
|
||||
}
|
||||
|
|
|
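As a reference point, a small sketch of the relocated Finder used just above. Only the struct construction appears in this diff; the Find method name and its empty-argument behaviour are assumptions carried over from the previous in-tree implementation.

package main

import (
	"fmt"

	"github.com/traefik/paerser/cli"
)

func main() {
	finder := cli.Finder{
		BasePaths:  []string{"/etc/traefik/traefik", "$XDG_CONFIG_HOME/traefik", "$HOME/.config/traefik", "./traefik"},
		Extensions: []string{"toml", "yaml", "yml"},
	}

	// With an empty argument, only the default locations are searched (assumption).
	filePath, err := finder.Find("")
	if err != nil {
		panic(err)
	}

	fmt.Println("configuration file:", filePath)
}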
@ -3,15 +3,16 @@ package cli
|
|||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/flag"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/traefik/paerser/cli"
|
||||
"github.com/traefik/paerser/flag"
|
||||
)
|
||||
|
||||
// FlagLoader loads configuration from flags.
|
||||
type FlagLoader struct{}
|
||||
|
||||
// Load loads the command's configuration from flag arguments.
|
||||
func (*FlagLoader) Load(args []string, cmd *Command) (bool, error) {
|
||||
func (*FlagLoader) Load(args []string, cmd *cli.Command) (bool, error) {
|
||||
if len(args) == 0 {
|
||||
return false, nil
|
||||
}
|
||||
|
|
|
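A minimal sketch of decoding flag arguments with the relocated package. Only Parse appears explicitly in this diff, so the Decode helper used here is an assumption carried over from the previous in-tree flag package; the config struct is illustrative.

package main

import (
	"fmt"
	"os"

	"github.com/traefik/paerser/flag"
)

type config struct {
	Foo   string
	Debug bool
}

func main() {
	cfg := &config{}

	// e.g. ./app --foo=bar --debug
	if err := flag.Decode(os.Args[1:], cfg); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	fmt.Printf("%+v\n", cfg)
}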
@ -178,9 +178,9 @@ type HealthCheck struct {
|
|||
Scheme string `json:"scheme,omitempty" toml:"scheme,omitempty" yaml:"scheme,omitempty"`
|
||||
Path string `json:"path,omitempty" toml:"path,omitempty" yaml:"path,omitempty"`
|
||||
Port int `json:"port,omitempty" toml:"port,omitempty,omitzero" yaml:"port,omitempty"`
|
||||
// FIXME change string to types.Duration
|
||||
// FIXME change string to ptypes.Duration
|
||||
Interval string `json:"interval,omitempty" toml:"interval,omitempty" yaml:"interval,omitempty"`
|
||||
// FIXME change string to types.Duration
|
||||
// FIXME change string to ptypes.Duration
|
||||
Timeout string `json:"timeout,omitempty" toml:"timeout,omitempty" yaml:"timeout,omitempty"`
|
||||
Hostname string `json:"hostname,omitempty" toml:"hostname,omitempty" yaml:"hostname,omitempty"`
|
||||
FollowRedirects *bool `json:"followRedirects" toml:"followRedirects" yaml:"followRedirects"`
|
||||
|
|
|
@ -9,7 +9,7 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/ip"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
// +k8s:deepcopy-gen=true
|
||||
|
@ -319,7 +319,7 @@ type RateLimit struct {
|
|||
|
||||
// Period, in combination with Average, defines the actual maximum rate, such as:
|
||||
// r = Average / Period. It defaults to a second.
|
||||
Period types.Duration `json:"period,omitempty" toml:"period,omitempty" yaml:"period,omitempty"`
|
||||
Period ptypes.Duration `json:"period,omitempty" toml:"period,omitempty" yaml:"period,omitempty"`
|
||||
|
||||
// Burst is the maximum number of requests allowed to arrive in the same arbitrarily small period of time.
|
||||
// It defaults to 1.
|
||||
|
@ -331,7 +331,7 @@ type RateLimit struct {
|
|||
// SetDefaults sets the default values on a RateLimit.
|
||||
func (r *RateLimit) SetDefaults() {
|
||||
r.Burst = 1
|
||||
r.Period = types.Duration(time.Second)
|
||||
r.Period = ptypes.Duration(time.Second)
|
||||
}
|
||||
|
||||
// +k8s:deepcopy-gen=true
|
||||
|
|
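Aside: ptypes.Duration is a defined type over time.Duration, so existing values convert with a plain type conversion; a minimal sketch:

package main

import (
	"fmt"
	"time"

	ptypes "github.com/traefik/paerser/types"
)

func main() {
	// Setting a default, as RateLimit.SetDefaults does above.
	period := ptypes.Duration(time.Second)

	// Converting back when a plain time.Duration is needed.
	fmt.Println(time.Duration(period)) // 1s
}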
77 pkg/config/env/env.go vendored
|
@ -1,77 +0,0 @@
|
|||
// Package env implements encoding and decoding between environment variable and a typed Configuration.
|
||||
package env
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
// DefaultNamePrefix is the default prefix for environment variable names.
|
||||
const DefaultNamePrefix = "TRAEFIK_"
|
||||
|
||||
// Decode decodes the given environment variables into the given element.
|
||||
// The operation goes through four stages roughly summarized as:
|
||||
// env vars -> map
|
||||
// map -> tree of untyped nodes
|
||||
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
|
||||
// "typed" nodes -> typed element.
|
||||
func Decode(environ []string, prefix string, element interface{}) error {
|
||||
if err := checkPrefix(prefix); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
vars := make(map[string]string)
|
||||
for _, evr := range environ {
|
||||
n := strings.SplitN(evr, "=", 2)
|
||||
if strings.HasPrefix(strings.ToUpper(n[0]), prefix) {
|
||||
key := strings.ReplaceAll(strings.ToLower(n[0]), "_", ".")
|
||||
vars[key] = n[1]
|
||||
}
|
||||
}
|
||||
|
||||
rootName := strings.ToLower(prefix[:len(prefix)-1])
|
||||
return parser.Decode(vars, element, rootName)
|
||||
}
|
||||
|
||||
// Encode encodes the configuration in element into the environment variables represented in the returned Flats.
|
||||
// The operation goes through three stages roughly summarized as:
|
||||
// typed configuration in element -> tree of untyped nodes
|
||||
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
|
||||
// "typed" nodes -> environment variables with default values (determined by type/kind).
|
||||
func Encode(element interface{}) ([]parser.Flat, error) {
|
||||
if element == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
etnOpts := parser.EncoderToNodeOpts{OmitEmpty: false, TagName: parser.TagLabel, AllowSliceAsStruct: true}
|
||||
node, err := parser.EncodeToNode(element, parser.DefaultRootName, etnOpts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
metaOpts := parser.MetadataOpts{TagName: parser.TagLabel, AllowSliceAsStruct: true}
|
||||
err = parser.AddMetadata(element, node, metaOpts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
flatOpts := parser.FlatOpts{Case: "upper", Separator: "_", TagName: parser.TagLabel}
|
||||
return parser.EncodeToFlat(element, node, flatOpts)
|
||||
}
|
||||
|
||||
func checkPrefix(prefix string) error {
|
||||
prefixPattern := `[a-zA-Z0-9]+_`
|
||||
matched, err := regexp.MatchString(prefixPattern, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !matched {
|
||||
return fmt.Errorf("invalid prefix %q, the prefix pattern must match the following pattern: %s", prefix, prefixPattern)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
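To make the first stage of the Decode removed above concrete, here is a standalone sketch of the KEY=VALUE normalization it performs before handing the map to the parser; it mirrors the deleted loop and needs no paerser import.

package main

import (
	"fmt"
	"strings"
)

func main() {
	environ := []string{"TRAEFIK_FOO_NAME=bar", "PATH=/usr/bin"}
	prefix := "TRAEFIK_"

	// env vars -> map of dotted, lower-cased keys, as in the deleted Decode.
	vars := make(map[string]string)
	for _, evr := range environ {
		n := strings.SplitN(evr, "=", 2)
		if strings.HasPrefix(strings.ToUpper(n[0]), prefix) {
			vars[strings.ReplaceAll(strings.ToLower(n[0]), "_", ".")] = n[1]
		}
	}

	fmt.Println(vars) // map[traefik.foo.name:bar]
}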
462 pkg/config/env/env_test.go vendored
|
@ -1,462 +0,0 @@
|
|||
package env
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/generator"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
environ []string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "no env vars",
|
||||
environ: nil,
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
desc: "bool value",
|
||||
environ: []string{"TRAEFIK_FOO=true"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
}{
|
||||
Foo: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
environ: []string{"TRAEFIK_FOO=bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple bool flags without value",
|
||||
environ: []string{"TRAEFIK_FOO=true", "TRAEFIK_BAR=true"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{
|
||||
Foo: true,
|
||||
Bar: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
environ: []string{"TRAEFIK_FOO_NAME=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]string
|
||||
}{
|
||||
Foo: map[string]string{
|
||||
"name": "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
environ: []string{"TRAEFIK_FOO_NAME_VALUE=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{
|
||||
Foo: map[string]struct{ Value string }{
|
||||
"name": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-struct",
|
||||
environ: []string{"TRAEFIK_FOO_NAME_BAR_VALUE=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}{
|
||||
"name": {
|
||||
Bar: &struct {
|
||||
Value string
|
||||
}{
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map",
|
||||
environ: []string{"TRAEFIK_FOO_NAME1_BAR_NAME2_VALUE=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}{
|
||||
"name1": {
|
||||
Bar: map[string]struct{ Value string }{
|
||||
"name2": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice",
|
||||
environ: []string{"TRAEFIK_FOO=bar,baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{"bar", "baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer value",
|
||||
environ: []string{"TRAEFIK_FOO=true"},
|
||||
element: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct{ Field string }{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
err := Decode(test.environ, DefaultNamePrefix, test.element)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncode(t *testing.T) {
|
||||
element := &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "bar",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 1,
|
||||
FieldIn4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
FieldIn5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
FieldIn6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: &struct{ Field string }{},
|
||||
FieldIn12: func(v string) *string { return &v }(""),
|
||||
FieldIn13: func(v bool) *bool { return &v }(false),
|
||||
FieldIn14: func(v int) *int { return &v }(0),
|
||||
},
|
||||
Field1: "bir",
|
||||
Field2: true,
|
||||
Field3: 0,
|
||||
Field4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Field5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
Field6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: &struct{ Field string }{},
|
||||
Field12: func(v string) *string { return &v }(""),
|
||||
Field13: func(v bool) *bool { return &v }(false),
|
||||
Field14: func(v int) *int { return &v }(0),
|
||||
Field15: []int{7},
|
||||
}
|
||||
generator.Generate(element)
|
||||
|
||||
flats, err := Encode(element)
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := []parser.Flat{
|
||||
{
|
||||
Name: "TRAEFIK_FIELD1",
|
||||
Description: "",
|
||||
Default: "bir",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD10",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD10_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD11_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD12",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD13",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD14",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD15",
|
||||
Description: "",
|
||||
Default: "7",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD2",
|
||||
Description: "",
|
||||
Default: "true",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD3",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD4_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD5_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD6_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD6_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD7_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD7_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD8_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD8_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD9_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD9_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN1",
|
||||
Description: "",
|
||||
Default: "bar",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN10",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN10_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN11_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN12",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN13",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN14",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN2",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN3",
|
||||
Description: "",
|
||||
Default: "1",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN4_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN5_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN6_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN6_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN7_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN7_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN8_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN8_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN9_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN9_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected, flats)
|
||||
}
|
64 pkg/config/env/filter.go vendored
|
@ -1,64 +0,0 @@
|
|||
package env
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
// FindPrefixedEnvVars finds prefixed environment variables.
|
||||
func FindPrefixedEnvVars(environ []string, prefix string, element interface{}) []string {
|
||||
prefixes := getRootPrefixes(element, prefix)
|
||||
|
||||
var values []string
|
||||
for _, px := range prefixes {
|
||||
for _, value := range environ {
|
||||
if strings.HasPrefix(value, px) {
|
||||
values = append(values, value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return values
|
||||
}
|
||||
|
||||
func getRootPrefixes(element interface{}, prefix string) []string {
|
||||
if element == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
rootType := reflect.TypeOf(element)
|
||||
|
||||
return getPrefixes(prefix, rootType)
|
||||
}
|
||||
|
||||
func getPrefixes(prefix string, rootType reflect.Type) []string {
|
||||
var names []string
|
||||
|
||||
if rootType.Kind() == reflect.Ptr {
|
||||
rootType = rootType.Elem()
|
||||
}
|
||||
|
||||
if rootType.Kind() != reflect.Struct {
|
||||
return nil
|
||||
}
|
||||
|
||||
for i := 0; i < rootType.NumField(); i++ {
|
||||
field := rootType.Field(i)
|
||||
|
||||
if !parser.IsExported(field) {
|
||||
continue
|
||||
}
|
||||
|
||||
if field.Anonymous &&
|
||||
(field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct || field.Type.Kind() == reflect.Struct) {
|
||||
names = append(names, getPrefixes(prefix, field.Type)...)
|
||||
continue
|
||||
}
|
||||
|
||||
names = append(names, prefix+strings.ToUpper(field.Name))
|
||||
}
|
||||
|
||||
return names
|
||||
}
|
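A short sketch of the filtering deleted above, exercised through the exported FindPrefixedEnvVars (assumed to keep this signature under paerser/env). Embedded struct fields are promoted when building the root prefixes, as the tests below expect; the Inner and Config types are illustrative.

package main

import (
	"fmt"

	"github.com/traefik/paerser/env"
)

// Inner is embedded, so its fields are promoted into root prefixes.
type Inner struct {
	Foo string
	Fii string
}

type Config struct {
	Inner
	Fuu string
}

func main() {
	environ := []string{"TRAEFIK_FOO=a", "TRAEFIK_NOPE=b", "TRAEFIK_FUU=c"}

	// TRAEFIK_NOPE matches no root field and is dropped.
	vars := env.FindPrefixedEnvVars(environ, "TRAEFIK_", &Config{})
	fmt.Println(vars) // [TRAEFIK_FOO=a TRAEFIK_FUU=c]
}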
87 pkg/config/env/filter_test.go vendored
|
@ -1,87 +0,0 @@
|
|||
package env
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestFindPrefixedEnvVars(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
environ []string
|
||||
element interface{}
|
||||
expected []string
|
||||
}{
|
||||
{
|
||||
desc: "exact name",
|
||||
environ: []string{"TRAEFIK_FOO"},
|
||||
element: &Yo{},
|
||||
expected: []string{"TRAEFIK_FOO"},
|
||||
},
|
||||
{
|
||||
desc: "prefixed name",
|
||||
environ: []string{"TRAEFIK_FII01"},
|
||||
element: &Yo{},
|
||||
expected: []string{"TRAEFIK_FII01"},
|
||||
},
|
||||
{
|
||||
desc: "excluded env vars",
|
||||
environ: []string{"TRAEFIK_NOPE", "TRAEFIK_NO"},
|
||||
element: &Yo{},
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
desc: "filter",
|
||||
environ: []string{"TRAEFIK_NOPE", "TRAEFIK_NO", "TRAEFIK_FOO", "TRAEFIK_FII01"},
|
||||
element: &Yo{},
|
||||
expected: []string{"TRAEFIK_FOO", "TRAEFIK_FII01"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
vars := FindPrefixedEnvVars(test.environ, DefaultNamePrefix, test.element)
|
||||
|
||||
assert.Equal(t, test.expected, vars)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getRootFieldNames(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected []string
|
||||
}{
|
||||
{
|
||||
desc: "simple fields",
|
||||
element: &Yo{},
|
||||
expected: []string{"TRAEFIK_FOO", "TRAEFIK_FII", "TRAEFIK_FUU", "TRAEFIK_YI", "TRAEFIK_YU"},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct",
|
||||
element: &Yu{},
|
||||
expected: []string{"TRAEFIK_FOO", "TRAEFIK_FII", "TRAEFIK_FUU"},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct pointer",
|
||||
element: &Ye{},
|
||||
expected: []string{"TRAEFIK_FOO", "TRAEFIK_FII", "TRAEFIK_FUU"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
names := getRootPrefixes(test.element, DefaultNamePrefix)
|
||||
|
||||
assert.Equal(t, test.expected, names)
|
||||
})
|
||||
}
|
||||
}
|
69 pkg/config/env/fixtures_test.go vendored
|
@ -1,69 +0,0 @@
|
|||
package env
|
||||
|
||||
type Ya struct {
|
||||
Foo *Yaa
|
||||
Field1 string
|
||||
Field2 bool
|
||||
Field3 int
|
||||
Field4 map[string]string
|
||||
Field5 map[string]int
|
||||
Field6 map[string]struct{ Field string }
|
||||
Field7 map[string]struct{ Field map[string]string }
|
||||
Field8 map[string]*struct{ Field string }
|
||||
Field9 map[string]*struct{ Field map[string]string }
|
||||
Field10 struct{ Field string }
|
||||
Field11 *struct{ Field string }
|
||||
Field12 *string
|
||||
Field13 *bool
|
||||
Field14 *int
|
||||
Field15 []int
|
||||
}
|
||||
|
||||
type Yaa struct {
|
||||
FieldIn1 string
|
||||
FieldIn2 bool
|
||||
FieldIn3 int
|
||||
FieldIn4 map[string]string
|
||||
FieldIn5 map[string]int
|
||||
FieldIn6 map[string]struct{ Field string }
|
||||
FieldIn7 map[string]struct{ Field map[string]string }
|
||||
FieldIn8 map[string]*struct{ Field string }
|
||||
FieldIn9 map[string]*struct{ Field map[string]string }
|
||||
FieldIn10 struct{ Field string }
|
||||
FieldIn11 *struct{ Field string }
|
||||
FieldIn12 *string
|
||||
FieldIn13 *bool
|
||||
FieldIn14 *int
|
||||
}
|
||||
|
||||
type Yo struct {
|
||||
Foo string `description:"Foo description"`
|
||||
Fii string `description:"Fii description"`
|
||||
Fuu string `description:"Fuu description"`
|
||||
Yi *Yi `label:"allowEmpty"`
|
||||
Yu *Yi
|
||||
}
|
||||
|
||||
func (y *Yo) SetDefaults() {
|
||||
y.Foo = "foo"
|
||||
y.Fii = "fii"
|
||||
}
|
||||
|
||||
type Yi struct {
|
||||
Foo string
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
|
||||
func (y *Yi) SetDefaults() {
|
||||
y.Foo = "foo"
|
||||
y.Fii = "fii"
|
||||
}
|
||||
|
||||
type Yu struct {
|
||||
Yi
|
||||
}
|
||||
|
||||
type Ye struct {
|
||||
*Yi
|
||||
}
|
|
@ -1,82 +0,0 @@
|
|||
// Package file implements decoding between configuration in a file and a typed Configuration.
|
||||
package file
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// Decode decodes the given configuration file into the given element.
|
||||
// The operation goes through three stages roughly summarized as:
|
||||
// file contents -> tree of untyped nodes
|
||||
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
|
||||
// "typed" nodes -> typed element.
|
||||
func Decode(filePath string, element interface{}) error {
|
||||
if element == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
filters := getRootFieldNames(element)
|
||||
|
||||
root, err := decodeFileToNode(filePath, filters...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
metaOpts := parser.MetadataOpts{TagName: parser.TagFile, AllowSliceAsStruct: false}
|
||||
err = parser.AddMetadata(element, root, metaOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return parser.Fill(element, root, parser.FillerOpts{AllowSliceAsStruct: false})
|
||||
}
|
||||
|
||||
// DecodeContent decodes the given configuration file content into the given element.
|
||||
// The operation goes through three stages roughly summarized as:
|
||||
// file contents -> tree of untyped nodes
|
||||
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
|
||||
// "typed" nodes -> typed element.
|
||||
func DecodeContent(content, extension string, element interface{}) error {
|
||||
data := make(map[string]interface{})
|
||||
|
||||
switch extension {
|
||||
case ".toml":
|
||||
_, err := toml.Decode(content, &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case ".yml", ".yaml":
|
||||
var err error
|
||||
err = yaml.Unmarshal([]byte(content), &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
default:
|
||||
return fmt.Errorf("unsupported file extension: %s", extension)
|
||||
}
|
||||
|
||||
filters := getRootFieldNames(element)
|
||||
|
||||
node, err := decodeRawToNode(data, parser.DefaultRootName, filters...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
metaOpts := parser.MetadataOpts{TagName: parser.TagFile, AllowSliceAsStruct: false}
|
||||
err = parser.AddMetadata(element, node, metaOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return parser.Fill(element, node, parser.FillerOpts{AllowSliceAsStruct: false})
|
||||
}
|
|
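A small sketch of the content-based decoder removed here, now under paerser/file; the DecodeContent signature is assumed to stay as shown above, and the config struct and YAML snippet are illustrative.

package main

import (
	"fmt"

	"github.com/traefik/paerser/file"
)

type config struct {
	Foo string
	Yi  *struct{ Fii string }
}

func main() {
	content := `
foo: bar
yi:
  fii: bir
`

	cfg := &config{}
	// The extension selects the decoder (.toml, .yml, .yaml), as in the code above.
	if err := file.DecodeContent(content, ".yaml", cfg); err != nil {
		panic(err)
	}

	fmt.Println(cfg.Foo, cfg.Yi.Fii) // bar bir
}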
@ -1,96 +0,0 @@
|
|||
package file
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// decodeFileToNode decodes the configuration in filePath in a tree of untyped nodes.
|
||||
// If filters is not empty, it skips any configuration element whose name is not among filters.
|
||||
func decodeFileToNode(filePath string, filters ...string) (*parser.Node, error) {
|
||||
content, err := ioutil.ReadFile(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
data := make(map[string]interface{})
|
||||
|
||||
switch strings.ToLower(filepath.Ext(filePath)) {
|
||||
case ".toml":
|
||||
err = toml.Unmarshal(content, &data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
case ".yml", ".yaml":
|
||||
err = yaml.Unmarshal(content, data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported file extension: %s", filePath)
|
||||
}
|
||||
|
||||
if len(data) == 0 {
|
||||
return nil, fmt.Errorf("no configuration found in file: %s", filePath)
|
||||
}
|
||||
|
||||
node, err := decodeRawToNode(data, parser.DefaultRootName, filters...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
return nil, fmt.Errorf("no valid configuration found in file: %s", filePath)
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func getRootFieldNames(element interface{}) []string {
|
||||
if element == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
rootType := reflect.TypeOf(element)
|
||||
|
||||
return getFieldNames(rootType)
|
||||
}
|
||||
|
||||
func getFieldNames(rootType reflect.Type) []string {
|
||||
var names []string
|
||||
|
||||
if rootType.Kind() == reflect.Ptr {
|
||||
rootType = rootType.Elem()
|
||||
}
|
||||
|
||||
if rootType.Kind() != reflect.Struct {
|
||||
return nil
|
||||
}
|
||||
|
||||
for i := 0; i < rootType.NumField(); i++ {
|
||||
field := rootType.Field(i)
|
||||
|
||||
if !parser.IsExported(field) {
|
||||
continue
|
||||
}
|
||||
|
||||
if field.Anonymous &&
|
||||
(field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct || field.Type.Kind() == reflect.Struct) {
|
||||
names = append(names, getFieldNames(field.Type)...)
|
||||
continue
|
||||
}
|
||||
|
||||
names = append(names, field.Name)
|
||||
}
|
||||
|
||||
return names
|
||||
}
|
|
@ -1,646 +0,0 @@
|
|||
package file
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_getRootFieldNames(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected []string
|
||||
}{
|
||||
{
|
||||
desc: "simple fields",
|
||||
element: &Yo{},
|
||||
expected: []string{"Foo", "Fii", "Fuu", "Yi"},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct",
|
||||
element: &Yu{},
|
||||
expected: []string{"Foo", "Fii", "Fuu"},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct pointer",
|
||||
element: &Ye{},
|
||||
expected: []string{"Foo", "Fii", "Fuu"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
names := getRootFieldNames(test.element)
|
||||
|
||||
assert.Equal(t, test.expected, names)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_errors(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
confFile string
|
||||
}{
|
||||
{
|
||||
desc: "non existing file",
|
||||
confFile: "./fixtures/not_existing.toml",
|
||||
},
|
||||
{
|
||||
desc: "file without content",
|
||||
confFile: "./fixtures/empty.toml",
|
||||
},
|
||||
{
|
||||
desc: "file without any valid configuration",
|
||||
confFile: "./fixtures/no_conf.toml",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
node, err := decodeFileToNode(test.confFile,
|
||||
"Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "CertificatesResolvers")
|
||||
|
||||
require.Error(t, err)
|
||||
assert.Nil(t, node)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_compare(t *testing.T) {
|
||||
nodeToml, err := decodeFileToNode("./fixtures/sample.toml",
|
||||
"Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "CertificatesResolvers")
|
||||
require.NoError(t, err)
|
||||
|
||||
nodeYaml, err := decodeFileToNode("./fixtures/sample.yml")
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, nodeToml, nodeYaml)
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_Toml(t *testing.T) {
|
||||
node, err := decodeFileToNode("./fixtures/sample.toml",
|
||||
"Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "CertificatesResolvers")
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "accessLog", Children: []*parser.Node{
|
||||
{Name: "bufferingSize", Value: "42"},
|
||||
{Name: "fields", Children: []*parser.Node{
|
||||
{Name: "defaultMode", Value: "foobar"},
|
||||
{Name: "headers", Children: []*parser.Node{
|
||||
{Name: "defaultMode", Value: "foobar"},
|
||||
{Name: "names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "filePath", Value: "foobar"},
|
||||
{Name: "filters", Children: []*parser.Node{
|
||||
{Name: "minDuration", Value: "42"},
|
||||
{Name: "retryAttempts", Value: "true"},
|
||||
{Name: "statusCodes", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "format", Value: "foobar"},
|
||||
}},
|
||||
{Name: "api", Children: []*parser.Node{
|
||||
{Name: "dashboard", Value: "true"},
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "middlewares", Value: "foobar,foobar"},
|
||||
{Name: "statistics", Children: []*parser.Node{
|
||||
{Name: "recentErrors", Value: "42"},
|
||||
}},
|
||||
}},
|
||||
{Name: "certificatesResolvers", Children: []*parser.Node{
|
||||
{Name: "default", Children: []*parser.Node{
|
||||
{
|
||||
Name: "acme",
|
||||
Children: []*parser.Node{
|
||||
{Name: "acmeLogging", Value: "true"},
|
||||
{Name: "caServer", Value: "foobar"},
|
||||
{Name: "dnsChallenge", Children: []*parser.Node{
|
||||
{Name: "delayBeforeCheck", Value: "42"},
|
||||
{Name: "disablePropagationCheck", Value: "true"},
|
||||
{Name: "provider", Value: "foobar"},
|
||||
{Name: "resolvers", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "email", Value: "foobar"},
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "httpChallenge", Children: []*parser.Node{
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
}},
|
||||
{Name: "keyType", Value: "foobar"},
|
||||
{Name: "storage", Value: "foobar"},
|
||||
{Name: "tlsChallenge"},
|
||||
},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
{Name: "entryPoints", Children: []*parser.Node{
|
||||
{Name: "EntryPoint0", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "forwardedHeaders", Children: []*parser.Node{
|
||||
{Name: "insecure", Value: "true"},
|
||||
{Name: "trustedIPs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "proxyProtocol", Children: []*parser.Node{
|
||||
{Name: "insecure", Value: "true"},
|
||||
{Name: "trustedIPs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "transport", Children: []*parser.Node{
|
||||
{Name: "lifeCycle", Children: []*parser.Node{
|
||||
{Name: "graceTimeOut", Value: "42"},
|
||||
{Name: "requestAcceptGraceTimeout", Value: "42"},
|
||||
}},
|
||||
{Name: "respondingTimeouts", Children: []*parser.Node{
|
||||
{Name: "idleTimeout", Value: "42"},
|
||||
{Name: "readTimeout", Value: "42"},
|
||||
{Name: "writeTimeout", Value: "42"},
|
||||
}},
|
||||
}},
|
||||
}},
|
||||
}},
|
||||
{Name: "global", Children: []*parser.Node{
|
||||
{Name: "checkNewVersion", Value: "true"},
|
||||
{Name: "sendAnonymousUsage", Value: "true"},
|
||||
}},
|
||||
{Name: "hostResolver", Children: []*parser.Node{
|
||||
{Name: "cnameFlattening", Value: "true"},
|
||||
{Name: "resolvConfig", Value: "foobar"},
|
||||
{Name: "resolvDepth", Value: "42"},
|
||||
}},
|
||||
{Name: "log", Children: []*parser.Node{
|
||||
{Name: "filePath", Value: "foobar"},
|
||||
{Name: "format", Value: "foobar"},
|
||||
{Name: "level", Value: "foobar"},
|
||||
}},
|
||||
{Name: "metrics", Children: []*parser.Node{
|
||||
{Name: "datadog", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "pushInterval", Value: "10s"},
|
||||
}},
|
||||
{Name: "influxDB", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "database", Value: "foobar"},
|
||||
{Name: "password", Value: "foobar"},
|
||||
{Name: "protocol", Value: "foobar"},
|
||||
{Name: "pushInterval", Value: "10s"},
|
||||
{Name: "retentionPolicy", Value: "foobar"},
|
||||
{Name: "username", Value: "foobar"},
|
||||
}},
|
||||
{Name: "prometheus", Children: []*parser.Node{
|
||||
{Name: "buckets", Value: "42,42"},
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "middlewares", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "statsD", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "pushInterval", Value: "10s"},
|
||||
}},
|
||||
}},
|
||||
{Name: "ping", Children: []*parser.Node{
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "middlewares", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "providers", Children: []*parser.Node{
|
||||
{Name: "docker", Children: []*parser.Node{
|
||||
{Name: "constraints", Value: "foobar"},
|
||||
{Name: "defaultRule", Value: "foobar"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "exposedByDefault", Value: "true"},
|
||||
{Name: "network", Value: "foobar"},
|
||||
{Name: "swarmMode", Value: "true"},
|
||||
{Name: "swarmModeRefreshSeconds", Value: "42"},
|
||||
{Name: "tls", Children: []*parser.Node{
|
||||
{Name: "ca", Value: "foobar"},
|
||||
{Name: "caOptional", Value: "true"},
|
||||
{Name: "cert", Value: "foobar"},
|
||||
{Name: "insecureSkipVerify", Value: "true"},
|
||||
{Name: "key", Value: "foobar"},
|
||||
}},
|
||||
{Name: "useBindPortIP", Value: "true"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{Name: "file", Children: []*parser.Node{
|
||||
{Name: "debugLogGeneratedTemplate", Value: "true"},
|
||||
{Name: "directory", Value: "foobar"},
|
||||
{Name: "filename", Value: "foobar"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{
|
||||
Name: "kubernetesCRD",
|
||||
Children: []*parser.Node{
|
||||
{Name: "certAuthFilePath", Value: "foobar"},
|
||||
{Name: "disablePassHostHeaders", Value: "true"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "ingressClass", Value: "foobar"},
|
||||
{Name: "labelSelector", Value: "foobar"},
|
||||
{Name: "namespaces", Value: "foobar,foobar"},
|
||||
{Name: "token", Value: "foobar"},
|
||||
},
|
||||
},
|
||||
{Name: "kubernetesIngress", Children: []*parser.Node{
|
||||
{Name: "certAuthFilePath", Value: "foobar"},
|
||||
{Name: "disablePassHostHeaders", Value: "true"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "ingressClass", Value: "foobar"},
|
||||
{Name: "ingressEndpoint", Children: []*parser.Node{
|
||||
{Name: "hostname", Value: "foobar"},
|
||||
{Name: "ip", Value: "foobar"},
|
||||
{Name: "publishedService", Value: "foobar"},
|
||||
}},
|
||||
{Name: "labelSelector", Value: "foobar"},
|
||||
{Name: "namespaces", Value: "foobar,foobar"},
|
||||
{Name: "token", Value: "foobar"},
|
||||
}},
|
||||
{Name: "marathon", Children: []*parser.Node{
|
||||
{Name: "basic", Children: []*parser.Node{
|
||||
{Name: "httpBasicAuthUser", Value: "foobar"},
|
||||
{Name: "httpBasicPassword", Value: "foobar"},
|
||||
}},
|
||||
{Name: "constraints", Value: "foobar"},
|
||||
{Name: "dcosToken", Value: "foobar"},
|
||||
{Name: "defaultRule", Value: "foobar"},
|
||||
{Name: "dialerTimeout", Value: "42"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "exposedByDefault", Value: "true"},
|
||||
{Name: "forceTaskHostname", Value: "true"},
|
||||
{Name: "keepAlive", Value: "42"},
|
||||
{Name: "respectReadinessChecks", Value: "true"},
|
||||
{Name: "responseHeaderTimeout", Value: "42"},
|
||||
{Name: "tls", Children: []*parser.Node{
|
||||
{Name: "ca", Value: "foobar"},
|
||||
{Name: "caOptional", Value: "true"},
|
||||
{Name: "cert", Value: "foobar"},
|
||||
{Name: "insecureSkipVerify", Value: "true"},
|
||||
{Name: "key", Value: "foobar"},
|
||||
}},
|
||||
{Name: "tlsHandshakeTimeout", Value: "42"},
|
||||
{Name: "trace", Value: "true"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{Name: "providersThrottleDuration", Value: "42"},
|
||||
{Name: "rancher", Children: []*parser.Node{
|
||||
{Name: "constraints", Value: "foobar"},
|
||||
{Name: "defaultRule", Value: "foobar"},
|
||||
{Name: "enableServiceHealthFilter", Value: "true"},
|
||||
{Name: "exposedByDefault", Value: "true"},
|
||||
{Name: "intervalPoll", Value: "true"},
|
||||
{Name: "prefix", Value: "foobar"},
|
||||
{Name: "refreshSeconds", Value: "42"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{Name: "rest", Children: []*parser.Node{
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "serversTransport", Children: []*parser.Node{
|
||||
{Name: "forwardingTimeouts", Children: []*parser.Node{
|
||||
{Name: "dialTimeout", Value: "42"},
|
||||
{Name: "idleConnTimeout", Value: "42"},
|
||||
{Name: "responseHeaderTimeout", Value: "42"},
|
||||
}},
|
||||
{Name: "insecureSkipVerify", Value: "true"},
|
||||
{Name: "maxIdleConnsPerHost", Value: "42"},
|
||||
{Name: "rootCAs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "tracing", Children: []*parser.Node{
|
||||
{Name: "datadog", Children: []*parser.Node{
|
||||
{Name: "bagagePrefixHeaderName", Value: "foobar"},
|
||||
{Name: "debug", Value: "true"},
|
||||
{Name: "globalTag", Value: "foobar"},
|
||||
{Name: "localAgentHostPort", Value: "foobar"},
|
||||
{Name: "parentIDHeaderName", Value: "foobar"},
|
||||
{Name: "prioritySampling", Value: "true"},
|
||||
{Name: "samplingPriorityHeaderName", Value: "foobar"},
|
||||
{Name: "traceIDHeaderName", Value: "foobar"},
|
||||
}},
|
||||
{Name: "haystack", Children: []*parser.Node{
|
||||
{Name: "globalTag", Value: "foobar"},
|
||||
{Name: "localAgentHost", Value: "foobar"},
|
||||
{Name: "localAgentPort", Value: "42"},
|
||||
{Name: "parentIDHeaderName", Value: "foobar"},
|
||||
{Name: "spanIDHeaderName", Value: "foobar"},
|
||||
{Name: "traceIDHeaderName", Value: "foobar"},
|
||||
}},
|
||||
{Name: "instana", Children: []*parser.Node{
|
||||
{Name: "localAgentHost", Value: "foobar"},
|
||||
{Name: "localAgentPort", Value: "42"},
|
||||
{Name: "logLevel", Value: "foobar"},
|
||||
}},
|
||||
{Name: "jaeger", Children: []*parser.Node{
|
||||
{Name: "gen128Bit", Value: "true"},
|
||||
{Name: "localAgentHostPort", Value: "foobar"},
|
||||
{Name: "propagation", Value: "foobar"},
|
||||
{Name: "samplingParam", Value: "42"},
|
||||
{Name: "samplingServerURL", Value: "foobar"},
|
||||
{Name: "samplingType", Value: "foobar"},
|
||||
{Name: "traceContextHeaderName", Value: "foobar"},
|
||||
}},
|
||||
{Name: "serviceName", Value: "foobar"},
|
||||
{Name: "spanNameLimit", Value: "42"},
|
||||
{Name: "zipkin", Children: []*parser.Node{
|
||||
{Name: "httpEndpoint", Value: "foobar"},
|
||||
{Name: "id128Bit", Value: "true"},
|
||||
{Name: "sameSpan", Value: "true"},
|
||||
{Name: "sampleRate", Value: "42"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected, node)
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_Yaml(t *testing.T) {
|
||||
node, err := decodeFileToNode("./fixtures/sample.yml")
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "accessLog", Children: []*parser.Node{
|
||||
{Name: "bufferingSize", Value: "42"},
|
||||
{Name: "fields", Children: []*parser.Node{
|
||||
{Name: "defaultMode", Value: "foobar"},
|
||||
{Name: "headers", Children: []*parser.Node{
|
||||
{Name: "defaultMode", Value: "foobar"},
|
||||
{Name: "names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "filePath", Value: "foobar"},
|
||||
{Name: "filters", Children: []*parser.Node{
|
||||
{Name: "minDuration", Value: "42"},
|
||||
{Name: "retryAttempts", Value: "true"},
|
||||
{Name: "statusCodes", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "format", Value: "foobar"},
|
||||
}},
|
||||
{Name: "api", Children: []*parser.Node{
|
||||
{Name: "dashboard", Value: "true"},
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "middlewares", Value: "foobar,foobar"},
|
||||
{Name: "statistics", Children: []*parser.Node{
|
||||
{Name: "recentErrors", Value: "42"},
|
||||
}},
|
||||
}},
|
||||
{Name: "certificatesResolvers", Children: []*parser.Node{
|
||||
{Name: "default", Children: []*parser.Node{
|
||||
{
|
||||
Name: "acme",
|
||||
Children: []*parser.Node{
|
||||
{Name: "acmeLogging", Value: "true"},
|
||||
{Name: "caServer", Value: "foobar"},
|
||||
{Name: "dnsChallenge", Children: []*parser.Node{
|
||||
{Name: "delayBeforeCheck", Value: "42"},
|
||||
{Name: "disablePropagationCheck", Value: "true"},
|
||||
{Name: "provider", Value: "foobar"},
|
||||
{Name: "resolvers", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "email", Value: "foobar"},
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "httpChallenge", Children: []*parser.Node{
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
}},
|
||||
{Name: "keyType", Value: "foobar"},
|
||||
{Name: "storage", Value: "foobar"},
|
||||
{Name: "tlsChallenge"},
|
||||
},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
{Name: "entryPoints", Children: []*parser.Node{
|
||||
{Name: "EntryPoint0", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "forwardedHeaders", Children: []*parser.Node{
|
||||
{Name: "insecure", Value: "true"},
|
||||
{Name: "trustedIPs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "proxyProtocol", Children: []*parser.Node{
|
||||
{Name: "insecure", Value: "true"},
|
||||
{Name: "trustedIPs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "transport", Children: []*parser.Node{
|
||||
{Name: "lifeCycle", Children: []*parser.Node{
|
||||
{Name: "graceTimeOut", Value: "42"},
|
||||
{Name: "requestAcceptGraceTimeout", Value: "42"},
|
||||
}},
|
||||
{Name: "respondingTimeouts", Children: []*parser.Node{
|
||||
{Name: "idleTimeout", Value: "42"},
|
||||
{Name: "readTimeout", Value: "42"},
|
||||
{Name: "writeTimeout", Value: "42"},
|
||||
}},
|
||||
}},
|
||||
}},
|
||||
}},
|
||||
{Name: "global", Children: []*parser.Node{
|
||||
{Name: "checkNewVersion", Value: "true"},
|
||||
{Name: "sendAnonymousUsage", Value: "true"},
|
||||
}},
|
||||
{Name: "hostResolver", Children: []*parser.Node{
|
||||
{Name: "cnameFlattening", Value: "true"},
|
||||
{Name: "resolvConfig", Value: "foobar"},
|
||||
{Name: "resolvDepth", Value: "42"},
|
||||
}},
|
||||
{Name: "log", Children: []*parser.Node{
|
||||
{Name: "filePath", Value: "foobar"},
|
||||
{Name: "format", Value: "foobar"},
|
||||
{Name: "level", Value: "foobar"},
|
||||
}},
|
||||
{Name: "metrics", Children: []*parser.Node{
|
||||
{Name: "datadog", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "pushInterval", Value: "10s"},
|
||||
}},
|
||||
{Name: "influxDB", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "database", Value: "foobar"},
|
||||
{Name: "password", Value: "foobar"},
|
||||
{Name: "protocol", Value: "foobar"},
|
||||
{Name: "pushInterval", Value: "10s"},
|
||||
{Name: "retentionPolicy", Value: "foobar"},
|
||||
{Name: "username", Value: "foobar"},
|
||||
}},
|
||||
{Name: "prometheus", Children: []*parser.Node{
|
||||
{Name: "buckets", Value: "42,42"},
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "middlewares", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "statsD", Children: []*parser.Node{
|
||||
{Name: "address", Value: "foobar"},
|
||||
{Name: "pushInterval", Value: "10s"},
|
||||
}},
|
||||
}},
|
||||
{Name: "ping", Children: []*parser.Node{
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
{Name: "middlewares", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "providers", Children: []*parser.Node{
|
||||
{Name: "docker", Children: []*parser.Node{
|
||||
{Name: "constraints", Value: "foobar"},
|
||||
{Name: "defaultRule", Value: "foobar"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "exposedByDefault", Value: "true"},
|
||||
{Name: "network", Value: "foobar"},
|
||||
{Name: "swarmMode", Value: "true"},
|
||||
{Name: "swarmModeRefreshSeconds", Value: "42"},
|
||||
{Name: "tls", Children: []*parser.Node{
|
||||
{Name: "ca", Value: "foobar"},
|
||||
{Name: "caOptional", Value: "true"},
|
||||
{Name: "cert", Value: "foobar"},
|
||||
{Name: "insecureSkipVerify", Value: "true"},
|
||||
{Name: "key", Value: "foobar"},
|
||||
}},
|
||||
{Name: "useBindPortIP", Value: "true"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{Name: "file", Children: []*parser.Node{
|
||||
{Name: "debugLogGeneratedTemplate", Value: "true"},
|
||||
{Name: "directory", Value: "foobar"},
|
||||
{Name: "filename", Value: "foobar"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{
|
||||
Name: "kubernetesCRD",
|
||||
Children: []*parser.Node{
|
||||
{Name: "certAuthFilePath", Value: "foobar"},
|
||||
{Name: "disablePassHostHeaders", Value: "true"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "ingressClass", Value: "foobar"},
|
||||
{Name: "labelSelector", Value: "foobar"},
|
||||
{Name: "namespaces", Value: "foobar,foobar"},
|
||||
{Name: "token", Value: "foobar"},
|
||||
},
|
||||
},
|
||||
{Name: "kubernetesIngress", Children: []*parser.Node{
|
||||
{Name: "certAuthFilePath", Value: "foobar"},
|
||||
{Name: "disablePassHostHeaders", Value: "true"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "ingressClass", Value: "foobar"},
|
||||
{Name: "ingressEndpoint", Children: []*parser.Node{
|
||||
{Name: "hostname", Value: "foobar"},
|
||||
{Name: "ip", Value: "foobar"},
|
||||
{Name: "publishedService", Value: "foobar"},
|
||||
}},
|
||||
{Name: "labelSelector", Value: "foobar"},
|
||||
{Name: "namespaces", Value: "foobar,foobar"},
|
||||
{Name: "token", Value: "foobar"},
|
||||
}},
|
||||
{Name: "marathon", Children: []*parser.Node{
|
||||
{Name: "basic", Children: []*parser.Node{
|
||||
{Name: "httpBasicAuthUser", Value: "foobar"},
|
||||
{Name: "httpBasicPassword", Value: "foobar"},
|
||||
}},
|
||||
{Name: "constraints", Value: "foobar"},
|
||||
{Name: "dcosToken", Value: "foobar"},
|
||||
{Name: "defaultRule", Value: "foobar"},
|
||||
{Name: "dialerTimeout", Value: "42"},
|
||||
{Name: "endpoint", Value: "foobar"},
|
||||
{Name: "exposedByDefault", Value: "true"},
|
||||
{Name: "forceTaskHostname", Value: "true"},
|
||||
{Name: "keepAlive", Value: "42"},
|
||||
{Name: "respectReadinessChecks", Value: "true"},
|
||||
{Name: "responseHeaderTimeout", Value: "42"},
|
||||
{Name: "tls", Children: []*parser.Node{
|
||||
{Name: "ca", Value: "foobar"},
|
||||
{Name: "caOptional", Value: "true"},
|
||||
{Name: "cert", Value: "foobar"},
|
||||
{Name: "insecureSkipVerify", Value: "true"},
|
||||
{Name: "key", Value: "foobar"},
|
||||
}},
|
||||
{Name: "tlsHandshakeTimeout", Value: "42"},
|
||||
{Name: "trace", Value: "true"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{Name: "providersThrottleDuration", Value: "42"},
|
||||
{Name: "rancher", Children: []*parser.Node{
|
||||
{Name: "constraints", Value: "foobar"},
|
||||
{Name: "defaultRule", Value: "foobar"},
|
||||
{Name: "enableServiceHealthFilter", Value: "true"},
|
||||
{Name: "exposedByDefault", Value: "true"},
|
||||
{Name: "intervalPoll", Value: "true"},
|
||||
{Name: "prefix", Value: "foobar"},
|
||||
{Name: "refreshSeconds", Value: "42"},
|
||||
{Name: "watch", Value: "true"},
|
||||
}},
|
||||
{Name: "rest", Children: []*parser.Node{
|
||||
{Name: "entryPoint", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "serversTransport", Children: []*parser.Node{
|
||||
{Name: "forwardingTimeouts", Children: []*parser.Node{
|
||||
{Name: "dialTimeout", Value: "42"},
|
||||
{Name: "idleConnTimeout", Value: "42"},
|
||||
{Name: "responseHeaderTimeout", Value: "42"},
|
||||
}},
|
||||
{Name: "insecureSkipVerify", Value: "true"},
|
||||
{Name: "maxIdleConnsPerHost", Value: "42"},
|
||||
{Name: "rootCAs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "tracing", Children: []*parser.Node{
|
||||
{Name: "datadog", Children: []*parser.Node{
|
||||
{Name: "bagagePrefixHeaderName", Value: "foobar"},
|
||||
{Name: "debug", Value: "true"},
|
||||
{Name: "globalTag", Value: "foobar"},
|
||||
{Name: "localAgentHostPort", Value: "foobar"},
|
||||
{Name: "parentIDHeaderName", Value: "foobar"},
|
||||
{Name: "prioritySampling", Value: "true"},
|
||||
{Name: "samplingPriorityHeaderName", Value: "foobar"},
|
||||
{Name: "traceIDHeaderName", Value: "foobar"},
|
||||
}},
|
||||
{Name: "haystack", Children: []*parser.Node{
|
||||
{Name: "globalTag", Value: "foobar"},
|
||||
{Name: "localAgentHost", Value: "foobar"},
|
||||
{Name: "localAgentPort", Value: "42"},
|
||||
{Name: "parentIDHeaderName", Value: "foobar"},
|
||||
{Name: "spanIDHeaderName", Value: "foobar"},
|
||||
{Name: "traceIDHeaderName", Value: "foobar"},
|
||||
}},
|
||||
{Name: "instana", Children: []*parser.Node{
|
||||
{Name: "localAgentHost", Value: "foobar"},
|
||||
{Name: "localAgentPort", Value: "42"},
|
||||
{Name: "logLevel", Value: "foobar"},
|
||||
}},
|
||||
{Name: "jaeger", Children: []*parser.Node{
|
||||
{Name: "gen128Bit", Value: "true"},
|
||||
{Name: "localAgentHostPort", Value: "foobar"},
|
||||
{Name: "propagation", Value: "foobar"},
|
||||
{Name: "samplingParam", Value: "42"},
|
||||
{Name: "samplingServerURL", Value: "foobar"},
|
||||
{Name: "samplingType", Value: "foobar"},
|
||||
{Name: "traceContextHeaderName", Value: "foobar"},
|
||||
}},
|
||||
{Name: "serviceName", Value: "foobar"},
|
||||
{Name: "spanNameLimit", Value: "42"},
|
||||
{Name: "zipkin", Children: []*parser.Node{
|
||||
{Name: "httpEndpoint", Value: "foobar"},
|
||||
{Name: "id128Bit", Value: "true"},
|
||||
{Name: "sameSpan", Value: "true"},
|
||||
{Name: "sampleRate", Value: "42"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected, node)
|
||||
}
|
|
@ -1,177 +0,0 @@
package file

import (
    "io/ioutil"
    "os"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestDecode_TOML(t *testing.T) {
    f, err := ioutil.TempFile("", "traefik-config-*.toml")
    require.NoError(t, err)
    defer func() {
        _ = os.Remove(f.Name())
    }()

    _, err = f.Write([]byte(`
foo = "bar"
fii = "bir"
[yi]
`))
    require.NoError(t, err)

    element := &Yo{
        Fuu: "test",
    }

    err = Decode(f.Name(), element)
    require.NoError(t, err)

    expected := &Yo{
        Foo: "bar",
        Fii: "bir",
        Fuu: "test",
        Yi: &Yi{
            Foo: "foo",
            Fii: "fii",
        },
    }
    assert.Equal(t, expected, element)
}

func TestDecodeContent_TOML(t *testing.T) {
    content := `
foo = "bar"
fii = "bir"
[yi]
`

    element := &Yo{
        Fuu: "test",
    }

    err := DecodeContent(content, ".toml", element)
    require.NoError(t, err)

    expected := &Yo{
        Foo: "bar",
        Fii: "bir",
        Fuu: "test",
        Yi: &Yi{
            Foo: "foo",
            Fii: "fii",
        },
    }
    assert.Equal(t, expected, element)
}

func TestDecodeContent_TOML_rawValue(t *testing.T) {
    content := `
name = "test"
[[meta.aaa]]
bbb = 1
`

    type Foo struct {
        Name string
        Meta map[string]interface{}
    }

    element := &Foo{}

    err := DecodeContent(content, ".toml", element)
    require.NoError(t, err)

    expected := &Foo{
        Name: "test",
        Meta: map[string]interface{}{"aaa": []interface{}{map[string]interface{}{"bbb": "1"}}},
    }
    assert.Equal(t, expected, element)
}

func TestDecode_YAML(t *testing.T) {
    f, err := ioutil.TempFile("", "traefik-config-*.yaml")
    require.NoError(t, err)
    defer func() {
        _ = os.Remove(f.Name())
    }()

    _, err = f.Write([]byte(`
foo: bar
fii: bir
yi: {}
`))
    require.NoError(t, err)

    element := &Yo{
        Fuu: "test",
    }

    err = Decode(f.Name(), element)
    require.NoError(t, err)

    expected := &Yo{
        Foo: "bar",
        Fii: "bir",
        Fuu: "test",
        Yi: &Yi{
            Foo: "foo",
            Fii: "fii",
        },
    }
    assert.Equal(t, expected, element)
}

func TestDecodeContent_YAML(t *testing.T) {
    content := `
foo: bar
fii: bir
yi: {}
`

    element := &Yo{
        Fuu: "test",
    }

    err := DecodeContent(content, ".yaml", element)
    require.NoError(t, err)

    expected := &Yo{
        Foo: "bar",
        Fii: "bir",
        Fuu: "test",
        Yi: &Yi{
            Foo: "foo",
            Fii: "fii",
        },
    }
    assert.Equal(t, expected, element)
}

func TestDecodeContent_YAML_rawValue(t *testing.T) {
    content := `
name: test
meta:
  aaa:
    - bbb: 1
`

    type Foo struct {
        Name string
        Meta map[string]interface{}
    }

    element := &Foo{}

    err := DecodeContent(content, ".yaml", element)
    require.NoError(t, err)

    expected := &Foo{
        Name: "test",
        Meta: map[string]interface{}{"aaa": []interface{}{map[string]interface{}{"bbb": "1"}}},
    }
    assert.Equal(t, expected, element)
}
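A minimal usage sketch of the DecodeContent helper exercised above (the example itself is illustrative and not part of the removed file; it relies only on behaviour asserted by these tests):

package file

import (
    "fmt"
    "log"
)

// ExampleDecodeContent decodes an in-memory TOML document into the Yo fixture:
// the extension selects the decoder, and the bare [yi] table is enough for Yi
// to be allocated and seeded by SetDefaults, as asserted in TestDecodeContent_TOML.
func ExampleDecodeContent() {
    element := &Yo{Fuu: "test"}

    content := `
foo = "bar"
fii = "bir"
[yi]
`

    if err := DecodeContent(content, ".toml", element); err != nil {
        log.Fatal(err)
    }

    fmt.Println(element.Foo, element.Fii, element.Fuu, element.Yi.Foo, element.Yi.Fii)
    // Output: bar bir test foo fii
}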
@ -1,2 +0,0 @@
[foo]
bar = "test"
@ -1,474 +0,0 @@
|
|||
[global]
|
||||
checkNewVersion = true
|
||||
sendAnonymousUsage = true
|
||||
|
||||
[serversTransport]
|
||||
insecureSkipVerify = true
|
||||
rootCAs = ["foobar", "foobar"]
|
||||
maxIdleConnsPerHost = 42
|
||||
[serversTransport.forwardingTimeouts]
|
||||
dialTimeout = 42
|
||||
responseHeaderTimeout = 42
|
||||
idleConnTimeout = 42
|
||||
|
||||
[entryPoints]
|
||||
[entryPoints.EntryPoint0]
|
||||
address = "foobar"
|
||||
[entryPoints.EntryPoint0.transport]
|
||||
[entryPoints.EntryPoint0.transport.lifeCycle]
|
||||
requestAcceptGraceTimeout = 42
|
||||
graceTimeOut = 42
|
||||
[entryPoints.EntryPoint0.transport.respondingTimeouts]
|
||||
readTimeout = 42
|
||||
writeTimeout = 42
|
||||
idleTimeout = 42
|
||||
[entryPoints.EntryPoint0.proxyProtocol]
|
||||
insecure = true
|
||||
trustedIPs = ["foobar", "foobar"]
|
||||
[entryPoints.EntryPoint0.forwardedHeaders]
|
||||
insecure = true
|
||||
trustedIPs = ["foobar", "foobar"]
|
||||
|
||||
[providers]
|
||||
providersThrottleDuration = 42
|
||||
[providers.docker]
|
||||
constraints = "foobar"
|
||||
watch = true
|
||||
endpoint = "foobar"
|
||||
defaultRule = "foobar"
|
||||
exposedByDefault = true
|
||||
useBindPortIP = true
|
||||
swarmMode = true
|
||||
network = "foobar"
|
||||
swarmModeRefreshSeconds = 42
|
||||
[providers.docker.tls]
|
||||
ca = "foobar"
|
||||
caOptional = true
|
||||
cert = "foobar"
|
||||
key = "foobar"
|
||||
insecureSkipVerify = true
|
||||
[providers.file]
|
||||
directory = "foobar"
|
||||
watch = true
|
||||
filename = "foobar"
|
||||
debugLogGeneratedTemplate = true
|
||||
[providers.marathon]
|
||||
constraints = "foobar"
|
||||
trace = true
|
||||
watch = true
|
||||
endpoint = "foobar"
|
||||
defaultRule = "foobar"
|
||||
exposedByDefault = true
|
||||
dcosToken = "foobar"
|
||||
dialerTimeout = 42
|
||||
responseHeaderTimeout = 42
|
||||
tlsHandshakeTimeout = 42
|
||||
keepAlive = 42
|
||||
forceTaskHostname = true
|
||||
respectReadinessChecks = true
|
||||
[providers.marathon.tls]
|
||||
ca = "foobar"
|
||||
caOptional = true
|
||||
cert = "foobar"
|
||||
key = "foobar"
|
||||
insecureSkipVerify = true
|
||||
[providers.marathon.basic]
|
||||
httpBasicAuthUser = "foobar"
|
||||
httpBasicPassword = "foobar"
|
||||
[providers.kubernetesIngress]
|
||||
endpoint = "foobar"
|
||||
token = "foobar"
|
||||
certAuthFilePath = "foobar"
|
||||
disablePassHostHeaders = true
|
||||
namespaces = ["foobar", "foobar"]
|
||||
labelSelector = "foobar"
|
||||
ingressClass = "foobar"
|
||||
[providers.kubernetesIngress.ingressEndpoint]
|
||||
ip = "foobar"
|
||||
hostname = "foobar"
|
||||
publishedService = "foobar"
|
||||
[providers.kubernetesCRD]
|
||||
endpoint = "foobar"
|
||||
token = "foobar"
|
||||
certAuthFilePath = "foobar"
|
||||
disablePassHostHeaders = true
|
||||
namespaces = ["foobar", "foobar"]
|
||||
labelSelector = "foobar"
|
||||
ingressClass = "foobar"
|
||||
[providers.rest]
|
||||
entryPoint = "foobar"
|
||||
[providers.rancher]
|
||||
constraints = "foobar"
|
||||
watch = true
|
||||
defaultRule = "foobar"
|
||||
exposedByDefault = true
|
||||
enableServiceHealthFilter = true
|
||||
refreshSeconds = 42
|
||||
intervalPoll = true
|
||||
prefix = "foobar"
|
||||
|
||||
[api]
|
||||
entryPoint = "foobar"
|
||||
dashboard = true
|
||||
middlewares = ["foobar", "foobar"]
|
||||
[api.statistics]
|
||||
recentErrors = 42
|
||||
|
||||
[metrics]
|
||||
[metrics.prometheus]
|
||||
buckets = [42.0, 42.0]
|
||||
entryPoint = "foobar"
|
||||
middlewares = ["foobar", "foobar"]
|
||||
[metrics.datadog]
|
||||
address = "foobar"
|
||||
pushInterval = "10s"
|
||||
[metrics.statsD]
|
||||
address = "foobar"
|
||||
pushInterval = "10s"
|
||||
[metrics.influxDB]
|
||||
address = "foobar"
|
||||
protocol = "foobar"
|
||||
pushInterval = "10s"
|
||||
database = "foobar"
|
||||
retentionPolicy = "foobar"
|
||||
username = "foobar"
|
||||
password = "foobar"
|
||||
|
||||
[ping]
|
||||
entryPoint = "foobar"
|
||||
middlewares = ["foobar", "foobar"]
|
||||
|
||||
[log]
|
||||
level = "foobar"
|
||||
filePath = "foobar"
|
||||
format = "foobar"
|
||||
|
||||
[accessLog]
|
||||
filePath = "foobar"
|
||||
format = "foobar"
|
||||
bufferingSize = 42
|
||||
[accessLog.filters]
|
||||
statusCodes = ["foobar", "foobar"]
|
||||
retryAttempts = true
|
||||
minDuration = 42
|
||||
[accessLog.fields]
|
||||
defaultMode = "foobar"
|
||||
[accessLog.fields.names]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[accessLog.fields.headers]
|
||||
defaultMode = "foobar"
|
||||
[accessLog.fields.headers.names]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
|
||||
[tracing]
|
||||
serviceName = "foobar"
|
||||
spanNameLimit = 42
|
||||
[tracing.jaeger]
|
||||
samplingServerURL = "foobar"
|
||||
samplingType = "foobar"
|
||||
samplingParam = 42.0
|
||||
localAgentHostPort = "foobar"
|
||||
gen128Bit = true
|
||||
propagation = "foobar"
|
||||
traceContextHeaderName = "foobar"
|
||||
[tracing.zipkin]
|
||||
httpEndpoint = "foobar"
|
||||
sameSpan = true
|
||||
id128Bit = true
|
||||
sampleRate = 42.0
|
||||
[tracing.datadog]
|
||||
localAgentHostPort = "foobar"
|
||||
globalTag = "foobar"
|
||||
debug = true
|
||||
prioritySampling = true
|
||||
traceIDHeaderName = "foobar"
|
||||
parentIDHeaderName = "foobar"
|
||||
samplingPriorityHeaderName = "foobar"
|
||||
bagagePrefixHeaderName = "foobar"
|
||||
[tracing.instana]
|
||||
localAgentHost = "foobar"
|
||||
localAgentPort = 42
|
||||
logLevel = "foobar"
|
||||
[tracing.haystack]
|
||||
localAgentHost = "foobar"
|
||||
localAgentPort = 42
|
||||
globalTag = "foobar"
|
||||
traceIDHeaderName = "foobar"
|
||||
parentIDHeaderName = "foobar"
|
||||
spanIDHeaderName = "foobar"
|
||||
|
||||
[hostResolver]
|
||||
cnameFlattening = true
|
||||
resolvConfig = "foobar"
|
||||
resolvDepth = 42
|
||||
|
||||
[certificatesResolvers.default.acme]
|
||||
email = "foobar"
|
||||
acmeLogging = true
|
||||
caServer = "foobar"
|
||||
storage = "foobar"
|
||||
entryPoint = "foobar"
|
||||
keyType = "foobar"
|
||||
[certificatesResolvers.default.acme.dnsChallenge]
|
||||
provider = "foobar"
|
||||
delayBeforeCheck = 42
|
||||
resolvers = ["foobar", "foobar"]
|
||||
disablePropagationCheck = true
|
||||
[certificatesResolvers.default.acme.httpChallenge]
|
||||
entryPoint = "foobar"
|
||||
[certificatesResolvers.default.acme.tlsChallenge]
|
||||
|
||||
## Dynamic configuration
|
||||
|
||||
[http]
|
||||
[http.routers]
|
||||
[http.routers.Router0]
|
||||
entryPoints = ["foobar", "foobar"]
|
||||
middlewares = ["foobar", "foobar"]
|
||||
service = "foobar"
|
||||
rule = "foobar"
|
||||
priority = 42
|
||||
[http.routers.Router0.tls]
|
||||
[http.middlewares]
|
||||
[http.middlewares.Middleware0]
|
||||
[http.middlewares.Middleware0.addPrefix]
|
||||
prefix = "foobar"
|
||||
[http.middlewares.Middleware1]
|
||||
[http.middlewares.Middleware1.stripPrefix]
|
||||
prefixes = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware10]
|
||||
[http.middlewares.Middleware10.rateLimit]
|
||||
average = 42
|
||||
burst = 42
|
||||
[http.middlewares.Middleware10.rateLimit.sourceCriterion]
|
||||
requestHeaderName = "foobar"
|
||||
requestHost = true
|
||||
[http.middlewares.Middleware10.rateLimit.sourceCriterion.ipStrategy]
|
||||
depth = 42
|
||||
excludedIPs = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware11]
|
||||
[http.middlewares.Middleware11.redirectRegex]
|
||||
regex = "foobar"
|
||||
replacement = "foobar"
|
||||
permanent = true
|
||||
[http.middlewares.Middleware12]
|
||||
[http.middlewares.Middleware12.redirectScheme]
|
||||
scheme = "foobar"
|
||||
port = "foobar"
|
||||
permanent = true
|
||||
[http.middlewares.Middleware13]
|
||||
[http.middlewares.Middleware13.basicAuth]
|
||||
users = ["foobar", "foobar"]
|
||||
usersFile = "foobar"
|
||||
realm = "foobar"
|
||||
removeHeader = true
|
||||
headerField = "foobar"
|
||||
[http.middlewares.Middleware14]
|
||||
[http.middlewares.Middleware14.digestAuth]
|
||||
users = ["foobar", "foobar"]
|
||||
usersFile = "foobar"
|
||||
removeHeader = true
|
||||
realm = "foobar"
|
||||
headerField = "foobar"
|
||||
[http.middlewares.Middleware15]
|
||||
[http.middlewares.Middleware15.forwardAuth]
|
||||
address = "foobar"
|
||||
trustForwardHeader = true
|
||||
authResponseHeaders = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware15.forwardAuth.tls]
|
||||
ca = "foobar"
|
||||
caOptional = true
|
||||
cert = "foobar"
|
||||
key = "foobar"
|
||||
insecureSkipVerify = true
|
||||
[http.middlewares.Middleware16]
|
||||
[http.middlewares.Middleware16.inFlightReq]
|
||||
amount = 42
|
||||
[http.middlewares.Middleware16.inFlightReq.sourceCriterion]
|
||||
requestHeaderName = "foobar"
|
||||
requestHost = true
|
||||
[http.middlewares.Middleware16.inFlightReq.sourceCriterion.ipStrategy]
|
||||
depth = 42
|
||||
excludedIPs = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware17]
|
||||
[http.middlewares.Middleware17.buffering]
|
||||
maxRequestBodyBytes = 42
|
||||
memRequestBodyBytes = 42
|
||||
maxResponseBodyBytes = 42
|
||||
memResponseBodyBytes = 42
|
||||
retryExpression = "foobar"
|
||||
[http.middlewares.Middleware18]
|
||||
[http.middlewares.Middleware18.circuitBreaker]
|
||||
expression = "foobar"
|
||||
[http.middlewares.Middleware19]
|
||||
[http.middlewares.Middleware19.compress]
|
||||
[http.middlewares.Middleware2]
|
||||
[http.middlewares.Middleware2.stripPrefixRegex]
|
||||
regex = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware20]
|
||||
[http.middlewares.Middleware20.passTLSClientCert]
|
||||
pem = true
|
||||
[http.middlewares.Middleware20.passTLSClientCert.info]
|
||||
notAfter = true
|
||||
notBefore = true
|
||||
sans = true
|
||||
[http.middlewares.Middleware20.passTLSClientCert.info.subject]
|
||||
country = true
|
||||
province = true
|
||||
locality = true
|
||||
organization = true
|
||||
commonName = true
|
||||
serialNumber = true
|
||||
domainComponent = true
|
||||
[http.middlewares.Middleware20.passTLSClientCert.info.issuer]
|
||||
country = true
|
||||
province = true
|
||||
locality = true
|
||||
organization = true
|
||||
commonName = true
|
||||
serialNumber = true
|
||||
domainComponent = true
|
||||
[http.middlewares.Middleware21]
|
||||
[http.middlewares.Middleware21.retry]
|
||||
regex = 0
|
||||
[http.middlewares.Middleware3]
|
||||
[http.middlewares.Middleware3.replacePath]
|
||||
path = "foobar"
|
||||
[http.middlewares.Middleware4]
|
||||
[http.middlewares.Middleware4.replacePathRegex]
|
||||
regex = "foobar"
|
||||
replacement = "foobar"
|
||||
[http.middlewares.Middleware5]
|
||||
[http.middlewares.Middleware5.chain]
|
||||
middlewares = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware6]
|
||||
[http.middlewares.Middleware6.ipWhiteList]
|
||||
sourceRange = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware7]
|
||||
[http.middlewares.Middleware7.ipWhiteList]
|
||||
[http.middlewares.Middleware7.ipWhiteList.ipStrategy]
|
||||
depth = 42
|
||||
excludedIPs = ["foobar", "foobar"]
|
||||
[http.middlewares.Middleware8]
|
||||
[http.middlewares.Middleware8.headers]
|
||||
accessControlAllowCredentials = true
|
||||
accessControlAllowHeaders = ["foobar", "foobar"]
|
||||
accessControlAllowMethods = ["foobar", "foobar"]
|
||||
accessControlAllowOrigin = "foobar"
|
||||
accessControlExposeHeaders = ["foobar", "foobar"]
|
||||
accessControlMaxAge = 42
|
||||
addVaryHeader = true
|
||||
allowedHosts = ["foobar", "foobar"]
|
||||
hostsProxyHeaders = ["foobar", "foobar"]
|
||||
sslRedirect = true
|
||||
sslTemporaryRedirect = true
|
||||
sslHost = "foobar"
|
||||
sslForceHost = true
|
||||
stsSeconds = 42
|
||||
stsIncludeSubdomains = true
|
||||
stsPreload = true
|
||||
forceSTSHeader = true
|
||||
frameDeny = true
|
||||
customFrameOptionsValue = "foobar"
|
||||
contentTypeNosniff = true
|
||||
browserXssFilter = true
|
||||
customBrowserXSSValue = "foobar"
|
||||
contentSecurityPolicy = "foobar"
|
||||
publicKey = "foobar"
|
||||
referrerPolicy = "foobar"
|
||||
featurePolicy = "foobar"
|
||||
isDevelopment = true
|
||||
[http.middlewares.Middleware8.headers.customRequestHeaders]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[http.middlewares.Middleware8.headers.customResponseHeaders]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[http.middlewares.Middleware8.headers.sslProxyHeaders]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[http.middlewares.Middleware9]
|
||||
[http.middlewares.Middleware9.errors]
|
||||
status = ["foobar", "foobar"]
|
||||
service = "foobar"
|
||||
query = "foobar"
|
||||
[http.services]
|
||||
[http.services.Service0]
|
||||
[http.services.Service0.loadBalancer]
|
||||
passHostHeader = true
|
||||
[http.services.Service0.loadBalancer.sticky.cookie]
|
||||
name = "foobar"
|
||||
|
||||
[[http.services.Service0.loadBalancer.servers]]
|
||||
url = "foobar"
|
||||
|
||||
[[http.services.Service0.loadBalancer.servers]]
|
||||
url = "foobar"
|
||||
[http.services.Service0.loadBalancer.healthCheck]
|
||||
scheme = "foobar"
|
||||
path = "foobar"
|
||||
port = 42
|
||||
interval = "foobar"
|
||||
timeout = "foobar"
|
||||
hostname = "foobar"
|
||||
[http.services.Service0.loadBalancer.healthCheck.headers]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[http.services.Service0.loadBalancer.responseForwarding]
|
||||
flushInterval = "foobar"
|
||||
|
||||
[tcp]
|
||||
[tcp.routers]
|
||||
[tcp.routers.TCPRouter0]
|
||||
entryPoints = ["foobar", "foobar"]
|
||||
service = "foobar"
|
||||
rule = "foobar"
|
||||
[tcp.routers.TCPRouter0.tls]
|
||||
passthrough = true
|
||||
[tcp.services]
|
||||
[tcp.services.TCPService0]
|
||||
[tcp.services.TCPService0.loadBalancer]
|
||||
|
||||
[[tcp.services.TCPService0.loadBalancer.servers]]
|
||||
address = "foobar"
|
||||
|
||||
[[tcp.services.TCPService0.loadBalancer.servers]]
|
||||
address = "foobar"
|
||||
|
||||
[tls]
|
||||
|
||||
[[tls.Certificates]]
|
||||
certFile = "foobar"
|
||||
keyFile = "foobar"
|
||||
stores = ["foobar", "foobar"]
|
||||
|
||||
[[tls.Certificates]]
|
||||
certFile = "foobar"
|
||||
keyFile = "foobar"
|
||||
stores = ["foobar", "foobar"]
|
||||
[tls.options]
|
||||
[tls.options.TLS0]
|
||||
minVersion = "foobar"
|
||||
cipherSuites = ["foobar", "foobar"]
|
||||
sniStrict = true
|
||||
[tls.options.TLS0.clientAuth]
|
||||
caFiles = ["foobar", "foobar"]
|
||||
clientAuthType = "VerifyClientCertIfGiven"
|
||||
[tls.options.TLS1]
|
||||
minVersion = "foobar"
|
||||
cipherSuites = ["foobar", "foobar"]
|
||||
sniStrict = true
|
||||
[tls.options.TLS1.clientAuth]
|
||||
caFiles = ["foobar", "foobar"]
|
||||
clientAuthType = "VerifyClientCertIfGiven"
|
||||
[tls.stores]
|
||||
[tls.stores.Store0]
|
||||
[tls.stores.Store0.defaultCertificate]
|
||||
certFile = "foobar"
|
||||
keyFile = "foobar"
|
||||
[tls.stores.Store1]
|
||||
[tls.stores.Store1.defaultCertificate]
|
||||
certFile = "foobar"
|
||||
keyFile = "foobar"
|
|
@ -1,235 +0,0 @@
|
|||
global:
|
||||
checkNewVersion: true
|
||||
sendAnonymousUsage: true
|
||||
serversTransport:
|
||||
insecureSkipVerify: true
|
||||
rootCAs:
|
||||
- foobar
|
||||
- foobar
|
||||
maxIdleConnsPerHost: 42
|
||||
forwardingTimeouts:
|
||||
dialTimeout: 42
|
||||
responseHeaderTimeout: 42
|
||||
idleConnTimeout: 42
|
||||
entryPoints:
|
||||
EntryPoint0:
|
||||
address: foobar
|
||||
transport:
|
||||
lifeCycle:
|
||||
requestAcceptGraceTimeout: 42
|
||||
graceTimeOut: 42
|
||||
respondingTimeouts:
|
||||
readTimeout: 42
|
||||
writeTimeout: 42
|
||||
idleTimeout: 42
|
||||
proxyProtocol:
|
||||
insecure: true
|
||||
trustedIPs:
|
||||
- foobar
|
||||
- foobar
|
||||
forwardedHeaders:
|
||||
insecure: true
|
||||
trustedIPs:
|
||||
- foobar
|
||||
- foobar
|
||||
providers:
|
||||
providersThrottleDuration: 42
|
||||
docker:
|
||||
constraints: foobar
|
||||
watch: true
|
||||
endpoint: foobar
|
||||
defaultRule: foobar
|
||||
tls:
|
||||
ca: foobar
|
||||
caOptional: true
|
||||
cert: foobar
|
||||
key: foobar
|
||||
insecureSkipVerify: true
|
||||
exposedByDefault: true
|
||||
useBindPortIP: true
|
||||
swarmMode: true
|
||||
network: foobar
|
||||
swarmModeRefreshSeconds: 42
|
||||
file:
|
||||
directory: foobar
|
||||
watch: true
|
||||
filename: foobar
|
||||
debugLogGeneratedTemplate: true
|
||||
marathon:
|
||||
constraints: foobar
|
||||
trace: true
|
||||
watch: true
|
||||
endpoint: foobar
|
||||
defaultRule: foobar
|
||||
exposedByDefault: true
|
||||
dcosToken: foobar
|
||||
tls:
|
||||
ca: foobar
|
||||
caOptional: true
|
||||
cert: foobar
|
||||
key: foobar
|
||||
insecureSkipVerify: true
|
||||
dialerTimeout: 42
|
||||
responseHeaderTimeout: 42
|
||||
tlsHandshakeTimeout: 42
|
||||
keepAlive: 42
|
||||
forceTaskHostname: true
|
||||
basic:
|
||||
httpBasicAuthUser: foobar
|
||||
httpBasicPassword: foobar
|
||||
respectReadinessChecks: true
|
||||
kubernetesIngress:
|
||||
endpoint: foobar
|
||||
token: foobar
|
||||
certAuthFilePath: foobar
|
||||
disablePassHostHeaders: true
|
||||
namespaces:
|
||||
- foobar
|
||||
- foobar
|
||||
labelSelector: foobar
|
||||
ingressClass: foobar
|
||||
ingressEndpoint:
|
||||
ip: foobar
|
||||
hostname: foobar
|
||||
publishedService: foobar
|
||||
kubernetesCRD:
|
||||
endpoint: foobar
|
||||
token: foobar
|
||||
certAuthFilePath: foobar
|
||||
disablePassHostHeaders: true
|
||||
namespaces:
|
||||
- foobar
|
||||
- foobar
|
||||
labelSelector: foobar
|
||||
ingressClass: foobar
|
||||
rest:
|
||||
entryPoint: foobar
|
||||
rancher:
|
||||
constraints: foobar
|
||||
watch: true
|
||||
defaultRule: foobar
|
||||
exposedByDefault: true
|
||||
enableServiceHealthFilter: true
|
||||
refreshSeconds: 42
|
||||
intervalPoll: true
|
||||
prefix: foobar
|
||||
api:
|
||||
entryPoint: foobar
|
||||
dashboard: true
|
||||
statistics:
|
||||
recentErrors: 42
|
||||
middlewares:
|
||||
- foobar
|
||||
- foobar
|
||||
metrics:
|
||||
prometheus:
|
||||
buckets:
|
||||
- 42
|
||||
- 42
|
||||
entryPoint: foobar
|
||||
middlewares:
|
||||
- foobar
|
||||
- foobar
|
||||
datadog:
|
||||
address: foobar
|
||||
pushInterval: 10s
|
||||
statsD:
|
||||
address: foobar
|
||||
pushInterval: 10s
|
||||
influxDB:
|
||||
address: foobar
|
||||
protocol: foobar
|
||||
pushInterval: 10s
|
||||
database: foobar
|
||||
retentionPolicy: foobar
|
||||
username: foobar
|
||||
password: foobar
|
||||
ping:
|
||||
entryPoint: foobar
|
||||
middlewares:
|
||||
- foobar
|
||||
- foobar
|
||||
log:
|
||||
level: foobar
|
||||
filePath: foobar
|
||||
format: foobar
|
||||
accessLog:
|
||||
filePath: foobar
|
||||
format: foobar
|
||||
filters:
|
||||
statusCodes:
|
||||
- foobar
|
||||
- foobar
|
||||
retryAttempts: true
|
||||
minDuration: 42
|
||||
fields:
|
||||
defaultMode: foobar
|
||||
names:
|
||||
name0: foobar
|
||||
name1: foobar
|
||||
headers:
|
||||
defaultMode: foobar
|
||||
names:
|
||||
name0: foobar
|
||||
name1: foobar
|
||||
bufferingSize: 42
|
||||
tracing:
|
||||
serviceName: foobar
|
||||
spanNameLimit: 42
|
||||
jaeger:
|
||||
samplingServerURL: foobar
|
||||
samplingType: foobar
|
||||
samplingParam: 42
|
||||
localAgentHostPort: foobar
|
||||
gen128Bit: true
|
||||
propagation: foobar
|
||||
traceContextHeaderName: foobar
|
||||
zipkin:
|
||||
httpEndpoint: foobar
|
||||
sameSpan: true
|
||||
id128Bit: true
|
||||
sampleRate: 42
|
||||
datadog:
|
||||
localAgentHostPort: foobar
|
||||
globalTag: foobar
|
||||
debug: true
|
||||
prioritySampling: true
|
||||
traceIDHeaderName: foobar
|
||||
parentIDHeaderName: foobar
|
||||
samplingPriorityHeaderName: foobar
|
||||
bagagePrefixHeaderName: foobar
|
||||
instana:
|
||||
localAgentHost: foobar
|
||||
localAgentPort: 42
|
||||
logLevel: foobar
|
||||
haystack:
|
||||
localAgentHost: foobar
|
||||
localAgentPort: 42
|
||||
globalTag: foobar
|
||||
traceIDHeaderName: foobar
|
||||
parentIDHeaderName: foobar
|
||||
spanIDHeaderName: foobar
|
||||
hostResolver:
|
||||
cnameFlattening: true
|
||||
resolvConfig: foobar
|
||||
resolvDepth: 42
|
||||
|
||||
certificatesResolvers:
|
||||
default:
|
||||
acme:
|
||||
email: foobar
|
||||
acmeLogging: true
|
||||
caServer: foobar
|
||||
storage: foobar
|
||||
entryPoint: foobar
|
||||
keyType: foobar
|
||||
dnsChallenge:
|
||||
provider: foobar
|
||||
delayBeforeCheck: 42
|
||||
resolvers:
|
||||
- foobar
|
||||
- foobar
|
||||
disablePropagationCheck: true
|
||||
httpChallenge:
|
||||
entryPoint: foobar
|
||||
tlsChallenge: {}
@ -1,34 +0,0 @@
package file

type bar string

type Yo struct {
    Foo string
    Fii string
    Fuu string
    Yi *Yi `file:"allowEmpty"`
}

func (y *Yo) SetDefaults() {
    y.Foo = "foo"
    y.Fii = "fii"
}

type Yi struct {
    Foo string
    Fii string
    Fuu string
}

func (y *Yi) SetDefaults() {
    y.Foo = "foo"
    y.Fii = "fii"
}

type Yu struct {
    Yi
}

type Ye struct {
    *Yi
}
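The file:"allowEmpty" tag on Yi above is what the decode tests rely on when a bare [yi] table (or yi: {} in YAML) allocates the pointer and SetDefaults fills it. A hypothetical extra fixture following the same pattern (Ya, Mode and YoWithYa are illustrative names, not part of the removed file):

package file

// Ya mirrors Yi: an optional section whose pointer is tagged with
// `file:"allowEmpty"` so that an empty [ya] table can enable it, with
// SetDefaults providing the seed value.
type Ya struct {
    Mode string
}

func (y *Ya) SetDefaults() {
    y.Mode = "default"
}

// YoWithYa embeds the existing fixture and adds the optional section.
type YoWithYa struct {
    Yo
    Ya *Ya `file:"allowEmpty"`
}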
@ -1,152 +0,0 @@
package file

import (
    "fmt"
    "reflect"
    "sort"
    "strconv"
    "strings"

    "github.com/containous/traefik/v2/pkg/config/parser"
)

func decodeRawToNode(data map[string]interface{}, rootName string, filters ...string) (*parser.Node, error) {
    root := &parser.Node{
        Name: rootName,
    }

    vData := reflect.ValueOf(data)
    err := decodeRaw(root, vData, filters...)
    if err != nil {
        return nil, err
    }

    return root, nil
}

func decodeRaw(node *parser.Node, vData reflect.Value, filters ...string) error {
    sortedKeys := sortKeys(vData, filters)

    for _, key := range sortedKeys {
        if vData.MapIndex(key).IsNil() {
            continue
        }

        value := reflect.ValueOf(vData.MapIndex(key).Interface())

        child := &parser.Node{Name: key.String()}

        switch value.Kind() {
        case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
            fallthrough
        case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
            fallthrough
        case reflect.Float32, reflect.Float64:
            fallthrough
        case reflect.Bool:
            fallthrough
        case reflect.String:
            value, err := getSimpleValue(value)
            if err != nil {
                return err
            }
            child.Value = value
        case reflect.Slice:
            var values []string

            for i := 0; i < value.Len(); i++ {
                item := value.Index(i)
                switch item.Kind() {
                case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
                    fallthrough
                case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
                    fallthrough
                case reflect.Bool:
                    fallthrough
                case reflect.String:
                    fallthrough
                case reflect.Map:
                    fallthrough
                case reflect.Interface:
                    sValue := reflect.ValueOf(item.Interface())
                    if sValue.Kind() == reflect.Map {
                        ch := &parser.Node{
                            Name: "[" + strconv.Itoa(i) + "]",
                        }

                        child.Children = append(child.Children, ch)
                        err := decodeRaw(ch, sValue)
                        if err != nil {
                            return err
                        }
                    } else {
                        val, err := getSimpleValue(sValue)
                        if err != nil {
                            return err
                        }
                        values = append(values, val)
                    }
                default:
                    return fmt.Errorf("field %s uses unsupported slice type: %s", child.Name, item.Kind().String())
                }
            }

            child.Value = strings.Join(values, ",")
        case reflect.Map:
            err := decodeRaw(child, value)
            if err != nil {
                return err
            }
        default:
            return fmt.Errorf("field %s uses unsupported type: %s", child.Name, value.Kind().String())
        }

        node.Children = append(node.Children, child)
    }

    return nil
}

func getSimpleValue(item reflect.Value) (string, error) {
    switch item.Kind() {
    case reflect.String:
        return item.String(), nil
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return strconv.FormatInt(item.Int(), 10), nil
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return strconv.FormatUint(item.Uint(), 10), nil
    case reflect.Float32, reflect.Float64:
        return strings.TrimSuffix(strconv.FormatFloat(item.Float(), 'f', 6, 64), ".000000"), nil
    case reflect.Bool:
        return strconv.FormatBool(item.Bool()), nil
    default:
        return "", fmt.Errorf("unsupported simple value type: %s", item.Kind().String())
    }
}

func sortKeys(vData reflect.Value, filters []string) []reflect.Value {
    var sortedKeys []reflect.Value

    for _, v := range vData.MapKeys() {
        rValue := reflect.ValueOf(v.Interface())
        key := rValue.String()

        if len(filters) == 0 {
            sortedKeys = append(sortedKeys, rValue)
            continue
        }

        for _, filter := range filters {
            if strings.EqualFold(key, filter) {
                sortedKeys = append(sortedKeys, rValue)
                continue
            }
        }
    }

    sort.Slice(sortedKeys, func(i, j int) bool {
        return sortedKeys[i].String() < sortedKeys[j].String()
    })

    return sortedKeys
}
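A combined sketch of the conversions decodeRaw performs, written as an extra test in the same package (illustrative only, not part of the removed files; the expectations follow directly from the table-driven tests in the next hunk): scalars are stringified, slices of scalars are joined with commas, nested maps become child nodes, and keys come out sorted.

package file

import (
    "testing"

    "github.com/containous/traefik/v2/pkg/config/parser"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func Test_decodeRawToNode_sketch(t *testing.T) {
    data := map[string]interface{}{
        "foo": "bar",                                 // scalar -> Value "bar"
        "fii": map[interface{}]interface{}{"fuu": 1}, // map -> child node, int stringified
        "baz": []string{"A", "B"},                    // slice of scalars -> "A,B"
    }

    node, err := decodeRawToNode(data, parser.DefaultRootName)
    require.NoError(t, err)

    expected := &parser.Node{
        Name: "traefik",
        Children: []*parser.Node{
            {Name: "baz", Value: "A,B"},
            {Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
            {Name: "foo", Value: "bar"},
        },
    }
    assert.Equal(t, expected, node)
}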
@ -1,578 +0,0 @@
|
|||
package file
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_decodeRawToNode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
data map[string]interface{}
|
||||
expected *parser.Node
|
||||
}{
|
||||
{
|
||||
desc: "empty",
|
||||
data: map[string]interface{}{},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string",
|
||||
data: map[string]interface{}{
|
||||
"foo": "bar",
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string named type",
|
||||
data: map[string]interface{}{
|
||||
"foo": bar("bar"),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool",
|
||||
data: map[string]interface{}{
|
||||
"foo": true,
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "true"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int",
|
||||
data: map[string]interface{}{
|
||||
"foo": 1,
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int8",
|
||||
data: map[string]interface{}{
|
||||
"foo": int8(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int16",
|
||||
data: map[string]interface{}{
|
||||
"foo": int16(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int32",
|
||||
data: map[string]interface{}{
|
||||
"foo": int32(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int64",
|
||||
data: map[string]interface{}{
|
||||
"foo": int64(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint8",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint8(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint16",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint16(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint32",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint32(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint64",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint64(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float32",
|
||||
data: map[string]interface{}{
|
||||
"foo": float32(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float64",
|
||||
data: map[string]interface{}{
|
||||
"foo": float64(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []string{"A", "B"},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "A,B"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int8 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int8{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int16 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int16{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int32 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int32{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int64 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int64{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []bool{true, false},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "true,false"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "interface (string) slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []interface{}{"A", "B"},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "A,B"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "interface (int) slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []interface{}{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 strings",
|
||||
data: map[string]interface{}{
|
||||
"foo": "bar",
|
||||
"fii": "bir",
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Value: "bir"},
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": "bur",
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "bur"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": 1,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": uint(1),
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": true,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "true"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string, level 3",
|
||||
data: map[string]interface{}{
|
||||
"foo": map[interface{}]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": "bur",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "bur"}}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int, level 3",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": 1,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint, level 3",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": uint(1),
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool, level 3",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": true,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "true"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct",
|
||||
data: map[string]interface{}{
|
||||
"foo": map[interface{}]interface{}{
|
||||
"field1": "C",
|
||||
"field2": "C",
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "C"},
|
||||
{Name: "field2", Value: "C"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice struct 1",
|
||||
data: map[string]interface{}{
|
||||
"foo": []map[string]interface{}{
|
||||
{"field1": "A", "field2": "A"},
|
||||
{"field1": "B", "field2": "B"},
|
||||
{"field2": "C", "field1": "C"},
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "A"},
|
||||
{Name: "field2", Value: "A"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "B"},
|
||||
{Name: "field2", Value: "B"},
|
||||
}},
|
||||
{Name: "[2]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "C"},
|
||||
{Name: "field2", Value: "C"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice struct 2",
|
||||
data: map[string]interface{}{
|
||||
"foo": []interface{}{
|
||||
map[interface{}]interface{}{
|
||||
"field2": "A",
|
||||
"field1": "A",
|
||||
},
|
||||
map[interface{}]interface{}{
|
||||
"field1": "B",
|
||||
"field2": "B",
|
||||
},
|
||||
map[interface{}]interface{}{
|
||||
"field1": "C",
|
||||
"field2": "C",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "A"},
|
||||
{Name: "field2", Value: "A"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "B"},
|
||||
{Name: "field2", Value: "B"},
|
||||
}},
|
||||
{Name: "[2]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "C"},
|
||||
{Name: "field2", Value: "C"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "nil value",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": nil,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
node, err := decodeRawToNode(test.data, parser.DefaultRootName)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, node)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_decodeRawToNode_errors(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
data map[string]interface{}
|
||||
}{
|
||||
{
|
||||
desc: "invalid type",
|
||||
data: map[string]interface{}{
|
||||
"foo": struct{}{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
_, err := decodeRawToNode(test.data, parser.DefaultRootName)
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
}
@ -1,47 +0,0 @@
// Package flag implements encoding and decoding between flag arguments and a typed Configuration.
package flag

import (
    "github.com/containous/traefik/v2/pkg/config/parser"
)

// Decode decodes the given flag arguments into the given element.
// The operation goes through four stages roughly summarized as:
// flag arguments -> parsed map of flags
// map -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> typed element.
func Decode(args []string, element interface{}) error {
    ref, err := Parse(args, element)
    if err != nil {
        return err
    }

    return parser.Decode(ref, element, parser.DefaultRootName)
}

// Encode encodes the configuration in element into the flags represented in the returned Flats.
// The operation goes through three stages roughly summarized as:
// typed configuration in element -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> flags with default values (determined by type/kind).
func Encode(element interface{}) ([]parser.Flat, error) {
    if element == nil {
        return nil, nil
    }

    etnOpts := parser.EncoderToNodeOpts{OmitEmpty: false, TagName: parser.TagLabel, AllowSliceAsStruct: true}
    node, err := parser.EncodeToNode(element, parser.DefaultRootName, etnOpts)
    if err != nil {
        return nil, err
    }

    metaOpts := parser.MetadataOpts{TagName: parser.TagLabel, AllowSliceAsStruct: true}
    err = parser.AddMetadata(element, node, metaOpts)
    if err != nil {
        return nil, err
    }

    flatOpts := parser.FlatOpts{Separator: ".", SkipRoot: true, TagName: parser.TagLabel}
    return parser.EncodeToFlat(element, node, flatOpts)
}
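A short usage sketch tying the two doc comments above together (illustrative only, not part of the removed file; it uses only behaviour covered by the flag tests in the next hunk):

package flag

import (
    "fmt"
    "log"
)

// flagRoundTripSketch decodes CLI arguments into a typed struct, then encodes
// the same struct back into the flat flag reference (name, description,
// default) returned by Encode.
func flagRoundTripSketch() {
    config := &struct {
        Foo string `description:"foo description"`
        Bar bool   `description:"bar description"`
    }{}

    // "--foo=bar --bar": values can be given with '=', and booleans need no explicit value.
    if err := Decode([]string{"--foo=bar", "--bar"}, config); err != nil {
        log.Fatal(err)
    }
    fmt.Println(config.Foo, config.Bar) // bar true

    // Encode walks the struct and returns one parser.Flat entry per leaf field.
    flats, err := Encode(config)
    if err != nil {
        log.Fatal(err)
    }
    for _, flat := range flats {
        fmt.Println(flat.Name, flat.Description, flat.Default)
    }
}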
@ -1,940 +0,0 @@
|
|||
package flag
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/generator"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "no args",
|
||||
args: nil,
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
desc: "types.Duration value",
|
||||
args: []string{"--foo=1"},
|
||||
element: &struct {
|
||||
Foo types.Duration
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo types.Duration
|
||||
}{
|
||||
Foo: types.Duration(1 * time.Second),
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time.Duration value",
|
||||
args: []string{"--foo=1"},
|
||||
element: &struct {
|
||||
Foo time.Duration
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo time.Duration
|
||||
}{
|
||||
Foo: 1 * time.Nanosecond,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
}{
|
||||
Foo: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
args: []string{"--foo=bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated",
|
||||
args: []string{"--foo", "bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated with end of parameter",
|
||||
args: []string{"--foo=bir", "--", "--bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple bool flags without value",
|
||||
args: []string{"--foo", "--bar"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{
|
||||
Foo: true,
|
||||
Bar: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags",
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{"bar", "baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
args: []string{"--foo.name=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]string
|
||||
}{
|
||||
Foo: map[string]string{
|
||||
"name": "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string case sensitive",
|
||||
args: []string{"--foo.caseSensitiveName=barBoo"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]string
|
||||
}{
|
||||
Foo: map[string]string{
|
||||
"caseSensitiveName": "barBoo",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
args: []string{"--foo.name.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{
|
||||
Foo: map[string]struct{ Value string }{
|
||||
"name": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-struct",
|
||||
args: []string{"--foo.name.bar.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}{
|
||||
"name": {
|
||||
Bar: &struct {
|
||||
Value string
|
||||
}{
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map",
|
||||
args: []string{"--foo.name1.bar.name2.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}{
|
||||
"name1": {
|
||||
Bar: map[string]struct{ Value string }{
|
||||
"name2": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 2",
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{"bar", "baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 3",
|
||||
args: []string{"--foo", "bar", "--foo=", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{
|
||||
Foo: []string{"bar", ""},
|
||||
Baz: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 4",
|
||||
args: []string{"--foo", "bar", "--foo", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{
|
||||
Foo: []string{"bar", "--baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct",
|
||||
args: []string{
|
||||
"--foo[0].Field1", "bar", "--foo[0].Field2", "6",
|
||||
"--foo[1].Field1", "bur", "--foo[1].Field2", "2",
|
||||
},
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{
|
||||
Foo: []struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}{
|
||||
{
|
||||
Field1: "bar",
|
||||
Field2: 6,
|
||||
},
|
||||
{
|
||||
Field1: "bur",
|
||||
Field2: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of pointer of struct",
|
||||
args: []string{
|
||||
"--foo[0].Field1", "bar", "--foo[0].Field2", "6",
|
||||
"--foo[1].Field1", "bur", "--foo[1].Field2", "2",
|
||||
},
|
||||
element: &struct {
|
||||
Foo []*struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []*struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{
|
||||
Foo: []*struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}{
|
||||
{
|
||||
Field1: "bar",
|
||||
Field2: 6,
|
||||
},
|
||||
{
|
||||
Field1: "bur",
|
||||
Field2: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag 2",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string without value",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{},
|
||||
args: []string{"--foo", "--bar"},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{
|
||||
Foo: "--bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct{ Field string }{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
err := Decode(test.args, test.element)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected []parser.Flat
|
||||
}{
|
||||
{
|
||||
desc: "string field",
|
||||
element: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "int field",
|
||||
element: &struct {
|
||||
Field int `description:"field description"`
|
||||
}{
|
||||
Field: 6,
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "6",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "bool field",
|
||||
element: &struct {
|
||||
Field bool `description:"field description"`
|
||||
}{
|
||||
Field: true,
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "true",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "string pointer field",
|
||||
element: &struct {
|
||||
Field *string `description:"field description"`
|
||||
}{
|
||||
Field: func(v string) *string { return &v }("test"),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "int pointer field",
|
||||
element: &struct {
|
||||
Field *int `description:"field description"`
|
||||
}{
|
||||
Field: func(v int) *int { return &v }(6),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "6",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "bool pointer field",
|
||||
element: &struct {
|
||||
Field *bool `description:"field description"`
|
||||
}{
|
||||
Field: func(v bool) *bool { return &v }(true),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "true",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of string field, no initial value",
|
||||
element: &struct {
|
||||
Field []string `description:"field description"`
|
||||
}{},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of string field, with initial value",
|
||||
element: &struct {
|
||||
Field []string `description:"field description"`
|
||||
}{
|
||||
Field: []string{"foo", "bar"},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "foo, bar",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of int field, no initial value",
|
||||
element: &struct {
|
||||
Field []int `description:"field description"`
|
||||
}{},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of int field, with initial value",
|
||||
element: &struct {
|
||||
Field []int `description:"field description"`
|
||||
}{
|
||||
Field: []int{6, 3},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "6, 3",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "map string field",
|
||||
element: &struct {
|
||||
Field map[string]string `description:"field description"`
|
||||
}{
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field.<name>",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field, allow empty",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"foo description" label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description"`
|
||||
}{
|
||||
Fii: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field level 2, allow empty",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description" label:"allowEmpty"`
|
||||
} `description:"foo description" label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description" label:"allowEmpty"`
|
||||
}{
|
||||
Fii: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii",
|
||||
Description: "fii description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii map[string]string `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii map[string]string `description:"fii description"`
|
||||
}{
|
||||
Fii: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii.<name>",
|
||||
Description: "fii description",
|
||||
Default: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string pointer field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii map[string]*string `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii map[string]*string `description:"fii description"`
|
||||
}{
|
||||
Fii: map[string]*string{
|
||||
parser.MapNamePlaceholder: func(v string) *string { return &v }(""),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii.<name>",
|
||||
Description: "fii description",
|
||||
Default: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct level 1",
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Field string `description:"field description"`
|
||||
Yo int `description:"yo description"`
|
||||
} `description:"foo description"`
|
||||
}{},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.yo",
|
||||
Description: "yo description",
|
||||
Default: "0",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct pointer level 1",
|
||||
element: &struct {
|
||||
Foo map[string]*struct {
|
||||
Field string `description:"field description"`
|
||||
Yo string `description:"yo description"`
|
||||
} `description:"foo description"`
|
||||
}{},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.yo",
|
||||
Description: "yo description",
|
||||
Default: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time duration field",
|
||||
element: &struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{
|
||||
Field: 1 * time.Second,
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "1s",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "time duration field map",
|
||||
element: &struct {
|
||||
Foo map[string]*struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: map[string]*struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.field",
|
||||
Description: "field description",
|
||||
Default: "0s",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time duration field map 2",
|
||||
element: &struct {
|
||||
Foo map[string]*struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: map[string]*struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}{},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.fii.field",
|
||||
Description: "field description",
|
||||
Default: "0s",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time duration field 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{
|
||||
Field: 1 * time.Second,
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "foo.field",
|
||||
Description: "field description",
|
||||
Default: "1s",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "time duration field 3",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}{
|
||||
Fii: &struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{
|
||||
Field: 1 * time.Second,
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "foo.fii.field",
|
||||
Description: "field description",
|
||||
Default: "1s",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "types.Duration field",
|
||||
element: &struct {
|
||||
Field types.Duration `description:"field description"`
|
||||
}{
|
||||
Field: types.Duration(180 * time.Second),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "180",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii []struct {
|
||||
Field1 string `description:"field1 description"`
|
||||
Field2 int `description:"field2 description"`
|
||||
} `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii",
|
||||
Description: "fii description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii[0].field1",
|
||||
Description: "field1 description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii[0].field2",
|
||||
Description: "field2 description",
|
||||
Default: "0",
|
||||
},
|
||||
},
|
||||
},
|
||||
// Skipped because it is realistically not needed in Traefik for now.
|
||||
// {
|
||||
// desc: "map of map field level 2",
|
||||
// element: &struct {
|
||||
// Foo *struct {
|
||||
// Fii map[string]map[string]string `description:"fii description"`
|
||||
// } `description:"foo description"`
|
||||
// }{
|
||||
// Foo: &struct {
|
||||
// Fii map[string]map[string]string `description:"fii description"`
|
||||
// }{
|
||||
// Fii: map[string]map[string]string{
|
||||
// parser.MapNamePlaceholder: {
|
||||
// parser.MapNamePlaceholder: "test",
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// expected: `XXX`,
|
||||
// },
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
generator.Generate(test.element)
|
||||
|
||||
entries, err := Encode(test.element)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, entries)
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -1,141 +0,0 @@
|
|||
package flag
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
// Parse parses the command-line flag arguments into a map,
|
||||
// using the type information in element to discriminate whether a flag is supposed to be a bool,
|
||||
// and to resolve other such ambiguities.
|
||||
func Parse(args []string, element interface{}) (map[string]string, error) {
|
||||
f := flagSet{
|
||||
flagTypes: getFlagTypes(element),
|
||||
args: args,
|
||||
values: make(map[string]string),
|
||||
keys: make(map[string]string),
|
||||
}
|
||||
|
||||
for {
|
||||
seen, err := f.parseOne()
|
||||
if seen {
|
||||
continue
|
||||
}
|
||||
if err == nil {
|
||||
break
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
return f.values, nil
|
||||
}
|
||||
|
||||
type flagSet struct {
|
||||
flagTypes map[string]reflect.Kind
|
||||
args []string
|
||||
values map[string]string
|
||||
keys map[string]string
|
||||
}
|
||||
|
||||
func (f *flagSet) parseOne() (bool, error) {
|
||||
if len(f.args) == 0 {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
s := f.args[0]
|
||||
if len(s) < 2 || s[0] != '-' {
|
||||
return false, nil
|
||||
}
|
||||
numMinuses := 1
|
||||
if s[1] == '-' {
|
||||
numMinuses++
|
||||
if len(s) == 2 { // "--" terminates the flags
|
||||
f.args = f.args[1:]
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
name := s[numMinuses:]
|
||||
if len(name) == 0 || name[0] == '-' || name[0] == '=' {
|
||||
return false, fmt.Errorf("bad flag syntax: %s", s)
|
||||
}
|
||||
|
||||
// it's a flag. does it have an argument?
|
||||
f.args = f.args[1:]
|
||||
hasValue := false
|
||||
value := ""
|
||||
for i := 1; i < len(name); i++ { // equals cannot be first
|
||||
if name[i] == '=' {
|
||||
value = name[i+1:]
|
||||
hasValue = true
|
||||
name = name[0:i]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if hasValue {
|
||||
f.setValue(name, value)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
flagType := f.getFlagType(name)
|
||||
if flagType == reflect.Bool || flagType == reflect.Ptr {
|
||||
f.setValue(name, "true")
|
||||
return true, nil
|
||||
}
|
||||
|
||||
if len(f.args) > 0 {
|
||||
// value is the next arg
|
||||
hasValue = true
|
||||
value, f.args = f.args[0], f.args[1:]
|
||||
}
|
||||
|
||||
if !hasValue {
|
||||
return false, fmt.Errorf("flag needs an argument: -%s", name)
|
||||
}
|
||||
|
||||
f.setValue(name, value)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (f *flagSet) setValue(name, value string) {
|
||||
srcKey := parser.DefaultRootName + "." + name
|
||||
neutralKey := strings.ToLower(srcKey)
|
||||
|
||||
key, ok := f.keys[neutralKey]
|
||||
if !ok {
|
||||
f.keys[neutralKey] = srcKey
|
||||
key = srcKey
|
||||
}
|
||||
|
||||
v, ok := f.values[key]
|
||||
if ok && f.getFlagType(name) == reflect.Slice {
|
||||
f.values[key] = v + "," + value
|
||||
return
|
||||
}
|
||||
|
||||
f.values[key] = value
|
||||
}
|
||||
|
||||
func (f *flagSet) getFlagType(name string) reflect.Kind {
|
||||
neutral := strings.ToLower(name)
|
||||
|
||||
kind, ok := f.flagTypes[neutral]
|
||||
if ok {
|
||||
return kind
|
||||
}
|
||||
|
||||
for n, k := range f.flagTypes {
|
||||
if strings.Contains(n, parser.MapNamePlaceholder) {
|
||||
p := strings.NewReplacer(".", `\.`, parser.MapNamePlaceholder, `([^.]+)`).Replace(n)
|
||||
if regexp.MustCompile(p).MatchString(neutral) {
|
||||
return k
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return reflect.Invalid
|
||||
}
|
|
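Two points in the removed flag parser are worth keeping in mind while it moves: a flag whose type resolves to Bool or Ptr needs no argument (it is set to "true"), and the flag-type lookup in getFlagType above resolves map keys through a regexp built from parser.MapNamePlaceholder. Below is a minimal, self-contained sketch of that placeholder lookup; the literal "<name>" matches the placeholder value visible in the expected names elsewhere in this diff.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Mirror of the lookup in getFlagType: the stored key "foo.<name>.field"
	// becomes a regexp so any concrete map key matches, e.g. "foo.bar.field".
	pattern := strings.NewReplacer(".", `\.`, "<name>", `([^.]+)`).Replace("foo.<name>.field")

	fmt.Println(regexp.MustCompile(pattern).MatchString("foo.bar.field")) // true
	fmt.Println(regexp.MustCompile(pattern).MatchString("foo.field"))     // false
}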
@@ -1,353 +0,0 @@
|
|||
package flag
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
element interface{}
|
||||
expected map[string]string
|
||||
}{
|
||||
{
|
||||
desc: "no args",
|
||||
args: nil,
|
||||
expected: map[string]string{},
|
||||
},
|
||||
{
|
||||
desc: "bool value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool value capitalized",
|
||||
args: []string{"--Foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.Foo": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
args: []string{"--foo=bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal capitalized",
|
||||
args: []string{"--Foo=Bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.Foo": "Bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated",
|
||||
args: []string{"--foo", "bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated capitalized",
|
||||
args: []string{"--Foo", "Bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.Foo": "Bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated with end of parameter",
|
||||
args: []string{"--foo=bir", "--", "--bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple bool flags without value",
|
||||
args: []string{"--foo", "--bar"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "true",
|
||||
"traefik.bar": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags",
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
args: []string{"--foo.name=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string capitalized",
|
||||
args: []string{"--foo.Name=Bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.Name": "Bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
args: []string{"--foo.name.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name.value": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-struct",
|
||||
args: []string{"--foo.name.bar.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name.bar.value": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map",
|
||||
args: []string{"--foo.name1.bar.name2.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name1.bar.name2.value": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 2",
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 3",
|
||||
args: []string{"--foo", "bar", "--foo=", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,",
|
||||
"traefik.baz": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 4",
|
||||
args: []string{"--foo", "bar", "--foo", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,--baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag 2",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string without value",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{},
|
||||
args: []string{"--foo", "--bar"},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "--bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo *struct{ Field string }
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string case sensitive",
|
||||
args: []string{"--foo.caseSensitiveName=barBoo"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.caseSensitiveName": "barBoo",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map case sensitive",
|
||||
args: []string{"--foo.Name1.bar.name2.value=firstValue", "--foo.naMe1.bar.name2.value=secondValue"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.Name1.bar.name2.value": "secondValue",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map and different case",
|
||||
args: []string{"--foo.Name1.bar.name2.value=firstValue", "--foo.naMe1.bar.name2.value=secondValue"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.Name1.bar.name2.value": "secondValue",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer of struct and map without explicit value",
|
||||
args: []string{"--foo.default.bar.fuu"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct {
|
||||
Fuu *struct{ Value string }
|
||||
}
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.default.bar.fuu": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 2 and different cases.",
|
||||
args: []string{"--foo", "bar", "--Foo", "baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,baz",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fl, err := Parse(test.args, test.element)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.expected, fl)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParse_Errors(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
element interface{}
|
||||
}{
|
||||
{
|
||||
desc: "triple hyphen",
|
||||
args: []string{"---foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
args: []string{"--=foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
},
|
||||
{
|
||||
desc: "string without value",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{},
|
||||
args: []string{"--foo"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
_, err := Parse(test.args, test.element)
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -1,60 +0,0 @@
|
|||
package flag
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
func getFlagTypes(element interface{}) map[string]reflect.Kind {
|
||||
ref := map[string]reflect.Kind{}
|
||||
|
||||
if element == nil {
|
||||
return ref
|
||||
}
|
||||
|
||||
tp := reflect.TypeOf(element).Elem()
|
||||
|
||||
addFlagType(ref, "", tp)
|
||||
|
||||
return ref
|
||||
}
|
||||
|
||||
func addFlagType(ref map[string]reflect.Kind, name string, typ reflect.Type) {
|
||||
switch typ.Kind() {
|
||||
case reflect.Bool, reflect.Slice:
|
||||
ref[name] = typ.Kind()
|
||||
|
||||
case reflect.Map:
|
||||
addFlagType(ref, getName(name, parser.MapNamePlaceholder), typ.Elem())
|
||||
|
||||
case reflect.Ptr:
|
||||
if typ.Elem().Kind() == reflect.Struct {
|
||||
ref[name] = typ.Kind()
|
||||
}
|
||||
addFlagType(ref, name, typ.Elem())
|
||||
|
||||
case reflect.Struct:
|
||||
for j := 0; j < typ.NumField(); j++ {
|
||||
subField := typ.Field(j)
|
||||
|
||||
if !parser.IsExported(subField) {
|
||||
continue
|
||||
}
|
||||
|
||||
if subField.Anonymous {
|
||||
addFlagType(ref, getName(name), subField.Type)
|
||||
} else {
|
||||
addFlagType(ref, getName(name, subField.Name), subField.Type)
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
// noop
|
||||
}
|
||||
}
|
||||
|
||||
func getName(names ...string) string {
|
||||
return strings.TrimPrefix(strings.ToLower(strings.Join(names, ".")), ".")
|
||||
}
|
|
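The kind table built by addFlagType is what later tells parseOne whether a flag may omit its value. A short sketch of its shape, written as if it sat in the same package since getFlagTypes is unexported; the field names are made up for illustration.

package flag

import (
	"fmt"
)

// printFlagKinds is a sketch, not part of the diff: it dumps the lowercased
// dotted names and kinds that getFlagTypes records for a small struct.
func printFlagKinds() {
	element := &struct {
		Debug bool
		Log   *struct{ Level string }
		Hosts []string
	}{}

	for name, kind := range getFlagTypes(element) {
		fmt.Println(name, kind)
	}
	// Expected entries (map order varies): "debug" bool, "log" ptr, "hosts" slice.
}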
@@ -1,226 +0,0 @@
|
|||
package flag
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_getFlagTypes(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected map[string]reflect.Kind
|
||||
}{
|
||||
{
|
||||
desc: "nil",
|
||||
element: nil,
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "no fields",
|
||||
element: &struct {
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "string field",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 0",
|
||||
element: &struct {
|
||||
Foo bool
|
||||
fii bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 1",
|
||||
element: &struct {
|
||||
Foo struct {
|
||||
Field bool
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo.field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field bool
|
||||
}
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Ptr,
|
||||
"foo.fii": reflect.Ptr,
|
||||
"foo.fii.field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer field",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field string
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Ptr,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 3",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Fuu *struct {
|
||||
Field bool
|
||||
}
|
||||
}
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Ptr,
|
||||
"foo.fii": reflect.Ptr,
|
||||
"foo.fii.fuu": reflect.Ptr,
|
||||
"foo.fii.fuu.field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "map bool",
|
||||
element: &struct {
|
||||
Foo map[string]bool
|
||||
Fii struct{}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder: reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Field bool
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder + ".field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map map bool",
|
||||
element: &struct {
|
||||
Foo map[string]map[string]bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder + "." + parser.MapNamePlaceholder: reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct map",
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Fii map[string]bool
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder + ".fii." + parser.MapNamePlaceholder: reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer bool field level 0",
|
||||
element: &struct {
|
||||
Foo *bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer int field level 0",
|
||||
element: &struct {
|
||||
Foo *int
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "bool slice field level 0",
|
||||
element: &struct {
|
||||
Foo []bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string slice field level 0",
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice field level 1",
|
||||
element: &struct {
|
||||
Foo struct {
|
||||
Field []string
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo.field": reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map slice string",
|
||||
element: &struct {
|
||||
Foo map[string][]string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder: reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct",
|
||||
element: &struct {
|
||||
Yo
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Bool,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
actual := getFlagTypes(test.element)
|
||||
assert.Equal(t, test.expected, actual)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type Yo struct {
|
||||
Foo bool
|
||||
}
|
|
@@ -1,97 +0,0 @@
|
|||
// Package generator implements the custom initialization of all the fields of an element passed as an empty interface.
|
||||
package generator
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
type initializer interface {
|
||||
SetDefaults()
|
||||
}
|
||||
|
||||
// Generate recursively initializes an empty structure, calling SetDefaults on each field, when it applies.
|
||||
func Generate(element interface{}) {
|
||||
if element == nil {
|
||||
return
|
||||
}
|
||||
|
||||
generate(element)
|
||||
}
|
||||
|
||||
func generate(element interface{}) {
|
||||
field := reflect.ValueOf(element)
|
||||
|
||||
fill(field)
|
||||
}
|
||||
|
||||
func fill(field reflect.Value) {
|
||||
switch field.Kind() {
|
||||
case reflect.Ptr:
|
||||
setPtr(field)
|
||||
case reflect.Struct:
|
||||
setStruct(field)
|
||||
case reflect.Map:
|
||||
setMap(field)
|
||||
case reflect.Slice:
|
||||
if field.Type().Elem().Kind() == reflect.Struct ||
|
||||
field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct {
|
||||
slice := reflect.MakeSlice(field.Type(), 1, 1)
|
||||
field.Set(slice)
|
||||
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
setPtr(value)
|
||||
|
||||
elem := value.Elem().Elem()
|
||||
field.Index(0).Set(elem)
|
||||
} else if field.Len() == 0 {
|
||||
slice := reflect.MakeSlice(field.Type(), 0, 0)
|
||||
field.Set(slice)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setPtr(field reflect.Value) {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.New(field.Type().Elem()))
|
||||
}
|
||||
|
||||
if field.Type().Implements(reflect.TypeOf((*initializer)(nil)).Elem()) {
|
||||
method := field.MethodByName("SetDefaults")
|
||||
if method.IsValid() {
|
||||
method.Call([]reflect.Value{})
|
||||
}
|
||||
}
|
||||
|
||||
fill(field.Elem())
|
||||
}
|
||||
|
||||
func setStruct(field reflect.Value) {
|
||||
for i := 0; i < field.NumField(); i++ {
|
||||
fd := field.Field(i)
|
||||
structField := field.Type().Field(i)
|
||||
|
||||
if structField.Tag.Get(parser.TagLabel) == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
if parser.IsExported(structField) {
|
||||
fill(fd)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setMap(field reflect.Value) {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.MakeMap(field.Type()))
|
||||
}
|
||||
|
||||
ptrValue := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
fill(ptrValue)
|
||||
|
||||
value := ptrValue.Elem().Elem()
|
||||
key := reflect.ValueOf(parser.MapNamePlaceholder)
|
||||
field.SetMapIndex(key, value)
|
||||
}
|
|
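The useful property of Generate is that every nested pointer is allocated and its SetDefaults hook runs before the element is flattened, so defaults become visible. A minimal sketch, assuming the pre-move import path github.com/containous/traefik/v2/pkg/config/generator (this diff only shows "package generator"); the Retry/Config types are illustrative only.

package main

import (
	"fmt"

	// Assumed pre-move import path; this diff only shows "package generator".
	"github.com/containous/traefik/v2/pkg/config/generator"
)

// Retry is an illustrative type, not taken from the diff.
type Retry struct {
	Attempts int
}

// SetDefaults satisfies the unexported initializer interface checked by setPtr.
func (r *Retry) SetDefaults() { r.Attempts = 3 }

type Config struct {
	Retry *Retry
}

func main() {
	cfg := &Config{}
	generator.Generate(cfg)         // allocates cfg.Retry and calls its SetDefaults
	fmt.Println(cfg.Retry.Attempts) // 3
}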
@@ -1,439 +0,0 @@
|
|||
package generator
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestGenerate(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "nil",
|
||||
},
|
||||
{
|
||||
desc: "simple",
|
||||
element: &Ya{},
|
||||
expected: &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 0,
|
||||
FieldIn4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
FieldIn5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
FieldIn6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: &struct{ Field string }{},
|
||||
FieldIn12: func(v string) *string { return &v }(""),
|
||||
FieldIn13: func(v bool) *bool { return &v }(false),
|
||||
FieldIn14: func(v int) *int { return &v }(0),
|
||||
},
|
||||
Field1: "",
|
||||
Field2: false,
|
||||
Field3: 0,
|
||||
Field4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Field5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
Field6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: &struct{ Field string }{},
|
||||
Field12: func(v string) *string { return &v }(""),
|
||||
Field13: func(v bool) *bool { return &v }(false),
|
||||
Field14: func(v int) *int { return &v }(0),
|
||||
Field15: []int{},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with initial state",
|
||||
element: &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "bar",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 1,
|
||||
FieldIn4: nil,
|
||||
FieldIn5: nil,
|
||||
FieldIn6: nil,
|
||||
FieldIn7: nil,
|
||||
FieldIn8: nil,
|
||||
FieldIn9: nil,
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: nil,
|
||||
FieldIn12: nil,
|
||||
FieldIn13: nil,
|
||||
FieldIn14: nil,
|
||||
},
|
||||
Field1: "bir",
|
||||
Field2: true,
|
||||
Field3: 0,
|
||||
Field4: nil,
|
||||
Field5: nil,
|
||||
Field6: nil,
|
||||
Field7: nil,
|
||||
Field8: nil,
|
||||
Field9: nil,
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: nil,
|
||||
Field12: nil,
|
||||
Field13: nil,
|
||||
Field14: nil,
|
||||
Field15: []int{7},
|
||||
},
|
||||
expected: &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "bar",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 1,
|
||||
FieldIn4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
FieldIn5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
FieldIn6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: &struct{ Field string }{},
|
||||
FieldIn12: func(v string) *string { return &v }(""),
|
||||
FieldIn13: func(v bool) *bool { return &v }(false),
|
||||
FieldIn14: func(v int) *int { return &v }(0),
|
||||
},
|
||||
Field1: "bir",
|
||||
Field2: true,
|
||||
Field3: 0,
|
||||
Field4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Field5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
Field6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: &struct{ Field string }{},
|
||||
Field12: func(v string) *string { return &v }(""),
|
||||
Field13: func(v bool) *bool { return &v }(false),
|
||||
Field14: func(v int) *int { return &v }(0),
|
||||
Field15: []int{7},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "setDefault",
|
||||
element: &Hu{},
|
||||
expected: &Hu{
|
||||
Foo: "hu",
|
||||
Fii: &Hi{
|
||||
Field: "hi",
|
||||
},
|
||||
Fuu: map[string]string{"<name>": ""},
|
||||
Fee: map[string]Hi{"<name>": {Field: "hi"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
Generate(test.element)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_generate(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "struct pointer",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fii *struct{ Field string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fii *struct{ Field string }
|
||||
}{
|
||||
Foo: "",
|
||||
Fii: &struct{ Field string }{
|
||||
Field: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string slice",
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int slice",
|
||||
element: &struct {
|
||||
Foo []int
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []int
|
||||
}{
|
||||
Foo: []int{},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct slice",
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Foo: []struct {
|
||||
Field string
|
||||
}{
|
||||
{Field: ""},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fii map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fii map[string]string
|
||||
}{
|
||||
Foo: "",
|
||||
Fii: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fii map[string]struct{ Field string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fii map[string]struct{ Field string }
|
||||
}{
|
||||
Foo: "",
|
||||
Fii: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct pointer level 2",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fuu *struct {
|
||||
Fii map[string]*struct{ Field string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fuu *struct {
|
||||
Fii map[string]*struct{ Field string }
|
||||
}
|
||||
}{
|
||||
Foo: "",
|
||||
Fuu: &struct {
|
||||
Fii map[string]*struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Fii: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "SetDefaults",
|
||||
element: &Hu{},
|
||||
expected: &Hu{
|
||||
Foo: "hu",
|
||||
Fii: &Hi{
|
||||
Field: "hi",
|
||||
},
|
||||
Fuu: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Fee: map[string]Hi{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: "hi",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
generate(test.element)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type Hu struct {
|
||||
Foo string
|
||||
Fii *Hi
|
||||
Fuu map[string]string
|
||||
Fee map[string]Hi
|
||||
}
|
||||
|
||||
func (h *Hu) SetDefaults() {
|
||||
h.Foo = "hu"
|
||||
}
|
||||
|
||||
type Hi struct {
|
||||
Field string
|
||||
}
|
||||
|
||||
func (h *Hi) SetDefaults() {
|
||||
h.Field = "hi"
|
||||
}
|
||||
|
||||
type Ya struct {
|
||||
Foo *Yaa
|
||||
Field1 string
|
||||
Field2 bool
|
||||
Field3 int
|
||||
Field4 map[string]string
|
||||
Field5 map[string]int
|
||||
Field6 map[string]struct{ Field string }
|
||||
Field7 map[string]struct{ Field map[string]string }
|
||||
Field8 map[string]*struct{ Field string }
|
||||
Field9 map[string]*struct{ Field map[string]string }
|
||||
Field10 struct{ Field string }
|
||||
Field11 *struct{ Field string }
|
||||
Field12 *string
|
||||
Field13 *bool
|
||||
Field14 *int
|
||||
Field15 []int
|
||||
}
|
||||
|
||||
type Yaa struct {
|
||||
FieldIn1 string
|
||||
FieldIn2 bool
|
||||
FieldIn3 int
|
||||
FieldIn4 map[string]string
|
||||
FieldIn5 map[string]int
|
||||
FieldIn6 map[string]struct{ Field string }
|
||||
FieldIn7 map[string]struct{ Field map[string]string }
|
||||
FieldIn8 map[string]*struct{ Field string }
|
||||
FieldIn9 map[string]*struct{ Field map[string]string }
|
||||
FieldIn10 struct{ Field string }
|
||||
FieldIn11 *struct{ Field string }
|
||||
FieldIn12 *string
|
||||
FieldIn13 *bool
|
||||
FieldIn14 *int
|
||||
}
|
|
@@ -5,7 +5,7 @@ import (
|
|||
"reflect"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/traefik/paerser/parser"
|
||||
)
|
||||
|
||||
// Decode decodes the given KV pairs into the given element.
|
||||
|
|
|
@@ -7,7 +7,7 @@ import (
|
|||
"strings"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/traefik/paerser/parser"
|
||||
)
|
||||
|
||||
// DecodeToNode converts the labels to a tree of nodes.
|
||||
|
|
|
@@ -6,9 +6,9 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/traefik/paerser/parser"
|
||||
)
|
||||
|
||||
func TestDecodeToNode(t *testing.T) {
|
||||
|
|
|
@@ -3,7 +3,7 @@ package label
|
|||
|
||||
import (
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/traefik/paerser/parser"
|
||||
)
|
||||
|
||||
// DecodeConfiguration converts the labels to a configuration.
|
||||
|
|
|
@@ -6,9 +6,9 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestDecodeConfiguration(t *testing.T) {
|
||||
|
@@ -376,7 +376,7 @@ func TestDecodeConfiguration(t *testing.T) {
|
|||
RateLimit: &dynamic.RateLimit{
|
||||
Average: 42,
|
||||
Burst: 42,
|
||||
Period: types.Duration(time.Second),
|
||||
Period: ptypes.Duration(time.Second),
|
||||
SourceCriterion: &dynamic.SourceCriterion{
|
||||
IPStrategy: &dynamic.IPStrategy{
|
||||
Depth: 42,
|
||||
|
@@ -836,7 +836,7 @@ func TestEncodeConfiguration(t *testing.T) {
|
|||
RateLimit: &dynamic.RateLimit{
|
||||
Average: 42,
|
||||
Burst: 42,
|
||||
Period: types.Duration(time.Second),
|
||||
Period: ptypes.Duration(time.Second),
|
||||
SourceCriterion: &dynamic.SourceCriterion{
|
||||
IPStrategy: &dynamic.IPStrategy{
|
||||
Depth: 42,
|
||||
|
|
|
@@ -1,372 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
)
|
||||
|
||||
type initializer interface {
|
||||
SetDefaults()
|
||||
}
|
||||
|
||||
// FillerOpts Options for the filler.
|
||||
type FillerOpts struct {
|
||||
AllowSliceAsStruct bool
|
||||
}
|
||||
|
||||
// Fill populates the fields of the element using the information in node.
|
||||
func Fill(element interface{}, node *Node, opts FillerOpts) error {
|
||||
return filler{FillerOpts: opts}.Fill(element, node)
|
||||
}
|
||||
|
||||
type filler struct {
|
||||
FillerOpts
|
||||
}
|
||||
|
||||
// Fill populates the fields of the element using the information in node.
|
||||
func (f filler) Fill(element interface{}, node *Node) error {
|
||||
if element == nil || node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if node.Kind == 0 {
|
||||
return fmt.Errorf("missing node type: %s", node.Name)
|
||||
}
|
||||
|
||||
root := reflect.ValueOf(element)
|
||||
if root.Kind() == reflect.Struct {
|
||||
return fmt.Errorf("struct are not supported, use pointer instead")
|
||||
}
|
||||
|
||||
return f.fill(root.Elem(), node)
|
||||
}
|
||||
|
||||
func (f filler) fill(field reflect.Value, node *Node) error {
|
||||
// related to allow-empty tag
|
||||
if node.Disabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch field.Kind() {
|
||||
case reflect.String:
|
||||
field.SetString(node.Value)
|
||||
return nil
|
||||
case reflect.Bool:
|
||||
val, err := strconv.ParseBool(node.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.SetBool(val)
|
||||
return nil
|
||||
case reflect.Int8:
|
||||
return setInt(field, node.Value, 8)
|
||||
case reflect.Int16:
|
||||
return setInt(field, node.Value, 16)
|
||||
case reflect.Int32:
|
||||
return setInt(field, node.Value, 32)
|
||||
case reflect.Int64, reflect.Int:
|
||||
return setInt(field, node.Value, 64)
|
||||
case reflect.Uint8:
|
||||
return setUint(field, node.Value, 8)
|
||||
case reflect.Uint16:
|
||||
return setUint(field, node.Value, 16)
|
||||
case reflect.Uint32:
|
||||
return setUint(field, node.Value, 32)
|
||||
case reflect.Uint64, reflect.Uint:
|
||||
return setUint(field, node.Value, 64)
|
||||
case reflect.Float32:
|
||||
return setFloat(field, node.Value, 32)
|
||||
case reflect.Float64:
|
||||
return setFloat(field, node.Value, 64)
|
||||
case reflect.Struct:
|
||||
return f.setStruct(field, node)
|
||||
case reflect.Ptr:
|
||||
return f.setPtr(field, node)
|
||||
case reflect.Map:
|
||||
return f.setMap(field, node)
|
||||
case reflect.Slice:
|
||||
return f.setSlice(field, node)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filler) setPtr(field reflect.Value, node *Node) error {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.New(field.Type().Elem()))
|
||||
|
||||
if field.Type().Implements(reflect.TypeOf((*initializer)(nil)).Elem()) {
|
||||
method := field.MethodByName("SetDefaults")
|
||||
if method.IsValid() {
|
||||
method.Call([]reflect.Value{})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return f.fill(field.Elem(), node)
|
||||
}
|
||||
|
||||
func (f filler) setStruct(field reflect.Value, node *Node) error {
|
||||
for _, child := range node.Children {
|
||||
fd := field.FieldByName(child.FieldName)
|
||||
|
||||
zeroValue := reflect.Value{}
|
||||
if fd == zeroValue {
|
||||
return fmt.Errorf("field not found, node: %s (%s)", child.Name, child.FieldName)
|
||||
}
|
||||
|
||||
err := f.fill(fd, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f filler) setSlice(field reflect.Value, node *Node) error {
|
||||
if field.Type().Elem().Kind() == reflect.Struct ||
|
||||
field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct {
|
||||
return f.setSliceStruct(field, node)
|
||||
}
|
||||
|
||||
if len(node.Value) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
values := strings.Split(node.Value, ",")
|
||||
|
||||
slice := reflect.MakeSlice(field.Type(), len(values), len(values))
|
||||
field.Set(slice)
|
||||
|
||||
for i := 0; i < len(values); i++ {
|
||||
value := strings.TrimSpace(values[i])
|
||||
|
||||
switch field.Type().Elem().Kind() {
|
||||
case reflect.String:
|
||||
field.Index(i).SetString(value)
|
||||
case reflect.Int:
|
||||
val, err := strconv.ParseInt(value, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.Index(i).SetInt(val)
|
||||
case reflect.Int8:
|
||||
err := setInt(field.Index(i), value, 8)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Int16:
|
||||
err := setInt(field.Index(i), value, 16)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Int32:
|
||||
err := setInt(field.Index(i), value, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Int64:
|
||||
err := setInt(field.Index(i), value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint:
|
||||
val, err := strconv.ParseUint(value, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.Index(i).SetUint(val)
|
||||
case reflect.Uint8:
|
||||
err := setUint(field.Index(i), value, 8)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint16:
|
||||
err := setUint(field.Index(i), value, 16)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint32:
|
||||
err := setUint(field.Index(i), value, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint64:
|
||||
err := setUint(field.Index(i), value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Float32:
|
||||
err := setFloat(field.Index(i), value, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Float64:
|
||||
err := setFloat(field.Index(i), value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Bool:
|
||||
val, err := strconv.ParseBool(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.Index(i).SetBool(val)
|
||||
default:
|
||||
return fmt.Errorf("unsupported type: %s", field.Type().Elem())
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f filler) setSliceStruct(field reflect.Value, node *Node) error {
|
||||
if f.AllowSliceAsStruct && node.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return f.setSliceAsStruct(field, node)
|
||||
}
|
||||
|
||||
field.Set(reflect.MakeSlice(field.Type(), len(node.Children), len(node.Children)))
|
||||
|
||||
for i, child := range node.Children {
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
err := f.setPtr(value, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Index(i).Set(value.Elem().Elem())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f filler) setSliceAsStruct(field reflect.Value, node *Node) error {
|
||||
if len(node.Children) == 0 {
|
||||
return fmt.Errorf("invalid slice: node %s", node.Name)
|
||||
}
|
||||
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
err := f.setPtr(value, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
elem := value.Elem().Elem()
|
||||
|
||||
field.Set(reflect.MakeSlice(field.Type(), 1, 1))
|
||||
field.Index(0).Set(elem)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f filler) setMap(field reflect.Value, node *Node) error {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.MakeMap(field.Type()))
|
||||
}
|
||||
|
||||
if field.Type().Elem().Kind() == reflect.Interface {
|
||||
fillRawValue(field, node, false)
|
||||
|
||||
for _, child := range node.Children {
|
||||
fillRawValue(field, child, true)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, child := range node.Children {
|
||||
ptrValue := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
|
||||
err := f.fill(ptrValue, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value := ptrValue.Elem().Elem()
|
||||
|
||||
key := reflect.ValueOf(child.Name)
|
||||
field.SetMapIndex(key, value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func setInt(field reflect.Value, value string, bitSize int) error {
|
||||
switch field.Type() {
|
||||
case reflect.TypeOf(types.Duration(0)):
|
||||
return setDuration(field, value, bitSize, time.Second)
|
||||
case reflect.TypeOf(time.Duration(0)):
|
||||
return setDuration(field, value, bitSize, time.Nanosecond)
|
||||
default:
|
||||
val, err := strconv.ParseInt(value, 10, bitSize)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(val).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func setDuration(field reflect.Value, value string, bitSize int, defaultUnit time.Duration) error {
|
||||
val, err := strconv.ParseInt(value, 10, bitSize)
|
||||
if err == nil {
|
||||
field.Set(reflect.ValueOf(time.Duration(val) * defaultUnit).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
duration, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(duration).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
func setUint(field reflect.Value, value string, bitSize int) error {
|
||||
val, err := strconv.ParseUint(value, 10, bitSize)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(val).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
func setFloat(field reflect.Value, value string, bitSize int) error {
|
||||
val, err := strconv.ParseFloat(value, bitSize)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(val).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
func fillRawValue(field reflect.Value, node *Node, subMap bool) {
|
||||
m, ok := node.RawValue.(map[string]interface{})
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
if _, self := m[node.Name]; self || !subMap {
|
||||
for k, v := range m {
|
||||
field.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(v))
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
p := map[string]interface{}{node.Name: m}
|
||||
node.RawValue = p
|
||||
|
||||
field.SetMapIndex(reflect.ValueOf(node.Name), reflect.ValueOf(p[node.Name]))
|
||||
}
|
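One detail of setInt/setDuration above is easy to miss: a bare integer is interpreted with a per-type default unit (seconds for types.Duration, nanoseconds for time.Duration), while any value that fails integer parsing falls back to time.ParseDuration. A self-contained sketch of that fallback, independent of the package being removed:

package main

import (
	"fmt"
	"strconv"
	"time"
)

// parseWithDefaultUnit mirrors the setDuration logic above: plain integers are
// scaled by defaultUnit, anything else must be a Go duration string.
func parseWithDefaultUnit(value string, defaultUnit time.Duration) (time.Duration, error) {
	if val, err := strconv.ParseInt(value, 10, 64); err == nil {
		return time.Duration(val) * defaultUnit, nil
	}
	return time.ParseDuration(value)
}

func main() {
	d1, _ := parseWithDefaultUnit("180", time.Second)     // types.Duration style
	d2, _ := parseWithDefaultUnit("180", time.Nanosecond) // time.Duration style
	d3, _ := parseWithDefaultUnit("1m30s", time.Second)
	fmt.Println(d1, d2, d3) // 3m0s 180ns 1m30s
}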
File diff suppressed because it is too large
|
@@ -1,215 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// EncoderToNodeOpts Options for the encoderToNode.
|
||||
type EncoderToNodeOpts struct {
|
||||
OmitEmpty bool
|
||||
TagName string
|
||||
AllowSliceAsStruct bool
|
||||
}
|
||||
|
||||
// EncodeToNode converts an element to a node.
|
||||
// element -> nodes.
|
||||
func EncodeToNode(element interface{}, rootName string, opts EncoderToNodeOpts) (*Node, error) {
|
||||
rValue := reflect.ValueOf(element)
|
||||
node := &Node{Name: rootName}
|
||||
|
||||
encoder := encoderToNode{EncoderToNodeOpts: opts}
|
||||
|
||||
err := encoder.setNodeValue(node, rValue)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
|
||||
|
||||
type encoderToNode struct {
|
||||
EncoderToNodeOpts
|
||||
}
|
||||
|
||||
func (e encoderToNode) setNodeValue(node *Node, rValue reflect.Value) error {
|
||||
switch rValue.Kind() {
|
||||
case reflect.String:
|
||||
node.Value = rValue.String()
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
node.Value = strconv.FormatInt(rValue.Int(), 10)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
node.Value = strconv.FormatUint(rValue.Uint(), 10)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
node.Value = strconv.FormatFloat(rValue.Float(), 'f', 6, 64)
|
||||
case reflect.Bool:
|
||||
node.Value = strconv.FormatBool(rValue.Bool())
|
||||
case reflect.Struct:
|
||||
return e.setStructValue(node, rValue)
|
||||
case reflect.Ptr:
|
||||
return e.setNodeValue(node, rValue.Elem())
|
||||
case reflect.Map:
|
||||
return e.setMapValue(node, rValue)
|
||||
case reflect.Slice:
|
||||
return e.setSliceValue(node, rValue)
|
||||
default:
|
||||
// noop
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) setStructValue(node *Node, rValue reflect.Value) error {
|
||||
rType := rValue.Type()
|
||||
|
||||
for i := 0; i < rValue.NumField(); i++ {
|
||||
field := rType.Field(i)
|
||||
fieldValue := rValue.Field(i)
|
||||
|
||||
if !IsExported(field) {
|
||||
continue
|
||||
}
|
||||
|
||||
if field.Tag.Get(e.TagName) == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
if err := isSupportedType(field); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if e.isSkippedField(field, fieldValue) {
|
||||
continue
|
||||
}
|
||||
|
||||
nodeName := field.Name
|
||||
if e.AllowSliceAsStruct && field.Type.Kind() == reflect.Slice && len(field.Tag.Get(TagLabelSliceAsStruct)) != 0 {
|
||||
nodeName = field.Tag.Get(TagLabelSliceAsStruct)
|
||||
}
|
||||
|
||||
if field.Anonymous {
|
||||
if err := e.setNodeValue(node, fieldValue); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
child := &Node{Name: nodeName, FieldName: field.Name, Description: field.Tag.Get(TagDescription)}
|
||||
|
||||
if err := e.setNodeValue(child, fieldValue); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if field.Type.Kind() == reflect.Ptr {
|
||||
if field.Type.Elem().Kind() != reflect.Struct && fieldValue.IsNil() {
|
||||
continue
|
||||
}
|
||||
|
||||
if field.Type.Elem().Kind() == reflect.Struct && len(child.Children) == 0 {
|
||||
if field.Tag.Get(e.TagName) != TagLabelAllowEmpty {
|
||||
continue
|
||||
}
|
||||
|
||||
child.Value = "true"
|
||||
}
|
||||
}
|
||||
|
||||
node.Children = append(node.Children, child)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) setMapValue(node *Node, rValue reflect.Value) error {
|
||||
if rValue.Type().Elem().Kind() == reflect.Interface {
|
||||
node.RawValue = rValue.Interface()
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, key := range rValue.MapKeys() {
|
||||
child := &Node{Name: key.String(), FieldName: key.String()}
|
||||
node.Children = append(node.Children, child)
|
||||
|
||||
if err := e.setNodeValue(child, rValue.MapIndex(key)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) setSliceValue(node *Node, rValue reflect.Value) error {
|
||||
// label-slice-as-struct
|
||||
if rValue.Type().Elem().Kind() == reflect.Struct && !strings.EqualFold(node.Name, node.FieldName) {
|
||||
if rValue.Len() > 1 {
|
||||
return fmt.Errorf("node %s has too many slice entries: %d", node.Name, rValue.Len())
|
||||
}
|
||||
|
||||
return e.setNodeValue(node, rValue.Index(0))
|
||||
}
|
||||
|
||||
if rValue.Type().Elem().Kind() == reflect.Struct ||
|
||||
rValue.Type().Elem().Kind() == reflect.Ptr && rValue.Type().Elem().Elem().Kind() == reflect.Struct {
|
||||
for i := 0; i < rValue.Len(); i++ {
|
||||
child := &Node{Name: "[" + strconv.Itoa(i) + "]"}
|
||||
|
||||
eValue := rValue.Index(i)
|
||||
|
||||
err := e.setNodeValue(child, eValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
node.Children = append(node.Children, child)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var values []string
|
||||
|
||||
for i := 0; i < rValue.Len(); i++ {
|
||||
eValue := rValue.Index(i)
|
||||
|
||||
switch eValue.Kind() {
|
||||
case reflect.String:
|
||||
values = append(values, eValue.String())
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
values = append(values, strconv.FormatInt(eValue.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
values = append(values, strconv.FormatUint(eValue.Uint(), 10))
|
||||
case reflect.Float32, reflect.Float64:
|
||||
values = append(values, strconv.FormatFloat(eValue.Float(), 'f', 6, 64))
|
||||
case reflect.Bool:
|
||||
values = append(values, strconv.FormatBool(eValue.Bool()))
|
||||
default:
|
||||
// noop
|
||||
}
|
||||
}
|
||||
|
||||
node.Value = strings.Join(values, ", ")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) isSkippedField(field reflect.StructField, fieldValue reflect.Value) bool {
|
||||
if e.OmitEmpty && field.Type.Kind() == reflect.String && fieldValue.Len() == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct && fieldValue.IsNil() {
|
||||
return true
|
||||
}
|
||||
|
||||
if e.OmitEmpty && (field.Type.Kind() == reflect.Slice) &&
|
||||
(fieldValue.IsNil() || fieldValue.Len() == 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (field.Type.Kind() == reflect.Map) &&
|
||||
(fieldValue.IsNil() || fieldValue.Len() == 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
|
@@ -1,813 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestEncodeToNode(t *testing.T) {
|
||||
type expected struct {
|
||||
node *Node
|
||||
error bool
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "Description",
|
||||
element: struct {
|
||||
Foo string `description:"text"`
|
||||
}{Foo: "bar"},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "bar", Description: "text"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string",
|
||||
element: struct {
|
||||
Foo string
|
||||
}{Foo: "bar"},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 string fields",
|
||||
element: struct {
|
||||
Foo string
|
||||
Fii string
|
||||
}{Foo: "bar", Fii: "hii"},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int",
|
||||
element: struct {
|
||||
Foo int
|
||||
}{Foo: 1},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int8",
|
||||
element: struct {
|
||||
Foo int8
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int16",
|
||||
element: struct {
|
||||
Foo int16
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int32",
|
||||
element: struct {
|
||||
Foo int32
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int64",
|
||||
element: struct {
|
||||
Foo int64
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint",
|
||||
element: struct {
|
||||
Foo uint
|
||||
}{Foo: 1},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint8",
|
||||
element: struct {
|
||||
Foo uint8
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint16",
|
||||
element: struct {
|
||||
Foo uint16
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint32",
|
||||
element: struct {
|
||||
Foo uint32
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint64",
|
||||
element: struct {
|
||||
Foo uint64
|
||||
}{Foo: 2},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float32",
|
||||
element: struct {
|
||||
Foo float32
|
||||
}{Foo: 1.12},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float64",
|
||||
element: struct {
|
||||
Foo float64
|
||||
}{Foo: 1.12},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool",
|
||||
element: struct {
|
||||
Foo bool
|
||||
}{Foo: true},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: "hii",
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct unexported field",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Fii string
|
||||
fuu string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Fii string
|
||||
fuu string
|
||||
}{
|
||||
Fii: "hii",
|
||||
fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: "hii",
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: func(v string) *string { return &v }("hii"),
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string nil pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: nil,
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *int
|
||||
Fuu int
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *int
|
||||
Fuu int
|
||||
}{
|
||||
Fii: func(v int) *int { return &v }(6),
|
||||
Fuu: 4,
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "6"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "4"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *bool
|
||||
Fuu bool
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *bool
|
||||
Fuu bool
|
||||
}{
|
||||
Fii: func(v bool) *bool { return &v }(true),
|
||||
Fuu: true,
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "true"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "true"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct nil struct pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: nil,
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer, not allowEmpty",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer, allowEmpty",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
} `label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Bar map[string]string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Bar map[string]string
|
||||
}{
|
||||
Bar: map[string]string{
|
||||
"name1": "huu",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Children: []*Node{
|
||||
{Name: "name1", FieldName: "name1", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
}}},
|
||||
},
|
||||
{
|
||||
desc: "empty map",
|
||||
element: struct {
|
||||
Bar map[string]string
|
||||
}{
|
||||
Bar: map[string]string{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "map nil",
|
||||
element: struct {
|
||||
Bar map[string]string
|
||||
}{
|
||||
Bar: nil,
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "map with non string key",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Bar map[int]string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Bar map[int]string
|
||||
}{
|
||||
Bar: map[int]string{
|
||||
1: "huu",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "slice of string",
|
||||
element: struct{ Bar []string }{Bar: []string{"huu", "hii"}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "huu, hii"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int",
|
||||
element: struct{ Bar []int }{Bar: []int{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int8",
|
||||
element: struct{ Bar []int8 }{Bar: []int8{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int16",
|
||||
element: struct{ Bar []int16 }{Bar: []int16{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int32",
|
||||
element: struct{ Bar []int32 }{Bar: []int32{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int64",
|
||||
element: struct{ Bar []int64 }{Bar: []int64{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint",
|
||||
element: struct{ Bar []uint }{Bar: []uint{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint8",
|
||||
element: struct{ Bar []uint8 }{Bar: []uint8{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint16",
|
||||
element: struct{ Bar []uint16 }{Bar: []uint16{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint32",
|
||||
element: struct{ Bar []uint32 }{Bar: []uint32{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint64",
|
||||
element: struct{ Bar []uint64 }{Bar: []uint64{4, 2, 3}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of float32",
|
||||
element: struct{ Bar []float32 }{Bar: []float32{4.1, 2, 3.2}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of float64",
|
||||
element: struct{ Bar []float64 }{Bar: []float64{4.1, 2, 3.2}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of bool",
|
||||
element: struct{ Bar []bool }{Bar: []bool{true, false, true}},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "true, false, true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice label-slice-as-struct",
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
} `label-slice-as-struct:"Fii"`
|
||||
}{
|
||||
Foo: []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
}{
|
||||
{
|
||||
Bar: "haa",
|
||||
Bir: "hii",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{{
|
||||
Name: "Fii",
|
||||
FieldName: "Foo",
|
||||
Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "haa"},
|
||||
{Name: "Bir", FieldName: "Bir", Value: "hii"},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice label-slice-as-struct several slice entries",
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
} `label-slice-as-struct:"Fii"`
|
||||
}{
|
||||
Foo: []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
}{
|
||||
{
|
||||
Bar: "haa",
|
||||
Bir: "hii",
|
||||
},
|
||||
{
|
||||
Bar: "haa",
|
||||
Bir: "hii",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct",
|
||||
element: struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Foo: []struct {
|
||||
Field string
|
||||
}{
|
||||
{
|
||||
Field: "bar",
|
||||
},
|
||||
{
|
||||
Field: "bir",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
}}},
|
||||
},
|
||||
{
|
||||
desc: "slice of pointer of struct",
|
||||
element: struct {
|
||||
Foo []*struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Foo: []*struct {
|
||||
Field string
|
||||
}{
|
||||
{Field: "bar"},
|
||||
{Field: "bir"},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
}}},
|
||||
},
|
||||
{
|
||||
desc: "empty slice",
|
||||
element: struct {
|
||||
Bar []string
|
||||
}{
|
||||
Bar: []string{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "nil slice",
|
||||
element: struct {
|
||||
Bar []string
|
||||
}{
|
||||
Bar: nil,
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "ignore slice",
|
||||
element: struct {
|
||||
Bar []string `label:"-"`
|
||||
}{
|
||||
Bar: []string{"huu", "hii"},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "embedded",
|
||||
element: struct {
|
||||
Foo struct{ FiiFoo }
|
||||
}{
|
||||
Foo: struct{ FiiFoo }{
|
||||
FiiFoo: FiiFoo{
|
||||
Fii: "hii",
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "raw value",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Bar map[string]interface{}
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Bar map[string]interface{}
|
||||
}{
|
||||
Bar: map[string]interface{}{
|
||||
"AAA": "valueA",
|
||||
"BBB": map[string]interface{}{
|
||||
"CCC": map[string]interface{}{
|
||||
"DDD": "valueD",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", RawValue: map[string]interface{}{
|
||||
"AAA": "valueA",
|
||||
"BBB": map[string]interface{}{
|
||||
"CCC": map[string]interface{}{
|
||||
"DDD": "valueD",
|
||||
},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
etnOpts := EncoderToNodeOpts{OmitEmpty: true, TagName: TagLabel, AllowSliceAsStruct: true}
|
||||
node, err := EncodeToNode(test.element, DefaultRootName, etnOpts)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected.node, node)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -1,167 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
)
|
||||
|
||||
const defaultPtrValue = "false"
|
||||
|
||||
// FlatOpts holds options used when encoding to Flat.
|
||||
type FlatOpts struct {
|
||||
Case string // "lower" or "upper", defaults to "lower".
|
||||
Separator string
|
||||
SkipRoot bool
|
||||
TagName string
|
||||
}
|
||||
|
||||
// Flat is a configuration item representation.
|
||||
type Flat struct {
|
||||
Name string
|
||||
Description string
|
||||
Default string
|
||||
}
|
||||
|
||||
// EncodeToFlat encodes a node to a Flat representation.
|
||||
// Even though the given node argument should have already been augmented with metadata such as kind,
|
||||
// the element (and its type information) is still needed to treat remaining edge cases.
|
||||
func EncodeToFlat(element interface{}, node *Node, opts FlatOpts) ([]Flat, error) {
|
||||
if element == nil || node == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
if node.Kind == 0 {
|
||||
return nil, fmt.Errorf("missing node type: %s", node.Name)
|
||||
}
|
||||
|
||||
elem := reflect.ValueOf(element)
|
||||
if elem.Kind() == reflect.Struct {
|
||||
return nil, fmt.Errorf("structs are not supported, use pointer instead")
|
||||
}
|
||||
|
||||
encoder := encoderToFlat{FlatOpts: opts}
|
||||
|
||||
var entries []Flat
|
||||
if encoder.SkipRoot {
|
||||
for _, child := range node.Children {
|
||||
field := encoder.getField(elem.Elem(), child)
|
||||
entries = append(entries, encoder.createFlat(field, child.Name, child)...)
|
||||
}
|
||||
} else {
|
||||
entries = encoder.createFlat(elem, strings.ToLower(node.Name), node)
|
||||
}
|
||||
|
||||
sort.Slice(entries, func(i, j int) bool { return entries[i].Name < entries[j].Name })
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
|
||||
type encoderToFlat struct {
|
||||
FlatOpts
|
||||
}
|
||||
|
||||
func (e encoderToFlat) createFlat(field reflect.Value, name string, node *Node) []Flat {
|
||||
var entries []Flat
|
||||
if node.Kind != reflect.Map && node.Description != "-" {
|
||||
if !(node.Kind == reflect.Ptr && len(node.Children) > 0) ||
|
||||
(node.Kind == reflect.Ptr && node.Tag.Get(e.TagName) == TagLabelAllowEmpty) {
|
||||
if node.Name[0] != '[' {
|
||||
entries = append(entries, Flat{
|
||||
Name: e.getName(name),
|
||||
Description: node.Description,
|
||||
Default: e.getNodeValue(e.getField(field, node), node),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, child := range node.Children {
|
||||
if node.Kind == reflect.Map {
|
||||
fChild := e.getField(field, child)
|
||||
|
||||
var v string
|
||||
if child.Kind == reflect.Struct {
|
||||
v = defaultPtrValue
|
||||
} else {
|
||||
v = e.getNodeValue(fChild, child)
|
||||
}
|
||||
|
||||
if node.Description != "-" {
|
||||
entries = append(entries, Flat{
|
||||
Name: e.getName(name, child.Name),
|
||||
Description: node.Description,
|
||||
Default: v,
|
||||
})
|
||||
}
|
||||
|
||||
if child.Kind == reflect.Struct || child.Kind == reflect.Ptr {
|
||||
for _, ch := range child.Children {
|
||||
f := e.getField(fChild, ch)
|
||||
n := e.getName(name, child.Name, ch.Name)
|
||||
entries = append(entries, e.createFlat(f, n, ch)...)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
f := e.getField(field, child)
|
||||
n := e.getName(name, child.Name)
|
||||
entries = append(entries, e.createFlat(f, n, child)...)
|
||||
}
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
func (e encoderToFlat) getField(field reflect.Value, node *Node) reflect.Value {
|
||||
switch field.Kind() {
|
||||
case reflect.Struct:
|
||||
return field.FieldByName(node.FieldName)
|
||||
case reflect.Ptr:
|
||||
if field.Elem().Kind() == reflect.Struct {
|
||||
return field.Elem().FieldByName(node.FieldName)
|
||||
}
|
||||
return field.Elem()
|
||||
case reflect.Map:
|
||||
return field.MapIndex(reflect.ValueOf(node.FieldName))
|
||||
default:
|
||||
return field
|
||||
}
|
||||
}
|
||||
|
||||
func (e encoderToFlat) getNodeValue(field reflect.Value, node *Node) string {
|
||||
if node.Kind == reflect.Ptr && len(node.Children) > 0 {
|
||||
return defaultPtrValue
|
||||
}
|
||||
|
||||
if field.Kind() == reflect.Int64 {
|
||||
i, _ := strconv.ParseInt(node.Value, 10, 64)
|
||||
|
||||
switch field.Type() {
|
||||
case reflect.TypeOf(types.Duration(time.Second)):
|
||||
return strconv.Itoa(int(i) / int(time.Second))
|
||||
case reflect.TypeOf(time.Second):
|
||||
return time.Duration(i).String()
|
||||
}
|
||||
}
|
||||
|
||||
return node.Value
|
||||
}
|
||||
|
||||
func (e encoderToFlat) getName(names ...string) string {
|
||||
var name string
|
||||
if names[len(names)-1][0] == '[' {
|
||||
name = strings.Join(names, "")
|
||||
} else {
|
||||
name = strings.Join(names, e.Separator)
|
||||
}
|
||||
|
||||
if strings.EqualFold(e.Case, "upper") {
|
||||
return strings.ToUpper(name)
|
||||
}
|
||||
return strings.ToLower(name)
|
||||
}
|
File diff suppressed because it is too large
|
@@ -1,97 +0,0 @@
package parser

import (
	"fmt"
	"sort"
	"strings"
)

// DecodeToNode converts the labels to a tree of nodes.
// If any filters are present, labels which do not match the filters are skipped.
func DecodeToNode(labels map[string]string, rootName string, filters ...string) (*Node, error) {
	sortedKeys := sortKeys(labels, filters)

	var node *Node
	for i, key := range sortedKeys {
		split := strings.Split(key, ".")

		if split[0] != rootName {
			return nil, fmt.Errorf("invalid label root %s", split[0])
		}

		var parts []string
		for _, v := range split {
			if v == "" {
				return nil, fmt.Errorf("invalid element: %s", key)
			}

			if v[0] == '[' {
				return nil, fmt.Errorf("invalid leading character '[' in field name (bracket is a slice delimiter): %s", v)
			}

			if strings.HasSuffix(v, "]") && v[0] != '[' {
				indexLeft := strings.Index(v, "[")
				parts = append(parts, v[:indexLeft], v[indexLeft:])
			} else {
				parts = append(parts, v)
			}
		}

		if i == 0 {
			node = &Node{}
		}
		decodeToNode(node, parts, labels[key])
	}

	return node, nil
}

func decodeToNode(root *Node, path []string, value string) {
	if len(root.Name) == 0 {
		root.Name = path[0]
	}

	// it's a leaf or not -> children
	if len(path) > 1 {
		if n := containsNode(root.Children, path[1]); n != nil {
			// the child already exists
			decodeToNode(n, path[1:], value)
		} else {
			// new child
			child := &Node{Name: path[1]}
			decodeToNode(child, path[1:], value)
			root.Children = append(root.Children, child)
		}
	} else {
		root.Value = value
	}
}

func containsNode(nodes []*Node, name string) *Node {
	for _, n := range nodes {
		if strings.EqualFold(name, n.Name) {
			return n
		}
	}
	return nil
}

func sortKeys(labels map[string]string, filters []string) []string {
	var sortedKeys []string
	for key := range labels {
		if len(filters) == 0 {
			sortedKeys = append(sortedKeys, key)
			continue
		}

		for _, filter := range filters {
			if len(key) >= len(filter) && strings.EqualFold(key[:len(filter)], filter) {
				sortedKeys = append(sortedKeys, key)
				continue
			}
		}
	}
	sort.Strings(sortedKeys)

	return sortedKeys
}
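Reviewer note: DecodeToNode is the piece that splits a trailing [i] index out of a label segment, so here is a minimal sketch of the resulting tree shape, again as a throwaway in-package test with made-up labels (not part of the removed code).

package parser

import "testing"

func TestDecodeToNodeSketch(t *testing.T) {
	labels := map[string]string{
		"traefik.foo.aaa":    "v1",
		"traefik.bar[0].bbb": "v2", // "bar[0]" is split into a "bar" node holding a "[0]" child
	}

	node, err := DecodeToNode(labels, DefaultRootName)
	if err != nil {
		t.Fatal(err)
	}

	// Expected shape: traefik -> {bar -> [0] -> bbb, foo -> aaa}.
	if node.Name != DefaultRootName || len(node.Children) != 2 {
		t.Fatalf("unexpected tree: %+v", node)
	}
}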
@@ -1,261 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecodeToNode(t *testing.T) {
|
||||
type expected struct {
|
||||
error bool
|
||||
node *Node
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
in map[string]string
|
||||
filters []string
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "no label",
|
||||
in: map[string]string{},
|
||||
expected: expected{node: nil},
|
||||
},
|
||||
{
|
||||
desc: "invalid label, ending by a dot",
|
||||
in: map[string]string{
|
||||
"traefik.http.": "bar",
|
||||
},
|
||||
expected: expected{
|
||||
error: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "level 1",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "level 1 empty value",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Value: ""},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "level 2",
|
||||
in: map[string]string{
|
||||
"traefik.foo.bar": "bar",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{{
|
||||
Name: "foo",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 0",
|
||||
in: map[string]string{
|
||||
"traefik": "bar",
|
||||
"traefic": "bur",
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "several entries, prefix filter",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
"traefik.fii": "bir",
|
||||
},
|
||||
filters: []string{"traefik.Foo"},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 1",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
"traefik.fii": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "fii", Value: "bur"},
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2",
|
||||
in: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
"traefik.foo.bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2, case insensitive",
|
||||
in: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
"traefik.Foo.bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "Foo", Children: []*Node{
|
||||
{Name: "bbb", Value: "bur"},
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2, 3 children",
|
||||
in: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
"traefik.foo.bbb": "bur",
|
||||
"traefik.foo.ccc": "bir",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 3",
|
||||
in: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 3, 2 children level 1",
|
||||
in: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
"traefik.bar.foo.bbb": "bir",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bbb", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, slice syntax",
|
||||
in: map[string]string{
|
||||
"traefik.foo[0].aaa": "bar0",
|
||||
"traefik.foo[0].bbb": "bur0",
|
||||
"traefik.foo[1].aaa": "bar1",
|
||||
"traefik.foo[1].bbb": "bur1",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar0"},
|
||||
{Name: "bbb", Value: "bur0"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar1"},
|
||||
{Name: "bbb", Value: "bur1"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, invalid slice syntax",
|
||||
in: map[string]string{
|
||||
"traefik.foo.[0].aaa": "bar0",
|
||||
"traefik.foo.[0].bbb": "bur0",
|
||||
"traefik.foo.[1].aaa": "bar1",
|
||||
"traefik.foo.[1].bbb": "bur1",
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
out, err := DecodeToNode(test.in, DefaultRootName, test.filters...)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
|
||||
if !assert.Equal(t, test.expected.node, out) {
|
||||
bytes, err := json.MarshalIndent(out, "", " ")
|
||||
require.NoError(t, err)
|
||||
fmt.Println(string(bytes))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -1,67 +0,0 @@
package parser

import (
	"fmt"
	"reflect"
)

// EncodeNode Converts a node to labels.
// nodes -> labels.
func EncodeNode(node *Node) map[string]string {
	labels := make(map[string]string)
	encodeNode(labels, node.Name, node)
	return labels
}

func encodeNode(labels map[string]string, root string, node *Node) {
	for _, child := range node.Children {
		if child.Disabled {
			continue
		}

		var sep string
		if child.Name[0] != '[' {
			sep = "."
		}

		childName := root + sep + child.Name

		if child.RawValue != nil {
			encodeRawValue(labels, childName, child.RawValue)
			continue
		}

		if len(child.Children) > 0 {
			encodeNode(labels, childName, child)
		} else if len(child.Name) > 0 {
			labels[childName] = child.Value
		}
	}
}

func encodeRawValue(labels map[string]string, root string, rawValue interface{}) {
	if rawValue == nil {
		return
	}

	tValue := reflect.TypeOf(rawValue)

	if tValue.Kind() == reflect.Map && tValue.Elem().Kind() == reflect.Interface {
		r := reflect.ValueOf(rawValue).
			Convert(reflect.TypeOf((map[string]interface{})(nil))).
			Interface().(map[string]interface{})

		for k, v := range r {
			switch tv := v.(type) {
			case string:
				labels[root+"."+k] = tv
			case []interface{}:
				for i, e := range tv {
					encodeRawValue(labels, fmt.Sprintf("%s.%s[%d]", root, k, i), e)
				}
			default:
				encodeRawValue(labels, root+"."+k, v)
			}
		}
	}
}
@@ -1,234 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestEncodeNode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
node *Node
|
||||
expected map[string]string
|
||||
}{
|
||||
{
|
||||
desc: "1 label",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 labels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa": "bar",
|
||||
"traefik.bbb": "bur",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 labels, 1 disabled",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur", Disabled: true},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 levels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "3 levels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 levels, same root",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "several levels, different root",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.bar.ccc": "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple labels, multiple levels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
"traefik.bar.ccc": "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct syntax",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar0"},
|
||||
{Name: "bbb", Value: "bur0"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar1"},
|
||||
{Name: "bbb", Value: "bur1"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo[0].aaa": "bar0",
|
||||
"traefik.foo[0].bbb": "bur0",
|
||||
"traefik.foo[1].aaa": "bar1",
|
||||
"traefik.foo[1].bbb": "bur1",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "raw value, level 1",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", RawValue: map[string]interface{}{
|
||||
"bbb": "test1",
|
||||
"ccc": "test2",
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa.bbb": "test1",
|
||||
"traefik.aaa.ccc": "test2",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "raw value, level 2",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", RawValue: map[string]interface{}{
|
||||
"bbb": "test1",
|
||||
"ccc": map[string]interface{}{
|
||||
"ddd": "test2",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa.bbb": "test1",
|
||||
"traefik.aaa.ccc.ddd": "test2",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "raw value, slice of struct",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", RawValue: map[string]interface{}{
|
||||
"bbb": []interface{}{
|
||||
map[string]interface{}{
|
||||
"ccc": "test1",
|
||||
"ddd": "test2",
|
||||
},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa.bbb[0].ccc": "test1",
|
||||
"traefik.aaa.bbb[0].ddd": "test2",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
labels := EncodeNode(test.node)
|
||||
|
||||
assert.Equal(t, test.expected, labels)
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -1,22 +0,0 @@
package parser

import "reflect"

// DefaultRootName is the default name of the root node and the prefix of element name from the resources.
const DefaultRootName = "traefik"

// MapNamePlaceholder is the placeholder for the map name.
const MapNamePlaceholder = "<name>"

// Node is a label node.
type Node struct {
	Name        string            `json:"name"`
	Description string            `json:"description,omitempty"`
	FieldName   string            `json:"fieldName"`
	Value       string            `json:"value,omitempty"`
	RawValue    interface{}       `json:"rawValue,omitempty"`
	Disabled    bool              `json:"disabled,omitempty"`
	Kind        reflect.Kind      `json:"kind,omitempty"`
	Tag         reflect.StructTag `json:"tag,omitempty"`
	Children    []*Node           `json:"children,omitempty"`
}
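Reviewer note: the struct above carries JSON tags, so a small sketch of how a node tree marshals may help when reading the removed files; the tree and values here are invented for illustration and are not part of the removed code.

package parser

import (
	"encoding/json"
	"fmt"
)

func sketchNodeJSON() {
	n := &Node{
		Name: "traefik",
		Children: []*Node{
			{Name: "foo", FieldName: "Foo", Value: "bar"},
		},
	}

	out, _ := json.Marshal(n)
	// Prints something like:
	// {"name":"traefik","fieldName":"","children":[{"name":"foo","fieldName":"Foo","value":"bar"}]}
	fmt.Println(string(out))
}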
@@ -1,278 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// MetadataOpts Options for the metadata.
|
||||
type MetadataOpts struct {
|
||||
TagName string
|
||||
AllowSliceAsStruct bool
|
||||
}
|
||||
|
||||
// AddMetadata adds metadata such as type, inferred from element, to a node.
|
||||
func AddMetadata(element interface{}, node *Node, opts MetadataOpts) error {
|
||||
return metadata{MetadataOpts: opts}.Add(element, node)
|
||||
}
|
||||
|
||||
type metadata struct {
|
||||
MetadataOpts
|
||||
}
|
||||
|
||||
// Add adds metadata such as type, inferred from element, to a node.
|
||||
func (m metadata) Add(element interface{}, node *Node) error {
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
return fmt.Errorf("invalid node %s: no child", node.Name)
|
||||
}
|
||||
|
||||
if element == nil {
|
||||
return errors.New("nil structure")
|
||||
}
|
||||
|
||||
rootType := reflect.TypeOf(element)
|
||||
node.Kind = rootType.Kind()
|
||||
|
||||
return m.browseChildren(rootType, node)
|
||||
}
|
||||
|
||||
func (m metadata) browseChildren(fType reflect.Type, node *Node) error {
|
||||
for _, child := range node.Children {
|
||||
if err := m.add(fType, child); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m metadata) add(rootType reflect.Type, node *Node) error {
|
||||
rType := rootType
|
||||
if rootType.Kind() == reflect.Ptr {
|
||||
rType = rootType.Elem()
|
||||
}
|
||||
|
||||
if rType.Kind() == reflect.Map && rType.Elem().Kind() == reflect.Interface {
|
||||
addRawValue(node)
|
||||
return nil
|
||||
}
|
||||
|
||||
field, err := m.findTypedField(rType, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = isSupportedType(field); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fType := field.Type
|
||||
node.Kind = fType.Kind()
|
||||
node.Tag = field.Tag
|
||||
|
||||
if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct ||
|
||||
fType.Kind() == reflect.Map {
|
||||
if len(node.Children) == 0 && field.Tag.Get(m.TagName) != TagLabelAllowEmpty {
|
||||
return fmt.Errorf("%s cannot be a standalone element (type %s)", node.Name, fType)
|
||||
}
|
||||
|
||||
node.Disabled = len(node.Value) > 0 && !strings.EqualFold(node.Value, "true") && field.Tag.Get(m.TagName) == TagLabelAllowEmpty
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct {
|
||||
return m.browseChildren(fType, node)
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Map {
|
||||
if fType.Elem().Kind() == reflect.Interface {
|
||||
addRawValue(node)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, child := range node.Children {
|
||||
// elem is a map entry value type
|
||||
elem := fType.Elem()
|
||||
child.Kind = elem.Kind()
|
||||
|
||||
if elem.Kind() == reflect.Map || elem.Kind() == reflect.Struct ||
|
||||
(elem.Kind() == reflect.Ptr && elem.Elem().Kind() == reflect.Struct) {
|
||||
if err = m.browseChildren(elem, child); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Slice {
|
||||
if m.AllowSliceAsStruct && field.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return m.browseChildren(fType.Elem(), node)
|
||||
}
|
||||
|
||||
for _, ch := range node.Children {
|
||||
ch.Kind = fType.Elem().Kind()
|
||||
if err = m.browseChildren(fType.Elem(), ch); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("invalid node %s: %v", node.Name, fType.Kind())
|
||||
}
|
||||
|
||||
func (m metadata) findTypedField(rType reflect.Type, node *Node) (reflect.StructField, error) {
|
||||
for i := 0; i < rType.NumField(); i++ {
|
||||
cField := rType.Field(i)
|
||||
|
||||
fieldName := cField.Tag.Get(TagLabelSliceAsStruct)
|
||||
if !m.AllowSliceAsStruct || len(fieldName) == 0 {
|
||||
fieldName = cField.Name
|
||||
}
|
||||
|
||||
if IsExported(cField) {
|
||||
if cField.Anonymous {
|
||||
if cField.Type.Kind() == reflect.Struct {
|
||||
structField, err := m.findTypedField(cField.Type, node)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
return structField, nil
|
||||
}
|
||||
}
|
||||
|
||||
if strings.EqualFold(fieldName, node.Name) {
|
||||
node.FieldName = cField.Name
|
||||
return cField, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return reflect.StructField{}, fmt.Errorf("field not found, node: %s", node.Name)
|
||||
}
|
||||
|
||||
// IsExported reports whether f is exported.
|
||||
// https://golang.org/pkg/reflect/#StructField
|
||||
func IsExported(f reflect.StructField) bool {
|
||||
return f.PkgPath == ""
|
||||
}
|
||||
|
||||
func isSupportedType(field reflect.StructField) error {
|
||||
fType := field.Type
|
||||
|
||||
if fType.Kind() == reflect.Slice {
|
||||
switch fType.Elem().Kind() {
|
||||
case reflect.String,
|
||||
reflect.Bool,
|
||||
reflect.Int,
|
||||
reflect.Int8,
|
||||
reflect.Int16,
|
||||
reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint,
|
||||
reflect.Uint8,
|
||||
reflect.Uint16,
|
||||
reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Uintptr,
|
||||
reflect.Float32,
|
||||
reflect.Float64,
|
||||
reflect.Struct,
|
||||
reflect.Ptr:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("unsupported slice type: %v", fType)
|
||||
}
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Map && fType.Key().Kind() != reflect.String {
|
||||
return fmt.Errorf("unsupported map key type: %v", fType.Key())
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Func {
|
||||
return fmt.Errorf("unsupported type: %v", fType)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
/*
|
||||
RawMap section
|
||||
*/
|
||||
|
||||
func addRawValue(node *Node) {
|
||||
if node.RawValue == nil {
|
||||
node.RawValue = nodeToRawMap(node)
|
||||
}
|
||||
|
||||
node.Children = nil
|
||||
}
|
||||
|
||||
func nodeToRawMap(node *Node) map[string]interface{} {
|
||||
result := map[string]interface{}{}
|
||||
|
||||
squashNode(node, result, true)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func squashNode(node *Node, acc map[string]interface{}, root bool) {
|
||||
if len(node.Children) == 0 {
|
||||
acc[node.Name] = node.Value
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// slice
|
||||
if isArrayKey(node.Children[0].Name) {
|
||||
var accChild []interface{}
|
||||
|
||||
for _, child := range node.Children {
|
||||
tmp := map[string]interface{}{}
|
||||
squashNode(child, tmp, false)
|
||||
accChild = append(accChild, tmp[child.Name])
|
||||
}
|
||||
|
||||
acc[node.Name] = accChild
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// map
|
||||
var accChild map[string]interface{}
|
||||
if root {
|
||||
accChild = acc
|
||||
} else {
|
||||
accChild = typedRawMap(acc, node.Name)
|
||||
}
|
||||
|
||||
for _, child := range node.Children {
|
||||
squashNode(child, accChild, false)
|
||||
}
|
||||
}
|
||||
|
||||
func typedRawMap(m map[string]interface{}, k string) map[string]interface{} {
|
||||
if m[k] == nil {
|
||||
m[k] = map[string]interface{}{}
|
||||
}
|
||||
|
||||
r, ok := m[k].(map[string]interface{})
|
||||
if !ok {
|
||||
panic(fmt.Sprintf("unsupported value (key: %s): %T", k, m[k]))
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func isArrayKey(name string) bool {
|
||||
return name[0] == '[' && name[len(name)-1] == ']'
|
||||
}
|
File diff suppressed because it is too large
|
@@ -1,35 +0,0 @@
// Package parser implements decoding and encoding between a flat map of labels and a typed Configuration.
package parser

// Decode decodes the given map of labels into the given element.
// If any filters are present, labels which do not match the filters are skipped.
// The operation goes through three stages roughly summarized as:
// labels -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> typed element.
func Decode(labels map[string]string, element interface{}, rootName string, filters ...string) error {
	node, err := DecodeToNode(labels, rootName, filters...)
	if err != nil {
		return err
	}

	metaOpts := MetadataOpts{TagName: TagLabel, AllowSliceAsStruct: true}
	err = AddMetadata(element, node, metaOpts)
	if err != nil {
		return err
	}

	return Fill(element, node, FillerOpts{AllowSliceAsStruct: true})
}

// Encode converts an element to labels.
// element -> node (value) -> label (node).
func Encode(element interface{}, rootName string) (map[string]string, error) {
	etnOpts := EncoderToNodeOpts{OmitEmpty: true, TagName: TagLabel, AllowSliceAsStruct: true}
	node, err := EncodeToNode(element, rootName, etnOpts)
	if err != nil {
		return nil, err
	}

	return EncodeNode(node), nil
}
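Reviewer note: to keep the round trip in mind while reviewing the move, here is a minimal usage sketch against the two entry points above, written as a throwaway in-package test. exampleConfig and the label values are invented for illustration and are not part of the removed code.

package parser

import "testing"

type exampleConfig struct {
	Name    string
	Enabled bool
}

func TestDecodeEncodeSketch(t *testing.T) {
	labels := map[string]string{
		"traefik.name":    "svc",
		"traefik.enabled": "true",
	}

	cfg := &exampleConfig{}
	if err := Decode(labels, cfg, DefaultRootName); err != nil {
		t.Fatal(err)
	}
	if cfg.Name != "svc" || !cfg.Enabled {
		t.Fatalf("unexpected decode result: %+v", cfg)
	}

	// Encoding goes back through the exported Go field names, so the keys
	// come out as "traefik.Name" and "traefik.Enabled".
	out, err := Encode(cfg, DefaultRootName)
	if err != nil {
		t.Fatal(err)
	}
	if out["traefik.Name"] != "svc" {
		t.Fatalf("unexpected encode result: %+v", out)
	}
}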
@@ -1,346 +0,0 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
type Tomato struct {
|
||||
Name string
|
||||
Meta map[string]interface{}
|
||||
}
|
||||
|
||||
type Potato struct {
|
||||
Name string
|
||||
Meta map[string]map[string]interface{}
|
||||
}
|
||||
|
||||
func TestDecode_RawValue(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
labels map[string]string
|
||||
elt interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "level 1",
|
||||
elt: &Tomato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa": "test",
|
||||
},
|
||||
expected: &Tomato{
|
||||
Name: "test",
|
||||
Meta: map[string]interface{}{
|
||||
"aaa": "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "level 2",
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa": "test",
|
||||
"traefik.meta.bbb.ccc": "test",
|
||||
},
|
||||
elt: &Tomato{},
|
||||
expected: &Tomato{
|
||||
Name: "test",
|
||||
Meta: map[string]interface{}{
|
||||
"aaa": "test",
|
||||
"bbb": map[string]interface{}{
|
||||
"ccc": "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "level 3",
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa": "test",
|
||||
"traefik.meta.bbb.ccc": "test",
|
||||
"traefik.meta.bbb.ddd.eee": "test",
|
||||
},
|
||||
elt: &Tomato{},
|
||||
expected: &Tomato{
|
||||
Name: "test",
|
||||
Meta: map[string]interface{}{
|
||||
"aaa": "test",
|
||||
"bbb": map[string]interface{}{
|
||||
"ccc": "test",
|
||||
"ddd": map[string]interface{}{
|
||||
"eee": "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct slice, one entry",
|
||||
elt: &Tomato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test1",
|
||||
"traefik.meta.aaa[0].bbb": "test2",
|
||||
"traefik.meta.aaa[0].ccc": "test3",
|
||||
},
|
||||
expected: &Tomato{
|
||||
Name: "test1",
|
||||
Meta: map[string]interface{}{
|
||||
"aaa": []interface{}{
|
||||
map[string]interface{}{
|
||||
"bbb": "test2",
|
||||
"ccc": "test3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct slice, multiple entries",
|
||||
elt: &Tomato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test1",
|
||||
"traefik.meta.aaa[0].bbb": "test2",
|
||||
"traefik.meta.aaa[0].ccc": "test3",
|
||||
"traefik.meta.aaa[1].bbb": "test4",
|
||||
"traefik.meta.aaa[1].ccc": "test5",
|
||||
"traefik.meta.aaa[2].bbb": "test6",
|
||||
"traefik.meta.aaa[2].ccc": "test7",
|
||||
},
|
||||
expected: &Tomato{
|
||||
Name: "test1",
|
||||
Meta: map[string]interface{}{
|
||||
"aaa": []interface{}{
|
||||
map[string]interface{}{
|
||||
"bbb": "test2",
|
||||
"ccc": "test3",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"bbb": "test4",
|
||||
"ccc": "test5",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"bbb": "test6",
|
||||
"ccc": "test7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, level 1",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa.bbb": "test1",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": "test1",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, level 2",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa.bbb": "test1",
|
||||
"traefik.meta.aaa.ccc": "test2",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": "test1",
|
||||
"ccc": "test2",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, level 3",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa.bbb.ccc": "test1",
|
||||
"traefik.meta.aaa.bbb.ddd": "test2",
|
||||
"traefik.meta.aaa.eee": "test3",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": map[string]interface{}{
|
||||
"ccc": "test1",
|
||||
"ddd": "test2",
|
||||
},
|
||||
"eee": "test3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, level 4",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test",
|
||||
"traefik.meta.aaa.bbb.ccc.ddd": "test1",
|
||||
"traefik.meta.aaa.bbb.ccc.eee": "test2",
|
||||
"traefik.meta.aaa.bbb.fff": "test3",
|
||||
"traefik.meta.aaa.ggg": "test4",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": map[string]interface{}{
|
||||
"ccc": map[string]interface{}{
|
||||
"ddd": "test1",
|
||||
"eee": "test2",
|
||||
},
|
||||
"fff": "test3",
|
||||
},
|
||||
"ggg": "test4",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, struct slice, level 1, one entry",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test1",
|
||||
"traefik.meta.aaa.bbb[0].ccc": "test2",
|
||||
"traefik.meta.aaa.bbb[0].ddd": "test3",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test1",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": []interface{}{
|
||||
map[string]interface{}{
|
||||
"ccc": "test2",
|
||||
"ddd": "test3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, struct slice, level 1, multiple entries",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test1",
|
||||
"traefik.meta.aaa.bbb[0].ccc": "test2",
|
||||
"traefik.meta.aaa.bbb[0].ddd": "test3",
|
||||
"traefik.meta.aaa.bbb[1].ccc": "test4",
|
||||
"traefik.meta.aaa.bbb[1].ddd": "test5",
|
||||
"traefik.meta.aaa.bbb[2].ccc": "test6",
|
||||
"traefik.meta.aaa.bbb[2].ddd": "test7",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test1",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": []interface{}{
|
||||
map[string]interface{}{
|
||||
"ccc": "test2",
|
||||
"ddd": "test3",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"ccc": "test4",
|
||||
"ddd": "test5",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"ccc": "test6",
|
||||
"ddd": "test7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, struct slice, level 2, one entry",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test1",
|
||||
"traefik.meta.aaa.bbb.ccc[0].ddd": "test2",
|
||||
"traefik.meta.aaa.bbb.ccc[0].eee": "test3",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test1",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": map[string]interface{}{
|
||||
"ccc": []interface{}{
|
||||
map[string]interface{}{
|
||||
"ddd": "test2",
|
||||
"eee": "test3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "explicit map of map, struct slice, level 2, multiple entries",
|
||||
elt: &Potato{},
|
||||
labels: map[string]string{
|
||||
"traefik.name": "test1",
|
||||
"traefik.meta.aaa.bbb.ccc[0].ddd": "test2",
|
||||
"traefik.meta.aaa.bbb.ccc[0].eee": "test3",
|
||||
"traefik.meta.aaa.bbb.ccc[1].ddd": "test4",
|
||||
"traefik.meta.aaa.bbb.ccc[1].eee": "test5",
|
||||
"traefik.meta.aaa.bbb.ccc[2].ddd": "test6",
|
||||
"traefik.meta.aaa.bbb.ccc[2].eee": "test7",
|
||||
},
|
||||
expected: &Potato{
|
||||
Name: "test1",
|
||||
Meta: map[string]map[string]interface{}{
|
||||
"aaa": {
|
||||
"bbb": map[string]interface{}{
|
||||
"ccc": []interface{}{
|
||||
map[string]interface{}{
|
||||
"ddd": "test2",
|
||||
"eee": "test3",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"ddd": "test4",
|
||||
"eee": "test5",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"ddd": "test6",
|
||||
"eee": "test7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
if test.desc != "level 3" {
|
||||
continue
|
||||
}
|
||||
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
err := Decode(test.labels, test.elt, "traefik")
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, test.elt)
|
||||
})
|
||||
}
|
||||
}
|
|
@ -1,24 +0,0 @@
package parser

const (
	// TagLabel allows to apply a custom behavior.
	// - "allowEmpty": allows to create an empty struct.
	// - "-": ignore the field.
	TagLabel = "label"

	// TagFile allows to apply a custom behavior.
	// - "allowEmpty": allows to create an empty struct.
	// - "-": ignore the field.
	TagFile = "file"

	// TagLabelSliceAsStruct allows to use a slice of struct by creating one entry into the slice.
	// The value is the substitution name used in the label to access the slice.
	TagLabelSliceAsStruct = "label-slice-as-struct"

	// TagDescription is the documentation for the field.
	// - "-": ignore the field.
	TagDescription = "description"

	// TagLabelAllowEmpty is related to TagLabel.
	TagLabelAllowEmpty = "allowEmpty"
)
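A hypothetical struct (Example, Server and Settings are illustrative names, not from the codebase) showing where these tag keys end up, mirroring how the provider structs elsewhere in this diff use them:

type Server struct {
	URL string `description:"Server URL." json:"url,omitempty"`
}

type Settings struct {
	Enabled bool `description:"Enable the feature." json:"enabled,omitempty"`
}

type Example struct {
	// "label-slice-as-struct": a label addressed as "server" fills a single entry of this slice.
	Servers []Server `description:"Servers." json:"servers,omitempty" label-slice-as-struct:"server"`

	// "allowEmpty": setting the parent label (or file section) with no children still creates the struct.
	Settings *Settings `description:"Settings." json:"settings,omitempty" label:"allowEmpty" file:"allowEmpty"`

	// "-": the field is ignored by the parser and by the generated documentation.
	Deprecated string `description:"-" label:"-"`
}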
@ -34,6 +34,7 @@ import (
|
|||
assetfs "github.com/elazarl/go-bindata-assetfs"
|
||||
legolog "github.com/go-acme/lego/v3/log"
|
||||
"github.com/sirupsen/logrus"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -110,38 +111,38 @@ func (a *API) SetDefaults() {
|
|||
|
||||
// RespondingTimeouts contains timeout configurations for incoming requests to the Traefik instance.
|
||||
type RespondingTimeouts struct {
|
||||
ReadTimeout types.Duration `description:"ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set." json:"readTimeout,omitempty" toml:"readTimeout,omitempty" yaml:"readTimeout,omitempty" export:"true"`
|
||||
WriteTimeout types.Duration `description:"WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set." json:"writeTimeout,omitempty" toml:"writeTimeout,omitempty" yaml:"writeTimeout,omitempty" export:"true"`
|
||||
IdleTimeout types.Duration `description:"IdleTimeout is the maximum amount duration an idle (keep-alive) connection will remain idle before closing itself. If zero, no timeout is set." json:"idleTimeout,omitempty" toml:"idleTimeout,omitempty" yaml:"idleTimeout,omitempty" export:"true"`
|
||||
ReadTimeout ptypes.Duration `description:"ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set." json:"readTimeout,omitempty" toml:"readTimeout,omitempty" yaml:"readTimeout,omitempty" export:"true"`
|
||||
WriteTimeout ptypes.Duration `description:"WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set." json:"writeTimeout,omitempty" toml:"writeTimeout,omitempty" yaml:"writeTimeout,omitempty" export:"true"`
|
||||
IdleTimeout ptypes.Duration `description:"IdleTimeout is the maximum amount duration an idle (keep-alive) connection will remain idle before closing itself. If zero, no timeout is set." json:"idleTimeout,omitempty" toml:"idleTimeout,omitempty" yaml:"idleTimeout,omitempty" export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (a *RespondingTimeouts) SetDefaults() {
|
||||
a.IdleTimeout = types.Duration(DefaultIdleTimeout)
|
||||
a.IdleTimeout = ptypes.Duration(DefaultIdleTimeout)
|
||||
}
|
||||
|
||||
// ForwardingTimeouts contains timeout configurations for forwarding requests to the backend servers.
|
||||
type ForwardingTimeouts struct {
|
||||
DialTimeout types.Duration `description:"The amount of time to wait until a connection to a backend server can be established. If zero, no timeout exists." json:"dialTimeout,omitempty" toml:"dialTimeout,omitempty" yaml:"dialTimeout,omitempty" export:"true"`
|
||||
ResponseHeaderTimeout types.Duration `description:"The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists." json:"responseHeaderTimeout,omitempty" toml:"responseHeaderTimeout,omitempty" yaml:"responseHeaderTimeout,omitempty" export:"true"`
|
||||
IdleConnTimeout types.Duration `description:"The maximum period for which an idle HTTP keep-alive connection will remain open before closing itself" json:"idleConnTimeout,omitempty" toml:"idleConnTimeout,omitempty" yaml:"idleConnTimeout,omitempty" export:"true"`
|
||||
DialTimeout ptypes.Duration `description:"The amount of time to wait until a connection to a backend server can be established. If zero, no timeout exists." json:"dialTimeout,omitempty" toml:"dialTimeout,omitempty" yaml:"dialTimeout,omitempty" export:"true"`
|
||||
ResponseHeaderTimeout ptypes.Duration `description:"The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists." json:"responseHeaderTimeout,omitempty" toml:"responseHeaderTimeout,omitempty" yaml:"responseHeaderTimeout,omitempty" export:"true"`
|
||||
IdleConnTimeout ptypes.Duration `description:"The maximum period for which an idle HTTP keep-alive connection will remain open before closing itself" json:"idleConnTimeout,omitempty" toml:"idleConnTimeout,omitempty" yaml:"idleConnTimeout,omitempty" export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (f *ForwardingTimeouts) SetDefaults() {
|
||||
f.DialTimeout = types.Duration(30 * time.Second)
|
||||
f.IdleConnTimeout = types.Duration(90 * time.Second)
|
||||
f.DialTimeout = ptypes.Duration(30 * time.Second)
|
||||
f.IdleConnTimeout = ptypes.Duration(90 * time.Second)
|
||||
}
|
||||
|
||||
// LifeCycle contains configurations relevant to the lifecycle (such as the shutdown phase) of Traefik.
|
||||
type LifeCycle struct {
|
||||
RequestAcceptGraceTimeout types.Duration `description:"Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure." json:"requestAcceptGraceTimeout,omitempty" toml:"requestAcceptGraceTimeout,omitempty" yaml:"requestAcceptGraceTimeout,omitempty" export:"true"`
|
||||
GraceTimeOut types.Duration `description:"Duration to give active requests a chance to finish before Traefik stops." json:"graceTimeOut,omitempty" toml:"graceTimeOut,omitempty" yaml:"graceTimeOut,omitempty" export:"true"`
|
||||
RequestAcceptGraceTimeout ptypes.Duration `description:"Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure." json:"requestAcceptGraceTimeout,omitempty" toml:"requestAcceptGraceTimeout,omitempty" yaml:"requestAcceptGraceTimeout,omitempty" export:"true"`
|
||||
GraceTimeOut ptypes.Duration `description:"Duration to give active requests a chance to finish before Traefik stops." json:"graceTimeOut,omitempty" toml:"graceTimeOut,omitempty" yaml:"graceTimeOut,omitempty" export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (a *LifeCycle) SetDefaults() {
|
||||
a.GraceTimeOut = types.Duration(DefaultGraceTimeout)
|
||||
a.GraceTimeOut = ptypes.Duration(DefaultGraceTimeout)
|
||||
}
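A small sketch (mirroring the server tests later in this diff, and assuming the static and ptypes imports used in this file) of overriding these timeouts now that the fields are paerser durations:

func exampleTransport() *static.EntryPointsTransport {
	epConfig := &static.EntryPointsTransport{}
	epConfig.SetDefaults()

	// These fields now hold ptypes.Duration values instead of the removed pkg/types Duration.
	epConfig.RespondingTimeouts.ReadTimeout = ptypes.Duration(2 * time.Second)
	epConfig.LifeCycle.GraceTimeOut = ptypes.Duration(5 * time.Second)

	return epConfig
}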
|
||||
|
||||
// Tracing holds the tracing configuration.
|
||||
|
@ -164,7 +165,7 @@ func (t *Tracing) SetDefaults() {
|
|||
|
||||
// Providers contains providers configuration.
|
||||
type Providers struct {
|
||||
ProvidersThrottleDuration types.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiples events are sent in a short amount of time." json:"providersThrottleDuration,omitempty" toml:"providersThrottleDuration,omitempty" yaml:"providersThrottleDuration,omitempty" export:"true"`
|
||||
ProvidersThrottleDuration ptypes.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiples events are sent in a short amount of time." json:"providersThrottleDuration,omitempty" toml:"providersThrottleDuration,omitempty" yaml:"providersThrottleDuration,omitempty" export:"true"`
|
||||
|
||||
Docker *docker.Provider `description:"Enable Docker backend with default settings." json:"docker,omitempty" toml:"docker,omitempty" yaml:"docker,omitempty" export:"true" label:"allowEmpty" file:"allowEmpty"`
|
||||
File *file.Provider `description:"Enable File backend with default settings." json:"file,omitempty" toml:"file,omitempty" yaml:"file,omitempty" export:"true"`
|
||||
|
@ -207,7 +208,7 @@ func (c *Configuration) SetEffectiveConfiguration() {
|
|||
|
||||
if c.Providers.Docker != nil {
|
||||
if c.Providers.Docker.SwarmModeRefreshSeconds <= 0 {
|
||||
c.Providers.Docker.SwarmModeRefreshSeconds = types.Duration(15 * time.Second)
|
||||
c.Providers.Docker.SwarmModeRefreshSeconds = ptypes.Duration(15 * time.Second)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stvp/go-udp-testing"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestDatadog(t *testing.T) {
|
||||
|
@ -16,7 +17,7 @@ func TestDatadog(t *testing.T) {
|
|||
// This is needed to make sure that UDP Listener listens for data a bit longer, otherwise it will quit after a millisecond
|
||||
udp.Timeout = 5 * time.Second
|
||||
|
||||
datadogRegistry := RegisterDatadog(context.Background(), &types.Datadog{Address: ":18125", PushInterval: types.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
datadogRegistry := RegisterDatadog(context.Background(), &types.Datadog{Address: ":18125", PushInterval: ptypes.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
defer StopDatadog()
|
||||
|
||||
if !datadogRegistry.IsEpEnabled() || !datadogRegistry.IsSvcEnabled() {
|
||||
|
|
|
@ -13,6 +13,7 @@ import (
|
|||
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stvp/go-udp-testing"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestInfluxDB(t *testing.T) {
|
||||
|
@ -20,7 +21,7 @@ func TestInfluxDB(t *testing.T) {
|
|||
// This is needed to make sure that UDP Listener listens for data a bit longer, otherwise it will quit after a millisecond
|
||||
udp.Timeout = 5 * time.Second
|
||||
|
||||
influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ":8089", PushInterval: types.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ":8089", PushInterval: ptypes.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
defer StopInfluxDB()
|
||||
|
||||
if !influxDBRegistry.IsEpEnabled() || !influxDBRegistry.IsSvcEnabled() {
|
||||
|
@ -79,7 +80,7 @@ func TestInfluxDBHTTP(t *testing.T) {
|
|||
}))
|
||||
defer ts.Close()
|
||||
|
||||
influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ts.URL, Protocol: "http", PushInterval: types.Duration(time.Second), Database: "test", RetentionPolicy: "autogen", AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
influxDBRegistry := RegisterInfluxDB(context.Background(), &types.InfluxDB{Address: ts.URL, Protocol: "http", PushInterval: ptypes.Duration(time.Second), Database: "test", RetentionPolicy: "autogen", AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
defer StopInfluxDB()
|
||||
|
||||
if !influxDBRegistry.IsEpEnabled() || !influxDBRegistry.IsSvcEnabled() {
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stvp/go-udp-testing"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestStatsD(t *testing.T) {
|
||||
|
@ -16,7 +17,7 @@ func TestStatsD(t *testing.T) {
|
|||
// This is needed to make sure that UDP Listener listens for data a bit longer, otherwise it will quit after a millisecond
|
||||
udp.Timeout = 5 * time.Second
|
||||
|
||||
statsdRegistry := RegisterStatsd(context.Background(), &types.Statsd{Address: ":18125", PushInterval: types.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
statsdRegistry := RegisterStatsd(context.Background(), &types.Statsd{Address: ":18125", PushInterval: ptypes.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true})
|
||||
defer StopStatsd()
|
||||
|
||||
if !statsdRegistry.IsEpEnabled() || !statsdRegistry.IsSvcEnabled() {
|
||||
|
@ -56,7 +57,7 @@ func TestStatsDWithPrefix(t *testing.T) {
|
|||
// This is needed to make sure that UDP Listener listens for data a bit longer, otherwise it will quit after a millisecond
|
||||
udp.Timeout = 5 * time.Second
|
||||
|
||||
statsdRegistry := RegisterStatsd(context.Background(), &types.Statsd{Address: ":18125", PushInterval: types.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true, Prefix: "testPrefix"})
|
||||
statsdRegistry := RegisterStatsd(context.Background(), &types.Statsd{Address: ":18125", PushInterval: ptypes.Duration(time.Second), AddEntryPointsLabels: true, AddServicesLabels: true, Prefix: "testPrefix"})
|
||||
defer StopStatsd()
|
||||
|
||||
if !statsdRegistry.IsEpEnabled() || !statsdRegistry.IsSvcEnabled() {
|
||||
|
|
|
@ -18,6 +18,7 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/sirupsen/logrus"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
type key string
|
||||
|
@ -369,7 +370,7 @@ func (h *Handler) keepAccessLog(statusCode, retryAttempts int, duration time.Dur
|
|||
return true
|
||||
}
|
||||
|
||||
if h.config.Filters.MinDuration > 0 && (types.Duration(duration) > h.config.Filters.MinDuration) {
|
||||
if h.config.Filters.MinDuration > 0 && (ptypes.Duration(duration) > h.config.Filters.MinDuration) {
|
||||
return true
|
||||
}
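A small worked example of the comparison above (the values are illustrative, not taken from the tests), assuming the ptypes import added in this hunk:

minDuration := ptypes.Duration(time.Second)      // configured Filters.MinDuration
took := ptypes.Duration(1500 * time.Millisecond) // measured request duration

// Mirrors the condition above: the line is kept because the request outlasted the minimum.
keep := minDuration > 0 && took > minDuration // true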
|
||||
|
||||
|
|
|
@ -18,6 +18,7 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -527,7 +528,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) {
|
|||
FilePath: "",
|
||||
Format: CommonFormat,
|
||||
Filters: &types.AccessLogFilters{
|
||||
MinDuration: types.Duration(1 * time.Hour),
|
||||
MinDuration: ptypes.Duration(1 * time.Hour),
|
||||
},
|
||||
},
|
||||
expectedLog: ``,
|
||||
|
@ -538,7 +539,7 @@ func TestNewLogHandlerOutputStdout(t *testing.T) {
|
|||
FilePath: "",
|
||||
Format: CommonFormat,
|
||||
Filters: &types.AccessLogFilters{
|
||||
MinDuration: types.Duration(1 * time.Millisecond),
|
||||
MinDuration: ptypes.Duration(1 * time.Millisecond),
|
||||
},
|
||||
},
|
||||
expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testRouter" "http://127.0.0.1/testService" 1ms`,
|
||||
|
|
|
@ -10,9 +10,9 @@ import (
|
|||
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/testhelpers"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
"github.com/vulcand/oxy/utils"
|
||||
)
|
||||
|
||||
|
@ -37,7 +37,7 @@ func TestNewRateLimiter(t *testing.T) {
|
|||
desc: "maxDelay computation, low rate regime",
|
||||
config: dynamic.RateLimit{
|
||||
Average: 2,
|
||||
Period: types.Duration(10 * time.Second),
|
||||
Period: ptypes.Duration(10 * time.Second),
|
||||
Burst: 10,
|
||||
},
|
||||
expectedMaxDelay: 500 * time.Millisecond,
|
||||
|
@ -183,7 +183,7 @@ func TestRateLimit(t *testing.T) {
|
|||
desc: "lower than 1/s",
|
||||
config: dynamic.RateLimit{
|
||||
Average: 5,
|
||||
Period: types.Duration(10 * time.Second),
|
||||
Period: ptypes.Duration(10 * time.Second),
|
||||
},
|
||||
loadDuration: 2 * time.Second,
|
||||
incomingLoad: 100,
|
||||
|
@ -193,7 +193,7 @@ func TestRateLimit(t *testing.T) {
|
|||
desc: "lower than 1/s, longer",
|
||||
config: dynamic.RateLimit{
|
||||
Average: 5,
|
||||
Period: types.Duration(10 * time.Second),
|
||||
Period: ptypes.Duration(10 * time.Second),
|
||||
},
|
||||
loadDuration: time.Minute,
|
||||
incomingLoad: 100,
|
||||
|
@ -203,7 +203,7 @@ func TestRateLimit(t *testing.T) {
|
|||
desc: "lower than 1/s, longer, harsher",
|
||||
config: dynamic.RateLimit{
|
||||
Average: 1,
|
||||
Period: types.Duration(time.Minute),
|
||||
Period: ptypes.Duration(time.Minute),
|
||||
},
|
||||
loadDuration: time.Minute,
|
||||
incomingLoad: 100,
|
||||
|
@ -213,7 +213,7 @@ func TestRateLimit(t *testing.T) {
|
|||
desc: "period below 1 second",
|
||||
config: dynamic.RateLimit{
|
||||
Average: 50,
|
||||
Period: types.Duration(500 * time.Millisecond),
|
||||
Period: ptypes.Duration(500 * time.Millisecond),
|
||||
},
|
||||
loadDuration: 2 * time.Second,
|
||||
incomingLoad: 300,
|
||||
|
|
|
@ -25,6 +25,7 @@ import (
|
|||
"github.com/go-acme/lego/v3/lego"
|
||||
"github.com/go-acme/lego/v3/providers/dns"
|
||||
"github.com/go-acme/lego/v3/registration"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
// oscpMustStaple enables OSCP stapling as from https://github.com/go-acme/lego/issues/270.
|
||||
|
@ -63,10 +64,10 @@ type Certificate struct {
|
|||
|
||||
// DNSChallenge contains DNS challenge Configuration.
|
||||
type DNSChallenge struct {
|
||||
Provider string `description:"Use a DNS-01 based challenge provider rather than HTTPS." json:"provider,omitempty" toml:"provider,omitempty" yaml:"provider,omitempty"`
|
||||
DelayBeforeCheck types.Duration `description:"Assume DNS propagates after a delay in seconds rather than finding and querying nameservers." json:"delayBeforeCheck,omitempty" toml:"delayBeforeCheck,omitempty" yaml:"delayBeforeCheck,omitempty"`
|
||||
Resolvers []string `description:"Use following DNS servers to resolve the FQDN authority." json:"resolvers,omitempty" toml:"resolvers,omitempty" yaml:"resolvers,omitempty"`
|
||||
DisablePropagationCheck bool `description:"Disable the DNS propagation checks before notifying ACME that the DNS challenge is ready. [not recommended]" json:"disablePropagationCheck,omitempty" toml:"disablePropagationCheck,omitempty" yaml:"disablePropagationCheck,omitempty"`
|
||||
Provider string `description:"Use a DNS-01 based challenge provider rather than HTTPS." json:"provider,omitempty" toml:"provider,omitempty" yaml:"provider,omitempty"`
|
||||
DelayBeforeCheck ptypes.Duration `description:"Assume DNS propagates after a delay in seconds rather than finding and querying nameservers." json:"delayBeforeCheck,omitempty" toml:"delayBeforeCheck,omitempty" yaml:"delayBeforeCheck,omitempty"`
|
||||
Resolvers []string `description:"Use following DNS servers to resolve the FQDN authority." json:"resolvers,omitempty" toml:"resolvers,omitempty" yaml:"resolvers,omitempty"`
|
||||
DisablePropagationCheck bool `description:"Disable the DNS propagation checks before notifying ACME that the DNS challenge is ready. [not recommended]" json:"disablePropagationCheck,omitempty" toml:"disablePropagationCheck,omitempty" yaml:"disablePropagationCheck,omitempty"`
|
||||
}
|
||||
|
||||
// HTTPChallenge contains HTTP challenge Configuration.
|
||||
|
|
|
@ -17,6 +17,7 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/hashicorp/consul/api"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
// DefaultTemplateRule The default template for the default rule.
|
||||
|
@ -41,7 +42,7 @@ type Provider struct {
|
|||
Constraints string `description:"Constraints is an expression that Traefik matches against the container's labels to determine whether to create any route for that container." json:"constraints,omitempty" toml:"constraints,omitempty" yaml:"constraints,omitempty" export:"true"`
|
||||
Endpoint *EndpointConfig `description:"Consul endpoint settings" json:"endpoint,omitempty" toml:"endpoint,omitempty" yaml:"endpoint,omitempty" export:"true"`
|
||||
Prefix string `description:"Prefix for consul service tags. Default 'traefik'" json:"prefix,omitempty" toml:"prefix,omitempty" yaml:"prefix,omitempty" export:"true"`
|
||||
RefreshInterval types.Duration `description:"Interval for check Consul API. Default 100ms" json:"refreshInterval,omitempty" toml:"refreshInterval,omitempty" yaml:"refreshInterval,omitempty" export:"true"`
|
||||
RefreshInterval ptypes.Duration `description:"Interval for check Consul API. Default 100ms" json:"refreshInterval,omitempty" toml:"refreshInterval,omitempty" yaml:"refreshInterval,omitempty" export:"true"`
|
||||
RequireConsistent bool `description:"Forces the read to be fully consistent." json:"requireConsistent,omitempty" toml:"requireConsistent,omitempty" yaml:"requireConsistent,omitempty" export:"true"`
|
||||
Stale bool `description:"Use stale consistency for catalog reads." json:"stale,omitempty" toml:"stale,omitempty" yaml:"stale,omitempty" export:"true"`
|
||||
Cache bool `description:"Use local agent caching for catalog reads." json:"cache,omitempty" toml:"cache,omitempty" yaml:"cache,omitempty" export:"true"`
|
||||
|
@ -60,7 +61,7 @@ type EndpointConfig struct {
|
|||
Token string `description:"Token is used to provide a per-request ACL token which overrides the agent's default token" json:"token,omitempty" toml:"token,omitempty" yaml:"token,omitempty" export:"true"`
|
||||
TLS *types.ClientTLS `description:"Enable TLS support." json:"tls,omitempty" toml:"tls,omitempty" yaml:"tls,omitempty" export:"true"`
|
||||
HTTPAuth *EndpointHTTPAuthConfig `description:"Auth info to use for http access" json:"httpAuth,omitempty" toml:"httpAuth,omitempty" yaml:"httpAuth,omitempty" export:"true"`
|
||||
EndpointWaitTime types.Duration `description:"WaitTime limits how long a Watch will block. If not provided, the agent default values will be used" json:"endpointWaitTime,omitempty" toml:"endpointWaitTime,omitempty" yaml:"endpointWaitTime,omitempty" export:"true"`
|
||||
EndpointWaitTime ptypes.Duration `description:"WaitTime limits how long a Watch will block. If not provided, the agent default values will be used" json:"endpointWaitTime,omitempty" toml:"endpointWaitTime,omitempty" yaml:"endpointWaitTime,omitempty" export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
|
@ -79,7 +80,7 @@ func (p *Provider) SetDefaults() {
|
|||
endpoint := &EndpointConfig{}
|
||||
endpoint.SetDefaults()
|
||||
p.Endpoint = endpoint
|
||||
p.RefreshInterval = types.Duration(15 * time.Second)
|
||||
p.RefreshInterval = ptypes.Duration(15 * time.Second)
|
||||
p.Prefix = "traefik"
|
||||
p.ExposedByDefault = true
|
||||
p.DefaultRule = DefaultTemplateRule
|
||||
|
|
|
@ -29,6 +29,7 @@ import (
|
|||
"github.com/docker/docker/client"
|
||||
"github.com/docker/go-connections/nat"
|
||||
"github.com/docker/go-connections/sockets"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -55,7 +56,7 @@ type Provider struct {
|
|||
UseBindPortIP bool `description:"Use the ip address from the bound port, rather than from the inner network." json:"useBindPortIP,omitempty" toml:"useBindPortIP,omitempty" yaml:"useBindPortIP,omitempty" export:"true"`
|
||||
SwarmMode bool `description:"Use Docker on Swarm Mode." json:"swarmMode,omitempty" toml:"swarmMode,omitempty" yaml:"swarmMode,omitempty" export:"true"`
|
||||
Network string `description:"Default Docker network used." json:"network,omitempty" toml:"network,omitempty" yaml:"network,omitempty" export:"true"`
|
||||
SwarmModeRefreshSeconds types.Duration `description:"Polling interval for swarm mode." json:"swarmModeRefreshSeconds,omitempty" toml:"swarmModeRefreshSeconds,omitempty" yaml:"swarmModeRefreshSeconds,omitempty" export:"true"`
|
||||
SwarmModeRefreshSeconds ptypes.Duration `description:"Polling interval for swarm mode." json:"swarmModeRefreshSeconds,omitempty" toml:"swarmModeRefreshSeconds,omitempty" yaml:"swarmModeRefreshSeconds,omitempty" export:"true"`
|
||||
defaultRuleTpl *template.Template
|
||||
}
|
||||
|
||||
|
@ -65,7 +66,7 @@ func (p *Provider) SetDefaults() {
|
|||
p.ExposedByDefault = true
|
||||
p.Endpoint = "unix:///var/run/docker.sock"
|
||||
p.SwarmMode = false
|
||||
p.SwarmModeRefreshSeconds = types.Duration(15 * time.Second)
|
||||
p.SwarmModeRefreshSeconds = ptypes.Duration(15 * time.Second)
|
||||
p.DefaultRule = DefaultTemplateRule
|
||||
}
|
||||
|
||||
|
|
|
@ -13,11 +13,11 @@ import (
|
|||
|
||||
"github.com/Masterminds/sprig"
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/config/file"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/containous/traefik/v2/pkg/provider"
|
||||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/tls"
|
||||
"github.com/traefik/paerser/file"
|
||||
"gopkg.in/fsnotify.v1"
|
||||
)
|
||||
|
||||
|
|
|
@ -10,13 +10,14 @@ import (
|
|||
|
||||
"github.com/cenkalti/backoff/v4"
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/config/file"
|
||||
"github.com/containous/traefik/v2/pkg/job"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/containous/traefik/v2/pkg/provider"
|
||||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/tls"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/traefik/paerser/file"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
var _ provider.Provider = (*Provider)(nil)
|
||||
|
@ -24,8 +25,8 @@ var _ provider.Provider = (*Provider)(nil)
|
|||
// Provider is a provider.Provider implementation that queries an HTTP(s) endpoint for a configuration.
|
||||
type Provider struct {
|
||||
Endpoint string `description:"Load configuration from this endpoint." json:"endpoint" toml:"endpoint" yaml:"endpoint" export:"true"`
|
||||
PollInterval types.Duration `description:"Polling interval for endpoint." json:"pollInterval,omitempty" toml:"pollInterval,omitempty" yaml:"pollInterval,omitempty"`
|
||||
PollTimeout types.Duration `description:"Polling timeout for endpoint." json:"pollTimeout,omitempty" toml:"pollTimeout,omitempty" yaml:"pollTimeout,omitempty"`
|
||||
PollInterval ptypes.Duration `description:"Polling interval for endpoint." json:"pollInterval,omitempty" toml:"pollInterval,omitempty" yaml:"pollInterval,omitempty"`
|
||||
PollTimeout ptypes.Duration `description:"Polling timeout for endpoint." json:"pollTimeout,omitempty" toml:"pollTimeout,omitempty" yaml:"pollTimeout,omitempty"`
|
||||
TLS *types.ClientTLS `description:"Enable TLS support." json:"tls,omitempty" toml:"tls,omitempty" yaml:"tls,omitempty" export:"true"`
|
||||
httpClient *http.Client
|
||||
lastConfigurationHash uint64
|
||||
|
@ -33,8 +34,8 @@ type Provider struct {
|
|||
|
||||
// SetDefaults sets the default values.
|
||||
func (p *Provider) SetDefaults() {
|
||||
p.PollInterval = types.Duration(5 * time.Second)
|
||||
p.PollTimeout = types.Duration(5 * time.Second)
|
||||
p.PollInterval = ptypes.Duration(5 * time.Second)
|
||||
p.PollTimeout = ptypes.Duration(5 * time.Second)
|
||||
}
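A construction sketch (mirroring TestProvider_fetchConfigurationData below) of the HTTP provider with explicit paerser durations:

provider := Provider{
	Endpoint:     "http://localhost:8080",
	PollInterval: ptypes.Duration(1 * time.Second),
	PollTimeout:  ptypes.Duration(1 * time.Second),
}

// Init validates the endpoint and polling settings before Provide starts polling.
if err := provider.Init(); err != nil {
	panic(err)
}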
|
||||
|
||||
// Init the provider.
|
||||
|
|
|
@ -11,16 +11,16 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/tls"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestProvider_Init(t *testing.T) {
|
||||
tests := []struct {
|
||||
desc string
|
||||
endpoint string
|
||||
pollInterval types.Duration
|
||||
pollInterval ptypes.Duration
|
||||
expErr bool
|
||||
}{
|
||||
{
|
||||
|
@ -35,7 +35,7 @@ func TestProvider_Init(t *testing.T) {
|
|||
{
|
||||
desc: "should not return an error",
|
||||
endpoint: "http://localhost:8080",
|
||||
pollInterval: types.Duration(time.Second),
|
||||
pollInterval: ptypes.Duration(time.Second),
|
||||
expErr: false,
|
||||
},
|
||||
}
|
||||
|
@ -63,8 +63,8 @@ func TestProvider_SetDefaults(t *testing.T) {
|
|||
|
||||
provider.SetDefaults()
|
||||
|
||||
assert.Equal(t, provider.PollInterval, types.Duration(5*time.Second))
|
||||
assert.Equal(t, provider.PollTimeout, types.Duration(5*time.Second))
|
||||
assert.Equal(t, provider.PollInterval, ptypes.Duration(5*time.Second))
|
||||
assert.Equal(t, provider.PollTimeout, ptypes.Duration(5*time.Second))
|
||||
}
|
||||
|
||||
func TestProvider_fetchConfigurationData(t *testing.T) {
|
||||
|
@ -98,8 +98,8 @@ func TestProvider_fetchConfigurationData(t *testing.T) {
|
|||
|
||||
provider := Provider{
|
||||
Endpoint: server.URL,
|
||||
PollInterval: types.Duration(1 * time.Second),
|
||||
PollTimeout: types.Duration(1 * time.Second),
|
||||
PollInterval: ptypes.Duration(1 * time.Second),
|
||||
PollTimeout: ptypes.Duration(1 * time.Second),
|
||||
}
|
||||
|
||||
err := provider.Init()
|
||||
|
@ -181,8 +181,8 @@ func TestProvider_Provide(t *testing.T) {
|
|||
|
||||
provider := Provider{
|
||||
Endpoint: server.URL,
|
||||
PollTimeout: types.Duration(1 * time.Second),
|
||||
PollInterval: types.Duration(100 * time.Millisecond),
|
||||
PollTimeout: ptypes.Duration(1 * time.Second),
|
||||
PollInterval: ptypes.Duration(100 * time.Millisecond),
|
||||
}
|
||||
|
||||
err := provider.Init()
|
||||
|
@ -235,8 +235,8 @@ func TestProvider_ProvideConfigurationOnlyOnceIfUnchanged(t *testing.T) {
|
|||
|
||||
provider := Provider{
|
||||
Endpoint: server.URL + "/endpoint",
|
||||
PollTimeout: types.Duration(1 * time.Second),
|
||||
PollInterval: types.Duration(100 * time.Millisecond),
|
||||
PollTimeout: ptypes.Duration(1 * time.Second),
|
||||
PollInterval: ptypes.Duration(100 * time.Millisecond),
|
||||
}
|
||||
|
||||
err := provider.Init()
|
||||
|
|
|
@ -20,8 +20,8 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/provider/kubernetes/crd/traefik/v1alpha1"
|
||||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/tls"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/mitchellh/hashstructure"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
"k8s.io/apimachinery/pkg/labels"
|
||||
)
|
||||
|
@ -38,14 +38,14 @@ const (
|
|||
|
||||
// Provider holds configurations of the provider.
|
||||
type Provider struct {
|
||||
Endpoint string `description:"Kubernetes server endpoint (required for external cluster client)." json:"endpoint,omitempty" toml:"endpoint,omitempty" yaml:"endpoint,omitempty"`
|
||||
Token string `description:"Kubernetes bearer token (not needed for in-cluster client)." json:"token,omitempty" toml:"token,omitempty" yaml:"token,omitempty"`
|
||||
CertAuthFilePath string `description:"Kubernetes certificate authority file path (not needed for in-cluster client)." json:"certAuthFilePath,omitempty" toml:"certAuthFilePath,omitempty" yaml:"certAuthFilePath,omitempty"`
|
||||
DisablePassHostHeaders bool `description:"Kubernetes disable PassHost Headers." json:"disablePassHostHeaders,omitempty" toml:"disablePassHostHeaders,omitempty" yaml:"disablePassHostHeaders,omitempty" export:"true"`
|
||||
Namespaces []string `description:"Kubernetes namespaces." json:"namespaces,omitempty" toml:"namespaces,omitempty" yaml:"namespaces,omitempty" export:"true"`
|
||||
LabelSelector string `description:"Kubernetes label selector to use." json:"labelSelector,omitempty" toml:"labelSelector,omitempty" yaml:"labelSelector,omitempty" export:"true"`
|
||||
IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for." json:"ingressClass,omitempty" toml:"ingressClass,omitempty" yaml:"ingressClass,omitempty" export:"true"`
|
||||
ThrottleDuration types.Duration `description:"Ingress refresh throttle duration" json:"throttleDuration,omitempty" toml:"throttleDuration,omitempty" yaml:"throttleDuration,omitempty"`
|
||||
Endpoint string `description:"Kubernetes server endpoint (required for external cluster client)." json:"endpoint,omitempty" toml:"endpoint,omitempty" yaml:"endpoint,omitempty"`
|
||||
Token string `description:"Kubernetes bearer token (not needed for in-cluster client)." json:"token,omitempty" toml:"token,omitempty" yaml:"token,omitempty"`
|
||||
CertAuthFilePath string `description:"Kubernetes certificate authority file path (not needed for in-cluster client)." json:"certAuthFilePath,omitempty" toml:"certAuthFilePath,omitempty" yaml:"certAuthFilePath,omitempty"`
|
||||
DisablePassHostHeaders bool `description:"Kubernetes disable PassHost Headers." json:"disablePassHostHeaders,omitempty" toml:"disablePassHostHeaders,omitempty" yaml:"disablePassHostHeaders,omitempty" export:"true"`
|
||||
Namespaces []string `description:"Kubernetes namespaces." json:"namespaces,omitempty" toml:"namespaces,omitempty" yaml:"namespaces,omitempty" export:"true"`
|
||||
LabelSelector string `description:"Kubernetes label selector to use." json:"labelSelector,omitempty" toml:"labelSelector,omitempty" yaml:"labelSelector,omitempty" export:"true"`
|
||||
IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for." json:"ingressClass,omitempty" toml:"ingressClass,omitempty" yaml:"ingressClass,omitempty" export:"true"`
|
||||
ThrottleDuration ptypes.Duration `description:"Ingress refresh throttle duration" json:"throttleDuration,omitempty" toml:"throttleDuration,omitempty" yaml:"throttleDuration,omitempty"`
|
||||
lastConfiguration safe.Safe
|
||||
}
|
||||
|
||||
|
|
|
@ -17,8 +17,8 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/provider"
|
||||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/tls"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/mitchellh/hashstructure"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
"k8s.io/api/networking/v1beta1"
|
||||
networkingv1beta1 "k8s.io/api/networking/v1beta1"
|
||||
|
@ -43,7 +43,7 @@ type Provider struct {
|
|||
LabelSelector string `description:"Kubernetes Ingress label selector to use." json:"labelSelector,omitempty" toml:"labelSelector,omitempty" yaml:"labelSelector,omitempty" export:"true"`
|
||||
IngressClass string `description:"Value of kubernetes.io/ingress.class annotation to watch for." json:"ingressClass,omitempty" toml:"ingressClass,omitempty" yaml:"ingressClass,omitempty" export:"true"`
|
||||
IngressEndpoint *EndpointIngress `description:"Kubernetes Ingress Endpoint." json:"ingressEndpoint,omitempty" toml:"ingressEndpoint,omitempty" yaml:"ingressEndpoint,omitempty"`
|
||||
ThrottleDuration types.Duration `description:"Ingress refresh throttle duration" json:"throttleDuration,omitempty" toml:"throttleDuration,omitempty" yaml:"throttleDuration,omitempty"`
|
||||
ThrottleDuration ptypes.Duration `description:"Ingress refresh throttle duration" json:"throttleDuration,omitempty" toml:"throttleDuration,omitempty" yaml:"throttleDuration,omitempty"`
|
||||
lastConfiguration safe.Safe
|
||||
}
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func Test_buildConfiguration(t *testing.T) {
|
||||
|
@ -346,7 +347,7 @@ func Test_buildConfiguration(t *testing.T) {
|
|||
RateLimit: &dynamic.RateLimit{
|
||||
Average: 42,
|
||||
Burst: 42,
|
||||
Period: types.Duration(time.Second),
|
||||
Period: ptypes.Duration(time.Second),
|
||||
SourceCriterion: &dynamic.SourceCriterion{
|
||||
IPStrategy: &dynamic.IPStrategy{
|
||||
Depth: 42,
|
||||
|
|
|
@ -18,6 +18,7 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/gambol99/go-marathon"
|
||||
"github.com/sirupsen/logrus"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -53,10 +54,10 @@ type Provider struct {
|
|||
ExposedByDefault bool `description:"Expose Marathon apps by default." json:"exposedByDefault,omitempty" toml:"exposedByDefault,omitempty" yaml:"exposedByDefault,omitempty" export:"true"`
|
||||
DCOSToken string `description:"DCOSToken for DCOS environment, This will override the Authorization header." json:"dcosToken,omitempty" toml:"dcosToken,omitempty" yaml:"dcosToken,omitempty" export:"true"`
|
||||
TLS *types.ClientTLS `description:"Enable TLS support." json:"tls,omitempty" toml:"tls,omitempty" yaml:"tls,omitempty" export:"true"`
|
||||
DialerTimeout types.Duration `description:"Set a dialer timeout for Marathon." json:"dialerTimeout,omitempty" toml:"dialerTimeout,omitempty" yaml:"dialerTimeout,omitempty" export:"true"`
|
||||
ResponseHeaderTimeout types.Duration `description:"Set a response header timeout for Marathon." json:"responseHeaderTimeout,omitempty" toml:"responseHeaderTimeout,omitempty" yaml:"responseHeaderTimeout,omitempty" export:"true"`
|
||||
TLSHandshakeTimeout types.Duration `description:"Set a TLS handshake timeout for Marathon." json:"tlsHandshakeTimeout,omitempty" toml:"tlsHandshakeTimeout,omitempty" yaml:"tlsHandshakeTimeout,omitempty" export:"true"`
|
||||
KeepAlive types.Duration `description:"Set a TCP Keep Alive time." json:"keepAlive,omitempty" toml:"keepAlive,omitempty" yaml:"keepAlive,omitempty" export:"true"`
|
||||
DialerTimeout ptypes.Duration `description:"Set a dialer timeout for Marathon." json:"dialerTimeout,omitempty" toml:"dialerTimeout,omitempty" yaml:"dialerTimeout,omitempty" export:"true"`
|
||||
ResponseHeaderTimeout ptypes.Duration `description:"Set a response header timeout for Marathon." json:"responseHeaderTimeout,omitempty" toml:"responseHeaderTimeout,omitempty" yaml:"responseHeaderTimeout,omitempty" export:"true"`
|
||||
TLSHandshakeTimeout ptypes.Duration `description:"Set a TLS handshake timeout for Marathon." json:"tlsHandshakeTimeout,omitempty" toml:"tlsHandshakeTimeout,omitempty" yaml:"tlsHandshakeTimeout,omitempty" export:"true"`
|
||||
KeepAlive ptypes.Duration `description:"Set a TCP Keep Alive time." json:"keepAlive,omitempty" toml:"keepAlive,omitempty" yaml:"keepAlive,omitempty" export:"true"`
|
||||
ForceTaskHostname bool `description:"Force to use the task's hostname." json:"forceTaskHostname,omitempty" toml:"forceTaskHostname,omitempty" yaml:"forceTaskHostname,omitempty" export:"true"`
|
||||
Basic *Basic `description:"Enable basic authentication." json:"basic,omitempty" toml:"basic,omitempty" yaml:"basic,omitempty" export:"true"`
|
||||
RespectReadinessChecks bool `description:"Filter out tasks with non-successful readiness checks during deployments." json:"respectReadinessChecks,omitempty" toml:"respectReadinessChecks,omitempty" yaml:"respectReadinessChecks,omitempty" export:"true"`
|
||||
|
@ -70,10 +71,10 @@ func (p *Provider) SetDefaults() {
|
|||
p.Watch = true
|
||||
p.Endpoint = "http://127.0.0.1:8080"
|
||||
p.ExposedByDefault = true
|
||||
p.DialerTimeout = types.Duration(5 * time.Second)
|
||||
p.ResponseHeaderTimeout = types.Duration(60 * time.Second)
|
||||
p.TLSHandshakeTimeout = types.Duration(5 * time.Second)
|
||||
p.KeepAlive = types.Duration(10 * time.Second)
|
||||
p.DialerTimeout = ptypes.Duration(5 * time.Second)
|
||||
p.ResponseHeaderTimeout = ptypes.Duration(60 * time.Second)
|
||||
p.TLSHandshakeTimeout = ptypes.Duration(5 * time.Second)
|
||||
p.KeepAlive = ptypes.Duration(10 * time.Second)
|
||||
p.DefaultRule = DefaultTemplateRule
|
||||
}
|
||||
|
||||
|
|
|
@ -13,9 +13,9 @@ import (
|
|||
|
||||
"github.com/containous/traefik/v2/pkg/config/static"
|
||||
"github.com/containous/traefik/v2/pkg/tcp"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestShutdownHijacked(t *testing.T) {
|
||||
|
@ -65,7 +65,7 @@ func testShutdown(t *testing.T, router *tcp.Router) {
|
|||
epConfig.SetDefaults()
|
||||
|
||||
epConfig.LifeCycle.RequestAcceptGraceTimeout = 0
|
||||
epConfig.LifeCycle.GraceTimeOut = types.Duration(5 * time.Second)
|
||||
epConfig.LifeCycle.GraceTimeOut = ptypes.Duration(5 * time.Second)
|
||||
|
||||
entryPoint, err := NewTCPEntryPoint(context.Background(), &static.EntryPoint{
|
||||
// We explicitly use an IPV4 address because on Alpine, with an IPV6 address
|
||||
|
@ -150,7 +150,7 @@ func startEntrypoint(entryPoint *TCPEntryPoint, router *tcp.Router) (net.Conn, e
|
|||
func TestReadTimeoutWithoutFirstByte(t *testing.T) {
|
||||
epConfig := &static.EntryPointsTransport{}
|
||||
epConfig.SetDefaults()
|
||||
epConfig.RespondingTimeouts.ReadTimeout = types.Duration(2 * time.Second)
|
||||
epConfig.RespondingTimeouts.ReadTimeout = ptypes.Duration(2 * time.Second)
|
||||
|
||||
entryPoint, err := NewTCPEntryPoint(context.Background(), &static.EntryPoint{
|
||||
Address: ":0",
|
||||
|
@ -186,7 +186,7 @@ func TestReadTimeoutWithoutFirstByte(t *testing.T) {
|
|||
func TestReadTimeoutWithFirstByte(t *testing.T) {
|
||||
epConfig := &static.EntryPointsTransport{}
|
||||
epConfig.SetDefaults()
|
||||
epConfig.RespondingTimeouts.ReadTimeout = types.Duration(2 * time.Second)
|
||||
epConfig.RespondingTimeouts.ReadTimeout = ptypes.Duration(2 * time.Second)
|
||||
|
||||
entryPoint, err := NewTCPEntryPoint(context.Background(), &static.EntryPoint{
|
||||
Address: ":0",
|
||||
|
|
|
@ -8,9 +8,9 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/containous/traefik/v2/pkg/config/static"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/containous/traefik/v2/pkg/udp"
|
||||
"github.com/stretchr/testify/require"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
func TestShutdownUDPConn(t *testing.T) {
|
||||
|
@ -18,7 +18,7 @@ func TestShutdownUDPConn(t *testing.T) {
|
|||
Address: ":0",
|
||||
Transport: &static.EntryPointsTransport{
|
||||
LifeCycle: &static.LifeCycle{
|
||||
GraceTimeOut: types.Duration(5 * time.Second),
|
||||
GraceTimeOut: ptypes.Duration(5 * time.Second),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
|
|
@ -12,7 +12,7 @@ import (
|
|||
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
ptypes "github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
// StatusClientClosedRequest non-standard HTTP status code for client disconnection.
|
||||
|
@ -22,7 +22,7 @@ const StatusClientClosedRequest = 499
|
|||
const StatusClientClosedRequestText = "Client Closed Request"
|
||||
|
||||
func buildProxy(passHostHeader *bool, responseForwarding *dynamic.ResponseForwarding, defaultRoundTripper http.RoundTripper, bufferPool httputil.BufferPool, responseModifier func(*http.Response) error) (http.Handler, error) {
|
||||
var flushInterval types.Duration
|
||||
var flushInterval ptypes.Duration
|
||||
if responseForwarding != nil {
|
||||
err := flushInterval.Set(responseForwarding.FlushInterval)
|
||||
if err != nil {
|
||||
|
@ -30,7 +30,7 @@ func buildProxy(passHostHeader *bool, responseForwarding *dynamic.ResponseForwar
|
|||
}
|
||||
}
|
||||
if flushInterval == 0 {
|
||||
flushInterval = types.Duration(100 * time.Millisecond)
|
||||
flushInterval = ptypes.Duration(100 * time.Millisecond)
|
||||
}
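A standalone sketch of the FlushInterval handling above: the configured string is parsed with ptypes.Duration.Set, and an unset value falls back to 100ms.

var flushInterval ptypes.Duration

// "1s" is parsed by ptypes.Duration.Set (time.ParseDuration rules, bare digits read as seconds).
if err := flushInterval.Set("1s"); err != nil {
	panic(err)
}

if flushInterval == 0 {
	flushInterval = ptypes.Duration(100 * time.Millisecond)
}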
|
||||
|
||||
proxy := &httputil.ReverseProxy{
|
||||
|
|
|
@ -1,61 +0,0 @@
package types

import (
	"encoding/json"
	"strconv"
	"time"
)

// Duration is a custom type suitable for parsing duration values.
// It supports `time.ParseDuration`-compatible values and suffix-less digits; in
// the latter case, seconds are assumed.
type Duration time.Duration

// Set sets the duration from the given string value.
func (d *Duration) Set(s string) error {
	if v, err := strconv.ParseInt(s, 10, 64); err == nil {
		*d = Duration(time.Duration(v) * time.Second)
		return nil
	}

	v, err := time.ParseDuration(s)
	*d = Duration(v)
	return err
}

// String returns a string representation of the duration value.
func (d Duration) String() string { return (time.Duration)(d).String() }

// MarshalText serialize the given duration value into a text.
func (d Duration) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}

// UnmarshalText deserializes the given text into a duration value.
// It is meant to support TOML decoding of durations.
func (d *Duration) UnmarshalText(text []byte) error {
	return d.Set(string(text))
}

// MarshalJSON serializes the given duration value.
func (d Duration) MarshalJSON() ([]byte, error) {
	return json.Marshal(time.Duration(d))
}

// UnmarshalJSON deserializes the given text into a duration value.
func (d *Duration) UnmarshalJSON(text []byte) error {
	if v, err := strconv.ParseInt(string(text), 10, 64); err == nil {
		*d = Duration(time.Duration(v))
		return nil
	}

	// We use json unmarshal on value because we have the quoted version
	var value string
	err := json.Unmarshal(text, &value)
	if err != nil {
		return err
	}
	v, err := time.ParseDuration(value)
	*d = Duration(v)
	return err
}
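A standalone sketch of the Set behaviour documented above, assuming the moved github.com/traefik/paerser/types.Duration keeps the same semantics (bare integers are read as seconds, everything else follows time.ParseDuration):

package main

import (
	"fmt"

	ptypes "github.com/traefik/paerser/types"
)

func main() {
	var d ptypes.Duration

	_ = d.Set("90") // suffix-less digits: 90 seconds
	fmt.Println(d)  // 1m30s

	_ = d.Set("250ms") // time.ParseDuration-compatible value
	fmt.Println(d)     // 250ms
}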
@ -1,5 +1,7 @@
|
|||
package types
|
||||
|
||||
import "github.com/traefik/paerser/types"
|
||||
|
||||
const (
|
||||
// AccessLogKeep is the keep string value.
|
||||
AccessLogKeep = "keep"
|
||||
|
@ -50,9 +52,9 @@ func (l *AccessLog) SetDefaults() {
|
|||
|
||||
// AccessLogFilters holds filters configuration.
|
||||
type AccessLogFilters struct {
|
||||
StatusCodes []string `description:"Keep access logs with status codes in the specified range." json:"statusCodes,omitempty" toml:"statusCodes,omitempty" yaml:"statusCodes,omitempty" export:"true"`
|
||||
RetryAttempts bool `description:"Keep access logs when at least one retry happened." json:"retryAttempts,omitempty" toml:"retryAttempts,omitempty" yaml:"retryAttempts,omitempty" export:"true"`
|
||||
MinDuration Duration `description:"Keep access logs when request took longer than the specified duration." json:"minDuration,omitempty" toml:"minDuration,omitempty" yaml:"minDuration,omitempty" export:"true"`
|
||||
StatusCodes []string `description:"Keep access logs with status codes in the specified range." json:"statusCodes,omitempty" toml:"statusCodes,omitempty" yaml:"statusCodes,omitempty" export:"true"`
|
||||
RetryAttempts bool `description:"Keep access logs when at least one retry happened." json:"retryAttempts,omitempty" toml:"retryAttempts,omitempty" yaml:"retryAttempts,omitempty" export:"true"`
|
||||
MinDuration types.Duration `description:"Keep access logs when request took longer than the specified duration." json:"minDuration,omitempty" toml:"minDuration,omitempty" yaml:"minDuration,omitempty" export:"true"`
|
||||
}
|
||||
|
||||
// FieldHeaders holds configuration for access log headers.
|
||||
|
|
|
@ -2,6 +2,8 @@ package types
|
|||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/traefik/paerser/types"
|
||||
)
|
||||
|
||||
// Metrics provides options to expose and send Traefik metrics to different third party monitoring systems.
|
||||
|
@ -31,33 +33,33 @@ func (p *Prometheus) SetDefaults() {
|
|||
|
||||
// Datadog contains address and metrics pushing interval configuration.
|
||||
type Datadog struct {
|
||||
Address string `description:"Datadog's address." json:"address,omitempty" toml:"address,omitempty" yaml:"address,omitempty"`
|
||||
PushInterval Duration `description:"Datadog push interval." json:"pushInterval,omitempty" toml:"pushInterval,omitempty" yaml:"pushInterval,omitempty" export:"true"`
|
||||
AddEntryPointsLabels bool `description:"Enable metrics on entry points." json:"addEntryPointsLabels,omitempty" toml:"addEntryPointsLabels,omitempty" yaml:"addEntryPointsLabels,omitempty" export:"true"`
|
||||
AddServicesLabels bool `description:"Enable metrics on services." json:"addServicesLabels,omitempty" toml:"addServicesLabels,omitempty" yaml:"addServicesLabels,omitempty" export:"true"`
|
||||
Address string `description:"Datadog's address." json:"address,omitempty" toml:"address,omitempty" yaml:"address,omitempty"`
|
||||
PushInterval types.Duration `description:"Datadog push interval." json:"pushInterval,omitempty" toml:"pushInterval,omitempty" yaml:"pushInterval,omitempty" export:"true"`
|
||||
AddEntryPointsLabels bool `description:"Enable metrics on entry points." json:"addEntryPointsLabels,omitempty" toml:"addEntryPointsLabels,omitempty" yaml:"addEntryPointsLabels,omitempty" export:"true"`
|
||||
AddServicesLabels bool `description:"Enable metrics on services." json:"addServicesLabels,omitempty" toml:"addServicesLabels,omitempty" yaml:"addServicesLabels,omitempty" export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (d *Datadog) SetDefaults() {
|
||||
d.Address = "localhost:8125"
|
||||
d.PushInterval = Duration(10 * time.Second)
|
||||
d.PushInterval = types.Duration(10 * time.Second)
|
||||
d.AddEntryPointsLabels = true
|
||||
d.AddServicesLabels = true
|
||||
}
|
||||
|
||||
// Statsd contains address and metrics pushing interval configuration.
type Statsd struct {
	Address              string   `description:"StatsD address." json:"address,omitempty" toml:"address,omitempty" yaml:"address,omitempty"`
	PushInterval         Duration `description:"StatsD push interval." json:"pushInterval,omitempty" toml:"pushInterval,omitempty" yaml:"pushInterval,omitempty" export:"true"`
	AddEntryPointsLabels bool     `description:"Enable metrics on entry points." json:"addEntryPointsLabels,omitempty" toml:"addEntryPointsLabels,omitempty" yaml:"addEntryPointsLabels,omitempty" export:"true"`
	AddServicesLabels    bool     `description:"Enable metrics on services." json:"addServicesLabels,omitempty" toml:"addServicesLabels,omitempty" yaml:"addServicesLabels,omitempty" export:"true"`
	Prefix               string   `description:"Prefix to use for metrics collection." json:"prefix,omitempty" toml:"prefix,omitempty" yaml:"prefix,omitempty" export:"true"`
	Address              string         `description:"StatsD address." json:"address,omitempty" toml:"address,omitempty" yaml:"address,omitempty"`
	PushInterval         types.Duration `description:"StatsD push interval." json:"pushInterval,omitempty" toml:"pushInterval,omitempty" yaml:"pushInterval,omitempty" export:"true"`
	AddEntryPointsLabels bool           `description:"Enable metrics on entry points." json:"addEntryPointsLabels,omitempty" toml:"addEntryPointsLabels,omitempty" yaml:"addEntryPointsLabels,omitempty" export:"true"`
	AddServicesLabels    bool           `description:"Enable metrics on services." json:"addServicesLabels,omitempty" toml:"addServicesLabels,omitempty" yaml:"addServicesLabels,omitempty" export:"true"`
	Prefix               string         `description:"Prefix to use for metrics collection." json:"prefix,omitempty" toml:"prefix,omitempty" yaml:"prefix,omitempty" export:"true"`
}

// SetDefaults sets the default values.
func (s *Statsd) SetDefaults() {
	s.Address = "localhost:8125"
	s.PushInterval = Duration(10 * time.Second)
	s.PushInterval = types.Duration(10 * time.Second)
	s.AddEntryPointsLabels = true
	s.AddServicesLabels = true
	s.Prefix = "traefik"
@@ -65,22 +67,22 @@ func (s *Statsd) SetDefaults() {

// InfluxDB contains address, login and metrics pushing interval configuration.
type InfluxDB struct {
	Address              string   `description:"InfluxDB address." json:"address,omitempty" toml:"address,omitempty" yaml:"address,omitempty"`
	Protocol             string   `description:"InfluxDB address protocol (udp or http)." json:"protocol,omitempty" toml:"protocol,omitempty" yaml:"protocol,omitempty"`
	PushInterval         Duration `description:"InfluxDB push interval." json:"pushInterval,omitempty" toml:"pushInterval,omitempty" yaml:"pushInterval,omitempty" export:"true"`
	Database             string   `description:"InfluxDB database used when protocol is http." json:"database,omitempty" toml:"database,omitempty" yaml:"database,omitempty" export:"true"`
	RetentionPolicy      string   `description:"InfluxDB retention policy used when protocol is http." json:"retentionPolicy,omitempty" toml:"retentionPolicy,omitempty" yaml:"retentionPolicy,omitempty" export:"true"`
	Username             string   `description:"InfluxDB username (only with http)." json:"username,omitempty" toml:"username,omitempty" yaml:"username,omitempty" export:"true"`
	Password             string   `description:"InfluxDB password (only with http)." json:"password,omitempty" toml:"password,omitempty" yaml:"password,omitempty" export:"true"`
	AddEntryPointsLabels bool     `description:"Enable metrics on entry points." json:"addEntryPointsLabels,omitempty" toml:"addEntryPointsLabels,omitempty" yaml:"addEntryPointsLabels,omitempty" export:"true"`
	AddServicesLabels    bool     `description:"Enable metrics on services." json:"addServicesLabels,omitempty" toml:"addServicesLabels,omitempty" yaml:"addServicesLabels,omitempty" export:"true"`
	Address              string         `description:"InfluxDB address." json:"address,omitempty" toml:"address,omitempty" yaml:"address,omitempty"`
	Protocol             string         `description:"InfluxDB address protocol (udp or http)." json:"protocol,omitempty" toml:"protocol,omitempty" yaml:"protocol,omitempty"`
	PushInterval         types.Duration `description:"InfluxDB push interval." json:"pushInterval,omitempty" toml:"pushInterval,omitempty" yaml:"pushInterval,omitempty" export:"true"`
	Database             string         `description:"InfluxDB database used when protocol is http." json:"database,omitempty" toml:"database,omitempty" yaml:"database,omitempty" export:"true"`
	RetentionPolicy      string         `description:"InfluxDB retention policy used when protocol is http." json:"retentionPolicy,omitempty" toml:"retentionPolicy,omitempty" yaml:"retentionPolicy,omitempty" export:"true"`
	Username             string         `description:"InfluxDB username (only with http)." json:"username,omitempty" toml:"username,omitempty" yaml:"username,omitempty" export:"true"`
	Password             string         `description:"InfluxDB password (only with http)." json:"password,omitempty" toml:"password,omitempty" yaml:"password,omitempty" export:"true"`
	AddEntryPointsLabels bool           `description:"Enable metrics on entry points." json:"addEntryPointsLabels,omitempty" toml:"addEntryPointsLabels,omitempty" yaml:"addEntryPointsLabels,omitempty" export:"true"`
	AddServicesLabels    bool           `description:"Enable metrics on services." json:"addServicesLabels,omitempty" toml:"addServicesLabels,omitempty" yaml:"addServicesLabels,omitempty" export:"true"`
}

// SetDefaults sets the default values.
func (i *InfluxDB) SetDefaults() {
	i.Address = "localhost:8089"
	i.Protocol = "udp"
	i.PushInterval = Duration(10 * time.Second)
	i.PushInterval = types.Duration(10 * time.Second)
	i.AddEntryPointsLabels = true
	i.AddServicesLabels = true
}
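For illustration only (not part of this diff): push intervals are typically supplied as strings such as "10s" by configuration sources, and the decoding layer turns them into the typed value used by the structs above. The rough sketch below uses only the standard library plus the conversion already shown in the SetDefaults hunks; it is not paerser's actual decoding logic.

// Illustrative sketch only: from a human-readable interval to the typed value.
package main

import (
	"fmt"
	"log"
	"time"

	ptypes "github.com/traefik/paerser/types"
)

func main() {
	raw := "10s" // e.g. a pushInterval value from a config file

	parsed, err := time.ParseDuration(raw)
	if err != nil {
		log.Fatal(err)
	}

	interval := ptypes.Duration(parsed) // same conversion as in SetDefaults above
	fmt.Println(interval)
}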