chore: update linter.
parent d698eba1e7
commit a20e90aa17
@@ -52,6 +52,8 @@
 "testpackage", # Too strict
 "goerr113", # Too strict
 "nestif", # Too many false-positive.
+"noctx", # Too strict
+"exhaustive", # Too strict
 ]

 [issues]
@@ -19,7 +19,7 @@ RUN mkdir -p /usr/local/bin \
 && chmod +x /usr/local/bin/go-bindata

 # Download golangci-lint binary to bin folder in $GOPATH
-RUN curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s -- -b $GOPATH/bin v1.26.0
+RUN curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s -- -b $GOPATH/bin v1.28.0

 # Download misspell binary to bin folder in $GOPATH
 RUN curl -sfL https://raw.githubusercontent.com/client9/misspell/master/install-misspell.sh | bash -s -- -b $GOPATH/bin v0.3.4
@@ -403,7 +403,7 @@ func configureLogging(staticConfiguration *static.Configuration) {
 if len(logFile) > 0 {
 dir := filepath.Dir(logFile)

-if err := os.MkdirAll(dir, 0755); err != nil {
+if err := os.MkdirAll(dir, 0o755); err != nil {
 log.WithoutContext().Errorf("Failed to create log path %s: %s", dir, err)
 }
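For reference, many of the hunks below make the same one-character change to file-permission constants, replacing the legacy 0NNN octal spelling with the explicit 0oNNN form. A minimal illustrative sketch (hypothetical file, not part of this commit) showing that the two spellings denote the same value in Go 1.13 and later:

package main

import (
	"fmt"
	"os"
)

func main() {
	// The two literals are numerically identical; only the spelling differs.
	fmt.Println(0755 == 0o755) // true

	// Either form works as the permission argument; the path here is made up.
	_ = os.MkdirAll("/tmp/example-dir", 0o755)
}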
@@ -138,7 +138,7 @@ func (s *ConsulSuite) TestSimpleConfiguration(c *check.C) {
 expectedJSON := filepath.FromSlash("testdata/rawdata-consul.json")

 if *updateExpected {
-err = ioutil.WriteFile(expectedJSON, got, 0666)
+err = ioutil.WriteFile(expectedJSON, got, 0o666)
 c.Assert(err, checker.IsNil)
 }
@@ -36,8 +36,8 @@ func (s *DockerComposeSuite) TearDownSuite(c *check.C) {
 }

 func (s *DockerComposeSuite) TestComposeScale(c *check.C) {
-var serviceCount = 2
-var composeService = "whoami1"
+serviceCount := 2
+composeService := "whoami1"

 s.composeProject.Scale(c, composeService, serviceCount)
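The var-to-:= rewrites in the test suites are purely stylistic. A small sketch (hypothetical names, not taken from the repository) showing that both declarations produce the same variable inside a function body:

package main

import "fmt"

func main() {
	// Explicit var with an inferred type.
	var countA = 2

	// Short variable declaration: same type, same value, which is the form
	// the updated lint settings standardize on inside function bodies.
	countB := 2

	fmt.Println(countA == countB) // true
}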
@@ -43,7 +43,7 @@ func (s *DockerSuite) startContainerWithLabels(c *check.C, image string, labels
 })
 }

-func (s *DockerSuite) startContainerWithNameAndLabels(c *check.C, name string, image string, labels map[string]string, args ...string) string {
+func (s *DockerSuite) startContainerWithNameAndLabels(c *check.C, name, image string, labels map[string]string, args ...string) string {
 return s.startContainerWithConfig(c, image, d.ContainerConfig{
 Name: name,
 Cmd: args,
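The signature change above only collapses consecutive parameters of the same type into a single type annotation. A tiny sketch (hypothetical functions) showing that the two spellings are equivalent:

package main

import "fmt"

// Both functions take two string parameters; the second states the shared
// type once, which is the style applied throughout this commit.
func joinVerbose(name string, image string) string { return name + "/" + image }

func joinShort(name, image string) string { return name + "/" + image }

func main() {
	fmt.Println(joinVerbose("whoami", "latest") == joinShort("whoami", "latest")) // true
}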
@@ -138,7 +138,7 @@ func (s *EtcdSuite) TestSimpleConfiguration(c *check.C) {
 expectedJSON := filepath.FromSlash("testdata/rawdata-etcd.json")

 if *updateExpected {
-err = ioutil.WriteFile(expectedJSON, got, 0666)
+err = ioutil.WriteFile(expectedJSON, got, 0o666)
 c.Assert(err, checker.IsNil)
 }
@@ -19,8 +19,10 @@ import (
 "google.golang.org/grpc/credentials"
 )

-var LocalhostCert []byte
-var LocalhostKey []byte
+var (
+LocalhostCert []byte
+LocalhostKey []byte
+)

 const randCharset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
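Grouping adjacent package-level declarations into one var block, as above, changes layout only. A minimal sketch (hypothetical package and names) of the grouped form:

package example

// A single var block declaring the same two package-level variables that two
// separate var statements would declare; the compiler treats them identically.
var (
	localhostCert []byte
	localhostKey  []byte
)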
@@ -15,9 +15,11 @@ It has these top-level messages:
 */
 package helloworld

-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
+import (
+proto "github.com/golang/protobuf/proto"
+fmt "fmt"
+math "math"
+)

 import (
 context "context"
@@ -26,9 +28,11 @@ import (
 )

 // Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
+var (
+_ = proto.Marshal
+_ = fmt.Errorf
+_ = math.Inf
+)

 // This is a compile-time assertion to ensure that this generated file
 // is compatible with the proto package it is being compiled against.
@@ -102,8 +106,10 @@ func init() {
 }

 // Reference imports to suppress errors if they are not otherwise used.
-var _ context.Context
-var _ grpc.ClientConn
+var (
+_ context.Context
+_ grpc.ClientConn
+)

 // This is a compile-time assertion to ensure that this generated file
 // is compatible with the grpc package it is being compiled against.
@@ -956,11 +956,13 @@ func modifyCertificateConfFileContent(c *check.C, certFileName, confFileName str
 if len(certFileName) > 0 {
 tlsConf := dynamic.Configuration{
 TLS: &dynamic.TLSConfiguration{
-Certificates: []*traefiktls.CertAndStores{{
-Certificate: traefiktls.Certificate{
-CertFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".cert"),
-KeyFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".key"),
-}},
+Certificates: []*traefiktls.CertAndStores{
+{
+Certificate: traefiktls.Certificate{
+CertFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".cert"),
+KeyFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".key"),
+},
+},
 },
 },
 }
@@ -21,10 +21,12 @@ import (
 checker "github.com/vdemeester/shakers"
 )

-var integration = flag.Bool("integration", false, "run integration tests")
-var container = flag.Bool("container", false, "run container integration tests")
-var host = flag.Bool("host", false, "run host integration tests")
-var showLog = flag.Bool("tlog", false, "always show Traefik logs")
+var (
+integration = flag.Bool("integration", false, "run integration tests")
+container = flag.Bool("container", false, "run container integration tests")
+host = flag.Bool("host", false, "run host integration tests")
+showLog = flag.Bool("tlog", false, "always show Traefik logs")
+)

 func Test(t *testing.T) {
 if !*integration {
@@ -119,7 +119,7 @@ func testConfiguration(c *check.C, path, apiPort string) {
 newJSON, err := json.MarshalIndent(rtRepr, "", "\t")
 c.Assert(err, checker.IsNil)

-err = ioutil.WriteFile(expectedJSON, newJSON, 0644)
+err = ioutil.WriteFile(expectedJSON, newJSON, 0o644)
 c.Assert(err, checker.IsNil)
 c.Errorf("We do not want a passing test in file update mode")
 }
@@ -55,7 +55,7 @@ func (s *MarathonSuite15) extendDockerHostsFile(host, ipAddr string) error {
 // (See also https://groups.google.com/d/topic/docker-user/JOGE7AnJ3Gw/discussion.)
 if os.Getenv("CONTAINER") == "DOCKER" {
 // We are running inside a container -- extend the hosts file.
-file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0600)
+file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0o600)
 if err != nil {
 return err
 }
@@ -60,7 +60,7 @@ func (s *MarathonSuite) extendDockerHostsFile(host, ipAddr string) error {
 // (See also https://groups.google.com/d/topic/docker-user/JOGE7AnJ3Gw/discussion.)
 if os.Getenv("CONTAINER") == "DOCKER" {
 // We are running inside a container -- extend the hosts file.
-file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0600)
+file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0o600)
 if err != nil {
 return err
 }
@@ -138,7 +138,7 @@ func (s *RedisSuite) TestSimpleConfiguration(c *check.C) {
 expectedJSON := filepath.FromSlash("testdata/rawdata-redis.json")

 if *updateExpected {
-err = ioutil.WriteFile(expectedJSON, got, 0666)
+err = ioutil.WriteFile(expectedJSON, got, 0o666)
 c.Assert(err, checker.IsNil)
 }
@@ -22,7 +22,7 @@ import (
 type WebsocketSuite struct{ BaseSuite }

 func (s *WebsocketSuite) TestBase(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c, err := upgrader.Upgrade(w, r, nil)
@@ -72,7 +72,7 @@ func (s *WebsocketSuite) TestBase(c *check.C) {
 }

 func (s *WebsocketSuite) TestWrongOrigin(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c, err := upgrader.Upgrade(w, r, nil)
@@ -122,7 +122,7 @@ func (s *WebsocketSuite) TestWrongOrigin(c *check.C) {

 func (s *WebsocketSuite) TestOrigin(c *check.C) {
 // use default options
-var upgrader = gorillawebsocket.Upgrader{}
+upgrader := gorillawebsocket.Upgrader{}

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c, err := upgrader.Upgrade(w, r, nil)
@@ -180,7 +180,7 @@ func (s *WebsocketSuite) TestOrigin(c *check.C) {
 }

 func (s *WebsocketSuite) TestWrongOriginIgnoredByServer(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{CheckOrigin: func(r *http.Request) bool {
+upgrader := gorillawebsocket.Upgrader{CheckOrigin: func(r *http.Request) bool {
 return true
 }}
@@ -240,7 +240,7 @@ func (s *WebsocketSuite) TestWrongOriginIgnoredByServer(c *check.C) {
 }

 func (s *WebsocketSuite) TestSSLTermination(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c, err := upgrader.Upgrade(w, r, nil)
@@ -297,11 +297,10 @@ func (s *WebsocketSuite) TestSSLTermination(c *check.C) {
 }

 func (s *WebsocketSuite) TestBasicAuth(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 conn, err := upgrader.Upgrade(w, r, nil)
-
 if err != nil {
 return
 }
@@ -390,7 +389,7 @@ func (s *WebsocketSuite) TestSpecificResponseFromBackend(c *check.C) {
 }

 func (s *WebsocketSuite) TestURLWithURLEncodedChar(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c.Assert(r.URL.EscapedPath(), check.Equals, "/ws/http%3A%2F%2Ftest")
@@ -441,7 +440,7 @@ func (s *WebsocketSuite) TestURLWithURLEncodedChar(c *check.C) {
 }

 func (s *WebsocketSuite) TestSSLhttp2(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 ts := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c, err := upgrader.Upgrade(w, r, nil)
@@ -504,7 +503,7 @@ func (s *WebsocketSuite) TestSSLhttp2(c *check.C) {
 }

 func (s *WebsocketSuite) TestHeaderAreForwared(c *check.C) {
-var upgrader = gorillawebsocket.Upgrader{} // use default options
+upgrader := gorillawebsocket.Upgrader{} // use default options

 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 c.Assert(r.Header.Get("X-Token"), check.Equals, "my-token")
@@ -138,7 +138,7 @@ func (s *ZookeeperSuite) TestSimpleConfiguration(c *check.C) {
 expectedJSON := filepath.FromSlash("testdata/rawdata-zk.json")

 if *updateExpected {
-err = ioutil.WriteFile(expectedJSON, got, 0666)
+err = ioutil.WriteFile(expectedJSON, got, 0o666)
 c.Assert(err, checker.IsNil)
 }
@@ -24,7 +24,7 @@ func main() {
 genKVDynConfDoc("./docs/content/reference/dynamic-configuration/kv-ref.md")
 }

-func genStaticConfDoc(outputFile string, prefix string, encodeFn func(interface{}) ([]parser.Flat, error)) {
+func genStaticConfDoc(outputFile, prefix string, encodeFn func(interface{}) ([]parser.Flat, error)) {
 logger := log.WithoutContext().WithField("file", outputFile)

 element := &static.Configuration{}
@@ -41,7 +41,7 @@ func genStaticConfDoc(outputFile string, prefix string, encodeFn func(interface{
 logger.Fatal(err)
 }

-file, err := os.OpenFile(outputFile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
+file, err := os.OpenFile(outputFile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o666)
 if err != nil {
 logger.Fatal(err)
 }
@@ -11,6 +11,7 @@ type Courgette struct {
 Ji string
 Ho string
 }
+
 type Tomate struct {
 Ji string
 Ho string
@@ -226,7 +226,7 @@ func TestHandler_EntryPoints(t *testing.T) {
 newJSON, err := json.MarshalIndent(results, "", "\t")
 require.NoError(t, err)

-err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 require.NoError(t, err)
 }
@@ -840,7 +840,7 @@ func TestHandler_HTTP(t *testing.T) {
 newJSON, err := json.MarshalIndent(results, "", "\t")
 require.NoError(t, err)

-err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 require.NoError(t, err)
 }
@@ -277,7 +277,7 @@ func TestHandler_Overview(t *testing.T) {
 newJSON, err := json.MarshalIndent(results, "", "\t")
 require.NoError(t, err)

-err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 require.NoError(t, err)
 }
@@ -548,7 +548,7 @@ func TestHandler_TCP(t *testing.T) {
 newJSON, err := json.MarshalIndent(results, "", "\t")
 require.NoError(t, err)

-err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 require.NoError(t, err)
 }
@@ -161,7 +161,7 @@ func TestHandler_RawData(t *testing.T) {
 newJSON, err := json.MarshalIndent(rtRepr, "", "\t")
 require.NoError(t, err)

-err = ioutil.WriteFile(test.expected.json, newJSON, 0644)
+err = ioutil.WriteFile(test.expected.json, newJSON, 0o644)
 require.NoError(t, err)
 }
@@ -525,7 +525,7 @@ func TestHandler_UDP(t *testing.T) {
 newJSON, err := json.MarshalIndent(results, "", "\t")
 require.NoError(t, err)

-err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 require.NoError(t, err)
 }
@@ -100,25 +100,34 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "defaultMode", Value: "foobar"},
 {Name: "names", Children: []*parser.Node{
 {Name: "name0", Value: "foobar"},
-{Name: "name1", Value: "foobar"}}}}},
+{Name: "name1", Value: "foobar"},
+}},
+}},
 {Name: "names", Children: []*parser.Node{
 {Name: "name0", Value: "foobar"},
-{Name: "name1", Value: "foobar"}}}}},
+{Name: "name1", Value: "foobar"},
+}},
+}},
 {Name: "filePath", Value: "foobar"},
 {Name: "filters", Children: []*parser.Node{
 {Name: "minDuration", Value: "42"},
 {Name: "retryAttempts", Value: "true"},
-{Name: "statusCodes", Value: "foobar,foobar"}}},
-{Name: "format", Value: "foobar"}}},
+{Name: "statusCodes", Value: "foobar,foobar"},
+}},
+{Name: "format", Value: "foobar"},
+}},
 {Name: "api", Children: []*parser.Node{
 {Name: "dashboard", Value: "true"},
 {Name: "entryPoint", Value: "foobar"},
 {Name: "middlewares", Value: "foobar,foobar"},
 {Name: "statistics", Children: []*parser.Node{
-{Name: "recentErrors", Value: "42"}}}}},
+{Name: "recentErrors", Value: "42"},
+}},
+}},
 {Name: "certificatesResolvers", Children: []*parser.Node{
 {Name: "default", Children: []*parser.Node{
-{Name: "acme",
+{
+Name: "acme",
 Children: []*parser.Node{
 {Name: "acmeLogging", Value: "true"},
 {Name: "caServer", Value: "foobar"},
@@ -131,7 +140,8 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "email", Value: "foobar"},
 {Name: "entryPoint", Value: "foobar"},
 {Name: "httpChallenge", Children: []*parser.Node{
-{Name: "entryPoint", Value: "foobar"}}},
+{Name: "entryPoint", Value: "foobar"},
+}},
 {Name: "keyType", Value: "foobar"},
 {Name: "storage", Value: "foobar"},
 {Name: "tlsChallenge"},
@@ -144,33 +154,44 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "address", Value: "foobar"},
 {Name: "forwardedHeaders", Children: []*parser.Node{
 {Name: "insecure", Value: "true"},
-{Name: "trustedIPs", Value: "foobar,foobar"}}},
+{Name: "trustedIPs", Value: "foobar,foobar"},
+}},
 {Name: "proxyProtocol", Children: []*parser.Node{
 {Name: "insecure", Value: "true"},
-{Name: "trustedIPs", Value: "foobar,foobar"}}},
+{Name: "trustedIPs", Value: "foobar,foobar"},
+}},
 {Name: "transport", Children: []*parser.Node{
 {Name: "lifeCycle", Children: []*parser.Node{
 {Name: "graceTimeOut", Value: "42"},
-{Name: "requestAcceptGraceTimeout", Value: "42"}}},
+{Name: "requestAcceptGraceTimeout", Value: "42"},
+}},
 {Name: "respondingTimeouts", Children: []*parser.Node{
 {Name: "idleTimeout", Value: "42"},
 {Name: "readTimeout", Value: "42"},
-{Name: "writeTimeout", Value: "42"}}}}}}}}},
+{Name: "writeTimeout", Value: "42"},
+}},
+}},
+}},
+}},
 {Name: "global", Children: []*parser.Node{
 {Name: "checkNewVersion", Value: "true"},
-{Name: "sendAnonymousUsage", Value: "true"}}},
+{Name: "sendAnonymousUsage", Value: "true"},
+}},
 {Name: "hostResolver", Children: []*parser.Node{
 {Name: "cnameFlattening", Value: "true"},
 {Name: "resolvConfig", Value: "foobar"},
-{Name: "resolvDepth", Value: "42"}}},
+{Name: "resolvDepth", Value: "42"},
+}},
 {Name: "log", Children: []*parser.Node{
 {Name: "filePath", Value: "foobar"},
 {Name: "format", Value: "foobar"},
-{Name: "level", Value: "foobar"}}},
+{Name: "level", Value: "foobar"},
+}},
 {Name: "metrics", Children: []*parser.Node{
 {Name: "datadog", Children: []*parser.Node{
 {Name: "address", Value: "foobar"},
-{Name: "pushInterval", Value: "10s"}}},
+{Name: "pushInterval", Value: "10s"},
+}},
 {Name: "influxDB", Children: []*parser.Node{
 {Name: "address", Value: "foobar"},
 {Name: "database", Value: "foobar"},
@@ -178,17 +199,22 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "protocol", Value: "foobar"},
 {Name: "pushInterval", Value: "10s"},
 {Name: "retentionPolicy", Value: "foobar"},
-{Name: "username", Value: "foobar"}}},
+{Name: "username", Value: "foobar"},
+}},
 {Name: "prometheus", Children: []*parser.Node{
 {Name: "buckets", Value: "42,42"},
 {Name: "entryPoint", Value: "foobar"},
-{Name: "middlewares", Value: "foobar,foobar"}}},
+{Name: "middlewares", Value: "foobar,foobar"},
+}},
 {Name: "statsD", Children: []*parser.Node{
 {Name: "address", Value: "foobar"},
-{Name: "pushInterval", Value: "10s"}}}}},
+{Name: "pushInterval", Value: "10s"},
+}},
+}},
 {Name: "ping", Children: []*parser.Node{
 {Name: "entryPoint", Value: "foobar"},
-{Name: "middlewares", Value: "foobar,foobar"}}},
+{Name: "middlewares", Value: "foobar,foobar"},
+}},
 {Name: "providers", Children: []*parser.Node{
 {Name: "docker", Children: []*parser.Node{
 {Name: "constraints", Value: "foobar"},
@@ -203,15 +229,19 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "caOptional", Value: "true"},
 {Name: "cert", Value: "foobar"},
 {Name: "insecureSkipVerify", Value: "true"},
-{Name: "key", Value: "foobar"}}},
+{Name: "key", Value: "foobar"},
+}},
 {Name: "useBindPortIP", Value: "true"},
-{Name: "watch", Value: "true"}}},
+{Name: "watch", Value: "true"},
+}},
 {Name: "file", Children: []*parser.Node{
 {Name: "debugLogGeneratedTemplate", Value: "true"},
 {Name: "directory", Value: "foobar"},
 {Name: "filename", Value: "foobar"},
-{Name: "watch", Value: "true"}}},
-{Name: "kubernetesCRD",
+{Name: "watch", Value: "true"},
+}},
+{
+Name: "kubernetesCRD",
 Children: []*parser.Node{
 {Name: "certAuthFilePath", Value: "foobar"},
 {Name: "disablePassHostHeaders", Value: "true"},
@@ -219,7 +249,9 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "ingressClass", Value: "foobar"},
 {Name: "labelSelector", Value: "foobar"},
 {Name: "namespaces", Value: "foobar,foobar"},
-{Name: "token", Value: "foobar"}}},
+{Name: "token", Value: "foobar"},
+},
+},
 {Name: "kubernetesIngress", Children: []*parser.Node{
 {Name: "certAuthFilePath", Value: "foobar"},
 {Name: "disablePassHostHeaders", Value: "true"},
@@ -228,14 +260,17 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "ingressEndpoint", Children: []*parser.Node{
 {Name: "hostname", Value: "foobar"},
 {Name: "ip", Value: "foobar"},
-{Name: "publishedService", Value: "foobar"}}},
+{Name: "publishedService", Value: "foobar"},
+}},
 {Name: "labelSelector", Value: "foobar"},
 {Name: "namespaces", Value: "foobar,foobar"},
-{Name: "token", Value: "foobar"}}},
+{Name: "token", Value: "foobar"},
+}},
 {Name: "marathon", Children: []*parser.Node{
 {Name: "basic", Children: []*parser.Node{
 {Name: "httpBasicAuthUser", Value: "foobar"},
-{Name: "httpBasicPassword", Value: "foobar"}}},
+{Name: "httpBasicPassword", Value: "foobar"},
+}},
 {Name: "constraints", Value: "foobar"},
 {Name: "dcosToken", Value: "foobar"},
 {Name: "defaultRule", Value: "foobar"},
@@ -251,10 +286,12 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "caOptional", Value: "true"},
 {Name: "cert", Value: "foobar"},
 {Name: "insecureSkipVerify", Value: "true"},
-{Name: "key", Value: "foobar"}}},
+{Name: "key", Value: "foobar"},
+}},
 {Name: "tlsHandshakeTimeout", Value: "42"},
 {Name: "trace", Value: "true"},
-{Name: "watch", Value: "true"}}},
+{Name: "watch", Value: "true"},
+}},
 {Name: "providersThrottleDuration", Value: "42"},
 {Name: "rancher", Children: []*parser.Node{
 {Name: "constraints", Value: "foobar"},
@@ -264,17 +301,22 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "intervalPoll", Value: "true"},
 {Name: "prefix", Value: "foobar"},
 {Name: "refreshSeconds", Value: "42"},
-{Name: "watch", Value: "true"}}},
+{Name: "watch", Value: "true"},
+}},
 {Name: "rest", Children: []*parser.Node{
-{Name: "entryPoint", Value: "foobar"}}}}},
+{Name: "entryPoint", Value: "foobar"},
+}},
+}},
 {Name: "serversTransport", Children: []*parser.Node{
 {Name: "forwardingTimeouts", Children: []*parser.Node{
 {Name: "dialTimeout", Value: "42"},
 {Name: "idleConnTimeout", Value: "42"},
-{Name: "responseHeaderTimeout", Value: "42"}}},
+{Name: "responseHeaderTimeout", Value: "42"},
+}},
 {Name: "insecureSkipVerify", Value: "true"},
 {Name: "maxIdleConnsPerHost", Value: "42"},
-{Name: "rootCAs", Value: "foobar,foobar"}}},
+{Name: "rootCAs", Value: "foobar,foobar"},
+}},
 {Name: "tracing", Children: []*parser.Node{
 {Name: "datadog", Children: []*parser.Node{
 {Name: "bagagePrefixHeaderName", Value: "foobar"},
@@ -284,18 +326,21 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "parentIDHeaderName", Value: "foobar"},
 {Name: "prioritySampling", Value: "true"},
 {Name: "samplingPriorityHeaderName", Value: "foobar"},
-{Name: "traceIDHeaderName", Value: "foobar"}}},
+{Name: "traceIDHeaderName", Value: "foobar"},
+}},
 {Name: "haystack", Children: []*parser.Node{
 {Name: "globalTag", Value: "foobar"},
 {Name: "localAgentHost", Value: "foobar"},
 {Name: "localAgentPort", Value: "42"},
 {Name: "parentIDHeaderName", Value: "foobar"},
 {Name: "spanIDHeaderName", Value: "foobar"},
-{Name: "traceIDHeaderName", Value: "foobar"}}},
+{Name: "traceIDHeaderName", Value: "foobar"},
+}},
 {Name: "instana", Children: []*parser.Node{
 {Name: "localAgentHost", Value: "foobar"},
 {Name: "localAgentPort", Value: "42"},
-{Name: "logLevel", Value: "foobar"}}},
+{Name: "logLevel", Value: "foobar"},
+}},
 {Name: "jaeger", Children: []*parser.Node{
 {Name: "gen128Bit", Value: "true"},
 {Name: "localAgentHostPort", Value: "foobar"},
@@ -303,14 +348,17 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
 {Name: "samplingParam", Value: "42"},
 {Name: "samplingServerURL", Value: "foobar"},
 {Name: "samplingType", Value: "foobar"},
-{Name: "traceContextHeaderName", Value: "foobar"}}},
+{Name: "traceContextHeaderName", Value: "foobar"},
+}},
 {Name: "serviceName", Value: "foobar"},
 {Name: "spanNameLimit", Value: "42"},
 {Name: "zipkin", Children: []*parser.Node{
 {Name: "httpEndpoint", Value: "foobar"},
 {Name: "id128Bit", Value: "true"},
 {Name: "sameSpan", Value: "true"},
-{Name: "sampleRate", Value: "42"}}}}},
+{Name: "sampleRate", Value: "42"},
+}},
+}},
 },
 }
@@ -332,25 +380,34 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "defaultMode", Value: "foobar"},
 {Name: "names", Children: []*parser.Node{
 {Name: "name0", Value: "foobar"},
-{Name: "name1", Value: "foobar"}}}}},
+{Name: "name1", Value: "foobar"},
+}},
+}},
 {Name: "names", Children: []*parser.Node{
 {Name: "name0", Value: "foobar"},
-{Name: "name1", Value: "foobar"}}}}},
+{Name: "name1", Value: "foobar"},
+}},
+}},
 {Name: "filePath", Value: "foobar"},
 {Name: "filters", Children: []*parser.Node{
 {Name: "minDuration", Value: "42"},
 {Name: "retryAttempts", Value: "true"},
-{Name: "statusCodes", Value: "foobar,foobar"}}},
-{Name: "format", Value: "foobar"}}},
+{Name: "statusCodes", Value: "foobar,foobar"},
+}},
+{Name: "format", Value: "foobar"},
+}},
 {Name: "api", Children: []*parser.Node{
 {Name: "dashboard", Value: "true"},
 {Name: "entryPoint", Value: "foobar"},
 {Name: "middlewares", Value: "foobar,foobar"},
 {Name: "statistics", Children: []*parser.Node{
-{Name: "recentErrors", Value: "42"}}}}},
+{Name: "recentErrors", Value: "42"},
+}},
+}},
 {Name: "certificatesResolvers", Children: []*parser.Node{
 {Name: "default", Children: []*parser.Node{
-{Name: "acme",
+{
+Name: "acme",
 Children: []*parser.Node{
 {Name: "acmeLogging", Value: "true"},
 {Name: "caServer", Value: "foobar"},
@@ -363,7 +420,8 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "email", Value: "foobar"},
 {Name: "entryPoint", Value: "foobar"},
 {Name: "httpChallenge", Children: []*parser.Node{
-{Name: "entryPoint", Value: "foobar"}}},
+{Name: "entryPoint", Value: "foobar"},
+}},
 {Name: "keyType", Value: "foobar"},
 {Name: "storage", Value: "foobar"},
 {Name: "tlsChallenge"},
@@ -376,33 +434,44 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "address", Value: "foobar"},
 {Name: "forwardedHeaders", Children: []*parser.Node{
 {Name: "insecure", Value: "true"},
-{Name: "trustedIPs", Value: "foobar,foobar"}}},
+{Name: "trustedIPs", Value: "foobar,foobar"},
+}},
 {Name: "proxyProtocol", Children: []*parser.Node{
 {Name: "insecure", Value: "true"},
-{Name: "trustedIPs", Value: "foobar,foobar"}}},
+{Name: "trustedIPs", Value: "foobar,foobar"},
+}},
 {Name: "transport", Children: []*parser.Node{
 {Name: "lifeCycle", Children: []*parser.Node{
 {Name: "graceTimeOut", Value: "42"},
-{Name: "requestAcceptGraceTimeout", Value: "42"}}},
+{Name: "requestAcceptGraceTimeout", Value: "42"},
+}},
 {Name: "respondingTimeouts", Children: []*parser.Node{
 {Name: "idleTimeout", Value: "42"},
 {Name: "readTimeout", Value: "42"},
-{Name: "writeTimeout", Value: "42"}}}}}}}}},
+{Name: "writeTimeout", Value: "42"},
+}},
+}},
+}},
+}},
 {Name: "global", Children: []*parser.Node{
 {Name: "checkNewVersion", Value: "true"},
-{Name: "sendAnonymousUsage", Value: "true"}}},
+{Name: "sendAnonymousUsage", Value: "true"},
+}},
 {Name: "hostResolver", Children: []*parser.Node{
 {Name: "cnameFlattening", Value: "true"},
 {Name: "resolvConfig", Value: "foobar"},
-{Name: "resolvDepth", Value: "42"}}},
+{Name: "resolvDepth", Value: "42"},
+}},
 {Name: "log", Children: []*parser.Node{
 {Name: "filePath", Value: "foobar"},
 {Name: "format", Value: "foobar"},
-{Name: "level", Value: "foobar"}}},
+{Name: "level", Value: "foobar"},
+}},
 {Name: "metrics", Children: []*parser.Node{
 {Name: "datadog", Children: []*parser.Node{
 {Name: "address", Value: "foobar"},
-{Name: "pushInterval", Value: "10s"}}},
+{Name: "pushInterval", Value: "10s"},
+}},
 {Name: "influxDB", Children: []*parser.Node{
 {Name: "address", Value: "foobar"},
 {Name: "database", Value: "foobar"},
@@ -410,17 +479,22 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "protocol", Value: "foobar"},
 {Name: "pushInterval", Value: "10s"},
 {Name: "retentionPolicy", Value: "foobar"},
-{Name: "username", Value: "foobar"}}},
+{Name: "username", Value: "foobar"},
+}},
 {Name: "prometheus", Children: []*parser.Node{
 {Name: "buckets", Value: "42,42"},
 {Name: "entryPoint", Value: "foobar"},
-{Name: "middlewares", Value: "foobar,foobar"}}},
+{Name: "middlewares", Value: "foobar,foobar"},
+}},
 {Name: "statsD", Children: []*parser.Node{
 {Name: "address", Value: "foobar"},
-{Name: "pushInterval", Value: "10s"}}}}},
+{Name: "pushInterval", Value: "10s"},
+}},
+}},
 {Name: "ping", Children: []*parser.Node{
 {Name: "entryPoint", Value: "foobar"},
-{Name: "middlewares", Value: "foobar,foobar"}}},
+{Name: "middlewares", Value: "foobar,foobar"},
+}},
 {Name: "providers", Children: []*parser.Node{
 {Name: "docker", Children: []*parser.Node{
 {Name: "constraints", Value: "foobar"},
@@ -435,15 +509,19 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "caOptional", Value: "true"},
 {Name: "cert", Value: "foobar"},
 {Name: "insecureSkipVerify", Value: "true"},
-{Name: "key", Value: "foobar"}}},
+{Name: "key", Value: "foobar"},
+}},
 {Name: "useBindPortIP", Value: "true"},
-{Name: "watch", Value: "true"}}},
+{Name: "watch", Value: "true"},
+}},
 {Name: "file", Children: []*parser.Node{
 {Name: "debugLogGeneratedTemplate", Value: "true"},
 {Name: "directory", Value: "foobar"},
 {Name: "filename", Value: "foobar"},
-{Name: "watch", Value: "true"}}},
-{Name: "kubernetesCRD",
+{Name: "watch", Value: "true"},
+}},
+{
+Name: "kubernetesCRD",
 Children: []*parser.Node{
 {Name: "certAuthFilePath", Value: "foobar"},
 {Name: "disablePassHostHeaders", Value: "true"},
@@ -451,7 +529,9 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "ingressClass", Value: "foobar"},
 {Name: "labelSelector", Value: "foobar"},
 {Name: "namespaces", Value: "foobar,foobar"},
-{Name: "token", Value: "foobar"}}},
+{Name: "token", Value: "foobar"},
+},
+},
 {Name: "kubernetesIngress", Children: []*parser.Node{
 {Name: "certAuthFilePath", Value: "foobar"},
 {Name: "disablePassHostHeaders", Value: "true"},
@@ -460,14 +540,17 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "ingressEndpoint", Children: []*parser.Node{
 {Name: "hostname", Value: "foobar"},
 {Name: "ip", Value: "foobar"},
-{Name: "publishedService", Value: "foobar"}}},
+{Name: "publishedService", Value: "foobar"},
+}},
 {Name: "labelSelector", Value: "foobar"},
 {Name: "namespaces", Value: "foobar,foobar"},
-{Name: "token", Value: "foobar"}}},
+{Name: "token", Value: "foobar"},
+}},
 {Name: "marathon", Children: []*parser.Node{
 {Name: "basic", Children: []*parser.Node{
 {Name: "httpBasicAuthUser", Value: "foobar"},
-{Name: "httpBasicPassword", Value: "foobar"}}},
+{Name: "httpBasicPassword", Value: "foobar"},
+}},
 {Name: "constraints", Value: "foobar"},
 {Name: "dcosToken", Value: "foobar"},
 {Name: "defaultRule", Value: "foobar"},
@@ -483,10 +566,12 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "caOptional", Value: "true"},
 {Name: "cert", Value: "foobar"},
 {Name: "insecureSkipVerify", Value: "true"},
-{Name: "key", Value: "foobar"}}},
+{Name: "key", Value: "foobar"},
+}},
 {Name: "tlsHandshakeTimeout", Value: "42"},
 {Name: "trace", Value: "true"},
-{Name: "watch", Value: "true"}}},
+{Name: "watch", Value: "true"},
+}},
 {Name: "providersThrottleDuration", Value: "42"},
 {Name: "rancher", Children: []*parser.Node{
 {Name: "constraints", Value: "foobar"},
@@ -496,17 +581,22 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "intervalPoll", Value: "true"},
 {Name: "prefix", Value: "foobar"},
 {Name: "refreshSeconds", Value: "42"},
-{Name: "watch", Value: "true"}}},
+{Name: "watch", Value: "true"},
+}},
 {Name: "rest", Children: []*parser.Node{
-{Name: "entryPoint", Value: "foobar"}}}}},
+{Name: "entryPoint", Value: "foobar"},
+}},
+}},
 {Name: "serversTransport", Children: []*parser.Node{
 {Name: "forwardingTimeouts", Children: []*parser.Node{
 {Name: "dialTimeout", Value: "42"},
 {Name: "idleConnTimeout", Value: "42"},
-{Name: "responseHeaderTimeout", Value: "42"}}},
+{Name: "responseHeaderTimeout", Value: "42"},
+}},
 {Name: "insecureSkipVerify", Value: "true"},
 {Name: "maxIdleConnsPerHost", Value: "42"},
-{Name: "rootCAs", Value: "foobar,foobar"}}},
+{Name: "rootCAs", Value: "foobar,foobar"},
+}},
 {Name: "tracing", Children: []*parser.Node{
 {Name: "datadog", Children: []*parser.Node{
 {Name: "bagagePrefixHeaderName", Value: "foobar"},
@@ -516,18 +606,21 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "parentIDHeaderName", Value: "foobar"},
 {Name: "prioritySampling", Value: "true"},
 {Name: "samplingPriorityHeaderName", Value: "foobar"},
-{Name: "traceIDHeaderName", Value: "foobar"}}},
+{Name: "traceIDHeaderName", Value: "foobar"},
+}},
 {Name: "haystack", Children: []*parser.Node{
 {Name: "globalTag", Value: "foobar"},
 {Name: "localAgentHost", Value: "foobar"},
 {Name: "localAgentPort", Value: "42"},
 {Name: "parentIDHeaderName", Value: "foobar"},
 {Name: "spanIDHeaderName", Value: "foobar"},
-{Name: "traceIDHeaderName", Value: "foobar"}}},
+{Name: "traceIDHeaderName", Value: "foobar"},
+}},
 {Name: "instana", Children: []*parser.Node{
 {Name: "localAgentHost", Value: "foobar"},
 {Name: "localAgentPort", Value: "42"},
-{Name: "logLevel", Value: "foobar"}}},
+{Name: "logLevel", Value: "foobar"},
+}},
 {Name: "jaeger", Children: []*parser.Node{
 {Name: "gen128Bit", Value: "true"},
 {Name: "localAgentHostPort", Value: "foobar"},
@@ -535,14 +628,17 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
 {Name: "samplingParam", Value: "42"},
 {Name: "samplingServerURL", Value: "foobar"},
 {Name: "samplingType", Value: "foobar"},
-{Name: "traceContextHeaderName", Value: "foobar"}}},
+{Name: "traceContextHeaderName", Value: "foobar"},
+}},
 {Name: "serviceName", Value: "foobar"},
 {Name: "spanNameLimit", Value: "42"},
 {Name: "zipkin", Children: []*parser.Node{
 {Name: "httpEndpoint", Value: "foobar"},
 {Name: "id128Bit", Value: "true"},
 {Name: "sameSpan", Value: "true"},
-{Name: "sampleRate", Value: "42"}}}}},
+{Name: "sampleRate", Value: "42"},
+}},
+}},
 },
 }
@@ -101,7 +101,7 @@ func (f *flagSet) parseOne() (bool, error) {
 	return true, nil
 }
 
-func (f *flagSet) setValue(name string, value string) {
+func (f *flagSet) setValue(name, value string) {
 	srcKey := parser.DefaultRootName + "." + name
 	neutralKey := strings.ToLower(srcKey)
 
@@ -81,8 +81,8 @@ func filterPairs(pairs []*store.KVPair, filters []string) []*store.KVPair {
 		return pairs[i].Key < pairs[j].Key
 	})
 
-	var simplePairs = map[string]*store.KVPair{}
-	var slicePairs = map[string][]string{}
+	simplePairs := map[string]*store.KVPair{}
+	slicePairs := map[string][]string{}
 
 	for _, pair := range pairs {
 		if len(filters) == 0 {
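Many of the hunks in this commit are the same mechanical cleanup: a `var name = value` declaration inside a function body is rewritten to the short form `name := value`. As a hedged illustration only (this snippet is not part of the diff, and its names are invented), the pattern looks like this:

package main

import "fmt"

// countByKey shows the style this linter pass prefers: inside a function,
// use the short declaration form instead of an explicit var with an initializer.
func countByKey(keys []string) map[string]int {
	// Old style flagged by the linter: var counts = map[string]int{}
	counts := map[string]int{} // new style: short variable declaration
	for _, k := range keys {
		counts[k]++
	}
	return counts
}

func main() {
	fmt.Println(countByKey([]string{"a", "b", "a"}))
}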
@ -498,27 +498,31 @@ func TestFill(t *testing.T) {
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "huu", Kind: reflect.String},
|
{Name: "Fii", FieldName: "Fii", Value: "huu", Kind: reflect.String},
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "6", Kind: reflect.Int},
|
{Name: "Fuu", FieldName: "Fuu", Value: "6", Kind: reflect.Int},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo struct {
|
Foo struct {
|
||||||
Fii string
|
Fii string
|
||||||
Fuu int
|
Fuu int
|
||||||
}
|
}
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo struct {
|
element: &struct {
|
||||||
Fii string
|
Foo struct {
|
||||||
Fuu int
|
Fii string
|
||||||
}
|
Fuu int
|
||||||
}{
|
}
|
||||||
Foo: struct {
|
|
||||||
Fii string
|
|
||||||
Fuu int
|
|
||||||
}{
|
}{
|
||||||
Fii: "huu",
|
Foo: struct {
|
||||||
Fuu: 6,
|
Fii string
|
||||||
}},
|
Fuu int
|
||||||
|
}{
|
||||||
|
Fii: "huu",
|
||||||
|
Fuu: 6,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -534,27 +538,31 @@ func TestFill(t *testing.T) {
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "huu", Kind: reflect.String},
|
{Name: "Fii", FieldName: "Fii", Value: "huu", Kind: reflect.String},
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "6", Kind: reflect.Int},
|
{Name: "Fuu", FieldName: "Fuu", Value: "6", Kind: reflect.Int},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo *struct {
|
Foo *struct {
|
||||||
Fii string
|
Fii string
|
||||||
Fuu int
|
Fuu int
|
||||||
}
|
}
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo *struct {
|
element: &struct {
|
||||||
Fii string
|
Foo *struct {
|
||||||
Fuu int
|
Fii string
|
||||||
}
|
Fuu int
|
||||||
}{
|
}
|
||||||
Foo: &struct {
|
|
||||||
Fii string
|
|
||||||
Fuu int
|
|
||||||
}{
|
}{
|
||||||
Fii: "huu",
|
Foo: &struct {
|
||||||
Fuu: 6,
|
Fii string
|
||||||
}},
|
Fuu int
|
||||||
|
}{
|
||||||
|
Fii: "huu",
|
||||||
|
Fuu: 6,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -568,23 +576,26 @@ func TestFill(t *testing.T) {
|
||||||
FieldName: "Foo",
|
FieldName: "Foo",
|
||||||
Kind: reflect.Ptr,
|
Kind: reflect.Ptr,
|
||||||
},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo *struct {
|
Foo *struct {
|
||||||
Fii string
|
Fii string
|
||||||
Fuu int
|
Fuu int
|
||||||
} `label:"allowEmpty"`
|
} `label:"allowEmpty"`
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo *struct {
|
element: &struct {
|
||||||
Fii string
|
Foo *struct {
|
||||||
Fuu int
|
Fii string
|
||||||
} `label:"allowEmpty"`
|
Fuu int
|
||||||
}{
|
} `label:"allowEmpty"`
|
||||||
Foo: &struct {
|
}{
|
||||||
Fii string
|
Foo: &struct {
|
||||||
Fuu int
|
Fii string
|
||||||
}{}},
|
Fuu int
|
||||||
|
}{},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -599,19 +610,21 @@ func TestFill(t *testing.T) {
|
||||||
Kind: reflect.Ptr,
|
Kind: reflect.Ptr,
|
||||||
Disabled: true,
|
Disabled: true,
|
||||||
},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo *struct {
|
Foo *struct {
|
||||||
Fii string
|
Fii string
|
||||||
Fuu int
|
Fuu int
|
||||||
} `label:"allowEmpty"`
|
} `label:"allowEmpty"`
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo *struct {
|
element: &struct {
|
||||||
Fii string
|
Foo *struct {
|
||||||
Fuu int
|
Fii string
|
||||||
} `label:"allowEmpty"`
|
Fuu int
|
||||||
}{},
|
} `label:"allowEmpty"`
|
||||||
|
}{},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -628,20 +641,23 @@ func TestFill(t *testing.T) {
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "huu", Kind: reflect.String},
|
{Name: "Fii", FieldName: "Fii", Value: "huu", Kind: reflect.String},
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "6", Kind: reflect.Int},
|
{Name: "Fuu", FieldName: "Fuu", Value: "6", Kind: reflect.Int},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo *struct {
|
Foo *struct {
|
||||||
Fii string
|
Fii string
|
||||||
Fuu int
|
Fuu int
|
||||||
} `label:"allowEmpty"`
|
} `label:"allowEmpty"`
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo *struct {
|
element: &struct {
|
||||||
Fii string
|
Foo *struct {
|
||||||
Fuu int
|
Fii string
|
||||||
} `label:"allowEmpty"`
|
Fuu int
|
||||||
}{},
|
} `label:"allowEmpty"`
|
||||||
|
}{},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -657,18 +673,22 @@ func TestFill(t *testing.T) {
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "name1", Value: "hii", Kind: reflect.String},
|
{Name: "name1", Value: "hii", Kind: reflect.String},
|
||||||
{Name: "name2", Value: "huu", Kind: reflect.String},
|
{Name: "name2", Value: "huu", Kind: reflect.String},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo map[string]string
|
Foo map[string]string
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo map[string]string
|
element: &struct {
|
||||||
}{
|
Foo map[string]string
|
||||||
Foo: map[string]string{
|
}{
|
||||||
"name1": "hii",
|
Foo: map[string]string{
|
||||||
"name2": "huu",
|
"name1": "hii",
|
||||||
}},
|
"name2": "huu",
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -696,18 +716,22 @@ func TestFill(t *testing.T) {
|
||||||
{Name: "Fii", FieldName: "Fii", Kind: reflect.String, Value: "huu"},
|
{Name: "Fii", FieldName: "Fii", Kind: reflect.String, Value: "huu"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo map[string]struct{ Fii string }
|
Foo map[string]struct{ Fii string }
|
||||||
}{},
|
}{},
|
||||||
expected: expected{element: &struct {
|
expected: expected{
|
||||||
Foo map[string]struct{ Fii string }
|
element: &struct {
|
||||||
}{
|
Foo map[string]struct{ Fii string }
|
||||||
Foo: map[string]struct{ Fii string }{
|
}{
|
||||||
"name1": {Fii: "hii"},
|
Foo: map[string]struct{ Fii string }{
|
||||||
"name2": {Fii: "huu"},
|
"name1": {Fii: "hii"},
|
||||||
}},
|
"name2": {Fii: "huu"},
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -1186,8 +1210,10 @@ func TestFill(t *testing.T) {
|
||||||
Kind: reflect.Struct,
|
Kind: reflect.Struct,
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String},
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo *InitializedFoo
|
Foo *InitializedFoo
|
||||||
}{},
|
}{},
|
||||||
|
@ -1212,8 +1238,10 @@ func TestFill(t *testing.T) {
|
||||||
Kind: reflect.Struct,
|
Kind: reflect.Struct,
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String},
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo *wrongInitialledFoo
|
Foo *wrongInitialledFoo
|
||||||
}{},
|
}{},
|
||||||
|
@ -1273,8 +1301,10 @@ func TestFill(t *testing.T) {
|
||||||
Kind: reflect.Struct,
|
Kind: reflect.Struct,
|
||||||
Children: []*Node{
|
Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String},
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu", Kind: reflect.String},
|
||||||
}},
|
},
|
||||||
}},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
element: &struct {
|
element: &struct {
|
||||||
Foo struct {
|
Foo struct {
|
||||||
FiiFoo
|
FiiFoo
|
||||||
|
@ -1401,8 +1431,10 @@ func TestFill(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type NamedType string
|
type (
|
||||||
type NamedTypeInt int
|
NamedType string
|
||||||
|
NamedTypeInt int
|
||||||
|
)
|
||||||
|
|
||||||
type InitializedFoo struct {
|
type InitializedFoo struct {
|
||||||
Fii string
|
Fii string
|
||||||
|
|
|
@ -23,9 +23,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo string `description:"text"`
|
Foo string `description:"text"`
|
||||||
}{Foo: "bar"},
|
}{Foo: "bar"},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "bar", Description: "text"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "bar", Description: "text"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -33,9 +34,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo string
|
Foo string
|
||||||
}{Foo: "bar"},
|
}{Foo: "bar"},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -44,10 +46,11 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Foo string
|
Foo string
|
||||||
Fii string
|
Fii string
|
||||||
}{Foo: "bar", Fii: "hii"},
|
}{Foo: "bar", Fii: "hii"},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
||||||
}},
|
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -55,9 +58,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo int
|
Foo int
|
||||||
}{Foo: 1},
|
}{Foo: 1},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -65,9 +69,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo int8
|
Foo int8
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -75,9 +80,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo int16
|
Foo int16
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -85,9 +91,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo int32
|
Foo int32
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -95,9 +102,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo int64
|
Foo int64
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -105,9 +113,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo uint
|
Foo uint
|
||||||
}{Foo: 1},
|
}{Foo: 1},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -115,9 +124,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo uint8
|
Foo uint8
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -125,9 +135,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo uint16
|
Foo uint16
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -135,9 +146,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo uint32
|
Foo uint32
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -145,9 +157,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo uint64
|
Foo uint64
|
||||||
}{Foo: 2},
|
}{Foo: 2},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -155,9 +168,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo float32
|
Foo float32
|
||||||
}{Foo: 1.12},
|
}{Foo: 1.12},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -165,9 +179,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo float64
|
Foo float64
|
||||||
}{Foo: 1.12},
|
}{Foo: 1.12},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -175,9 +190,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
element: struct {
|
element: struct {
|
||||||
Foo bool
|
Foo bool
|
||||||
}{Foo: true},
|
}{Foo: true},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -196,12 +212,13 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu: "huu",
|
Fuu: "huu",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -220,11 +237,12 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
fuu: "huu",
|
fuu: "huu",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
|
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -243,12 +261,13 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu: "huu",
|
Fuu: "huu",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -267,12 +286,13 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu: "huu",
|
Fuu: "huu",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -291,11 +311,12 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu: "huu",
|
Fuu: "huu",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -314,12 +335,13 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu: 4,
|
Fuu: 4,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "6"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "4"},
|
{Name: "Fii", FieldName: "Fii", Value: "6"},
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "4"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -338,12 +360,13 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu: true,
|
Fuu: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "true"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "true"},
|
{Name: "Fii", FieldName: "Fii", Value: "true"},
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "true"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -386,9 +409,10 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
Fuu string
|
Fuu string
|
||||||
}{},
|
}{},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -452,113 +476,127 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
{
|
{
|
||||||
desc: "slice of string",
|
desc: "slice of string",
|
||||||
element: struct{ Bar []string }{Bar: []string{"huu", "hii"}},
|
element: struct{ Bar []string }{Bar: []string{"huu", "hii"}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "huu, hii"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "huu, hii"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of int",
|
desc: "slice of int",
|
||||||
element: struct{ Bar []int }{Bar: []int{4, 2, 3}},
|
element: struct{ Bar []int }{Bar: []int{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of int8",
|
desc: "slice of int8",
|
||||||
element: struct{ Bar []int8 }{Bar: []int8{4, 2, 3}},
|
element: struct{ Bar []int8 }{Bar: []int8{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of int16",
|
desc: "slice of int16",
|
||||||
element: struct{ Bar []int16 }{Bar: []int16{4, 2, 3}},
|
element: struct{ Bar []int16 }{Bar: []int16{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of int32",
|
desc: "slice of int32",
|
||||||
element: struct{ Bar []int32 }{Bar: []int32{4, 2, 3}},
|
element: struct{ Bar []int32 }{Bar: []int32{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of int64",
|
desc: "slice of int64",
|
||||||
element: struct{ Bar []int64 }{Bar: []int64{4, 2, 3}},
|
element: struct{ Bar []int64 }{Bar: []int64{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of uint",
|
desc: "slice of uint",
|
||||||
element: struct{ Bar []uint }{Bar: []uint{4, 2, 3}},
|
element: struct{ Bar []uint }{Bar: []uint{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of uint8",
|
desc: "slice of uint8",
|
||||||
element: struct{ Bar []uint8 }{Bar: []uint8{4, 2, 3}},
|
element: struct{ Bar []uint8 }{Bar: []uint8{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of uint16",
|
desc: "slice of uint16",
|
||||||
element: struct{ Bar []uint16 }{Bar: []uint16{4, 2, 3}},
|
element: struct{ Bar []uint16 }{Bar: []uint16{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of uint32",
|
desc: "slice of uint32",
|
||||||
element: struct{ Bar []uint32 }{Bar: []uint32{4, 2, 3}},
|
element: struct{ Bar []uint32 }{Bar: []uint32{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of uint64",
|
desc: "slice of uint64",
|
||||||
element: struct{ Bar []uint64 }{Bar: []uint64{4, 2, 3}},
|
element: struct{ Bar []uint64 }{Bar: []uint64{4, 2, 3}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of float32",
|
desc: "slice of float32",
|
||||||
element: struct{ Bar []float32 }{Bar: []float32{4.1, 2, 3.2}},
|
element: struct{ Bar []float32 }{Bar: []float32{4.1, 2, 3.2}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of float64",
|
desc: "slice of float64",
|
||||||
element: struct{ Bar []float64 }{Bar: []float64{4.1, 2, 3.2}},
|
element: struct{ Bar []float64 }{Bar: []float64{4.1, 2, 3.2}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "slice of bool",
|
desc: "slice of bool",
|
||||||
element: struct{ Bar []bool }{Bar: []bool{true, false, true}},
|
element: struct{ Bar []bool }{Bar: []bool{true, false, true}},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Bar", FieldName: "Bar", Value: "true, false, true"},
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
}},
|
{Name: "Bar", FieldName: "Bar", Value: "true, false, true"},
|
||||||
|
}},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -708,12 +746,13 @@ func TestEncodeToNode(t *testing.T) {
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
expected: expected{
|
||||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
node: &Node{Name: "traefik", Children: []*Node{
|
||||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||||
|
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||||
|
}},
|
||||||
}},
|
}},
|
||||||
}},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@@ -682,7 +682,8 @@ func TestAddMetadata(t *testing.T) {
 					Children: []*Node{
 						{Name: "Bar", FieldName: "Bar", Value: "bir", Kind: reflect.String},
 						{Name: "Bur", FieldName: "Bur", Value: "fuu", Kind: reflect.String},
-					}},
+					},
+				},
 			},
 		},
 	},
@@ -728,8 +729,10 @@ func TestAddMetadata(t *testing.T) {
 					Kind:      reflect.Struct,
 					Children: []*Node{
 						{Name: "Bur", FieldName: "Bur", Value: "fuu", Kind: reflect.String},
-					}},
-				}},
+					},
+				},
+			},
+		},
 			},
 		},
 	},
@@ -797,22 +800,26 @@ func TestAddMetadata(t *testing.T) {
 					Children: []*Node{
 						{Name: "Fii", FieldName: "Fii", Kind: reflect.String, Value: "fii"},
 						{Name: "Fee", FieldName: "Fee", Kind: reflect.Int, Value: "1"},
-					}},
+					},
+				},
 				{
 					Name:      "Bur",
 					FieldName: "Bur",
 					Kind:      reflect.Struct,
 					Children: []*Node{
 						{Name: "Faa", FieldName: "Faa", Kind: reflect.String, Value: "faa"},
-					}},
-				}},
+					},
+				},
+			},
+		},
 				{
 					Name:      "Fii",
 					FieldName: "Fii",
 					Kind:      reflect.Struct,
 					Children: []*Node{
 						{Name: "FiiBar", FieldName: "FiiBar", Kind: reflect.String, Value: "fiiBar"},
-					}},
+					},
+				},
 			},
 		},
 	},
@@ -168,7 +168,7 @@ func (s *ServiceInfo) AddError(err error, critical bool) {
 
 // UpdateServerStatus sets the status of the server in the ServiceInfo.
 // It is the responsibility of the caller to check that s is not nil.
-func (s *ServiceInfo) UpdateServerStatus(server string, status string) {
+func (s *ServiceInfo) UpdateServerStatus(server, status string) {
 	s.serverStatusMu.Lock()
 	defer s.serverStatusMu.Unlock()
 
@@ -22,8 +22,10 @@ const (
 	serverDown = "DOWN"
 )
 
-var singleton *HealthCheck
-var once sync.Once
+var (
+	singleton *HealthCheck
+	once      sync.Once
+)
 
 // Balancer is the set of operations required to manage the list of servers in a load-balancer.
 type Balancer interface {
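The hunk above folds two adjacent package-level `var` statements into a single block, which is what the stricter gofumpt-style checks in the new linter version prefer. A minimal sketch of the rule, using hypothetical names that are not taken from the Traefik code:

package health

import "sync"

// Grouped form preferred by the stricter formatter: adjacent package-level
// declarations are wrapped in one var ( ... ) block.
var (
	registry map[string]bool // hypothetical example variable
	initOnce sync.Once       // hypothetical example variable
)

// Init lazily creates the registry exactly once.
func Init() {
	initOnce.Do(func() {
		registry = make(map[string]bool)
	})
}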
@@ -16,8 +16,10 @@ import (
 	"github.com/vulcand/oxy/roundrobin"
 )
 
-const healthCheckInterval = 200 * time.Millisecond
-const healthCheckTimeout = 100 * time.Millisecond
+const (
+	healthCheckInterval = 200 * time.Millisecond
+	healthCheckTimeout  = 100 * time.Millisecond
+)
 
 type testHandler struct {
 	done func()
@@ -148,7 +150,7 @@ func TestSetBackendsConfiguration(t *testing.T) {
 			assert.Equal(t, test.expectedNumRemovedServers, lb.numRemovedServers, "removed servers")
 			assert.Equal(t, test.expectedNumUpsertedServers, lb.numUpsertedServers, "upserted servers")
 			// FIXME re add metrics
-			//assert.Equal(t, test.expectedGaugeValue, collectingMetrics.Gauge.GaugeValue, "ServerUp Gauge")
+			// assert.Equal(t, test.expectedGaugeValue, collectingMetrics.Gauge.GaugeValue, "ServerUp Gauge")
 		})
 	}
 }
@@ -6,9 +6,7 @@ import (
 	"github.com/cenkalti/backoff/v4"
 )
 
-var (
-	_ backoff.BackOff = (*BackOff)(nil)
-)
+var _ backoff.BackOff = (*BackOff)(nil)
 
 const (
 	defaultMinJobInterval = 30 * time.Second
@@ -24,21 +24,21 @@ func TestJobBackOff(t *testing.T) {
 	exp.MinJobInterval = testMinJobInterval
 	exp.Reset()
 
-	var expectedResults = []time.Duration{500, 500, 500, 1000, 2000, 4000, 5000, 5000, 500, 1000, 2000, 4000, 5000, 5000}
+	expectedResults := []time.Duration{500, 500, 500, 1000, 2000, 4000, 5000, 5000, 500, 1000, 2000, 4000, 5000, 5000}
 	for i, d := range expectedResults {
 		expectedResults[i] = d * time.Millisecond
 	}
 
 	for i, expected := range expectedResults {
 		// Assert that the next backoff falls in the expected range.
-		var minInterval = expected - time.Duration(testRandomizationFactor*float64(expected))
-		var maxInterval = expected + time.Duration(testRandomizationFactor*float64(expected))
+		minInterval := expected - time.Duration(testRandomizationFactor*float64(expected))
+		maxInterval := expected + time.Duration(testRandomizationFactor*float64(expected))
 
 		if i < 3 || i == 8 {
 			time.Sleep(2 * time.Second)
 		}
 
-		var actualInterval = exp.NextBackOff()
+		actualInterval := exp.NextBackOff()
 		if !(minInterval <= actualInterval && actualInterval <= maxInterval) {
 			t.Error("error")
 		}
@@ -101,7 +101,7 @@ func OpenFile(path string) error {
 	logFilePath = path
 
 	var err error
-	logFile, err = os.OpenFile(logFilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
+	logFile, err = os.OpenFile(logFilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o666)
 	if err != nil {
 		return err
 	}
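The `0666` to `0o666` edits scattered through this commit switch file-permission constants to the explicit octal prefix introduced in Go 1.13; the numeric value is identical, only the spelling changes. A small hedged example (the paths here are made up, not from Traefik):

package main

import (
	"log"
	"os"
)

func main() {
	// 0o755 and the legacy 0755 are the same number; the 0o prefix just makes
	// the octal base explicit, which is what the updated formatter enforces.
	if err := os.MkdirAll("/tmp/example-logs", 0o755); err != nil {
		log.Fatal(err)
	}

	f, err := os.OpenFile("/tmp/example-logs/app.log", os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o666)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
}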
@@ -11,8 +11,10 @@ import (
 	"github.com/go-kit/kit/metrics/statsd"
 )
 
-var statsdClient *statsd.Statsd
-var statsdTicker *time.Ticker
+var (
+	statsdClient *statsd.Statsd
+	statsdTicker *time.Ticker
+)
 
 const (
 	statsdMetricsServiceReqsName = "service.request.total"
@@ -9,9 +9,7 @@ import (
 	"github.com/containous/traefik/v2/pkg/middlewares"
 )
 
-var (
-	_ middlewares.Stateful = &captureResponseWriterWithCloseNotify{}
-)
+var _ middlewares.Stateful = &captureResponseWriterWithCloseNotify{}
 
 type capturer interface {
 	http.ResponseWriter
@@ -19,7 +19,7 @@ type FieldHandler struct {
 }
 
 // NewFieldHandler creates a Field handler.
-func NewFieldHandler(next http.Handler, name string, value string, applyFn FieldApply) http.Handler {
+func NewFieldHandler(next http.Handler, name, value string, applyFn FieldApply) http.Handler {
 	return &FieldHandler{next: next, name: name, value: value, applyFn: applyFn}
 }
 
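Signatures such as `NewFieldHandler(next http.Handler, name string, value string, ...)` are condensed so that consecutive parameters of the same type share one type name. This is purely syntactic; callers are unaffected. A hedged sketch with invented names:

package main

import "fmt"

// joinPair takes two string parameters; writing "name, value string" is
// equivalent to "name string, value string" and is the form the linter prefers.
func joinPair(name, value string) string {
	return fmt.Sprintf("%s=%s", name, value)
}

func main() {
	fmt.Println(joinPair("retry", "3"))
}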
@@ -131,11 +131,11 @@ func NewHandler(config *types.AccessLog) (*Handler, error) {
 func openAccessLogFile(filePath string) (*os.File, error) {
 	dir := filepath.Dir(filePath)
 
-	if err := os.MkdirAll(dir, 0755); err != nil {
+	if err := os.MkdirAll(dir, 0o755); err != nil {
 		return nil, fmt.Errorf("failed to create log path %s: %w", dir, err)
 	}
 
-	file, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0664)
+	file, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o664)
 	if err != nil {
 		return nil, fmt.Errorf("error opening file %s: %w", filePath, err)
 	}
@@ -249,7 +249,7 @@ func (h *Handler) Rotate() error {
 	}
 
 	var err error
-	h.file, err = os.OpenFile(h.config.FilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0664)
+	h.file, err = os.OpenFile(h.config.FilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o664)
 	if err != nil {
 		return err
 	}
@@ -259,7 +259,7 @@ func (h *Handler) Rotate() error {
 	return nil
 }
 
-func silentSplitHostPort(value string) (host string, port string) {
+func silentSplitHostPort(value string) (host, port string) {
 	host, port, err := net.SplitHostPort(value)
 	if err != nil {
 		return value, "-"
@@ -21,7 +21,7 @@ type CommonLogFormatter struct{}
 func (f *CommonLogFormatter) Format(entry *logrus.Entry) ([]byte, error) {
 	b := &bytes.Buffer{}
 
-	var timestamp = defaultValue
+	timestamp := defaultValue
 	if v, ok := entry.Data[StartUTC]; ok {
 		timestamp = v.(time.Time).Format(commonLogTimeFormat)
 	} else if v, ok := entry.Data[StartLocal]; ok {
@@ -52,7 +52,7 @@ func (f *CommonLogFormatter) Format(entry *logrus.Entry) ([]byte, error) {
 	return b.Bytes(), err
 }
 
-func toLog(fields logrus.Fields, key string, defaultValue string, quoted bool) interface{} {
+func toLog(fields logrus.Fields, key, defaultValue string, quoted bool) interface{} {
 	if v, ok := fields[key]; ok {
 		if v == nil {
 			return defaultValue
@@ -73,7 +73,7 @@ func toLog(fields logrus.Fields, key string, defaultValue string, quoted bool) interface{} {
 	return defaultValue
 }
 
-func toLogEntry(s string, defaultValue string, quote bool) string {
+func toLogEntry(s, defaultValue string, quote bool) string {
 	if len(s) == 0 {
 		return defaultValue
 	}
@@ -158,7 +158,7 @@ func (fa *forwardAuth) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
 	fa.next.ServeHTTP(rw, req)
 }
 
-func writeHeader(req *http.Request, forwardReq *http.Request, trustForwardHeader bool) {
+func writeHeader(req, forwardReq *http.Request, trustForwardHeader bool) {
 	utils.CopyHeaders(forwardReq.Header, req.Header)
 	utils.RemoveHeaders(forwardReq.Header, forward.HopHeaders...)
 
@@ -325,7 +325,8 @@ func Test_writeHeader(t *testing.T) {
 				"X-Forwarded-Host": "foo.bar",
 				"X-Forwarded-Uri":  "/path?q=1",
 			},
-		}, {
+		},
+		{
 			name: "trust Forward Header with forwarded request Method",
 			headers: map[string]string{
 				"X-Forwarded-Method": "OPTIONS",
@@ -246,7 +246,8 @@ func TestServeHTTP(t *testing.T) {
 			expectedHeaders: map[string]string{
 				xForwardedHost: "foo.com:8080",
 			},
-		}, {
+		},
+		{
 			desc: "xForwardedServer from req XForwarded",
 			host: "foo.com:8080",
 			expectedHeaders: map[string]string{
@@ -540,7 +540,8 @@ func TestCORSResponses(t *testing.T) {
 			expected: map[string][]string{
 				"Access-Control-Allow-Origin": {"*"},
 			},
-		}, {
+		},
+		{
 			desc: "Test Simple CustomRequestHeaders Not Hijacked by CORS",
 			header: NewHeader(emptyHandler, dynamic.Headers{
 				CustomRequestHeaders: map[string]string{"foo": "bar"},
@@ -7,6 +7,6 @@ import (
 )
 
 // GetLoggerCtx creates a logger context with the middleware fields.
-func GetLoggerCtx(ctx context.Context, middleware string, middlewareType string) context.Context {
+func GetLoggerCtx(ctx context.Context, middleware, middlewareType string) context.Context {
 	return log.With(ctx, log.Str(log.MiddlewareName, middleware), log.Str(log.MiddlewareType, middlewareType))
 }
@@ -235,7 +235,7 @@ func writeParts(ctx context.Context, content io.StringWriter, entries []string,
 	}
 }
 
-func writePart(ctx context.Context, content io.StringWriter, entry string, prefix string) {
+func writePart(ctx context.Context, content io.StringWriter, entry, prefix string) {
 	if len(entry) > 0 {
 		_, err := content.WriteString(fmt.Sprintf("%s=%s%s", prefix, entry, subFieldSeparator))
 		if err != nil {
@@ -24,7 +24,7 @@ type redirect struct {
 }
 
 // New creates a Redirect middleware.
-func newRedirect(next http.Handler, regex string, replacement string, permanent bool, name string) (http.Handler, error) {
+func newRedirect(next http.Handler, regex, replacement string, permanent bool, name string) (http.Handler, error) {
 	re, err := regexp.Compile(regex)
 	if err != nil {
 		return nil, err
@@ -47,7 +47,7 @@ func (hr *Resolver) CNAMEFlatten(ctx context.Context, host string) string {
 	}
 
 	logger := log.FromContext(ctx)
-	var cacheDuration = 0 * time.Second
+	cacheDuration := 0 * time.Second
 	for depth := 0; depth < hr.ResolvDepth; depth++ {
 		resolv, err := cnameResolve(ctx, request, hr.ResolvConfig)
 		if err != nil {
@@ -73,7 +73,7 @@ func (hr *Resolver) CNAMEFlatten(ctx context.Context, host string) string {
 }
 
 // cnameResolve resolves CNAME if exists, and return with the highest TTL.
-func cnameResolve(ctx context.Context, host string, resolvPath string) (*cnameResolv, error) {
+func cnameResolve(ctx context.Context, host, resolvPath string) (*cnameResolv, error) {
 	config, err := dns.ClientConfigFromFile(resolvPath)
 	if err != nil {
 		return nil, fmt.Errorf("invalid resolver configuration file: %s", resolvPath)
@@ -102,7 +102,7 @@ func cnameResolve(ctx context.Context, host string, resolvPath string) (*cnameResolv, error) {
 	return result[0], nil
 }
 
-func getRecord(client *dns.Client, msg *dns.Msg, server string, port string) (*cnameResolv, error) {
+func getRecord(client *dns.Client, msg *dns.Msg, server, port string) (*cnameResolv, error) {
 	resp, _, err := client.Exchange(msg, net.JoinHostPort(server, port))
 	if err != nil {
 		return nil, fmt.Errorf("exchange error for server %s: %w", server, err)
@@ -45,7 +45,8 @@ func TestNewForwarder(t *testing.T) {
 				},
 				OperationName: "forward some-service.domain.tld/some-service.domain.tld",
 			},
-		}, {
+		},
+		{
 			desc:          "Simple Forward Tracer with truncation and hashing",
 			spanNameLimit: 101,
 			tracing: &trackingBackenMock{
@@ -18,12 +18,12 @@ func (n MockTracer) StartSpan(operationName string, opts ...opentracing.StartSpa
 }
 
 // Inject belongs to the Tracer interface.
-func (n MockTracer) Inject(sp opentracing.SpanContext, format interface{}, carrier interface{}) error {
+func (n MockTracer) Inject(sp opentracing.SpanContext, format, carrier interface{}) error {
 	return nil
 }
 
 // Extract belongs to the Tracer interface.
-func (n MockTracer) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) {
+func (n MockTracer) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {
 	return nil, opentracing.ErrSpanContextNotFound
 }
 
@@ -26,7 +26,7 @@ const (
 )
 
 // NewAccount creates an account.
-func NewAccount(ctx context.Context, email string, keyTypeValue string) (*Account, error) {
+func NewAccount(ctx context.Context, email, keyTypeValue string) (*Account, error) {
 	keyType := GetKeyType(ctx, keyTypeValue)
 
 	// Create a user. New accounts need an email and private key to start
@@ -103,7 +103,7 @@ func (s *LocalStore) listenSaveAction() {
 			logger.Error(err)
 		}
 
-		err = ioutil.WriteFile(s.filename, data, 0600)
+		err = ioutil.WriteFile(s.filename, data, 0o600)
 		if err != nil {
 			logger.Error(err)
 		}
@@ -16,7 +16,7 @@ func CheckFile(name string) (bool, error) {
 			if err != nil {
 				return false, err
 			}
-			return false, f.Chmod(0600)
+			return false, f.Chmod(0o600)
 		}
 		return false, err
 	}
@@ -27,7 +27,7 @@ func CheckFile(name string) (bool, error) {
 		return false, err
 	}
 
-	if fi.Mode().Perm()&0077 != 0 {
+	if fi.Mode().Perm()&0o077 != 0 {
 		return false, fmt.Errorf("permissions %o for %s are too open, please use 600", fi.Mode().Perm(), name)
 	}
 
@@ -12,7 +12,7 @@ func CheckFile(name string) (bool, error) {
 		if err != nil {
 			return false, err
 		}
-		return false, f.Chmod(0600)
+		return false, f.Chmod(0o600)
 	}
 	return false, err
 }
@@ -27,10 +27,8 @@ import (
 	"github.com/go-acme/lego/v3/registration"
 )
 
-var (
-	// oscpMustStaple enables OSCP stapling as from https://github.com/go-acme/lego/issues/270.
-	oscpMustStaple = false
-)
+// oscpMustStaple enables OSCP stapling as from https://github.com/go-acme/lego/issues/270.
+var oscpMustStaple = false
 
 // Configuration holds ACME configuration provided by users.
 type Configuration struct {
@@ -145,7 +143,7 @@ func (p *Provider) Init() error {
 	return nil
 }
 
-func isAccountMatchingCaServer(ctx context.Context, accountURI string, serverURI string) bool {
+func isAccountMatchingCaServer(ctx context.Context, accountURI, serverURI string) bool {
 	logger := log.FromContext(ctx)
 
 	aru, err := url.Parse(accountURI)
@@ -492,7 +490,7 @@ func (p *Provider) addResolvingDomains(resolvingDomains []string) {
 	}
 }
 
-func (p *Provider) addCertificateForDomain(domain types.Domain, certificate []byte, key []byte, tlsStore string) {
+func (p *Provider) addCertificateForDomain(domain types.Domain, certificate, key []byte, tlsStore string) {
 	p.certsChan <- &CertAndStore{Certificate: Certificate{Certificate: certificate, Key: key, Domain: domain}, Store: tlsStore}
 }
 
@@ -640,7 +638,6 @@ func (p *Provider) renewCertificates(ctx context.Context) {
 				PrivateKey:  cert.Key,
 				Certificate: cert.Certificate.Certificate,
 			}, true, oscpMustStaple)
-
 			if err != nil {
 				logger.Errorf("Error renewing certificate from LE: %v, %v", cert.Domain, err)
 				continue
@@ -679,7 +676,7 @@ func (p *Provider) getUncheckedDomains(ctx context.Context, domainsToCheck []str
 	return searchUncheckedDomains(ctx, domainsToCheck, allDomains)
 }
 
-func searchUncheckedDomains(ctx context.Context, domainsToCheck []string, existentDomains []string) []string {
+func searchUncheckedDomains(ctx context.Context, domainsToCheck, existentDomains []string) []string {
 	var uncheckedDomains []string
 	for _, domainToCheck := range domainsToCheck {
 		if !isDomainAlreadyChecked(domainToCheck, existentDomains) {
@@ -95,7 +95,7 @@ func taskSlot(slot int) func(*swarm.Task) {
 }
 }

-func taskNetworkAttachment(id string, name string, driver string, addresses []string) func(*swarm.Task) {
+func taskNetworkAttachment(id, name, driver string, addresses []string) func(*swarm.Task) {
 return func(task *swarm.Task) {
 task.NetworksAttachments = append(task.NetworksAttachments, swarm.NetworkAttachment{
 Network: swarm.Network{

@@ -350,7 +350,6 @@ func (p Provider) getIPAddress(ctx context.Context, container dockerData) string
 // the network specified on the current container.
 containerParsed := parseContainer(containerInspected)
 extraConf, err := p.getConfiguration(containerParsed)
-
 if err != nil {
 logger.Warnf("Unable to get IP address for container %s : failed to get extra configuration for container %s: %s", container.Name, containerInspected.Name, err)
 return ""

@@ -55,7 +55,7 @@ func getStringMultipleStrict(labels map[string]string, labelNames ...string) (ma
 }

 // getStringValue get string value associated to a label.
-func getStringValue(labels map[string]string, labelName string, defaultValue string) string {
+func getStringValue(labels map[string]string, labelName, defaultValue string) string {
 if value, ok := labels[labelName]; ok && len(value) > 0 {
 return value
 }

@@ -49,7 +49,6 @@ func (p *Provider) Init() error {
 // using the given configuration channel.
 func (p *Provider) Provide(configurationChan chan<- dynamic.Message, pool *safe.Pool) error {
 configuration, err := p.BuildConfiguration()
-
 if err != nil {
 return err
 }
@@ -357,7 +356,7 @@ func (p *Provider) CreateConfiguration(ctx context.Context, filename string, fun
 return nil, fmt.Errorf("error reading configuration file: %s - %w", filename, err)
 }

-var defaultFuncMap = sprig.TxtFuncMap()
+defaultFuncMap := sprig.TxtFuncMap()
 defaultFuncMap["normalize"] = provider.Normalize
 defaultFuncMap["split"] = strings.Split
 for funcID, funcElement := range funcMap {

@@ -377,7 +376,7 @@ func (p *Provider) CreateConfiguration(ctx context.Context, filename string, fun
 return nil, err
 }

-var renderedTemplate = buffer.String()
+renderedTemplate := buffer.String()
 if p.DebugLogGeneratedTemplate {
 logger := log.FromContext(ctx)
 logger.Debugf("Template content: %s", tmplContent)
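The two hunks above swap var name = value inside a function body for the short := form; behaviour and inferred types are unchanged. A quick sketch with hypothetical variable names:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Form the linter flags inside function bodies.
	var greeting = strings.ToUpper("hello")

	// Preferred short declaration; the inferred type is still string.
	shout := strings.ToUpper("hello")

	fmt.Println(greeting == shout) // true
}
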
@@ -397,7 +396,7 @@ func (p *Provider) DecodeConfiguration(filename string) (*dynamic.Configuration,
 return p.decodeConfiguration(filename, content)
 }

-func (p *Provider) decodeConfiguration(filePath string, content string) (*dynamic.Configuration, error) {
+func (p *Provider) decodeConfiguration(filePath, content string) (*dynamic.Configuration, error) {
 configuration := &dynamic.Configuration{
 HTTP: &dynamic.HTTPConfiguration{
 Routers: make(map[string]*dynamic.Router),

@@ -292,7 +292,7 @@ func createTempDir(t *testing.T, dir string) string {
 }

 func copyFile(srcPath, dstPath string) error {
-dst, err := os.OpenFile(dstPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
+dst, err := os.OpenFile(dstPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o666)
 if err != nil {
 return err
 }
@@ -35,12 +35,14 @@ import (
 utilruntime "k8s.io/apimachinery/pkg/util/runtime"
 )

-var scheme = runtime.NewScheme()
-var codecs = serializer.NewCodecFactory(scheme)
-var parameterCodec = runtime.NewParameterCodec(scheme)
-var localSchemeBuilder = runtime.SchemeBuilder{
-	traefikv1alpha1.AddToScheme,
-}
+var (
+	scheme = runtime.NewScheme()
+	codecs = serializer.NewCodecFactory(scheme)
+	parameterCodec = runtime.NewParameterCodec(scheme)
+	localSchemeBuilder = runtime.SchemeBuilder{
+		traefikv1alpha1.AddToScheme,
+	}
+)

 // AddToScheme adds all types of this clientset into the given scheme. This allows composition
 // of clientsets, like in:

@@ -35,12 +35,14 @@ import (
 utilruntime "k8s.io/apimachinery/pkg/util/runtime"
 )

-var Scheme = runtime.NewScheme()
-var Codecs = serializer.NewCodecFactory(Scheme)
-var ParameterCodec = runtime.NewParameterCodec(Scheme)
-var localSchemeBuilder = runtime.SchemeBuilder{
-	traefikv1alpha1.AddToScheme,
-}
+var (
+	Scheme = runtime.NewScheme()
+	Codecs = serializer.NewCodecFactory(Scheme)
+	ParameterCodec = runtime.NewParameterCodec(Scheme)
+	localSchemeBuilder = runtime.SchemeBuilder{
+		traefikv1alpha1.AddToScheme,
+	}
+)

 // AddToScheme adds all types of this clientset into the given scheme. This allows composition
 // of clientsets, like in:
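Both generated clientset files above merge a run of package-level var statements into a single grouped block; names, values, and order stay the same. A compilable sketch of the same rewrite on unrelated, made-up variables:

package main

import "fmt"

// Before the rewrite these would be separate statements:
//
//	var defaultRetries = 3
//	var defaultTimeout = "30s"
//
// Grouping adjacent package-level declarations is the form the linter prefers.
var (
	defaultRetries = 3
	defaultTimeout = "30s"
)

func main() {
	fmt.Println(defaultRetries, defaultTimeout)
}
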
@@ -83,7 +83,6 @@ func (c *FakeIngressRoutes) List(opts v1.ListOptions) (result *v1alpha1.IngressR
 func (c *FakeIngressRoutes) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(ingressroutesResource, c.ns, opts))
-
 }

 // Create takes the representation of a ingressRoute and creates it. Returns the server's representation of the ingressRoute, and an error, if there is any.

@@ -83,7 +83,6 @@ func (c *FakeIngressRouteTCPs) List(opts v1.ListOptions) (result *v1alpha1.Ingre
 func (c *FakeIngressRouteTCPs) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(ingressroutetcpsResource, c.ns, opts))
-
 }

 // Create takes the representation of a ingressRouteTCP and creates it. Returns the server's representation of the ingressRouteTCP, and an error, if there is any.

@@ -83,7 +83,6 @@ func (c *FakeIngressRouteUDPs) List(opts v1.ListOptions) (result *v1alpha1.Ingre
 func (c *FakeIngressRouteUDPs) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(ingressrouteudpsResource, c.ns, opts))
-
 }

 // Create takes the representation of a ingressRouteUDP and creates it. Returns the server's representation of the ingressRouteUDP, and an error, if there is any.

@@ -83,7 +83,6 @@ func (c *FakeMiddlewares) List(opts v1.ListOptions) (result *v1alpha1.Middleware
 func (c *FakeMiddlewares) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(middlewaresResource, c.ns, opts))
-
 }

 // Create takes the representation of a middleware and creates it. Returns the server's representation of the middleware, and an error, if there is any.

@@ -83,7 +83,6 @@ func (c *FakeTLSOptions) List(opts v1.ListOptions) (result *v1alpha1.TLSOptionLi
 func (c *FakeTLSOptions) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(tlsoptionsResource, c.ns, opts))
-
 }

 // Create takes the representation of a tLSOption and creates it. Returns the server's representation of the tLSOption, and an error, if there is any.

@@ -83,7 +83,6 @@ func (c *FakeTLSStores) List(opts v1.ListOptions) (result *v1alpha1.TLSStoreList
 func (c *FakeTLSStores) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(tlsstoresResource, c.ns, opts))
-
 }

 // Create takes the representation of a tLSStore and creates it. Returns the server's representation of the tLSStore, and an error, if there is any.

@@ -83,7 +83,6 @@ func (c *FakeTraefikServices) List(opts v1.ListOptions) (result *v1alpha1.Traefi
 func (c *FakeTraefikServices) Watch(opts v1.ListOptions) (watch.Interface, error) {
 return c.Fake.
 InvokesWatch(testing.NewWatchAction(traefikservicesResource, c.ns, opts))
-
 }

 // Create takes the representation of a traefikService and creates it. Returns the server's representation of the traefikService, and an error, if there is any.
@@ -101,7 +101,6 @@ func (p *Provider) Provide(configurationChan chan<- dynamic.Message, pool *safe.
 pool.GoCtx(func(ctxPool context.Context) {
 operation := func() error {
 eventsChan, err := k8sClient.WatchAll(p.Namespaces, ctxPool.Done())
-
 if err != nil {
 logger.Errorf("Error watching kubernetes events: %v", err)
 timer := time.NewTimer(1 * time.Second)

@@ -627,7 +626,7 @@ func makeID(namespace, name string) string {
 return namespace + "-" + name
 }

-func shouldProcessIngress(ingressClass string, ingressClassAnnotation string) bool {
+func shouldProcessIngress(ingressClass, ingressClassAnnotation string) bool {
 return ingressClass == ingressClassAnnotation ||
 (len(ingressClass) == 0 && ingressClassAnnotation == traefikDefaultIngressClass)
 }

@@ -432,7 +432,7 @@ func getTLSHTTP(ctx context.Context, ingressRoute *v1alpha1.IngressRoute, k8sCli

 // parseServiceProtocol parses the scheme, port name, and number to determine the correct protocol.
 // an error is returned if the scheme provided is invalid.
-func parseServiceProtocol(providedScheme string, portName string, portNumber int32) (string, error) {
+func parseServiceProtocol(providedScheme, portName string, portNumber int32) (string, error) {
 switch providedScheme {
 case httpProtocol, httpsProtocol, "h2c":
 return providedScheme, nil
@@ -2940,7 +2940,8 @@ func TestLoadIngressRoutes(t *testing.T) {
 EntryPoints: []string{"foo"},
 Service: "default-test-route-6f97418635c7e18853da",
 Rule: "Host(`foo.com`)",
-}},
+},
+},
 Middlewares: map[string]*dynamic.Middleware{},
 Services: map[string]*dynamic.Service{
 "default-test-route-6f97418635c7e18853da": {

@@ -2976,7 +2977,8 @@ func TestLoadIngressRoutes(t *testing.T) {
 EntryPoints: []string{"foo"},
 Service: "default-test-route-6f97418635c7e18853da",
 Rule: "Host(`foo.com`)",
-}},
+},
+},
 Middlewares: map[string]*dynamic.Middleware{},
 Services: map[string]*dynamic.Service{
 "default-test-route-6f97418635c7e18853da": {

@@ -3012,7 +3014,8 @@ func TestLoadIngressRoutes(t *testing.T) {
 EntryPoints: []string{"foo"},
 Service: "default-test-route-6f97418635c7e18853da",
 Rule: "Host(`foo.com`)",
-}},
+},
+},
 Middlewares: map[string]*dynamic.Middleware{},
 Services: map[string]*dynamic.Service{
 "default-test-route-6f97418635c7e18853da": {
@@ -107,6 +107,7 @@ func loadUDPServers(client Client, namespace string, svc v1alpha1.ServiceUDP) ([

 var portSpec *corev1.ServicePort
 for _, p := range service.Spec.Ports {
+p := p
 if svc.Port == p.Port {
 portSpec = &p
 break
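The added p := p line is the usual fix for taking the address of a range variable: without a per-iteration copy, portSpec = &p would keep pointing at the one loop variable that gets overwritten on each pass. A self-contained sketch of the bug and the fix, using made-up types rather than this provider's:

package main

import "fmt"

type port struct {
	Name string
	Port int32
}

func main() {
	ports := []port{{"web", 80}, {"metrics", 8080}}

	var wrong []*port
	var right []*port

	for _, p := range ports {
		// Before Go 1.22 the range variable p is reused on every
		// iteration, so all of these pointers alias one location.
		wrong = append(wrong, &p)

		// The fix shown in the hunk above: shadow p with a copy
		// before taking its address.
		p := p
		right = append(right, &p)
	}

	// With a pre-1.22 toolchain the first line prints "metrics metrics";
	// the second always prints "web metrics".
	fmt.Println(wrong[0].Name, wrong[1].Name)
	fmt.Println(right[0].Name, right[1].Name)
}
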
@@ -319,7 +319,7 @@ func buildHostRule(host string) string {
 return "Host(`" + host + "`)"
 }

-func shouldProcessIngress(ingressClass string, ingressClassAnnotation string) bool {
+func shouldProcessIngress(ingressClass, ingressClassAnnotation string) bool {
 return ingressClass == ingressClassAnnotation ||
 (len(ingressClass) == 0 && ingressClassAnnotation == traefikDefaultIngressClass)
 }

@@ -129,7 +129,7 @@ func (_m *Marathon) ApplicationBy(name string, opts *marathon.GetAppOpts) (*mara
 }

 // ApplicationByVersion provides a mock function with given fields: name, version
-func (_m *Marathon) ApplicationByVersion(name string, version string) (*marathon.Application, error) {
+func (_m *Marathon) ApplicationByVersion(name, version string) (*marathon.Application, error) {
 ret := _m.Called(name, version)

 var r0 *marathon.Application

@@ -394,7 +394,7 @@ func (_m *Marathon) DeletePod(name string, force bool) (*marathon.DeploymentID,
 }

 // DeletePodInstance provides a mock function with given fields: name, instance
-func (_m *Marathon) DeletePodInstance(name string, instance string) (*marathon.PodInstance, error) {
+func (_m *Marathon) DeletePodInstance(name, instance string) (*marathon.PodInstance, error) {
 ret := _m.Called(name, instance)

 var r0 *marathon.PodInstance

@@ -583,7 +583,7 @@ func (_m *Marathon) GroupsBy(opts *marathon.GetGroupOpts) (*marathon.Groups, err
 }

 // HasApplicationVersion provides a mock function with given fields: name, version
-func (_m *Marathon) HasApplicationVersion(name string, version string) (bool, error) {
+func (_m *Marathon) HasApplicationVersion(name, version string) (bool, error) {
 ret := _m.Called(name, version)

 var r0 bool

@@ -817,7 +817,7 @@ func (_m *Marathon) Pod(name string) (*marathon.Pod, error) {
 }

 // PodByVersion provides a mock function with given fields: name, version
-func (_m *Marathon) PodByVersion(name string, version string) (*marathon.Pod, error) {
+func (_m *Marathon) PodByVersion(name, version string) (*marathon.Pod, error) {
 ret := _m.Called(name, version)

 var r0 *marathon.Pod
@@ -209,7 +209,8 @@ func Test_createConfiguration(t *testing.T) {
 },
 },
 },
-}, {
+},
+{
 desc: "redirection_port.json",
 staticCfg: static.Configuration{
 EntryPoints: map[string]*static.EntryPoint{

@@ -248,7 +249,7 @@ func Test_createConfiguration(t *testing.T) {
 newJSON, err := json.MarshalIndent(cfg, "", " ")
 require.NoError(t, err)

-err = ioutil.WriteFile(filename, newJSON, 0644)
+err = ioutil.WriteFile(filename, newJSON, 0o644)
 require.NoError(t, err)
 }

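The second hunk above sits in what looks like a golden-file update path: the test re-marshals the configuration and, when asked to, rewrites the expected JSON with 0o644 permissions. A generic sketch of that pattern under assumptions of my own (the -update flag, the testdata/expected.json path, and the payload are all invented for illustration):

package golden

import (
	"encoding/json"
	"flag"
	"io/ioutil"
	"path/filepath"
	"testing"
)

// update is a hypothetical flag: `go test -update` regenerates the
// expected file instead of only comparing against it.
var update = flag.Bool("update", false, "update golden files")

func TestGolden(t *testing.T) {
	got, err := json.MarshalIndent(map[string]string{"hello": "world"}, "", "  ")
	if err != nil {
		t.Fatal(err)
	}

	golden := filepath.Join("testdata", "expected.json")

	if *update {
		// 0o644: owner read/write, group and others read-only.
		if err := ioutil.WriteFile(golden, got, 0o644); err != nil {
			t.Fatal(err)
		}
	}

	want, err := ioutil.ReadFile(golden)
	if err != nil {
		t.Fatal(err)
	}
	if string(want) != string(got) {
		t.Fatalf("golden file mismatch:\nwant: %s\ngot:  %s", want, got)
	}
}
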
@@ -8,6 +8,6 @@ import (
 )

 // getLogger creates a logger configured with the middleware fields.
-func getLogger(ctx context.Context, middleware string, middlewareType string) logrus.FieldLogger {
+func getLogger(ctx context.Context, middleware, middlewareType string) logrus.FieldLogger {
 return log.FromContext(ctx).WithField(log.MiddlewareName, middleware).WithField(log.MiddlewareType, middlewareType)
 }

@@ -11,7 +11,7 @@ import (
 const cookieNameLength = 6

 // GetName of a cookie.
-func GetName(cookieName string, backendName string) string {
+func GetName(cookieName, backendName string) string {
 if len(cookieName) != 0 {
 return sanitizeName(cookieName)
 }

@@ -40,6 +40,6 @@ func GetQualifiedName(ctx context.Context, elementName string) string {
 }

 // MakeQualifiedName Creates a qualified name for an element.
-func MakeQualifiedName(providerName string, elementName string) string {
+func MakeQualifiedName(providerName, elementName string) string {
 return elementName + "@" + providerName
 }
@@ -90,7 +90,7 @@ type nameAndConfig struct {
 TLSConfig *tls.Config
 }

-func (m *Manager) buildEntryPointHandler(ctx context.Context, configs map[string]*runtime.TCPRouterInfo, configsHTTP map[string]*runtime.RouterInfo, handlerHTTP http.Handler, handlerHTTPS http.Handler) (*tcp.Router, error) {
+func (m *Manager) buildEntryPointHandler(ctx context.Context, configs map[string]*runtime.TCPRouterInfo, configsHTTP map[string]*runtime.RouterInfo, handlerHTTP, handlerHTTPS http.Handler) (*tcp.Router, error) {
 router := &tcp.Router{}
 router.HTTPHandler(handlerHTTP)

@@ -379,7 +379,6 @@ func buildProxyProtocolListener(ctx context.Context, entryPoint *static.EntryPoi

 func buildListener(ctx context.Context, entryPoint *static.EntryPoint) (net.Listener, error) {
 listener, err := net.Listen("tcp", entryPoint.GetAddress())
-
 if err != nil {
 return nil, fmt.Errorf("error opening listener: %w", err)
 }

@@ -27,7 +27,7 @@ type InternalHandlers struct {
 }

 // NewInternalHandlers creates a new InternalHandlers.
-func NewInternalHandlers(api func(configuration *runtime.Configuration) http.Handler, configuration *runtime.Configuration, rest http.Handler, metricsHandler http.Handler, pingHandler http.Handler, dashboard http.Handler, next serviceManager) *InternalHandlers {
+func NewInternalHandlers(api func(configuration *runtime.Configuration) http.Handler, configuration *runtime.Configuration, rest, metricsHandler, pingHandler, dashboard http.Handler, next serviceManager) *InternalHandlers {
 var apiHandler http.Handler
 if api != nil {
 apiHandler = api(configuration)
@@ -63,7 +63,7 @@ func TestWebSocketPingPong(t *testing.T) {

 require.NoError(t, err)

-var upgrader = gorillawebsocket.Upgrader{
+upgrader := gorillawebsocket.Upgrader{
 HandshakeTimeout: 10 * time.Second,
 CheckOrigin: func(*http.Request) bool {
 return true

@@ -670,7 +670,7 @@ func (w *websocketRequest) send() (string, error) {
 if _, err := conn.Write([]byte(w.Data)); err != nil {
 return "", err
 }
-var msg = make([]byte, 512)
+msg := make([]byte, 512)
 var n int
 n, err = conn.Read(msg)
 if err != nil {

@@ -118,7 +118,7 @@ func (c CertificateStore) ResetCache() {
 }

 // MatchDomain return true if a domain match the cert domain.
-func MatchDomain(domain string, certDomain string) bool {
+func MatchDomain(domain, certDomain string) bool {
 if domain == certDomain {
 return true
 }

@@ -81,7 +81,7 @@ func (m *Manager) UpdateConfigs(ctx context.Context, stores map[string]Store, co
 }

 // Get gets the TLS configuration to use for a given store / configuration.
-func (m *Manager) Get(storeName string, configName string) (*tls.Config, error) {
+func (m *Manager) Get(storeName, configName string) (*tls.Config, error) {
 m.lock.RLock()
 defer m.lock.RUnlock()

@@ -80,12 +80,12 @@ func (t *Tracing) StartSpanf(r *http.Request, spanKind ext.SpanKindEnum, opPrefi
 }

 // Inject delegates to opentracing.Tracer.
-func (t *Tracing) Inject(sm opentracing.SpanContext, format interface{}, carrier interface{}) error {
+func (t *Tracing) Inject(sm opentracing.SpanContext, format, carrier interface{}) error {
 return t.tracer.Inject(sm, format, carrier)
 }

 // Extract delegates to opentracing.Tracer.
-func (t *Tracing) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) {
+func (t *Tracing) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {
 return t.tracer.Extract(format, carrier)
 }

@@ -30,7 +30,7 @@ func (d *Domain) Set(domains []string) {
 }

 // MatchDomain returns true if a domain match the cert domain.
-func MatchDomain(domain string, certDomain string) bool {
+func MatchDomain(domain, certDomain string) bool {
 if domain == certDomain {
 return true
 }

@@ -113,7 +113,7 @@ func checkFieldValue(value string, defaultKeep bool) bool {
 }
 }

-func checkFieldHeaderValue(value string, defaultValue string) string {
+func checkFieldHeaderValue(value, defaultValue string) string {
 if value == AccessLogKeep || value == AccessLogDrop || value == AccessLogRedact {
 return value
 }
@@ -24,11 +24,9 @@ var (
 // Handler expose version routes.
 type Handler struct{}

-var (
-	templatesRenderer = render.New(render.Options{
-		Directory: "nowhere",
-	})
-)
+var templatesRenderer = render.New(render.Options{
+	Directory: "nowhere",
+})

 // Append adds version routes on a router.
 func (v Handler) Append(router *mux.Router) {