Merge remote-tracking branch 'upstream/v2.2' into mrg-current-v2.2

commit 73ca7ad0c1
156 changed files with 1768 additions and 892 deletions
@@ -52,6 +52,8 @@
  "testpackage", # Too strict
  "goerr113", # Too strict
  "nestif", # Too many false-positive.
  "noctx", # Too strict
  "exhaustive", # Too strict
]

[issues]
CHANGELOG.md | 52

@@ -1,3 +1,55 @@
## [v2.2.3](https://github.com/containous/traefik/tree/v2.2.3) (2020-07-09)
[All Commits](https://github.com/containous/traefik/compare/v2.2.2...v2.2.3)

**Bug fixes:**
- **[middleware]** Fix panic when using chain middleware. ([#7016](https://github.com/containous/traefik/pull/7016) by [juliens](https://github.com/juliens))

## [v2.2.2](https://github.com/containous/traefik/tree/v2.2.2) (2020-07-08)
[All Commits](https://github.com/containous/traefik/compare/v2.2.1...v2.2.2)

**Bug fixes:**
- **[acme]** Update go-acme/lego to v3.8.0 ([#6988](https://github.com/containous/traefik/pull/6988) by [ldez](https://github.com/ldez))
- **[acme]** Fix triggering multiple concurrent requests to ACME ([#6939](https://github.com/containous/traefik/pull/6939) by [ddtmachado](https://github.com/ddtmachado))
- **[acme]** Update go-acme/lego to v3.7.0 ([#6792](https://github.com/containous/traefik/pull/6792) by [ldez](https://github.com/ldez))
- **[acme]** added required quotes to domains config ([#6867](https://github.com/containous/traefik/pull/6867) by [tompson](https://github.com/tompson))
- **[authentication,logs,middleware]** Provide username in log data on auth failure ([#6827](https://github.com/containous/traefik/pull/6827) by [rtribotte](https://github.com/rtribotte))
- **[docker]** Use specified network for "container" network mode ([#6763](https://github.com/containous/traefik/pull/6763) by [bjeanes](https://github.com/bjeanes))
- **[k8s,k8s/crd]** Remove checkStringQuoteValidity in loadIngressRouteConf ([#6775](https://github.com/containous/traefik/pull/6775) by [fefe982](https://github.com/fefe982))
- **[middleware,websocket]** Fix wss in x-forwarded-proto ([#6752](https://github.com/containous/traefik/pull/6752) by [juliens](https://github.com/juliens))
- **[middleware]** internal handlers: support for response modifiers ([#6750](https://github.com/containous/traefik/pull/6750) by [mpl](https://github.com/mpl))
- **[middleware]** Fix ipv6 handling in redirect middleware ([#6902](https://github.com/containous/traefik/pull/6902) by [rtribotte](https://github.com/rtribotte))
- **[middleware]** refactor X-Forwarded-Proto ([#6863](https://github.com/containous/traefik/pull/6863) by [jcgruenhage](https://github.com/jcgruenhage))
- **[provider]** Fix race condition issues with provided dynamic configuration ([#6979](https://github.com/containous/traefik/pull/6979) by [kevinpollet](https://github.com/kevinpollet))
- **[rules,server,tls]** Disable domain fronting ([#7008](https://github.com/containous/traefik/pull/7008) by [rtribotte](https://github.com/rtribotte))
- **[udp]** Fix mem leak on UDP connections ([#6815](https://github.com/containous/traefik/pull/6815) by [ddtmachado](https://github.com/ddtmachado))
- **[udp]** Avoid overwriting already received UDP messages ([#6797](https://github.com/containous/traefik/pull/6797) by [cbachert](https://github.com/cbachert))
- **[webui]** Add missing accessControlAllowOrigin list to middleware view ([#6747](https://github.com/containous/traefik/pull/6747) by [barthez](https://github.com/barthez))

**Documentation:**
- **[acme]** Fix doc url for Aurora DNS provider ([#6899](https://github.com/containous/traefik/pull/6899) by [rtribotte](https://github.com/rtribotte))
- **[acme]** Fix acme.md typo ([#6817](https://github.com/containous/traefik/pull/6817) by [juliocc](https://github.com/juliocc))
- **[acme]** fix certResolver typo ([#6983](https://github.com/containous/traefik/pull/6983) by [DavidBadura](https://github.com/DavidBadura))
- **[acme]** Fix statement about lego _FILE env var ([#6964](https://github.com/containous/traefik/pull/6964) by [solvaholic](https://github.com/solvaholic))
- **[acme]** Improve acme CLI options in Let's Encrypt documentation ([#6762](https://github.com/containous/traefik/pull/6762) by [netoax](https://github.com/netoax))
- **[docker]** fix a broken link on Docker plugins documentation ([#6908](https://github.com/containous/traefik/pull/6908) by [jbdoumenjou](https://github.com/jbdoumenjou))
- **[docker]** Fix healthcheck.interval in docs ([#6847](https://github.com/containous/traefik/pull/6847) by [OndrejIT](https://github.com/OndrejIT))
- **[k8s,k8s/ingress]** Remove redundant paragraph in Kubernetes ingress documentation ([#6806](https://github.com/containous/traefik/pull/6806) by [lpfann](https://github.com/lpfann))
- **[k8s,k8s/ingress]** Fix sticky cookie ingress annotation doc ([#6938](https://github.com/containous/traefik/pull/6938) by [rtribotte](https://github.com/rtribotte))
- **[k8s]** fixing typo in Provider KubernetesIngress at Routing documentation ([#6845](https://github.com/containous/traefik/pull/6845) by [sw360cab](https://github.com/sw360cab))
- **[k8s]** Update kubernetes-crd.md ([#6878](https://github.com/containous/traefik/pull/6878) by [rherrick](https://github.com/rherrick))
- **[logs]** Fixed incorrect logging parameter in documentation ([#6819](https://github.com/containous/traefik/pull/6819) by [cplewnia](https://github.com/cplewnia))
- **[logs]** Use "headers" instead of "header" in access log docs ([#6836](https://github.com/containous/traefik/pull/6836) by [bradjones1](https://github.com/bradjones1))
- **[middleware,k8s/crd]** Fix Headers middleware documentation, usage of proper bool ([#6928](https://github.com/containous/traefik/pull/6928) by [rtribotte](https://github.com/rtribotte))
- **[middleware]** Improve redirectScheme documentation ([#6769](https://github.com/containous/traefik/pull/6769) by [dtomcej](https://github.com/dtomcej))
- **[middleware]** Update basicauth.md ([#6967](https://github.com/containous/traefik/pull/6967) by [vitalets](https://github.com/vitalets))
- Update Dashboard examples and move it after 'Router Rule' section ([#6874](https://github.com/containous/traefik/pull/6874) by [ddtmachado](https://github.com/ddtmachado))
- Fix log field names in documentation ([#6952](https://github.com/containous/traefik/pull/6952) by [gysel](https://github.com/gysel))
- Minor fix to Go templating documentation ([#6977](https://github.com/containous/traefik/pull/6977) by [PCM2](https://github.com/PCM2))
- Add rtribotte to maintainers ([#6936](https://github.com/containous/traefik/pull/6936) by [emilevauge](https://github.com/emilevauge))
- Update Copyright ([#6795](https://github.com/containous/traefik/pull/6795) by [mmatur](https://github.com/mmatur))
- fix: dead link. ([#6876](https://github.com/containous/traefik/pull/6876) by [ldez](https://github.com/ldez))
- Fix v1-> v2 migration: unify domain name in documentation example ([#6904](https://github.com/containous/traefik/pull/6904) by [sinacek](https://github.com/sinacek))

## [v2.2.1](https://github.com/containous/traefik/tree/v2.2.1) (2020-04-29)
[All Commits](https://github.com/containous/traefik/compare/v2.2.0...v2.2.1)
@@ -19,7 +19,7 @@ RUN mkdir -p /usr/local/bin \
    && chmod +x /usr/local/bin/go-bindata

# Download golangci-lint binary to bin folder in $GOPATH
RUN curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s -- -b $GOPATH/bin v1.26.0
RUN curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s -- -b $GOPATH/bin v1.28.0

# Download misspell binary to bin folder in $GOPATH
RUN curl -sfL https://raw.githubusercontent.com/client9/misspell/master/install-misspell.sh | bash -s -- -b $GOPATH/bin v0.3.4
@@ -25,6 +25,7 @@ import (
	"github.com/containous/traefik/v2/pkg/provider/acme"
	"github.com/containous/traefik/v2/pkg/provider/aggregator"
	"github.com/containous/traefik/v2/pkg/provider/traefik"
	"github.com/containous/traefik/v2/pkg/rules"
	"github.com/containous/traefik/v2/pkg/safe"
	"github.com/containous/traefik/v2/pkg/server"
	"github.com/containous/traefik/v2/pkg/server/middleware"
@@ -161,6 +162,8 @@ func runCmd(staticConfiguration *static.Configuration) error {
}

func setupServer(staticConfiguration *static.Configuration) (*server.Server, error) {
	rules.EnableDomainFronting(staticConfiguration.Global.InsecureSNI)

	providerAggregator := aggregator.NewProviderAggregator(*staticConfiguration.Providers)

	// adds internal provider
@@ -403,7 +406,7 @@ func configureLogging(staticConfiguration *static.Configuration) {
	if len(logFile) > 0 {
		dir := filepath.Dir(logFile)

		if err := os.MkdirAll(dir, 0755); err != nil {
		if err := os.MkdirAll(dir, 0o755); err != nil {
			log.WithoutContext().Errorf("Failed to create log path %s: %s", dir, err)
		}
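This hunk only changes the octal literal style (`0755` becomes `0o755`, the Go 1.13 `0o` prefix); the permission bits are identical. A quick, hedged sanity check, separate from the diff itself:

```go
package main

import "fmt"

func main() {
	// The 0o prefix and the bare leading zero spell the same octal values.
	fmt.Println(0o755 == 0755) // true
	fmt.Println(0o666 == 0666) // true
	fmt.Println(0o600 == 0600) // true
}
```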
@@ -16,6 +16,7 @@
* Gérald Croës [@geraldcroes](https://github.com/geraldcroes)
* Jean-Baptiste Doumenjou [@jbdoumenjou](https://github.com/jbdoumenjou)
* Mathieu Lonjaret [@mpl](https://github.com/mpl)
* Romain Tribotté [@rtribotte](https://github.com/rtribotte)

## Contributions Daily Meeting
@@ -275,13 +275,16 @@ Here is a list of supported `providers`, that can automate the DNS verification,
along with the required environment variables and their [wildcard & root domain support](#wildcard-domains).
Do not hesitate to complete it.

Every lego environment variable can be overridden by their respective `_FILE` counterpart, which should have a filepath to a file that contains the secret as its value.
Many lego environment variables can be overridden by their respective `_FILE` counterpart, which should have a filepath to a file that contains the secret as its value.
For example, `CF_API_EMAIL_FILE=/run/secrets/traefik_cf-api-email` could be used to provide a Cloudflare API email address as a Docker secret named `traefik_cf-api-email`.

For complete details, refer to your provider's _Additional configuration_ link.

| Provider Name | Provider Code | Environment Variables | |
|---------------|---------------|-----------------------|---|
| [ACME DNS](https://github.com/joohoi/acme-dns) | `acme-dns` | `ACME_DNS_API_BASE`, `ACME_DNS_STORAGE_PATH` | [Additional configuration](https://go-acme.github.io/lego/dns/acme-dns) |
| [Alibaba Cloud](https://www.alibabacloud.com) | `alidns` | `ALICLOUD_ACCESS_KEY`, `ALICLOUD_SECRET_KEY`, `ALICLOUD_REGION_ID` | [Additional configuration](https://go-acme.github.io/lego/dns/alidns) |
| [ArvanCloud](https://arvancloud.com) | `arvancloud` | `ARVANCLOUD_API_KEY` | [Additional configuration](https://go-acme.github.io/lego/dns/arvancloud) |
| [Auroradns](https://www.pcextreme.com/dns-health-checks) | `auroradns` | `AURORA_USER_ID`, `AURORA_KEY`, `AURORA_ENDPOINT` | [Additional configuration](https://go-acme.github.io/lego/dns/auroradns) |
| [Autodns](https://www.internetx.com/domains/autodns/) | `autodns` | `AUTODNS_API_USER`, `AUTODNS_API_PASSWORD` | [Additional configuration](https://go-acme.github.io/lego/dns/autodns) |
| [Azure](https://azure.microsoft.com/services/dns/) | `azure` | `AZURE_CLIENT_ID`, `AZURE_CLIENT_SECRET`, `AZURE_SUBSCRIPTION_ID`, `AZURE_TENANT_ID`, `AZURE_RESOURCE_GROUP`, `[AZURE_METADATA_ENDPOINT]` | [Additional configuration](https://go-acme.github.io/lego/dns/azure) |
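To make the `_FILE` convention above concrete, here is a minimal, hedged Go sketch of the usual lookup pattern (read the variable directly, otherwise read the file named by `<VAR>_FILE`). The helper name `getEnvOrFile` is illustrative and not the actual lego API:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"strings"
)

// getEnvOrFile returns the value of name or, if name is unset, the trimmed
// contents of the file pointed to by name_FILE. Illustrative only.
func getEnvOrFile(name string) (string, error) {
	if v, ok := os.LookupEnv(name); ok {
		return v, nil
	}
	if path, ok := os.LookupEnv(name + "_FILE"); ok {
		b, err := ioutil.ReadFile(path)
		if err != nil {
			return "", err
		}
		return strings.TrimSpace(string(b)), nil
	}
	return "", fmt.Errorf("%s (or %s_FILE) is not set", name, name)
}

func main() {
	// e.g. CF_API_EMAIL_FILE=/run/secrets/traefik_cf-api-email
	email, err := getEnvOrFile("CF_API_EMAIL")
	fmt.Println(email, err)
}
```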
@@ -40,7 +40,7 @@ spec:
    domains:
      - main: example.org
        sans:
          - *.example.org
          - '*.example.org'
```

```json tab="Marathon"
@@ -32,7 +32,7 @@ spec:
    - name: blog
      port: 8080
  tls:
    certresolver: myresolver
    certResolver: myresolver
```

```json tab="Marathon"

@@ -32,7 +32,7 @@ spec:
    - name: blog
      port: 8080
  tls:
    certresolver: myresolver
    certResolver: myresolver
```

```json tab="Marathon"
@@ -130,6 +130,20 @@ tls:

If no default certificate is provided, Traefik generates and uses a self-signed certificate.

## Domain fronting

Basically, [domain fronting](https://en.wikipedia.org/wiki/Domain_fronting) is a technique that allows one to open a
connection with a specific domain name, thanks to the
[Server Name Indication](https://en.wikipedia.org/wiki/Server_Name_Indication), then access a service with another
domain set in the HTTP `Host` header.

Since `v2.2.2`, Traefik avoids domain fronting by default.
As it is valid for advanced use cases, the `HostHeader` and `HostSNI` [rules](../routing/routers/index.md#rule) allow
fine-tuning the routing based on the `Server Name Indication` and the `Host` header value.

If you encounter routing issues with a previously working configuration, please refer to the
[migration guide](../migration/v2.md) to update your configuration.
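As a hedged illustration of what rejecting domain fronting amounts to (a sketch of the general idea, not Traefik's actual implementation), a TLS-terminating server can compare the SNI value from the handshake with the HTTP `Host` header and refuse mismatches:

```go
package main

import (
	"net"
	"net/http"
	"strings"
)

// rejectDomainFronting refuses TLS requests whose Host header does not match
// the SNI server name presented during the handshake. Illustrative only.
func rejectDomainFronting(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.TLS != nil && r.TLS.ServerName != "" {
			host := r.Host
			if h, _, err := net.SplitHostPort(r.Host); err == nil {
				host = h
			}
			if !strings.EqualFold(host, r.TLS.ServerName) {
				http.Error(w, "misdirected request", http.StatusMisdirectedRequest)
				return
			}
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	handler := rejectDomainFronting(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("ok\n"))
	}))
	// Served over plain HTTP here for brevity; the check only applies behind TLS.
	_ = http.ListenAndServe(":8080", handler)
}
```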
## TLS Options

The TLS options allow one to configure some parameters of the TLS connection.
@@ -317,7 +331,7 @@ spec:
### Strict SNI Checking

With strict SNI checking, Traefik won't allow connections from clients
that do not specify a server_name extension.
that do not specify a server_name extension or don't match any certificate configured on the tlsOption.

```toml tab="File (TOML)"
# Dynamic configuration
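For readers unfamiliar with what happens at the TLS layer, this is a hedged sketch of strict SNI checking (illustrative only, not Traefik's code): refuse handshakes whose ClientHello carries no server_name, or one that no configured certificate can serve.

```go
package main

import (
	"crypto/tls"
	"errors"
	"log"
)

func main() {
	// Certificates would normally come from the dynamic configuration.
	var certs []tls.Certificate

	cfg := &tls.Config{
		Certificates: certs,
		GetConfigForClient: func(hello *tls.ClientHelloInfo) (*tls.Config, error) {
			if hello.ServerName == "" {
				return nil, errors.New("strict SNI: no server_name extension")
			}
			for i := range certs {
				if err := hello.SupportsCertificate(&certs[i]); err == nil {
					return nil, nil // keep the default config
				}
			}
			return nil, errors.New("strict SNI: no certificate matches " + hello.ServerName)
		},
	}
	log.Printf("strict-SNI TLS config prepared: %v", cfg != nil)
}
```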
@@ -12,9 +12,11 @@ The BasicAuth middleware is a quick way to restrict access to your services to k

```yaml tab="Docker"
# Declaring the user list
#
# Note: all dollar signs in the hash need to be doubled for escaping.
# Note: when used in docker-compose.yml all dollar signs in the hash need to be doubled for escaping.
# To create a user:password pair, the following command can be used:
# echo $(htpasswd -nb user password) | sed -e s/\\$/\\$\\$/g
#
# Also note that dollar signs should NOT be doubled when they are not evaluated (e.g. Ansible docker_container module).
labels:
  - "traefik.http.middlewares.test-auth.basicauth.users=test:$$apr1$$H6uskkkW$$IgXLP6ewTrSuBkTrqE8wj/,test2:$$apr1$$d9hr9HBB$$4HxwgUir3HP4EsggP/QNo0"
```
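Purely as a hedged illustration of the escaping rule described in the comments above (not part of the diff), doubling the dollar signs can be automated when generating docker-compose labels from an htpasswd hash:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Output of `htpasswd -nb user password`; in docker-compose.yml every "$"
	// must be doubled so that compose does not treat it as a variable.
	hash := "test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/"
	fmt.Println(strings.ReplaceAll(hash, "$", "$$"))
}
```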
@@ -151,8 +151,8 @@ metadata:
  name: testHeader
spec:
  headers:
    frameDeny: "true"
    sslRedirect: "true"
    frameDeny: true
    sslRedirect: true
```

```yaml tab="Consul Catalog"
@@ -217,7 +217,7 @@ spec:
      - "https://foo.bar.org"
      - "https://example.org"
    accessControlMaxAge: 100
    addVaryHeader: "true"
    addVaryHeader: true
```

```yaml tab="Consul Catalog"
@@ -1,5 +1,117 @@
# Migration: Steps needed between the versions

## v2.x to v2.2.2

### Domain fronting

In `v2.2.2` we introduced the ability to avoid [Domain fronting](https://en.wikipedia.org/wiki/Domain_fronting),
and enabled it by default for [https routers](../routing/routers/index.md#rule) configured with ```Host(`something`)```.

!!! example "Allow Domain Fronting on a Specific Router"

    !!! info "Before v2.2.2"

    ```yaml tab="Docker"
    labels:
      - "traefik.http.routers.router0.rule=Host(`test.localhost`)"
    ```

    ```yaml tab="K8s Ingress"
    apiVersion: traefik.containo.us/v1alpha1
    kind: IngressRoute
    metadata:
      name: ingressroutebar

    spec:
      entryPoints:
        - http
      routes:
        - match: Host(`test.localhost`)
          kind: Rule
          services:
            - name: server0
              port: 80
            - name: server1
              port: 80
    ```

    ```toml tab="File (TOML)"
    [http.routers.router0]
      rule = "Host(`test.localhost`)"
      service = "my-service"
    ```

    ```yaml tab="File (YAML)"
    http:
      routers:
        router0:
          rule: "Host(`test.localhost`)"
          service: my-service
    ```

    !!! info "v2.2.2"

    ```yaml tab="Docker"
    labels:
      - "traefik.http.routers.router0.rule=HostHeader(`test.localhost`)"
    ```

    ```yaml tab="K8s Ingress"
    apiVersion: traefik.containo.us/v1alpha1
    kind: IngressRoute
    metadata:
      name: ingressroutebar

    spec:
      entryPoints:
        - http
      routes:
        - match: HostHeader(`test.localhost`)
          kind: Rule
          services:
            - name: server0
              port: 80
            - name: server1
              port: 80
    ```

    ```toml tab="File (TOML)"
    [http.routers.router0]
      rule = "HostHeader(`test.localhost`)"
      service = "my-service"
    ```

    ```yaml tab="File (YAML)"
    http:
      routers:
        router0:
          rule: "HostHeader(`test.localhost`)"
          service: my-service
    ```

As a fallback, a new flag is available as a global option:

!!! example "Enabling Domain Fronting for All Routers"

    ```toml tab="File (TOML)"
    # Static configuration
    [global]
      # Enabling domain fronting
      insecureSNI = true
    ```

    ```yaml tab="File (YAML)"
    # Static configuration
    global:
      # Enabling domain fronting
      insecureSNI: true
    ```

    ```bash tab="CLI"
    # Enabling domain fronting
    --global.insecureSNI
    ```

## v2.0 to v2.1

### Kubernetes CRD
@@ -181,10 +181,10 @@ accessLog:
| `StartUTC` | The time at which request processing started. |
| `StartLocal` | The local time at which request processing started. |
| `Duration` | The total time taken (in nanoseconds) by processing the response, including the origin server's time but not the log writing time. |
| `FrontendName` | The name of the Traefik frontend. |
| `BackendName` | The name of the Traefik backend. |
| `BackendURL` | The URL of the Traefik backend. |
| `BackendAddr` | The IP:port of the Traefik backend (extracted from `BackendURL`) |
| `RouterName` | The name of the Traefik router. |
| `ServiceName` | The name of the Traefik backend. |
| `ServiceURL` | The URL of the Traefik backend. |
| `ServiceAddr` | The IP:port of the Traefik backend (extracted from `ServiceURL`) |
| `ClientAddr` | The remote address in its original form (usually IP:port). |
| `ClientHost` | The remote IP address from which the client request was received. |
| `ClientPort` | The remote TCP port from which the client request was received. |
@@ -72,9 +72,6 @@ to allow defining:
- A [router rule](#dashboard-router-rule) for accessing the dashboard,
  through Traefik itself (sometimes referred to as "Traefik-ception").

??? example "Dashboard Dynamic Configuration Examples"
    --8<-- "content/operations/include-api-examples.md"

### Dashboard Router Rule

As underlined in the [documentation for the `api.dashboard` option](./api.md#dashboard),

@@ -99,6 +96,9 @@ rule = "PathPrefix(`/api`) || PathPrefix(`/dashboard`)"
rule = "Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
```

??? example "Dashboard Dynamic Configuration Examples"
    --8<-- "content/operations/include-dashboard-examples.md"

## Insecure Mode

This mode is not recommended because it does not allow the use of security features.
docs/content/operations/include-dashboard-examples.md | 101 (new file)

@@ -0,0 +1,101 @@
```yaml tab="Docker"
# Dynamic Configuration
labels:
  - "traefik.http.routers.dashboard.rule=Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
  - "traefik.http.routers.dashboard.service=api@internal"
  - "traefik.http.routers.dashboard.middlewares=auth"
  - "traefik.http.middlewares.auth.basicauth.users=test:$$apr1$$H6uskkkW$$IgXLP6ewTrSuBkTrqE8wj/,test2:$$apr1$$d9hr9HBB$$4HxwgUir3HP4EsggP/QNo0"
```

```yaml tab="Docker (Swarm)"
# Dynamic Configuration
deploy:
  labels:
    - "traefik.http.routers.dashboard.rule=Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
    - "traefik.http.routers.dashboard.service=api@internal"
    - "traefik.http.routers.dashboard.middlewares=auth"
    - "traefik.http.middlewares.auth.basicauth.users=test:$$apr1$$H6uskkkW$$IgXLP6ewTrSuBkTrqE8wj/,test2:$$apr1$$d9hr9HBB$$4HxwgUir3HP4EsggP/QNo0"
    # Dummy service for Swarm port detection. The port can be any valid integer value.
    - "traefik.http.services.dummy-svc.loadbalancer.server.port=9999"
```

```yaml tab="Kubernetes CRD"
apiVersion: traefik.containo.us/v1alpha1
kind: IngressRoute
metadata:
  name: traefik-dashboard
spec:
  routes:
    - match: Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))
      kind: Rule
      services:
        - name: api@internal
          kind: TraefikService
      middlewares:
        - name: auth
---
apiVersion: traefik.containo.us/v1alpha1
kind: Middleware
metadata:
  name: auth
spec:
  basicAuth:
    secret: secretName # Kubernetes secret named "secretName"
```

```yaml tab="Consul Catalog"
# Dynamic Configuration
- "traefik.http.routers.dashboard.rule=Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
- "traefik.http.routers.dashboard.service=api@internal"
- "traefik.http.routers.dashboard.middlewares=auth"
- "traefik.http.middlewares.auth.basicauth.users=test:$$apr1$$H6uskkkW$$IgXLP6ewTrSuBkTrqE8wj/,test2:$$apr1$$d9hr9HBB$$4HxwgUir3HP4EsggP/QNo0"
```

```json tab="Marathon"
"labels": {
  "traefik.http.routers.dashboard.rule": "Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))",
  "traefik.http.routers.dashboard.service": "api@internal",
  "traefik.http.routers.dashboard.middlewares": "auth",
  "traefik.http.middlewares.auth.basicauth.users": "test:$$apr1$$H6uskkkW$$IgXLP6ewTrSuBkTrqE8wj/,test2:$$apr1$$d9hr9HBB$$4HxwgUir3HP4EsggP/QNo0"
}
```

```yaml tab="Rancher"
# Dynamic Configuration
labels:
  - "traefik.http.routers.dashboard.rule=Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
  - "traefik.http.routers.dashboard.service=api@internal"
  - "traefik.http.routers.dashboard.middlewares=auth"
  - "traefik.http.middlewares.auth.basicauth.users=test:$$apr1$$H6uskkkW$$IgXLP6ewTrSuBkTrqE8wj/,test2:$$apr1$$d9hr9HBB$$4HxwgUir3HP4EsggP/QNo0"
```

```toml tab="File (TOML)"
# Dynamic Configuration
[http.routers.my-api]
  rule = "Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
  service = "api@internal"
  middlewares = ["auth"]

[http.middlewares.auth.basicAuth]
  users = [
    "test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/",
    "test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0",
  ]
```

```yaml tab="File (YAML)"
# Dynamic Configuration
http:
  routers:
    dashboard:
      rule: Host(`traefik.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))
      service: api@internal
      middlewares:
        - auth
  middlewares:
    auth:
      basicAuth:
        users:
          - "test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/"
          - "test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0"
```
@@ -191,14 +191,14 @@ providers:
### Go Templating

!!! warning
    Go Templating only works along with dedicated dynamic configuration files.
    Go Templating only works with dedicated dynamic configuration files.
    Templating does not work in the Traefik main static configuration file.

Traefik allows using Go templating,
it must be a valid [Go template](https://golang.org/pkg/text/template/),
augmented with the [sprig template functions](http://masterminds.github.io/sprig/).
Traefik supports using Go templating to automatically generate repetitive portions of configuration files.
These sections must be valid [Go templates](https://golang.org/pkg/text/template/),
augmented with the [Sprig template functions](http://masterminds.github.io/sprig/).

Thus, it's possible to define easily lot of routers, services and TLS certificates as described in the following examples:
To illustrate, it's possible to easily define multiple routers, services, and TLS certificates as described in the following examples:

??? example "Configuring Using Templating"
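Because the paragraph above describes the templating mechanism only abstractly, here is a small hedged sketch of how such a template expands, using only the standard library's `text/template` (the host names and structure are made up, and the Sprig functions are omitted):

```go
package main

import (
	"os"
	"text/template"
)

// A toy dynamic-configuration template: one router per host name.
const dynConf = `http:
  routers:
{{- range $i, $host := .Hosts }}
    router{{ $i }}:
      rule: "Host(` + "`" + `{{ $host }}` + "`" + `)"
      service: my-service
{{- end }}
`

func main() {
	t := template.Must(template.New("dyn").Parse(dynConf))
	data := struct{ Hosts []string }{Hosts: []string{"test.localhost", "test2.localhost"}}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
```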
@@ -162,6 +162,9 @@ WriteTimeout is the maximum duration before timing out writes of the response. I
`--global.checknewversion`:
Periodically check if a new version has been released. (Default: ```false```)

`--global.insecuresni`:
Allow domain fronting. If the option is not specified, it will be disabled by default. (Default: ```false```)

`--global.sendanonymoususage`:
Periodically send anonymous usage statistics. If the option is not specified, it will be enabled by default. (Default: ```false```)

@@ -162,6 +162,9 @@ WriteTimeout is the maximum duration before timing out writes of the response. I
`TRAEFIK_GLOBAL_CHECKNEWVERSION`:
Periodically check if a new version has been released. (Default: ```false```)

`TRAEFIK_GLOBAL_INSECURESNI`:
Allow domain fronting. If the option is not specified, it will be disabled by default. (Default: ```false```)

`TRAEFIK_GLOBAL_SENDANONYMOUSUSAGE`:
Periodically send anonymous usage statistics. If the option is not specified, it will be enabled by default. (Default: ```false```)
@@ -1,6 +1,7 @@
[global]
  checkNewVersion = true
  sendAnonymousUsage = true
  insecureSNI = false

[serversTransport]
  insecureSkipVerify = true

@@ -1,6 +1,8 @@
global:
  checkNewVersion: true
  sendAnonymousUsage: true
  insecureSNI: false

serversTransport:
  insecureSkipVerify: true
  rootCAs:
@@ -282,12 +282,12 @@ which in turn will create the resulting routers, services, handlers, etc.
    traefik.ingress.kubernetes.io/service.passhostheader: "true"
    ```

??? info "`traefik.ingress.kubernetes.io/service.sticky`"
??? info "`traefik.ingress.kubernetes.io/service.sticky.cookie`"

    See [sticky sessions](../services/index.md#sticky-sessions) for more information.

    ```yaml
    traefik.ingress.kubernetes.io/service.sticky: "true"
    traefik.ingress.kubernetes.io/service.sticky.cookie: "true"
    ```

??? info "`traefik.ingress.kubernetes.io/service.sticky.cookie.name`"
@@ -228,16 +228,18 @@ If the rule is verified, the router becomes active, calls middlewares, and then

The table below lists all the available matchers:

| Rule | Description |
|------|-------------|
| ```Headers(`key`, `value`)``` | Check if there is a key `key` defined in the headers, with the value `value` |
| ```HeadersRegexp(`key`, `regexp`)``` | Check if there is a key `key` defined in the headers, with a value that matches the regular expression `regexp` |
| ```Host(`example.com`, ...)``` | Check if the request domain targets one of the given `domains`. |
| ```HostRegexp(`example.com`, `{subdomain:[a-z]+}.example.com`, ...)``` | Check if the request domain matches the given `regexp`. |
| ```Method(`GET`, ...)``` | Check if the request method is one of the given `methods` (`GET`, `POST`, `PUT`, `DELETE`, `PATCH`) |
| ```Path(`/path`, `/articles/{cat:[a-z]+}/{id:[0-9]+}`, ...)``` | Match exact request path. It accepts a sequence of literal and regular expression paths. |
| ```PathPrefix(`/products/`, `/articles/{cat:[a-z]+}/{id:[0-9]+}`)``` | Match request prefix path. It accepts a sequence of literal and regular expression prefix paths. |
| ```Query(`foo=bar`, `bar=baz`)``` | Match Query String parameters. It accepts a sequence of key=value pairs. |
| Rule | Description |
|------|-------------|
| ```Headers(`key`, `value`)``` | Check if there is a key `key` defined in the headers, with the value `value` |
| ```HeadersRegexp(`key`, `regexp`)``` | Check if there is a key `key` defined in the headers, with a value that matches the regular expression `regexp` |
| ```Host(`example.com`, ...)``` | By default, is equivalent to `HostHeader` **AND** `HostSNI` rules. See [Domain Fronting](../../https/tls.md#domain-fronting) and the [migration guide](../../migration/v2.md#domain-fronting) for more details. |
| ```HostHeader(`example.com`, ...)``` | Check if the request domain (host header value) targets one of the given `domains`. |
| ```HostSNI(`example.com`, ...)``` | Check if the [Server Name Indication](https://en.wikipedia.org/wiki/Server_Name_Indication) corresponds to the given `domains`. |
| ```HostRegexp(`example.com`, `{subdomain:[a-z]+}.example.com`, ...)``` | Check if the request domain matches the given `regexp`. |
| ```Method(`GET`, ...)``` | Check if the request method is one of the given `methods` (`GET`, `POST`, `PUT`, `DELETE`, `PATCH`) |
| ```Path(`/path`, `/articles/{cat:[a-z]+}/{id:[0-9]+}`, ...)``` | Match exact request path. It accepts a sequence of literal and regular expression paths. |
| ```PathPrefix(`/products/`, `/articles/{cat:[a-z]+}/{id:[0-9]+}`)``` | Match request prefix path. It accepts a sequence of literal and regular expression prefix paths. |
| ```Query(`foo=bar`, `bar=baz`)``` | Match Query String parameters. It accepts a sequence of key=value pairs. |

!!! important "Regexp Syntax"
@@ -28,10 +28,6 @@ theme:

copyright: "Copyright © 2016-2020 Containous"

google_analytics:
  - 'UA-51880359-3'
  - 'docs.traefik.io'

extra_css:
  - assets/styles/extra.css # Our custom styles
  - assets/styles/atom-one-light.css # HightlightJS's CSS theme
docs/theme/main.html | 10 (vendored)

@@ -1,5 +1,15 @@
{% extends "base.html" %}

{% block analytics %}
  <!-- Google Tag Manager -->
  <script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
  new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
  j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
  'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
  })(window,document,'script','dataLayer','GTM-NMWC63S');</script>
  <!-- End Google Tag Manager -->
{% endblock %}

{% block footer %}

{% import "partials/language.html" as lang with context %}
go.mod | 2

@@ -36,7 +36,7 @@ require (
	github.com/fatih/structs v1.1.0
	github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 // indirect
	github.com/gambol99/go-marathon v0.0.0-20180614232016-99a156b96fb2
	github.com/go-acme/lego/v3 v3.7.0
	github.com/go-acme/lego/v3 v3.8.0
	github.com/go-check/check v0.0.0-00010101000000-000000000000
	github.com/go-kit/kit v0.9.0
	github.com/golang/protobuf v1.3.4
26
go.sum
26
go.sum
|
@ -117,6 +117,8 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
|||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
|
||||
github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
|
||||
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
|
||||
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
github.com/c0va23/go-proxyprotocol v0.9.1 h1:5BCkp0fDJOhzzH1lhjUgHhmZz9VvRMMif1U2D31hb34=
|
||||
github.com/c0va23/go-proxyprotocol v0.9.1/go.mod h1:TNjUV+llvk8TvWJxlPYAeAYZgSzT/iicNr3nWBWX320=
|
||||
github.com/cenkalti/backoff/v4 v4.0.0 h1:6VeaLF9aI+MAUQ95106HwWzYZgJJpZ4stumjj6RFYAU=
|
||||
|
@ -247,8 +249,8 @@ github.com/gambol99/go-marathon v0.0.0-20180614232016-99a156b96fb2/go.mod h1:GLy
|
|||
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/go-acme/lego/v3 v3.7.0 h1:qC5/8/CbltyAE8fGLE6bGlqucj7pXc/vBxiLwLOsmAQ=
|
||||
github.com/go-acme/lego/v3 v3.7.0/go.mod h1:4eDjjYkAsDXyNcwN8IhhZAwxz9Ltiks1Zmpv0q20J7A=
|
||||
github.com/go-acme/lego/v3 v3.8.0 h1:9OOEn54eZvEPRRdM7xiC5f7EBW0MlEeChr+kzlIhdN8=
|
||||
github.com/go-acme/lego/v3 v3.8.0/go.mod h1:kYiHYgSRzb1l2NQPWvWvkVG5etNCusGFsZc2MTak3m0=
|
||||
github.com/go-cmd/cmd v1.0.5/go.mod h1:y8q8qlK5wQibcw63djSl/ntiHUHXHGdCkPk0j4QeW4s=
|
||||
github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
|
||||
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
|
||||
|
@ -378,6 +380,8 @@ github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv
|
|||
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
||||
github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=
|
||||
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
|
||||
github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=
|
||||
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
||||
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
|
||||
|
@ -387,6 +391,8 @@ github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:
|
|||
github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
|
||||
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
|
||||
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||
github.com/hashicorp/go-retryablehttp v0.6.6 h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM=
|
||||
github.com/hashicorp/go-retryablehttp v0.6.6/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
|
||||
github.com/hashicorp/go-rootcerts v1.0.0 h1:Rqb66Oo1X/eSV1x66xbDccZjhJigjg0+e82kpwzSwCI=
|
||||
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
|
||||
github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs=
|
||||
|
@ -445,8 +451,8 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
|
|||
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
||||
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/kolo/xmlrpc v0.0.0-20190717152603-07c4ee3fd181 h1:TrxPzApUukas24OMMVDUMlCs1XCExJtnGaDEiIAR4oQ=
|
||||
github.com/kolo/xmlrpc v0.0.0-20190717152603-07c4ee3fd181/go.mod h1:o03bZfuBwAXHetKXuInt4S7omeXUu62/A845kiycsSQ=
|
||||
github.com/kolo/xmlrpc v0.0.0-20200310150728-e0350524596b h1:DzHy0GlWeF0KAglaTMY7Q+khIFoG8toHP+wLFBVBQJc=
|
||||
github.com/kolo/xmlrpc v0.0.0-20200310150728-e0350524596b/go.mod h1:o03bZfuBwAXHetKXuInt4S7omeXUu62/A845kiycsSQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
|
@ -512,6 +518,8 @@ github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0Qu
|
|||
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.3.1 h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=
|
||||
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=
|
||||
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
|
@ -533,8 +541,8 @@ github.com/nrdcg/auroradns v1.0.1 h1:m/kBq83Xvy3cU261MOknd8BdnOk12q4lAWM+kOdsC2Y
|
|||
github.com/nrdcg/auroradns v1.0.1/go.mod h1:y4pc0i9QXYlFCWrhWrUSIETnZgrf4KuwjDIWmmXo3JI=
|
||||
github.com/nrdcg/dnspod-go v0.4.0 h1:c/jn1mLZNKF3/osJ6mz3QPxTudvPArXTjpkmYj0uK6U=
|
||||
github.com/nrdcg/dnspod-go v0.4.0/go.mod h1:vZSoFSFeQVm2gWLMkyX61LZ8HI3BaqtHZWgPTGKr6KQ=
|
||||
github.com/nrdcg/goinwx v0.6.1 h1:AJnjoWPELyCtofhGcmzzcEMFd9YdF2JB/LgutWsWt/s=
|
||||
github.com/nrdcg/goinwx v0.6.1/go.mod h1:XPiut7enlbEdntAqalBIqcYcTEVhpv/dKWgDCX2SwKQ=
|
||||
github.com/nrdcg/goinwx v0.7.0 h1:j6JlOp0nNwtvaP09TvKqc9pktjH81nOad0+Gx9S1t9U=
|
||||
github.com/nrdcg/goinwx v0.7.0/go.mod h1:4tKJOCi/1lTxuw9/yB2Ez0aojwtUCSkckjc22eALpqE=
|
||||
github.com/nrdcg/namesilo v0.2.1 h1:kLjCjsufdW/IlC+iSfAqj0iQGgKjlbUUeDJio5Y6eMg=
|
||||
github.com/nrdcg/namesilo v0.2.1/go.mod h1:lwMvfQTyYq+BbjJd30ylEG4GPSS6PII0Tia4rRpRiyw=
|
||||
github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
|
||||
|
@ -596,6 +604,8 @@ github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6J
|
|||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
|
||||
github.com/pquerna/otp v1.2.0 h1:/A3+Jn+cagqayeR3iHs/L62m5ue7710D35zl1zJ1kok=
|
||||
github.com/pquerna/otp v1.2.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
|
||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||
github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM=
|
||||
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
|
||||
|
@ -694,8 +704,8 @@ github.com/vulcand/oxy v1.1.0 h1:DbBijGo1+6cFqR9jarkMxasdj0lgWwrrFtue6ijek4Q=
|
|||
github.com/vulcand/oxy v1.1.0/go.mod h1:ADiMYHi8gkGl2987yQIzDRoXZilANF4WtKaQ92OppKY=
|
||||
github.com/vulcand/predicate v1.1.0 h1:Gq/uWopa4rx/tnZu2opOSBqHK63Yqlou/SzrbwdJiNg=
|
||||
github.com/vulcand/predicate v1.1.0/go.mod h1:mlccC5IRBoc2cIFmCB8ZM62I3VDb6p2GXESMHa3CnZg=
|
||||
github.com/vultr/govultr v0.1.4 h1:UnNMixYFVO0p80itc8PcweoVENyo1PasfvwKhoasR9U=
|
||||
github.com/vultr/govultr v0.1.4/go.mod h1:9H008Uxr/C4vFNGLqKx232C206GL0PBHzOP0809bGNA=
|
||||
github.com/vultr/govultr v0.4.2 h1:9i8xKZ+xp6vwZ9raqHoBLzhB4wCnMj7nOQTj5YIRLWY=
|
||||
github.com/vultr/govultr v0.4.2/go.mod h1:TUuUizMOFc7z+PNMssb6iGjKjQfpw5arIaOLfocVudQ=
|
||||
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
|
||||
github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
|
||||
|
|
|
@ -111,6 +111,20 @@ func (s *AccessLogSuite) TestAccessLogAuthFrontend(c *check.C) {
|
|||
routerName: "rt-authFrontend",
|
||||
serviceURL: "-",
|
||||
},
|
||||
{
|
||||
formatOnly: false,
|
||||
code: "401",
|
||||
user: "test",
|
||||
routerName: "rt-authFrontend",
|
||||
serviceURL: "-",
|
||||
},
|
||||
{
|
||||
formatOnly: false,
|
||||
code: "200",
|
||||
user: "test",
|
||||
routerName: "rt-authFrontend",
|
||||
serviceURL: "http://172.17.0",
|
||||
},
|
||||
}
|
||||
|
||||
// Start Traefik
|
||||
|
@ -130,7 +144,7 @@ func (s *AccessLogSuite) TestAccessLogAuthFrontend(c *check.C) {
|
|||
// Verify Traefik started OK
|
||||
checkTraefikStarted(c)
|
||||
|
||||
// Test auth frontend
|
||||
// Test auth entrypoint
|
||||
req, err := http.NewRequest(http.MethodGet, "http://127.0.0.1:8006/", nil)
|
||||
c.Assert(err, checker.IsNil)
|
||||
req.Host = "frontend.auth.docker.local"
|
||||
|
@ -138,6 +152,16 @@ func (s *AccessLogSuite) TestAccessLogAuthFrontend(c *check.C) {
|
|||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusUnauthorized), try.HasBody())
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
req.SetBasicAuth("test", "")
|
||||
|
||||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusUnauthorized), try.HasBody())
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
req.SetBasicAuth("test", "test")
|
||||
|
||||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusOK), try.HasBody())
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
// Verify access.log output as expected
|
||||
count := checkAccessLogExactValuesOutput(c, expected)
|
||||
|
||||
|
@ -158,6 +182,13 @@ func (s *AccessLogSuite) TestAccessLogDigestAuthMiddleware(c *check.C) {
|
|||
routerName: "rt-digestAuthMiddleware",
|
||||
serviceURL: "-",
|
||||
},
|
||||
{
|
||||
formatOnly: false,
|
||||
code: "401",
|
||||
user: "test",
|
||||
routerName: "rt-digestAuthMiddleware",
|
||||
serviceURL: "-",
|
||||
},
|
||||
{
|
||||
formatOnly: false,
|
||||
code: "200",
|
||||
|
@ -192,15 +223,22 @@ func (s *AccessLogSuite) TestAccessLogDigestAuthMiddleware(c *check.C) {
|
|||
resp, err := try.ResponseUntilStatusCode(req, 500*time.Millisecond, http.StatusUnauthorized)
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
digestParts := digestParts(resp)
|
||||
digestParts["uri"] = "/"
|
||||
digestParts["method"] = http.MethodGet
|
||||
digestParts["username"] = "test"
|
||||
digestParts["password"] = "test"
|
||||
digest := digestParts(resp)
|
||||
digest["uri"] = "/"
|
||||
digest["method"] = http.MethodGet
|
||||
digest["username"] = "test"
|
||||
digest["password"] = "wrong"
|
||||
|
||||
req.Header.Set("Authorization", getDigestAuthorization(digestParts))
|
||||
req.Header.Set("Authorization", getDigestAuthorization(digest))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusUnauthorized), try.HasBody())
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
digest["password"] = "test"
|
||||
|
||||
req.Header.Set("Authorization", getDigestAuthorization(digest))
|
||||
|
||||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusOK), try.HasBody())
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
|
|
|
@ -138,7 +138,7 @@ func (s *ConsulSuite) TestSimpleConfiguration(c *check.C) {
|
|||
expectedJSON := filepath.FromSlash("testdata/rawdata-consul.json")
|
||||
|
||||
if *updateExpected {
|
||||
err = ioutil.WriteFile(expectedJSON, got, 0666)
|
||||
err = ioutil.WriteFile(expectedJSON, got, 0o666)
|
||||
c.Assert(err, checker.IsNil)
|
||||
}
|
||||
|
||||
|
|
|
@ -36,8 +36,8 @@ func (s *DockerComposeSuite) TearDownSuite(c *check.C) {
|
|||
}
|
||||
|
||||
func (s *DockerComposeSuite) TestComposeScale(c *check.C) {
|
||||
var serviceCount = 2
|
||||
var composeService = "whoami1"
|
||||
serviceCount := 2
|
||||
composeService := "whoami1"
|
||||
|
||||
s.composeProject.Scale(c, composeService, serviceCount)
|
||||
|
||||
|
|
|
@ -43,7 +43,7 @@ func (s *DockerSuite) startContainerWithLabels(c *check.C, image string, labels
|
|||
})
|
||||
}
|
||||
|
||||
func (s *DockerSuite) startContainerWithNameAndLabels(c *check.C, name string, image string, labels map[string]string, args ...string) string {
|
||||
func (s *DockerSuite) startContainerWithNameAndLabels(c *check.C, name, image string, labels map[string]string, args ...string) string {
|
||||
return s.startContainerWithConfig(c, image, d.ContainerConfig{
|
||||
Name: name,
|
||||
Cmd: args,
|
||||
|
|
|
@ -138,7 +138,7 @@ func (s *EtcdSuite) TestSimpleConfiguration(c *check.C) {
|
|||
expectedJSON := filepath.FromSlash("testdata/rawdata-etcd.json")
|
||||
|
||||
if *updateExpected {
|
||||
err = ioutil.WriteFile(expectedJSON, got, 0666)
|
||||
err = ioutil.WriteFile(expectedJSON, got, 0o666)
|
||||
c.Assert(err, checker.IsNil)
|
||||
}
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
[http.routers]
|
||||
[http.routers.router1]
|
||||
rule = "Host(`127.0.0.1`)"
|
||||
rule = "Host(`localhost`)"
|
||||
service = "service1"
|
||||
[http.routers.router1.tls]
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
[http.routers]
|
||||
[http.routers.router1]
|
||||
rule = "Host(`127.0.0.1`)"
|
||||
rule = "Host(`localhost`)"
|
||||
service = "service1"
|
||||
[http.routers.router1.tls]
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
[http.routers]
|
||||
[http.routers.router1]
|
||||
rule = "Host(`127.0.0.1`)"
|
||||
rule = "Host(`localhost`)"
|
||||
service = "service1"
|
||||
[http.routers.router1.tls]
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
[http.routers]
|
||||
[http.routers.router1]
|
||||
rule = "Host(`127.0.0.1`)"
|
||||
rule = "Host(`localhost`)"
|
||||
service = "service1"
|
||||
middlewares = ["retryer"]
|
||||
[http.routers.router1.tls]
|
||||
|
|
|
@ -2,6 +2,9 @@
|
|||
checkNewVersion = false
|
||||
sendAnonymousUsage = false
|
||||
|
||||
[api]
|
||||
insecure = true
|
||||
|
||||
[log]
|
||||
level = "DEBUG"
|
||||
|
||||
|
@ -24,6 +27,11 @@
|
|||
rule = "Host(`test2.localhost`)"
|
||||
service = "service1"
|
||||
|
||||
[http.routers.router3]
|
||||
rule = "Host(`internal.localhost`)"
|
||||
middlewares = ["secure"]
|
||||
service = "api@internal"
|
||||
|
||||
[http.middlewares]
|
||||
[http.middlewares.secure.headers]
|
||||
featurePolicy = "vibrate 'none';"
|
||||
|
|
|
@ -19,8 +19,10 @@ import (
|
|||
"google.golang.org/grpc/credentials"
|
||||
)
|
||||
|
||||
var LocalhostCert []byte
|
||||
var LocalhostKey []byte
|
||||
var (
|
||||
LocalhostCert []byte
|
||||
LocalhostKey []byte
|
||||
)
|
||||
|
||||
const randCharset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
|
||||
|
||||
|
@ -84,7 +86,7 @@ func starth2cGRPCServer(lis net.Listener, server *myserver) error {
|
|||
func getHelloClientGRPC() (helloworld.GreeterClient, func() error, error) {
|
||||
roots := x509.NewCertPool()
|
||||
roots.AppendCertsFromPEM(LocalhostCert)
|
||||
credsClient := credentials.NewClientTLSFromCert(roots, "")
|
||||
credsClient := credentials.NewClientTLSFromCert(roots, "localhost")
|
||||
conn, err := grpc.Dial("127.0.0.1:4443", grpc.WithTransportCredentials(credsClient))
|
||||
if err != nil {
|
||||
return nil, func() error { return nil }, err
|
||||
|
@ -165,7 +167,7 @@ func (s *GRPCSuite) TestGRPC(c *check.C) {
|
|||
defer cmd.Process.Kill()
|
||||
|
||||
// wait for Traefik
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`127.0.0.1`)"))
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`localhost`)"))
|
||||
c.Assert(err, check.IsNil)
|
||||
|
||||
var response string
|
||||
|
@ -245,7 +247,7 @@ func (s *GRPCSuite) TestGRPCh2cTermination(c *check.C) {
|
|||
defer cmd.Process.Kill()
|
||||
|
||||
// wait for Traefik
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`127.0.0.1`)"))
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`localhost`)"))
|
||||
c.Assert(err, check.IsNil)
|
||||
|
||||
var response string
|
||||
|
@ -287,7 +289,7 @@ func (s *GRPCSuite) TestGRPCInsecure(c *check.C) {
|
|||
defer cmd.Process.Kill()
|
||||
|
||||
// wait for Traefik
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`127.0.0.1`)"))
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`localhost`)"))
|
||||
c.Assert(err, check.IsNil)
|
||||
|
||||
var response string
|
||||
|
@ -334,7 +336,7 @@ func (s *GRPCSuite) TestGRPCBuffer(c *check.C) {
|
|||
defer cmd.Process.Kill()
|
||||
|
||||
// wait for Traefik
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`127.0.0.1`)"))
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`localhost`)"))
|
||||
c.Assert(err, check.IsNil)
|
||||
var client helloworld.Greeter_StreamExampleClient
|
||||
client, closer, err := callStreamExampleClientGRPC()
|
||||
|
@ -393,7 +395,7 @@ func (s *GRPCSuite) TestGRPCBufferWithFlushInterval(c *check.C) {
|
|||
defer cmd.Process.Kill()
|
||||
|
||||
// wait for Traefik
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`127.0.0.1`)"))
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`localhost`)"))
|
||||
c.Assert(err, check.IsNil)
|
||||
|
||||
var client helloworld.Greeter_StreamExampleClient
|
||||
|
@ -451,7 +453,7 @@ func (s *GRPCSuite) TestGRPCWithRetry(c *check.C) {
|
|||
defer cmd.Process.Kill()
|
||||
|
||||
// wait for Traefik
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`127.0.0.1`)"))
|
||||
err = try.GetRequest("http://127.0.0.1:8080/api/rawdata", 1*time.Second, try.BodyContains("Host(`localhost`)"))
|
||||
c.Assert(err, check.IsNil)
|
||||
|
||||
var response string
|
||||
|
|
|
@ -131,16 +131,18 @@ func (s *HeadersSuite) TestSecureHeadersResponses(c *check.C) {
|
|||
c.Assert(err, checker.IsNil)
|
||||
|
||||
testCase := []struct {
|
||||
desc string
|
||||
expected http.Header
|
||||
reqHost string
|
||||
desc string
|
||||
expected http.Header
|
||||
reqHost string
|
||||
internalReqHost string
|
||||
}{
|
||||
{
|
||||
desc: "Feature-Policy Set",
|
||||
expected: http.Header{
|
||||
"Feature-Policy": {"vibrate 'none';"},
|
||||
},
|
||||
reqHost: "test.localhost",
|
||||
reqHost: "test.localhost",
|
||||
internalReqHost: "internal.localhost",
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -149,7 +151,14 @@ func (s *HeadersSuite) TestSecureHeadersResponses(c *check.C) {
|
|||
c.Assert(err, checker.IsNil)
|
||||
req.Host = test.reqHost
|
||||
|
||||
err = try.Request(req, 500*time.Millisecond, try.HasHeaderStruct(test.expected))
|
||||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusOK), try.HasHeaderStruct(test.expected))
|
||||
c.Assert(err, checker.IsNil)
|
||||
|
||||
req, err = http.NewRequest(http.MethodGet, "http://127.0.0.1:8000/api/rawdata", nil)
|
||||
c.Assert(err, checker.IsNil)
|
||||
req.Host = test.internalReqHost
|
||||
|
||||
err = try.Request(req, 500*time.Millisecond, try.StatusCodeIs(http.StatusOK), try.HasHeaderStruct(test.expected))
|
||||
c.Assert(err, checker.IsNil)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,9 +15,12 @@ It has these top-level messages:
|
|||
*/
|
||||
package helloworld
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
import (
|
||||
fmt "fmt"
|
||||
math "math"
|
||||
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
)
|
||||
|
||||
import (
|
||||
context "context"
|
||||
|
@ -26,9 +29,11 @@ import (
|
|||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
var (
|
||||
_ = proto.Marshal
|
||||
_ = fmt.Errorf
|
||||
_ = math.Inf
|
||||
)
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
|
@ -102,8 +107,10 @@ func init() {
|
|||
}
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ context.Context
|
||||
var _ grpc.ClientConn
|
||||
var (
|
||||
_ context.Context
|
||||
_ grpc.ClientConn
|
||||
)
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the grpc package it is being compiled against.
|
||||
|
|
|
@ -956,11 +956,13 @@ func modifyCertificateConfFileContent(c *check.C, certFileName, confFileName str
|
|||
if len(certFileName) > 0 {
|
||||
tlsConf := dynamic.Configuration{
|
||||
TLS: &dynamic.TLSConfiguration{
|
||||
Certificates: []*traefiktls.CertAndStores{{
|
||||
Certificate: traefiktls.Certificate{
|
||||
CertFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".cert"),
|
||||
KeyFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".key"),
|
||||
}},
|
||||
Certificates: []*traefiktls.CertAndStores{
|
||||
{
|
||||
Certificate: traefiktls.Certificate{
|
||||
CertFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".cert"),
|
||||
KeyFile: traefiktls.FileOrContent("fixtures/https/" + certFileName + ".key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@@ -21,10 +21,12 @@ import (
 	checker "github.com/vdemeester/shakers"
 )

-var integration = flag.Bool("integration", false, "run integration tests")
-var container = flag.Bool("container", false, "run container integration tests")
-var host = flag.Bool("host", false, "run host integration tests")
-var showLog = flag.Bool("tlog", false, "always show Traefik logs")
+var (
+	integration = flag.Bool("integration", false, "run integration tests")
+	container = flag.Bool("container", false, "run container integration tests")
+	host = flag.Bool("host", false, "run host integration tests")
+	showLog = flag.Bool("tlog", false, "always show Traefik logs")
+)

 func Test(t *testing.T) {
 	if !*integration {
@@ -119,7 +119,7 @@ func testConfiguration(c *check.C, path, apiPort string) {
 		newJSON, err := json.MarshalIndent(rtRepr, "", "\t")
 		c.Assert(err, checker.IsNil)

-		err = ioutil.WriteFile(expectedJSON, newJSON, 0644)
+		err = ioutil.WriteFile(expectedJSON, newJSON, 0o644)
 		c.Assert(err, checker.IsNil)
 		c.Errorf("We do not want a passing test in file update mode")
 	}
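The 0644 → 0o644 change above (and the 0600/0666 updates in the hunks that follow) only switches the file-mode literals to Go's explicit 0o octal prefix; the numeric value is identical. A minimal standalone sketch, with a made-up file name, illustrating the equivalence:

package main

import (
	"fmt"
	"io/ioutil"
)

func main() {
	// 0644 and 0o644 are the same octal constant; 0o is the explicit prefix form (Go 1.13+).
	fmt.Println(0644 == 0o644) // true

	// Hypothetical file name, mirroring the ioutil.WriteFile calls in the surrounding hunks.
	if err := ioutil.WriteFile("example.json", []byte("{}\n"), 0o644); err != nil {
		fmt.Println(err)
	}
}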
@@ -55,7 +55,7 @@ func (s *MarathonSuite15) extendDockerHostsFile(host, ipAddr string) error {
 	// (See also https://groups.google.com/d/topic/docker-user/JOGE7AnJ3Gw/discussion.)
 	if os.Getenv("CONTAINER") == "DOCKER" {
 		// We are running inside a container -- extend the hosts file.
-		file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0600)
+		file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0o600)
 		if err != nil {
 			return err
 		}
@@ -60,7 +60,7 @@ func (s *MarathonSuite) extendDockerHostsFile(host, ipAddr string) error {
 	// (See also https://groups.google.com/d/topic/docker-user/JOGE7AnJ3Gw/discussion.)
 	if os.Getenv("CONTAINER") == "DOCKER" {
 		// We are running inside a container -- extend the hosts file.
-		file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0600)
+		file, err := os.OpenFile(hostsFile, os.O_APPEND|os.O_WRONLY, 0o600)
 		if err != nil {
 			return err
 		}
@@ -138,7 +138,7 @@ func (s *RedisSuite) TestSimpleConfiguration(c *check.C) {
 	expectedJSON := filepath.FromSlash("testdata/rawdata-redis.json")

 	if *updateExpected {
-		err = ioutil.WriteFile(expectedJSON, got, 0666)
+		err = ioutil.WriteFile(expectedJSON, got, 0o666)
 		c.Assert(err, checker.IsNil)
 	}

integration/testdata/rawdata-consul.json (vendored)
@@ -86,7 +86,7 @@
     },
     "dashboard_redirect@internal": {
       "redirectRegex": {
-        "regex": "^(http:\\/\\/[^:\\/]+(:\\d+)?)\\/$",
+        "regex": "^(http:\\/\\/(\\[[\\w:.]+\\]|[\\w\\._-]+)(:\\d+)?)\\/$",
         "replacement": "${1}/dashboard/",
         "permanent": true
       },
integration/testdata/rawdata-etcd.json (vendored)
@@ -86,7 +86,7 @@
     },
     "dashboard_redirect@internal": {
       "redirectRegex": {
-        "regex": "^(http:\\/\\/[^:\\/]+(:\\d+)?)\\/$",
+        "regex": "^(http:\\/\\/(\\[[\\w:.]+\\]|[\\w\\._-]+)(:\\d+)?)\\/$",
         "replacement": "${1}/dashboard/",
         "permanent": true
       },
integration/testdata/rawdata-ingress.json (vendored)
@@ -54,7 +54,7 @@
     "middlewares": {
       "dashboard_redirect@internal": {
         "redirectRegex": {
-          "regex": "^(http:\\/\\/[^:\\/]+(:\\d+)?)\\/$",
+          "regex": "^(http:\\/\\/(\\[[\\w:.]+\\]|[\\w\\._-]+)(:\\d+)?)\\/$",
           "replacement": "${1}/dashboard/",
           "permanent": true
         },
integration/testdata/rawdata-redis.json (vendored)
@@ -86,7 +86,7 @@
     },
     "dashboard_redirect@internal": {
      "redirectRegex": {
-        "regex": "^(http:\\/\\/[^:\\/]+(:\\d+)?)\\/$",
+        "regex": "^(http:\\/\\/(\\[[\\w:.]+\\]|[\\w\\._-]+)(:\\d+)?)\\/$",
         "replacement": "${1}/dashboard/",
         "permanent": true
       },
integration/testdata/rawdata-zk.json (vendored)
@@ -86,7 +86,7 @@
     },
     "dashboard_redirect@internal": {
       "redirectRegex": {
-        "regex": "^(http:\\/\\/[^:\\/]+(:\\d+)?)\\/$",
+        "regex": "^(http:\\/\\/(\\[[\\w:.]+\\]|[\\w\\._-]+)(:\\d+)?)\\/$",
         "replacement": "${1}/dashboard/",
         "permanent": true
       },
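The regex swap repeated across the rawdata-*.json fixtures above widens the dashboard-redirect pattern so that bracketed IPv6 hosts also match. A small sketch of the difference, using made-up request URLs (not taken from the test data):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldRe := regexp.MustCompile(`^(http:\/\/[^:\/]+(:\d+)?)\/$`)
	newRe := regexp.MustCompile(`^(http:\/\/(\[[\w:.]+\]|[\w\._-]+)(:\d+)?)\/$`)

	// Illustrative URLs only.
	ipv6 := "http://[::1]:8080/"
	name := "http://traefik.localhost/"

	fmt.Println(oldRe.MatchString(ipv6), newRe.MatchString(ipv6)) // false true
	fmt.Println(oldRe.MatchString(name), newRe.MatchString(name)) // true true
}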
@@ -22,7 +22,7 @@ import (
 type WebsocketSuite struct{ BaseSuite }

 func (s *WebsocketSuite) TestBase(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c, err := upgrader.Upgrade(w, r, nil)
@@ -72,7 +72,7 @@ func (s *WebsocketSuite) TestBase(c *check.C) {
 }

 func (s *WebsocketSuite) TestWrongOrigin(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c, err := upgrader.Upgrade(w, r, nil)
@@ -122,7 +122,7 @@ func (s *WebsocketSuite) TestWrongOrigin(c *check.C) {

 func (s *WebsocketSuite) TestOrigin(c *check.C) {
 	// use default options
-	var upgrader = gorillawebsocket.Upgrader{}
+	upgrader := gorillawebsocket.Upgrader{}

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c, err := upgrader.Upgrade(w, r, nil)
@@ -180,7 +180,7 @@ func (s *WebsocketSuite) TestOrigin(c *check.C) {
 }

 func (s *WebsocketSuite) TestWrongOriginIgnoredByServer(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{CheckOrigin: func(r *http.Request) bool {
+	upgrader := gorillawebsocket.Upgrader{CheckOrigin: func(r *http.Request) bool {
 		return true
 	}}

@@ -240,7 +240,7 @@ func (s *WebsocketSuite) TestWrongOriginIgnoredByServer(c *check.C) {
 }

 func (s *WebsocketSuite) TestSSLTermination(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c, err := upgrader.Upgrade(w, r, nil)
@@ -297,11 +297,10 @@ func (s *WebsocketSuite) TestSSLTermination(c *check.C) {
 }

 func (s *WebsocketSuite) TestBasicAuth(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		conn, err := upgrader.Upgrade(w, r, nil)
-
 		if err != nil {
 			return
 		}
@@ -390,7 +389,7 @@ func (s *WebsocketSuite) TestSpecificResponseFromBackend(c *check.C) {
 }

 func (s *WebsocketSuite) TestURLWithURLEncodedChar(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c.Assert(r.URL.EscapedPath(), check.Equals, "/ws/http%3A%2F%2Ftest")
@@ -441,7 +440,7 @@ func (s *WebsocketSuite) TestURLWithURLEncodedChar(c *check.C) {
 }

 func (s *WebsocketSuite) TestSSLhttp2(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	ts := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c, err := upgrader.Upgrade(w, r, nil)
@@ -504,7 +503,7 @@ func (s *WebsocketSuite) TestSSLhttp2(c *check.C) {
 }

 func (s *WebsocketSuite) TestHeaderAreForwared(c *check.C) {
-	var upgrader = gorillawebsocket.Upgrader{} // use default options
+	upgrader := gorillawebsocket.Upgrader{} // use default options

 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		c.Assert(r.Header.Get("X-Token"), check.Equals, "my-token")
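The websocket hunks above repeat a single pattern: inside a function body, `var upgrader = gorillawebsocket.Upgrader{}` becomes the short declaration `upgrader := gorillawebsocket.Upgrader{}`. Both forms declare and initialize the same variable; a tiny sketch with a stand-in type (not the gorilla package) showing the two spellings:

package main

import "fmt"

// Upgrader is a stand-in for gorillawebsocket.Upgrader, used only for illustration.
type Upgrader struct{ CheckOrigin bool }

func main() {
	var a = Upgrader{} // long form
	b := Upgrader{}    // short form, the more common spelling inside a function body

	fmt.Println(a, b)
}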
@@ -138,7 +138,7 @@ func (s *ZookeeperSuite) TestSimpleConfiguration(c *check.C) {
 	expectedJSON := filepath.FromSlash("testdata/rawdata-zk.json")

 	if *updateExpected {
-		err = ioutil.WriteFile(expectedJSON, got, 0666)
+		err = ioutil.WriteFile(expectedJSON, got, 0o666)
 		c.Assert(err, checker.IsNil)
 	}

@@ -24,7 +24,7 @@ func main() {
 	genKVDynConfDoc("./docs/content/reference/dynamic-configuration/kv-ref.md")
 }

-func genStaticConfDoc(outputFile string, prefix string, encodeFn func(interface{}) ([]parser.Flat, error)) {
+func genStaticConfDoc(outputFile, prefix string, encodeFn func(interface{}) ([]parser.Flat, error)) {
 	logger := log.WithoutContext().WithField("file", outputFile)

 	element := &static.Configuration{}
@@ -41,7 +41,7 @@ func genStaticConfDoc(outputFile string, prefix string, encodeFn func(interface{
 		logger.Fatal(err)
 	}

-	file, err := os.OpenFile(outputFile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
+	file, err := os.OpenFile(outputFile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o666)
 	if err != nil {
 		logger.Fatal(err)
 	}
@@ -11,6 +11,7 @@ type Courgette struct {
 	Ji string
 	Ho string
 }

 type Tomate struct {
+	Ji string
 	Ho string
@@ -226,7 +226,7 @@ func TestHandler_EntryPoints(t *testing.T) {
 				newJSON, err := json.MarshalIndent(results, "", "\t")
 				require.NoError(t, err)

-				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 				require.NoError(t, err)
 			}

@@ -840,7 +840,7 @@ func TestHandler_HTTP(t *testing.T) {
 				newJSON, err := json.MarshalIndent(results, "", "\t")
 				require.NoError(t, err)

-				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 				require.NoError(t, err)
 			}

@@ -277,7 +277,7 @@ func TestHandler_Overview(t *testing.T) {
 				newJSON, err := json.MarshalIndent(results, "", "\t")
 				require.NoError(t, err)

-				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 				require.NoError(t, err)
 			}

@@ -548,7 +548,7 @@ func TestHandler_TCP(t *testing.T) {
 				newJSON, err := json.MarshalIndent(results, "", "\t")
 				require.NoError(t, err)

-				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 				require.NoError(t, err)
 			}

@@ -161,7 +161,7 @@ func TestHandler_RawData(t *testing.T) {
 				newJSON, err := json.MarshalIndent(rtRepr, "", "\t")
 				require.NoError(t, err)

-				err = ioutil.WriteFile(test.expected.json, newJSON, 0644)
+				err = ioutil.WriteFile(test.expected.json, newJSON, 0o644)
 				require.NoError(t, err)
 			}

@@ -525,7 +525,7 @@ func TestHandler_UDP(t *testing.T) {
 				newJSON, err := json.MarshalIndent(results, "", "\t")
 				require.NoError(t, err)

-				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0644)
+				err = ioutil.WriteFile(test.expected.jsonFile, newJSON, 0o644)
 				require.NoError(t, err)
 			}

@@ -40,7 +40,7 @@ func Decode(filePath string, element interface{}) error {
 // file contents -> tree of untyped nodes
 // untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
 // "typed" nodes -> typed element.
-func DecodeContent(content string, extension string, element interface{}) error {
+func DecodeContent(content, extension string, element interface{}) error {
 	data := make(map[string]interface{})

 	switch extension {
@@ -100,25 +100,34 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -131,7 +140,8 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -144,33 +154,44 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -178,17 +199,22 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -203,15 +229,19 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -219,7 +249,9 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -228,14 +260,17 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -251,10 +286,12 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -264,17 +301,22 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -284,18 +326,21 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
@@ -303,14 +348,17 @@ func Test_decodeFileToNode_Toml(t *testing.T) {
[Formatting-only reflow of the expected []*parser.Node literals: runs of closing braces such as `}}}}},` are split so each `}},` sits on its own line, and entries like `{Name: "acme", ...}` and `{Name: "kubernetesCRD", ...}` are expanded across several lines. Node names and values ("foobar", "42", "true", "10s", ...) are unchanged.]
@@ -332,25 +380,34 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -363,7 +420,8 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -376,33 +434,44 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -410,17 +479,22 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -435,15 +509,19 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -451,7 +529,9 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -460,14 +540,17 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -483,10 +566,12 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -496,17 +581,22 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -516,18 +606,21 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
@@ -535,14 +628,17 @@ func Test_decodeFileToNode_Yaml(t *testing.T) {
[Same formatting-only reflow as the Toml case above, applied to the YAML fixture's expected []*parser.Node literals; node names and values are unchanged.]
@@ -101,7 +101,7 @@ func (f *flagSet) parseOne() (bool, error) {
 	return true, nil
 }

-func (f *flagSet) setValue(name string, value string) {
+func (f *flagSet) setValue(name, value string) {
 	srcKey := parser.DefaultRootName + "." + name
 	neutralKey := strings.ToLower(srcKey)

@@ -81,8 +81,8 @@ func filterPairs(pairs []*store.KVPair, filters []string) []*store.KVPair {
 		return pairs[i].Key < pairs[j].Key
 	})

-	var simplePairs = map[string]*store.KVPair{}
-	var slicePairs = map[string][]string{}
+	simplePairs := map[string]*store.KVPair{}
+	slicePairs := map[string][]string{}

 	for _, pair := range pairs {
 		if len(filters) == 0 {
@@ -498,27 +498,31 @@ func TestFill(t *testing.T) {
@@ -534,27 +538,31 @@ func TestFill(t *testing.T) {
@@ -568,23 +576,26 @@ func TestFill(t *testing.T) {
@@ -599,19 +610,21 @@ func TestFill(t *testing.T) {
@@ -628,20 +641,23 @@ func TestFill(t *testing.T) {
@@ -657,18 +673,22 @@ func TestFill(t *testing.T) {
@@ -696,18 +716,22 @@ func TestFill(t *testing.T) {
@@ -1186,8 +1210,10 @@ func TestFill(t *testing.T) {
@@ -1212,8 +1238,10 @@ func TestFill(t *testing.T) {
@@ -1273,8 +1301,10 @@ func TestFill(t *testing.T) {
[Formatting-only reflow of the test-case literals: `expected: expected{element: &struct{ ... }{ ... }}` is expanded so `expected{`, `element:`, the anonymous struct and map types (Foo struct / *struct with the `label:"allowEmpty"` tag, map[string]string, map[string]struct{ Fii string }), and each closing `}},` sit on their own lines. Field names (Fii, Fuu) and values ("huu", 6, "hii", "name1", "name2") are unchanged.]
@@ -1401,8 +1431,10 @@ func TestFill(t *testing.T) {
 	}
 }

-type NamedType string
-type NamedTypeInt int
+type (
+	NamedType string
+	NamedTypeInt int
+)

 type InitializedFoo struct {
 	Fii string
@@ -23,9 +23,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -33,9 +34,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -44,10 +46,11 @@ func TestEncodeToNode(t *testing.T) {
@@ -55,9 +58,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -65,9 +69,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -75,9 +80,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -85,9 +91,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -95,9 +102,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -105,9 +113,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -115,9 +124,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -125,9 +135,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -135,9 +146,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -145,9 +157,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -155,9 +168,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -165,9 +179,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -175,9 +190,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -196,12 +212,13 @@ func TestEncodeToNode(t *testing.T) {
@@ -220,11 +237,12 @@ func TestEncodeToNode(t *testing.T) {
@@ -243,12 +261,13 @@ func TestEncodeToNode(t *testing.T) {
@@ -267,12 +286,13 @@ func TestEncodeToNode(t *testing.T) {
@@ -291,11 +311,12 @@ func TestEncodeToNode(t *testing.T) {
@@ -314,12 +335,13 @@ func TestEncodeToNode(t *testing.T) {
@@ -338,12 +360,13 @@ func TestEncodeToNode(t *testing.T) {
@@ -386,9 +409,10 @@ func TestEncodeToNode(t *testing.T) {
@@ -452,113 +476,127 @@ func TestEncodeToNode(t *testing.T) {
[Formatting-only reflow of the expected values: `expected: expected{node: &Node{Name: "traefik", Children: []*Node{...}}` is rewritten so `expected{` and `node:` are on separate lines and the closing `}}` / `},` braces each get their own line. The pattern repeats for every scalar case (string, int, int8–int64, uint–uint64, float32/float64, bool), the nested Foo/Fii/Fuu struct cases, and the final "slice of ..." cases (slice of string, int, int8–int64, uint–uint64, float32, float64, bool). Node names, field names, and values ("bar", "hii", "huu", "1", "2", "1.120000", "true", "huu, hii", "4, 2, 3", "4.100000, 2.000000, 3.200000", "true, false, true") are unchanged.]
{Name: "Bar", FieldName: "Bar", Value: "true, false, true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -708,12 +746,13 @@ func TestEncodeToNode(t *testing.T) {
|
|||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
expected: expected{
|
||||
node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@@ -682,7 +682,8 @@ func TestAddMetadata(t *testing.T) {
Children: []*Node{
{Name: "Bar", FieldName: "Bar", Value: "bir", Kind: reflect.String},
{Name: "Bur", FieldName: "Bur", Value: "fuu", Kind: reflect.String},
}},
},
},
},
},
},

@@ -728,8 +729,10 @@ func TestAddMetadata(t *testing.T) {
Kind: reflect.Struct,
Children: []*Node{
{Name: "Bur", FieldName: "Bur", Value: "fuu", Kind: reflect.String},
}},
}},
},
},
},
},
},
},
},

@@ -797,22 +800,26 @@ func TestAddMetadata(t *testing.T) {
Children: []*Node{
{Name: "Fii", FieldName: "Fii", Kind: reflect.String, Value: "fii"},
{Name: "Fee", FieldName: "Fee", Kind: reflect.Int, Value: "1"},
}},
},
},
{
Name: "Bur",
FieldName: "Bur",
Kind: reflect.Struct,
Children: []*Node{
{Name: "Faa", FieldName: "Faa", Kind: reflect.String, Value: "faa"},
}},
}},
},
},
},
},
{
Name: "Fii",
FieldName: "Fii",
Kind: reflect.Struct,
Children: []*Node{
{Name: "FiiBar", FieldName: "FiiBar", Kind: reflect.String, Value: "fiiBar"},
}},
},
},
},
},
},
@@ -168,7 +168,7 @@ func (s *ServiceInfo) AddError(err error, critical bool) {

// UpdateServerStatus sets the status of the server in the ServiceInfo.
// It is the responsibility of the caller to check that s is not nil.
func (s *ServiceInfo) UpdateServerStatus(server string, status string) {
func (s *ServiceInfo) UpdateServerStatus(server, status string) {
s.serverStatusMu.Lock()
defer s.serverStatusMu.Unlock()

@@ -79,6 +79,7 @@ type CertificateResolver struct {
type Global struct {
CheckNewVersion bool `description:"Periodically check if a new version has been released." json:"checkNewVersion,omitempty" toml:"checkNewVersion,omitempty" yaml:"checkNewVersion,omitempty" label:"allowEmpty" file:"allowEmpty" export:"true"`
SendAnonymousUsage bool `description:"Periodically send anonymous usage statistics. If the option is not specified, it will be enabled by default." json:"sendAnonymousUsage,omitempty" toml:"sendAnonymousUsage,omitempty" yaml:"sendAnonymousUsage,omitempty" label:"allowEmpty" file:"allowEmpty" export:"true"`
InsecureSNI bool `description:"Allow domain fronting. If the option is not specified, it will be disabled by default." json:"insecureSNI,omitempty" toml:"insecureSNI,omitempty" yaml:"insecureSNI,omitempty" label:"allowEmpty" export:"true"`
}

// ServersTransport options to configure communication between Traefik and the servers.

@@ -22,8 +22,10 @@ const (
serverDown = "DOWN"
)

var singleton *HealthCheck
var once sync.Once
var (
singleton *HealthCheck
once sync.Once
)

// Balancer is the set of operations required to manage the list of servers in a load-balancer.
type Balancer interface {

@@ -16,8 +16,10 @@ import (
"github.com/vulcand/oxy/roundrobin"
)

const healthCheckInterval = 200 * time.Millisecond
const healthCheckTimeout = 100 * time.Millisecond
const (
healthCheckInterval = 200 * time.Millisecond
healthCheckTimeout = 100 * time.Millisecond
)

type testHandler struct {
done func()

@@ -148,7 +150,7 @@ func TestSetBackendsConfiguration(t *testing.T) {
assert.Equal(t, test.expectedNumRemovedServers, lb.numRemovedServers, "removed servers")
assert.Equal(t, test.expectedNumUpsertedServers, lb.numUpsertedServers, "upserted servers")
// FIXME re add metrics
//assert.Equal(t, test.expectedGaugeValue, collectingMetrics.Gauge.GaugeValue, "ServerUp Gauge")
// assert.Equal(t, test.expectedGaugeValue, collectingMetrics.Gauge.GaugeValue, "ServerUp Gauge")
})
}
}
@@ -6,9 +6,7 @@ import (
"github.com/cenkalti/backoff/v4"
)

var (
_ backoff.BackOff = (*BackOff)(nil)
)
var _ backoff.BackOff = (*BackOff)(nil)

const (
defaultMinJobInterval = 30 * time.Second

@@ -24,21 +24,21 @@ func TestJobBackOff(t *testing.T) {
exp.MinJobInterval = testMinJobInterval
exp.Reset()

var expectedResults = []time.Duration{500, 500, 500, 1000, 2000, 4000, 5000, 5000, 500, 1000, 2000, 4000, 5000, 5000}
expectedResults := []time.Duration{500, 500, 500, 1000, 2000, 4000, 5000, 5000, 500, 1000, 2000, 4000, 5000, 5000}
for i, d := range expectedResults {
expectedResults[i] = d * time.Millisecond
}

for i, expected := range expectedResults {
// Assert that the next backoff falls in the expected range.
var minInterval = expected - time.Duration(testRandomizationFactor*float64(expected))
var maxInterval = expected + time.Duration(testRandomizationFactor*float64(expected))
minInterval := expected - time.Duration(testRandomizationFactor*float64(expected))
maxInterval := expected + time.Duration(testRandomizationFactor*float64(expected))

if i < 3 || i == 8 {
time.Sleep(2 * time.Second)
}

var actualInterval = exp.NextBackOff()
actualInterval := exp.NextBackOff()
if !(minInterval <= actualInterval && actualInterval <= maxInterval) {
t.Error("error")
}
@@ -101,7 +101,7 @@ func OpenFile(path string) error {
logFilePath = path

var err error
logFile, err = os.OpenFile(logFilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
logFile, err = os.OpenFile(logFilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o666)
if err != nil {
return err
}

@@ -11,8 +11,10 @@ import (
"github.com/go-kit/kit/metrics/statsd"
)

var statsdClient *statsd.Statsd
var statsdTicker *time.Ticker
var (
statsdClient *statsd.Statsd
statsdTicker *time.Ticker
)

const (
statsdMetricsServiceReqsName = "service.request.total"

@@ -9,9 +9,7 @@ import (
"github.com/containous/traefik/v2/pkg/middlewares"
)

var (
_ middlewares.Stateful = &captureResponseWriterWithCloseNotify{}
)
var _ middlewares.Stateful = &captureResponseWriterWithCloseNotify{}

type capturer interface {
http.ResponseWriter
@@ -19,7 +19,7 @@ type FieldHandler struct {
}

// NewFieldHandler creates a Field handler.
func NewFieldHandler(next http.Handler, name string, value string, applyFn FieldApply) http.Handler {
func NewFieldHandler(next http.Handler, name, value string, applyFn FieldApply) http.Handler {
return &FieldHandler{next: next, name: name, value: value, applyFn: applyFn}
}

@@ -131,11 +131,11 @@ func NewHandler(config *types.AccessLog) (*Handler, error) {
func openAccessLogFile(filePath string) (*os.File, error) {
dir := filepath.Dir(filePath)

if err := os.MkdirAll(dir, 0755); err != nil {
if err := os.MkdirAll(dir, 0o755); err != nil {
return nil, fmt.Errorf("failed to create log path %s: %w", dir, err)
}

file, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0664)
file, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o664)
if err != nil {
return nil, fmt.Errorf("error opening file %s: %w", filePath, err)
}

@@ -249,7 +249,7 @@ func (h *Handler) Rotate() error {
}

var err error
h.file, err = os.OpenFile(h.config.FilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0664)
h.file, err = os.OpenFile(h.config.FilePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o664)
if err != nil {
return err
}

@@ -259,7 +259,7 @@ func (h *Handler) Rotate() error {
return nil
}

func silentSplitHostPort(value string) (host string, port string) {
func silentSplitHostPort(value string) (host, port string) {
host, port, err := net.SplitHostPort(value)
if err != nil {
return value, "-"
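Several of the hunks above only change the spelling of file-permission literals (for example 0664 becoming 0o664). The 0o prefix is the explicit octal literal form available since Go 1.13; both spellings denote the same value, as this minimal check (not part of the diff) illustrates:

```go
package main

import "fmt"

func main() {
	// 0664 (legacy octal) and 0o664 (explicit octal, Go 1.13+) are the same number.
	fmt.Println(0664 == 0o664, 0o664) // true 436
}
```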
@@ -21,7 +21,7 @@ type CommonLogFormatter struct{}
func (f *CommonLogFormatter) Format(entry *logrus.Entry) ([]byte, error) {
b := &bytes.Buffer{}

var timestamp = defaultValue
timestamp := defaultValue
if v, ok := entry.Data[StartUTC]; ok {
timestamp = v.(time.Time).Format(commonLogTimeFormat)
} else if v, ok := entry.Data[StartLocal]; ok {

@@ -52,7 +52,7 @@ func (f *CommonLogFormatter) Format(entry *logrus.Entry) ([]byte, error) {
return b.Bytes(), err
}

func toLog(fields logrus.Fields, key string, defaultValue string, quoted bool) interface{} {
func toLog(fields logrus.Fields, key, defaultValue string, quoted bool) interface{} {
if v, ok := fields[key]; ok {
if v == nil {
return defaultValue

@@ -73,7 +73,7 @@ func toLog(fields logrus.Fields, key string, defaultValue string, quoted bool) i
return defaultValue
}

func toLogEntry(s string, defaultValue string, quote bool) string {
func toLogEntry(s, defaultValue string, quote bool) string {
if len(s) == 0 {
return defaultValue
}
@@ -62,29 +62,39 @@ func (b *basicAuth) GetTracingInformation() (string, ext.SpanKindEnum) {
func (b *basicAuth) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
logger := log.FromContext(middlewares.GetLoggerCtx(req.Context(), b.name, basicTypeName))

if username := b.auth.CheckAuth(req); username == "" {
user, password, ok := req.BasicAuth()
if ok {
secret := b.auth.Secrets(user, b.auth.Realm)
if secret == "" || !goauth.CheckSecret(password, secret) {
ok = false
}
}

logData := accesslog.GetLogData(req)
if logData != nil {
logData.Core[accesslog.ClientUsername] = user
}

if !ok {
logger.Debug("Authentication failed")
tracing.SetErrorWithEvent(req, "Authentication failed")

b.auth.RequireAuth(rw, req)
} else {
logger.Debug("Authentication succeeded")
req.URL.User = url.User(username)

logData := accesslog.GetLogData(req)
if logData != nil {
logData.Core[accesslog.ClientUsername] = username
}

if b.headerField != "" {
req.Header[b.headerField] = []string{username}
}

if b.removeHeader {
logger.Debug("Removing authorization header")
req.Header.Del(authorizationHeader)
}
b.next.ServeHTTP(rw, req)
return
}

logger.Debug("Authentication succeeded")
req.URL.User = url.User(user)

if b.headerField != "" {
req.Header[b.headerField] = []string{user}
}

if b.removeHeader {
logger.Debug("Removing authorization header")
req.Header.Del(authorizationHeader)
}
b.next.ServeHTTP(rw, req)
}

func (b *basicAuth) secretBasic(user, realm string) string {
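The basicAuth hunk above restructures the middleware so the attempted username is captured before the success/failure branch and recorded for access logging even when authentication fails. Here is a minimal, self-contained sketch of that same pattern, not Traefik's actual middleware: the `checkSecret` helper and the `X-Auth-Attempted-User` header are illustrative assumptions standing in for the real credential check and the access-log fields.

```go
package main

import (
	"fmt"
	"net/http"
)

// usernameHeader is a hypothetical header used only to surface the attempted
// username to whatever logging layer sits in front of the handler.
const usernameHeader = "X-Auth-Attempted-User"

// checkSecret is a stand-in for a real credential check (htpasswd, bcrypt, ...).
func checkSecret(user, password string) bool {
	return user == "admin" && password == "secret"
}

// basicAuth mirrors the pattern from the diff: the username is extracted and
// recorded before deciding the outcome, so failed attempts are visible too.
func basicAuth(next http.Handler) http.Handler {
	return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		user, password, ok := req.BasicAuth()
		if ok && !checkSecret(user, password) {
			ok = false
		}

		// Record the attempted username regardless of the outcome,
		// analogous to filling accesslog.ClientUsername in the hunk above.
		if user != "" {
			req.Header.Set(usernameHeader, user)
		}

		if !ok {
			rw.Header().Set("WWW-Authenticate", `Basic realm="example"`)
			http.Error(rw, "401 Unauthorized", http.StatusUnauthorized)
			return
		}

		next.ServeHTTP(rw, req)
	})
}

func main() {
	protected := basicAuth(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		fmt.Fprintf(rw, "hello %s\n", req.Header.Get(usernameHeader))
	}))
	_ = http.ListenAndServe(":8080", protected)
}
```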
@@ -63,6 +63,19 @@ func (d *digestAuth) ServeHTTP(rw http.ResponseWriter, req *http.Request) {

username, authinfo := d.auth.CheckAuth(req)
if username == "" {
headerField := d.headerField
if d.headerField == "" {
headerField = "Authorization"
}

auth := goauth.DigestAuthParams(req.Header.Get(headerField))
if auth["username"] != "" {
logData := accesslog.GetLogData(req)
if logData != nil {
logData.Core[accesslog.ClientUsername] = auth["username"]
}
}

if authinfo != nil && *authinfo == "stale" {
logger.Debug("Digest authentication failed, possibly because out of order requests")
tracing.SetErrorWithEvent(req, "Digest authentication failed, possibly because out of order requests")

@@ -158,7 +158,7 @@ func (fa *forwardAuth) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
fa.next.ServeHTTP(rw, req)
}

func writeHeader(req *http.Request, forwardReq *http.Request, trustForwardHeader bool) {
func writeHeader(req, forwardReq *http.Request, trustForwardHeader bool) {
utils.CopyHeaders(forwardReq.Header, req.Header)
utils.RemoveHeaders(forwardReq.Header, forward.HopHeaders...)

@@ -325,7 +325,8 @@ func Test_writeHeader(t *testing.T) {
"X-Forwarded-Host": "foo.bar",
"X-Forwarded-Uri": "/path?q=1",
},
}, {
},
{
name: "trust Forward Header with forwarded request Method",
headers: map[string]string{
"X-Forwarded-Method": "OPTIONS",

@@ -246,7 +246,8 @@ func TestServeHTTP(t *testing.T) {
expectedHeaders: map[string]string{
xForwardedHost: "foo.com:8080",
},
}, {
},
{
desc: "xForwardedServer from req XForwarded",
host: "foo.com:8080",
expectedHeaders: map[string]string{
@@ -54,13 +54,13 @@ func New(ctx context.Context, next http.Handler, cfg dynamic.Headers, name strin
nextHandler := next

if hasSecureHeaders {
logger.Debug("Setting up secureHeaders from %v", cfg)
logger.Debugf("Setting up secureHeaders from %v", cfg)
handler = newSecure(next, cfg, name)
nextHandler = handler
}

if hasCustomHeaders || hasCorsHeaders {
logger.Debug("Setting up customHeaders/Cors from %v", cfg)
logger.Debugf("Setting up customHeaders/Cors from %v", cfg)
handler = NewHeader(nextHandler, cfg)
}

@@ -552,7 +552,8 @@ func TestCORSResponses(t *testing.T) {
expected: map[string][]string{
"Access-Control-Allow-Origin": {"*"},
},
}, {
},
{
desc: "Test Simple CustomRequestHeaders Not Hijacked by CORS",
header: NewHeader(emptyHandler, dynamic.Headers{
CustomRequestHeaders: map[string]string{"foo": "bar"},

@@ -7,6 +7,6 @@ import (
)

// GetLoggerCtx creates a logger context with the middleware fields.
func GetLoggerCtx(ctx context.Context, middleware string, middlewareType string) context.Context {
func GetLoggerCtx(ctx context.Context, middleware, middlewareType string) context.Context {
return log.With(ctx, log.Str(log.MiddlewareName, middleware), log.Str(log.MiddlewareType, middlewareType))
}
@@ -235,7 +235,7 @@ func writeParts(ctx context.Context, content io.StringWriter, entries []string,
}
}

func writePart(ctx context.Context, content io.StringWriter, entry string, prefix string) {
func writePart(ctx context.Context, content io.StringWriter, entry, prefix string) {
if len(entry) > 0 {
_, err := content.WriteString(fmt.Sprintf("%s=%s%s", prefix, entry, subFieldSeparator))
if err != nil {

@@ -24,7 +24,7 @@ type redirect struct {
}

// New creates a Redirect middleware.
func newRedirect(next http.Handler, regex string, replacement string, permanent bool, name string) (http.Handler, error) {
func newRedirect(next http.Handler, regex, replacement string, permanent bool, name string) (http.Handler, error) {
re, err := regexp.Compile(regex)
if err != nil {
return nil, err

@@ -115,7 +115,7 @@ func rawURL(req *http.Request) string {
port := ""
uri := req.RequestURI

schemeRegex := `^(https?):\/\/([\w\._-]+)(:\d+)?(.*)$`
schemeRegex := `^(https?):\/\/(\[[\w:.]+\]|[\w\._-]+)?(:\d+)?(.*)$`
re, _ := regexp.Compile(schemeRegex)
if re.Match([]byte(req.RequestURI)) {
match := re.FindStringSubmatch(req.RequestURI)
@@ -12,7 +12,7 @@ import (

const (
typeSchemeName = "RedirectScheme"
schemeRedirectRegex = `^(https?:\/\/)?([\w\._-]+)(:\d+)?(.*)$`
schemeRedirectRegex = `^(https?:\/\/)?(\[[\w:.]+\]|[\w\._-]+)?(:\d+)?(.*)$`
)

// NewRedirectScheme creates a new RedirectScheme middleware.
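The only functional change in this hunk is the scheme-redirect regex gaining an alternative that matches a bracketed IPv6 host (the IPv6 redirect fix). A small standalone check of that pattern, copied from the new line above and exercised outside of Traefik, behaves as follows:

```go
package main

import (
	"fmt"
	"regexp"
)

// schemeRedirectRegex is the updated pattern from the hunk above; the new
// `\[[\w:.]+\]` alternative captures bracketed IPv6 hosts such as [::1].
var schemeRedirectRegex = regexp.MustCompile(`^(https?:\/\/)?(\[[\w:.]+\]|[\w\._-]+)?(:\d+)?(.*)$`)

func main() {
	for _, u := range []string{"http://[::1]:80/foo", "http://example.com/foo"} {
		m := schemeRedirectRegex.FindStringSubmatch(u)
		// m[1]=scheme, m[2]=host, m[3]=port, m[4]=rest of the URL
		fmt.Printf("%q -> host=%q port=%q rest=%q\n", u, m[2], m[3], m[4])
	}
}
```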
@@ -186,6 +186,44 @@ func TestRedirectSchemeHandler(t *testing.T) {
expectedURL: "https://foo",
expectedStatus: http.StatusFound,
},
{
desc: "IPV6 HTTP to HTTPS redirection without port",
config: dynamic.RedirectScheme{
Scheme: "https",
},
url: "http://[::1]",
expectedURL: "https://[::1]",
expectedStatus: http.StatusFound,
},
{
desc: "IPV6 HTTP to HTTPS redirection with port",
config: dynamic.RedirectScheme{
Scheme: "https",
Port: "8443",
},
url: "http://[::1]",
expectedURL: "https://[::1]:8443",
expectedStatus: http.StatusFound,
},
{
desc: "IPV6 HTTP with port 80 to HTTPS redirection without port",
config: dynamic.RedirectScheme{
Scheme: "https",
},
url: "http://[::1]:80",
expectedURL: "https://[::1]",
expectedStatus: http.StatusFound,
},
{
desc: "IPV6 HTTP with port 80 to HTTPS redirection with port",
config: dynamic.RedirectScheme{
Scheme: "https",
Port: "8443",
},
url: "http://[::1]:80",
expectedURL: "https://[::1]:8443",
expectedStatus: http.StatusFound,
},
}

for _, test := range testCases {

@@ -235,7 +273,7 @@ func TestRedirectSchemeHandler(t *testing.T) {
require.Errorf(t, err, "Location %v", location)
}

schemeRegex := `^(https?):\/\/([\w\._-]+)(:\d+)?(.*)$`
schemeRegex := `^(https?):\/\/(\[[\w:.]+\]|[\w\._-]+)?(:\d+)?(.*)$`
re, _ := regexp.Compile(schemeRegex)

if re.Match([]byte(test.url)) {
@@ -47,7 +47,7 @@ func (hr *Resolver) CNAMEFlatten(ctx context.Context, host string) string {
}

logger := log.FromContext(ctx)
var cacheDuration = 0 * time.Second
cacheDuration := 0 * time.Second
for depth := 0; depth < hr.ResolvDepth; depth++ {
resolv, err := cnameResolve(ctx, request, hr.ResolvConfig)
if err != nil {

@@ -73,7 +73,7 @@ func (hr *Resolver) CNAMEFlatten(ctx context.Context, host string) string {
}

// cnameResolve resolves CNAME if exists, and return with the highest TTL.
func cnameResolve(ctx context.Context, host string, resolvPath string) (*cnameResolv, error) {
func cnameResolve(ctx context.Context, host, resolvPath string) (*cnameResolv, error) {
config, err := dns.ClientConfigFromFile(resolvPath)
if err != nil {
return nil, fmt.Errorf("invalid resolver configuration file: %s", resolvPath)

@@ -102,7 +102,7 @@ func cnameResolve(ctx context.Context, host string, resolvPath string) (*cnameRe
return result[0], nil
}

func getRecord(client *dns.Client, msg *dns.Msg, server string, port string) (*cnameResolv, error) {
func getRecord(client *dns.Client, msg *dns.Msg, server, port string) (*cnameResolv, error) {
resp, _, err := client.Exchange(msg, net.JoinHostPort(server, port))
if err != nil {
return nil, fmt.Errorf("exchange error for server %s: %w", server, err)

@@ -45,7 +45,8 @@ func TestNewForwarder(t *testing.T) {
},
OperationName: "forward some-service.domain.tld/some-service.domain.tld",
},
}, {
},
{
desc: "Simple Forward Tracer with truncation and hashing",
spanNameLimit: 101,
tracing: &trackingBackenMock{
@@ -18,12 +18,12 @@ func (n MockTracer) StartSpan(operationName string, opts ...opentracing.StartSpa
}

// Inject belongs to the Tracer interface.
func (n MockTracer) Inject(sp opentracing.SpanContext, format interface{}, carrier interface{}) error {
func (n MockTracer) Inject(sp opentracing.SpanContext, format, carrier interface{}) error {
return nil
}

// Extract belongs to the Tracer interface.
func (n MockTracer) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) {
func (n MockTracer) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {
return nil, opentracing.ErrSpanContextNotFound
}

@@ -26,7 +26,7 @@ const (
)

// NewAccount creates an account.
func NewAccount(ctx context.Context, email string, keyTypeValue string) (*Account, error) {
func NewAccount(ctx context.Context, email, keyTypeValue string) (*Account, error) {
keyType := GetKeyType(ctx, keyTypeValue)

// Create a user. New accounts need an email and private key to start

@@ -103,7 +103,7 @@ func (s *LocalStore) listenSaveAction() {
logger.Error(err)
}

err = ioutil.WriteFile(s.filename, data, 0600)
err = ioutil.WriteFile(s.filename, data, 0o600)
if err != nil {
logger.Error(err)
}
@@ -16,7 +16,7 @@ func CheckFile(name string) (bool, error) {
if err != nil {
return false, err
}
return false, f.Chmod(0600)
return false, f.Chmod(0o600)
}
return false, err
}

@@ -27,7 +27,7 @@ func CheckFile(name string) (bool, error) {
return false, err
}

if fi.Mode().Perm()&0077 != 0 {
if fi.Mode().Perm()&0o077 != 0 {
return false, fmt.Errorf("permissions %o for %s are too open, please use 600", fi.Mode().Perm(), name)
}

@@ -12,7 +12,7 @@ func CheckFile(name string) (bool, error) {
if err != nil {
return false, err
}
return false, f.Chmod(0600)
return false, f.Chmod(0o600)
}
return false, err
}

@@ -27,10 +27,8 @@ import (
"github.com/go-acme/lego/v3/registration"
)

var (
// oscpMustStaple enables OSCP stapling as from https://github.com/go-acme/lego/issues/270.
oscpMustStaple = false
)
// oscpMustStaple enables OSCP stapling as from https://github.com/go-acme/lego/issues/270.
var oscpMustStaple = false

// Configuration holds ACME configuration provided by users.
type Configuration struct {

@@ -145,7 +143,7 @@ func (p *Provider) Init() error {
return nil
}

func isAccountMatchingCaServer(ctx context.Context, accountURI string, serverURI string) bool {
func isAccountMatchingCaServer(ctx context.Context, accountURI, serverURI string) bool {
logger := log.FromContext(ctx)

aru, err := url.Parse(accountURI)
@@ -428,13 +426,11 @@ func (p *Provider) resolveCertificate(ctx context.Context, domain types.Domain,
return nil, err
}

// Check provided certificates
// Check if provided certificates are not already in progress and lock them if needed
uncheckedDomains := p.getUncheckedDomains(ctx, domains, tlsStore)
if len(uncheckedDomains) == 0 {
return nil, nil
}

p.addResolvingDomains(uncheckedDomains)
defer p.removeResolvingDomains(uncheckedDomains)

logger := log.FromContext(ctx)

@@ -483,16 +479,7 @@ func (p *Provider) removeResolvingDomains(resolvingDomains []string) {
}
}

func (p *Provider) addResolvingDomains(resolvingDomains []string) {
p.resolvingDomainsMutex.Lock()
defer p.resolvingDomainsMutex.Unlock()

for _, domain := range resolvingDomains {
p.resolvingDomains[domain] = struct{}{}
}
}

func (p *Provider) addCertificateForDomain(domain types.Domain, certificate []byte, key []byte, tlsStore string) {
func (p *Provider) addCertificateForDomain(domain types.Domain, certificate, key []byte, tlsStore string) {
p.certsChan <- &CertAndStore{Certificate: Certificate{Certificate: certificate, Key: key, Domain: domain}, Store: tlsStore}
}

@@ -640,7 +627,6 @@ func (p *Provider) renewCertificates(ctx context.Context) {
PrivateKey: cert.Key,
Certificate: cert.Certificate.Certificate,
}, true, oscpMustStaple)

if err != nil {
logger.Errorf("Error renewing certificate from LE: %v, %v", cert.Domain, err)
continue

@@ -659,8 +645,8 @@ func (p *Provider) renewCertificates(ctx context.Context) {
// Get provided certificate which check a domains list (Main and SANs)
// from static and dynamic provided certificates.
func (p *Provider) getUncheckedDomains(ctx context.Context, domainsToCheck []string, tlsStore string) []string {
p.resolvingDomainsMutex.RLock()
defer p.resolvingDomainsMutex.RUnlock()
p.resolvingDomainsMutex.Lock()
defer p.resolvingDomainsMutex.Unlock()

log.FromContext(ctx).Debugf("Looking for provided certificate(s) to validate %q...", domainsToCheck)

@@ -676,10 +662,17 @@ func (p *Provider) getUncheckedDomains(ctx context.Context, domainsToCheck []str
allDomains = append(allDomains, domain)
}

return searchUncheckedDomains(ctx, domainsToCheck, allDomains)
uncheckedDomains := searchUncheckedDomains(ctx, domainsToCheck, allDomains)

// Lock domains that will be resolved by this routine
for _, domain := range uncheckedDomains {
p.resolvingDomains[domain] = struct{}{}
}

return uncheckedDomains
}

func searchUncheckedDomains(ctx context.Context, domainsToCheck []string, existentDomains []string) []string {
func searchUncheckedDomains(ctx context.Context, domainsToCheck, existentDomains []string) []string {
var uncheckedDomains []string
for _, domainToCheck := range domainsToCheck {
if !isDomainAlreadyChecked(domainToCheck, existentDomains) {
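The getUncheckedDomains change above switches from a read lock to a full lock so that checking for in-progress domains and marking them as resolving happen in one critical section, which is what prevents two goroutines from requesting the same ACME certificate concurrently. A stripped-down sketch of that check-and-mark pattern, using generic names rather than Traefik's provider types, looks like this:

```go
package main

import (
	"fmt"
	"sync"
)

// resolver tracks which domains already have a certificate request in flight.
type resolver struct {
	mu        sync.Mutex
	resolving map[string]struct{}
}

// claim returns only the domains that are not yet being resolved and marks
// them as in progress, all under a single lock: the check and the reservation
// cannot be interleaved with another goroutine doing the same thing.
func (r *resolver) claim(domains []string) []string {
	r.mu.Lock()
	defer r.mu.Unlock()

	var claimed []string
	for _, d := range domains {
		if _, inProgress := r.resolving[d]; inProgress {
			continue
		}
		r.resolving[d] = struct{}{}
		claimed = append(claimed, d)
	}
	return claimed
}

// release removes domains once their resolution finished (success or failure).
func (r *resolver) release(domains []string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	for _, d := range domains {
		delete(r.resolving, d)
	}
}

func main() {
	r := &resolver{resolving: map[string]struct{}{}}
	fmt.Println(r.claim([]string{"example.com", "example.org"})) // both claimed
	fmt.Println(r.claim([]string{"example.com"}))                // already in flight -> empty
	r.release([]string{"example.com"})
	fmt.Println(r.claim([]string{"example.com"})) // claimable again
}
```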
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue