fix: Double GZIP.
This commit is contained in:
parent
ccc429e36c
commit
d87c4d89e9
2 changed files with 89 additions and 6 deletions
|
@ -6,12 +6,26 @@ import (
|
||||||
"github.com/NYTimes/gziphandler"
|
"github.com/NYTimes/gziphandler"
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
	// contentEncodingHeader is the canonical name of the HTTP header that
	// declares how a message body is encoded; ServeHTTP uses it to detect
	// bodies that are already compressed.
	contentEncodingHeader = "Content-Encoding"
)
|
||||||
|
|
||||||
|
// Compress is a middleware that gzip-compresses HTTP responses, skipping
// requests whose body is already content-encoded to avoid compressing twice.
// (The previous comment mentioned "redirection", which was a copy-paste error.)
type Compress struct{}
|
||||||
|
|
||||||
|
// ServerHTTP is a function used by Negroni
|
||||||
|
func (c *Compress) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) {
|
||||||
|
if isEncoded(r.Header) {
|
||||||
|
next.ServeHTTP(rw, r)
|
||||||
|
} else {
|
||||||
|
newGzipHandler := gziphandler.GzipHandler(next)
|
||||||
|
newGzipHandler.ServeHTTP(rw, r)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ServerHTTP is a function used by negroni
|
func isEncoded(headers http.Header) bool {
|
||||||
func (c *Compress) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) {
|
header := headers.Get(contentEncodingHeader)
|
||||||
newGzipHandler := gziphandler.GzipHandler(next)
|
// According to https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding,
|
||||||
newGzipHandler.ServeHTTP(rw, r)
|
// content is not encoded if the header 'Content-Encoding' is empty or equals to 'identity'.
|
||||||
|
return header != "" && header != "identity"
|
||||||
}
|
}
|
||||||
|
|
69
middlewares/compress_test.go
Normal file
69
middlewares/compress_test.go
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
package middlewares
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/NYTimes/gziphandler"
|
||||||
|
"github.com/containous/traefik/testhelpers"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Header names and the encoding token shared by the compression tests.
const (
	acceptEncodingHeader = "Accept-Encoding"
	varyHeader           = "Vary"
	gzip                 = "gzip"
)
|
||||||
|
|
||||||
|
func TestShouldCompressWhenNoContentEncodingHeader(t *testing.T) {
|
||||||
|
handler := &Compress{}
|
||||||
|
|
||||||
|
req := testhelpers.MustNewRequest(http.MethodGet, "http://localhost", nil)
|
||||||
|
req.Header.Add(acceptEncodingHeader, gzip)
|
||||||
|
|
||||||
|
baseBody := generateBytes(gziphandler.DefaultMinSize)
|
||||||
|
next := func(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
rw.Write(baseBody)
|
||||||
|
}
|
||||||
|
rw := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler.ServeHTTP(rw, req, next)
|
||||||
|
|
||||||
|
assert.Equal(t, gzip, rw.Header().Get(contentEncodingHeader))
|
||||||
|
assert.Equal(t, acceptEncodingHeader, rw.Header().Get(varyHeader))
|
||||||
|
|
||||||
|
if assert.ObjectsAreEqualValues(rw.Body.Bytes(), baseBody) {
|
||||||
|
assert.Fail(t, "expected a compressed body", "got %v", rw.Body.Bytes())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestShouldNotCompressWhenContentEncodingHeader(t *testing.T) {
|
||||||
|
handler := &Compress{}
|
||||||
|
|
||||||
|
req := testhelpers.MustNewRequest(http.MethodGet, "http://localhost", nil)
|
||||||
|
req.Header.Add(acceptEncodingHeader, gzip)
|
||||||
|
req.Header.Add(contentEncodingHeader, gzip)
|
||||||
|
|
||||||
|
baseBody := generateBytes(gziphandler.DefaultMinSize)
|
||||||
|
|
||||||
|
next := func(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
rw.Write(baseBody)
|
||||||
|
}
|
||||||
|
|
||||||
|
rw := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rw, req, next)
|
||||||
|
|
||||||
|
assert.Equal(t, "", rw.Header().Get(contentEncodingHeader))
|
||||||
|
assert.Equal(t, "", rw.Header().Get(varyHeader))
|
||||||
|
|
||||||
|
assert.EqualValues(t, rw.Body.Bytes(), baseBody)
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateBytes returns a slice of the given length filled with 0x61 ('a').
// A non-positive length yields nil, matching the original append-loop
// behavior. The parameter was renamed from `len`, which shadowed the
// builtin, and the slice is pre-sized instead of grown byte by byte.
func generateBytes(length int) []byte {
	if length <= 0 {
		return nil
	}
	value := make([]byte, length)
	for i := range value {
		value[i] = 0x61
	}
	return value
}
|
Loading…
Reference in a new issue