prometheus/util/httputil/compression_test.go

// Copyright 2016 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package httputil

import (
	"bytes"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"

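	// klauspost/compress provides drop-in replacements for the standard
	// library's gzip and zlib packages with better performance.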
"github.com/klauspost/compress/gzip"
"github.com/klauspost/compress/zlib"
"github.com/stretchr/testify/require"
)

var (
	mux    *http.ServeMux
	server *httptest.Server
)
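
// setup creates a fresh ServeMux and httptest server shared by the tests and
// returns a teardown function that closes the server.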
func setup() func() {
	mux = http.NewServeMux()
	server = httptest.NewServer(mux)
	return func() {
		server.Close()
	}
}
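
// getCompressionHandlerFunc wraps a trivial handler that replies with
// "Hello World!" in a CompressionHandler so the tests can exercise its
// content negotiation.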
func getCompressionHandlerFunc() CompressionHandler {
	hf := func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte("Hello World!"))
	}
	return CompressionHandler{
		Handler: http.HandlerFunc(hf),
	}
}
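
// TestCompressionHandler_PlainText verifies that a request without an
// Accept-Encoding header receives an uncompressed response.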
func TestCompressionHandler_PlainText(t *testing.T) {
	tearDown := setup()
	defer tearDown()

	ch := getCompressionHandlerFunc()
	mux.Handle("/foo_endpoint", ch)

	client := &http.Client{
		Transport: &http.Transport{
			DisableCompression: true,
		},
	}

	resp, err := client.Get(server.URL + "/foo_endpoint")
	require.NoError(t, err, "client get failed with unexpected error")
	defer resp.Body.Close()

	contents, err := io.ReadAll(resp.Body)
	require.NoError(t, err, "unexpected error while reading the response body")

	expected := "Hello World!"
	actual := string(contents)
	require.Equal(t, expected, actual, "unexpected response content")
}
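
// TestCompressionHandler_Gzip verifies that a request advertising gzip in
// Accept-Encoding receives a gzip-compressed response with the matching
// Content-Encoding header.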
func TestCompressionHandler_Gzip(t *testing.T) {
	tearDown := setup()
	defer tearDown()

	ch := getCompressionHandlerFunc()
	mux.Handle("/foo_endpoint", ch)

	client := &http.Client{
		Transport: &http.Transport{
			DisableCompression: true,
		},
	}

	req, _ := http.NewRequest(http.MethodGet, server.URL+"/foo_endpoint", nil)
	req.Header.Set(acceptEncodingHeader, gzipEncoding)

	resp, err := client.Do(req)
	require.NoError(t, err, "client get failed with unexpected error")
	defer resp.Body.Close()

	actualHeader := resp.Header.Get(contentEncodingHeader)
	require.Equal(t, gzipEncoding, actualHeader, "unexpected encoding header in response")

	var buf bytes.Buffer
	zr, err := gzip.NewReader(resp.Body)
	require.NoError(t, err, "unexpected error while creating the response body reader")

	_, err = buf.ReadFrom(zr)
	require.NoError(t, err, "unexpected error while reading the response body")

	actual := buf.String()
	expected := "Hello World!"
	require.Equal(t, expected, actual, "unexpected response content")
}
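
// TestCompressionHandler_Deflate verifies that a request advertising deflate
// in Accept-Encoding receives a zlib-compressed response with the matching
// Content-Encoding header.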
func TestCompressionHandler_Deflate(t *testing.T) {
	tearDown := setup()
	defer tearDown()

	ch := getCompressionHandlerFunc()
	mux.Handle("/foo_endpoint", ch)

	client := &http.Client{
		Transport: &http.Transport{
			DisableCompression: true,
		},
	}

	req, _ := http.NewRequest(http.MethodGet, server.URL+"/foo_endpoint", nil)
	req.Header.Set(acceptEncodingHeader, deflateEncoding)

	resp, err := client.Do(req)
	require.NoError(t, err, "client get failed with unexpected error")
	defer resp.Body.Close()

	actualHeader := resp.Header.Get(contentEncodingHeader)
	require.Equal(t, deflateEncoding, actualHeader, "unexpected encoding header in response")

	var buf bytes.Buffer
	dr, err := zlib.NewReader(resp.Body)
	require.NoError(t, err, "unexpected error while creating the response body reader")

	_, err = buf.ReadFrom(dr)
	require.NoError(t, err, "unexpected error while reading the response body")

	actual := buf.String()
	expected := "Hello World!"
	require.Equal(t, expected, actual, "unexpected response content")
}