import (
"errors"
"fmt"
+ "math"
"os"
"regexp"
"runtime"
+ "sort"
+ "strconv"
+ "strings"
"sync"
"time"
FailIfHeaderNotMatchesRegexp []HeaderMatch `yaml:"fail_if_header_not_matches,omitempty"`
Body string `yaml:"body,omitempty"`
HTTPClientConfig config.HTTPClientConfig `yaml:"http_client_config,inline"`
+ Compression string `yaml:"compression,omitempty"`
}
type HeaderMatch struct {
if err := s.HTTPClientConfig.Validate(); err != nil {
return err
}
+
+ for key, value := range s.Headers {
+ switch strings.Title(key) {
+ case "Accept-Encoding":
+ if !isCompressionAcceptEncodingValid(s.Compression, value) {
+ return fmt.Errorf(`invalid configuration "%s: %s", "compression: %s"`, key, value, s.Compression)
+ }
+ }
+ }
+
return nil
}
return nil
}
+
// isCompressionAcceptEncodingValid validates the compression +
// Accept-Encoding combination.
//
// If there's a compression setting, and there's also an accept-encoding
// header, they MUST match, otherwise we end up requesting something
// that doesn't include the specified compression, and that's likely to
// fail, depending on how the server is configured. Testing that the
// server _ignores_ Accept-Encoding, e.g. by not including a particular
// compression in the header but expecting it in the response falls out
// of the scope of the tests we perform.
//
// With that logic, this function validates that if a compression
// algorithm is specified, it's covered by the specified accept encoding
// header. It doesn't need to be the most preferred encoding, but it
// MUST be included in the preferred encodings with a non-zero quality.
// An entry naming the encoding explicitly takes precedence over a "*"
// wildcard: per RFC 7231, section 5.3.4, "*" only matches codings not
// explicitly listed in the header.
func isCompressionAcceptEncodingValid(encoding, acceptEncoding string) bool {
	// unspecified compression + any encoding value is valid
	// any compression + no accept encoding is valid
	if encoding == "" || acceptEncoding == "" {
		return true
	}

	type encodingQuality struct {
		encoding string
		quality  float32
	}

	var encodings []encodingQuality

	for _, parts := range strings.Split(acceptEncoding, ",") {
		var e encodingQuality

		if idx := strings.LastIndexByte(parts, ';'); idx == -1 {
			// No parameters: the entry gets the default quality 1.0.
			e.encoding = strings.TrimSpace(parts)
			e.quality = 1.0
		} else {
			parseQuality := func(str string) float32 {
				q, err := strconv.ParseFloat(str, 32)
				if err != nil {
					// Treat a malformed qvalue as "not acceptable".
					return 0
				}
				// qvalues carry at most three decimal digits (RFC 7231,
				// section 5.3.1); round away float parsing noise.
				return float32(math.Round(q*1000) / 1000)
			}

			e.encoding = strings.TrimSpace(parts[:idx])

			q := strings.TrimSpace(parts[idx+1:])
			q = strings.TrimPrefix(q, "q=")
			e.quality = parseQuality(q)
		}

		encodings = append(encodings, e)
	}

	// Sort by descending quality so that the first hit in each scan
	// below is the most preferred entry of its kind.
	sort.SliceStable(encodings, func(i, j int) bool {
		return encodings[j].quality < encodings[i].quality
	})

	// An entry naming the encoding explicitly takes precedence over a
	// wildcard: "gzip;q=0, *" rejects gzip even though "*" would accept
	// any other coding.
	for _, e := range encodings {
		if e.encoding == encoding {
			return e.quality > 0
		}
	}

	// No explicit entry for the encoding: fall back to the wildcard, if
	// one is present.
	for _, e := range encodings {
		if e.encoding == "*" {
			return e.quality > 0
		}
	}

	return false
}
input: "testdata/invalid-http-header-match-regexp.yml",
want: `error parsing config file: "Could not compile regular expression" regexp=":["`,
},
+ {
+ input: "testdata/invalid-http-compression-mismatch.yml",
+ want: `error parsing config file: invalid configuration "Accept-Encoding: deflate", "compression: gzip"`,
+ },
+ {
+ input: "testdata/invalid-http-request-compression-reject-all-encodings.yml",
+ want: `error parsing config file: invalid configuration "Accept-Encoding: *;q=0.0", "compression: gzip"`,
+ },
{
input: "testdata/invalid-tcp-query-response-regexp.yml",
want: `error parsing config file: "Could not compile regular expression" regexp=":["`,
t.Fatal("config's String method reveals authentication credentials.")
}
}
+
+func TestIsEncodingAcceptable(t *testing.T) {
+ testcases := map[string]struct {
+ input string
+ acceptEncoding string
+ expected bool
+ }{
+ "empty compression": {
+ input: "",
+ acceptEncoding: "gzip",
+ expected: true,
+ },
+ "trivial": {
+ input: "gzip",
+ acceptEncoding: "gzip",
+ expected: true,
+ },
+ "trivial, quality": {
+ input: "gzip",
+ acceptEncoding: "gzip;q=1.0",
+ expected: true,
+ },
+ "first": {
+ input: "gzip",
+ acceptEncoding: "gzip, compress",
+ expected: true,
+ },
+ "second": {
+ input: "gzip",
+ acceptEncoding: "compress, gzip",
+ expected: true,
+ },
+ "missing": {
+ input: "br",
+ acceptEncoding: "gzip, compress",
+ expected: false,
+ },
+ "*": {
+ input: "br",
+ acceptEncoding: "gzip, compress, *",
+ expected: true,
+ },
+ "* with quality": {
+ input: "br",
+ acceptEncoding: "gzip, compress, *;q=0.1",
+ expected: true,
+ },
+ "rejected": {
+ input: "br",
+ acceptEncoding: "gzip, compress, br;q=0.0",
+ expected: false,
+ },
+ "rejected *": {
+ input: "br",
+ acceptEncoding: "gzip, compress, *;q=0.0",
+ expected: false,
+ },
+ "complex": {
+ input: "br",
+ acceptEncoding: "gzip;q=1.0, compress;q=0.5, br;q=0.1, *;q=0.0",
+ expected: true,
+ },
+ "complex out of order": {
+ input: "br",
+ acceptEncoding: "*;q=0.0, compress;q=0.5, br;q=0.1, gzip;q=1.0",
+ expected: true,
+ },
+ "complex with extra blanks": {
+ input: "br",
+ acceptEncoding: " gzip;q=1.0, compress; q=0.5, br;q=0.1, *; q=0.0 ",
+ expected: true,
+ },
+ }
+
+ for name, tc := range testcases {
+ t.Run(name, func(t *testing.T) {
+ actual := isCompressionAcceptEncodingValid(tc.input, tc.acceptEncoding)
+ if actual != tc.expected {
+ t.Errorf("Unexpected result: input=%q acceptEncoding=%q expected=%t actual=%t", tc.input, tc.acceptEncoding, tc.expected, actual)
+ }
+ })
+ }
+}
package prober
import (
+ "compress/flate"
+ "compress/gzip"
"context"
"crypto/tls"
"errors"
"sync"
"time"
+ "github.com/andybalholm/brotli"
"github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level"
"github.com/prometheus/client_golang/prometheus"
request.Host = value
continue
}
+
request.Header.Set(key, value)
}
}
}
+ // Since the configuration specifies a compression algorithm, blindly treat the response body as a
+ // compressed payload; if we cannot decompress it it's a failure because the configuration says we
+ // should expect the response to be compressed in that way.
+ if httpConfig.Compression != "" {
+ dec, err := getDecompressionReader(httpConfig.Compression, resp.Body)
+ if err != nil {
+ level.Info(logger).Log("msg", "Failed to get decompressor for HTTP response body", "err", err)
+ success = false
+ } else if dec != nil {
+ // Since we are replacing the original resp.Body with the decoder, we need to make sure
+ // we close the original body. We cannot close it right away because the decompressor
+ // might not have read it yet.
+ defer func(c io.Closer) {
+ err := c.Close()
+ if err != nil {
+ // At this point we cannot really do anything with this error, but log
+ // it in case it contains useful information as to what's the problem.
+ level.Info(logger).Log("msg", "Error while closing response from server", "err", err)
+ }
+ }(resp.Body)
+
+ resp.Body = dec
+ }
+ }
+
byteCounter := &byteCounter{ReadCloser: resp.Body}
if success && (len(httpConfig.FailIfBodyMatchesRegexp) > 0 || len(httpConfig.FailIfBodyNotMatchesRegexp) > 0) {
respBodyBytes = byteCounter.n
if err := byteCounter.Close(); err != nil {
- // We have already read everything we could from the server. The error here might be a
- // TCP error. Log it in case it contains useful information as to what's the problem.
+ // We have already read everything we could from the server, maybe even uncompressed the
+ // body. The error here might be either a decompression error or a TCP error. Log it in
+ // case it contains useful information as to what's the problem.
level.Info(logger).Log("msg", "Error while closing response from server", "error", err.Error())
}
}
redirectsGauge.Set(float64(redirects))
return
}
+
+func getDecompressionReader(algorithm string, origBody io.ReadCloser) (io.ReadCloser, error) {
+ switch strings.ToLower(algorithm) {
+ case "br":
+ return ioutil.NopCloser(brotli.NewReader(origBody)), nil
+
+ case "deflate":
+ return flate.NewReader(origBody), nil
+
+ case "gzip":
+ return gzip.NewReader(origBody)
+
+ case "identity", "":
+ return origBody, nil
+
+ default:
+ return nil, errors.New("unsupported compression algorithm")
+ }
+}
import (
"bytes"
+ "compress/flate"
+ "compress/gzip"
"context"
"crypto/tls"
"crypto/x509"
"testing"
"time"
+ "github.com/andybalholm/brotli"
"github.com/go-kit/kit/log"
"github.com/prometheus/client_golang/prometheus"
pconfig "github.com/prometheus/common/config"
// TestContentLength checks the content-length related metrics for a
// variety of server responses, including compressed payloads served
// without a "compression" setting (which must NOT be decompressed).
func TestContentLength(t *testing.T) {
	type testdata struct {
		// msg is the expected (uncompressed) payload for the case.
		msg                    []byte
		contentLength          int
		uncompressedBodyLength int
		handler                http.HandlerFunc
	testcases := map[string]testdata{
		"identity": {
			msg:                    testmsg,
			contentLength:          len(testmsg),
			uncompressedBodyLength: len(testmsg),
			handler: func(w http.ResponseWriter, r *http.Request) {
		},
		"no content-encoding": {
			msg:                    testmsg,
			contentLength:          len(testmsg),
			uncompressedBodyLength: len(testmsg),
			handler: func(w http.ResponseWriter, r *http.Request) {
		// Unknown Content-Encoding, we should let this pass thru.
		"unknown content-encoding": {
			msg:                    testmsg,
			contentLength:          len(testmsg),
			uncompressedBodyLength: len(testmsg),
			handler: func(w http.ResponseWriter, r *http.Request) {
		// 401 response, verify that the content-length is still computed correctly.
		"401": {
			expectFailure:          true,
			msg:                    notfoundMsg,
			contentLength:          len(notfoundMsg),
			uncompressedBodyLength: len(notfoundMsg),
			handler: func(w http.ResponseWriter, r *http.Request) {
				w.Write(notfoundMsg)
			},
		},

		// Compressed payload _without_ compression setting, it should not be decompressed.
		"brotli": func() testdata {
			msg := testmsg
			var buf bytes.Buffer
			fw := brotli.NewWriter(&buf)
			fw.Write([]byte(msg))
			fw.Close()
			return testdata{
				msg:                    msg,
				contentLength:          len(buf.Bytes()), // Content length is the length of the compressed buffer.
				uncompressedBodyLength: len(buf.Bytes()), // No decompression.
				handler: func(w http.ResponseWriter, r *http.Request) {
					w.Header().Add("Content-Encoding", "br")
					w.WriteHeader(http.StatusOK)
					w.Write(buf.Bytes())
				},
			}
		}(),

		// Compressed payload _without_ compression setting, it should not be decompressed.
		"deflate": func() testdata {
			msg := testmsg
			var buf bytes.Buffer
			// the only error path is an invalid compression level
			fw, _ := flate.NewWriter(&buf, flate.DefaultCompression)
			fw.Write([]byte(msg))
			fw.Close()
			return testdata{
				msg:                    msg,
				contentLength:          len(buf.Bytes()), // Content length is the length of the compressed buffer.
				uncompressedBodyLength: len(buf.Bytes()), // No decompression.
				handler: func(w http.ResponseWriter, r *http.Request) {
					w.Header().Add("Content-Encoding", "deflate")
					w.WriteHeader(http.StatusOK)
					w.Write(buf.Bytes())
				},
			}
		}(),

		// Compressed payload _without_ compression setting, it should not be decompressed.
		"gzip": func() testdata {
			msg := testmsg
			var buf bytes.Buffer
			gw := gzip.NewWriter(&buf)
			gw.Write([]byte(msg))
			gw.Close()
			return testdata{
				msg:                    msg,
				contentLength:          len(buf.Bytes()), // Content length is the length of the compressed buffer.
				uncompressedBodyLength: len(buf.Bytes()), // No decompression.
				handler: func(w http.ResponseWriter, r *http.Request) {
					w.Header().Add("Content-Encoding", "gzip")
					w.WriteHeader(http.StatusOK)
					w.Write(buf.Bytes())
				},
			}
		}(),
	}
	for name, tc := range testcases {
	}
}
+// TestHandlingOfCompressionSetting verifies that the "compression"
+// setting is handled correctly: content is decompressed only if
+// compression is specified, and only the specified compression
+// algorithm is handled.
+func TestHandlingOfCompressionSetting(t *testing.T) {
+ type testdata struct {
+ contentLength int
+ uncompressedBodyLength int
+ handler http.HandlerFunc
+ expectFailure bool
+ httpConfig config.HTTPProbe
+ }
+
+ testmsg := []byte(strings.Repeat("hello world", 10))
+
+ testcases := map[string]testdata{
+ "gzip": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := gzip.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ contentLength: buf.Len(), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: len(msg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "gzip")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "gzip",
+ },
+ }
+ }(),
+
+ "brotli": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := brotli.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ contentLength: len(buf.Bytes()), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: len(msg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "br")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "br",
+ },
+ }
+ }(),
+
+ "deflate": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ // the only error path is an invalid compression level
+ enc, _ := flate.NewWriter(&buf, flate.DefaultCompression)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ contentLength: len(buf.Bytes()), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: len(msg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "deflate")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "deflate",
+ },
+ }
+ }(),
+
+ "identity": {
+ contentLength: len(testmsg),
+ uncompressedBodyLength: len(testmsg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "identity")
+ w.WriteHeader(http.StatusOK)
+ w.Write(testmsg)
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "identity",
+ },
+ },
+
+ // We do exactly as told: the server is returning a
+ // gzip-encoded response, but the module is expecting a
+ // delfate-encoded response. This should fail.
+ "compression encoding mismatch": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := gzip.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ expectFailure: true,
+ contentLength: buf.Len(), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: 0,
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "gzip")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "deflate",
+ },
+ }
+ }(),
+
+ "accept gzip": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := gzip.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ expectFailure: false,
+ contentLength: buf.Len(), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: len(msg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "gzip")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "gzip",
+ Headers: map[string]string{
+ "Accept-Encoding": "gzip",
+ },
+ },
+ }
+ }(),
+
+ "accept br, gzip": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := gzip.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ expectFailure: false,
+ contentLength: buf.Len(), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: len(msg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "gzip")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "gzip",
+ Headers: map[string]string{
+ "Accept-Encoding": "br, gzip",
+ },
+ },
+ }
+ }(),
+
+ "accept anything": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := gzip.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ expectFailure: false,
+ contentLength: buf.Len(), // Content lenght is the length of the compressed buffer.
+ uncompressedBodyLength: len(msg),
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "gzip")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ Compression: "gzip",
+ Headers: map[string]string{
+ "Accept-Encoding": "*",
+ },
+ },
+ }
+ }(),
+
+ "compressed content without compression setting": func() testdata {
+ msg := testmsg
+ var buf bytes.Buffer
+ enc := gzip.NewWriter(&buf)
+ enc.Write(msg)
+ enc.Close()
+ return testdata{
+ expectFailure: false,
+ contentLength: buf.Len(),
+ uncompressedBodyLength: buf.Len(), // content won't be uncompressed
+ handler: func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Encoding", "gzip")
+ w.WriteHeader(http.StatusOK)
+ w.Write(buf.Bytes())
+ },
+ httpConfig: config.HTTPProbe{
+ IPProtocolFallback: true,
+ },
+ }
+ }(),
+ }
+
+ for name, tc := range testcases {
+ t.Run(name, func(t *testing.T) {
+ ts := httptest.NewServer(tc.handler)
+ defer ts.Close()
+
+ testCTX, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+
+ registry := prometheus.NewRegistry()
+ var logbuf bytes.Buffer
+ result := ProbeHTTP(testCTX,
+ ts.URL,
+ config.Module{
+ Timeout: time.Second,
+ HTTP: tc.httpConfig,
+ },
+ registry,
+ log.NewLogfmtLogger(&logbuf))
+ if !tc.expectFailure && !result {
+ t.Fatalf("probe failed unexpectedly: %s", logbuf.String())
+ } else if tc.expectFailure && result {
+ t.Fatalf("probe succeeded unexpectedly: %s", logbuf.String())
+ }
+
+ mfs, err := registry.Gather()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ expectedResults := map[string]float64{
+ "probe_http_content_length": float64(tc.contentLength),
+ "probe_http_uncompressed_body_length": float64(tc.uncompressedBodyLength),
+ }
+ checkRegistryResults(expectedResults, mfs, t)
+ })
+ }
+}
+
func TestRedirectFollowed(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/" {