[confighttp] Apply MaxRequestBodySize to the result of a decompressed body #10289

Merged
22 changes: 22 additions & 0 deletions .chloggen/jpkroehling_set-maxrequestbodysize-to-compressed.yaml
@@ -0,0 +1,22 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: 'breaking'

# The name of the component, or a single word describing the area of concern, (e.g. otlpreceiver)
component: confighttp

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Apply MaxRequestBodySize to the result of a decompressed body

# One or more tracking issues or pull requests related to the change
issues: [10289]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext: |
When using compressed payloads, the Collector would verify only the size of the compressed payload.
This change applies the same restriction to the decompressed content. As a security measure, a limit of 20 MiB was added, which makes this a breaking change.
For most clients, this shouldn't be a problem, but if you often have payloads that decompress to more than 20 MiB, you might want to either configure your
client to send smaller batches (recommended), or increase the limit using the MaxRequestBodySize option.
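For reference, a minimal sketch (not part of this change set) of raising the limit when embedding confighttp directly; the endpoint and the 50 MiB figure are arbitrary examples, and collector users would normally set the equivalent `max_request_body_size` key in their receiver's YAML configuration instead:

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/config/confighttp"
)

func main() {
	// Raise the cap for payloads that legitimately decompress past 20 MiB.
	cfg := confighttp.ServerConfig{
		Endpoint:           "localhost:4318", // illustrative endpoint
		MaxRequestBodySize: 50 * 1024 * 1024, // 50 MiB, applied to the decompressed body
	}
	fmt.Println(cfg.MaxRequestBodySize)
}
```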
16 changes: 9 additions & 7 deletions config/confighttp/compression.go
@@ -67,24 +67,26 @@ func (r *compressRoundTripper) RoundTrip(req *http.Request) (*http.Response, err
}

type decompressor struct {
errHandler func(w http.ResponseWriter, r *http.Request, errorMsg string, statusCode int)
base http.Handler
decoders map[string]func(body io.ReadCloser) (io.ReadCloser, error)
errHandler func(w http.ResponseWriter, r *http.Request, errorMsg string, statusCode int)
base http.Handler
decoders map[string]func(body io.ReadCloser) (io.ReadCloser, error)
maxRequestBodySize int64
}

// httpContentDecompressor offloads the task of handling compressed HTTP requests
// by identifying the compression format in the "Content-Encoding" header and re-writing
// request body so that the handlers further in the chain can work on decompressed data.
// It supports gzip and deflate/zlib compression.
func httpContentDecompressor(h http.Handler, eh func(w http.ResponseWriter, r *http.Request, errorMsg string, statusCode int), decoders map[string]func(body io.ReadCloser) (io.ReadCloser, error)) http.Handler {
func httpContentDecompressor(h http.Handler, maxRequestBodySize int64, eh func(w http.ResponseWriter, r *http.Request, errorMsg string, statusCode int), decoders map[string]func(body io.ReadCloser) (io.ReadCloser, error)) http.Handler {
errHandler := defaultErrorHandler
if eh != nil {
errHandler = eh
}

d := &decompressor{
errHandler: errHandler,
base: h,
maxRequestBodySize: maxRequestBodySize,
errHandler: errHandler,
base: h,
decoders: map[string]func(body io.ReadCloser) (io.ReadCloser, error){
"": func(io.ReadCloser) (io.ReadCloser, error) {
// Not a compressed payload. Nothing to do.
@@ -155,7 +157,7 @@ func (d *decompressor) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// "Content-Length" is set to -1 as the size of the decompressed body is unknown.
r.Header.Del("Content-Length")
r.ContentLength = -1
r.Body = newBody
r.Body = http.MaxBytesReader(w, newBody, d.maxRequestBodySize)
}
d.base.ServeHTTP(w, r)
}
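The essential pattern above is `http.MaxBytesReader` wrapping the decoded stream rather than the wire bytes. A standalone sketch of that idea using plain net/http and gzip (nothing here is confighttp code; the `/ingest` path, port, and limit are illustrative):

```go
package main

import (
	"compress/gzip"
	"io"
	"net/http"
)

func main() {
	const limit = 20 * 1024 * 1024 // mirrors the new 20 MiB default

	http.HandleFunc("/ingest", func(w http.ResponseWriter, r *http.Request) {
		var body io.ReadCloser = r.Body
		if r.Header.Get("Content-Encoding") == "gzip" {
			zr, err := gzip.NewReader(r.Body)
			if err != nil {
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}
			body = zr
		}
		// The cap applies to what comes out of the decoder, so a tiny compressed
		// payload cannot expand into an arbitrarily large request body.
		body = http.MaxBytesReader(w, body, limit)

		if _, err := io.ReadAll(body); err != nil {
			http.Error(w, err.Error(), http.StatusRequestEntityTooLarge)
			return
		}
		w.WriteHeader(http.StatusOK)
	})

	_ = http.ListenAndServe("localhost:8080", nil)
}
```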
4 changes: 2 additions & 2 deletions config/confighttp/compression_test.go
@@ -134,7 +134,7 @@ func TestHTTPCustomDecompression(t *testing.T) {
return io.NopCloser(strings.NewReader("decompressed body")), nil
},
}
srv := httptest.NewServer(httpContentDecompressor(handler, defaultErrorHandler, decoders))
srv := httptest.NewServer(httpContentDecompressor(handler, defaultMaxRequestBodySize, defaultErrorHandler, decoders))

t.Cleanup(srv.Close)

@@ -253,7 +253,7 @@ func TestHTTPContentDecompressionHandler(t *testing.T) {
require.NoError(t, err, "failed to read request body: %v", err)
assert.EqualValues(t, testBody, string(body))
w.WriteHeader(http.StatusOK)
}), defaultErrorHandler, noDecoders))
}), defaultMaxRequestBodySize, defaultErrorHandler, noDecoders))
t.Cleanup(srv.Close)

req, err := http.NewRequest(http.MethodGet, srv.URL, tt.reqBody)
9 changes: 7 additions & 2 deletions config/confighttp/confighttp.go
@@ -30,6 +30,7 @@ import (
)

const headerContentEncoding = "Content-Encoding"
const defaultMaxRequestBodySize = 20 * 1024 * 1024 // 20MiB

// ClientConfig defines settings for creating an HTTP client.
type ClientConfig struct {
@@ -269,7 +270,7 @@ type ServerConfig struct {
// Auth for this receiver
Auth *configauth.Authentication `mapstructure:"auth"`

// MaxRequestBodySize sets the maximum request body size in bytes
// MaxRequestBodySize sets the maximum request body size in bytes. Default: 20MiB.
MaxRequestBodySize int64 `mapstructure:"max_request_body_size"`

// IncludeMetadata propagates the client metadata from the incoming requests to the downstream consumers
@@ -340,7 +341,11 @@ func (hss *ServerConfig) ToServer(_ context.Context, host component.Host, settin
o(serverOpts)
}

handler = httpContentDecompressor(handler, serverOpts.errHandler, serverOpts.decoders)
if hss.MaxRequestBodySize <= 0 {
hss.MaxRequestBodySize = defaultMaxRequestBodySize
}

handler = httpContentDecompressor(handler, hss.MaxRequestBodySize, serverOpts.errHandler, serverOpts.decoders)

if hss.MaxRequestBodySize > 0 {
handler = maxRequestBodySizeInterceptor(handler, hss.MaxRequestBodySize)
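As a rough sketch of the resulting wiring in ToServer (stand-in code, not the real confighttp internals): a single limit value, defaulted to 20 MiB when unset, is applied once to the raw (possibly compressed) body and then again, inside the decompressor, to whatever the decoder produces:

```go
package main

import (
	"io"
	"net/http"
)

const defaultLimit = 20 * 1024 * 1024 // plays the role of defaultMaxRequestBodySize

// withBodyLimits stands in for the interceptor + decompressor pair built in ToServer.
func withBodyLimits(next http.Handler, maxRequestBodySize int64) http.Handler {
	if maxRequestBodySize <= 0 {
		maxRequestBodySize = defaultLimit
	}
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Outer limit: caps the compressed bytes read off the wire.
		r.Body = http.MaxBytesReader(w, r.Body, maxRequestBodySize)
		// In the real code, httpContentDecompressor would now decode the body and
		// wrap the decoded stream in http.MaxBytesReader with the same limit
		// before handing it to next.
		next.ServeHTTP(w, r)
	})
}

func main() {
	drain := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if _, err := io.Copy(io.Discard, r.Body); err != nil {
			http.Error(w, err.Error(), http.StatusRequestEntityTooLarge)
			return
		}
		w.WriteHeader(http.StatusOK)
	})
	_ = http.ListenAndServe("localhost:8080", withBodyLimits(drain, 0)) // 0 → default applies
}
```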
91 changes: 90 additions & 1 deletion config/confighttp/confighttp_test.go
@@ -4,6 +4,7 @@
package confighttp

import (
"bytes"
"context"
"errors"
"fmt"
@@ -13,6 +14,7 @@ import (
"net/http/httptest"
"net/url"
"path/filepath"
"strings"
"testing"
"time"

@@ -1300,7 +1302,7 @@ func TestServerWithDecoder(t *testing.T) {
// test
response := &httptest.ResponseRecorder{}

req, err := http.NewRequest(http.MethodGet, srv.Addr, nil)
req, err := http.NewRequest(http.MethodGet, srv.Addr, bytes.NewBuffer([]byte("something")))
require.NoError(t, err, "Error creating request: %v", err)
req.Header.Set("Content-Encoding", "something-else")

@@ -1310,6 +1312,93 @@

}

func TestServerWithDecompression(t *testing.T) {
// prepare
hss := ServerConfig{
MaxRequestBodySize: 1000, // 1 KB
}
body := []byte(strings.Repeat("a", 1000*1000)) // 1 MB

srv, err := hss.ToServer(
context.Background(),
componenttest.NewNopHost(),
componenttest.NewNopTelemetrySettings(),
http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
actualBody, err := io.ReadAll(req.Body)
assert.ErrorContains(t, err, "http: request body too large")
assert.Len(t, actualBody, 1000)

if err != nil {
resp.WriteHeader(http.StatusBadRequest)
} else {
resp.WriteHeader(http.StatusOK)
}
}),
)
require.NoError(t, err)

testSrv := httptest.NewServer(srv.Handler)
defer testSrv.Close()

req, err := http.NewRequest(http.MethodGet, testSrv.URL, compressZstd(t, body))
require.NoError(t, err, "Error creating request: %v", err)

req.Header.Set("Content-Encoding", "zstd")

// test
c := http.Client{}
resp, err := c.Do(req)
require.NoError(t, err, "Error sending request: %v", err)

_, err = io.ReadAll(resp.Body)
require.NoError(t, err, "Error reading response body: %v", err)

// verification is done mostly within the test handler above; this is only a sanity check
// that we actually got into the test handler
assert.Equal(t, resp.StatusCode, http.StatusBadRequest)
}

func TestDefaultMaxRequestBodySize(t *testing.T) {
tests := []struct {
name string
settings ServerConfig
expected int64
}{
{
name: "default",
settings: ServerConfig{},
expected: defaultMaxRequestBodySize,
},
{
name: "zero",
settings: ServerConfig{MaxRequestBodySize: 0},
expected: defaultMaxRequestBodySize,
},
{
name: "negative",
settings: ServerConfig{MaxRequestBodySize: -1},
expected: defaultMaxRequestBodySize,
},
{
name: "custom",
settings: ServerConfig{MaxRequestBodySize: 100},
expected: 100,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, err := tt.settings.ToServer(
context.Background(),
componenttest.NewNopHost(),
componenttest.NewNopTelemetrySettings(),
http.HandlerFunc(func(http.ResponseWriter, *http.Request) {}),
)
require.NoError(t, err)
assert.Equal(t, tt.expected, tt.settings.MaxRequestBodySize)
})
}
}

type mockHost struct {
component.Host
ext map[component.ID]component.Component