-
Notifications
You must be signed in to change notification settings - Fork 1.4k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
util+server: Fix bug around chunked request handling. #6906
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -1713,8 +1713,10 @@ func generateJSONBenchmarkData(k, v int) map[string]interface{} { | |
} | ||
|
||
return map[string]interface{}{ | ||
"keys": keys, | ||
"values": values, | ||
"input": map[string]interface{}{ | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is a minor refactoring that silences "'input' key missing" warnings from the testcases. |
||
"keys": keys, | ||
"values": values, | ||
}, | ||
} | ||
} | ||
|
||
|
@@ -1832,10 +1834,12 @@ func TestDataPostV1CompressedDecodingLimits(t *testing.T) { | |
tests := []struct { | ||
note string | ||
wantGzip bool | ||
wantChunkedEncoding bool | ||
payload []byte | ||
forceContentLen int64 // Size to manually set the Content-Length header to. | ||
forcePayloadSizeField uint32 // Size to manually set the payload field for the gzip blob. | ||
expRespHTTPStatus int | ||
expWarningMsg string | ||
expErrorMsg string | ||
maxLen int64 | ||
gzipMaxLen int64 | ||
|
@@ -1844,30 +1848,44 @@ func TestDataPostV1CompressedDecodingLimits(t *testing.T) { | |
note: "empty message", | ||
payload: []byte{}, | ||
expRespHTTPStatus: 200, | ||
expWarningMsg: "'input' key missing from the request", | ||
}, | ||
{ | ||
note: "empty message, gzip", | ||
wantGzip: true, | ||
payload: mustGZIPPayload([]byte{}), | ||
expRespHTTPStatus: 200, | ||
expWarningMsg: "'input' key missing from the request", | ||
}, | ||
{ | ||
note: "empty message, malicious Content-Length", | ||
payload: []byte{}, | ||
forceContentLen: 2048, // Server should ignore this header entirely. | ||
expRespHTTPStatus: 200, | ||
expRespHTTPStatus: 400, | ||
expErrorMsg: "request body too large", | ||
}, | ||
{ | ||
note: "empty message, gzip, malicious Content-Length", | ||
wantGzip: true, | ||
payload: mustGZIPPayload([]byte{}), | ||
forceContentLen: 2048, // Server should ignore this header entirely. | ||
expRespHTTPStatus: 200, | ||
expRespHTTPStatus: 400, | ||
expErrorMsg: "request body too large", | ||
}, | ||
{ | ||
note: "basic - malicious size field, expect reject on gzip payload length", | ||
wantGzip: true, | ||
payload: mustGZIPPayload([]byte(`{"user": "alice"}`)), | ||
payload: mustGZIPPayload([]byte(`{"input": {"user": "alice"}}`)), | ||
expRespHTTPStatus: 400, | ||
forcePayloadSizeField: 134217728, // 128 MB | ||
expErrorMsg: "gzip payload too large", | ||
gzipMaxLen: 1024, | ||
}, | ||
{ | ||
note: "basic - malicious size field, expect reject on gzip payload length, chunked encoding", | ||
wantGzip: true, | ||
wantChunkedEncoding: true, | ||
payload: mustGZIPPayload([]byte(`{"input": {"user": "alice"}}`)), | ||
expRespHTTPStatus: 400, | ||
forcePayloadSizeField: 134217728, // 128 MB | ||
expErrorMsg: "gzip payload too large", | ||
|
@@ -1886,6 +1904,13 @@ func TestDataPostV1CompressedDecodingLimits(t *testing.T) { | |
maxLen: 512, | ||
expErrorMsg: "request body too large", | ||
}, | ||
{ | ||
note: "basic, large payload, expect reject on Content-Length, chunked encoding", | ||
wantChunkedEncoding: true, | ||
payload: util.MustMarshalJSON(generateJSONBenchmarkData(100, 100)), | ||
expRespHTTPStatus: 200, | ||
maxLen: 134217728, | ||
}, | ||
{ | ||
note: "basic, gzip, large payload", | ||
wantGzip: true, | ||
|
@@ -1957,13 +1982,19 @@ allow if { | |
if test.wantGzip { | ||
req.Header.Set("Content-Encoding", "gzip") | ||
} | ||
if test.wantChunkedEncoding { | ||
req.ContentLength = -1 | ||
req.TransferEncoding = []string{"chunked"} | ||
req.Header.Set("Transfer-Encoding", "chunked") | ||
} | ||
if test.forceContentLen > 0 { | ||
req.ContentLength = test.forceContentLen | ||
req.Header.Set("Content-Length", strconv.FormatInt(test.forceContentLen, 10)) | ||
} | ||
f.reset() | ||
f.server.Handler.ServeHTTP(f.recorder, req) | ||
if f.recorder.Code != test.expRespHTTPStatus { | ||
t.Fatalf("Unexpected HTTP status code, (exp,got): %d, %d", test.expRespHTTPStatus, f.recorder.Code) | ||
t.Fatalf("Unexpected HTTP status code, (exp,got): %d, %d, response body: %s", test.expRespHTTPStatus, f.recorder.Code, f.recorder.Body.Bytes()) | ||
} | ||
if test.expErrorMsg != "" { | ||
var serverErr types.ErrorV1 | ||
|
@@ -1973,6 +2004,19 @@ allow if { | |
if !strings.Contains(serverErr.Message, test.expErrorMsg) { | ||
t.Fatalf("Expected error message to have message '%s', got message: '%s'", test.expErrorMsg, serverErr.Message) | ||
} | ||
} else { | ||
var resp types.DataResponseV1 | ||
if err := json.Unmarshal(f.recorder.Body.Bytes(), &resp); err != nil { | ||
t.Fatalf("Could not deserialize response: %s, message was: %s", err.Error(), f.recorder.Body.Bytes()) | ||
} | ||
if test.expWarningMsg != "" { | ||
if !strings.Contains(resp.Warning.Message, test.expWarningMsg) { | ||
t.Fatalf("Expected warning message to have message '%s', got message: '%s'", test.expWarningMsg, resp.Warning.Message) | ||
} | ||
} else if resp.Warning != nil { | ||
// Error on unexpected warnings. Something is wrong. | ||
t.Fatalf("Unexpected warning: code: %s, message: %s", resp.Warning.Code, resp.Warning.Message) | ||
} | ||
} | ||
}) | ||
} | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -27,13 +27,24 @@ var gzipReaderPool = sync.Pool{ | |
// payload size, but not an unbounded amount of memory, as was potentially | ||
// possible before. | ||
func ReadMaybeCompressedBody(r *http.Request) ([]byte, error) { | ||
if r.ContentLength <= 0 { | ||
return []byte{}, nil | ||
} | ||
// Read content from the request body into a buffer of known size. | ||
content := bytes.NewBuffer(make([]byte, 0, r.ContentLength)) | ||
if _, err := io.CopyN(content, r.Body, r.ContentLength); err != nil { | ||
return content.Bytes(), err | ||
var content *bytes.Buffer | ||
// Note(philipc): If the request body is of unknown length (such as what | ||
// happens when 'Transfer-Encoding: chunked' is set), we have to do an | ||
// incremental read of the body. In this case, we can't be too clever, we | ||
// just do the best we can with whatever is streamed over to us. | ||
// Fetch gzip payload size limit from request context. | ||
if maxLength, ok := decoding.GetServerDecodingMaxLen(r.Context()); ok { | ||
There was a problem hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is where we add back the original |
||
bs, err := io.ReadAll(io.LimitReader(r.Body, maxLength)) | ||
if err != nil { | ||
return bs, err | ||
} | ||
content = bytes.NewBuffer(bs) | ||
} else { | ||
// Read content from the request body into a buffer of known size. | ||
content = bytes.NewBuffer(make([]byte, 0, r.ContentLength)) | ||
if _, err := io.CopyN(content, r.Body, r.ContentLength); err != nil { | ||
return content.Bytes(), err | ||
} | ||
} | ||
|
||
// Decompress gzip content by reading from the buffer. | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
This is where the new context key is inserted. Note that we only add it when we have an "indeterminate body size" case, such as what happens during chunked requests.
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
Is this only the case for chunked requests, or are there other use cases? If it is only true for chunked requests, can we just check the header (i.e. 'Transfer-Encoding: chunked')? Also, it would be helpful to add a comment explaining the two conditions in the `if` statement.