fix: check that x-amz-decoded-content-length matches the actual payload in unsigned streaming uploads
Fixes #1676. In streaming uploads, `x-amz-decoded-content-length` specifies the number of actual data-payload bytes, i.e. the payload size with the chunk-encoding framing removed. If that value does not match the size of the decoded payload, the gateway now returns an `IncompleteBody` error.
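For illustration only, here is a minimal standalone sketch of the check this commit enforces, not the gateway's actual reader: it sums the data bytes of an aws-chunked body and compares the total with the `x-amz-decoded-content-length` header. The payload and header value are taken from the first test case in this commit; the `decodedLength` helper is hypothetical and ignores chunk extensions and trailers.

```go
package main

import (
	"bufio"
	"fmt"
	"io"
	"strconv"
	"strings"
)

// decodedLength walks the chunk framing ("<hex-size>\r\n<data>\r\n" ... "0\r\n")
// and returns the total number of raw data bytes in the body.
func decodedLength(body io.Reader) (int64, error) {
	br := bufio.NewReader(body)
	var total int64
	for {
		line, err := br.ReadString('\n')
		if err != nil {
			return 0, err
		}
		size, err := strconv.ParseInt(strings.TrimRight(line, "\r\n"), 16, 64)
		if err != nil {
			return 0, err
		}
		if size == 0 {
			return total, nil // zero-sized chunk terminates the payload
		}
		total += size
		// skip the chunk data and its trailing CRLF
		if _, err := io.CopyN(io.Discard, br, size+2); err != nil {
			return 0, err
		}
	}
}

func main() {
	body := "b\r\nabcdefghijk\r\n0\r\n\r\n" // 11 data bytes after decoding
	headerValue := int64(5)                 // x-amz-decoded-content-length sent by the client

	got, err := decodedLength(strings.NewReader(body))
	if err != nil {
		panic(err)
	}
	if got != headerValue {
		// this is the case where the gateway now responds with IncompleteBody
		fmt.Printf("mismatch: header says %d bytes, payload decodes to %d bytes\n", headerValue, got)
	}
}
```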
@@ -79,6 +79,10 @@ func AuthorizePublicBucketAccess(be backend.Backend, s3action string, policyPerm
 	if streamBody {
 		if utils.IsUnsignedStreamingPayload(payloadHash) {
+			cLength, err := utils.ParseDecodedContentLength(ctx)
+			if err != nil {
+				return err
+			}
 			// stack an unsigned streaming payload reader
 			checksumType, err := utils.ExtractChecksumType(ctx)
 			if err != nil {
@@ -87,7 +91,7 @@ func AuthorizePublicBucketAccess(be backend.Backend, s3action string, policyPerm
 			wrapBodyReader(ctx, func(r io.Reader) io.Reader {
 				var cr io.Reader
-				cr, err = utils.NewUnsignedChunkReader(r, checksumType)
+				cr, err = utils.NewUnsignedChunkReader(r, checksumType, cLength)
 				return cr
 			})
@@ -160,22 +160,32 @@ func IsStreamingPayload(str string) bool {
 		pt == payloadTypeStreamingSignedTrailer
 }
 
-func NewChunkReader(ctx *fiber.Ctx, r io.Reader, authdata AuthData, region, secret string, date time.Time) (io.Reader, error) {
+// ParseDecodedContentLength extracts and validates the
+// 'x-amz-decoded-content-length' from fiber context
+func ParseDecodedContentLength(ctx *fiber.Ctx) (int64, error) {
 	decContLengthStr := ctx.Get("X-Amz-Decoded-Content-Length")
 	if decContLengthStr == "" {
 		debuglogger.Logf("missing required header 'X-Amz-Decoded-Content-Length'")
-		return nil, s3err.GetAPIError(s3err.ErrMissingContentLength)
+		return 0, s3err.GetAPIError(s3err.ErrMissingContentLength)
 	}
 	decContLength, err := strconv.ParseInt(decContLengthStr, 10, 64)
-	//TODO: not sure if InvalidRequest should be returned in this case
 	if err != nil {
 		debuglogger.Logf("invalid value for 'X-Amz-Decoded-Content-Length': %v", decContLengthStr)
-		return nil, s3err.GetAPIError(s3err.ErrMissingContentLength)
+		return 0, s3err.GetAPIError(s3err.ErrMissingContentLength)
 	}
 
 	if decContLength > maxObjSizeLimit {
 		debuglogger.Logf("the object size exceeds the allowed limit: (size): %v, (limit): %v", decContLength, int64(maxObjSizeLimit))
-		return nil, s3err.GetAPIError(s3err.ErrEntityTooLarge)
+		return 0, s3err.GetAPIError(s3err.ErrEntityTooLarge)
+	}
+
+	return decContLength, nil
+}
+
+func NewChunkReader(ctx *fiber.Ctx, r io.Reader, authdata AuthData, region, secret string, date time.Time) (io.Reader, error) {
+	cLength, err := ParseDecodedContentLength(ctx)
+	if err != nil {
+		return nil, err
 	}
 
 	contentSha256 := payloadType(ctx.Get("X-Amz-Content-Sha256"))
@@ -192,7 +202,7 @@ func NewChunkReader(ctx *fiber.Ctx, r io.Reader, authdata AuthData, region, secr
 	switch contentSha256 {
 	case payloadTypeStreamingUnsignedTrailer:
-		return NewUnsignedChunkReader(r, checksumType)
+		return NewUnsignedChunkReader(r, checksumType, cLength)
 	case payloadTypeStreamingSignedTrailer:
 		return NewSignedChunkReader(r, authdata, region, secret, date, checksumType)
 	case payloadTypeStreamingSigned:
@@ -50,9 +50,13 @@ type UnsignedChunkReader struct {
 	// this data is necessary for 'InvalidChunkSizeError' error
 	// TODO: add 'Chunk' and 'BadChunkSize' in the error
 	chunkSizes []int64
+	cLength    int64
+	// This data is necessary for the decoded content length mismatch error
+	// TODO: add 'NumberBytesExpected' and 'NumberBytesProvided' in the error
+	dataRead int64
 }
 
-func NewUnsignedChunkReader(r io.Reader, ct checksumType) (*UnsignedChunkReader, error) {
+func NewUnsignedChunkReader(r io.Reader, ct checksumType, decContentLength int64) (*UnsignedChunkReader, error) {
 	var hasher hash.Hash
 	var err error
 	if ct != "" {
@@ -70,6 +74,7 @@ func NewUnsignedChunkReader(r io.Reader, ct checksumType) (*UnsignedChunkReader,
 		stash:      make([]byte, 0),
 		hasher:     hasher,
 		chunkSizes: []int64{},
+		cLength:    decContentLength,
 	}, nil
 }
 
@@ -104,6 +109,8 @@ func (ucr *UnsignedChunkReader) Read(p []byte) (int, error) {
 			return 0, err
 		}
 
+		ucr.dataRead += chunkSize
+
 		if chunkSize == 0 {
 			// Stop reading parsing payloads as 0 sized chunk is reached
 			break
@@ -146,6 +153,11 @@ func (ucr *UnsignedChunkReader) Read(p []byte) (int, error) {
 		}
 	}
 
+	if ucr.cLength != ucr.dataRead {
+		debuglogger.Logf("number of bytes expected: (%v), number of bytes read: (%v)", ucr.cLength, ucr.dataRead)
+		return 0, s3err.GetAPIError(s3err.ErrContentLengthMismatch)
+	}
+
 	// Read and validate trailers
 	if err := ucr.readTrailer(); err != nil {
 		debuglogger.Logf("failed to read trailer: %v", err)
@@ -121,6 +121,7 @@ const (
 	ErrInvalidSHA256Paylod
 	ErrUnsupportedAnonymousSignedStreaming
 	ErrMissingContentLength
+	ErrContentLengthMismatch
 	ErrInvalidAccessKeyID
 	ErrRequestNotReadyYet
 	ErrMissingDateHeader
@@ -520,6 +521,11 @@ var errorCodeResponse = map[ErrorCode]APIError{
 		Description:    "You must provide the Content-Length HTTP header.",
 		HTTPStatusCode: http.StatusLengthRequired,
 	},
+	ErrContentLengthMismatch: {
+		Code:           "IncompleteBody",
+		Description:    "You did not provide the number of bytes specified by the Content-Length HTTP header",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
 	ErrMissingDateHeader: {
 		Code:           "AccessDenied",
 		Description:    "AWS authentication requires a valid Date or x-amz-date header.",
@@ -1102,6 +1102,7 @@ func TestUnsignedStreaminPayloadTrailer(ts *TestState) {
 	ts.Run(UnsignedStreamingPayloadTrailer_sdk_algo_and_trailer_mismatch)
 	ts.Run(UnsignedStreamingPayloadTrailer_incomplete_body)
 	ts.Run(UnsignedStreamingPayloadTrailer_invalid_chunk_size)
+	ts.Run(UnsignedStreamingPayloadTrailer_content_length_payload_size_mismatch)
 	ts.Run(UnsignedStreamingPayloadTrailer_no_trailer_should_calculate_crc64nvme)
 	ts.Run(UnsignedStreamingPayloadTrailer_no_payload_trailer_only_headers)
 	ts.Run(UnsignedStreamingPayloadTrailer_success_both_sdk_algo_and_trailer)
@@ -1756,6 +1757,7 @@ func GetIntTests() IntTests {
 	"UnsignedStreamingPayloadTrailer_sdk_algo_and_trailer_mismatch":         UnsignedStreamingPayloadTrailer_sdk_algo_and_trailer_mismatch,
 	"UnsignedStreamingPayloadTrailer_incomplete_body":                       UnsignedStreamingPayloadTrailer_incomplete_body,
 	"UnsignedStreamingPayloadTrailer_invalid_chunk_size":                    UnsignedStreamingPayloadTrailer_invalid_chunk_size,
+	"UnsignedStreamingPayloadTrailer_content_length_payload_size_mismatch": UnsignedStreamingPayloadTrailer_content_length_payload_size_mismatch,
 	"UnsignedStreamingPayloadTrailer_no_trailer_should_calculate_crc64nvme": UnsignedStreamingPayloadTrailer_no_trailer_should_calculate_crc64nvme,
 	"UnsignedStreamingPayloadTrailer_no_payload_trailer_only_headers":       UnsignedStreamingPayloadTrailer_no_payload_trailer_only_headers,
 	"UnsignedStreamingPayloadTrailer_success_both_sdk_algo_and_trailer":     UnsignedStreamingPayloadTrailer_success_both_sdk_algo_and_trailer,
@@ -268,6 +268,41 @@ func UnsignedStreamingPayloadTrailer_invalid_chunk_size(s *S3Conf) error {
 	})
 }
 
+func UnsignedStreamingPayloadTrailer_content_length_payload_size_mismatch(s *S3Conf) error {
+	testName := "UnsignedStreamingPayloadTrailer_content_length_payload_size_mismatch"
+	return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
+		object := "my-object"
+		for i, test := range []struct {
+			payload string
+			cLength int64
+			trailer string
+		}{
+			{"b\r\nabcdefghijk\r\n0\r\n\r\n", 5, ""},
+			{"b\r\nabcdefghijk\r\n0\r\n\r\n", 200, ""},
+			{"a\r\ndummy data\r\n0\r\nx-amz-checksum-crc64nvme:dPVWc2vU1+Q=\r\n\r\n", 128, "crc64nvme"},
+			{"a\r\ndummy data\r\n0\r\nx-amz-checksum-sha256:eXuwq/95jXIAr3aF3KeQHt/8Ur8mUA1b2XKCZY7iQVI=\r\n\r\n", 7, "crc64nvme"},
+		} {
+			reqHeaders := map[string]string{
+				"x-amz-decoded-content-length": fmt.Sprint(test.cLength),
+			}
+			if test.trailer != "" {
+				reqHeaders["x-amz-trailer"] = fmt.Sprintf("x-amz-checksum-%s", test.trailer)
+			}
+
+			_, apiErr, err := testUnsignedStreamingPayloadTrailerObjectPut(s, bucket, object, []byte(test.payload), reqHeaders)
+			if err != nil {
+				return fmt.Errorf("test %v failed: %w", i+1, err)
+			}
+
+			if err := compareS3ApiError(s3err.GetAPIError(s3err.ErrContentLengthMismatch), apiErr); err != nil {
+				return fmt.Errorf("test %v failed: %w", i+1, err)
+			}
+		}
+
+		return nil
+	})
+}
+
 func UnsignedStreamingPayloadTrailer_no_trailer_should_calculate_crc64nvme(s *S3Conf) error {
 	testName := "UnsignedStreamingPayloadTrailer_no_trailer_should_calculate_crc64nvme"
 	return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {