mirror of
https://github.com/versity/versitygw.git
synced 2026-04-26 15:35:05 +00:00
Merge pull request #1608 from versity/tests/rest_bucket_tagging_chunked
Tests/rest bucket tagging chunked
This commit is contained in:
2
go.mod
2
go.mod
@@ -50,7 +50,9 @@ require (
|
||||
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
||||
github.com/hashicorp/go-rootcerts v1.0.2 // indirect
|
||||
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.9 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/minio/crc64nvme v1.1.1 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/nats-io/nkeys v0.4.11 // indirect
|
||||
github.com/nats-io/nuid v1.0.1 // indirect
|
||||
|
||||
4
go.sum
4
go.sum
@@ -117,6 +117,8 @@ github.com/keybase/go-keychain v0.0.1 h1:way+bWYa6lDppZoZcgMbYsvC7GxljxrskdNInRt
|
||||
github.com/keybase/go-keychain v0.0.1/go.mod h1:PdEILRW3i9D8JcdM+FmY6RwkHGnhHxXwkPPMeUgOK1k=
|
||||
github.com/klauspost/compress v1.18.1 h1:bcSGx7UbpBqMChDtsF28Lw6v/G94LPrrbMbdC3JH2co=
|
||||
github.com/klauspost/compress v1.18.1/go.mod h1:ZQFFVG+MdnR0P+l6wpXgIL4NTtwiKIdBnrBd8Nrxr+0=
|
||||
github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
|
||||
github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
@@ -129,6 +131,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
|
||||
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
|
||||
github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=
|
||||
github.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=
|
||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/nats-io/nats.go v1.47.0 h1:YQdADw6J/UfGUd2Oy6tn4Hq6YHxCaJrVKayxxFqYrgM=
|
||||
|
||||
@@ -113,7 +113,7 @@ check_verify_object_tags() {
|
||||
|
||||
check_object_tags_empty() {
|
||||
if ! check_param_count_v2 "command type, bucket, key" 3 $#; then
|
||||
return 1
|
||||
return 2
|
||||
fi
|
||||
if ! get_object_tagging "$1" "$2" "$3"; then
|
||||
log 2 "failed to get tags"
|
||||
|
||||
@@ -48,6 +48,27 @@ send_via_openssl_and_check_code() {
|
||||
echo "$result"
|
||||
}
|
||||
|
||||
# Send the given openssl command file, verify the HTTP response code, then
# check that the named response header carries the expected value.
# params: command file, expected code, header key, expected value
# return: 0 on success, 1 on failure
send_via_openssl_and_check_code_header() {
  if ! check_param_count_v2 "command file, expected code, header key, expected value" 4 $#; then
    return 1
  fi
  if ! send_via_openssl_and_check_code "$1" "$2"; then
    log 2 "error sending via openssl and checking code"
    return 1
  fi
  # NOTE(review): 'result' is a global populated by
  # send_via_openssl_and_check_code — confirm nothing clobbers it in between.
  header_line="$(echo "$result" | grep "$3")"
  if [ "$header_line" == "" ]; then
    log 2 "header key '$3' not found in header data"
    return 1
  fi
  # take the second whitespace-delimited field and strip the trailing CR
  # that HTTP header lines carry
  header_value="$(echo "$header_line" | awk '{print $2}' | tr -d '\r')"
  if [ "$header_value" != "$4" ]; then
    log 2 "expected header value of '$4', was '$header_value'"
    return 1
  fi
  return 0
}
|
||||
|
||||
send_via_openssl_check_code_error_contains() {
|
||||
if ! check_param_count_v2 "command file, expected code, error, message" 4 $#; then
|
||||
return 1
|
||||
@@ -97,8 +118,8 @@ send_openssl_go_command_expect_error() {
|
||||
if ! check_param_count_gt "expected HTTP code, expected error code, expected message, params" 4 $#; then
|
||||
return 1
|
||||
fi
|
||||
if ! go run "./tests/rest_scripts/generateCommand.go" "-awsAccessKeyId" "$AWS_ACCESS_KEY_ID" "-awsSecretAccessKey" "$AWS_SECRET_ACCESS_KEY" "-url" "$AWS_ENDPOINT_URL" "-client" "openssl" "-filePath" "$TEST_FILE_FOLDER/openssl_command.txt" "${@:4}"; then
|
||||
log 2 "error sending go command and checking error"
|
||||
if ! result=$(go run "./tests/rest_scripts/generateCommand.go" "-awsAccessKeyId" "$AWS_ACCESS_KEY_ID" "-awsSecretAccessKey" "$AWS_SECRET_ACCESS_KEY" "-url" "$AWS_ENDPOINT_URL" "-client" "openssl" "-filePath" "$TEST_FILE_FOLDER/openssl_command.txt" "${@:4}" 2>&1); then
|
||||
log 2 "error sending go command and checking error: $result"
|
||||
return 1
|
||||
fi
|
||||
if ! send_via_openssl_check_code_error_contains "$TEST_FILE_FOLDER/openssl_command.txt" "$1" "$2" "$3"; then
|
||||
@@ -122,3 +143,18 @@ send_openssl_go_command() {
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
# Generate an openssl command file via generateCommand.go, send it, and
# verify both the HTTP response code and a response header value.
# params: expected HTTP code, header key, expected value, then any
#         additional generateCommand.go arguments
# return: 0 on success, 1 on failure
send_openssl_go_command_check_header() {
  if ! check_param_count_gt "expected HTTP code, header key, value, params" 4 $#; then
    return 1
  fi
  # build the raw HTTP request into openssl_command.txt; "${@:4}" forwards
  # the caller's extra generateCommand.go flags
  if ! go run "./tests/rest_scripts/generateCommand.go" "-awsAccessKeyId" "$AWS_ACCESS_KEY_ID" "-awsSecretAccessKey" "$AWS_SECRET_ACCESS_KEY" "-url" "$AWS_ENDPOINT_URL" "-client" "openssl" "-filePath" "$TEST_FILE_FOLDER/openssl_command.txt" "${@:4}"; then
    log 2 "error sending go command and checking error"
    return 1
  fi
  if ! send_via_openssl_and_check_code_header "$TEST_FILE_FOLDER/openssl_command.txt" "$1" "$2" "$3"; then
    log 2 "error sending command, checking code and header value"
    return 1
  fi
  return 0
}
|
||||
|
||||
@@ -54,3 +54,13 @@ setup_bucket_and_add_file() {
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
# Attempt a chunked (STREAMING-AWS4-HMAC-SHA256-PAYLOAD) PutObject that
# omits the Content-Length header, and assert the gateway rejects it with
# 400 IncompleteBody ("The request body terminated unexpectedly").
# Uses bats' 'run'/'assert_success' — intended to be called from a test.
# params: bucket name, key
send_openssl_go_command_chunked_no_content_length() {
  if ! check_param_count_gt "bucket name, key" 2 $#; then
    return 1
  fi
  run send_openssl_go_command_expect_error "400" "IncompleteBody" "The request body terminated unexpectedly" \
    "-client" "openssl" "-commandType" "putObject" "-bucketName" "$1" "-payload" "abcdefg" "-omitContentLength" \
    "-payloadType" "STREAMING-AWS4-HMAC-SHA256-PAYLOAD" "-chunkSize" "8192" "-objectKey" "$2"
  assert_success
}
|
||||
|
||||
11
tests/rest_scripts/command/dataSource.go
Normal file
11
tests/rest_scripts/command/dataSource.go
Normal file
@@ -0,0 +1,11 @@
|
||||
// Package command implements helpers for building S3 REST requests.
package command

import "io"

// DataSource abstracts a request-payload source (e.g. a file) so the
// command builders can size, hash, and stream the data uniformly.
type DataSource interface {
	// SourceDataByteSize returns the total size of the source data in bytes.
	SourceDataByteSize() (int64, error)
	// CalculateSHA256HashString returns the hex-encoded SHA-256 of the data.
	CalculateSHA256HashString() (string, error)
	// Close releases any underlying resources (e.g. an open file handle).
	Close() error
	// GetReader returns a reader over the source data.
	GetReader() (io.Reader, error)
	// GetTeeReader returns a reader that also copies all bytes read into
	// the given writer (used to accumulate a checksum while streaming).
	GetTeeReader(io.Writer) (io.Reader, error)
}
|
||||
82
tests/rest_scripts/command/fileDataSource.go
Normal file
82
tests/rest_scripts/command/fileDataSource.go
Normal file
@@ -0,0 +1,82 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type FileDataSource struct {
|
||||
filePath string
|
||||
File *os.File
|
||||
}
|
||||
|
||||
func NewFileDataSource(filePath string) *FileDataSource {
|
||||
return &FileDataSource{
|
||||
filePath: filePath,
|
||||
File: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func (f *FileDataSource) SourceDataByteSize() (int64, error) {
|
||||
fileInfo, err := os.Stat(f.filePath)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("error getting file info: %w", err)
|
||||
}
|
||||
return fileInfo.Size(), nil
|
||||
}
|
||||
|
||||
func (f *FileDataSource) CalculateSHA256HashString() (string, error) {
|
||||
file, err := os.Open(f.filePath)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("error opening payload file '%s': %w", f.filePath, err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
hasher := sha256.New()
|
||||
if _, err = io.Copy(hasher, file); err != nil {
|
||||
return "", fmt.Errorf("error copying file data of '%s' to hasher: %w", f.filePath, err)
|
||||
}
|
||||
|
||||
hash := hasher.Sum(nil)
|
||||
return hex.EncodeToString(hash), nil
|
||||
}
|
||||
|
||||
func (f *FileDataSource) Close() error {
|
||||
if f.File != nil {
|
||||
err := f.File.Close()
|
||||
f.File = nil
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *FileDataSource) openFile() error {
|
||||
var err error
|
||||
f.File, err = os.OpenFile(f.filePath, os.O_RDONLY, 0600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error opening file: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *FileDataSource) GetReader() (io.Reader, error) {
|
||||
if f.File == nil {
|
||||
if err := f.openFile(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return f.File, nil
|
||||
}
|
||||
|
||||
func (f *FileDataSource) GetTeeReader(checksumWriter io.Writer) (io.Reader, error) {
|
||||
if f.File == nil {
|
||||
if err := f.openFile(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
r := io.TeeReader(f.File, checksumWriter)
|
||||
return r, nil
|
||||
}
|
||||
6
tests/rest_scripts/command/openSSLPayloadManager.go
Normal file
6
tests/rest_scripts/command/openSSLPayloadManager.go
Normal file
@@ -0,0 +1,6 @@
|
||||
// Package command implements helpers for building S3 REST requests.
package command

// OpenSSLPayloadManager is implemented by payload encoders that can be
// written into an openssl command file.
type OpenSSLPayloadManager interface {
	// GetContentLength returns the encoded payload's Content-Length.
	GetContentLength() (int64, error)
	// WritePayload appends the encoded payload to the file at the given path.
	WritePayload(string) error
}
|
||||
91
tests/rest_scripts/command/payload.go
Normal file
91
tests/rest_scripts/command/payload.go
Normal file
@@ -0,0 +1,91 @@
|
||||
// Package command implements helpers for building S3 REST requests.
package command

import (
	"crypto/sha1"
	"crypto/sha256"
	"encoding/base64"
	"encoding/binary"
	"errors"
	"fmt"
	"hash"
	"hash/crc32"
	"reflect"

	"github.com/minio/crc64nvme"
)

// Payload pairs a request-body data source with the payload-type and
// checksum configuration used when encoding it.
type Payload struct {
	dataSource   DataSource
	payloadType  PayloadType
	checksumType string
	// dataSizeCalculated caches whether dataSize has been computed,
	// so the data source is only sized once.
	dataSizeCalculated bool
	dataSize           int64
}

// GetBase64ChecksumLength returns the length of the base64-encoded
// checksum string for the given checksum type: 4 raw bytes -> 8 chars
// (CRC32/CRC32C), 32 -> 44 (SHA-256), 20 -> 28 (SHA-1), 8 -> 12
// (CRC64-NVME).
func GetBase64ChecksumLength(checksumType string) (int64, error) {
	switch checksumType {
	case ChecksumCRC32, ChecksumCRC32C:
		return 8, nil
	case ChecksumSHA256:
		return 44, nil
	case ChecksumSHA1:
		return 28, nil
	case ChecksumCRC64NVME:
		return 12, nil
	}
	return 0, errors.New("unrecognized checksum type: " + checksumType)
}

// GetDataSize returns the data source's byte size, computing it at most
// once and caching the result.
func (p *Payload) GetDataSize() (int64, error) {
	if !p.dataSizeCalculated {
		dataSize, err := p.dataSource.SourceDataByteSize()
		if err != nil {
			return 0, fmt.Errorf("error getting payload data size: %w", err)
		}
		p.dataSize = dataSize
		p.dataSizeCalculated = true
	}
	return p.dataSize, nil
}

// getChecksumHasher returns a fresh hasher for the configured checksum
// type, or nil if the type is unrecognized.
func (p *Payload) getChecksumHasher() hash.Hash {
	switch p.checksumType {
	case ChecksumSHA256:
		return sha256.New()
	case ChecksumSHA1:
		return sha1.New()
	case ChecksumCRC32:
		return crc32.NewIEEE()
	case ChecksumCRC32C:
		return crc32.New(crc32.MakeTable(crc32.Castagnoli))
	case ChecksumCRC64NVME:
		return crc64nvme.New()
	}
	return nil
}

// getBase64Checksum finalizes the hasher and returns the checksum as a
// base64 string.  SHA-256/SHA-1/CRC32 use Sum(nil) directly; CRC32C and
// CRC64-NVME go through Sum32/Sum64 and big-endian encode the integer
// before base64-encoding.
func (p *Payload) getBase64Checksum(hasher hash.Hash) (string, error) {
	switch p.checksumType {
	case ChecksumSHA256, ChecksumSHA1, ChecksumCRC32:
		return base64.StdEncoding.EncodeToString(hasher.Sum(nil)), nil
	case ChecksumCRC32C:
		var b [4]byte
		hasher32, ok := hasher.(hash.Hash32)
		if !ok {
			return "", fmt.Errorf("'%v' not a Hash32 interface", reflect.TypeOf(hasher).String())
		}
		sum := hasher32.Sum32()
		binary.BigEndian.PutUint32(b[:], sum)
		return base64.StdEncoding.EncodeToString(b[:]), nil
	case ChecksumCRC64NVME:
		var b [8]byte
		hasher64, ok := hasher.(hash.Hash64)
		if !ok {
			return "", fmt.Errorf("'%v' not a Hash64 interface", reflect.TypeOf(hasher).String())
		}
		sum := hasher64.Sum64()
		binary.BigEndian.PutUint64(b[:], sum)
		return base64.StdEncoding.EncodeToString(b[:]), nil
	}
	return "", fmt.Errorf("invalid checksum type specified: '%s'", p.checksumType)
}
|
||||
101
tests/rest_scripts/command/payloadChunked.go
Normal file
101
tests/rest_scripts/command/payloadChunked.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type PayloadChunked struct {
|
||||
*Payload
|
||||
chunkSize int64
|
||||
getReaderFunc func() (io.Reader, error)
|
||||
addSignatureFunc func(chunk []byte, outFile *os.File) error
|
||||
addTrailerFunc func(outFile *os.File) error
|
||||
}
|
||||
|
||||
func (c *PayloadChunked) getChunkedPayloadContentLength(additionalChunkHeaderSize, additionalTrailerSize int64) (int64, error) {
|
||||
payloadSize, err := c.Payload.GetDataSize()
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("error getting payload data size: %w", err)
|
||||
}
|
||||
var sizeIdx int64
|
||||
var contentLength int64
|
||||
for sizeIdx = 0; sizeIdx < payloadSize; sizeIdx += c.chunkSize {
|
||||
var endIdx int64
|
||||
if sizeIdx+c.chunkSize < payloadSize {
|
||||
endIdx = sizeIdx + c.chunkSize
|
||||
} else {
|
||||
endIdx = payloadSize
|
||||
}
|
||||
hexSize := fmt.Sprintf("%x", endIdx-sizeIdx)
|
||||
contentLength += int64(len(hexSize)) + additionalChunkHeaderSize + (endIdx - sizeIdx) + 2
|
||||
}
|
||||
contentLength += 1 + additionalTrailerSize
|
||||
return contentLength, nil
|
||||
}
|
||||
|
||||
func (c *PayloadChunked) writeChunkedPayload(filePath string) error {
|
||||
defer func() {
|
||||
c.dataSource.Close()
|
||||
}()
|
||||
outFile, err := os.OpenFile(filePath, os.O_APPEND|os.O_WRONLY, 0600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error writing to file: %w", err)
|
||||
}
|
||||
br, err := c.getReaderFunc()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting data reader: %w", err)
|
||||
}
|
||||
payloadBuffer := make([]byte, c.chunkSize)
|
||||
for {
|
||||
var bytesRead int
|
||||
if bytesRead, err = c.addChunk(br, payloadBuffer, outFile); err != nil {
|
||||
return fmt.Errorf("error adding chunk: %w", err)
|
||||
}
|
||||
if bytesRead == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if _, err = outFile.Write([]byte{'0'}); err != nil {
|
||||
return fmt.Errorf("error writing \\r\\n: %w", err)
|
||||
}
|
||||
if err = c.addSignatureFunc(nil, outFile); err != nil {
|
||||
return fmt.Errorf("error adding signature: %w", err)
|
||||
}
|
||||
if err = c.addTrailerFunc(outFile); err != nil {
|
||||
return fmt.Errorf("error adding trailer: %w", err)
|
||||
}
|
||||
if _, err = outFile.Write([]byte{'\r', '\n', '\r', '\n'}); err != nil {
|
||||
return fmt.Errorf("error writing \\r\\n: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *PayloadChunked) addChunk(reader io.Reader, payloadBuffer []byte, outFile *os.File) (int, error) {
|
||||
var bytesRead int
|
||||
bytesRead, err := reader.Read(payloadBuffer)
|
||||
if err != nil && err != io.EOF {
|
||||
return 0, fmt.Errorf("error reading bytes: %w", err)
|
||||
}
|
||||
if bytesRead == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
hexString := fmt.Sprintf("%x", bytesRead)
|
||||
if _, err = outFile.Write([]byte(hexString)); err != nil {
|
||||
return 0, fmt.Errorf("error writing hex string: %w", err)
|
||||
}
|
||||
if err = c.addSignatureFunc(payloadBuffer[:bytesRead], outFile); err != nil {
|
||||
return 0, fmt.Errorf("error adding signature: %w", err)
|
||||
}
|
||||
if _, err = outFile.Write([]byte{'\r', '\n'}); err != nil {
|
||||
return 0, fmt.Errorf("error writing \\r\\n: %w", err)
|
||||
}
|
||||
if _, err = outFile.Write(payloadBuffer[:bytesRead]); err != nil {
|
||||
return 0, fmt.Errorf("error writing bytes to file: %w", err)
|
||||
}
|
||||
if _, err = outFile.Write([]byte{'\r', '\n'}); err != nil {
|
||||
return 0, fmt.Errorf("error writing \\r\\n: %w", err)
|
||||
}
|
||||
return bytesRead, nil
|
||||
}
|
||||
28
tests/rest_scripts/command/payloadChunkedAWS.go
Normal file
28
tests/rest_scripts/command/payloadChunkedAWS.go
Normal file
@@ -0,0 +1,28 @@
|
||||
// Package command implements helpers for building S3 REST requests.
package command

import (
	"encoding/hex"
	"github.com/versity/versitygw/tests/rest_scripts/logger"
	"strings"
)

// PayloadChunkedAWS extends PayloadChunked with the SigV4 state needed to
// sign each chunk of a STREAMING-AWS4-HMAC-SHA256-PAYLOAD body.
type PayloadChunkedAWS struct {
	*PayloadChunked
	serviceString      string // credential scope: "<yyyymmdd>/<region>/<service>/aws4_request"
	currentDateTime    string // request timestamp (x-amz-date format)
	lastSignature      string // previous chunk's signature; seeded with the request signature
	emptyByteSignature string // SHA-256 of zero bytes, a fixed field of the string-to-sign
	signingKey         []byte // derived SigV4 signing key
}

// getChunkedSTSSignature builds the AWS4-HMAC-SHA256-PAYLOAD
// string-to-sign from the previous signature and the chunk's SHA-256
// hash, HMACs it with the signing key, and returns the hex-encoded chunk
// signature.
// NOTE(review): the parameter is the chunk's SHA-256 hash despite being
// named chunkSignature (see the caller in addSignature).
func (c *PayloadChunkedAWS) getChunkedSTSSignature(chunkSignature string) string {
	request := strings.Join([]string{"AWS4-HMAC-SHA256-PAYLOAD",
		c.currentDateTime,
		c.serviceString,
		c.lastSignature,
		c.emptyByteSignature,
		chunkSignature}, "\n")
	logger.PrintDebug("request: %s", request)
	canonicalRequestHashBytes := hmacSHA256(c.signingKey, request)
	return hex.EncodeToString(canonicalRequestHashBytes[:])
}
|
||||
5
tests/rest_scripts/command/payloadSizeCalculator.go
Normal file
5
tests/rest_scripts/command/payloadSizeCalculator.go
Normal file
@@ -0,0 +1,5 @@
|
||||
// Package command implements helpers for building S3 REST requests.
package command

// PayloadSizeCalculator is implemented by payload types that can report
// their total encoded size in bytes.
type PayloadSizeCalculator interface {
	CalculatePayloadSize() int64
}
|
||||
73
tests/rest_scripts/command/payloadStreamingAWSHMACSHA256.go
Normal file
73
tests/rest_scripts/command/payloadStreamingAWSHMACSHA256.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type PayloadStreamingAWS4HMACSHA256 struct {
|
||||
*PayloadChunkedAWS
|
||||
}
|
||||
|
||||
func NewPayloadStreamingAWS4HMACSHA256(source DataSource, chunkSize int64, serviceString, currentDateTime string) *PayloadStreamingAWS4HMACSHA256 {
|
||||
return &PayloadStreamingAWS4HMACSHA256{
|
||||
PayloadChunkedAWS: &PayloadChunkedAWS{
|
||||
PayloadChunked: &PayloadChunked{
|
||||
Payload: &Payload{
|
||||
dataSource: source,
|
||||
payloadType: StreamingAWS4HMACSHA256Payload,
|
||||
checksumType: "",
|
||||
dataSizeCalculated: false,
|
||||
dataSize: 0,
|
||||
},
|
||||
chunkSize: chunkSize,
|
||||
},
|
||||
serviceString: serviceString,
|
||||
currentDateTime: currentDateTime,
|
||||
lastSignature: "",
|
||||
emptyByteSignature: SHA256HashZeroBytes,
|
||||
signingKey: nil,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (s *PayloadStreamingAWS4HMACSHA256) AddInitialSignatureAndSigningKey(initialSignature string, signingKey []byte) {
|
||||
s.lastSignature = initialSignature
|
||||
s.signingKey = signingKey
|
||||
}
|
||||
|
||||
func (s *PayloadStreamingAWS4HMACSHA256) GetContentLength() (int64, error) {
|
||||
return s.getChunkedPayloadContentLength(83, 85)
|
||||
}
|
||||
|
||||
func (s *PayloadStreamingAWS4HMACSHA256) addSignature(chunk []byte, outFile *os.File) error {
|
||||
sha256sum := sha256.Sum256(chunk)
|
||||
sha256sumString := hex.EncodeToString(sha256sum[:])
|
||||
signature := s.getChunkedSTSSignature(sha256sumString)
|
||||
if _, err := outFile.Write([]byte(";chunk-signature=" + signature)); err != nil {
|
||||
return fmt.Errorf("error writing chunked signature: %w", err)
|
||||
}
|
||||
s.lastSignature = signature
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *PayloadStreamingAWS4HMACSHA256) getReader() (io.Reader, error) {
|
||||
sourceFile, err := s.dataSource.GetReader()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating tee reader: %w", err)
|
||||
}
|
||||
return bufio.NewReader(sourceFile), nil
|
||||
}
|
||||
|
||||
func (s *PayloadStreamingAWS4HMACSHA256) WritePayload(filePath string) error {
|
||||
s.addSignatureFunc = s.addSignature
|
||||
s.getReaderFunc = s.getReader
|
||||
s.addTrailerFunc = func(outFile *os.File) error {
|
||||
return nil
|
||||
}
|
||||
return s.writeChunkedPayload(filePath)
|
||||
}
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Tag struct {
|
||||
@@ -51,7 +50,7 @@ func NewPutBucketTaggingCommand(s3Command *S3Command, fields *PutBucketTaggingFi
|
||||
return nil, errors.New("tagCount can not be set simultaneously with tagKeys or tagValues")
|
||||
}
|
||||
command.Tags = &PutBucketTaggingTags{
|
||||
XMLNamespace: "https://s3.amazonaws.com/doc/2006-03-01/",
|
||||
XMLNamespace: "http://s3.amazonaws.com/doc/2006-03-01/",
|
||||
}
|
||||
if fields.TagCount > 0 {
|
||||
command.Tags.GenerateKeyValuePairs(fields.TagCount)
|
||||
@@ -65,8 +64,7 @@ func NewPutBucketTaggingCommand(s3Command *S3Command, fields *PutBucketTaggingFi
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error marshalling XML: %w", err)
|
||||
}
|
||||
command.Payload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + string(xmlData)
|
||||
command.Payload = strings.Replace(command.Payload, "\"", "\\\"", -1)
|
||||
command.Payload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + string(xmlData)
|
||||
return command, nil
|
||||
}
|
||||
|
||||
|
||||
17
tests/rest_scripts/command/putObjectCommand.go
Normal file
17
tests/rest_scripts/command/putObjectCommand.go
Normal file
@@ -0,0 +1,17 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
func NewPutObjectCommand(s3Command *S3Command) (*S3Command, error) {
|
||||
if s3Command.BucketName == "" {
|
||||
return nil, errors.New("PutObject must have bucket name")
|
||||
}
|
||||
if s3Command.ObjectKey == "" {
|
||||
return nil, errors.New("PutObject must have object key")
|
||||
}
|
||||
s3Command.Method = "PUT"
|
||||
s3Command.Query = ""
|
||||
return s3Command, nil
|
||||
}
|
||||
@@ -15,7 +15,55 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
CURL = "curl"
|
||||
OPENSSL = "openssl"
|
||||
)
|
||||
|
||||
const (
|
||||
UnsignedPayload = "UNSIGNED-PAYLOAD"
|
||||
StreamingAWS4HMACSHA256Payload = "STREAMING-AWS4-HMAC-SHA256-PAYLOAD"
|
||||
StreamingAWS4HMACSHA256PayloadTrailer = "STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER"
|
||||
StreamingUnsignedPayloadTrailer = "STREAMING-UNSIGNED-PAYLOAD-TRAILER"
|
||||
StreamingAWS4ECDSAP256SHA256Payload = "STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD"
|
||||
StreamingAWS4ECDSAP256SHA256PayloadTrailer = "STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER"
|
||||
)
|
||||
|
||||
type PayloadType string
|
||||
|
||||
const (
|
||||
ChecksumCRC32 = "crc32"
|
||||
ChecksumCRC32C = "crc32c"
|
||||
ChecksumCRC64NVME = "crc64nvme"
|
||||
ChecksumSHA1 = "sha1"
|
||||
ChecksumSHA256 = "sha256"
|
||||
)
|
||||
|
||||
const SHA256HashZeroBytes = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
|
||||
type S3RESTCommand struct {
|
||||
Method string
|
||||
Url string
|
||||
Queries map[string]string
|
||||
SignedParams map[string]string
|
||||
UnsignedParams map[string]string
|
||||
DataSource DataSource
|
||||
}
|
||||
|
||||
type S3CommandErrors struct {
|
||||
IncorrectSignature bool
|
||||
AuthorizationHeaderMalformed bool
|
||||
IncorrectCredential string
|
||||
IncorrectYearMonthDay bool
|
||||
InvalidYearMonthDay bool
|
||||
IncorrectContentMD5 bool
|
||||
MissingHostParam bool
|
||||
CustomHostParam string
|
||||
CustomHostParamSet bool
|
||||
}
|
||||
|
||||
type S3Command struct {
|
||||
Client string
|
||||
Method string
|
||||
Url string
|
||||
BucketName string
|
||||
@@ -36,11 +84,19 @@ type S3Command struct {
|
||||
Payload string
|
||||
ContentMD5 bool
|
||||
IncorrectContentMD5 bool
|
||||
CustomContentMD5 string
|
||||
MissingHostParam bool
|
||||
FilePath string
|
||||
CustomHostParam string
|
||||
CustomHostParamSet bool
|
||||
PayloadType string
|
||||
ChunkSize int
|
||||
ChecksumType string
|
||||
OmitPayloadTrailer bool
|
||||
OmitPayloadTrailerKey bool
|
||||
OmitContentLength bool
|
||||
|
||||
dataSource DataSource
|
||||
currentDateTime string
|
||||
host string
|
||||
payloadHash string
|
||||
@@ -50,6 +106,9 @@ type S3Command struct {
|
||||
signedParamString string
|
||||
yearMonthDay string
|
||||
signature string
|
||||
signingKey []byte
|
||||
contentLength int64
|
||||
payloadOpenSSL OpenSSLPayloadManager
|
||||
}
|
||||
|
||||
func (s *S3Command) OpenSSLCommand() error {
|
||||
@@ -73,9 +132,6 @@ func (s *S3Command) CurlShellCommand() (string, error) {
|
||||
}
|
||||
|
||||
func (s *S3Command) prepareForBuild() error {
|
||||
if s.PayloadFile != "" && s.Payload != "" {
|
||||
return fmt.Errorf("cannot have both payload and payloadFile parameters set")
|
||||
}
|
||||
if s.IncorrectYearMonthDay {
|
||||
s.currentDateTime = time.Now().Add(-48 * time.Hour).UTC().Format("20060102T150405Z")
|
||||
} else {
|
||||
@@ -86,24 +142,74 @@ func (s *S3Command) prepareForBuild() error {
|
||||
return fmt.Errorf("invalid URL value: %s", s.Url)
|
||||
}
|
||||
s.host = protocolAndHost[1]
|
||||
s.payloadHash = "UNSIGNED-PAYLOAD"
|
||||
if err := s.addHeaderValues(); err != nil {
|
||||
return fmt.Errorf("error adding header values: %w", err)
|
||||
s.yearMonthDay = strings.Split(s.currentDateTime, "T")[0]
|
||||
if s.InvalidYearMonthDay {
|
||||
s.yearMonthDay = s.yearMonthDay[:len(s.yearMonthDay)-2]
|
||||
}
|
||||
s.path = "/" + s.BucketName
|
||||
if s.ObjectKey != "" {
|
||||
s.path += "/" + s.ObjectKey
|
||||
}
|
||||
s.generateCanonicalRequestString()
|
||||
|
||||
s.yearMonthDay = strings.Split(s.currentDateTime, "T")[0]
|
||||
if s.InvalidYearMonthDay {
|
||||
s.yearMonthDay = s.yearMonthDay[:len(s.yearMonthDay)-2]
|
||||
if err := s.preparePayload(); err != nil {
|
||||
return fmt.Errorf("error preparing payload: %w", err)
|
||||
}
|
||||
if err := s.addHeaderValues(); err != nil {
|
||||
return fmt.Errorf("error adding header values: %w", err)
|
||||
}
|
||||
s.generateCanonicalRequestString()
|
||||
s.getStsSignature()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *S3Command) preparePayload() error {
|
||||
if s.PayloadFile != "" && s.Payload != "" {
|
||||
return fmt.Errorf("cannot have both payload and payloadFile parameters set")
|
||||
}
|
||||
if s.PayloadFile != "" {
|
||||
s.dataSource = NewFileDataSource(s.PayloadFile)
|
||||
} else if s.Payload != "" {
|
||||
s.dataSource = NewStringDataSource(s.Payload)
|
||||
}
|
||||
if s.PayloadType != "" {
|
||||
s.payloadHash = s.PayloadType
|
||||
} else if s.dataSource != nil {
|
||||
var err error
|
||||
s.payloadHash, err = s.dataSource.CalculateSHA256HashString()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error calculating sha256 hash")
|
||||
}
|
||||
} else {
|
||||
s.payloadHash = SHA256HashZeroBytes
|
||||
}
|
||||
if s.Client == OPENSSL {
|
||||
if err := s.initializeOpenSSLPayloadAndGetContentLength(); err != nil {
|
||||
return fmt.Errorf("error initializing openssl payload: %w", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *S3Command) initializeOpenSSLPayloadAndGetContentLength() error {
|
||||
switch s.PayloadType {
|
||||
case StreamingAWS4HMACSHA256Payload:
|
||||
serviceString := fmt.Sprintf("%s/%s/%s/aws4_request", s.yearMonthDay, s.AwsRegion, s.ServiceName)
|
||||
s.payloadOpenSSL = NewPayloadStreamingAWS4HMACSHA256(s.dataSource, int64(s.ChunkSize), serviceString, s.currentDateTime)
|
||||
case StreamingUnsignedPayloadTrailer:
|
||||
streamingUnsignedPayloadTrailerImpl := NewStreamingUnsignedPayloadWithTrailer(s.dataSource, int64(s.ChunkSize), s.ChecksumType)
|
||||
streamingUnsignedPayloadTrailerImpl.OmitTrailerOrKey(s.OmitPayloadTrailer, s.OmitPayloadTrailerKey)
|
||||
s.payloadOpenSSL = streamingUnsignedPayloadTrailerImpl
|
||||
default:
|
||||
return fmt.Errorf("unsupported OpenSSL payload type: '%s'", s.PayloadType)
|
||||
}
|
||||
var err error
|
||||
s.contentLength, err = s.payloadOpenSSL.GetContentLength()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error calculating Content-Length: %w", err)
|
||||
}
|
||||
logger.PrintDebug("Predicted payload size: %d", s.contentLength)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *S3Command) addHeaderValues() error {
|
||||
s.headerValues = [][]string{}
|
||||
if s.MissingHostParam {
|
||||
@@ -117,10 +223,22 @@ func (s *S3Command) addHeaderValues() error {
|
||||
[]string{"x-amz-content-sha256", s.payloadHash},
|
||||
[]string{"x-amz-date", s.currentDateTime},
|
||||
)
|
||||
if s.Client == OPENSSL && !s.OmitContentLength {
|
||||
s.headerValues = append(s.headerValues,
|
||||
[]string{"Content-Length", fmt.Sprintf("%d", s.contentLength)})
|
||||
}
|
||||
if s.dataSource != nil && s.PayloadType != UnsignedPayload {
|
||||
payloadSize, err := s.dataSource.SourceDataByteSize()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting payload size: %w", err)
|
||||
}
|
||||
s.headerValues = append(s.headerValues,
|
||||
[]string{"x-amz-decoded-content-length", fmt.Sprintf("%d", payloadSize)})
|
||||
}
|
||||
for key, value := range s.SignedParams {
|
||||
s.headerValues = append(s.headerValues, []string{key, value})
|
||||
}
|
||||
if s.ContentMD5 || s.IncorrectContentMD5 {
|
||||
if s.ContentMD5 || s.IncorrectContentMD5 || s.CustomContentMD5 != "" {
|
||||
if err := s.addContentMD5Header(); err != nil {
|
||||
return fmt.Errorf("error adding Content-MD5 header: %w", err)
|
||||
}
|
||||
@@ -132,6 +250,14 @@ func (s *S3Command) addHeaderValues() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *S3Command) modifyHash(md5Hash []byte) {
|
||||
if md5Hash[0] == 'a' {
|
||||
md5Hash[0] = 'A'
|
||||
} else {
|
||||
md5Hash[0] = 'a'
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S3Command) addContentMD5Header() error {
|
||||
var payloadData []byte
|
||||
var err error
|
||||
@@ -144,17 +270,18 @@ func (s *S3Command) addContentMD5Header() error {
|
||||
payloadData = []byte(strings.Replace(s.Payload, "\\", "", -1))
|
||||
}
|
||||
|
||||
hasher := md5.New()
|
||||
hasher.Write(payloadData)
|
||||
md5Hash := hasher.Sum(nil)
|
||||
if s.IncorrectContentMD5 {
|
||||
if md5Hash[0] == 'a' {
|
||||
md5Hash[0] = 'A'
|
||||
} else {
|
||||
md5Hash[0] = 'a'
|
||||
var contentMD5 string
|
||||
if s.CustomContentMD5 != "" {
|
||||
contentMD5 = s.CustomContentMD5
|
||||
} else {
|
||||
hasher := md5.New()
|
||||
hasher.Write(payloadData)
|
||||
md5Hash := hasher.Sum(nil)
|
||||
if s.IncorrectContentMD5 {
|
||||
s.modifyHash(md5Hash)
|
||||
}
|
||||
contentMD5 = base64.StdEncoding.EncodeToString(md5Hash)
|
||||
}
|
||||
contentMD5 := base64.StdEncoding.EncodeToString(md5Hash)
|
||||
|
||||
s.headerValues = append(s.headerValues, []string{"Content-MD5", contentMD5})
|
||||
return nil
|
||||
@@ -198,10 +325,10 @@ func (s *S3Command) getStsSignature() {
|
||||
dateKey := hmacSHA256([]byte("AWS4"+s.AwsSecretAccessKey), s.yearMonthDay)
|
||||
dateRegionKey := hmacSHA256(dateKey, s.AwsRegion)
|
||||
dateRegionServiceKey := hmacSHA256(dateRegionKey, s.ServiceName)
|
||||
signingKey := hmacSHA256(dateRegionServiceKey, "aws4_request")
|
||||
s.signingKey = hmacSHA256(dateRegionServiceKey, "aws4_request")
|
||||
|
||||
// Generate signature
|
||||
signatureBytes := hmacSHA256(signingKey, stsDataString)
|
||||
signatureBytes := hmacSHA256(s.signingKey, stsDataString)
|
||||
if s.IncorrectSignature {
|
||||
if signatureBytes[0] == 'a' {
|
||||
signatureBytes[0] = 'A'
|
||||
@@ -237,9 +364,12 @@ func (s *S3Command) buildCurlShellCommand() (string, error) {
|
||||
if s.PayloadFile != "" {
|
||||
curlCommand = append(curlCommand, "-T", s.PayloadFile)
|
||||
} else if s.Payload != "" {
|
||||
s.Payload = strings.Replace(s.Payload, "\"", "\\\"", -1)
|
||||
curlCommand = append(curlCommand, "-H", "\"Content-Type: application/xml\"", "-d", fmt.Sprintf("\"%s\"", s.Payload))
|
||||
}
|
||||
return strings.Join(curlCommand, " "), nil
|
||||
curlStringCommand := strings.Join(curlCommand, " ")
|
||||
logger.PrintDebug("curl command: %s", curlStringCommand)
|
||||
return curlStringCommand, nil
|
||||
}
|
||||
|
||||
func (s *S3Command) buildAuthorizationString() string {
|
||||
@@ -254,6 +384,9 @@ func (s *S3Command) buildAuthorizationString() string {
|
||||
}
|
||||
|
||||
func (s *S3Command) buildOpenSSLCommand() error {
|
||||
if s.Query != "" {
|
||||
s.path += "?" + s.Query
|
||||
}
|
||||
openSSLCommand := []string{fmt.Sprintf("%s %s HTTP/1.1", s.Method, s.path)}
|
||||
openSSLCommand = append(openSSLCommand, s.buildAuthorizationString())
|
||||
for _, headerValue := range s.headerValues {
|
||||
@@ -262,16 +395,41 @@ func (s *S3Command) buildOpenSSLCommand() error {
|
||||
}
|
||||
openSSLCommand = append(openSSLCommand, fmt.Sprintf("%s:%s", headerValue[0], headerValue[1]))
|
||||
}
|
||||
openSSLCommand = append(openSSLCommand, "\r\n")
|
||||
var file *os.File
|
||||
var err error
|
||||
if file, err = os.Create(s.FilePath); err != nil {
|
||||
|
||||
file, err := os.Create(s.FilePath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error opening file: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
file.Close()
|
||||
}()
|
||||
openSSLCommandBytes := []byte(strings.Join(openSSLCommand, "\r\n"))
|
||||
if _, err = file.Write(openSSLCommandBytes); err != nil {
|
||||
return fmt.Errorf("error writing to file: %w", err)
|
||||
}
|
||||
if s.PayloadFile != "" || s.Payload != "" {
|
||||
if err = s.writeOpenSSLPayload(file); err != nil {
|
||||
return fmt.Errorf("error writing openssl payload: %w", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *S3Command) writeOpenSSLPayload(file *os.File) error {
|
||||
if _, err := file.Write([]byte{'\r', '\n', '\r', '\n'}); err != nil {
|
||||
return fmt.Errorf("error writing to file: %w", err)
|
||||
}
|
||||
if awsPayload, ok := s.payloadOpenSSL.(*PayloadStreamingAWS4HMACSHA256); ok {
|
||||
awsPayload.AddInitialSignatureAndSigningKey(s.signature, s.signingKey)
|
||||
}
|
||||
switch s.PayloadType {
|
||||
case UnsignedPayload, "", StreamingUnsignedPayloadTrailer, StreamingAWS4HMACSHA256Payload:
|
||||
if err := s.payloadOpenSSL.WritePayload(s.FilePath); err != nil {
|
||||
return fmt.Errorf("error writing payload to openssl file: %w", err)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("unsupported payload type: %s", s.PayloadType)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,99 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"hash"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type StreamingUnsignedPayloadWithTrailer struct {
|
||||
*PayloadChunked
|
||||
hasher hash.Hash
|
||||
checksumHeader string
|
||||
checksumValue string
|
||||
omitTrailer bool
|
||||
omitTrailerKey bool
|
||||
}
|
||||
|
||||
func NewStreamingUnsignedPayloadWithTrailer(source DataSource, chunkSize int64, checksumType string) *StreamingUnsignedPayloadWithTrailer {
|
||||
return &StreamingUnsignedPayloadWithTrailer{
|
||||
PayloadChunked: &PayloadChunked{
|
||||
Payload: &Payload{
|
||||
dataSource: source,
|
||||
payloadType: StreamingUnsignedPayloadTrailer,
|
||||
checksumType: checksumType,
|
||||
dataSizeCalculated: false,
|
||||
dataSize: 0,
|
||||
},
|
||||
chunkSize: chunkSize,
|
||||
},
|
||||
checksumHeader: "x-amz-checksum-" + checksumType,
|
||||
checksumValue: "",
|
||||
omitTrailer: false,
|
||||
omitTrailerKey: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *StreamingUnsignedPayloadWithTrailer) OmitTrailerOrKey(omitTrailer, omitTrailerKey bool) {
|
||||
s.omitTrailer = omitTrailer
|
||||
s.omitTrailerKey = omitTrailerKey
|
||||
}
|
||||
|
||||
func (s *StreamingUnsignedPayloadWithTrailer) GetContentLength() (int64, error) {
|
||||
checksumValueLength, err := GetBase64ChecksumLength(s.checksumType)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("error getting base64 checksum length: %w", err)
|
||||
}
|
||||
var trailerLength int64
|
||||
if s.omitTrailer {
|
||||
trailerLength = 4
|
||||
} else if s.omitTrailerKey {
|
||||
trailerLength = 1 + checksumValueLength + 4
|
||||
} else {
|
||||
trailerLength = 2 + int64(len(s.checksumHeader)) + 1 + checksumValueLength + 4
|
||||
}
|
||||
return s.getChunkedPayloadContentLength(2, trailerLength)
|
||||
}
|
||||
|
||||
func (s *StreamingUnsignedPayloadWithTrailer) getReader() (io.Reader, error) {
|
||||
s.hasher = s.getChecksumHasher()
|
||||
teeReader, err := s.dataSource.GetTeeReader(s.hasher)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating tee reader: %w", err)
|
||||
}
|
||||
br := bufio.NewReader(teeReader)
|
||||
return br, nil
|
||||
}
|
||||
|
||||
func (s *StreamingUnsignedPayloadWithTrailer) addTrailer(outFile *os.File) error {
|
||||
if s.omitTrailer {
|
||||
return nil
|
||||
}
|
||||
if _, err := outFile.Write([]byte{'\r', '\n'}); err != nil {
|
||||
return fmt.Errorf("error writing \\r\\n: %w", err)
|
||||
}
|
||||
checksum, err := s.getBase64Checksum(s.hasher)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting checksum: %w", err)
|
||||
}
|
||||
if !s.omitTrailerKey {
|
||||
if _, err = outFile.Write([]byte(s.checksumHeader)); err != nil {
|
||||
return fmt.Errorf("error writing trailer key: %w", err)
|
||||
}
|
||||
}
|
||||
if _, err = outFile.Write([]byte(":" + checksum)); err != nil {
|
||||
return fmt.Errorf("error writing checksum: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *StreamingUnsignedPayloadWithTrailer) WritePayload(filePath string) error {
|
||||
s.addSignatureFunc = func(chunk []byte, file *os.File) error {
|
||||
return nil
|
||||
}
|
||||
s.getReaderFunc = s.getReader
|
||||
s.addTrailerFunc = s.addTrailer
|
||||
return s.writeChunkedPayload(filePath)
|
||||
}
|
||||
42
tests/rest_scripts/command/stringDataSource.go
Normal file
42
tests/rest_scripts/command/stringDataSource.go
Normal file
@@ -0,0 +1,42 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type StringDataSource struct {
|
||||
dataString string
|
||||
}
|
||||
|
||||
func NewStringDataSource(dataString string) *StringDataSource {
|
||||
return &StringDataSource{
|
||||
dataString: dataString,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *StringDataSource) SourceDataByteSize() (int64, error) {
|
||||
return int64(len(s.dataString)), nil
|
||||
}
|
||||
|
||||
func (s *StringDataSource) CalculateSHA256HashString() (string, error) {
|
||||
hash := sha256.Sum256([]byte(s.dataString))
|
||||
return hex.EncodeToString(hash[:]), nil
|
||||
}
|
||||
|
||||
func (s *StringDataSource) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *StringDataSource) GetReader() (io.Reader, error) {
|
||||
stringReader := strings.NewReader(s.dataString)
|
||||
return stringReader, nil
|
||||
}
|
||||
|
||||
func (s *StringDataSource) GetTeeReader(checksumWriter io.Writer) (io.Reader, error) {
|
||||
stringReader := strings.NewReader(s.dataString)
|
||||
r := io.TeeReader(stringReader, checksumWriter)
|
||||
return r, nil
|
||||
}
|
||||
56
tests/rest_scripts/command/wholePayload.go
Normal file
56
tests/rest_scripts/command/wholePayload.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package command
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
type WholePayload struct {
|
||||
*Payload
|
||||
}
|
||||
|
||||
func NewWholePayload(dataSource DataSource) *WholePayload {
|
||||
return &WholePayload{
|
||||
&Payload{
|
||||
dataSource: dataSource,
|
||||
payloadType: "",
|
||||
checksumType: "",
|
||||
dataSizeCalculated: false,
|
||||
dataSize: 0,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (w *WholePayload) CalculatePayloadSize() (int64, error) {
|
||||
return w.GetDataSize()
|
||||
}
|
||||
|
||||
func (w *WholePayload) GetContentLength() (int64, error) {
|
||||
return w.GetDataSize()
|
||||
}
|
||||
|
||||
func (w *WholePayload) WritePayload(filePath string) error {
|
||||
sourceFile, err := w.dataSource.GetReader()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error creating tee reader: %w", err)
|
||||
}
|
||||
outFile, err := os.OpenFile(filePath, os.O_APPEND|os.O_WRONLY, 0600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error writing to file: %w", err)
|
||||
}
|
||||
buffer := make([]byte, 256)
|
||||
for {
|
||||
var bytesRead int
|
||||
bytesRead, err = sourceFile.Read(buffer)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error reading data bytes: %w", err)
|
||||
}
|
||||
if bytesRead == 0 {
|
||||
break
|
||||
}
|
||||
if _, err = outFile.Write(buffer[:bytesRead]); err != nil {
|
||||
return fmt.Errorf("error writing bytes to file: %w", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/versity/versitygw/tests/rest_scripts/command"
|
||||
@@ -9,13 +10,9 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
CURL = "curl"
|
||||
OPENSSL = "openssl"
|
||||
)
|
||||
|
||||
const (
|
||||
PutBucketTagging = "putBucketTagging"
|
||||
PutObject = "putObject"
|
||||
)
|
||||
|
||||
var method *string
|
||||
@@ -38,12 +35,14 @@ var invalidYearMonthDay *bool
|
||||
var payload *string
|
||||
var contentMD5 *bool
|
||||
var incorrectContentMD5 *bool
|
||||
var customContentMD5 *string
|
||||
var missingHostParam *bool
|
||||
var filePath *string
|
||||
var client *string
|
||||
var customHostParam *string
|
||||
var customHostParamSet bool = false
|
||||
var commandType *string
|
||||
var checksumType *string
|
||||
|
||||
type arrayFlags []string
|
||||
|
||||
@@ -51,6 +50,13 @@ var tagCount *int
|
||||
var tagKeys arrayFlags
|
||||
var tagValues arrayFlags
|
||||
|
||||
var payloadType *string
|
||||
var chunkSize *int
|
||||
|
||||
var omitPayloadTrailer *bool
|
||||
var omitPayloadTrailerKey *bool
|
||||
var omitContentLength *bool
|
||||
|
||||
type restParams map[string]string
|
||||
|
||||
func (r *restParams) String() string {
|
||||
@@ -104,11 +110,30 @@ func main() {
|
||||
Payload: *payload,
|
||||
ContentMD5: *contentMD5,
|
||||
IncorrectContentMD5: *incorrectContentMD5,
|
||||
CustomContentMD5: *customContentMD5,
|
||||
MissingHostParam: *missingHostParam,
|
||||
FilePath: *filePath,
|
||||
CustomHostParam: *customHostParam,
|
||||
CustomHostParamSet: customHostParamSet,
|
||||
PayloadType: *payloadType,
|
||||
ChunkSize: *chunkSize,
|
||||
ChecksumType: *checksumType,
|
||||
OmitPayloadTrailer: *omitPayloadTrailer,
|
||||
OmitPayloadTrailerKey: *omitPayloadTrailerKey,
|
||||
OmitContentLength: *omitContentLength,
|
||||
Client: *client,
|
||||
}
|
||||
|
||||
s3Command, err := getS3CommandType(baseCommand)
|
||||
if err != nil {
|
||||
logger.LogFatal("Error getting command subtype: %v", err)
|
||||
}
|
||||
if err := buildCommand(s3Command); err != nil {
|
||||
logger.LogFatal("Error building command: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func getS3CommandType(baseCommand *command.S3Command) (command.S3CommandConverter, error) {
|
||||
var s3Command command.S3CommandConverter
|
||||
var err error
|
||||
switch *commandType {
|
||||
@@ -119,25 +144,34 @@ func main() {
|
||||
TagValues: tagValues,
|
||||
}
|
||||
if s3Command, err = command.NewPutBucketTaggingCommand(baseCommand, &fields); err != nil {
|
||||
log.Fatalf("Error setting up PutBucketTagging command: %v", err)
|
||||
return nil, fmt.Errorf("error setting up PutBucketTagging command: %v", err)
|
||||
}
|
||||
case PutObject:
|
||||
if s3Command, err = command.NewPutObjectCommand(baseCommand); err != nil {
|
||||
return nil, fmt.Errorf("error setting up PutBucketTagging command: %v", err)
|
||||
}
|
||||
default:
|
||||
s3Command = baseCommand
|
||||
}
|
||||
return s3Command, nil
|
||||
}
|
||||
|
||||
func buildCommand(s3Command command.S3CommandConverter) error {
|
||||
switch *client {
|
||||
case CURL:
|
||||
case command.CURL:
|
||||
curlShellCommand, err := s3Command.CurlShellCommand()
|
||||
if err != nil {
|
||||
log.Fatalf("Error generating curl command: %v", err)
|
||||
return fmt.Errorf("error generating curl command: %w", err)
|
||||
}
|
||||
fmt.Println(curlShellCommand)
|
||||
case OPENSSL:
|
||||
case command.OPENSSL:
|
||||
if err := s3Command.OpenSSLCommand(); err != nil {
|
||||
log.Fatalf("Error generating and writing openssl command: %v", err)
|
||||
return fmt.Errorf("error generating and writing openssl command: %w", err)
|
||||
}
|
||||
default:
|
||||
log.Fatalln("Invalid client type: ", *client)
|
||||
return errors.New("Invalid client type: " + *client)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkFlags() error {
|
||||
@@ -151,6 +185,7 @@ func checkFlags() error {
|
||||
awsRegion = flag.String("awsRegion", "us-east-1", "AWS region")
|
||||
serviceName = flag.String("serviceName", "s3", "Service name")
|
||||
logger.Debug = flag.Bool("debug", false, "Print debug statements")
|
||||
logger.LogFile = flag.String("logFile", "", "Log file, if any")
|
||||
flag.Var(&signedParamsMap, "signedParams", "Signed params, separated by comma")
|
||||
payloadFile = flag.String("payloadFile", "", "Payload file path, if any")
|
||||
incorrectSignature = flag.Bool("incorrectSignature", false, "Simulate an incorrect signature")
|
||||
@@ -161,12 +196,19 @@ func checkFlags() error {
|
||||
payload = flag.String("payload", "", "Message payload")
|
||||
contentMD5 = flag.Bool("contentMD5", false, "Include content-md5 hash")
|
||||
incorrectContentMD5 = flag.Bool("incorrectContentMD5", false, "Include incorrect content-md5 hash")
|
||||
customContentMD5 = flag.String("customContentMD5", "", "Add a custom (generally invalid) content-md5 hash")
|
||||
missingHostParam = flag.Bool("missingHostParam", false, "Missing host parameter")
|
||||
customHostParam = flag.String("customHostParam", "", "Custom host parameter")
|
||||
filePath = flag.String("filePath", "", "Path to write command (stdout if none)")
|
||||
client = flag.String("client", CURL, "Command-line client to use")
|
||||
client = flag.String("client", command.CURL, "Command-line client to use")
|
||||
commandType = flag.String("commandType", "", "Command template to use, if any")
|
||||
tagCount = flag.Int("tagCount", 0, "Autogenerate this amount of tags for commands with tags")
|
||||
payloadType = flag.String("payloadType", "", "Payload type")
|
||||
chunkSize = flag.Int("chunkSize", 0, "Chunk size for chunked uploads (0 for non-chunked upload)")
|
||||
checksumType = flag.String("checksumType", "", "Checksum type for additional or trailing checksum")
|
||||
omitPayloadTrailer = flag.Bool("omitPayloadTrailer", false, "Omit final trailer for chunked uploads w/trailers")
|
||||
omitPayloadTrailerKey = flag.Bool("omitPayloadTrailerKey", false, "Omit final trailer key for chunked uploads w/trailer")
|
||||
omitContentLength = flag.Bool("omitContentLength", false, "Omit content length parameter")
|
||||
flag.Var(&tagKeys, "tagKey", "Tag key (can add multiple)")
|
||||
flag.Var(&tagValues, "tagValue", "Tag value (can add multiple)")
|
||||
// Parse the flags
|
||||
|
||||
@@ -1,11 +1,28 @@
|
||||
package logger
|
||||
|
||||
import "log"
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
var Debug *bool
|
||||
var LogFile *string
|
||||
|
||||
func PrintDebug(format string, args ...any) {
|
||||
func PrintDebug(format string, args ...interface{}) {
|
||||
if *Debug {
|
||||
if *LogFile != "" {
|
||||
logFile, err := os.OpenFile(*LogFile, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("Error opening logfile: %v", err)
|
||||
}
|
||||
defer logFile.Close()
|
||||
log.SetOutput(logFile)
|
||||
}
|
||||
log.Printf(format, args...)
|
||||
}
|
||||
}
|
||||
|
||||
func LogFatal(format string, args ...interface{}) {
|
||||
PrintDebug(format, args...)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -209,8 +209,9 @@ export RUN_USERS=true
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run send_rest_go_command_expect_error "400" "InvalidDigest" "you specified" "-bucketName" "$bucket_name" "-query" "tagging=" "-method" "PUT" "-signedParams" "Content-MD5:dummy" \
|
||||
"-payload" "<Tagging xmlms=\\\"http://s3.amazonaws.com/doc/2006-03-01/\\\"><TagSet><Tag><Key>key</Key><Value>value</Value></Tag></TagSet></Tagging>"
|
||||
run send_rest_go_command_expect_error "400" "InvalidDigest" "you specified" "-bucketName" "$bucket_name" "-query" "tagging=" "-method" "PUT" \
|
||||
"-customContentMD5" "dummy" \
|
||||
"-payload" "<Tagging xmlms=\"http://s3.amazonaws.com/doc/2006-03-01/\"><TagSet><Tag><Key>key</Key><Value>value</Value></Tag></TagSet></Tagging>"
|
||||
assert_success
|
||||
}
|
||||
|
||||
|
||||
@@ -145,22 +145,28 @@ test_file="test_file"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# @test "REST - UploadPart w/o part number" {
|
||||
# run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
# assert_success
|
||||
# bucket_name="$output"
|
||||
@test "REST - UploadPart w/o part number" {
|
||||
|
||||
# run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
|
||||
# assert_success
|
||||
skip "versitygw/curl/fasthttp issue"
|
||||
|
||||
# run split_file "$TEST_FILE_FOLDER/$test_file" 4
|
||||
# assert_success
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
# run upload_part_rest_without_part_number "$bucket_name" "$test_file"
|
||||
# assert_success
|
||||
# }
|
||||
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
|
||||
assert_success
|
||||
|
||||
run split_file "$TEST_FILE_FOLDER/$test_file" 4
|
||||
assert_success
|
||||
|
||||
run upload_part_rest_without_part_number "$bucket_name" "$test_file"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - UploadPart w/o upload ID" {
|
||||
|
||||
skip "versitygw/curl/fasthttp issue"
|
||||
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
@@ -119,3 +119,21 @@ source ./tests/drivers/put_bucket_tagging/put_bucket_tagging_rest.sh
|
||||
run add_verify_bucket_tags_rest "$bucket_name" "$test_key" "$test_value"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutBucketTagging - STREAMING-UNSIGNED-PAYLOAD-TRAILER fails" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1601"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run send_openssl_go_command_expect_error "400" "InvalidRequest" "The value of x-amz-content-sha256 header is invalid" \
|
||||
"-client" "openssl" "-commandType" "putBucketTagging" "-bucketName" "$bucket_name" "-payload" "abcdefg" \
|
||||
"-debug" "-logFile" "tagging.log" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-tagKey" "key" "-tagValue" "value"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@@ -264,6 +264,185 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
|
||||
run send_rest_go_command "200" "-bucketName" "$bucket_name" "-objectKey" "$test_file" "-method" "PUT" "-payloadFile" "$TEST_FILE_FOLDER/$test_file" \
|
||||
"-signedParams" "Expect:100-continue"
|
||||
"-signedParams" "Expect:100-continue" "-debug" "-logFile" "tagging.log"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER, x-amz-trailer of crc32, trailer missing" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1600"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run send_openssl_go_command_expect_error "400" "MalformedTrailerError" "The request contained trailing data that was not well-formed" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-payload" "abcdefg" \
|
||||
"-omitPayloadTrailer" "-checksumType" "crc32" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-signedParams" "x-amz-trailer:x-amz-checksum-crc32"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER - 200 header returns correct checksum type" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1607"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_and_file_v2 "$bucket_name" "$test_file"
|
||||
assert_success
|
||||
|
||||
checksum="$(sha256sum "$TEST_FILE_FOLDER/$test_file" | awk '{print $1}' | xxd -r -p | base64)"
|
||||
|
||||
run send_openssl_go_command_check_header "200" "x-amz-checksum-sha256" "$checksum" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-payloadFile" "$TEST_FILE_FOLDER/$test_file" "-checksumType" "sha256" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-signedParams" "x-amz-trailer:x-amz-checksum-sha256"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER - success (sha1)" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1607"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run create_test_file "$test_file" 10000
|
||||
assert_success
|
||||
|
||||
checksum="$(sha1sum "$TEST_FILE_FOLDER/$test_file" | awk '{print $1}' | xxd -r -p | base64)"
|
||||
|
||||
run send_openssl_go_command_check_header "200" "x-amz-checksum-sha1" "$checksum" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-payloadFile" "$TEST_FILE_FOLDER/$test_file" "-checksumType" "sha1" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-signedParams" "x-amz-trailer:x-amz-checksum-sha1"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER - success (crc32)" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1607"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run create_test_file "$test_file" 10000
|
||||
assert_success
|
||||
|
||||
checksum="$(gzip -c -1 "$TEST_FILE_FOLDER/$test_file" | tail -c8 | od -t x4 -N 4 -A n | awk '{print $1}' | xxd -r -p | base64)"
|
||||
|
||||
run send_openssl_go_command_check_header "200" "x-amz-checksum-crc32" "$checksum" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-payloadFile" "$TEST_FILE_FOLDER/$test_file" "-checksumType" "crc32" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-signedParams" "x-amz-trailer:x-amz-checksum-crc32"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER - success (crc32c)" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1607"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run create_test_file "$test_file" 10000
|
||||
assert_success
|
||||
|
||||
if ! checksum=$(DATA_FILE="$TEST_FILE_FOLDER/$test_file" CHECKSUM_TYPE="crc32c" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
|
||||
log 2 "error calculating checksum: $checksum"
|
||||
return 1
|
||||
fi
|
||||
|
||||
run send_openssl_go_command_check_header "200" "x-amz-checksum-crc32c" "$checksum" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-payloadFile" "$TEST_FILE_FOLDER/$test_file" "-checksumType" "crc32c" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-checksumType" "crc32c" "-signedParams" "x-amz-trailer:x-amz-checksum-crc32c"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER - success (crc64nvme)" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run create_test_file "$test_file" 10000
|
||||
assert_success
|
||||
|
||||
if ! checksum=$(DATA_FILE="$TEST_FILE_FOLDER/$test_file" CHECKSUM_TYPE="crc64nvme" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
|
||||
log 2 "error calculating checksum: $checksum"
|
||||
return 1
|
||||
fi
|
||||
|
||||
run send_openssl_go_command_check_header "200" "x-amz-checksum-crc64nvme" "$checksum" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-payloadFile" "$TEST_FILE_FOLDER/$test_file" "-checksumType" "crc64nvme" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-signedParams" "x-amz-trailer:x-amz-checksum-crc64nvme"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-AWS4-HMAC-SHA256-PAYLOAD - missing content length" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1623"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run send_openssl_go_command_chunked_no_content_length "$bucket_name" "key"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER, x-amz-trailer of crc32, trailer key missing" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1626"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
run send_openssl_go_command_expect_error "400" "MalformedTrailerError" "The request contained trailing data that was not well-formed" \
|
||||
"-client" "openssl" "-commandType" "putObject" "-bucketName" "$bucket_name" "-objectKey" "key" "-payload" "abcdefg" "-checksumType" "crc32c" \
|
||||
"-omitPayloadTrailerKey" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192" "-objectKey" "key" "-signedParams" "x-amz-trailer:x-amz-checksum-crc32"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - PutObject - STREAMING-UNSIGNED-PAYLOAD-TRAILER - default crc64nvme" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1632"
|
||||
fi
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
run setup_bucket_and_file_v2 "$bucket_name" "$test_file"
|
||||
assert_success
|
||||
|
||||
run send_openssl_go_command "200" "-bucketName" "$bucket_name" "-objectKey" "$test_file" "-commandType" "putObject" \
|
||||
"-payloadFile" "$TEST_FILE_FOLDER/$test_file" "-omitPayloadTrailer" \
|
||||
"-debug" "-logFile" "tagging.log" "-checksumType" "crc64nvme" \
|
||||
"-payloadType" "STREAMING-UNSIGNED-PAYLOAD-TRAILER" "-chunkSize" "8192"
|
||||
assert_success
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user