test: UploadPart, UploadPartCopy data, parameter checks

This commit is contained in:
Luke McCrone
2025-04-21 13:49:38 -03:00
parent 63fd82654d
commit 1ea2e42f0a
11 changed files with 286 additions and 78 deletions

View File

@@ -122,5 +122,11 @@ create_multipart_upload_rest() {
log 2 "put-object-retention returned code $result: $(cat "$TEST_FILE_FOLDER/output.txt")"
return 1
fi
log 5 "result: $(cat "$TEST_FILE_FOLDER/output.txt")"
if ! upload_id=$(get_element_text "$TEST_FILE_FOLDER/output.txt" "InitiateMultipartUploadResult" "UploadId"); then
log 2 "error getting upload ID: $upload_id"
return 1
fi
echo "$upload_id"
return 0
}

View File

@@ -101,6 +101,22 @@ log_rest() {
fi
}
# Appends the current header/query line to the canonical request being built
# by build_canonical_request, and records header data for later curl use.
# NOTE(review): relies on the caller's dynamically-scoped variables:
#   line (read); canonical_request, payload, param_list, header_fields (written).
add_cr_parameters_and_header_fields() {
# each line is appended followed by a literal newline — exact bytes matter
# for the AWS SigV4 canonical request
canonical_request+="$line
"
if [[ "$line" == *":"* ]]; then
local key="${line%%:*}"
local value="${line#*:}"
# the x-amz-content-sha256 value doubles as the request payload hash
if [ "$key" == "x-amz-content-sha256" ]; then
payload="$value"
fi
# non-empty headers are collected into the signed-headers list (';'-joined)
# and as curl -H arguments (escaped quotes because the command is eval'd)
if [[ "$value" != "" ]]; then
param_list=$(add_parameter "$param_list" "$key" ";")
header_fields+=(-H "\"$key: $value\"")
fi
fi
}
build_canonical_request() {
if [ $# -lt 0 ]; then
log_rest 2 "'build_canonical_request' requires parameters"
@@ -111,19 +127,7 @@ build_canonical_request() {
local payload=""
header_fields=()
for line in "$@"; do
canonical_request+="$line
"
if [[ "$line" == *":"* ]]; then
local key="${line%%:*}"
local value="${line#*:}"
if [ "$key" == "x-amz-content-sha256" ]; then
payload="$value"
fi
if [[ "$value" != "" ]]; then
param_list=$(add_parameter "$param_list" "$key" ";")
header_fields+=(-H "\"$key: $value\"")
fi
fi
add_cr_parameters_and_header_fields
done
canonical_request+="
$param_list

View File

@@ -27,31 +27,45 @@ upload_id="$UPLOAD_ID"
# shellcheck disable=SC2153
data=$DATA_FILE
payload_hash="$(sha256sum "$data" | awk '{print $1}')"
if [ "$data" != "" ]; then
payload_hash="$(sha256sum "$data" | awk '{print $1}')"
else
payload_hash="$(echo -n "" | sha256sum | awk '{print $1}')"
fi
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
# shellcheck disable=SC2034
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# shellcheck disable=SC2154
canonical_request="PUT
/$bucket_name/$key
partNumber=$part_number&uploadId=$upload_id
host:$aws_endpoint_url_address
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-date
$payload_hash"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
# shellcheck disable=SC2034
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# shellcheck disable=SC2154
cr_data=("PUT" "/$bucket_name/$key")
query_params=""
if [ "$part_number" != "" ]; then
query_params=$(add_parameter "$query_params" "partNumber=$part_number")
fi
if [ "$upload_id" != "" ]; then
query_params=$(add_parameter "$query_params" "uploadId=$upload_id")
fi
cr_data+=("$query_params")
cr_data+=("host:$host" "x-amz-content-sha256:$payload_hash" "x-amz-date:$current_date_time")
build_canonical_request "${cr_data[@]}"
# shellcheck disable=SC2119
create_canonical_hash_sts_and_signature
curl_command+=(curl -isk -w "\"%{http_code}\"" "\"$AWS_ENDPOINT_URL/$bucket_name/$key?partNumber=$part_number&uploadId=$upload_id\""
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
-H "\"x-amz-content-sha256: $payload_hash\""
-H "\"x-amz-date: $current_date_time\""
-o "\"$OUTPUT_FILE\""
-T "\"$data\"")
url="'$AWS_ENDPOINT_URL/$bucket_name/$key"
if [ "$query_params" != "" ]; then
url+="?$query_params"
fi
url+="'"
curl_command+=(curl -isk -w "\"%{http_code}\"" -X PUT "$url"
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
if [ "$data" == "" ]; then
curl_command+=(-H "\"Content-Length: 0\"")
fi
curl_command+=("${header_fields[@]}")
curl_command+=(-o "$OUTPUT_FILE")
if [ "$data" != "" ]; then
curl_command+=(-T "$data")
fi
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

View File

@@ -28,29 +28,34 @@ upload_id="$UPLOAD_ID"
part_location=$PART_LOCATION
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
# shellcheck disable=SC2034
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# shellcheck disable=SC2154
canonical_request="PUT
/$bucket_name/$key
partNumber=$part_number&uploadId=$upload_id
host:$aws_endpoint_url_address
x-amz-content-sha256:UNSIGNED-PAYLOAD
x-amz-copy-source:$part_location
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-copy-source;x-amz-date
UNSIGNED-PAYLOAD"
cr_data=("PUT" "/$bucket_name/$key")
query_params=""
if [ "$part_number" != "" ]; then
query_params=$(add_parameter "$query_params" "partNumber=$part_number")
fi
if [ "$upload_id" != "" ]; then
query_params=$(add_parameter "$query_params" "uploadId=$upload_id")
fi
cr_data+=("$query_params")
cr_data+=("host:$host" "x-amz-content-sha256:UNSIGNED-PAYLOAD")
cr_data+=("x-amz-copy-source:$part_location")
cr_data+=("x-amz-date:$current_date_time")
build_canonical_request "${cr_data[@]}"
# shellcheck disable=SC2119
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "\"$AWS_ENDPOINT_URL/$bucket_name/$key?partNumber=$part_number&uploadId=$upload_id\""
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-copy-source;x-amz-date,Signature=$signature\""
-H "\"x-amz-content-sha256: UNSIGNED-PAYLOAD\""
-H "\"x-amz-copy-source: $part_location\""
-H "\"x-amz-date: $current_date_time\""
-o "\"$OUTPUT_FILE\"")
url="'$AWS_ENDPOINT_URL/$bucket_name/$key"
if [ "$query_params" != "" ]; then
url+="?$query_params"
fi
url+="'"
curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "$url"
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
curl_command+=("${header_fields[@]}")
curl_command+=(-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

View File

@@ -73,10 +73,7 @@ test_file="test_file"
run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run get_object "rest" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
run compare_files "$TEST_FILE_FOLDER/$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
run download_and_compare_file "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
run delete_object "rest" "$BUCKET_ONE_NAME" "$test_file"
@@ -166,10 +163,7 @@ test_file="test_file"
"$TEST_FILE_FOLDER/$test_file-0" "$TEST_FILE_FOLDER/$test_file-1" "$TEST_FILE_FOLDER/$test_file-2" "$TEST_FILE_FOLDER/$test_file-3"
assert_success
run get_object "rest" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
run compare_files "$TEST_FILE_FOLDER/$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
run download_and_compare_file "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
}
@@ -298,7 +292,7 @@ test_file="test_file"
assert_success
}
@test "REST - upload part copy" {
@test "REST - upload part copy (UploadPartCopy)" {
run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
@@ -382,9 +376,6 @@ test_file="test_file"
}
@test "REST - put object w/STREAMING-AWS4-HMAC-SHA256-PAYLOAD without content length" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1043"
fi
run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
@@ -394,7 +385,7 @@ test_file="test_file"
@test "REST - HeadObject does not return 405 with versioning, after file deleted" {
if [ "$RECREATE_BUCKETS" == "false" ] || [[ ( -z "$VERSIONING_DIR" ) && ( "$DIRECT" != "true" ) ]]; then
skip
skip "test isn't valid for this configuration"
fi
run bucket_cleanup_if_bucket_exists "s3api" "$BUCKET_ONE_NAME"
assert_success
@@ -420,7 +411,7 @@ test_file="test_file"
@test "REST - HeadObject returns 405 when querying DeleteMarker" {
if [ "$RECREATE_BUCKETS" == "false" ] || [[ ( -z "$VERSIONING_DIR" ) && ( "$DIRECT" != "true" ) ]]; then
skip
skip "test isn't valid for this configuration"
fi
run bucket_cleanup_if_bucket_exists "s3api" "$BUCKET_ONE_NAME"
assert_success
@@ -533,3 +524,82 @@ test_file="test_file"
run rest_check_legal_hold "$BUCKET_ONE_NAME" "$test_file"
assert_success
}
# UploadPartCopy with partNumber but no uploadId should be rejected
# with 400 InvalidArgument
@test "REST - UploadPartCopy w/o upload ID" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1226"
fi
run upload_part_copy_without_upload_id_or_part_number "$BUCKET_ONE_NAME" "$test_file" "1" "" \
400 "InvalidArgument" "This operation does not accept partNumber without uploadId"
assert_success
}
# UploadPartCopy with uploadId but no partNumber should be rejected
# with 405 MethodNotAllowed
@test "REST - UploadPartCopy w/o part number" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1229"
fi
run upload_part_copy_without_upload_id_or_part_number "$BUCKET_ONE_NAME" "$test_file" "" "dummy" \
405 "MethodNotAllowed" "The specified method is not allowed against this resource"
assert_success
}
# the ETag in the CopyPartResult XML must be a quoted hex string
@test "REST - UploadPartCopy - ETag is quoted" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1235"
fi
run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run upload_part_copy_check_etag_header "$BUCKET_ONE_NAME" "$test_file"-mp "$BUCKET_ONE_NAME/$test_file"
assert_success
}
# the ETag response header returned by UploadPart must be a quoted hex string
@test "REST - UploadPart - ETag is quoted" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1233"
fi
run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
run split_file "$TEST_FILE_FOLDER/$test_file" 4
assert_success
run create_multipart_upload_rest "$BUCKET_ONE_NAME" "$test_file"
assert_success
# shellcheck disable=SC2030
# create_multipart_upload_rest echoes the new upload ID; 'run' captures it
upload_id=$output
run upload_part_check_etag_header "$BUCKET_ONE_NAME" "$test_file" "$upload_id"
assert_success
}
# UploadPart with an empty partNumber should be rejected with 405
@test "REST - UploadPart w/o part number" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1236"
fi
run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
run split_file "$TEST_FILE_FOLDER/$test_file" 4
assert_success
# fix: previously called upload_part_without_upload_id, duplicating the
# 'w/o upload ID' test below and leaving upload_part_without_part_number unused
run upload_part_without_part_number "$BUCKET_ONE_NAME" "$test_file"
assert_success
}
# UploadPart with an empty uploadId should be rejected with 405
@test "REST - UploadPart w/o upload ID" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1237"
fi
run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
run split_file "$TEST_FILE_FOLDER/$test_file" 4
assert_success
run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
assert_success
}

View File

@@ -21,12 +21,10 @@ source ./tests/setup.sh
source ./tests/util/util_head_object.sh
source ./tests/util/util_setup.sh
export RUN_USERS=true
test_file="test_file"
@test "REST - invalid checksum type" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/1104"
fi
run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
assert_success

View File

@@ -29,6 +29,8 @@ source ./tests/commands/get_object.sh
source ./tests/commands/put_object.sh
source ./tests/commands/list_multipart_uploads.sh
export RUN_USERS=true
# abort-multipart-upload
@test "test_abort_multipart_upload" {
local bucket_file="bucket-file"
@@ -115,10 +117,7 @@ source ./tests/commands/list_multipart_uploads.sh
run multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
assert_success
run get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$TEST_FILE_FOLDER/$bucket_file-copy"
assert_success
run compare_files "$TEST_FILE_FOLDER"/$bucket_file-copy "$TEST_FILE_FOLDER"/$bucket_file
run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "${bucket_file}-copy" "$TEST_FILE_FOLDER/$bucket_file-copy-two"
assert_success
}

View File

@@ -224,7 +224,7 @@ export RUN_USERS=true
@test "directory objects can't contain data" {
if [ "$DIRECT" == "true" ]; then
skip
skip "for direct, directory objects can contain data (though discouraged)"
fi
test_file="a"
@@ -239,7 +239,7 @@ export RUN_USERS=true
@test "objects containing data can't be copied to directory objects with same name" {
# operation is legal (though discouraged) for direct
if [ "$DIRECT" == "true" ]; then
skip
skip "for direct, directory objects can contain data (though discouraged)"
fi
test_file="a"

View File

@@ -71,7 +71,7 @@ test_create_user_already_exists() {
test_user_user() {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip
skip "test not valid for static buckets"
fi
run setup_user_v2 "user" "1" "$BUCKET_ONE_NAME"

View File

@@ -221,7 +221,7 @@ run_and_verify_multipart_upload_with_valid_range() {
create_upload_part_copy_rest() {
if [ $# -ne 3 ]; then
log 2 "'run_and_verify_multipart_upload_with_valid_range' requires bucket, key, >20MB file"
log 2 "'create_upload_part_copy_rest' requires bucket, key, >20MB file"
return 1
fi
if ! split_and_put_file "$1" "$2" "$3" 4; then
@@ -314,10 +314,10 @@ setup_multipart_upload_with_params() {
os_name="$(uname)"
if [[ "$os_name" == "Darwin" ]]; then
now=$(date -u +"%Y-%m-%dT%H:%M:%S")
later=$(date -j -v +15S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S")
later=$(date -j -v +20S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S")
else
now=$(date +"%Y-%m-%dT%H:%M:%S")
later=$(date -d "$now 15 seconds" +"%Y-%m-%dT%H:%M:%S")
later=$(date -d "$now 20 seconds" +"%Y-%m-%dT%H:%M:%S")
fi
if ! create_test_files "$2"; then

View File

@@ -434,3 +434,115 @@ list_check_multipart_upload_key() {
fi
return 0
}
# Attempts an UploadPartCopy that is expected to be rejected, and verifies
# both the HTTP response code and the XML error payload.
# Arguments: $1 bucket name, $2 key, $3 part number, $4 upload ID,
#            $5 expected response code, $6 expected error code, $7 expected message
# Returns: 0 if the failure matches expectations, 1 otherwise
upload_part_copy_without_upload_id_or_part_number() {
if [ $# -ne 7 ]; then
log 2 "'upload_part_copy_without_upload_id_or_part_number' requires bucket name, key, part number, upload ID, response code, error code, message"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" PART_NUMBER="$3" UPLOAD_ID="$4" PART_LOCATION="$BUCKET_ONE_NAME/$2-1" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part_copy.sh); then
# fix: removed reference to undefined loop variable '$i' in the log message
log 2 "error attempting upload part copy: $result"
return 1
fi
log 5 "result: $result"
if [ "$result" != "$5" ]; then
log 2 "expected '$5', was '$result' ($(cat "$TEST_FILE_FOLDER/response.txt"))"
return 1
fi
log 5 "error: $(cat "$TEST_FILE_FOLDER/response.txt")"
if ! check_xml_error_contains "$TEST_FILE_FOLDER/response.txt" "$6" "$7"; then
log 2 "error checking XML response"
return 1
fi
# explicit success status rather than falling through on the last 'if'
return 0
}
# Uploads part 1 of a multipart upload via the REST script and verifies that
# the returned ETag response header is a quoted hex string.
# Arguments: $1 bucket name, $2 key, $3 upload ID
# Returns: 0 on success, 1 on failure
upload_part_check_etag_header() {
if [ $# -ne 3 ]; then
log 2 "'upload_part_check_etag_header' requires bucket name, key, upload ID"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" PART_NUMBER="1" UPLOAD_ID="$3" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
# fix: removed reference to undefined loop variable '$i' in the log message
log 2 "error uploading part: $result"
return 1
fi
if [ "$result" != "200" ]; then
log 2 "expected '200', was '$result'"
return 1
fi
# 'tr -d' strips the trailing carriage return from the raw HTTP header line
etag="$(grep -i "ETag: " "$TEST_FILE_FOLDER/response.txt" | awk '{print $2}' | tr -d '\r')"
if ! [[ "$etag" =~ ^\"[0-9a-f]+\" ]]; then
log 2 "etag pattern mismatch, etag ($etag) should be hex string surrounded by quotes"
return 1
fi
return 0
}
# Performs an UploadPartCopy via the REST script and verifies that the ETag
# element in the CopyPartResult XML is a quoted hex string.
# Arguments: $1 bucket, $2 destination file, $3 part location
# Returns: 0 on success, 1 on failure
# NOTE: 'result' and 'etag' are intentionally not local (matches suite style).
upload_part_copy_check_etag_header() {
if [ $# -ne 3 ]; then
log 2 "'upload_part_copy_check_etag_header' requires bucket, destination file, part location"
return 1
fi
if ! create_upload_and_get_id_rest "$1" "$2"; then
log 2 "error creating upload and getting ID"
return 1
fi
# shellcheck disable=SC2154
result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" PART_NUMBER="1" UPLOAD_ID="$upload_id" PART_LOCATION="$3" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part_copy.sh) || {
log 2 "error uploading part: $result"
return 1
}
etag=$(get_element_text "$TEST_FILE_FOLDER/response.txt" "CopyPartResult" "ETag") || {
log 2 "error getting etag"
return 1
}
log 5 "etag: $etag"
if [[ "$etag" =~ ^\"[0-9a-f]+\" ]]; then
return 0
fi
log 2 "etag pattern mismatch, etag ($etag) should be hex string surrounded by quotes"
return 1
}
# Attempts an UploadPart with an empty part number and expects a 405 response.
# Arguments: $1 bucket name, $2 key
# Returns: 0 if the request is rejected with 405, 1 otherwise
upload_part_without_part_number() {
if [ $# -ne 2 ]; then
# fix: usage message previously named the sibling function
# 'upload_part_without_upload_id'
log 2 "'upload_part_without_part_number' requires bucket name, key"
return 1
fi
# create_multipart_upload_rest assigns the global 'upload_id' used below
if ! create_multipart_upload_rest "$1" "$2"; then
log 2 "error creating multipart upload"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" DATA_FILE="$TEST_FILE_FOLDER/$2" PART_NUMBER="" UPLOAD_ID="$upload_id" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
# fix: removed reference to undefined loop variable '$i' in the log message
log 2 "error uploading part: $result"
return 1
fi
if [ "$result" != "405" ]; then
log 2 "expected '405', was '$result' ($(cat "$TEST_FILE_FOLDER/response.txt"))"
return 1
fi
return 0
}
# Attempts an UploadPart with an empty upload ID and expects a 405 response.
# Arguments: $1 bucket name, $2 key
# Returns: 0 if the request is rejected with 405, 1 otherwise
upload_part_without_upload_id() {
if [ $# -ne 2 ]; then
# fix: usage message previously named the sibling function
# 'upload_part_without_part_number'
log 2 "'upload_part_without_upload_id' requires bucket name, key"
return 1
fi
if ! create_multipart_upload_rest "$1" "$2"; then
log 2 "error creating multipart upload"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" DATA_FILE="$TEST_FILE_FOLDER/$2" PART_NUMBER="1" UPLOAD_ID="" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
# fix: removed reference to undefined loop variable '$i' in the log message
log 2 "error uploading part: $result"
return 1
fi
if [ "$result" != "405" ]; then
log 2 "expected '405', was '$result' ($(cat "$TEST_FILE_FOLDER/response.txt"))"
return 1
fi
return 0
}