diff --git a/tests/commands/complete_multipart_upload.sh b/tests/commands/complete_multipart_upload.sh
index 62f8ca2..45c64b0 100644
--- a/tests/commands/complete_multipart_upload.sh
+++ b/tests/commands/complete_multipart_upload.sh
@@ -28,4 +28,66 @@ complete_multipart_upload() {
fi
log 5 "complete multipart upload error: $error"
return 0
+}
+
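+# complete a multipart upload via REST
+# params: bucket, key, upload ID, parts payload
+# return 0 if the request returns 200, 1 otherwise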
+complete_multipart_upload_rest() {
+ if ! check_param_count_v2 "bucket, key, upload ID, parts payload" 4 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PARTS="$4" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh); then
+ log 2 "error completing multipart upload: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "complete multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ return 1
+ fi
+}
+
+complete_multipart_upload_rest_nonexistent_param() {
+ if ! check_param_count_v2 "bucket, key, upload ID, parts payload" 4 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PARTS="$4" ALGORITHM_PARAMETER="true" CHECKSUM_ALGORITHM="crc32c" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh 2>&1); then
+ log 2 "error completing multipart upload: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "complete multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ return 1
+ fi
+}
+
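+# complete a multipart upload with a deliberately altered checksum, expecting a BadDigest error
+# params: bucket, key, upload ID, parts payload, checksum type, checksum algorithm, correct hash
+# return 0 if a 400 BadDigest error is returned, 1 otherwise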
+complete_multipart_upload_rest_incorrect_checksum() {
+ if ! check_param_count_v2 "bucket, key, upload ID, parts payload, type, algorithm, correct hash" 7 $#; then
+ return 1
+ fi
+ checksum="$7"
+ if [ "${checksum:0:1}" == "a" ]; then
+ checksum="b${checksum:1}"
+ else
+ checksum="a${checksum:1}"
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PARTS="$4" CHECKSUM_TYPE="$5" CHECKSUM_ALGORITHM="$6" CHECKSUM_HASH="$checksum" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh 2>&1); then
+ log 2 "error completing multipart upload: $result"
+ return 1
+ fi
+ if ! check_rest_expected_error "$result" "$TEST_FILE_FOLDER/result.txt" 400 "BadDigest" "did not match"; then
+ log 2 "expected '400', was $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ return 1
+ fi
+}
+
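+# complete a multipart upload with a malformed checksum value, expecting an InvalidRequest error
+# params: bucket, key, upload ID, parts payload, checksum type, checksum algorithm, invalid hash
+# return 0 if a 400 InvalidRequest error is returned, 1 otherwise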
+complete_multipart_upload_rest_invalid_checksum() {
+ if ! check_param_count_v2 "bucket, key, upload ID, parts payload, type, algorithm, correct hash" 7 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PARTS="$4" CHECKSUM_TYPE="$5" CHECKSUM_ALGORITHM="$6" CHECKSUM_HASH="$7" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh 2>&1); then
+ log 2 "error completing multipart upload: $result"
+ return 1
+ fi
+ if ! check_rest_expected_error "$result" "$TEST_FILE_FOLDER/result.txt" 400 "InvalidRequest" "header is invalid"; then
+ log 2 "expected '400', was $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ return 1
+ fi
}
\ No newline at end of file
diff --git a/tests/commands/create_multipart_upload.sh b/tests/commands/create_multipart_upload.sh
index e3add78..98b922b 100644
--- a/tests/commands/create_multipart_upload.sh
+++ b/tests/commands/create_multipart_upload.sh
@@ -14,13 +14,63 @@
# specific language governing permissions and limitations
# under the License.
-# initialize a multipart upload
-# params: bucket, key
-# return 0 for success, 1 for failure
-create_multipart_upload() {
+create_multipart_upload_rest() {
+ if ! check_param_count_v2 "bucket name, key" 2 $#; then
+ return 1
+ fi
+ if ! result=$(BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/output.txt" COMMAND_LOG=$COMMAND_LOG ./tests/rest_scripts/create_multipart_upload.sh); then
+ log 2 "error creating multipart upload: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+    log 2 "create multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/output.txt")"
+ return 1
+ fi
+ if ! upload_id=$(get_element_text "$TEST_FILE_FOLDER/output.txt" "InitiateMultipartUploadResult" "UploadId"); then
+ log 2 "error getting upload ID: $upload_id"
+ return 1
+ fi
+ echo "$upload_id"
+ return 0
+}
+
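+# initialize a multipart upload with explicit x-amz-checksum-type and x-amz-checksum-algorithm headers
+# params: bucket, key, checksum type, checksum algorithm
+# echo upload ID on success, return 0 for success, 1 for failure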
+create_multipart_upload_rest_with_checksum_type_and_algorithm() {
+ if ! check_param_count_v2 "bucket, key, checksum type, checksum algorithm" 4 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG=$COMMAND_LOG BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/output.txt" CHECKSUM_TYPE="$3" CHECKSUM_ALGORITHM="$4" ./tests/rest_scripts/create_multipart_upload.sh 2>&1); then
+ log 2 "error creating multipart upload: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "expected '200', was '$result' ($(cat "$TEST_FILE_FOLDER/output.txt"))"
+ return 1
+ fi
+ if ! upload_id=$(get_element_text "$TEST_FILE_FOLDER/output.txt" "InitiateMultipartUploadResult" "UploadId"); then
+ log 2 "error getting upload ID: $upload_id"
+ return 1
+ fi
+ echo "$upload_id"
+ return 0
+}
+
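+# initialize a multipart upload with checksum headers and verify the expected error response
+# params: bucket, key, checksum type, checksum algorithm, check function, expected HTTP code, error code, error message
+# return 0 if the check function passes, 1 otherwise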
+create_multipart_upload_rest_with_checksum_type_and_algorithm_error() {
+ if ! check_param_count_v2 "bucket, key, checksum type, checksum algorithm, handle fn, response, code, error" 8 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG=$COMMAND_LOG BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/output.txt" CHECKSUM_TYPE="$3" CHECKSUM_ALGORITHM="$4" ./tests/rest_scripts/create_multipart_upload.sh 2>&1); then
+ log 2 "error creating multipart upload: $result"
+ return 1
+ fi
+ if ! "$5" "$result" "$TEST_FILE_FOLDER/output.txt" "$6" "$7" "$8"; then
+ log 2 "error checking result"
+ return 1
+ fi
+}
+
+create_multipart_upload_s3api() {
record_command "create-multipart-upload" "client:s3api"
- if [ $# -ne 2 ]; then
- log 2 "create multipart upload function must have bucket, key"
+ if ! check_param_count_v2 "bucket, key" 2 $#; then
return 1
fi
@@ -37,57 +87,8 @@ create_multipart_upload() {
return 0
}
-create_multipart_upload_with_user() {
- record_command "create-multipart-upload" "client:s3api"
- if [ $# -ne 4 ]; then
- log 2 "create multipart upload function must have bucket, key, username, password"
- return 1
- fi
-
- if ! multipart_data=$(AWS_ACCESS_KEY_ID="$3" AWS_SECRET_ACCESS_KEY="$4" send_command aws --no-verify-ssl s3api create-multipart-upload --bucket "$1" --key "$2" 2>&1); then
- log 2 "Error creating multipart upload: $multipart_data"
- return 1
- fi
-
- if ! upload_id=$(echo "$multipart_data" | grep -v "InsecureRequestWarning" | jq -r '.UploadId' 2>&1); then
- log 2 "error parsing upload ID: $upload_id"
- return 1
- fi
- upload_id="${upload_id//\"/}"
- echo "$upload_id"
- return 0
-}
-
-create_multipart_upload_params() {
- record_command "create-multipart-upload" "client:s3api"
- if [ $# -ne 8 ]; then
- log 2 "create multipart upload function with params must have bucket, key, content type, metadata, object lock legal hold status, " \
- "object lock mode, object lock retain until date, and tagging"
- return 1
- fi
- local multipart_data
- multipart_data=$(send_command aws --no-verify-ssl s3api create-multipart-upload \
- --bucket "$1" \
- --key "$2" \
- --content-type "$3" \
- --metadata "$4" \
- --object-lock-legal-hold-status "$5" \
- --object-lock-mode "$6" \
- --object-lock-retain-until-date "$7" \
- --tagging "$8" 2>&1) || local create_result=$?
- if [[ $create_result -ne 0 ]]; then
- log 2 "error creating multipart upload with params: $multipart_data"
- return 1
- fi
- upload_id=$(echo "$multipart_data" | grep -v "InsecureRequestWarning" | jq '.UploadId')
- upload_id="${upload_id//\"/}"
- return 0
-}
-
-create_multipart_upload_custom() {
- record_command "create-multipart-upload" "client:s3api"
- if [ $# -lt 2 ]; then
- log 2 "create multipart upload custom function must have at least bucket and key"
+create_multipart_upload_s3api_custom() {
+ if ! check_param_count_gt "at least bucket and key" 2 $#; then
return 1
fi
local multipart_data
@@ -109,24 +110,47 @@ create_multipart_upload_custom() {
return 0
}
-create_multipart_upload_rest() {
- if [ $# -ne 2 ]; then
- log 2 "'create_multipart_upload_rest' requires bucket name, key"
+create_multipart_upload_s3api_params() {
+ record_command "create-multipart-upload" "client:s3api"
+ if ! check_param_count_v2 "bucket, key, content type, metadata, object lock legal hold status, \
+ object lock mode, object lock retain until date, and tagging" 8 $#; then
return 1
fi
- if ! result=$(BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/output.txt" COMMAND_LOG=$COMMAND_LOG ./tests/rest_scripts/create_multipart_upload.sh); then
- log 2 "error creating multipart upload: $result"
+ local multipart_data
+ multipart_data=$(send_command aws --no-verify-ssl s3api create-multipart-upload \
+ --bucket "$1" \
+ --key "$2" \
+ --content-type "$3" \
+ --metadata "$4" \
+ --object-lock-legal-hold-status "$5" \
+ --object-lock-mode "$6" \
+ --object-lock-retain-until-date "$7" \
+ --tagging "$8" 2>&1) || local create_result=$?
+ if [[ $create_result -ne 0 ]]; then
+ log 2 "error creating multipart upload with params: $multipart_data"
return 1
fi
- if [ "$result" != "200" ]; then
- log 2 "put-object-retention returned code $result: $(cat "$TEST_FILE_FOLDER/output.txt")"
+ upload_id=$(echo "$multipart_data" | grep -v "InsecureRequestWarning" | jq '.UploadId')
+ upload_id="${upload_id//\"/}"
+ return 0
+}
+
+create_multipart_upload_s3api_with_user() {
+ record_command "create-multipart-upload" "client:s3api"
+ if ! check_param_count_v2 "bucket, key, username, password" 4 $#; then
return 1
fi
- log 5 "result: $(cat "$TEST_FILE_FOLDER/output.txt")"
- if ! upload_id=$(get_element_text "$TEST_FILE_FOLDER/output.txt" "InitiateMultipartUploadResult" "UploadId"); then
- log 2 "error getting upload ID: $upload_id"
+
+ if ! multipart_data=$(AWS_ACCESS_KEY_ID="$3" AWS_SECRET_ACCESS_KEY="$4" send_command aws --no-verify-ssl s3api create-multipart-upload --bucket "$1" --key "$2" 2>&1); then
+ log 2 "Error creating multipart upload: $multipart_data"
return 1
fi
+
+ if ! upload_id=$(echo "$multipart_data" | grep -v "InsecureRequestWarning" | jq -r '.UploadId' 2>&1); then
+ log 2 "error parsing upload ID: $upload_id"
+ return 1
+ fi
+ upload_id="${upload_id//\"/}"
echo "$upload_id"
return 0
}
diff --git a/tests/commands/delete_object.sh b/tests/commands/delete_object.sh
index dedb34e..f4cd06b 100644
--- a/tests/commands/delete_object.sh
+++ b/tests/commands/delete_object.sh
@@ -45,23 +45,6 @@ delete_object() {
return 0
}
-# shellcheck disable=SC2317
-delete_object_rest() {
- if ! check_param_count "delete_object_rest" "bucket, key" 2 $#; then
- return 1
- fi
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/delete_object.sh 2>&1); then
- log 2 "error deleting object: $result"
- return 1
- fi
- if [ "$result" != "204" ]; then
- delete_object_error=$(cat "$TEST_FILE_FOLDER/result.txt")
- log 2 "expected '204', was '$result' ($delete_object_error)"
- return 1
- fi
- return 0
-}
-
delete_object_bypass_retention() {
if ! check_param_count "delete_object_bypass_retention" "bucket, key, user, password" 4 $#; then
return 1
diff --git a/tests/commands/get_object.sh b/tests/commands/get_object.sh
index c0427b9..dc74f11 100644
--- a/tests/commands/get_object.sh
+++ b/tests/commands/get_object.sh
@@ -115,3 +115,18 @@ get_object_rest_with_user() {
fi
return 0
}
+
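+# get an object with an invalid x-amz-content-sha256 value, expecting an InvalidArgument error
+# params: bucket, key
+# return 0 if a 400 InvalidArgument error is returned, 1 otherwise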
+get_object_rest_with_invalid_streaming_type() {
+ if ! check_param_count_v2 "bucket, key" 2 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/$2_copy" PAYLOAD="GIBBERISH" ./tests/rest_scripts/get_object.sh 2>&1); then
+ log 2 "error: $result"
+ return 1
+ fi
+ if ! check_rest_expected_error "$result" "$TEST_FILE_FOLDER/$2_copy" "400" "InvalidArgument" "x-amz-content-sha256 must be"; then
+ log 2 "error checking response"
+ return 1
+ fi
+ return 0
+}
diff --git a/tests/commands/get_object_lock_configuration.sh b/tests/commands/get_object_lock_configuration.sh
index e47d382..c4b836f 100644
--- a/tests/commands/get_object_lock_configuration.sh
+++ b/tests/commands/get_object_lock_configuration.sh
@@ -14,7 +14,7 @@
# specific language governing permissions and limitations
# under the License.
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
get_object_lock_configuration() {
record_command "get-object-lock-configuration" "client:s3api"
diff --git a/tests/commands/list_buckets.sh b/tests/commands/list_buckets.sh
index d94957c..c4926d8 100644
--- a/tests/commands/list_buckets.sh
+++ b/tests/commands/list_buckets.sh
@@ -125,6 +125,29 @@ list_buckets_rest() {
log 2 "list-buckets returned code $result: $(cat "$TEST_FILE_FOLDER/buckets.txt")"
return 1
fi
- parse_bucket_list
+ if ! parse_bucket_list "$TEST_FILE_FOLDER/buckets.txt"; then
+ log 2 "error parsing bucket list"
+ return 1
+ fi
+ return 0
+}
+
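+# list buckets via REST with a prefix filter and parse the returned bucket list
+# params: prefix
+# return 0 for success, 1 for failure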
+list_buckets_rest_prefix() {
+ if ! check_param_count_v2 "prefix" 1 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" PREFIX="$1" ./tests/rest_scripts/list_buckets.sh 2>&1); then
+ log 2 "error getting result: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "expected '200', was '$result' ($(cat "$TEST_FILE_FOLDER/result.txt"))"
+ return 1
+ fi
+ log 5 "buckets w/prefix $1: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ if ! parse_bucket_list "$TEST_FILE_FOLDER/result.txt"; then
+ log 2 "error parsing bucket list"
+ return 1
+ fi
return 0
}
diff --git a/tests/commands/put_bucket_acl.sh b/tests/commands/put_bucket_acl.sh
index d18d0be..0e37e05 100644
--- a/tests/commands/put_bucket_acl.sh
+++ b/tests/commands/put_bucket_acl.sh
@@ -16,6 +16,7 @@
source ./tests/util/util_file.sh
source ./tests/commands/command.sh
+source ./tests/drivers/rest.sh
put_bucket_acl_s3api() {
log 6 "put_bucket_acl_s3api"
@@ -128,3 +129,33 @@ put_bucket_acl_rest() {
fi
return 0
}
+
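+# apply a canned ACL to a bucket via REST
+# params: bucket name, canned ACL
+# return 0 if the request returns 200, 1 otherwise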
+put_canned_acl_rest() {
+ if ! check_param_count_v2 "bucket name, canned ACL" 2 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" CANNED_ACL="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/put_bucket_acl.sh); then
+ log 2 "error attempting to put bucket acl: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "response code '$result' (message: $(cat "$TEST_FILE_FOLDER/response.txt"))"
+ return 1
+ fi
+ return 0
+}
+
+put_bucket_acl_rest_canned_invalid() {
+ if ! check_param_count_v2 "bucket name, invalid ACL" 2 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" CANNED_ACL="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/put_bucket_acl.sh); then
+ log 2 "error attempting to put bucket acl: $result"
+ return 1
+ fi
+ if ! check_rest_expected_error "$result" "$TEST_FILE_FOLDER/response.txt" "400" "InvalidArgument" ""; then
+ log 2 "error checking REST response (message: $(cat "$TEST_FILE_FOLDER/response.txt"))"
+ return 1
+ fi
+ return 0
+}
diff --git a/tests/commands/put_bucket_policy.sh b/tests/commands/put_bucket_policy.sh
index eb44afc..8428d88 100644
--- a/tests/commands/put_bucket_policy.sh
+++ b/tests/commands/put_bucket_policy.sh
@@ -14,7 +14,7 @@
# specific language governing permissions and limitations
# under the License.
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
put_bucket_policy() {
log 6 "put_bucket_policy '$1' '$2' '$3'"
diff --git a/tests/commands/put_object.sh b/tests/commands/put_object.sh
index 45760ef..d5cef2c 100644
--- a/tests/commands/put_object.sh
+++ b/tests/commands/put_object.sh
@@ -142,6 +142,22 @@ put_object_rest_user_bad_signature() {
return 0
}
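+# put an object with an unneeded x-amz-checksum-algorithm header, expecting success
+# params: local file, bucket name, key, checksum type
+# return 0 if the request returns 200, 1 otherwise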
+put_object_rest_with_unneeded_algorithm_param() {
+ if ! check_param_count_v2 "local file, bucket name, key, checksum type" 4 $#; then
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" DATA_FILE="$1" BUCKET_NAME="$2" OBJECT_KEY="$3" CHECKSUM_TYPE="$4" \
+ ALGORITHM_PARAMETER="true" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/put_object.sh); then
+ log 2 "error sending object file: $result"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "expected '200', was '$result' ($(cat "$TEST_FILE_FOLDER/result.txt"))"
+ return 1
+ fi
+ return 0
+}
+
put_object_multiple() {
if [ $# -ne 3 ]; then
log 2 "put object command requires command type, source, destination"
diff --git a/tests/commands/upload_part.sh b/tests/commands/upload_part.sh
index 8ba8382..1e24167 100644
--- a/tests/commands/upload_part.sh
+++ b/tests/commands/upload_part.sh
@@ -44,12 +44,11 @@ upload_part_with_user() {
export etag
}
-upload_part_and_get_etag_rest() {
- if [ $# -ne 5 ]; then
- log 2 "'upload_part_rest' requires bucket name, key, part number, upload ID, part"
+upload_part_rest() {
+ if ! check_param_count_v2 "bucket, key, upload ID, part number, part" 5 $#; then
return 1
fi
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" PART_NUMBER="$4" UPLOAD_ID="$3" DATA_FILE="$5" OUTPUT_FILE="$TEST_FILE_FOLDER/etag.txt" ./tests/rest_scripts/upload_part.sh); then
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PART_NUMBER="$4" DATA_FILE="$5" OUTPUT_FILE="$TEST_FILE_FOLDER/etag.txt" ./tests/rest_scripts/upload_part.sh); then
log 2 "error sending upload-part REST command: $result"
return 1
fi
@@ -60,5 +59,67 @@ upload_part_and_get_etag_rest() {
log 5 "$(cat "$TEST_FILE_FOLDER/etag.txt")"
etag=$(grep -i "etag" "$TEST_FILE_FOLDER/etag.txt" | awk '{print $2}' | tr -d '\r')
log 5 "etag: $etag"
+ echo "$etag"
+ return 0
+}
+
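+# create a multipart upload, then attempt UploadPart without a part number, expecting MethodNotAllowed
+# params: bucket, key
+# return 0 if a 405 MethodNotAllowed error is returned, 1 otherwise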
+upload_part_rest_without_part_number() {
+ if ! check_param_count_v2 "bucket, key" 2 $#; then
+ return 1
+ fi
+ if ! create_multipart_upload_rest "$1" "$2"; then
+    log 2 "error creating multipart upload"
+ return 1
+ fi
+ # shellcheck disable=SC2154
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" DATA_FILE="$TEST_FILE_FOLDER/$2" PART_NUMBER="" UPLOAD_ID="$upload_id" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
+    log 2 "error uploading part: $result"
+ return 1
+ fi
+ if ! check_rest_expected_error "$result" "$TEST_FILE_FOLDER/response.txt" "405" "MethodNotAllowed" "method is not allowed"; then
+ log 2 "error checking error"
+ return 1
+ fi
+ return 0
+}
+
+upload_part_rest_without_upload_id() {
+ if ! check_param_count_v2 "bucket, key" 2 $#; then
+ return 1
+ fi
+ if ! create_multipart_upload_rest "$1" "$2"; then
+    log 2 "error creating multipart upload"
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" DATA_FILE="$TEST_FILE_FOLDER/$2" PART_NUMBER="1" UPLOAD_ID="" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
+    log 2 "error uploading part: $result"
+ return 1
+ fi
+ if ! check_rest_expected_error "$result" "$TEST_FILE_FOLDER/response.txt" "405" "MethodNotAllowed" "method is not allowed"; then
+ log 2 "error checking error"
+ return 1
+ fi
+ return 0
+}
+
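+# upload a part with an x-amz-checksum header, setting the global 'etag' and 'checksum' values
+# params: bucket name, key, upload ID, part number, part file, checksum algorithm
+# return 0 for success, 1 for failure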
+upload_part_rest_with_checksum() {
+ if ! check_param_count_v2 "bucket name, key, upload ID, part number, part, checksum algorithm" 6 $#; then
+ return 1
+ fi
+ # shellcheck disable=SC2154,SC2097,SC2098
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PART_NUMBER="$4" DATA_FILE="$5" CHECKSUM_TYPE="$6" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" OUTPUT_FILE="$TEST_FILE_FOLDER/etag.txt" ./tests/rest_scripts/upload_part.sh); then
+ log 2 "error sending upload-part REST command: $result"
+ return 1
+ fi
+ if [[ "$result" != "200" ]]; then
+ log 2 "upload-part command returned error $result: $(cat "$TEST_FILE_FOLDER/etag.txt")"
+ return 1
+ fi
+ log 5 "$(cat "$TEST_FILE_FOLDER/etag.txt")"
+ etag=$(grep -i "etag" "$TEST_FILE_FOLDER/etag.txt" | awk '{print $2}' | tr -d '\r')
+ # shellcheck disable=SC2034
+ checksum=$(grep -i "x-amz-checksum-" "$TEST_FILE_FOLDER/etag.txt" | awk '{print $2}' | tr -d '\r')
+ log 5 "etag: $etag"
return 0
}
diff --git a/tests/drivers/complete_multipart_upload/complete_multipart_upload_rest.sh b/tests/drivers/complete_multipart_upload/complete_multipart_upload_rest.sh
new file mode 100644
index 0000000..7f2d6ff
--- /dev/null
+++ b/tests/drivers/complete_multipart_upload/complete_multipart_upload_rest.sh
@@ -0,0 +1,167 @@
+#!/usr/bin/env bash
+
+# Copyright 2024 Versity Software
+# This file is licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
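+# calculate the expected object-level checksum for a multipart upload (COMPOSITE or FULL_OBJECT)
+# params: checksum type, part count, data file, part checksums
+# sets 'checksum', return 0 for success, 1 for failure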
+calculate_multipart_checksum() {
+ if ! check_param_count_gt "checksum type, part count, data file, checksums" 4 $#; then
+ return 1
+ fi
+ log 5 "checksums: ${*:4}"
+ if [ "$1" == "COMPOSITE" ]; then
+    if ! calculate_composite_checksum "$lowercase_checksum_algorithm" "${@:4}"; then
+ log 2 "error calculating checksum"
+ return 1
+ fi
+ checksum="$composite-$2"
+ return 0
+ fi
+
+ if [ "$1" != "FULL_OBJECT" ]; then
+ log 2 "unrecognized checksum type: $1"
+ return 1
+ fi
+ if ! checksum=$(DATA_FILE="$3" CHECKSUM_TYPE="$lowercase_checksum_algorithm" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
+ log 2 "error calculating checksum: $checksum"
+ return 1
+ fi
+ return 0
+}
+
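+# upload parts with checksums, calculate the final checksum, and complete the multipart upload
+# params: bucket, key, file, upload ID, part count, checksum type, checksum algorithm
+# return 0 for success, 1 for failure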
+complete_multipart_upload_with_checksum() {
+ if ! check_param_count_v2 "bucket, key, file, upload ID, part count, checksum type, checksum algorithm" 7 $#; then
+ return 1
+ fi
+ lowercase_checksum_algorithm=$(echo -n "$7" | tr '[:upper:]' '[:lower:]')
+ if ! upload_parts_rest_with_checksum_before_completion "$1" "$2" "$3" "$4" "$5" "$lowercase_checksum_algorithm"; then
+ log 2 "error uploading parts"
+ return 1
+ fi
+ log 5 "parts payload: $parts_payload"
+ log 5 "checksums: ${checksums[*]}"
+  if ! calculate_multipart_checksum "$6" "$5" "$3" "${checksums[@]}"; then
+ log 2 "error calculating multipart checksum"
+ return 1
+ fi
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$4" PARTS="$parts_payload" CHECKSUM_TYPE="$6" CHECKSUM_ALGORITHM="$7" CHECKSUM_HASH="$checksum" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh); then
+ log 2 "error completing multipart upload"
+ return 1
+ fi
+ if [ "$result" != "200" ]; then
+ log 2 "expected '200', was '$result' ($(cat "$TEST_FILE_FOLDER/result.txt"))"
+ return 1
+ fi
+ log 5 "result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ return 0
+}
+
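+# calculate a composite checksum by concatenating the decoded part checksums and hashing the result
+# params: algorithm, part checksums (at least two)
+# sets 'composite', return 0 for success, 1 for failure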
+calculate_composite_checksum() {
+ if ! check_param_count_gt "algorithm, at least two checksums" 3 $#; then
+ return 1
+ fi
+ if ! result=$(truncate -s 0 "$TEST_FILE_FOLDER/all_checksums.bin" 2>&1); then
+ log 2 "error truncating file: $result"
+ return 1
+ fi
+ log 5 "checksums: ${*:2}"
+  for checksum in "${@:2}"; do
+ if ! binary_checksum=$(echo -n "$checksum" | base64 -d 2>&1); then
+ log 2 "error calculating binary checksum: $binary_checksum"
+ return 1
+ fi
+ log 5 "binary checksum: $binary_checksum"
+ printf "%s" "$binary_checksum" | cat >> "$TEST_FILE_FOLDER/all_checksums.bin"
+ done
+ if [ "$1" == "sha256" ]; then
+ composite=$(openssl dgst -sha256 -binary "$TEST_FILE_FOLDER/all_checksums.bin" | base64)
+ elif [ "$1" == "sha1" ]; then
+ composite=$(openssl dgst -sha1 -binary "$TEST_FILE_FOLDER/all_checksums.bin" | base64)
+ elif [ "$1" == "crc32" ]; then
+ composite="$(gzip -c -1 "$TEST_FILE_FOLDER/all_checksums.bin" | tail -c8 | od -t x4 -N 4 -A n | awk '{print $1}' | xxd -r -p | base64)"
+ elif [ "$1" == "crc32c" ]; then
+ if ! composite=$(CHECKSUM_TYPE="$1" DATA_FILE="$TEST_FILE_FOLDER/all_checksums.bin" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
+ log 2 "error calculating crc32c checksum: $composite"
+ return 1
+ fi
+ fi
+ log 5 "composite: $composite"
+}
+
+test_multipart_upload_with_checksum() {
+ if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
+ return 1
+ fi
+ if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
+ log 2 "error performing multipart upload with checksum before completion"
+ return 1
+ fi
+  if ! calculate_multipart_checksum "$3" 2 "$TEST_FILE_FOLDER/$2" "${checksums[@]}"; then
+ log 2 "error calculating multipart checksum"
+ return 1
+ fi
+ if ! complete_multipart_upload_with_checksum "$1" "$2" "$TEST_FILE_FOLDER/$2" "$upload_id" 2 "$3" "$4"; then
+ log 2 "error completing multipart upload"
+ return 1
+ fi
+ return 0
+}
+
+test_complete_multipart_upload_unneeded_algorithm_parameter() {
+ if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
+ return 1
+ fi
+ if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
+ log 2 "error performing multipart upload with checksum before completion"
+ return 1
+ fi
+ if ! complete_multipart_upload_rest_nonexistent_param "$1" "$2" "$upload_id" "$parts_payload"; then
+ log 2 "error completing multipart upload with nonexistent param"
+ return 1
+ fi
+ return 0
+}
+
+test_complete_multipart_upload_incorrect_checksum() {
+ if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
+ return 1
+ fi
+ if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
+ log 2 "error performing multipart upload with checksum before completion"
+ return 1
+ fi
+  if ! calculate_multipart_checksum "$3" 2 "$TEST_FILE_FOLDER/$2" "${checksums[@]}"; then
+ log 2 "error calculating multipart checksum"
+ return 1
+ fi
+ if ! complete_multipart_upload_rest_incorrect_checksum "$1" "$2" "$upload_id" "$parts_payload" "$3" "$4" "$checksum"; then
+    log 2 "error completing multipart upload with incorrect checksum"
+ return 1
+ fi
+ return 0
+}
+
+test_complete_multipart_upload_invalid_checksum() {
+ if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
+ return 1
+ fi
+ if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
+ log 2 "error performing multipart upload with checksum before completion"
+ return 1
+ fi
+ if ! complete_multipart_upload_rest_invalid_checksum "$1" "$2" "$upload_id" "$parts_payload" "$3" "$4" "wrong"; then
+    log 2 "error completing multipart upload with invalid checksum"
+ return 1
+ fi
+ return 0
+}
diff --git a/tests/drivers/openssl.sh b/tests/drivers/openssl.sh
new file mode 100644
index 0000000..36e42cd
--- /dev/null
+++ b/tests/drivers/openssl.sh
@@ -0,0 +1,94 @@
+#!/usr/bin/env bash
+
+# Copyright 2024 Versity Software
+# This file is licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+source ./tests/util/util_xml.sh
+
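+# send a raw HTTP request file to the S3 endpoint via 'openssl s_client'
+# params: command file
+# echo the raw response, return 0 for success, 1 for failure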
+send_via_openssl() {
+ if ! check_param_count_v2 "command file" 1 $#; then
+ return 1
+ fi
+ host="${AWS_ENDPOINT_URL#http*://}"
+ if [[ "$host" =~ s3\..*amazonaws\.com ]]; then
+ host+=":443"
+ fi
+ log 5 "connecting to $host"
+ if ! result=$(openssl s_client -connect "$host" -ign_eof < "$1" 2>&1); then
+ log 2 "error sending openssl command: $result"
+ return 1
+ fi
+ echo "$result"
+}
+
+send_via_openssl_and_check_code() {
+ if ! check_param_count_v2 "command file, expected code" 2 $#; then
+ return 1
+ fi
+ if ! result=$(send_via_openssl "$1"); then
+ log 2 "error sending command via openssl"
+ return 1
+ fi
+ response_code="$(echo "$result" | grep "HTTP/" | awk '{print $2}')"
+ if [ "$response_code" != "$2" ]; then
+ log 2 "expected '$2', actual '$response_code' (error response: '$result')"
+ return 1
+ fi
+ echo "$result"
+}
+
+send_via_openssl_check_code_error_contains() {
+ if ! check_param_count_v2 "command file, expected code, error, message" 4 $#; then
+ return 1
+ fi
+ if ! result=$(send_via_openssl_and_check_code "$1" "$2"); then
+ log 2 "error sending and checking code"
+ return 1
+ fi
+ echo -n "$result" > "$TEST_FILE_FOLDER/result.txt"
+ if ! get_xml_data "$TEST_FILE_FOLDER/result.txt" "$TEST_FILE_FOLDER/error_data.txt"; then
+ log 2 "error parsing XML data from result"
+ return 1
+ fi
+ if ! check_xml_error_contains "$TEST_FILE_FOLDER/error_data.txt" "$3" "$4"; then
+ log 2 "error checking xml error, message"
+ return 1
+ fi
+ return 0
+}
+
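+# send a raw HTTP request via 'openssl s_client' with a timeout, verifying the connection closes cleanly
+# params: command file
+# return 0 for success, 1 on timeout, error, or unclosed connection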
+send_via_openssl_with_timeout() {
+ if ! check_param_count_v2 "command file" 1 $#; then
+ return 1
+ fi
+ host="${AWS_ENDPOINT_URL#http*://}"
+ if [[ "$host" =~ s3\..*amazonaws\.com ]]; then
+ host+=":443"
+ fi
+ log 5 "connecting to $host"
+ local exit_code=0
+ result=$(timeout 65 openssl s_client -connect "$host" -ign_eof < "$1" 2>&1) || exit_code=$?
+ if [ "$exit_code" == 124 ]; then
+ log 2 "error: openssl command timed out"
+ return 1
+ elif [ "$exit_code" != 0 ]; then
+ log 2 "error sending openssl command: exit code $exit_code, $result"
+ return 1
+ fi
+ if ! [[ "$result" =~ .*$'\nclosed' ]]; then
+ log 2 "connection not closed properly: $result"
+ return 1
+ fi
+ return 0
+}
diff --git a/tests/drivers/drivers.sh b/tests/drivers/params.sh
similarity index 83%
rename from tests/drivers/drivers.sh
rename to tests/drivers/params.sh
index 69506c2..b5d57a8 100644
--- a/tests/drivers/drivers.sh
+++ b/tests/drivers/params.sh
@@ -28,7 +28,7 @@ check_param_count() {
check_param_count_v2() {
if [ $# -ne 3 ]; then
- log 2 "'check_param_count' requires params list, expected, actual"
+ log 2 "'check_param_count_v2' requires params list, expected, actual"
return 1
fi
if [ "$2" -ne "$3" ]; then
@@ -51,12 +51,12 @@ assert_param_count() {
}
check_param_count_gt() {
- if [ $# -ne 4 ]; then
- log 2 "'check_param_count_gt' requires function name, params list, expected minimum, actual"
+ if [ $# -lt 3 ]; then
+ log 2 "'check_param_count_gt' requires params list, expected minimum, actual"
return 1
fi
- if [ "$3" -gt "$4" ]; then
- log_with_stack_ref 2 "function $1 requires $2" 2
+ if [ "$2" -gt "$3" ]; then
+ log_with_stack_ref 2 "function '${FUNCNAME[1]}' requires $1" 2
return 1
fi
return 0
diff --git a/tests/drivers/rest.sh b/tests/drivers/rest.sh
new file mode 100644
index 0000000..69b74b0
--- /dev/null
+++ b/tests/drivers/rest.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+# Copyright 2024 Versity Software
+# This file is licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+source ./tests/util/util_xml.sh
+
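+# check a REST response against an expected HTTP code, error code, and error message
+# params: response code, response file, expected HTTP code, expected error code, expected error message
+# return 0 if all match, 1 otherwise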
+check_rest_expected_error() {
+ if ! check_param_count_v2 "response, response file, expected http code, expected error code, expected error" 5 $#; then
+ return 1
+ fi
+ if [ "$1" != "$3" ]; then
+ log 2 "expected '$3', was '$1' ($(cat "$2"))"
+ return 1
+ fi
+ if ! check_xml_error_contains "$2" "$4" "$5"; then
+ log 2 "error checking XML response"
+ return 1
+ fi
+ return 0
+}
\ No newline at end of file
diff --git a/tests/drivers/upload_part/upload_part_rest.sh b/tests/drivers/upload_part/upload_part_rest.sh
new file mode 100644
index 0000000..f3411b4
--- /dev/null
+++ b/tests/drivers/upload_part/upload_part_rest.sh
@@ -0,0 +1,80 @@
+#!/usr/bin/env bash
+
+# Copyright 2024 Versity Software
+# This file is licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
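+# split a file into parts, upload each one, and echo the parts payload for CompleteMultipartUpload
+# params: bucket, key, file, upload ID, part count
+# return 0 for success, 1 for failure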
+upload_parts_rest_before_completion() {
+ if ! check_param_count_v2 "bucket, key, file, upload ID, part count" 5 $#; then
+ return 1
+ fi
+ if ! split_file "$3" "$5"; then
+ log 2 "error splitting file"
+ return 1
+ fi
+ local parts_payload=""
+ for ((part=0;part<"$5";part++)); do
+ part_number=$((part+1))
+ if ! etag=$(upload_part_rest "$1" "$2" "$4" "$part_number" "$3-$part" 2>&1); then
+ log 2 "error uploading part $part: $etag"
+ return 1
+ fi
+    parts_payload+="<Part><ETag>$etag</ETag><PartNumber>$part_number</PartNumber></Part>"
+ done
+ echo "$parts_payload"
+ return 0
+}
+
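+# split a file into parts and upload each with a checksum, building 'parts_payload' and 'checksums'
+# params: bucket, key, file, upload ID, part count, checksum algorithm
+# return 0 for success, 1 for failure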
+upload_parts_rest_with_checksum_before_completion() {
+ if ! check_param_count_v2 "bucket, key, file, upload ID, part count, algorithm" 6 $#; then
+ return 1
+ fi
+ if ! split_file "$3" "$5"; then
+ log 2 "error splitting file"
+ return 1
+ fi
+ parts_payload=""
+ checksums=()
+ for ((part=0;part<"$5";part++)); do
+ part_number=$((part+1))
+ if ! upload_part_rest_with_checksum "$1" "$2" "$4" "$part_number" "$3-$part" "$6"; then
+ log 2 "error uploading part $part"
+ return 1
+ fi
+ checksums+=("$checksum")
+ uppercase_checksum_algorithm=$(echo -n "$6" | tr '[:lower:]' '[:upper:]')
+    parts_payload+="<Part><ETag>$etag</ETag><Checksum${uppercase_checksum_algorithm}>${checksum}</Checksum${uppercase_checksum_algorithm}><PartNumber>$part_number</PartNumber></Part>"
+ log 5 "parts payload: $parts_payload"
+ done
+ log 5 "${checksums[*]}"
+ return 0
+}
+
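+# set up a bucket and large file, create a checksummed multipart upload, and upload its parts
+# params: bucket, filename, checksum type, checksum algorithm
+# return 0 for success, 1 for failure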
+perform_full_multipart_upload_with_checksum_before_completion() {
+ if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
+ return 1
+ fi
+ if ! setup_bucket_and_large_file "$1" "$2"; then
+ log 2 "error setting up bucket and large file"
+ return 1
+ fi
+ if ! create_multipart_upload_rest_with_checksum_type_and_algorithm "$1" "$2" "$3" "$4"; then
+ log 2 "error creating multipart upload"
+ return 1
+ fi
+ lowercase_checksum_algorithm=$(echo -n "$4" | tr '[:upper:]' '[:lower:]')
+ if ! upload_parts_rest_with_checksum_before_completion "$1" "$2" "$TEST_FILE_FOLDER/$2" "$upload_id" 2 "$lowercase_checksum_algorithm"; then
+ log 2 "error uploading parts"
+ return 1
+ fi
+}
\ No newline at end of file
diff --git a/tests/env.sh b/tests/env.sh
index e2d096f..64f4aba 100644
--- a/tests/env.sh
+++ b/tests/env.sh
@@ -160,6 +160,7 @@ check_universal_vars() {
exit 1
fi
fi
+ export TEST_FILE_FOLDER
}
delete_command_log() {
diff --git a/tests/rest_scripts/calculate_checksum.sh b/tests/rest_scripts/calculate_checksum.sh
index a846585..ca99e89 100755
--- a/tests/rest_scripts/calculate_checksum.sh
+++ b/tests/rest_scripts/calculate_checksum.sh
@@ -45,25 +45,25 @@ calculate_checksum_python() {
}
case "$CHECKSUM_TYPE" in
-"crc32c")
+"crc32c"|"CRC32C")
if ! checksum=$(calculate_checksum_python "crc32c" "$DATA_FILE" 2>&1); then
log_rest 2 "error getting checksum: $checksum"
exit 1
fi
;;
-"crc64nvme")
+"crc64nvme"|"CRC64NVME")
if ! checksum=$(calculate_checksum_python "crc64nvme" "$DATA_FILE" 2>&1); then
log 2 "error calculating checksum: $checksum"
exit 1
fi
;;
-"sha256")
+"sha256"|"SHA256")
checksum="$(sha256sum "$DATA_FILE" | awk '{print $1}' | xxd -r -p | base64)"
;;
-"sha1")
+"sha1"|"SHA1")
checksum="$(sha1sum "$DATA_FILE" | awk '{print $1}' | xxd -r -p | base64)"
;;
-"crc32")
+"crc32"|"CRC32")
checksum="$(gzip -c -1 "$DATA_FILE" | tail -c8 | od -t x4 -N 4 -A n | awk '{print $1}' | xxd -r -p | base64)"
;;
*)
diff --git a/tests/rest_scripts/complete_multipart_upload.sh b/tests/rest_scripts/complete_multipart_upload.sh
index abf7dc2..ff31668 100755
--- a/tests/rest_scripts/complete_multipart_upload.sh
+++ b/tests/rest_scripts/complete_multipart_upload.sh
@@ -26,30 +26,43 @@ key="$OBJECT_KEY"
upload_id="$UPLOAD_ID"
# shellcheck disable=SC2153
parts="$PARTS"
+# shellcheck disable=SC2153
+checksum_type="$CHECKSUM_TYPE"
+# shellcheck disable=SC2153
+checksum_algorithm="$CHECKSUM_ALGORITHM"
+# shellcheck disable=SC2153
+checksum_hash="$CHECKSUM_HASH"
+# shellcheck disable=SC2154
+algorithm_parameter="${ALGORITHM_PARAMETER:=false}"
-payload="<CompleteMultipartUpload>$parts</CompleteMultipartUpload>"
+payload="<CompleteMultipartUpload>
+$parts</CompleteMultipartUpload>"
payload_hash="$(echo -n "$payload" | sha256sum | awk '{print $1}')"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
-canonical_request="POST
-/$bucket_name/$key
-uploadId=$UPLOAD_ID
-host:$host
-x-amz-content-sha256:$payload_hash
-x-amz-date:$current_date_time
-
-host;x-amz-content-sha256;x-amz-date
-$payload_hash"
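+# assemble canonical request fields, adding optional checksum headers
+# (SigV4 requires signed headers in lexicographic order)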
+cr_data=("POST" "/$bucket_name/$key" "uploadId=$upload_id" "host:$host")
+log_rest 5 "Algorithm param: $algorithm_parameter"
+lowercase_algorithm="$(echo -n "$checksum_algorithm" | tr '[:upper:]' '[:lower:]')"
+if [ "$algorithm_parameter" != "false" ]; then
+ cr_data+=("x-amz-checksum-algorithm:${checksum_algorithm}")
+fi
+if [ "$checksum_hash" != "" ]; then
+ cr_data+=("x-amz-checksum-${lowercase_algorithm}:$checksum_hash")
+fi
+if [ "$checksum_type" != "" ]; then
+ cr_data+=("x-amz-checksum-type:$checksum_type")
+fi
+cr_data+=("x-amz-content-sha256:$payload_hash" "x-amz-date:$current_date_time")
+build_canonical_request "${cr_data[@]}"
# shellcheck disable=SC2119
create_canonical_hash_sts_and_signature
-curl_command+=(curl -ks -w "\"%{http_code}\"" -X POST "$AWS_ENDPOINT_URL/$bucket_name/$key?uploadId=$upload_id"
--H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
--H "\"x-amz-content-sha256: $payload_hash\""
--H "\"x-amz-date: $current_date_time\""
--H "\"Content-Type: application/xml\""
--d "\"${payload//\"/\\\"}\""
--o "$OUTPUT_FILE")
+curl_command+=(curl -iks -w "\"%{http_code}\"" -X POST "$AWS_ENDPOINT_URL/$bucket_name/$key?uploadId=$upload_id"
+-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
+curl_command+=(-H "\"Content-Type: application/xml\"")
+curl_command+=("${header_fields[@]}")
+curl_command+=(-d "\"${payload//\"/\\\"}\"")
+curl_command+=(-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1
diff --git a/tests/rest_scripts/create_multipart_upload.sh b/tests/rest_scripts/create_multipart_upload.sh
index 48b568c..6834679 100755
--- a/tests/rest_scripts/create_multipart_upload.sh
+++ b/tests/rest_scripts/create_multipart_upload.sh
@@ -22,30 +22,29 @@ source ./tests/rest_scripts/rest.sh
bucket_name="$BUCKET_NAME"
# shellcheck disable=SC2153
key="$OBJECT_KEY"
-
-# Step 1: generate canonical request hash
+# shellcheck disable=SC2153
+checksum_type="$CHECKSUM_TYPE"
+# shellcheck disable=SC2153
+checksum_algorithm="$CHECKSUM_ALGORITHM"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
-canonical_request="POST
-/$bucket_name/$key
-uploads=
-host:$host
-x-amz-content-sha256:UNSIGNED-PAYLOAD
-x-amz-date:$current_date_time
-
-host;x-amz-content-sha256;x-amz-date
-UNSIGNED-PAYLOAD"
-
-canonical_request_hash="$(echo -n "$canonical_request" | openssl dgst -sha256 | awk '{print $2}')"
+cr_data=("POST" "/$bucket_name/$key" "uploads=" "host:$host")
+if [ "$checksum_algorithm" != "" ]; then
+ cr_data+=("x-amz-checksum-algorithm:$checksum_algorithm")
+fi
+if [ "$checksum_type" != "" ]; then
+ cr_data+=("x-amz-checksum-type:$checksum_type")
+fi
+cr_data+=("x-amz-content-sha256:UNSIGNED-PAYLOAD" "x-amz-date:$current_date_time")
+build_canonical_request "${cr_data[@]}"
# shellcheck disable=SC2119
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" -X POST "$AWS_ENDPOINT_URL/$bucket_name/$key?uploads="
--H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
--H "\"x-amz-content-sha256: UNSIGNED-PAYLOAD\""
--H "\"x-amz-date: $current_date_time\""
--o "$OUTPUT_FILE")
+-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
+curl_command+=("${header_fields[@]}")
+curl_command+=(-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1
diff --git a/tests/rest_scripts/get_object.sh b/tests/rest_scripts/get_object.sh
index 60b11a9..5e7c77b 100755
--- a/tests/rest_scripts/get_object.sh
+++ b/tests/rest_scripts/get_object.sh
@@ -26,6 +26,8 @@ key="$(echo -n "$OBJECT_KEY" | jq -sRr 'split("/") | map(@uri) | join("/")')"
checksum_mode="${CHECKSUM_MODE:=false}"
# shellcheck disable=SC2153
range="$RANGE"
+# shellcheck disable=SC2153
+payload="${PAYLOAD:=UNSIGNED-PAYLOAD}"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
@@ -37,7 +39,7 @@ fi
if [ "$checksum_mode" == "true" ]; then
canonical_request_data+=("x-amz-checksum-mode:ENABLED")
fi
-canonical_request_data+=("x-amz-content-sha256:UNSIGNED-PAYLOAD" "x-amz-date:$current_date_time")
+canonical_request_data+=("x-amz-content-sha256:$payload" "x-amz-date:$current_date_time")
build_canonical_request "${canonical_request_data[@]}"
diff --git a/tests/rest_scripts/put_object.sh b/tests/rest_scripts/put_object.sh
index 8539a27..a4c1038 100755
--- a/tests/rest_scripts/put_object.sh
+++ b/tests/rest_scripts/put_object.sh
@@ -30,12 +30,12 @@ checksum_type="$CHECKSUM_TYPE"
payload="$PAYLOAD"
# shellcheck disable=SC2153
expires="$EXPIRES"
-
# use this parameter to check incorrect checksums
# shellcheck disable=SC2153,SC2154
checksum_hash="$CHECKSUM"
# shellcheck disable=SC2153,SC2154
fake_signature="$SIGNATURE"
+algorithm_parameter="${ALGORITHM_PARAMETER:=false}"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
data_file_esc="$(echo -n "$data_file" | sed -e 's/[][`"$^{}]/\\&/g')"
@@ -51,6 +51,9 @@ if [ -n "$expires" ]; then
cr_data+=("expires:$expires")
fi
cr_data+=("host:$host")
+if [ "$algorithm_parameter" != "false" ]; then
+ cr_data+=("x-amz-checksum-algorithm:${checksum_type}")
+fi
if [ "$checksum_type" != "" ]; then
if [ "$checksum_hash" == "" ] && ! checksum_hash=$(DATA_FILE="$data_file" CHECKSUM_TYPE="$checksum_type" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
log_rest 2 "error calculating checksum hash"
@@ -68,7 +71,6 @@ if [ "$fake_signature" != "" ]; then
signature="$fake_signature"
fi
-
curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "\"$AWS_ENDPOINT_URL/$bucket_name/$key\"")
curl_command+=(-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
curl_command+=("${header_fields[@]}")
diff --git a/tests/rest_scripts/upload_part.go b/tests/rest_scripts/upload_part.go
new file mode 100644
index 0000000..41e7f16
--- /dev/null
+++ b/tests/rest_scripts/upload_part.go
@@ -0,0 +1,7 @@
+package main
+
+import "fmt"
+
+func main() {
+ fmt.Println("main")
+}
diff --git a/tests/rest_scripts/upload_part.sh b/tests/rest_scripts/upload_part.sh
index e8eb6e1..d9c44da 100755
--- a/tests/rest_scripts/upload_part.sh
+++ b/tests/rest_scripts/upload_part.sh
@@ -26,6 +26,10 @@ part_number="$PART_NUMBER"
upload_id="$UPLOAD_ID"
# shellcheck disable=SC2153
data=$DATA_FILE
+# shellcheck disable=SC2153
+checksum_type="$CHECKSUM_TYPE"
+# shellcheck disable=SC2153
+checksum_hash="$CHECKSUM_HASH"
if [ "$data" != "" ]; then
payload_hash="$(sha256sum "$data" | awk '{print $1}')"
@@ -46,7 +50,15 @@ if [ "$upload_id" != "" ]; then
query_params=$(add_parameter "$query_params" "uploadId=$upload_id")
fi
cr_data+=("$query_params")
-cr_data+=("host:$host" "x-amz-content-sha256:$payload_hash" "x-amz-date:$current_date_time")
+cr_data+=("host:$host")
+if [ "$checksum_type" != "" ]; then
+ if [ "$checksum_hash" == "" ] && ! checksum_hash=$(DATA_FILE="$data" CHECKSUM_TYPE="$checksum_type" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
+ log_rest 2 "error calculating checksum hash: $checksum_hash"
+ exit 1
+ fi
+ cr_data+=("x-amz-checksum-${checksum_type}:$checksum_hash")
+fi
+cr_data+=("x-amz-content-sha256:$payload_hash" "x-amz-date:$current_date_time")
build_canonical_request "${cr_data[@]}"
# shellcheck disable=SC2119
diff --git a/tests/setup_static.sh b/tests/setup_static.sh
index 293e0b9..ef4520b 100755
--- a/tests/setup_static.sh
+++ b/tests/setup_static.sh
@@ -14,7 +14,6 @@
# specific language governing permissions and limitations
# under the License.
-source ./tests/drivers/drivers.sh
source ./tests/env.sh
source ./tests/util/util_object.sh
source ./tests/commands/create_bucket.sh
diff --git a/tests/test_common.sh b/tests/test_common.sh
index 8f914b9..eafcdd8 100644
--- a/tests/test_common.sh
+++ b/tests/test_common.sh
@@ -37,7 +37,7 @@ source ./tests/commands/put_bucket_tagging.sh
source ./tests/commands/put_object_tagging.sh
source ./tests/commands/put_object.sh
source ./tests/commands/put_public_access_block.sh
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
# param: command type
# fail on test failure
diff --git a/tests/test_rest.sh b/tests/test_rest.sh
index 8264042..d6f2275 100755
--- a/tests/test_rest.sh
+++ b/tests/test_rest.sh
@@ -459,3 +459,49 @@ test_file="test_file"
run delete_object_rest "$BUCKET_ONE_NAME" "$file_name/$file_name"
assert_success
}
+
+@test "REST - GetObject w/STREAMING-AWS4-HMAC-SHA256-PAYLOAD type" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1352"
+ fi
+ run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+
+ run get_object_rest_with_invalid_streaming_type "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+}
+
+@test "REST - PutObject w/x-amz-checksum-algorithm" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1356"
+ fi
+ run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+
+ run put_object_rest_with_unneeded_algorithm_param "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "crc32c"
+ assert_success
+}
+
+@test "REST - empty message" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1249"
+ fi
+ run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+
+ echo -en "\r\n" > "$TEST_FILE_FOLDER/empty.txt"
+ run send_via_openssl_with_timeout "$TEST_FILE_FOLDER/empty.txt"
+ assert_success
+}
+
+@test "REST - deformed message" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1364"
+ fi
+ run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+
+ echo -en "abcdefg\r\n\r\n" > "$TEST_FILE_FOLDER/deformed.txt"
+ run send_via_openssl_check_code_error_contains "$TEST_FILE_FOLDER/deformed.txt" 400 "BadRequest" "An error occurred when parsing the HTTP request."
+ assert_success
+}
diff --git a/tests/test_rest_acl.sh b/tests/test_rest_acl.sh
index 8e2e38d..8a620eb 100755
--- a/tests/test_rest_acl.sh
+++ b/tests/test_rest_acl.sh
@@ -162,3 +162,18 @@ fi
run put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success
}
+
+@test "REST - invalid canned acl" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1367"
+ fi
+ test_file="test_file"
+ run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+
+ run put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
+ assert_success
+
+ run put_bucket_acl_rest_canned_invalid "$BUCKET_ONE_NAME" "privatee"
+ assert_success
+}
diff --git a/tests/test_rest_chunked.sh b/tests/test_rest_chunked.sh
index 2c7a4c6..2de932b 100755
--- a/tests/test_rest_chunked.sh
+++ b/tests/test_rest_chunked.sh
@@ -216,3 +216,18 @@ source ./tests/util/util_setup.sh
run chunked_upload_trailer_incorrect_checksum "crc64nvme"
assert_success
}
+
+@test "REST chunked upload - smaller chunk size" {
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ test_file="test-file"
+ run create_test_file "$test_file" 200000
+ assert_success
+
+ run chunked_upload_trailer_different_chunk_size "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "sha256"
+ assert_success
+
+ run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
+ assert_success
+}
diff --git a/tests/test_rest_multipart.sh b/tests/test_rest_multipart.sh
index 31a2125..017251c 100755
--- a/tests/test_rest_multipart.sh
+++ b/tests/test_rest_multipart.sh
@@ -18,6 +18,10 @@ load ./bats-support/load
load ./bats-assert/load
source ./tests/setup.sh
+source ./tests/drivers/rest.sh
+source ./tests/drivers/complete_multipart_upload/complete_multipart_upload_rest.sh
+source ./tests/drivers/upload_part/upload_part_rest.sh
+source ./tests/util/util_file.sh
source ./tests/util/util_list_parts.sh
source ./tests/util/util_setup.sh
@@ -129,7 +133,7 @@ test_file="test_file"
run split_file "$TEST_FILE_FOLDER/$test_file" 4
assert_success
- run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
+ run upload_part_rest_without_part_number "$BUCKET_ONE_NAME" "$test_file"
assert_success
}
@@ -143,6 +147,227 @@ test_file="test_file"
run split_file "$TEST_FILE_FOLDER/$test_file" 4
assert_success
- run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
+ run upload_part_rest_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
+ assert_success
+}
+
+@test "REST - multipart w/invalid checksum type" {
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECTS" "" \
+ check_rest_expected_error "400" "InvalidRequest" "Value for x-amz-checksum-type header is invalid"
+ assert_success
+}
+
+@test "REST - multipart w/invalid checksum algorithm" {
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$BUCKET_ONE_NAME" "$test_file" "" "crc64nvm" \
+ check_rest_expected_error "400" "InvalidRequest" "Checksum algorithm provided is unsupported."
+ assert_success
+}
+
+@test "REST - multipart checksum w/crc64nvme, composite" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1329"
+ fi
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "crc64nvme" \
+ check_rest_expected_error "400" "InvalidRequest" "The COMPOSITE checksum type cannot be used with the crc64nvme checksum algorithm."
+ assert_success
+}
+
+@test "REST - multipart checksum w/sha1, full object" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1329"
+ fi
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "sha1" \
+ check_rest_expected_error "400" "InvalidRequest" "The FULL_OBJECT checksum type cannot be used with the sha1 checksum algorithm."
+ assert_success
+}
+
+@test "REST - multipart checksum w/sha256, full object" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1329"
+ fi
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "sha256" \
+ check_rest_expected_error "400" "InvalidRequest" "The FULL_OBJECT checksum type cannot be used with the sha256 checksum algorithm."
+ assert_success
+}
+
+@test "REST - multipart - lowercase checksum type and algorithm" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1339"
+ fi
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm "$BUCKET_ONE_NAME" "$test_file" "full_object" "crc64nvme"
+ assert_success
+}
+
+@test "REST - multipart - full object checksum type doesn't require UploadPart checksums" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1342"
+ fi
+ run setup_bucket "$BUCKET_ONE_NAME"
+ assert_success
+
+ run create_test_file "$test_file" $((5*1024*1024))
+ assert_success
+
+ run create_multipart_upload_rest_with_checksum_type_and_algorithm "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
+ assert_success
+ upload_id=$output
+ log 5 "upload ID: $upload_id"
+
+ run upload_part_rest "$BUCKET_ONE_NAME" "$test_file" "$upload_id" 1 "$TEST_FILE_FOLDER/$test_file"
+ assert_success
+}
+
+@test "REST - multipart - composite - sha256" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA256"
+ assert_success
+}
+
+@test "REST - multipart - composite - sha1" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA1"
+ assert_success
+}
+
+@test "REST - multipart - composite - crc32" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32"
+ assert_success
+}
+
+@test "REST - multipart - composite - crc32c" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - full object - crc32" {
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
+ assert_success
+}
+
+@test "REST - multipart - full object - crc32c" {
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - full object - crc64nvme" {
+ run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC64NVME"
+ assert_success
+}
+
+@test "REST - multipart - x-amz-checksum-algorithm is ignored in CompleteMultipartUpload" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1345"
+ fi
+ run test_complete_multipart_upload_unneeded_algorithm_parameter "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - composite - incorrect sha256" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA256"
+ assert_success
+}
+
+@test "REST - multipart - composite - incorrect sha1" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA1"
+ assert_success
+}
+
+@test "REST - multipart - composite - incorrect crc32" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+  run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32"
+ assert_success
+}
+
+@test "REST - multipart - composite - incorrect crc32c" {
+ if [ "$DIRECT" != "true" ]; then
+ skip "https://github.com/versity/versitygw/issues/1359"
+ fi
+ run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - full object - incorrect crc32" {
+ run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
+ assert_success
+}
+
+@test "REST - multipart - full object - incorrect crc32c" {
+ run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - full object - incorrect crc64nvme" {
+ run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC64NVME"
+ assert_success
+}
+
+@test "REST - multipart - composite - invalid sha1" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA1"
+ assert_success
+}
+
+@test "REST - multipart - composite - invalid sha256" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA256"
+ assert_success
+}
+
+@test "REST - multipart - composite - invalid crc32" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32"
+ assert_success
+}
+
+@test "REST - multipart - composite - invalid crc32c" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - full object - invalid crc32" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
+ assert_success
+}
+
+@test "REST - multipart - full object - invalid crc32c" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
+ assert_success
+}
+
+@test "REST - multipart - full object - invalid crc64nvme" {
+ run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC64NVME"
assert_success
}
diff --git a/tests/test_s3api_object.sh b/tests/test_s3api_object.sh
index 826dfa3..f27d014 100755
--- a/tests/test_s3api_object.sh
+++ b/tests/test_s3api_object.sh
@@ -261,7 +261,7 @@ export RUN_USERS=true
run setup_bucket "$BUCKET_ONE_NAME"
assert_success
- run create_multipart_upload "$BUCKET_ONE_NAME" "test_file/"
+ run create_multipart_upload_s3api "$BUCKET_ONE_NAME" "test_file/"
assert_failure
assert_output -p "Directory object contains data payload"
}
diff --git a/tests/test_s3api_policy_multipart.sh b/tests/test_s3api_policy_multipart.sh
index 7b68c3a..bff2add 100644
--- a/tests/test_s3api_policy_multipart.sh
+++ b/tests/test_s3api_policy_multipart.sh
@@ -40,7 +40,7 @@ test_s3api_policy_abort_multipart_upload() {
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
- run create_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
+ run create_multipart_upload_s3api_with_user "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success
# shellcheck disable=SC2154
upload_id="$output"
@@ -82,7 +82,7 @@ test_s3api_policy_list_multipart_uploads() {
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "$effect" "$principal" "$action" "$resource"
assert_success
- run create_multipart_upload "$BUCKET_ONE_NAME" "$test_file"
+ run create_multipart_upload_s3api "$BUCKET_ONE_NAME" "$test_file"
assert_success
run list_multipart_uploads_with_user "$BUCKET_ONE_NAME" "$username" "$password"
diff --git a/tests/test_user_aws.sh b/tests/test_user_aws.sh
index 50140c4..09f847a 100755
--- a/tests/test_user_aws.sh
+++ b/tests/test_user_aws.sh
@@ -158,12 +158,12 @@ export RUN_USERS=true
run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
assert_success
- run create_multipart_upload_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password"
+ run create_multipart_upload_s3api_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password"
assert_failure
run change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username"
assert_success
- run create_multipart_upload_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password"
+ run create_multipart_upload_s3api_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password"
assert_success
}
diff --git a/tests/util/util_acl.sh b/tests/util/util_acl.sh
index e11db39..7468f65 100644
--- a/tests/util/util_acl.sh
+++ b/tests/util/util_acl.sh
@@ -365,22 +365,6 @@ put_invalid_acl_rest_verify_failure() {
return 0
}
-put_canned_acl_rest() {
- if [ $# -ne 2 ]; then
- log 2 "'put_canned_acl_rest' requires bucket name, canned acl"
- return 1
- fi
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" CANNED_ACL="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/put_bucket_acl.sh); then
- log 2 "error attempting to put bucket acl: $result"
- return 1
- fi
- if [ "$result" != "200" ]; then
- log 2 "response code '$result' (message: $(cat "$TEST_FILE_FOLDER/response.txt"))"
- return 1
- fi
- return 0
-}
-
# param: bucket name
# return 0 for success, 1 for failure
check_ownership_rule_and_reset_acl() {
diff --git a/tests/util/util_bucket.sh b/tests/util/util_bucket.sh
index 7e6dec3..f463be9 100644
--- a/tests/util/util_bucket.sh
+++ b/tests/util/util_bucket.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
source ./tests/drivers/get_object_lock_config/get_object_lock_config_rest.sh
source ./tests/drivers/list_objects/list_objects_rest.sh
source ./tests/util/util_acl.sh
@@ -147,7 +147,7 @@ bucket_cleanup() {
# return 0 for success, 1 for error
bucket_cleanup_if_bucket_exists() {
log 6 "bucket_cleanup_if_bucket_exists"
- if ! check_param_count_gt "bucket_cleanup_if_bucket_exists" "bucket name, bucket known to exist (optional)" 1 $#; then
+ if ! check_param_count_gt "bucket name, bucket known to exist (optional)" 1 $#; then
return 1
fi
@@ -170,7 +170,7 @@ bucket_cleanup_if_bucket_exists() {
# params: client, bucket name(s)
# return 0 for success, 1 for failure
setup_buckets() {
- if ! check_param_count_gt "setup_buckets" "minimum of 1 bucket name" 1 $#; then
+ if ! check_param_count_gt "minimum of 1 bucket name" 1 $#; then
return 1
fi
for name in "$@"; do
diff --git a/tests/util/util_chunked_upload.sh b/tests/util/util_chunked_upload.sh
index 0342fb4..515ddc5 100644
--- a/tests/util/util_chunked_upload.sh
+++ b/tests/util/util_chunked_upload.sh
@@ -1,8 +1,10 @@
#!/usr/bin/env bash
+source ./tests/drivers/rest.sh
+source ./tests/drivers/openssl.sh
+
attempt_seed_signature_without_content_length() {
- if [ "$#" -ne 3 ]; then
- log 2 "'attempt_seed_signature_without_content_length' requires bucket name, key, data file"
+ if ! check_param_count_v2 "bucket, key, data file" 3 $#; then
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" \
@@ -53,7 +55,8 @@ attempt_chunked_upload_with_bad_first_signature() {
response_data="${response_data/---/}"
log 5 "response data: $response_data"
log 5 "END"
- if ! check_xml_element <(echo "$response_data") "SignatureDoesNotMatch" "Error" "Code"; then
+ echo -n "$response_data" > "$TEST_FILE_FOLDER/response_data.txt"
+ if ! check_xml_element "$TEST_FILE_FOLDER/response_data.txt" "SignatureDoesNotMatch" "Error" "Code"; then
log 2 "error checking XML element"
return 1
fi
@@ -106,17 +109,16 @@ attempt_chunked_upload_with_bad_final_signature() {
log 2 "error sending command via openssl"
return 1
fi
- response_code="$(echo "$result" | grep "HTTP" | awk '{print $2}')"
- log 5 "response code: $response_code"
- if [ "$response_code" != "403" ]; then
- log 2 "expected code '403', was '$response_code'"
+ log 5 "response: $result"
+ echo -n "$result" > "$TEST_FILE_FOLDER/result.txt"
+ if ! get_xml_data "$TEST_FILE_FOLDER/result.txt" "$TEST_FILE_FOLDER/error_data.txt"; then
+ log 2 "error parsing XML data from result"
return 1
fi
- response_data="$(echo "$result" | grep "<Error>" | sed 's/---//g')"
- log 5 "response data: $response_data"
- log 5 "END"
- if ! check_xml_element <(echo "$response_data") "SignatureDoesNotMatch" "Error" "Code"; then
- log 2 "error checking XML element"
+ log 5 "xml data: $(cat "$TEST_FILE_FOLDER/error_data.txt")"
+ response_code="$(echo "$result" | grep "HTTP" | awk '{print $2}')"
+ if ! check_rest_expected_error "$response_code" "$TEST_FILE_FOLDER/error_data.txt" "403" "SignatureDoesNotMatch" "does not match"; then
+ log 2 "error checking expected REST error"
return 1
fi
return 0
@@ -278,53 +280,24 @@ chunked_upload_trailer_incorrect_checksum() {
return 0
}
-send_via_openssl() {
- if [ "$#" -ne 1 ]; then
- log 2 "'send_via_openssl' requires command file"
+chunked_upload_trailer_different_chunk_size() {
+ if ! check_param_count_v2 "data file, bucket, key, checksum type" 4 $#; then
return 1
fi
- host="${AWS_ENDPOINT_URL#http*://}"
- if [[ "$host" =~ s3\..*amazonaws\.com ]]; then
- host+=":443"
- fi
- log 5 "connecting to $host"
- if ! result=$(openssl s_client -connect "$host" -ign_eof < "$1" 2>&1); then
- log 2 "error sending openssl command: $result"
+ # shellcheck disable=SC2097,SC2098
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" \
+ AWS_ACCESS_KEY_ID="$AWS_ACCESS_KEY_ID" \
+ AWS_SECRET_ACCESS_KEY="$AWS_SECRET_ACCESS_KEY" \
+ AWS_ENDPOINT_URL="$AWS_ENDPOINT_URL" \
+ DATA_FILE="$1" \
+ BUCKET_NAME="$2" \
+ OBJECT_KEY="$3" CHUNK_SIZE=16384 TEST_MODE=false TRAILER="x-amz-checksum-$4" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" COMMAND_FILE="$TEST_FILE_FOLDER/command.txt" ./tests/rest_scripts/put_object_openssl_chunked_trailer_example.sh 2>&1); then
+ log 2 "error creating command: $result"
return 1
fi
- echo "$result"
-}
-send_via_openssl_and_check_code() {
- if [ "$#" -ne 2 ]; then
- log 2 "'send_via_openssl_and_check_code' requires command file, expected code"
- return 1
- fi
- if ! result=$(send_via_openssl "$1"); then
- log 2 "error sending command via openssl"
- return 1
- fi
- response_code="$(echo "$result" | grep "HTTP/" | awk '{print $2}')"
- if [ "$response_code" != "$2" ]; then
- log 2 "expected '$2', actual '$response_code' (error response: '$result')"
- return 1
- fi
- echo "$result"
-}
-
-send_via_openssl_check_code_error_contains() {
- if [ "$#" -ne 4 ]; then
- log 2 "'send_via_openssl_check_code_error_contains' requires command file, expected code, error, message"
- return 1
- fi
- if ! result=$(send_via_openssl_and_check_code "$1" "$2"); then
- log 2 "error sending and checking code"
- return 1
- fi
- error_data="$(echo "$result" | grep "<Error>" | sed 's/---//')"
- echo -n "$error_data" > "$TEST_FILE_FOLDER/error-data.txt"
- if ! check_xml_error_contains "$TEST_FILE_FOLDER/error-data.txt" "$3" "$4"; then
- log 2 "error checking xml error, message"
+ if ! send_via_openssl_and_check_code "$TEST_FILE_FOLDER/command.txt" 200; then
+ log 2 "error sending command via openssl or checking response code"
return 1
fi
return 0
diff --git a/tests/util/util_delete_object.sh b/tests/util/util_delete_object.sh
index ab2fa4b..d05de52 100644
--- a/tests/util/util_delete_object.sh
+++ b/tests/util/util_delete_object.sh
@@ -27,15 +27,16 @@ delete_object_empty_bucket_check_error() {
log 2 "error getting XML error data: $error"
return 1
fi
- if ! check_xml_element <(echo "$error") "MethodNotAllowed" "Code"; then
+ echo -n "$error" > "$TEST_FILE_FOLDER/error.txt"
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "MethodNotAllowed" "Code"; then
log 2 "Code mismatch"
return 1
fi
- if ! check_xml_element <(echo "$error") "POST" "Method"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "POST" "Method"; then
log 2 "Method mismatch"
return 1
fi
- if ! check_xml_element <(echo "$error") "SERVICE" "ResourceType"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "SERVICE" "ResourceType"; then
log 2 "ResourceType mismatch"
return 1
fi
diff --git a/tests/util/util_file.sh b/tests/util/util_file.sh
index 03502b4..5cf1233 100644
--- a/tests/util/util_file.sh
+++ b/tests/util/util_file.sh
@@ -244,7 +244,7 @@ create_test_file_count() {
}
download_and_compare_file_with_user() {
- if ! check_param_count_gt "download_and_compare_large_file" "original file, bucket, key, destination, username, password, chunk size (optional)" 6 $#; then
+ if ! check_param_count_gt "original file, bucket, key, destination, username, password, chunk size (optional)" 6 $#; then
return 1
fi
if ! download_file_with_user "$5" "$6" "$2" "$3" "$4" "$7"; then
@@ -260,7 +260,7 @@ download_and_compare_file_with_user() {
download_and_compare_file() {
log 6 "download_and_compare_file"
- if ! check_param_count_gt "download_and_compare_file" "original file, bucket, key, destination, chunk size (optional)" 4 $#; then
+ if ! check_param_count_gt "original file, bucket, key, destination, chunk size (optional)" 4 $#; then
return 1
fi
if ! download_and_compare_file_with_user "$1" "$2" "$3" "$4" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$5"; then
diff --git a/tests/util/util_legal_hold.sh b/tests/util/util_legal_hold.sh
index 58446bb..c55de83 100644
--- a/tests/util/util_legal_hold.sh
+++ b/tests/util/util_legal_hold.sh
@@ -69,7 +69,8 @@ check_remove_legal_hold_versions() {
return 0
fi
log 5 "legal hold: $legal_hold"
- if ! status=$(get_element_text <(echo -n "$legal_hold") "LegalHold" "Status"); then
+ echo -n "$legal_hold" > "$TEST_FILE_FOLDER/legal_hold.xml"
+ if ! status=$(get_element_text "$TEST_FILE_FOLDER/legal_hold.xml" "LegalHold" "Status"); then
log 2 "error getting XML legal hold status"
return 1
fi
diff --git a/tests/util/util_list_objects.sh b/tests/util/util_list_objects.sh
index 0a7b82f..3b4d39c 100644
--- a/tests/util/util_list_objects.sh
+++ b/tests/util/util_list_objects.sh
@@ -282,19 +282,20 @@ list_objects_check_params_get_token() {
log 2 "error getting list bucket result: $list_bucket_result"
return 1
fi
- if ! check_xml_element <(echo "$list_bucket_result") "$2" "Key"; then
+ echo -n "$list_bucket_result" > "$TEST_FILE_FOLDER/list_bucket_result.txt"
+ if ! check_xml_element "$TEST_FILE_FOLDER/list_bucket_result.txt" "$2" "Key"; then
log 2 "key mismatch"
return 1
fi
- if ! check_xml_element <(echo "$list_bucket_result") "1" "MaxKeys"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/list_bucket_result.txt" "1" "MaxKeys"; then
log 2 "max keys mismatch"
return 1
fi
- if ! check_xml_element <(echo "$list_bucket_result") "1" "KeyCount"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/list_bucket_result.txt" "1" "KeyCount"; then
log 2 "key count mismatch"
return 1
fi
- if ! check_xml_element <(echo "$list_bucket_result") "true" "IsTruncated"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/list_bucket_result.txt" "true" "IsTruncated"; then
log 2 "key count mismatch"
return 1
fi
diff --git a/tests/util/util_list_parts.sh b/tests/util/util_list_parts.sh
index 833dd44..8f22445 100644
--- a/tests/util/util_list_parts.sh
+++ b/tests/util/util_list_parts.sh
@@ -67,38 +67,34 @@ perform_multipart_upload_rest() {
log 2 "'upload_check_parts' requires bucket, key, part list"
return 1
fi
- if ! create_upload_and_get_id_rest "$1" "$2"; then
- log 2 "error creating upload"
+ if ! upload_id=$(create_multipart_upload_rest "$1" "$2" 2>&1); then
+ log 2 "error creating multipart upload"
return 1
fi
# shellcheck disable=SC2154
- if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 1 "$3"; then
+ if ! etag=$(upload_part_rest "$1" "$2" "$upload_id" 1 "$3" 2>&1); then
log 2 "error uploading part 1"
return 1
fi
# shellcheck disable=SC2154
parts_payload="$etag1"
- if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 2 "$4"; then
- log 2 "error uploading part 2"
+ if ! etag=$(upload_part_rest "$1" "$2" "$upload_id" 2 "$4" 2>&1); then
+ log 2 "error uploading part 2: $etag"
return 1
fi
parts_payload+="$etag2"
- if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 3 "$5"; then
- log 2 "error uploading part 3"
+ if ! etag=$(upload_part_rest "$1" "$2" "$upload_id" 3 "$5" 2>&1); then
+ log 2 "error uploading part 3: $etag"
return 1
fi
parts_payload+="$etag3"
- if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 4 "$6"; then
- log 2 "error uploading part 4"
+ if ! etag=$(upload_part_rest "$1" "$2" "$upload_id" 4 "$6" 2>&1); then
+ log 2 "error uploading part 4: $etag"
return 1
fi
parts_payload+="$etag4"
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$upload_id" PARTS="$parts_payload" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh); then
- log 2 "error completing multipart upload: $result"
- return 1
- fi
- if [ "$result" != "200" ]; then
- log 2 "complete multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ if ! complete_multipart_upload_rest "$1" "$2" "$3" "$4"; then
+ log 2 "error completing multipart upload"
return 1
fi
return 0
@@ -109,7 +105,7 @@ upload_check_parts() {
log 2 "'upload_check_parts' requires bucket, key, part list"
return 1
fi
- if ! create_upload_and_get_id_rest "$1" "$2"; then
+ if ! upload_id=$(create_multipart_upload_rest "$1" "$2" 2>&1); then
log 2 "error creating upload"
return 1
fi
@@ -144,12 +140,8 @@ upload_check_parts() {
return 1
fi
log 5 "PARTS PAYLOAD: $parts_payload"
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$upload_id" PARTS="$parts_payload" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh); then
- log 2 "error completing multipart upload: $result"
- return 1
- fi
- if [ "$result" != "200" ]; then
- log 2 "complete multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ if ! complete_multipart_upload_rest "$1" "$2" "$upload_id" "$parts_payload"; then
+ log 2 "error completing multipart upload"
return 1
fi
return 0
@@ -160,8 +152,8 @@ upload_check_part() {
log 2 "'upload_check_part' requires bucket, key, upload ID, part number, part, etags"
return 1
fi
- if ! upload_part_and_get_etag_rest "$1" "$2" "$3" "$4" "$5"; then
- log 2 "error uploading part $4"
+ if ! etag=$(upload_part_rest "$1" "$2" "$3" "$4" "$5" 2>&1); then
+ log 2 "error uploading part $4: $etag"
return 1
fi
parts_payload+="$etag$4"
diff --git a/tests/util/util_multipart.sh b/tests/util/util_multipart.sh
index 3d464ca..0e52a48 100644
--- a/tests/util/util_multipart.sh
+++ b/tests/util/util_multipart.sh
@@ -60,7 +60,7 @@ multipart_upload_from_bucket() {
fi
}
- if ! create_multipart_upload "$1" "$2-copy"; then
+ if ! create_multipart_upload_rest "$1" "$2-copy"; then
log 2 "error running first multipart upload"
return 1
fi
@@ -106,7 +106,7 @@ multipart_upload_from_bucket_range() {
fi
}
- if ! create_multipart_upload "$1" "$2-copy"; then
+ if ! create_multipart_upload_rest "$1" "$2-copy"; then
log 2 "error running first multpart upload"
return 1
fi
@@ -131,7 +131,7 @@ multipart_upload_from_bucket_range() {
}
multipart_upload_custom() {
- if ! check_param_count_gt "multipart_upload_custom" "bucket, key, file, part count, optional additional parameters" 4 $$; then
+ if ! check_param_count_gt "bucket, key, file, part count, optional additional parameters" 4 $$; then
return 1
fi
@@ -221,7 +221,7 @@ create_upload_part_copy_rest() {
log 2 "error splitting and putting file"
return 1
fi
- if ! create_upload_and_get_id_rest "$1" "$2"; then
+ if ! create_multipart_upload_rest "$1" "$2"; then
log 2 "error creating upload and getting ID"
return 1
fi
@@ -244,12 +244,8 @@ create_upload_part_copy_rest() {
fi
parts_payload+="$etag$part_number"
done
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$upload_id" PARTS="$parts_payload" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh); then
- log 2 "error completing multipart upload: $result"
- return 1
- fi
- if [ "$result" != "200" ]; then
- log 2 "complete multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
+ if ! complete_multipart_upload_rest "$1" "$2" "$upload_id" "$parts_payload"; then
+ log 2 "error completing multipart upload"
return 1
fi
return 0
@@ -262,7 +258,7 @@ create_upload_finish_wrong_etag() {
etag="gibberish"
part_number=1
- if ! create_upload_and_get_id_rest "$1" "$2"; then
+ if ! create_multipart_upload_rest "$1" "$2"; then
log 2 "error creating upload and getting ID"
return 1
fi
@@ -279,19 +275,20 @@ create_upload_finish_wrong_etag() {
log 2 "error retrieving error info: $error"
return 1
fi
- if ! check_xml_element <(echo "$error") "InvalidPart" "Code"; then
+ echo -n "$error" > "$TEST_FILE_FOLDER/error.txt"
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "InvalidPart" "Code"; then
log 2 "code mismatch"
return 1
fi
- if ! check_xml_element <(echo "$error") "$upload_id" "UploadId"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "$upload_id" "UploadId"; then
log 2 "upload ID mismatch"
return 1
fi
- if ! check_xml_element <(echo "$error") "$part_number" "PartNumber"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "$part_number" "PartNumber"; then
log 2 "part number mismatch"
return 1
fi
- if ! check_xml_element <(echo "$error") "$etag" "ETag"; then
+ if ! check_xml_element "$TEST_FILE_FOLDER/error.txt" "$etag" "ETag"; then
log 2 "etag mismatch"
return 1
fi
diff --git a/tests/util/util_multipart_abort.sh b/tests/util/util_multipart_abort.sh
index fa6cf41..b901c3b 100644
--- a/tests/util/util_multipart_abort.sh
+++ b/tests/util/util_multipart_abort.sh
@@ -43,7 +43,7 @@ create_abort_multipart_upload_rest() {
return 1
fi
log 5 "uploads before upload: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
- if ! create_upload_and_get_id_rest "$1" "$2"; then
+ if ! create_multipart_upload_rest "$1" "$2"; then
log 2 "error creating upload"
return 1
fi
diff --git a/tests/util/util_multipart_before_completion.sh b/tests/util/util_multipart_before_completion.sh
index ced8701..330e51b 100644
--- a/tests/util/util_multipart_before_completion.sh
+++ b/tests/util/util_multipart_before_completion.sh
@@ -200,12 +200,12 @@ create_and_list_multipart_uploads() {
return 1
fi
- if ! create_multipart_upload "$1" "$2"; then
+ if ! create_multipart_upload_rest "$1" "$2"; then
log 2 "error creating multpart upload"
return 1
fi
- if ! create_multipart_upload "$1" "$3"; then
+ if ! create_multipart_upload_rest "$1" "$3"; then
log 2 "error creating multpart upload two"
return 1
fi
@@ -243,7 +243,7 @@ multipart_upload_before_completion_with_user() {
return 1
fi
- if ! create_multipart_upload_with_user "$1" "$2" "$5" "$6"; then
+ if ! create_multipart_upload_s3api_with_user "$1" "$2" "$5" "$6"; then
log 2 "error creating multpart upload"
return 1
fi
@@ -276,7 +276,7 @@ multipart_upload_before_completion_with_params() {
return 1
fi
- if ! create_multipart_upload_params "$1" "$2" "$5" "$6" "$7" "$8" "$9" "${10}"; then
+ if ! create_multipart_upload_s3api_params "$1" "$2" "$5" "$6" "$7" "$8" "$9" "${10}"; then
log 2 "error creating multpart upload"
return 1
fi
@@ -331,31 +331,8 @@ multipart_upload_before_completion_custom() {
export parts
}
-create_upload_and_get_id_rest() {
- if [ $# -ne 2 ]; then
- log 2 "'create_upload_and_get_id_rest' requires bucket, key"
- return 1
- fi
- if ! result=$(COMMAND_LOG=$COMMAND_LOG BUCKET_NAME=$1 OBJECT_KEY=$2 OUTPUT_FILE="$TEST_FILE_FOLDER/output.txt" ./tests/rest_scripts/create_multipart_upload.sh); then
- log 2 "error creating multipart upload: $result"
- return 1
- fi
- if [ "$result" != "200" ]; then
- log 2 "error: response code: $result, output: $(cat "$TEST_FILE_FOLDER/output.txt")"
- return 1
- fi
- log 5 "multipart upload create info: $(cat "$TEST_FILE_FOLDER/output.txt")"
- if ! upload_id=$(xmllint --xpath '//*[local-name()="UploadId"]/text()' "$TEST_FILE_FOLDER/output.txt" 2>&1); then
- log 2 "error getting upload ID: $upload_id"
- return 1
- fi
- log 5 "upload ID: $upload_id"
- return 0
-}
-
multipart_upload_range_too_large() {
- if [ $# -ne 3 ]; then
- log 2 "'multipart_upload_range_too_large' requires bucket name, key, file location"
+ if ! check_param_count_v2 "bucket, key, file location" 3 $#; then
return 1
fi
if multipart_upload_from_bucket_range "$1" "$2" "$3" 4 "bytes=0-1000000000"; then
@@ -462,16 +439,10 @@ upload_part_check_etag_header() {
log 2 "'upload_part_check_etag_header' requires bucket name, key, upload ID"
return 1
fi
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" PART_NUMBER="1" UPLOAD_ID="$3" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
- # shellcheck disable=SC2154
- log 2 "error uploading part $i: $result"
+ if ! etag=$(upload_part_rest "$1" "$2" "$3" 1 2>&1); then
+ log 2 "error getting etag: $etag"
return 1
fi
- if [ "$result" != "200" ]; then
- log 2 "expected '200', was '$result'"
- return 1
- fi
- etag="$(grep -i "ETag: " "$TEST_FILE_FOLDER/response.txt" | awk '{print $2}' | tr -d '\r')"
if ! [[ "$etag" =~ ^\"[0-9a-f]+\" ]]; then
log 2 "etag pattern mismatch, etag ($etag) should be hex string surrounded by quotes"
return 1
@@ -484,8 +455,8 @@ upload_part_copy_check_etag_header() {
log 2 "'upload_part_copy_check_etag_header' requires bucket, destination file, part location"
return 1
fi
- if ! create_upload_and_get_id_rest "$1" "$2"; then
- log 2 "error creating upload and getting ID"
+ if ! upload_id=$(create_multipart_upload_rest "$1" "$2" 2>&1); then
+ log 2 "error creating upload and getting ID: $upload_id"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" PART_NUMBER="1" UPLOAD_ID="$upload_id" PART_LOCATION="$3" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part_copy.sh); then
@@ -535,7 +506,7 @@ upload_part_without_upload_id() {
log 2 "error creating multpart upload"
return 1
fi
- if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" DATA_FILE="$TEST_FILE_FOLDER/$2" PART_NUMBER="1" UPLOAD_ID="" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh); then
+ if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" DATA_FILE="$TEST_FILE_FOLDER/$2" PART_NUMBER="1" UPLOAD_ID="" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/upload_part.sh 2>&1); then
# shellcheck disable=SC2154
log 2 "error uploading part $i: $result"
return 1
@@ -545,4 +516,4 @@ upload_part_without_upload_id() {
return 1
fi
return 0
-}
\ No newline at end of file
+}
diff --git a/tests/util/util_object.sh b/tests/util/util_object.sh
index 2d6a268..597a637 100644
--- a/tests/util/util_object.sh
+++ b/tests/util/util_object.sh
@@ -404,7 +404,7 @@ put_object_rest_check_expires_header() {
}
download_file_with_user() {
- if ! check_param_count_gt "download_large_file" "username, password, bucket, key, destination, chunk size (optional)" 5 $#; then
+ if ! check_param_count_gt "username, password, bucket, key, destination, chunk size (optional)" 5 $#; then
return 1
fi
if ! file_size=$(get_object_size_with_user "$1" "$2" "$3" "$4" 2>&1); then
diff --git a/tests/util/util_rest.sh b/tests/util/util_rest.sh
index 8c31e2e..d3d2b18 100644
--- a/tests/util/util_rest.sh
+++ b/tests/util/util_rest.sh
@@ -1,9 +1,12 @@
#!/usr/bin/env bash
parse_bucket_list() {
+ if ! check_param_count_v2 "data file" 1 $#; then
+ return 1
+ fi
# shellcheck disable=SC2154
- log 5 "bucket list: $(cat "$TEST_FILE_FOLDER/buckets.txt")"
- bucket_list=$(xmllint --xpath '//*[local-name()="Bucket"]/*[local-name()="Name"]/text()' "$TEST_FILE_FOLDER/buckets.txt")
+ log 5 "bucket list: $(cat "$1")"
+ bucket_list=$(xmllint --xpath '//*[local-name()="Bucket"]/*[local-name()="Name"]/text()' "$1")
bucket_array=()
while read -r bucket; do
bucket_array+=("$bucket")
diff --git a/tests/util/util_retention.sh b/tests/util/util_retention.sh
index e9c8033..93cc3c9 100644
--- a/tests/util/util_retention.sh
+++ b/tests/util/util_retention.sh
@@ -14,7 +14,7 @@
# specific language governing permissions and limitations
# under the License.
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
# params: bucket name
# return 0 for success, 1 for error
diff --git a/tests/util/util_setup.sh b/tests/util/util_setup.sh
index a06ae5b..3656ea9 100644
--- a/tests/util/util_setup.sh
+++ b/tests/util/util_setup.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
setup_bucket_and_file() {
if ! check_param_count "setup_bucket_and_file" "bucket, file name" 2 $#; then
@@ -14,7 +14,7 @@ setup_bucket_and_file() {
}
setup_bucket_and_files() {
- if ! check_param_count_gt "setup_bucket_and_files" "bucket, file name" 2 $#; then
+ if ! check_param_count_gt "bucket, file name" 2 $#; then
return 1
fi
if ! setup_bucket "$1"; then
diff --git a/tests/util/util_versioning.sh b/tests/util/util_versioning.sh
index 4f9c0cc..fa6f6d7 100644
--- a/tests/util/util_versioning.sh
+++ b/tests/util/util_versioning.sh
@@ -2,7 +2,7 @@
source ./tests/commands/get_bucket_versioning.sh
source ./tests/commands/list_object_versions.sh
-source ./tests/drivers/drivers.sh
+source ./tests/drivers/params.sh
check_if_versioning_enabled() {
if ! check_param_count "check_if_versioning_enabled" "bucket" 1 $#; then
@@ -203,7 +203,7 @@ parse_versions_rest() {
}
get_and_check_versions_rest() {
- if ! check_param_count_gt "get_and_check_versions_rest" "bucket, key, count, expected islatest, expected id equal to null" 5 $#; then
+ if ! check_param_count_gt "bucket, key, count, expected islatest, expected id equal to null" 5 $#; then
return 1
fi
if ! list_object_versions_rest "$1"; then
diff --git a/tests/util/util_xml.sh b/tests/util/util_xml.sh
index 77da803..cae5ac5 100644
--- a/tests/util/util_xml.sh
+++ b/tests/util/util_xml.sh
@@ -1,16 +1,45 @@
#!/usr/bin/env bash
-get_element_text() {
- if [ $# -lt 2 ]; then
- log 2 "'get_element_text' requires data source, XML tree"
+build_xpath_string() {
+ if ! check_param_count_gt "XML tree" 1 $#; then
return 1
fi
- local xpath='//'
- for tree_val in "${@:2}"; do
+ xpath='//'
+ for tree_val in "$@"; do
xpath+='*[local-name()="'$tree_val'"]/'
done
xpath+='text()'
- if ! xml_val=$(xmllint --xpath "$xpath" "$1" 2>&1); then
+}
+
+check_for_empty_element() {
+ if ! check_param_count_gt "data file, XML tree" 2 $#; then
+ return 1
+ fi
+
+ # shellcheck disable=SC2068
+ if ! build_xpath_string ${@:2}; then
+ log 2 "error building XPath search string"
+ return 1
+ fi
+ if grep '<[^/][^ >]*>' "$1" | xmllint --xpath "${xpath}[not(normalize-space())]" -; then
+ return 0
+ fi
+ return 1
+}
+
+get_element_text() {
+ if [ $# -lt 2 ]; then
+ log 2 "'get_element_text' requires data file, XML tree"
+ return 1
+ fi
+
+ if ! build_xpath_string "${@:2}"; then
+ log 2 "error building XPath search string"
+ return 1
+ fi
+
+ log 5 "data: $(cat "$1")"
+ if ! xml_val=$(grep '<[^/][^ >]*>' "$1" | xmllint --xpath "$xpath" - 2>&1); then
log 2 "error getting XML value matching $xpath: $xml_val (file data: $(cat "$1"))"
return 1
fi
@@ -38,9 +67,17 @@ check_xml_element_contains() {
log 2 "'check_xml_element_contains' requires data source, expected value, XML tree"
return 1
fi
- if ! xml_val=$(get_element_text "$1" "${@:3}"); then
- log 2 "error getting element text"
- return 1
+ if [ "$2" == "" ]; then
+ if ! check_for_empty_element "$1" "${@:3}"; then
+ log 2 "Message value not empty"
+ return 1
+ fi
+ return 0
+ else
+ if ! xml_val=$(get_element_text "$1" "${@:3}"); then
+ log 2 "error getting element text"
+ return 1
+ fi
fi
if [[ "$xml_val" != *"$2"* ]]; then
log 2 "XML data mismatch, expected '$2', actual '$xml_val'"
@@ -64,3 +101,33 @@ check_xml_error_contains() {
fi
return 0
}
+
+get_xml_data() {
+ if ! check_param_count_v2 "data file, output file" 2 $#; then
+ return 1
+ fi
+ log 5 "data: $(cat "$1")"
+
+ # Find first line with "<?xml" and write from there to the end into "$2"
+ sed -n '/<?xml/,$p' "$1" > "$2"
+ log 5 "xml data after start: $(cat "$2")"
+
+ # Try to extract valid XML using xmllint recover mode
+ # This will truncate anything after the root closing tag
+ truncated=$(xmllint --recover --noent --nocdata "$2" 2>/dev/null |
+ awk 'BEGIN{xml=0}
+ /<\?xml/{xml=1}
+ {if (xml) print}
+ /<\/[^>]+>/{lastline=NR}
+ END{exit}')
+ echo -n "$truncated" > "$2"
+}