diff --git a/tests/README.md b/tests/README.md index b9b86d0..bdd0ed1 100644 --- a/tests/README.md +++ b/tests/README.md @@ -183,6 +183,8 @@ A single instance can be run with `docker-compose -f docker-compose-bats.yml up **SKIP_ACL_TESTING**: avoid ACL tests for systems which do not use ACLs +**MAX_FILE_DOWNLOAD_CHUNK_SIZE**: when set, will divide the download of large files with GetObject into chunks of the given size. Useful for direct testing with slower connections. + ## REST Scripts REST scripts are included for calls to S3's REST API in the `./tests/rest_scripts/` folder. To call a script, the following parameters are needed: diff --git a/tests/commands/delete_object.sh b/tests/commands/delete_object.sh index 68d7364..7475463 100644 --- a/tests/commands/delete_object.sh +++ b/tests/commands/delete_object.sh @@ -45,6 +45,24 @@ delete_object() { return 0 } +# shellcheck disable=SC2317 +delete_object_rest() { + if [ $# -ne 2 ]; then + log 2 "'delete_object_rest' requires bucket name, object name" + return 1 + fi + if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/delete_object.sh 2>&1); then + log 2 "error deleting object: $result" + return 1 + fi + if [ "$result" != "204" ]; then + delete_object_error=$(cat "$TEST_FILE_FOLDER/result.txt") + log 2 "expected '204', was '$result' ($delete_object_error)" + return 1 + fi + return 0 +} + delete_object_bypass_retention() { if ! 
check_param_count "delete_object_bypass_retention" "client, bucket, key, user, password" 5 $#; then return 1 diff --git a/tests/env.sh b/tests/env.sh index d828d30..e2d096f 100644 --- a/tests/env.sh +++ b/tests/env.sh @@ -137,6 +137,9 @@ check_universal_vars() { fi export DIRECT_POST_COMMAND_DELAY fi + if [ -n "$MAX_FILE_DOWNLOAD_CHUNK_SIZE" ]; then + export MAX_FILE_DOWNLOAD_CHUNK_SIZE + fi check_aws_vars diff --git a/tests/logger.sh b/tests/logger.sh index e6a0d77..84ab8c1 100644 --- a/tests/logger.sh +++ b/tests/logger.sh @@ -43,6 +43,18 @@ log_with_stack_ref() { if ! check_log_params "log_with_stack_ref" "level, message, stack reference" 3 $#; then return 1 fi + if ! log_with_stack_ref "$1" "$2" 2; then + echo "error logging with stack ref" + return 1 + fi + return 0 +} + +log_with_stack_ref() { + if [[ $# -ne 3 ]]; then + echo "log_with_stack_ref function requires level, message, stack reference" + return 1 + fi # shellcheck disable=SC2153 if [[ $1 -gt ${LOG_LEVEL_INT:=4} ]]; then return 0 diff --git a/tests/rest_scripts/get_object.sh b/tests/rest_scripts/get_object.sh index 795d86e..639dd77 100755 --- a/tests/rest_scripts/get_object.sh +++ b/tests/rest_scripts/get_object.sh @@ -20,15 +20,20 @@ source ./tests/rest_scripts/rest.sh # shellcheck disable=SC2153 bucket_name="$BUCKET_NAME" -# shellcheck disable=SC2154 +# shellcheck disable=SC2153 key="$OBJECT_KEY" -# shellcheck disable=SC2154 +# shellcheck disable=SC2153 checksum_mode="${CHECKSUM_MODE:=false}" +# shellcheck disable=SC2153 +range="$RANGE" current_date_time=$(date -u +"%Y%m%dT%H%M%SZ") #x-amz-object-attributes:ETag canonical_request_data+=("GET" "/$bucket_name/$key" "" "host:$host") +if [ "$range" != "" ]; then + canonical_request_data+=("range:$range") +fi if [ "$checksum_mode" == "true" ]; then canonical_request_data+=("x-amz-checksum-mode:ENABLED") fi diff --git a/tests/rest_scripts/list_objects.sh b/tests/rest_scripts/list_objects.sh index 416f820..b65b48c 100755 --- 
a/tests/rest_scripts/list_objects.sh +++ b/tests/rest_scripts/list_objects.sh @@ -54,6 +54,7 @@ fi # shellcheck disable=SC2119 create_canonical_hash_sts_and_signature +log_rest 5 "cr data: $canonical_request" curl_command+=(curl -ks -w "\"%{http_code}\"") url="'$AWS_ENDPOINT_URL/$bucket_name" diff --git a/tests/test_common.sh b/tests/test_common.sh index 446aad5..642b7dd 100644 --- a/tests/test_common.sh +++ b/tests/test_common.sh @@ -62,7 +62,7 @@ test_common_multipart_upload() { assert_success fi - run download_and_compare_file "$1" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" assert_success } @@ -115,7 +115,7 @@ test_common_copy_object() { run copy_object "$1" "$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" assert_success fi - run download_and_compare_file "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_TWO_NAME" "$object_name" "$TEST_FILE_FOLDER/$object_name-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$object_name" "$BUCKET_TWO_NAME" "$object_name" "$TEST_FILE_FOLDER/$object_name-copy" assert_success } @@ -168,7 +168,7 @@ test_common_put_object() { assert_success fi - run download_and_compare_file "$1" "$TEST_FILE_FOLDER/$2" "$BUCKET_ONE_NAME" "$2" "$TEST_FILE_FOLDER/${2}-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$2" "$BUCKET_ONE_NAME" "$2" "$TEST_FILE_FOLDER/${2}-copy" assert_success run delete_object "$1" "$BUCKET_ONE_NAME" "$2" @@ -196,7 +196,10 @@ test_common_put_get_object() { run object_exists "$1" "$BUCKET_ONE_NAME" "$object_name" assert_success - run download_and_compare_file "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name" "$TEST_FILE_FOLDER/${2}-copy" + run get_object "$1" "$BUCKET_ONE_NAME" "$object_name" "$TEST_FILE_FOLDER/${object_name}-copy" + assert_success + + run compare_files 
"$TEST_FILE_FOLDER/$object_name" "$TEST_FILE_FOLDER/${object_name}-copy" assert_success } diff --git a/tests/test_rest.sh b/tests/test_rest.sh index 17edb16..0f0a1ba 100755 --- a/tests/test_rest.sh +++ b/tests/test_rest.sh @@ -74,7 +74,7 @@ test_file="test_file" run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" assert_success - run download_and_compare_file "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success run delete_object "rest" "$BUCKET_ONE_NAME" "$test_file" @@ -164,7 +164,7 @@ test_file="test_file" "$TEST_FILE_FOLDER/$test_file-0" "$TEST_FILE_FOLDER/$test_file-1" "$TEST_FILE_FOLDER/$test_file-2" "$TEST_FILE_FOLDER/$test_file-3" assert_success - run download_and_compare_file "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } @@ -300,7 +300,7 @@ test_file="test_file" run create_upload_part_copy_rest "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file" assert_success - run download_and_compare_file "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } @@ -643,3 +643,14 @@ test_file="test_file" run copy_object_copy_source_and_payload "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file" assert_success } + +@test "REST - range download and compare" { + run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file" + assert_success + + run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" 
+ assert_success + + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" 2000000 + assert_success +} diff --git a/tests/test_rest_chunked.sh b/tests/test_rest_chunked.sh index 52e8317..9b1284f 100755 --- a/tests/test_rest_chunked.sh +++ b/tests/test_rest_chunked.sh @@ -72,7 +72,7 @@ source ./tests/util/util_setup.sh run chunked_upload_success "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } @@ -87,7 +87,7 @@ source ./tests/util/util_setup.sh run chunked_upload_success "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } @@ -102,7 +102,7 @@ source ./tests/util/util_setup.sh run chunked_upload_success "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } @@ -117,7 +117,7 @@ source ./tests/util/util_setup.sh run chunked_upload_success "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run 
download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } diff --git a/tests/test_s3api_multipart.sh b/tests/test_s3api_multipart.sh index 2cf7e26..4fa2809 100755 --- a/tests/test_s3api_multipart.sh +++ b/tests/test_s3api_multipart.sh @@ -60,7 +60,7 @@ export RUN_USERS=true run multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4 assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" assert_success } @@ -102,7 +102,7 @@ export RUN_USERS=true run get_and_check_legal_hold "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "OFF" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" assert_success } @@ -117,7 +117,7 @@ export RUN_USERS=true run multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4 assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "${bucket_file}-copy" "$TEST_FILE_FOLDER/$bucket_file-copy-two" + run download_and_compare_file "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "${bucket_file}-copy" "$TEST_FILE_FOLDER/$bucket_file-copy-two" assert_success } diff --git a/tests/test_s3api_policy_object.sh b/tests/test_s3api_policy_object.sh index f8f7e47..70763a2 100644 --- a/tests/test_s3api_policy_object.sh +++ b/tests/test_s3api_policy_object.sh @@ -160,10 +160,10 @@ test_s3api_policy_get_object_file_wildcard() { run put_object "s3api" 
"$TEST_FILE_FOLDER/$policy_file_three" "$BUCKET_ONE_NAME" "$policy_file_three" assert_success - run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$policy_file" "$BUCKET_ONE_NAME" "$policy_file" "$TEST_FILE_FOLDER/$policy_file-copy" "$username" "$password" + run download_and_compare_file_with_user "$TEST_FILE_FOLDER/$policy_file" "$BUCKET_ONE_NAME" "$policy_file" "$TEST_FILE_FOLDER/$policy_file-copy" "$username" "$password" assert_success - run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$policy_file_two" "$BUCKET_ONE_NAME" "$policy_file_two" "$TEST_FILE_FOLDER/$policy_file_two-copy" "$username" "$password" + run download_and_compare_file_with_user "$TEST_FILE_FOLDER/$policy_file_two" "$BUCKET_ONE_NAME" "$policy_file_two" "$TEST_FILE_FOLDER/$policy_file_two-copy" "$username" "$password" assert_success run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$policy_file_three" "$TEST_FILE_FOLDER/$policy_file_three" "$username" "$password" @@ -201,7 +201,7 @@ test_s3api_policy_get_object_folder_wildcard() { run put_object "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" assert_success - run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" + run download_and_compare_file_with_user "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" assert_success } @@ -236,7 +236,7 @@ test_s3api_policy_get_object_specific_file() { run put_object "s3api" "$TEST_FILE_FOLDER/$test_file_two" "$BUCKET_ONE_NAME" "$test_file_two" assert_success - run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" + run download_and_compare_file_with_user 
"$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" assert_success run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$TEST_FILE_FOLDER/$test_file_two-copy" "$username" "$password" @@ -272,7 +272,7 @@ test_s3api_policy_get_object_with_user() { run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" assert_success - run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" + run download_and_compare_file_with_user "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" assert_success } @@ -332,7 +332,7 @@ test_s3api_policy_put_wildcard() { run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$test_folder/$test_file-copy" "$username" "$password" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" assert_success } diff --git a/tests/test_s3api_root_inner.sh b/tests/test_s3api_root_inner.sh index 47c1088..7a5828e 100755 --- a/tests/test_s3api_root_inner.sh +++ b/tests/test_s3api_root_inner.sh @@ -90,7 +90,7 @@ test_put_object_s3api_root() { run copy_object "s3api" "$BUCKET_ONE_NAME/$bucket_file" "$BUCKET_TWO_NAME" "$bucket_file" assert_success - run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/${bucket_file}_copy" + run download_and_compare_file "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/${bucket_file}_copy" assert_success } diff --git 
a/tests/util/util_chunked_upload.sh b/tests/util/util_chunked_upload.sh index b650d70..3921e37 100644 --- a/tests/util/util_chunked_upload.sh +++ b/tests/util/util_chunked_upload.sh @@ -199,7 +199,7 @@ chunked_upload_trailer_success() { log 2 "error performing chunked upload w/trailer" return 1 fi - if ! download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"; then + if ! download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"; then log 2 "error downloading and comparing file" return 1 fi diff --git a/tests/util/util_file.sh b/tests/util/util_file.sh index 80d5f7f..fb0b1cf 100644 --- a/tests/util/util_file.sh +++ b/tests/util/util_file.sh @@ -229,34 +229,28 @@ create_test_file_count() { return 0 } -download_and_compare_file() { - log 6 "download_and_compare_file" - if [[ $# -ne 5 ]]; then - log 2 "'download and compare file' requires command type, original file, bucket, key, local file" +download_and_compare_file_with_user() { + if ! check_param_count_gt "download_and_compare_file_with_user" "original file, bucket, key, destination, username, password, chunk size (optional)" 6 $#; then return 1 fi - download_and_compare_file_with_user "$1" "$2" "$3" "$4" "$5" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" - return "$?" + if ! download_file_with_user "$5" "$6" "$2" "$3" "$4" "$7"; then + log 2 "error downloading file" + return 1 + fi + if ! compare_files "$1" "$4"; then + log 2 "files don't match" + return 1 + fi + return 0 } -download_and_compare_file_with_user() { - log 6 "download_and_compare_file_with_user" - if [[ $# -ne 7 ]]; then - log 2 "'download and compare file with user' command requires command type, original file, bucket, key, local file, user, password" +download_and_compare_file() { + log 6 "download_and_compare_file" + if !
check_param_count_gt "download_and_compare_file" "original file, bucket, key, destination, chunk size (optional)" 4 $#; then return 1 fi - if ! get_object_with_user "$1" "$3" "$4" "$5" "$6" "$7"; then - log 2 "error retrieving file" - return 1 - fi - log 5 "files: $2, $5" - #if [ "$1" == 'mc' ]; then - # file_to_compare="$5/$(basename "$2")" - #else - file_to_compare="$5" - #fi - if ! compare_files "$2" "$file_to_compare"; then - log 2 "files don't match" + if ! download_and_compare_file_with_user "$1" "$2" "$3" "$4" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$5"; then + log 2 "error downloading and comparing file with user" return 1 fi return 0 diff --git a/tests/util/util_head_object.sh b/tests/util/util_head_object.sh index 03d1bb9..d4cbae2 100644 --- a/tests/util/util_head_object.sh +++ b/tests/util/util_head_object.sh @@ -219,3 +219,21 @@ check_default_checksum() { fi return 0 } + +get_object_size_with_user() { + if ! check_param_count "get_object_size_with_user" "username, password, bucket, key" 4 $#; then + return 1 + fi + if ! 
result=$(AWS_ACCESS_KEY_ID="$1" AWS_SECRET_ACCESS_KEY="$2" COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$3" OBJECT_KEY="$4" OUTPUT_FILE="$TEST_FILE_FOLDER/head_object.txt" ./tests/rest_scripts/head_object.sh 2>&1); then + log 2 "error attempting to get object info: $result" + return 1 + fi + if [ "$result" != "200" ]; then + log 2 "response code '$result', data: $(cat "$TEST_FILE_FOLDER/head_object.txt")" + return 1 + fi + log 5 "head object data: $(cat "$TEST_FILE_FOLDER/head_object.txt")" + content_length=$(grep "Content-Length:" "$TEST_FILE_FOLDER/head_object.txt" | awk '{print $2}' | tr -d '\r') + log 5 "file size: $content_length" + echo "$content_length" +} diff --git a/tests/util/util_legal_hold.sh b/tests/util/util_legal_hold.sh index 58446bb..0a6706f 100644 --- a/tests/util/util_legal_hold.sh +++ b/tests/util/util_legal_hold.sh @@ -73,6 +73,10 @@ check_remove_legal_hold_versions() { log 2 "error getting XML legal hold status" return 1 fi + #if ! status="$(echo "$legal_hold" | grep -v "InsecureRequestWarning" | jq -r '.LegalHold.Status' 2>&1)"; then + # log 2 "error getting legal hold status: $status" + # return 1 + #fi if [ "$status" == "ON" ]; then if ! put_object_legal_hold_rest_version_id "$1" "$2" "$3" "OFF"; then log 2 "error removing legal hold of version ID" diff --git a/tests/util/util_list_parts.sh b/tests/util/util_list_parts.sh index 2982abe..833dd44 100644 --- a/tests/util/util_list_parts.sh +++ b/tests/util/util_list_parts.sh @@ -118,6 +118,7 @@ upload_check_parts() { log 2 "error checking part list before part upload" return 1 fi + sleep 5 parts_payload="" if ! 
upload_check_part "$1" "$2" "$upload_id" 1 "$3"; then log 2 "error uploading and checking first part" fi diff --git a/tests/util/util_object.sh b/tests/util/util_object.sh index 4a8b18d..cf7dfaf 100644 --- a/tests/util/util_object.sh +++ b/tests/util/util_object.sh @@ -16,6 +16,7 @@ source ./tests/util/util_bucket.sh source ./tests/util/util_create_bucket.sh +source ./tests/util/util_head_object.sh source ./tests/util/util_mc.sh source ./tests/util/util_multipart.sh source ./tests/util/util_versioning.sh @@ -447,3 +448,67 @@ put_object_rest_check_expires_header() { fi return 0 } + +download_file_with_user() { + if ! check_param_count_gt "download_file_with_user" "username, password, bucket, key, destination, chunk size (optional)" 5 $#; then + return 1 + fi + if ! file_size=$(get_object_size_with_user "$1" "$2" "$3" "$4" 2>&1); then + log 2 "error getting object size: $file_size" + return 1 + fi + if [ "$6" != "" ]; then + chunk_size="$6" + elif [ "$MAX_FILE_DOWNLOAD_CHUNK_SIZE" != "" ]; then + chunk_size="$MAX_FILE_DOWNLOAD_CHUNK_SIZE" + else + chunk_size="$file_size" + fi + if [ "$file_size" -le "$chunk_size" ]; then + if ! get_object_rest_with_user "$1" "$2" "$3" "$4" "$5"; then + log 2 "error downloading file" + return 1 + fi + else + if ! get_object_with_ranged_download "$1" "$2" "$3" "$4" "$5" "$file_size" "$chunk_size"; then + log 2 "error downloading object" + return 1 + fi + fi + return 0 +} + +get_object_with_ranged_download() { + if ! check_param_count "get_object_with_ranged_download" "username, password, bucket, key, destination, file size, chunk size" 7 $#; then + return 1 + fi + number_of_chunks=$(($6/$7)) + log 5 "number of chunks: $number_of_chunks" + if !
result=$(truncate -s "$6" "$5" 2>&1); then + log 2 "error allocating file space: $result" + return 1 + fi + + file_byte_idx=0 + while [ $file_byte_idx -lt "$6" ]; do + last_byte=$((file_byte_idx + $7 - 1)) + [ $last_byte -ge "$6" ] && last_byte=$(($6 - 1)) + range_value="bytes=${file_byte_idx}-${last_byte}" + log 5 "downloading part of file, range $range_value" + + if ! result=$(AWS_ACCESS_KEY_ID="$1" AWS_SECRET_ACCESS_KEY="$2" COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$3" OBJECT_KEY="$4" RANGE="$range_value" OUTPUT_FILE="$5.tmp" ./tests/rest_scripts/get_object.sh 2>&1); then + log 2 "error getting file data: $result" + return 1 + fi + if [ "$result" != "206" ]; then + log 2 "expected '206', was '$result' ($(cat "$5.tmp"))" + return 1 + fi + if ! dd if="$5.tmp" of="$5" bs=1 seek="$file_byte_idx" count="$7" conv=notrunc 2>"$TEST_FILE_FOLDER/dd_error.txt"; then + log 2 "error writing file segment: $(cat "$TEST_FILE_FOLDER/dd_error.txt")" + return 1 + fi + + file_byte_idx=$((last_byte + 1)) + done +}