test: ListObjectsV1 updates, file code changes, multipart updates

This commit is contained in:
Luke McCrone
2026-03-17 15:27:24 -03:00
parent 09413c5f60
commit d7d9179db0
31 changed files with 764 additions and 513 deletions

View File

@@ -26,6 +26,7 @@ create_bucket() {
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(send_command aws --no-verify-ssl s3 mb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]]; then

View File

@@ -14,7 +14,6 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/util/util_file.sh
source ./tests/commands/command.sh
source ./tests/drivers/put_bucket_acl/put_bucket_acl.sh
source ./tests/drivers/rest.sh

View File

@@ -15,7 +15,7 @@
# under the License.
upload_part() {
if [ $# -ne 5 ]; then
if ! check_param_count_v2 "bucket, key, upload ID, file name, part number" 5 $#; then
log 2 "upload multipart part function must have bucket, key, upload ID, file name, part number"
return 1
fi
@@ -27,12 +27,11 @@ upload_part() {
}
upload_part_with_user() {
if [ $# -ne 7 ]; then
log 2 "upload multipart part function must have bucket, key, upload ID, file name, part number, username, password"
if ! check_param_count_v2 "bucket, key, upload ID, file name, part number, username, password" 7 $#; then
return 1
fi
local etag_json
if ! etag_json=$(AWS_ACCESS_KEY_ID="$6" AWS_SECRET_ACCESS_KEY="$7" send_command aws --no-verify-ssl s3api upload-part --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --body "$4-$(($5-1))" 2>&1); then
if ! etag_json=$(AWS_ACCESS_KEY_ID="$6" AWS_SECRET_ACCESS_KEY="$7" send_command aws --no-verify-ssl s3api upload-part --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --body "$4" 2>&1); then
log 2 "Error uploading part $5: $etag_json"
return 1
fi
@@ -106,19 +105,22 @@ upload_part_rest_with_checksum() {
if ! check_param_count_v2 "bucket name, key, upload ID, part number, part, checksum algorithm" 6 $#; then
return 1
fi
if ! response_file=$(get_file_name 2>&1); then
log 2 "error getting file name: $response_file"
return 1
fi
# shellcheck disable=SC2154,SC2097,SC2098
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PART_NUMBER="$4" DATA_FILE="$5" CHECKSUM_TYPE="$6" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" OUTPUT_FILE="$TEST_FILE_FOLDER/etag.txt" ./tests/rest_scripts/upload_part.sh); then
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$3" PART_NUMBER="$4" DATA_FILE="$5" CHECKSUM_TYPE="$6" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" OUTPUT_FILE="$TEST_FILE_FOLDER/$response_file" ./tests/rest_scripts/upload_part.sh); then
log 2 "error sending upload-part REST command: $result"
return 1
fi
if [[ "$result" != "200" ]]; then
log 2 "upload-part command returned error $result: $(cat "$TEST_FILE_FOLDER/etag.txt")"
log 2 "upload-part command returned error $result: $(cat "$TEST_FILE_FOLDER/$response_file")"
return 1
fi
log 5 "$(cat "$TEST_FILE_FOLDER/etag.txt")"
etag=$(grep -i "etag" "$TEST_FILE_FOLDER/etag.txt" | awk '{print $2}' | tr -d '\r')
etag=$(grep -i "etag" "$TEST_FILE_FOLDER/$response_file" | awk '{print $2}' | tr -d '\r')
# shellcheck disable=SC2034
checksum=$(grep -i "x-amz-checksum-" "$TEST_FILE_FOLDER/etag.txt" | awk '{print $2}' | tr -d '\r')
checksum=$(grep -i "x-amz-checksum-" "$TEST_FILE_FOLDER/$response_file" | awk '{print $2}' | tr -d '\r')
log 5 "etag: $etag"
return 0
}

View File

@@ -43,9 +43,10 @@ complete_multipart_upload_with_checksum() {
if ! check_param_count_v2 "bucket, key, file, upload ID, part count, checksum type, checksum algorithm" 7 $#; then
return 1
fi
log 5 "checksum algorithm: $7"
lowercase_checksum_algorithm=$(echo -n "$7" | tr '[:upper:]' '[:lower:]')
if ! upload_parts_rest_with_checksum_before_completion "$1" "$2" "$3" "$4" "$5" "$lowercase_checksum_algorithm"; then
log 2 "error uploading parts"
log 2 "error uploading REST parts with checksum before completion"
return 1
fi
log 5 "parts payload: $parts_payload"
@@ -98,33 +99,46 @@ calculate_composite_checksum() {
}
test_multipart_upload_with_checksum() {
if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
log 6 "test_multipart_upload_with_checksum"
if ! check_param_count_v2 "checksum type, algorithm" 2 $#; then
return 1
fi
if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
if ! file_and_bucket=$(setup_bucket_and_file_v3 "$BUCKET_ONE_NAME" 2>&1); then
log 2 "error setting up file and bucket"
return 1
fi
read -r bucket_name mp_file_name <<< "$file_and_bucket"
log 5 "file name: $mp_file_name, file info: $(ls -l "$TEST_FILE_FOLDER/$mp_file_name")"
if ! perform_full_multipart_upload_with_checksum_before_completion "$bucket_name" "$mp_file_name" "$1" "$2"; then
log 2 "error performing multipart upload with checksum before completion"
return 1
fi
if ! calculate_multipart_checksum "$3" 2 "$TEST_FILE_FOLDER/$2" ${checksums[@]}; then
if ! calculate_multipart_checksum "$1" 2 "$TEST_FILE_FOLDER/$mp_file_name" ${checksums[@]}; then
log 2 "error calculating multipart checksum"
return 1
fi
if ! complete_multipart_upload_with_checksum "$bucket_name" "$2" "$TEST_FILE_FOLDER/$2" "$upload_id" 2 "$3" "$4"; then
log 2 "error completing multipart upload"
if ! complete_multipart_upload_with_checksum "$bucket_name" "$mp_file_name" "$TEST_FILE_FOLDER/$mp_file_name" "$upload_id" 2 "$1" "$2"; then
log 2 "error completing multipart upload with checksum"
return 1
fi
return 0
}
test_complete_multipart_upload_unneeded_algorithm_parameter() {
if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
if ! check_param_count_v2 "checksum type, algorithm" 2 $#; then
return 1
fi
if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
if ! file_and_bucket=$(setup_bucket_and_file_v3 "$BUCKET_ONE_NAME" 2>&1); then
log 2 "error setting up file and bucket"
return 1
fi
read -r bucket_name mp_file_name <<< "$file_and_bucket"
if ! perform_full_multipart_upload_with_checksum_before_completion "$bucket_name" "$mp_file_name" "$1" "$2"; then
log 2 "error performing multipart upload with checksum before completion"
return 1
fi
if ! complete_multipart_upload_rest_nonexistent_param "$bucket_name" "$2" "$upload_id" "$parts_payload"; then
log 5 "upload ID: $upload_id"
if ! complete_multipart_upload_rest_nonexistent_param "$bucket_name" "$mp_file_name" "$upload_id" "$parts_payload"; then
log 2 "error completing multipart upload with nonexistent param"
return 1
fi
@@ -132,18 +146,23 @@ test_complete_multipart_upload_unneeded_algorithm_parameter() {
}
test_complete_multipart_upload_incorrect_checksum() {
if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
if ! check_param_count_v2 "checksum type, algorithm" 2 $#; then
return 1
fi
if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
if ! file_and_bucket=$(setup_bucket_and_file_v3 "$BUCKET_ONE_NAME" 2>&1); then
log 2 "error setting up file and bucket"
return 1
fi
read -r bucket_name mp_file_name <<< "$file_and_bucket"
if ! perform_full_multipart_upload_with_checksum_before_completion "$bucket_name" "$mp_file_name" "$1" "$2"; then
log 2 "error performing multipart upload with checksum before completion"
return 1
fi
if ! calculate_multipart_checksum "$3" 2 "$TEST_FILE_FOLDER/$2" ${checksums[@]}; then
if ! calculate_multipart_checksum "$1" 2 "$TEST_FILE_FOLDER/$mp_file_name" ${checksums[@]}; then
log 2 "error calculating multipart checksum"
return 1
fi
if ! complete_multipart_upload_rest_incorrect_checksum "$bucket_name" "$2" "$upload_id" "$parts_payload" "$3" "$4" "$checksum"; then
if ! complete_multipart_upload_rest_incorrect_checksum "$bucket_name" "$mp_file_name" "$upload_id" "$parts_payload" "$1" "$2" "$checksum"; then
log 2 "error completing multipart upload with nonexistent param"
return 1
fi
@@ -151,14 +170,19 @@ test_complete_multipart_upload_incorrect_checksum() {
}
test_complete_multipart_upload_invalid_checksum() {
if ! check_param_count_v2 "bucket, filename, checksum type, algorithm" 4 $#; then
if ! check_param_count_v2 "checksum type, algorithm" 2 $#; then
return 1
fi
if ! perform_full_multipart_upload_with_checksum_before_completion "$1" "$2" "$3" "$4"; then
if ! file_and_bucket=$(setup_bucket_and_file_v3 "$BUCKET_ONE_NAME" 2>&1); then
log 2 "error setting up file and bucket"
return 1
fi
read -r bucket_name mp_file_name <<< "$file_and_bucket"
if ! perform_full_multipart_upload_with_checksum_before_completion "$bucket_name" "$mp_file_name" "$1" "$2"; then
log 2 "error performing multipart upload with checksum before completion"
return 1
fi
if ! complete_multipart_upload_rest_invalid_checksum "$bucket_name" "$2" "$upload_id" "$parts_payload" "$3" "$4" "wrong"; then
if ! complete_multipart_upload_rest_invalid_checksum "$bucket_name" "$mp_file_name" "$upload_id" "$parts_payload" "$1" "$2" "wrong"; then
log 2 "error completing multipart upload with nonexistent param"
return 1
fi

View File

@@ -241,7 +241,8 @@ get_bucket_name() {
fi
local bucket_name
bucket_name="$1-${uuid,,}"
echo "${bucket_name:0:63}"
bucket_name_trimmed="${bucket_name:0:63}"
echo "${bucket_name_trimmed%-}"
return 0
}

View File

@@ -117,6 +117,7 @@ setup_bucket_and_files_base() {
log 2 "error setting up bucket"
return 1
fi
log 5 "create test files: '${*:3}'"
if ! create_test_files "${@:3}"; then
log 2 "error creating test files"
return 1
@@ -169,7 +170,7 @@ setup_bucket_and_large_file_v3() {
log 2 "error setting up bucket: $bucket_name"
return 1
fi
if ! file_name=$(create_large_file_v2 "$file_name" 2>&1); then
if ! file_name=$(create_large_file "$file_name" 2>&1); then
log 2 "error creating large file: $file_name"
return 1
fi
@@ -221,6 +222,7 @@ get_file_name_with_prefix() {
return 1
fi
echo "$1-${uuid}"
return 0
}
create_test_files_and_folders() {
@@ -288,3 +290,344 @@ get_file_names() {
echo "${file_names[*]}"
return 0
}
# Usage: create_test_files_with_prefix <prefix> [count]
# Creates <count> test files whose names are generated from <prefix>.
# Returns: Space-separated list of created filenames on stdout
create_test_files_with_prefix() {
  if ! check_param_count_gt "prefix, count (optional)" 1 $#; then
    return 1
  fi
  local prefix="$1"
  local count="${2:-1}" # default to a single file when count is omitted
  local created=()
  local name error idx
  idx=0
  while ((idx < count)); do
    # generate a unique name from the prefix
    if ! name=$(get_file_name_with_prefix "$prefix" 2>&1); then
      log 2 "error getting file name: $name"
      return 1
    fi
    # create the file on disk
    if ! error=$(create_test_file "$name" 2>&1); then
      log 2 "error creating test file: $error"
      return 1
    fi
    created+=("$name")
    idx=$((idx + 1))
  done
  echo "${created[*]}"
  return 0
}
# Combined function to setup environment and create test files
# Params: filename1 [filename2 ...]
# Note: Uses $FILE_SIZE if set, otherwise defaults to 10 bytes. Requires $TEST_FILE_FOLDER.
create_test_files() {
  if ! check_param_count_gt "at least one filename" 1 $#; then
    return 1
  fi
  if [[ -z "$TEST_FILE_FOLDER" ]]; then
    log 2 "TEST_FILE_FOLDER must be defined"
    return 1
  fi
  local file_size="${FILE_SIZE:-10}" # Use global $FILE_SIZE or default to 10
  local error
  log 5 "file size: $file_size"
  for filename in "$@"; do
    local full_path="$TEST_FILE_FOLDER/$filename"
    # Clean up existing file if present
    if ! error=$(rm -f "$full_path" 2>&1); then
      log 2 "error removing existing file $filename: $error"
      return 1
    fi
    # Create the file with random data
    if [[ "$file_size" -eq 0 ]]; then
      # zero-byte request: create an empty file; check the result like every
      # other step (the previous version left touch unchecked)
      if ! error=$(touch "$full_path" 2>&1); then
        log 2 "error creating empty file $filename: $error"
        return 1
      fi
    else
      # Use dd for specific size creation
      if ! error=$(dd if=/dev/urandom of="$full_path" bs="$file_size" count=1 conv=notrunc 2>&1); then
        log 2 "error adding $file_size bytes to $filename: $error"
        return 1
      fi
    fi
    log 5 "Created: $full_path ($file_size bytes)"
  done
  return 0
}
# Create a single test file, optionally with a specific size.
# params: file name, size (optional, defaults to 10 bytes)
# return: 0 for success, 1 for error
create_test_file() {
  if ! check_param_count_gt "file name, size (optional)" 1 $#; then
    return 1
  fi
  local size="${2:-10}"
  local error
  # delegate to create_test_files, passing the size through the env var it reads
  if ! error=$(FILE_SIZE="$size" create_test_files "$1" 2>&1); then
    log 2 "error creating test file: $error"
    return 1
  fi
  return 0
}
# Create a file consisting of <size> repetitions of a single character.
# params: filename, size, char
# return: 0 for success, 1 for error
create_file_single_char() {
  if ! check_param_count_v2 "filename, size, char" 3 $#; then
    return 1
  fi
  if ! error=$(rm -f "$TEST_FILE_FOLDER/$1" 2>&1); then
    log 2 "error removing existing file: $error"
    return 1
  fi
  if ! error=$(touch "$TEST_FILE_FOLDER/$1" 2>&1); then
    log 2 "error creating new file: $error"
    return 1
  fi
  # printf pads to $2 spaces; tr converts each space to the target char.
  # Redirection order matters: '2>&1' first (stderr into the captured
  # variable), then '>' (data into the file). The previous
  # 'dd | tr > file 2>&1' form wrote tr's diagnostics into the data file
  # and let dd's stderr leak uncaptured.
  if ! error=$( { printf '%*s' "$2" '' | tr ' ' "$3"; } 2>&1 > "$TEST_FILE_FOLDER/$1"); then
    log 2 "error adding data to file: $error"
    return 1
  fi
  return 0
}
# Create one or more folders under $TEST_FILE_FOLDER.
# params: folder name(s)
# return: 0 for success, 1 for error
create_test_folder() {
  if ! check_param_count_gt "folder names" 1 $#; then
    return 1
  fi
  local folder error
  for folder in "$@"; do
    # -p creates intermediate directories and tolerates pre-existing ones
    if ! error=$(mkdir -p "$TEST_FILE_FOLDER/$folder" 2>&1); then
      log 2 "error creating folder $folder: $error"
      return 1
    fi
  done
  return 0
}
# Delete one or more test files from $TEST_FILE_FOLDER.
# params: filename(s)
# return: 0 for success, 1 for error
delete_test_files() {
  if ! check_param_count_gt "filenames" 1 $#; then
    return 1
  fi
  if [ -z "$TEST_FILE_FOLDER" ]; then
    log 2 "no test file folder defined, not deleting"
    return 1
  fi
  local target error
  for target in "$@"; do
    # a failed deletion is logged but does not abort the loop;
    # :? guards keep an empty expansion from turning into 'rm -f /'
    if ! error=$(rm -f "${TEST_FILE_FOLDER:?}/${target:?}" 2>&1); then
      log 2 "error deleting file '$target': $error"
    fi
  done
  return 0
}
# Print the size of a file in bytes.
# params: file location
# return: 0 for success (size on stdout), 1 for error
get_file_size() {
  if ! check_param_count_v2 "file location" 1 $#; then
    return 1
  fi
  local size=""
  case "$OSTYPE" in
    darwin*)
      # BSD stat flag syntax
      if ! size=$(stat -f %z "$1" 2>&1); then
        log 2 "error getting file size: $size"
        return 1
      fi
      ;;
    *)
      # GNU stat flag syntax
      if ! size=$(stat -c %s "$1" 2>&1); then
        log 2 "error getting file size: $size"
        return 1
      fi
      ;;
  esac
  echo "$size"
}
# split file into pieces to test multipart upload
# params: file location, number of pieces
# output: space-separated list of piece paths on stdout (part order)
# return 0 for success, 1 for error
split_file() {
  if ! check_param_count_v2 "file name, number of pieces" 2 $#; then
    return 1
  fi
  # -n K : split into K chunks of roughly equal byte size
  # -d   : use numeric suffixes
  # -a 2 : two-digit suffixes, allows up to 100 pieces (00-99)
  if ! error=$(split -a 2 -d -n "$2" "$1" "${1}-" 2>&1); then
    log 2 "error splitting file: $error"
    return 1
  fi
  local restore_nullglob
  restore_nullglob="$(shopt -p nullglob)"
  shopt -s nullglob
  # numeric suffixes mean the glob already expands in part order,
  # so no extra sorting is needed (the previous '| sort' was a no-op:
  # it sorted a single space-joined line)
  local parts=("${1}-"*)
  eval "$restore_nullglob"
  if [ "${#parts[@]}" -eq 0 ]; then
    log 2 "split produced no output files"
    return 1
  fi
  echo "${parts[*]}"
  return 0
}
# Compare two files by MD5 digest.
# params: two file paths
# return: 0 if identical, 1 if different, 2 on error
compare_files() {
  if ! check_param_count_v2 "two files" 2 $#; then
    return 2
  fi
  log 5 "comparing files '$1' and '$2'"
  local first="$1"
  local second="$2"
  local hasher raw_one raw_two sum_one sum_two
  # macOS ships 'md5 -q'; everything else has 'md5sum'
  if [[ "$(uname)" == "Darwin" ]]; then
    hasher="md5 -q"
  else
    hasher="md5sum"
  fi
  if ! raw_one=$($hasher "$first" 2>&1); then
    log 2 "error getting md5 for '$first': $raw_one"
    return 2
  fi
  # keep only the hex digest (md5sum appends the filename)
  sum_one=${raw_one%% *}
  if ! raw_two=$($hasher "$second" 2>&1); then
    log 2 "error getting md5 for '$second': $raw_two"
    return 2
  fi
  # keep only the hex digest (md5sum appends the filename)
  sum_two=${raw_two%% *}
  if [[ "$sum_one" != "$sum_two" ]]; then
    log 2 "MD5 mismatch: $sum_one ($first) vs $sum_two ($second)"
    return 1
  fi
  return 0
}
# Usage: create_large_file [filename] [size_in_mb]
# If filename is omitted, it generates one. Defaults to 160MB.
# Prints the (possibly generated) filename on stdout.
create_large_file() {
  if ! check_param_count_le "filename (optional), size in MB (optional)" 2 $#; then
    return 1
  fi
  if [ -z "$TEST_FILE_FOLDER" ]; then
    log 2 "TEST_FILE_FOLDER must be defined"
    return 1
  fi
  local name="$1"
  local mb="${2:-160}"
  local error
  if [[ -z "$name" ]]; then
    # no name supplied - generate a unique one
    if ! name=$(get_file_name 2>&1); then
      log 2 "error generating automatic file name: $name"
      return 1
    fi
  fi
  log 6 "Creating ${mb}MB file: $name"
  # bs=1M is significantly faster than bs=1024 for large files
  if ! error=$(dd if=/dev/urandom of="${TEST_FILE_FOLDER}/${name}" bs=1M count="$mb" 2>&1); then
    log 2 "error creating ${mb}MB file at ${name}: $error"
    return 1
  fi
  echo "$name"
  return 0
}
# Create <n> empty files named file_1 .. file_n in $TEST_FILE_FOLDER.
# param: number of files
# return: 0 for success, 1 for error
create_test_file_count() {
  if ! check_param_count_v2 "number of files" 1 $#; then
    return 1
  fi
  for ((i = 1; i <= $1; i++)); do
    if ! error=$(touch "$TEST_FILE_FOLDER/file_$i" 2>&1); then
      log 2 "error creating file_$i: $error"
      return 1
    fi
  done
  # shellcheck disable=SC2153
  if [[ $LOG_LEVEL -ge 5 ]]; then
    # the glob must be outside the quotes; the previous quoted form
    # passed the literal string 'file_*' to ls, which always failed
    ls_result=$(ls "$TEST_FILE_FOLDER"/file_*)
    log 5 "$ls_result"
  fi
  return 0
}
# Download an object as a specific user and verify it matches a local file.
# params: original file, bucket, key, destination, username, password, chunk size (optional)
# return: 0 for success, 1 for error
download_and_compare_file_with_user() {
  if ! check_param_count_gt "original file, bucket, key, destination, username, password, chunk size (optional)" 6 $#; then
    return 1
  fi
  local error
  # clear the destination first so the comparison sees only the fresh download
  if [ -e "$4" ]; then
    if ! error=$(rm -f "$4"); then
      log 2 "error deleting local file at download destination before download: $error"
      return 1
    fi
  fi
  if ! download_file_with_user "$5" "$6" "$2" "$3" "$4" "$7"; then
    log 2 "error downloading file"
    return 1
  fi
  if ! compare_files "$1" "$4"; then
    log 2 "files don't match"
    return 1
  fi
  return 0
}
# Download an object using the default credentials and verify it matches
# a local file.
# params: original file, bucket, key, destination, chunk size (optional)
# return: 0 for success, 1 for error
download_and_compare_file() {
  log 6 "download_and_compare_file"
  if ! check_param_count_gt "original file, bucket, key, destination, chunk size (optional)" 4 $#; then
    return 1
  fi
  # delegate to the user-specific variant with the default key pair
  if download_and_compare_file_with_user "$1" "$2" "$3" "$4" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$5"; then
    return 0
  fi
  log 2 "error downloading and comparing file with user"
  return 1
}
# Copy a file on the local filesystem.
# params: src, dst
# return: 0 for success, 1 for error
copy_file_locally() {
  if ! check_param_count_v2 "src, dst" 2 $#; then
    return 1
  fi
  local error
  if error=$(cp "$1" "$2" 2>&1); then
    return 0
  fi
  log 2 "error copying file: $error"
  return 1
}
# Move a file on the local filesystem.
# params: src, dst
# return: 0 for success, 1 for error
move_file_locally() {
  if ! check_param_count_v2 "src,dst" 2 $#; then
    return 1
  fi
  local error
  if error=$(mv "$1" "$2" 2>&1); then
    return 0
  fi
  log 2 "error moving file: $error"
  return 1
}

View File

@@ -24,8 +24,8 @@ get_check_bucket_location_various() {
fi
# shellcheck disable=SC2154
if [ "$AWS_REGION" == "us-east-1" ]; then
# s3cmd returns 'us-east-1' here, the others return null
if [ "$1" == "s3cmd" ]; then
# s3cmd and mc return 'us-east-1' here, the others return null
if [ "$1" == "s3cmd" ] || [ "$1" == "mc" ]; then
expected_location="us-east-1"
else
expected_location="null"

View File

@@ -189,3 +189,37 @@ check_if_key_exists() {
fi
return 0
}
# Verify a ListObjects XML response contains the expected number of keys
# and that each expected key is present.
# params: data file, expected count, expected keys...
# return: 0 for success, 1 for error
check_count_and_keys() {
  if ! check_param_count_gt "data file, count, keys" 2 $#; then
    return 1
  fi
  local xml_data expected_key
  # parse (and validate) the response body first
  if ! xml_data=$(check_validity_and_or_parse_xml_data "$1" 2>&1); then
    log 2 "error parsing xml data: $xml_data"
    return 1
  fi
  # total Contents/Key count must match
  if ! check_element_count "$xml_data" "$2" "ListBucketResult" "Contents" "Key"; then
    log 2 "error checking element count"
    return 1
  fi
  # every expected key must appear in the listing
  for expected_key in "${@:3}"; do
    if ! check_if_element_exists "$xml_data" "$expected_key" "ListBucketResult" "Contents" "Key"; then
      log 2 "error checking if element '$expected_key' exists"
      return 1
    fi
  done
  return 0
}
# List objects via the REST Go helper and verify key count and keys.
# params: bucket name, count, <count> keys, then any additional params
# return: 0 for success, 1 for error
list_objects_check_count_and_keys() {
  if ! check_param_count_gt "bucket name, count, keys, additional params if any" 1 $#; then
    return 1
  fi
  local expected_count="$2"
  # the <count> args after the count are keys; anything following them is
  # forwarded to the command (e.g. -query parameters)
  local expected_keys=("${@:3:$expected_count}")
  local extra_args=("${@:3+expected_count}")
  if ! send_rest_go_command_callback "200" "check_count_and_keys" "-bucketName" "$1" "${extra_args[@]}" "--" "$expected_count" "${expected_keys[@]}"; then
    log 2 "error sending list objects command"
    return 1
  fi
  return 0
}

View File

@@ -61,3 +61,15 @@ check_param_count_gt() {
fi
return 0
}
# Verify a caller received no more than the allowed number of params.
# params: description of params, expected maximum, actual count
# return: 0 if within the maximum, 1 otherwise (or on misuse)
check_param_count_le() {
  if [ $# -lt 3 ]; then
    log 2 "'check_param_count_le' requires params list, expected maximum, actual"
    return 1
  fi
  # actual ($3) may not exceed the maximum ($2)
  if [ "$3" -gt "$2" ]; then
    log_with_stack_ref 2 "function '${FUNCNAME[1]}' has maximum param count of $2 ($1)"
    return 1
  fi
  return 0
}

View File

@@ -446,3 +446,16 @@ attempt_put_object_with_specific_acl() {
fi
return 0
}
# PUT each named file from $TEST_FILE_FOLDER into a bucket via REST,
# using the file name as the object key.
# params: bucket name, file names...
# return: 0 for success, 1 for error
put_objects() {
  if ! check_param_count_gt "bucket name, file names" 2 $#; then
    return 1
  fi
  local bucket="$1"
  local file_name
  shift
  for file_name in "$@"; do
    if ! send_rest_go_command "200" "-method" "PUT" "-payloadFile" "$TEST_FILE_FOLDER/$file_name" "-bucketName" "$bucket" "-objectKey" "$file_name"; then
      log 2 "error putting file '$file_name'"
      return 1
    fi
  done
  return 0
}

View File

@@ -32,14 +32,16 @@ upload_parts_rest_before_completion() {
if ! check_param_count_v2 "bucket, key, file, upload ID, part count" 5 $#; then
return 1
fi
if ! split_file "$3" "$5"; then
log 2 "error splitting file"
if ! segments=$(split_file "$3" "$5" 2>&1); then
log 2 "error splitting file: $segments"
return 1
fi
read -r -a segment_array <<< "$segments"
parts_payload=""
for ((part=0;part<"$5";part++)); do
part_number=$((part+1))
if ! etag=$(upload_part_rest "$1" "$2" "$4" "$part_number" "$3-$part" 2>&1); then
if ! etag=$(upload_part_rest "$1" "$2" "$4" "$part_number" "${segment_array[$part]}" 2>&1); then
log 2 "error uploading part $part: $etag"
return 1
fi
@@ -53,15 +55,19 @@ upload_parts_rest_with_checksum_before_completion() {
if ! check_param_count_v2 "bucket, key, file, upload ID, part count, algorithm" 6 $#; then
return 1
fi
if ! split_file "$3" "$5"; then
log 5 "file: $3, part count: $5"
log 5 "file info: $(ls -l "$3")"
if ! segments=$(split_file "$3" "$5" 2>&1); then
log 2 "error splitting file"
return 1
fi
read -r -a segment_array <<< "$segments"
parts_payload=""
checksums=()
for ((part=0;part<"$5";part++)); do
part_number=$((part+1))
if ! upload_part_rest_with_checksum "$1" "$2" "$4" "$part_number" "$3-$part" "$6"; then
if ! upload_part_rest_with_checksum "$1" "$2" "$4" "$part_number" "${segment_array[$part]}" "$6"; then
log 2 "error uploading part $part"
return 1
fi
@@ -70,7 +76,7 @@ upload_parts_rest_with_checksum_before_completion() {
parts_payload+="<Part><ETag>$etag</ETag><Checksum${uppercase_checksum_algorithm}>${checksum}</Checksum${uppercase_checksum_algorithm}><PartNumber>$part_number</PartNumber></Part>"
log 5 "parts payload: $parts_payload"
done
log 5 "${checksums[*]}"
log 5 "CHECKSUMS: ${checksums[*]}"
return 0
}

View File

@@ -15,7 +15,6 @@
# under the License.
source ./tests/rest_scripts/rest.sh
source ./tests/util/util_file.sh
# Fields

View File

@@ -14,8 +14,8 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/drivers/file.sh
source ./tests/rest_scripts/rest.sh
source ./tests/util/util_file.sh
# Fields

View File

@@ -14,8 +14,8 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/drivers/file.sh
source ./tests/rest_scripts/rest.sh
source ./tests/util/util_file.sh
# Fields

View File

@@ -32,7 +32,6 @@ source ./tests/commands/put_public_access_block.sh
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/drivers/file.sh
source ./tests/drivers/params.sh
source ./tests/util/util_file.sh
source ./tests/util/util_list_buckets.sh
source ./tests/util/util_object.sh
source ./tests/util/util_policy.sh

84
tests/test_file.sh Executable file
View File

@@ -0,0 +1,84 @@
#!/usr/bin/env bats
# Copyright 2026 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
load ./bats-support/load
load ./bats-assert/load
source ./tests/setup.sh
source ./tests/drivers/file.sh
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/commands/put_object.sh
# Ensure TEST_FILE_FOLDER is set and points at an existing directory;
# print a diagnostic to stderr and fail otherwise.
assert_test_file_folder_exists() {
  if [ -n "$TEST_FILE_FOLDER" ] && [ -d "$TEST_FILE_FOLDER" ]; then
    return 0
  fi
  echo "TEST_FILE_FOLDER must be set and exist" >&2
  return 1
}
# shellcheck disable=SC2030
# Drives test_download_and_compare_file across all four combinations of
# (destination file already exists) x (comparison expected to pass).
@test "test - download_and_compare_file" {
file_exists_in_dest_location=("true" "false")
pass_or_fail=("true" "false")
for exists in "${file_exists_in_dest_location[@]}"; do
for pass in "${pass_or_fail[@]}"; do
test_download_and_compare_file "$exists" "$pass"
done
done
}
# Helper for the download_and_compare_file test matrix.
# params: $1 - "true" if a file should already exist at the download
#              destination; $2 - "true" if the comparison should pass
test_download_and_compare_file() {
  if ! check_param_count_v2 "existing file, pass" 2 $#; then
    return 1
  fi
  log 5 "existing file: $1, pass: $2"
  assert_test_file_folder_exists
  run setup_bucket_v3 "$BUCKET_ONE_NAME"
  assert_success
  # shellcheck disable=SC2031
  bucket_name="$output"
  run get_file_name
  assert_success
  # shellcheck disable=SC2031
  object_key="$output"
  src_file="$TEST_FILE_FOLDER/src_${object_key}"
  printf '%s' "source-payload" > "$src_file"
  existing_or_not_dst_file="$TEST_FILE_FOLDER/dst_${object_key}"
  if [ "$1" == "true" ]; then
    printf '%s' "other-payload" > "$existing_or_not_dst_file"
  fi
  if [ "$2" == "false" ]; then
    # compare against a file whose content differs from the uploaded object.
    # NOTE: previously this wrote "wrong-payload" to $src_file instead of
    # $bad_src_file, so the failure case compared against a nonexistent file
    # and the uploaded object itself held the wrong payload.
    bad_src_file="$TEST_FILE_FOLDER/bad_src_${object_key}"
    printf '%s' "wrong-payload" > "$bad_src_file"
    compare_file="$bad_src_file"
  else
    compare_file="$src_file"
  fi
  run put_object_rest "$src_file" "$bucket_name" "$object_key"
  assert_success
  run download_and_compare_file "$compare_file" "$bucket_name" "$object_key" "$existing_or_not_dst_file"
  if [ "$2" == "true" ]; then
    assert_success
  else
    assert_failure
  fi
}

View File

@@ -23,7 +23,6 @@ source ./tests/drivers/file.sh
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/drivers/get_object_lock_config/get_object_lock_config_rest.sh
source ./tests/drivers/put_bucket_ownership_controls/put_bucket_ownership_controls_rest.sh
source ./tests/util/util_file.sh
@test "REST - chunked upload, no content length" {
run get_bucket_name "$BUCKET_ONE_NAME"

View File

@@ -17,6 +17,7 @@
load ./bats-support/load
load ./bats-assert/load
source ./tests/commands/get_object.sh
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/setup.sh

View File

@@ -19,6 +19,7 @@ load ./bats-assert/load
source ./tests/setup.sh
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/drivers/list_objects/list_objects_rest.sh
@test "test_rest_list_objects" {
run get_bucket_name "$BUCKET_ONE_NAME"
@@ -215,3 +216,49 @@ source ./tests/drivers/create_bucket/create_bucket_rest.sh
run list_objects_check_key "$bucket_name" "$file_name" ""
assert_success
}
@test "REST - ListObjects - marker/max-keys" {
run setup_bucket_and_files_v3 "$BUCKET_ONE_NAME" 2
assert_success
read -r bucket_name file_one_name file_two_name <<< "$output"
mapfile -t sorted_files < <(printf '%s\n' "$file_one_name" "$file_two_name" | sort)
run put_objects "$bucket_name" "$file_one_name" "$file_two_name"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "1" "${sorted_files[0]}" "-query" "max-keys=1"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "1" "${sorted_files[1]}" "-query" "max-keys=1&marker=${sorted_files[0]}"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "0" "-query" "max-keys=1&marker=${sorted_files[1]}"
assert_success
}
@test "REST - ListObjects - prefix" {
run setup_bucket_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name=$output
local prefix="prefix"
run create_test_files_with_prefix "$prefix" 2
assert_success
read -r file_one_name file_two_name <<< "$output"
run put_objects "$bucket_name" "$file_one_name" "$file_two_name"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "2" "$file_one_name" "$file_two_name" "-query" "prefix=$prefix"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "0" "-query" "prefix=prefik"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "1" "$file_one_name" "-query" "prefix=$file_one_name"
assert_success
run list_objects_check_count_and_keys "$bucket_name" "1" "$file_two_name" "-query" "prefix=$file_two_name"
assert_success
}

View File

@@ -24,19 +24,13 @@ source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/drivers/complete_multipart_upload/complete_multipart_upload_rest.sh
source ./tests/drivers/list_buckets/list_buckets_rest.sh
source ./tests/drivers/upload_part/upload_part_rest.sh
source ./tests/util/util_file.sh
test_file="test_file"
@test "REST - multipart upload create then abort" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket "$bucket_name"
assert_success
run create_abort_multipart_upload_rest "$bucket_name" "$test_file"
run create_abort_multipart_upload_rest "$bucket_name" "$mp_file"
assert_success
}
@@ -45,17 +39,23 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
large_test_file="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$large_test_file"
assert_success
run split_file "$TEST_FILE_FOLDER/$test_file" 4
run split_file "$TEST_FILE_FOLDER/$large_test_file" 4
assert_success
read -r part_one part_two part_three part_four <<< "$output"
log 5 "parts: $part_one $part_two $part_three $part_four"
run upload_check_parts "$bucket_name" "$large_test_file" \
"$part_one" "$part_two" "$part_three" "$part_four"
assert_success
run upload_check_parts "$bucket_name" "$test_file" \
"$TEST_FILE_FOLDER/$test_file-0" "$TEST_FILE_FOLDER/$test_file-1" "$TEST_FILE_FOLDER/$test_file-2" "$TEST_FILE_FOLDER/$test_file-3"
assert_success
run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$bucket_name" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
run download_and_compare_file "$TEST_FILE_FOLDER/$large_test_file" "$bucket_name" "$large_test_file" "$TEST_FILE_FOLDER/$large_test_file-copy"
assert_success
}
@@ -67,10 +67,14 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
large_test_file="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$large_test_file"
assert_success
run create_upload_finish_wrong_etag "$bucket_name" "$test_file"
run create_upload_finish_wrong_etag "$bucket_name" "$large_test_file"
assert_success
}
@@ -79,13 +83,17 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
large_test_file="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$large_test_file"
assert_success
run create_upload_part_copy_rest "$bucket_name" "$test_file" "$TEST_FILE_FOLDER/$test_file"
run create_upload_part_copy_rest "$bucket_name" "$large_test_file" "$TEST_FILE_FOLDER/$large_test_file"
assert_success
run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$bucket_name" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
run download_and_compare_file "$TEST_FILE_FOLDER/$large_test_file" "$bucket_name" "$large_test_file" "$TEST_FILE_FOLDER/$large_test_file-copy"
assert_success
}
@@ -94,7 +102,11 @@ test_file="test_file"
assert_success
bucket_name="$output"
run upload_part_copy_without_upload_id_or_part_number "$bucket_name" "$test_file" "1" "" \
run get_file_name
assert_success
mp_file=$output
run upload_part_copy_without_upload_id_or_part_number "$bucket_name" "$mp_file" "1" "" \
400 "InvalidArgument" "This operation does not accept partNumber without uploadId"
assert_success
}
@@ -104,7 +116,11 @@ test_file="test_file"
assert_success
bucket_name="$output"
run upload_part_copy_without_upload_id_or_part_number "$bucket_name" "$test_file" "" "dummy" \
run get_file_name
assert_success
mp_file=$output
run upload_part_copy_without_upload_id_or_part_number "$bucket_name" "$mp_file" "" "dummy" \
405 "MethodNotAllowed" "The specified method is not allowed against this resource"
assert_success
}
@@ -114,13 +130,17 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
mp_file=$output
run setup_bucket_and_file_v2 "$bucket_name" "$mp_file"
assert_success
run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$bucket_name" "$test_file"
run put_object "rest" "$TEST_FILE_FOLDER/$mp_file" "$bucket_name" "$mp_file"
assert_success
run upload_part_copy_check_etag_header "$bucket_name" "$test_file"-mp "$bucket_name/$test_file"
run upload_part_copy_check_etag_header "$bucket_name" "$mp_file"-mp "$bucket_name/$mp_file"
assert_success
}
@@ -129,18 +149,23 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
large_test_file=$output
run setup_bucket_and_large_file_v2 "$bucket_name" "$large_test_file"
assert_success
run split_file "$TEST_FILE_FOLDER/$test_file" 4
run split_file "$TEST_FILE_FOLDER/$large_test_file" 4
assert_success
first_part=$(echo -n "$output" | awk '{print $1}')
run create_multipart_upload_rest "$bucket_name" "$test_file" "" "parse_upload_id"
run create_multipart_upload_rest "$bucket_name" "$large_test_file" "" "parse_upload_id"
assert_success
# shellcheck disable=SC2030
upload_id=$output
run upload_part_check_etag_header "$bucket_name" "$test_file" "$upload_id" "1" "$TEST_FILE_FOLDER/${test_file}-0"
run upload_part_check_etag_header "$bucket_name" "$large_test_file" "$upload_id" "1" "$first_part"
assert_success
}
@@ -152,13 +177,17 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
mp_file=$output
run setup_bucket_and_large_file_v2 "$bucket_name" "$mp_file"
assert_success
run split_file "$TEST_FILE_FOLDER/$test_file" 4
run split_file "$TEST_FILE_FOLDER/$mp_file" 4
assert_success
run upload_part_rest_without_part_number "$bucket_name" "$test_file"
run upload_part_rest_without_part_number "$bucket_name" "$mp_file"
assert_success
}
@@ -170,88 +199,74 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$test_file"
run get_file_name
assert_success
mp_file=$output
run setup_bucket_and_large_file_v2 "$bucket_name" "$mp_file"
assert_success
run send_openssl_go_command_expect_error "400" "InvalidArgument" "This operation does not accept partNumber without uploadId" \
"-method" "PUT" "-bucketName" "$bucket_name" "-objectKey" "$test_file" "-query" "partNumber=1"
"-method" "PUT" "-bucketName" "$bucket_name" "-objectKey" "$mp_file" "-query" "partNumber=1"
assert_success
}
@test "REST - multipart w/invalid checksum type" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket_v2 "$bucket_name"
assert_success
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$test_file" "FULL_OBJECTS" "" \
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$mp_file" "FULL_OBJECTS" "" \
check_rest_expected_error "400" "InvalidRequest" "Value for x-amz-checksum-type header is invalid"
assert_success
}
@test "REST - multipart w/invalid checksum algorithm" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket_v2 "$bucket_name"
assert_success
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$test_file" "" "crc64nvm" \
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$mp_file" "" "crc64nvm" \
check_rest_expected_error "400" "InvalidRequest" "Checksum algorithm provided is unsupported."
assert_success
}
@test "REST - multipart checksum w/crc64nvme, composite" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket_v2 "$bucket_name"
assert_success
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$test_file" "COMPOSITE" "crc64nvme" \
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$mp_file" "COMPOSITE" "crc64nvme" \
check_rest_expected_error "400" "InvalidRequest" "The COMPOSITE checksum type cannot be used with the crc64nvme checksum algorithm."
assert_success
}
@test "REST - multipart checksum w/sha1, full object" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket_v2 "$bucket_name"
assert_success
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$test_file" "FULL_OBJECT" "sha1" \
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$mp_file" "FULL_OBJECT" "sha1" \
check_rest_expected_error "400" "InvalidRequest" "The FULL_OBJECT checksum type cannot be used with the sha1 checksum algorithm."
assert_success
}
@test "REST - multipart checksum w/sha256, full object" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket_v2 "$BUCKET_ONE_NAME"
assert_success
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "sha256" \
run create_multipart_upload_rest_with_checksum_type_and_algorithm_error "$bucket_name" "$mp_file" "FULL_OBJECT" "sha256" \
check_rest_expected_error "400" "InvalidRequest" "The FULL_OBJECT checksum type cannot be used with the sha256 checksum algorithm."
assert_success
}
@test "REST - multipart - lowercase checksum type and algorithm" {
run get_bucket_name "$BUCKET_ONE_NAME"
run setup_bucket_and_file_v3 "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
read -r bucket_name mp_file <<< "$output"
run setup_bucket "$bucket_name"
assert_success
run create_multipart_upload_rest "$bucket_name" "$test_file" "CHECKSUM_TYPE=full_object CHECKSUM_ALGORITHM=crc64nvme" "parse_upload_id"
run create_multipart_upload_rest "$bucket_name" "$mp_file" "CHECKSUM_TYPE=full_object CHECKSUM_ALGORITHM=crc64nvme" "parse_upload_id"
assert_success
}
@@ -260,18 +275,19 @@ test_file="test_file"
assert_success
bucket_name="$output"
run setup_bucket "$bucket_name"
run get_file_name
assert_success
large_test_file="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$large_test_file"
assert_success
run create_test_file "$test_file" $((5*1024*1024))
assert_success
run create_multipart_upload_rest "$bucket_name" "$test_file" "CHECKSUM_TYPE=FULL_OBJECT CHECKSUM_ALGORITHM=CRC32" "parse_upload_id"
run create_multipart_upload_rest "$bucket_name" "$large_test_file" "CHECKSUM_TYPE=FULL_OBJECT CHECKSUM_ALGORITHM=CRC32" "parse_upload_id"
assert_success
upload_id=$output
log 5 "upload ID: $upload_id"
run upload_part_rest "$bucket_name" "$test_file" "$upload_id" 1 "$TEST_FILE_FOLDER/$test_file"
run upload_part_rest "$bucket_name" "$large_test_file" "$upload_id" 1 "$TEST_FILE_FOLDER/$large_test_file"
assert_success
}
@@ -283,119 +299,127 @@ test_file="test_file"
}
@test "REST - multipart - composite - sha256" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA256"
run test_multipart_upload_with_checksum "COMPOSITE" "SHA256"
assert_success
}
@test "REST - multipart - composite - sha1" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA1"
run test_multipart_upload_with_checksum "COMPOSITE" "SHA1"
assert_success
}
@test "REST - multipart - composite - crc32" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32"
run test_multipart_upload_with_checksum "COMPOSITE" "CRC32"
assert_success
}
@test "REST - multipart - composite - crc32c" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
run test_multipart_upload_with_checksum "COMPOSITE" "CRC32C"
assert_success
}
@test "REST - multipart - full object - crc32" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
run test_multipart_upload_with_checksum "FULL_OBJECT" "CRC32"
assert_success
}
@test "REST - multipart - full object - crc32c" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
run test_multipart_upload_with_checksum "FULL_OBJECT" "CRC32C"
assert_success
}
@test "REST - multipart - full object - crc64nvme" {
run test_multipart_upload_with_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC64NVME"
run test_multipart_upload_with_checksum "FULL_OBJECT" "CRC64NVME"
assert_success
}
@test "REST - multipart - x-amz-checksum-algorithm is ignored in CompleteMultipartUpload" {
run test_complete_multipart_upload_unneeded_algorithm_parameter "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
run test_complete_multipart_upload_unneeded_algorithm_parameter "FULL_OBJECT" "CRC32C"
assert_success
}
@test "REST - multipart - composite - incorrect sha256" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA256"
run test_complete_multipart_upload_incorrect_checksum "COMPOSITE" "SHA256"
assert_success
}
@test "REST - multipart - composite - incorrect sha1" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA1"
run test_complete_multipart_upload_incorrect_checksum "COMPOSITE" "SHA1"
assert_success
}
@test "REST - multipart - composite - incorrect crc32" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
run test_complete_multipart_upload_incorrect_checksum "COMPOSITE" "CRC32C"
assert_success
}
@test "REST - multipart - composite - incorrect crc32c" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
run test_complete_multipart_upload_incorrect_checksum "COMPOSITE" "CRC32C"
assert_success
}
@test "REST - multipart - full object - incorrect crc32" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
run test_complete_multipart_upload_incorrect_checksum "FULL_OBJECT" "CRC32"
assert_success
}
@test "REST - multipart - full object - incorrect crc32c" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
run test_complete_multipart_upload_incorrect_checksum "FULL_OBJECT" "CRC32C"
assert_success
}
@test "REST - multipart - full object - incorrect crc64nvme" {
run test_complete_multipart_upload_incorrect_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC64NVME"
run test_complete_multipart_upload_incorrect_checksum "FULL_OBJECT" "CRC64NVME"
assert_success
}
@test "REST - multipart - composite - invalid sha1" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA1"
run test_complete_multipart_upload_invalid_checksum "COMPOSITE" "SHA1"
assert_success
}
@test "REST - multipart - composite - invalid sha256" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "SHA256"
run test_complete_multipart_upload_invalid_checksum "COMPOSITE" "SHA256"
assert_success
}
@test "REST - multipart - composite - invalid crc32" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32"
run test_complete_multipart_upload_invalid_checksum "COMPOSITE" "CRC32"
assert_success
}
@test "REST - multipart - composite - invalid crc32c" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "COMPOSITE" "CRC32C"
run test_complete_multipart_upload_invalid_checksum "COMPOSITE" "CRC32C"
assert_success
}
@test "REST - multipart - full object - invalid crc32" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32"
run test_complete_multipart_upload_invalid_checksum "FULL_OBJECT" "CRC32"
assert_success
}
@test "REST - multipart - full object - invalid crc32c" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC32C"
run test_complete_multipart_upload_invalid_checksum "FULL_OBJECT" "CRC32C"
assert_success
}
@test "REST - multipart - full object - invalid crc64nvme" {
run test_complete_multipart_upload_invalid_checksum "$BUCKET_ONE_NAME" "$test_file" "FULL_OBJECT" "CRC64NVME"
run test_complete_multipart_upload_invalid_checksum "FULL_OBJECT" "CRC64NVME"
assert_success
}
@test "REST - multipart - x-amz-mp-object-size - invalid string" {
run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
run get_bucket_name "$BUCKET_ONE_NAME"
assert_success
bucket_name="$output"
run get_file_name
assert_success
large_test_file="$output"
run setup_bucket_and_large_file_v2 "$bucket_name" "$large_test_file"
assert_success
run complete_multipart_upload_invalid_object_size_string "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file"
run complete_multipart_upload_invalid_object_size_string "$bucket_name" "$large_test_file" "$TEST_FILE_FOLDER/$large_test_file"
assert_success
}

View File

@@ -20,7 +20,6 @@ load ./bats-assert/load
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/drivers/put_bucket_ownership_controls/put_bucket_ownership_controls_rest.sh
source ./tests/drivers/file.sh
source ./tests/util/util_file.sh
source ./tests/test_common.sh
# complete-multipart-upload

View File

@@ -18,7 +18,6 @@ load ./bats-support/load
load ./bats-assert/load
source ./tests/drivers/put_bucket_ownership_controls/put_bucket_ownership_controls_rest.sh
source ./tests/util/util_file.sh
source ./tests/test_common.sh
@test "test_list_objects_file_count" {

View File

@@ -53,7 +53,6 @@ source ./tests/drivers/get_bucket_tagging/get_bucket_tagging_rest.sh
source ./tests/drivers/head_bucket/head_bucket_rest.sh
source ./tests/drivers/head_bucket/head_bucket_s3api.sh
source ./tests/drivers/put_bucket_ownership_controls/put_bucket_ownership_controls_rest.sh
source ./tests/util/util_file.sh
source ./tests/util/util_head_bucket.sh
source ./tests/util/util_lock_config.sh
source ./tests/util/util_object.sh

View File

@@ -27,7 +27,6 @@ source ./tests/drivers/head_object/head_object_s3api.sh
source ./tests/drivers/create_bucket/create_bucket_rest.sh
source ./tests/drivers/get_object_tagging/get_object_tagging.sh
source ./tests/drivers/put_bucket_ownership_controls/put_bucket_ownership_controls_rest.sh
source ./tests/util/util_file.sh
source ./tests/util/util_multipart.sh
source ./tests/util/util_multipart_abort.sh
source ./tests/util/util_multipart_before_completion.sh
@@ -36,18 +35,14 @@ export RUN_USERS=true
# abort-multipart-upload
@test "test_abort_multipart_upload" {
local bucket_file="bucket-file"
# shellcheck disable=SC2154
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
run setup_bucket_and_large_file_v3 "$BUCKET_ONE_NAME"
assert_success
read -r bucket_name file_name <<< "$output"
run run_then_abort_multipart_upload "$bucket_name" "$file_name" "$TEST_FILE_FOLDER"/"$file_name" 4
assert_success
run setup_bucket "$BUCKET_ONE_NAME"
assert_success
run run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
assert_success
run object_exists "s3api" "$BUCKET_ONE_NAME" "$bucket_file"
run object_exists "s3api" "$bucket_name" "$file_name"
assert_failure 1
}
@@ -69,7 +64,9 @@ export RUN_USERS=true
# create-multipart-upload
@test "test_create_multipart_upload_properties" {
local bucket_file="bucket-file"
run get_file_name
assert_success
local bucket_file="$output"
local expected_content_type="application/zip"
local expected_meta_key="testkey"

View File

@@ -47,7 +47,6 @@ source ./tests/drivers/get_object_tagging/get_object_tagging.sh
source ./tests/drivers/list_buckets/list_buckets_rest.sh
source ./tests/drivers/put_bucket_ownership_controls/put_bucket_ownership_controls_rest.sh
source ./tests/drivers/file.sh
source ./tests/util/util_file.sh
source ./tests/util/util_lock_config.sh
source ./tests/util/util_object.sh
source ./tests/test_s3api_root_inner.sh

View File

@@ -25,7 +25,6 @@ source ./tests/test_s3api_policy_object.sh
source ./tests/util/util_multipart.sh
source ./tests/util/util_multipart_abort.sh
source ./tests/util/util_multipart_before_completion.sh
source ./tests/util/util_file.sh
source ./tests/util/util_policy.sh
source ./tests/util/util_users.sh
source ./tests/commands/get_bucket_policy.sh

View File

@@ -1,335 +0,0 @@
#!/usr/bin/env bats
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/logger.sh
source ./tests/commands/get_object.sh
# Create one or more small test files inside $TEST_FILE_FOLDER.
# params: one or more filenames
# return: 0 on success, 1 on error
create_test_files() {
  log 6 "create_test_files"
  if [ $# -lt 1 ]; then
    log 2 "'create_test_files' requires file names"
    return 1
  fi
  for name in "$@"; do
    create_test_file "$name" || { log 2 "error creating test file"; return 1; }
  done
  return 0
}
# Create a single test file filled with random bytes.
# params: filename, size in bytes (optional, defaults to 10)
# return: 0 on success, 1 on error
create_test_file() {
  if [[ ( $# -lt 1 ) || ( $# -gt 2 ) ]]; then
    log 2 "'create_test_file' requires filename, size (optional)"
    return 1
  fi
  # start from a clean slate if the file already exists
  if [[ -e "$TEST_FILE_FOLDER/$1" ]]; then
    error=$(rm "$TEST_FILE_FOLDER/$1" 2>&1) || { log 2 "error removing existing file: $error"; return 1; }
  fi
  error=$(touch "$TEST_FILE_FOLDER/$1" 2>&1) || { log 2 "error creating new file: $error"; return 1; }
  file_size=${2:-10}
  # a zero-byte file is already complete after the touch above
  if [ "$file_size" -eq 0 ]; then
    return 0
  fi
  if ! error=$(dd if=/dev/urandom of="$TEST_FILE_FOLDER/$1" bs=1 count="$file_size" 2>&1); then
    log 2 "error adding data to file: $error"
    return 1
  fi
  return 0
}
# Create a file of the given size filled with a single repeated character.
# params: filename, size in bytes, character
# return: 0 on success, 1 on error
create_file_single_char() {
  if [ "$#" -ne 3 ]; then
    log 2 "'create_file_single_char' requires filename, size, char"
    return 1
  fi
  if [[ -e "$TEST_FILE_FOLDER/$1" ]]; then
    if ! error=$(rm "$TEST_FILE_FOLDER/$1" 2>&1); then
      log 2 "error removing existing file: $error"
      return 1
    fi
  fi
  if ! error=$(touch "$TEST_FILE_FOLDER/$1" 2>&1); then
    log 2 "error creating new file: $error"
    return 1
  fi
  # Only tr's stdout goes to the data file; all stderr is collected into
  # $error. Previously '2>&1' applied to the file redirection, so error
  # text was written into the test file, and dd failures went undetected
  # because the pipeline's exit status came from tr — pipefail fixes that.
  if ! error=$( { set -o pipefail; dd if=/dev/zero bs=1 count="$2" | tr '\0' "$3" > "$TEST_FILE_FOLDER/$1"; } 2>&1 ); then
    log 2 "error adding data to file: $error"
    return 1
  fi
  return 0
}
# Create one or more folders under $TEST_FILE_FOLDER.
# params: one or more folder names
# return: 0 on success, 1 on error
create_test_folder() {
  if [ $# -lt 1 ]; then
    log 2 "'create_test_folder' requires folder names"
    return 1
  fi
  for name in "$@"; do
    error=$(mkdir -p "$TEST_FILE_FOLDER"/"$name" 2>&1) || { log 2 "error creating folder $name: $error"; return 1; }
  done
  return 0
}
# Delete one or more test files/folders from $TEST_FILE_FOLDER.
# Individual deletion problems are logged but do not fail the call.
# params: one or more names
# return: 0 on success, 1 on bad usage or unset $TEST_FILE_FOLDER
delete_test_files() {
  if [ $# -lt 1 ]; then
    log 2 "delete test files command missing filenames"
    return 1
  fi
  if [ -z "$TEST_FILE_FOLDER" ]; then
    log 2 "no test file folder defined, not deleting"
    return 1
  fi
  local name rm_result
  for name in "$@"; do
    # reset per file: previously a single failure left the flag set, so
    # every subsequent (successful) deletion was also logged as an error
    rm_result=0
    rm -rf "${TEST_FILE_FOLDER:?}"/"${name:?}" || rm_result=$?
    if [[ $rm_result -ne 0 ]]; then
      log 2 "error deleting file $name"
    fi
  done
  return 0
}
# Print the size of a file in bytes (handles both BSD/macOS and GNU stat).
# params: file path
# return: 0 on success (size written to stdout), 1 on error
get_file_size() {
  if [ $# -ne 1 ]; then
    log 2 "'get_file_size' requires file location"
    return 1
  fi
  local file_size=""
  local stat_args
  if [[ "$OSTYPE" == "darwin"* ]]; then
    stat_args=(-f %z)   # BSD stat
  else
    stat_args=(-c %s)   # GNU stat
  fi
  if ! file_size=$(stat "${stat_args[@]}" "$1" 2>&1); then
    log 2 "error getting file size: $file_size"
    return 1
  fi
  echo "$file_size"
}
# Split a file into numbered pieces ("<file>-0", "<file>-1", ...) for
# multipart-upload tests.
# params: file path, number of pieces
# return: 0 on success, 1 on error
# NOTE: relies on 'split -d' (numeric suffixes), a GNU coreutils option;
# '-a 1' limits the naming scheme to at most 10 pieces.
split_file() {
  if [ $# -ne 2 ]; then
    log 2 "'split_file' requires file name, number of pieces"
    return 1
  fi
  file_size=$(stat -c %s "$1" 2>/dev/null || stat -f %z "$1" 2>/dev/null)
  part_size=$((file_size / $2))
  remainder=$((file_size % $2))
  # round the part size up so that no more than $2 pieces are produced
  if (( remainder != 0 )); then
    part_size=$((part_size + 1))
  fi
  local error
  error=$(split -a 1 -d -b "$part_size" "$1" "$1"- 2>&1) || { log 2 "error splitting file: $error"; return 1; }
  return 0
}
# Compare two files by MD5 digest.
# params: two file paths
# return: 0 if contents match, 1 if they differ, 2 on error
compare_files() {
  if [ $# -ne 2 ]; then
    log 2 "file comparison requires two files"
    return 2
  fi
  log 5 "comparing files '$1' and '$2'"
  if ! file_one_md5=$(compare_files_md5 "$1"); then
    return 2
  fi
  if ! file_two_md5=$(compare_files_md5 "$2"); then
    return 2
  fi
  if [[ "$file_one_md5" == "$file_two_md5" ]]; then
    return 0
  fi
  return 1
}

# Helper for compare_files: print a file's MD5 digest on stdout.
# Previously md5sum failures were masked because the pipeline's exit status
# came from 'cut', and the '2>&1' was attached to cut so the real error text
# was never captured.
# params: file path
# return: 0 on success, 1 on error
compare_files_md5() {
  local sum
  if [[ $(uname) == "Darwin" ]]; then
    if ! sum=$(md5 -q "$1" 2>&1); then
      log 2 "error getting md5 for '$1': $sum"
      return 1
    fi
  else
    if ! sum=$(md5sum "$1" 2>&1); then
      log 2 "error getting md5 for '$1': $sum"
      return 1
    fi
    sum=${sum%% *}   # md5sum prints "<digest>  <file>"; keep the digest only
  fi
  echo "$sum"
}
# Generate a 160MB file of random data in $TEST_FILE_FOLDER.
# params: file name
# return: 0 on success, 1 on error
create_large_file() {
  log 6 "create_large_file"
  check_param_count_v2 "file name" 1 $# || return 1
  create_large_file_with_size "$1" 160 || { log 2 "error creating 160MB file"; return 1; }
  return 0
}
# Generate a 160MB random-data file using an auto-generated name.
# outputs: the generated file name on stdout
# return: 0 on success, 1 on error
create_large_file_v2() {
  if ! file_name=$(get_file_name 2>&1); then
    log 2 "error getting file name: $file_name"
    return 1
  fi
  create_large_file_with_size "$file_name" 160 || { log 2 "error creating 160MB file with name '$file_name'"; return 1; }
  echo "$file_name"
  return 0
}
# Generate a file of random data with the requested size.
# params: file name, size in MB
# return: 0 on success, 1 on error
create_large_file_with_size() {
  check_param_count_v2 "file name, size in MB" 2 $# || return 1
  filesize=$(($2*1024*1024))
  # write in 1KB blocks (rather than byte-at-a-time) for speed
  error=$(dd if=/dev/urandom of="$TEST_FILE_FOLDER"/"$1" bs=1024 count=$((filesize/1024)) 2>&1) || { log 2 "error adding data to large file: $error"; return 1; }
  return 0
}
# Create a random-data file of the given size and split it into pieces.
# params: file name, size in MB, number of pieces
# return: 0 on success, 1 on error
create_and_split_large_file() {
  check_param_count_v2 "file name, size in MB, pieces" 3 $# || return 1
  create_large_file_with_size "$1" "$2" || { log 2 "error creating large file"; return 1; }
  split_file "$TEST_FILE_FOLDER/$1" "$3" || { log 2 "error splitting file"; return 1; }
}
# Create the given number of empty files named file_1 ... file_N.
# params: number of files
# return: 0 on success, 1 on error
create_test_file_count() {
  if [ $# -ne 1 ]; then
    log 2 "'create_test_file_count' requires number of files"
    return 1
  fi
  local i
  for ((i=1;i<=$1;i++)); do
    if ! error=$(touch "$TEST_FILE_FOLDER/file_$i" 2>&1); then
      log 2 "error creating file_$i: $error"
      return 1
    fi
  done
  # shellcheck disable=SC2153
  if [[ $LOG_LEVEL -ge 5 ]]; then
    # the glob must stay outside the quotes: quoting it made ls look for a
    # file literally named 'file_*', which always failed
    ls_result=$(ls "$TEST_FILE_FOLDER"/file_*)
    log 5 "$ls_result"
  fi
  return 0
}
# Download an object using the given credentials and verify that it matches
# a local reference file.
# params: original file, bucket, key, destination, username, password,
#         chunk size (optional)
# return: 0 on success, 1 on error
download_and_compare_file_with_user() {
  if ! check_param_count_gt "original file, bucket, key, destination, username, password, chunk size (optional)" 6 $#; then
    return 1
  fi
  download_file_with_user "$5" "$6" "$2" "$3" "$4" "$7" || { log 2 "error downloading file"; return 1; }
  compare_files "$1" "$4" || { log 2 "files don't match"; return 1; }
  return 0
}
# Download an object as the default user and verify that it matches a local
# reference file.
# params: original file, bucket, key, destination, chunk size (optional)
# return: 0 on success, 1 on error
download_and_compare_file() {
  log 6 "download_and_compare_file"
  if ! check_param_count_gt "original file, bucket, key, destination, chunk size (optional)" 4 $#; then
    return 1
  fi
  download_and_compare_file_with_user "$1" "$2" "$3" "$4" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$5" || { log 2 "error downloading and comparing file with user"; return 1; }
  return 0
}
# Copy a local file.
# params: src, dst
# return: 0 on success, 1 on error
copy_file_locally() {
  if [ $# -ne 2 ]; then
    log 2 "'copy_file_locally' requires src, dst"
    return 1
  fi
  error=$(cp "$1" "$2" 2>&1) || { log 2 "error copying file: $error"; return 1; }
  return 0
}
# Move (rename) a local file.
# params: src, dst
# return: 0 on success, 1 on error
move_file_locally() {
  if [ $# -ne 2 ]; then
    log 2 "'move_file_locally' requires src, dst"
    return 1
  fi
  error=$(mv "$1" "$2" 2>&1) || { log 2 "error moving file: $error"; return 1; }
  return 0
}

View File

@@ -48,14 +48,15 @@ multipart_upload_from_bucket() {
return 1
fi
if ! split_file "$3" "$4"; then
if ! segments=$(split_file "$3" "$4" 2>&1); then
log 2 "error splitting file"
return 1
fi
read -r -a segment_array <<< "$segments"
for ((i=0;i<$4;i++)) {
log 5 "key: $3"
if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
if ! put_object "s3api" "${segment_array[$i]}" "$1" "$2-$i"; then
log 2 "error copying object"
return 1
fi
@@ -77,13 +78,15 @@ split_and_put_file() {
if ! check_param_count "split_and_put_file" "bucket, key, copy source, part count" 4 $#; then
return 1
fi
if ! split_file "$3" "$4"; then
log 2 "error splitting file"
if ! file_parts=$(split_file "$3" "$4" 2>&1); then
log 2 "error splitting file: $file_parts"
return 1
fi
read -r -a part_array <<< "$file_parts"
for ((i=0;i<$4;i++)) {
log 5 "key: $2, file info: $(ls -l "$3"-"$i")"
if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
log 5 "key: $2, file info: $(ls -l "${part_array[$i]}")"
if ! put_object "s3api" "${part_array[$i]}" "$1" "$2-$i"; then
log 2 "error copying object"
return 1
fi
@@ -95,13 +98,15 @@ multipart_upload_from_bucket_range() {
if ! check_param_count "multipart_upload_from_bucket_range" "bucket, copy source, key, part count, range" 5 $#; then
return 1
fi
if ! split_file "$3" "$4"; then
if ! segments=$(split_file "$3" "$4" 2>&1); then
log 2 "error splitting file"
return 1
fi
read -r -a segment_array <<< "$segments"
for ((i=0;i<$4;i++)) {
log 5 "key: $3, file info: $(ls -l "$3"-"$i")"
if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
if ! put_object "s3api" "${segment_array[$i]}" "$1" "$2-$i"; then
log 2 "error copying object"
return 1
fi

View File

@@ -233,15 +233,15 @@ multipart_upload_before_completion() {
# params: bucket, key, file to split and upload, number of file parts to upload
# return: 0 for success, 1 for failure
multipart_upload_before_completion_with_user() {
if [ $# -ne 6 ]; then
log 2 "multipart upload pre-completion command missing bucket, key, file, part count, username, password"
if ! check_param_count_v2 "bucket, key, file, part count, username, password" 6 $#; then
return 1
fi
if ! split_file "$3" "$4"; then
log 2 "error splitting file"
if ! segments=$(split_file "$3" "$4" 2>&1); then
log 2 "error splitting file: $segments"
return 1
fi
read -r -a segment_array <<< "$segments"
if ! create_multipart_upload_s3api_with_user "$1" "$2" "$5" "$6"; then
log 2 "error creating multpart upload"
@@ -251,7 +251,7 @@ multipart_upload_before_completion_with_user() {
parts="["
for ((i = 1; i <= $4; i++)); do
# shellcheck disable=SC2154
if ! upload_part_with_user "$1" "$2" "$upload_id" "$3" "$i" "$5" "$6"; then
if ! upload_part_with_user "$1" "$2" "$upload_id" "${segment_array[$((i-1))]}" "$i" "$5" "$6"; then
log 2 "error uploading part $i"
return 1
fi
@@ -271,10 +271,11 @@ multipart_upload_before_completion_with_params() {
return 1
fi
if ! result=$(split_file "$3" "$4" 2>&1); then
if ! segments=$(split_file "$3" "$4" 2>&1); then
log 2 "error splitting file: $result"
return 1
fi
read -r -a segment_array <<< "$segments"
if ! create_multipart_upload_s3api_params "$1" "$2" "$5" "$6" "$7" "$8" "$9" "${10}"; then
log 2 "error creating multpart upload"
@@ -283,7 +284,7 @@ multipart_upload_before_completion_with_params() {
parts="["
for ((i = 1; i <= $4; i++)); do
if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
if ! upload_part "$1" "$2" "$upload_id" "${segment_array[$((i-1))]}" "$i"; then
log 2 "error uploading part $i"
return 1
fi
@@ -303,10 +304,11 @@ multipart_upload_before_completion_custom() {
return 1
fi
if ! result=$(split_file "$3" "$4" 2>&1); then
if ! segments=$(split_file "$3" "$4" 2>&1); then
log 2 "error splitting file"
return 1
fi
read -r -a segment_array <<< "$segments"
# shellcheck disable=SC2086 disable=SC2048
if ! create_multipart_upload_custom "$1" "$2" ${*:5}; then
@@ -317,7 +319,7 @@ multipart_upload_before_completion_custom() {
parts="["
for ((i = 1; i <= $4; i++)); do
if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
if ! upload_part "$1" "$2" "$upload_id" "${segment_array[$i]}" "$i"; then
log 2 "error uploading part $i"
return 1
fi

View File

@@ -28,6 +28,7 @@ source ./tests/commands/delete_object.sh
source ./tests/commands/get_bucket_acl.sh
source ./tests/commands/get_bucket_ownership_controls.sh
source ./tests/commands/get_bucket_policy.sh
source ./tests/commands/get_object.sh
source ./tests/commands/get_object_legal_hold.sh
source ./tests/commands/get_object_lock_configuration.sh
source ./tests/commands/head_bucket.sh

View File

@@ -14,8 +14,6 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/util/util_file.sh
start_versity_process() {
if ! check_param_count "start_versity_process" "versity app index" 1 $#; then
exit 1