Merge pull request #600 from versity/test_cmdline_shellcheck
Test cmdline shellcheck
@@ -12,6 +12,7 @@ copy_object() {
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api copy-object --copy-source "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
log 5 "s3cmd ${S3CMD_OPTS[*]} --no-check-certificate cp s3://$2 s3://$3/$4"
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate cp "s3://$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure cp "$MC_ALIAS/$2" "$MC_ALIAS/$3/$4" 2>&1) || exit_code=$?

@@ -6,11 +6,11 @@ delete_bucket_policy() {
return 1
fi
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api delete-bucket-policy --bucket "$2") || delete_result=$?
error=$(aws --no-verify-ssl s3api delete-bucket-policy --bucket "$2" 2>&1) || delete_result=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate delpolicy "s3://$2") || delete_result=$?
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate delpolicy "s3://$2" 2>&1) || delete_result=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure anonymous set none "$MC_ALIAS/$2") || delete_result=$?
error=$(mc --insecure anonymous set none "$MC_ALIAS/$2" 2>&1) || delete_result=$?
else
log 2 "command 'get bucket policy' not implemented for '$1'"
return 1

@@ -6,22 +6,22 @@ delete_object() {
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 rm "s3://$2/$3" 2>&1) || exit_code=$?
delete_object_error=$(aws --no-verify-ssl s3 rm "s3://$2/$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api delete-object --bucket "$2" --key "$3" 2>&1) || exit_code=$?
delete_object_error=$(aws --no-verify-ssl s3api delete-object --bucket "$2" --key "$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm "s3://$2/$3" 2>&1) || exit_code=$?
delete_object_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm "s3://$2/$3" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure rm "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
delete_object_error=$(mc --insecure rm "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
else
log 2 "invalid command type $1"
return 1
fi
log 5 "delete object exit code: $exit_code"
if [ $exit_code -ne 0 ]; then
log 2 "error deleting object: $error"
log 2 "error deleting object: $delete_object_error"
export delete_object_error
return 1
fi
return 0
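As an illustrative aside (not part of this change): a minimal sketch of how a caller can branch on the exported delete_object_error after a failed delete, which is the pattern the recursive bucket cleanup in tests/util.sh relies on further below. The bucket and key names here are placeholders.

# illustrative only; bucket and key names are placeholders
if ! delete_object 's3api' "example-bucket" "example-key"; then
  # delete_object exports delete_object_error on failure
  if [[ $delete_object_error == *"WORM"* ]]; then
    log 5 "object is WORM protected; lift the legal hold and retry"
  else
    log 2 "delete failed: $delete_object_error"
  fi
fi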
@@ -34,18 +34,18 @@ delete_object_with_user() {
fi
local exit_code=0
if [[ $1 == 's3' ]]; then
error=$(AWS_ACCESS_KEY_ID="$4" AWS_SECRET_ACCESS_KEY="$5" aws --no-verify-ssl s3 rm "s3://$2/$3" 2>&1) || exit_code=$?
delete_object_error=$(AWS_ACCESS_KEY_ID="$4" AWS_SECRET_ACCESS_KEY="$5" aws --no-verify-ssl s3 rm "s3://$2/$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(AWS_ACCESS_KEY_ID="$4" AWS_SECRET_ACCESS_KEY="$5" aws --no-verify-ssl s3api delete-object --bucket "$2" --key "$3" 2>&1) || exit_code=$?
delete_object_error=$(AWS_ACCESS_KEY_ID="$4" AWS_SECRET_ACCESS_KEY="$5" aws --no-verify-ssl s3api delete-object --bucket "$2" --key "$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm --access_key="$4" --secret_key="$5" "s3://$2/$3" 2>&1) || exit_code=$?
delete_object_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm --access_key="$4" --secret_key="$5" "s3://$2/$3" 2>&1) || exit_code=$?
else
log 2 "command 'delete object with user' not implemented for '$1'"
return 1
fi
if [ $exit_code -ne 0 ]; then
log 2 "error deleting object: $error"
export error
log 2 "error deleting object: $delete_object_error"
export delete_object_error
return 1
fi
return 0

@@ -34,15 +34,15 @@ put_object_with_user() {
fi
local exit_code=0
if [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(AWS_ACCESS_KEY_ID="$5" AWS_SECRET_ACCESS_KEY="$6" aws --no-verify-ssl s3api put-object --body "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
put_object_error=$(AWS_ACCESS_KEY_ID="$5" AWS_SECRET_ACCESS_KEY="$6" aws --no-verify-ssl s3api put-object --body "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
else
log 2 "'put object with user' command not implemented for '$1'"
return 1
fi
log 5 "put object exit code: $exit_code"
if [ $exit_code -ne 0 ]; then
log 2 "error putting object into bucket: $error"
export error
log 2 "error putting object into bucket: $put_object_error"
export put_object_error
return 1
fi
return 0
tests/commands/upload_part_copy.sh | 34 (new file)
@@ -0,0 +1,34 @@
#!/usr/bin/env bash

upload_part_copy() {
if [ $# -ne 5 ]; then
echo "upload multipart part copy function must have bucket, key, upload ID, file name, part number"
return 1
fi
local etag_json
echo "$1 $2 $3 $4 $5"
etag_json=$(aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
if [[ $uploaded -ne 0 ]]; then
echo "Error uploading part $5: $etag_json"
return 1
fi
etag=$(echo "$etag_json" | jq '.CopyPartResult.ETag')
export etag
}

upload_part_copy_with_range() {
if [ $# -ne 6 ]; then
log 2 "upload multipart part copy function must have bucket, key, upload ID, file name, part number, range"
return 1
fi
local etag_json
log 5 "bucket: $1, key: $2, upload ID: $3, file name: $4, range: $5"
etag_json=$(aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))" --copy-source-range "$6" 2>&1) || local uploaded=$?
if [[ $uploaded -ne 0 ]]; then
log 2 "Error uploading part $5: $etag_json"
export upload_part_copy_error=$etag_json
return 1
fi
etag=$(echo "$etag_json" | grep -v "InsecureRequestWarning" | jq '.CopyPartResult.ETag')
export etag
}
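As an illustrative aside (not part of this change): a hypothetical sketch of how a test might call the new helper. It assumes it runs inside a bats test where fail is available and that create_multipart_upload exports upload_id, as callers elsewhere in this change do; the bucket and object names are placeholders. Per the helper's "$1/$4-$((part-1))" convention, part 1 below copies its byte range from an object named file-0.

# illustrative only; bucket/object names are placeholders
create_multipart_upload "example-bucket" "file-copy" || fail "error creating multipart upload"
if ! upload_part_copy_with_range "example-bucket" "file-copy" "$upload_id" "file" 1 "bytes=0-8388607"; then
  fail "part copy failed: $upload_part_copy_error"
fi
echo "part 1 ETag: $etag"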
@@ -7,6 +7,7 @@ log() {
echo "log function requires level, message"
return 1
fi
# shellcheck disable=SC2153
if [[ $1 -gt $LOG_LEVEL ]]; then
return 0
fi
@@ -18,9 +19,37 @@ log() {
4) log_level="INFO";;
5) log_level="DEBUG";;
6) log_level="TRACE";;
*) echo "invalid log level $1"; return 1
esac
if [[ "$2" == *"secret_key"* ]]; then
log_mask $log_level "$2"
return 0
fi
echo "$log_level $2"
if [[ -n "$TEST_LOG_FILE" ]]; then
echo "$2" >> "$TEST_LOG_FILE"
echo "$log_level $2" >> "$TEST_LOG_FILE"
fi
}
}

log_mask() {
if [[ $# -ne 2 ]]; then
echo "mask and log requires level, string"
return 1
fi
local masked_args=() # Initialize an array to hold the masked arguments

IFS=' ' read -r -a array <<< "$2"

for arg in "${array[@]}"; do
if [[ "$arg" == --secret_key=* ]]; then
masked_args+=("--secret_key=********")
else
masked_args+=("$arg")
fi
done

echo "$log_level ${masked_args[*]}"
if [[ -n "$TEST_LOG_FILE" ]]; then
echo "$log_level ${masked_args[*]}" >> "$TEST_LOG_FILE"
fi
}
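As an illustrative aside (not part of this change): assuming LOG_LEVEL is at least 5, a message containing a secret_key token is routed through log_mask, which replaces the value before echoing and before appending to $TEST_LOG_FILE.

# illustrative only
log 5 "s3cmd --access_key=tester --secret_key=hunter2 ls s3://example-bucket"
# expected output (level 5 maps to DEBUG):
# DEBUG s3cmd --access_key=tester --secret_key=******** ls s3://example-bucket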
@@ -5,7 +5,7 @@ if [[ -z "$VERSITYGW_TEST_ENV" ]]; then
exit 1
fi

# shellcheck source=./.env.default
# shellcheck source=./tests/.env.default
source "$VERSITYGW_TEST_ENV"
export RECREATE_BUCKETS

@@ -8,8 +8,7 @@ check_for_alias() {
return 2
fi
while IFS= read -r line; do
error=$(echo "$line" | grep -w "$MC_ALIAS ")
if [[ $? -eq 0 ]]; then
if echo "$line" | grep -w "$MC_ALIAS "; then
return 0
fi
done <<< "$aliases"
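The change above is the usual fix for shellcheck SC2181: test the command directly rather than running it and then inspecting $?. A generic before/after sketch with a placeholder command:

# before: SC2181 flags the indirect exit-status check
placeholder_command
if [[ $? -eq 0 ]]; then
  echo "ok"
fi

# after: test the exit status directly
if placeholder_command; then
  echo "ok"
fi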
@@ -171,6 +171,59 @@ source ./tests/commands/select_object_content.sh
test_common_set_get_delete_bucket_tags "aws"
}

# delete-object - tested with bucket cleanup before or after tests

# delete-object-tagging
@test "test_delete_object_tagging" {
test_common_delete_object_tagging "aws"
}

# delete-objects
@test "test_delete_objects" {
local object_one="test-file-one"
local object_two="test-file-two"

create_test_files "$object_one" "$object_two" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating test files"
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result_one=$?
[[ $result_one -eq 0 ]] || fail "Error creating bucket"

put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local result_two=$?
[[ $result_two -eq 0 ]] || fail "Error adding object one"
put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local result_three=$?
[[ $result_three -eq 0 ]] || fail "Error adding object two"

error=$(aws --no-verify-ssl s3api delete-objects --bucket "$BUCKET_ONE_NAME" --delete '{
"Objects": [
{"Key": "test-file-one"},
{"Key": "test-file-two"}
]
}') || local result=$?
[[ $result -eq 0 ]] || fail "Error deleting objects: $error"

object_exists "aws" "$BUCKET_ONE_NAME" "$object_one" || local exists_one=$?
[[ $exists_one -eq 1 ]] || fail "Object one not deleted"
object_exists "aws" "$BUCKET_ONE_NAME" "$object_two" || local exists_two=$?
[[ $exists_two -eq 1 ]] || fail "Object two not deleted"

delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$object_one" "$object_two"
}

# get-bucket-acl
@test "test_get_bucket_acl" {
setup_bucket "aws" "$BUCKET_ONE_NAME" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating bucket"

get_bucket_acl "s3api" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Error retrieving acl"

id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq '.Owner.ID')
[[ $id == '"'"$AWS_ACCESS_KEY_ID"'"' ]] || fail "Acl mismatch"

delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
}

#@test "test_get_object_invalid_range" {
# bucket_file="bucket_file"
#
@@ -211,8 +264,8 @@ source ./tests/commands/select_object_content.sh
[[ $setup_result_two -eq 0 ]] || fail "Bucket two setup error"
put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
error=$(aws --no-verify-ssl s3api copy-object --copy-source "$BUCKET_ONE_NAME/$bucket_file" --key "$bucket_file" --bucket "$BUCKET_TWO_NAME" 2>&1) || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Error copying file: $error"
copy_error=$(aws --no-verify-ssl s3api copy-object --copy-source "$BUCKET_ONE_NAME/$bucket_file" --key "$bucket_file" --bucket "$BUCKET_TWO_NAME" 2>&1) || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Error copying file: $copy_error"
copy_file "s3://$BUCKET_TWO_NAME/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
compare_files "$test_file_folder/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local compare_result=$?
@@ -253,19 +306,6 @@ source ./tests/commands/select_object_content.sh
test_common_list_objects "aws"
}

# test ability to retrieve bucket ACLs
@test "test_get_bucket_acl" {
setup_bucket "aws" "$BUCKET_ONE_NAME" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating bucket"

get_bucket_acl "s3api" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Error retrieving acl"

id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq '.Owner.ID')
[[ $id == '"'"$AWS_ACCESS_KEY_ID"'"' ]] || fail "Acl mismatch"

delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
}

@test "test_get_object_attributes" {
bucket_file="bucket_file"
@@ -316,11 +356,13 @@ source ./tests/commands/select_object_content.sh
echo "fdkljafajkfs" > "$test_file_folder/$bucket_file"
put_object_with_user "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || local put_result=$?
[[ $put_result -ne 0 ]] || fail "able to overwrite object with hold"
[[ $error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $error"
# shellcheck disable=SC2154
[[ $put_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $put_object_error"

delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || local delete_result=$?
[[ $delete_result -ne 0 ]] || fail "able to delete object with hold"
[[ $error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $error"
# shellcheck disable=SC2154
[[ $delete_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $delete_object_error"
put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF" || fail "error removing legal hold on object"
delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || fail "error deleting object after removing legal hold"
@@ -411,37 +453,6 @@ legal_hold_retention_setup() {
# delete_bucket_or_contents "$BUCKET_ONE_NAME"
#}

# test ability to delete multiple objects from bucket
@test "test_delete_objects" {
local object_one="test-file-one"
local object_two="test-file-two"

create_test_files "$object_one" "$object_two" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating test files"
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result_one=$?
[[ $result_one -eq 0 ]] || fail "Error creating bucket"

put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local result_two=$?
[[ $result_two -eq 0 ]] || fail "Error adding object one"
put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local result_three=$?
[[ $result_three -eq 0 ]] || fail "Error adding object two"

error=$(aws --no-verify-ssl s3api delete-objects --bucket "$BUCKET_ONE_NAME" --delete '{
"Objects": [
{"Key": "test-file-one"},
{"Key": "test-file-two"}
]
}') || local result=$?
[[ $result -eq 0 ]] || fail "Error deleting objects: $error"

object_exists "aws" "$BUCKET_ONE_NAME" "$object_one" || local exists_one=$?
[[ $exists_one -eq 1 ]] || fail "Object one not deleted"
object_exists "aws" "$BUCKET_ONE_NAME" "$object_two" || local exists_two=$?
[[ $exists_two -eq 1 ]] || fail "Object two not deleted"

delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$object_one" "$object_two"
}

#@test "test_select_object_content" {
# bucket_file="bucket_file"
@@ -558,6 +569,7 @@ legal_hold_retention_setup() {
echo "error: blank etag"
return 1
fi
# shellcheck disable=SC2004
parts_map[$part_number]=$etag
done
[[ ${#parts_map[@]} -ne 0 ]] || fail "error loading multipart upload parts to check"
@@ -593,8 +605,7 @@ legal_hold_retention_setup() {
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"

list_multipart_uploads "$BUCKET_ONE_NAME" "$test_file_folder"/"$bucket_file_one" "$test_file_folder"/"$bucket_file_two"
[[ $? -eq 0 ]] || fail "failed to list multipart uploads"
list_multipart_uploads "$BUCKET_ONE_NAME" "$test_file_folder"/"$bucket_file_one" "$test_file_folder"/"$bucket_file_two" || fail "failed to list multipart uploads"

local key_one
local key_two
@@ -635,6 +646,23 @@ legal_hold_retention_setup() {
delete_test_files $bucket_file
}

#@test "test_multipart_upload_from_bucket_range" {
# local bucket_file="bucket-file"
#
# create_large_file "$bucket_file" || error creating file "$bucket_file"
# setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
#
# multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 "bytes=0-1000000000" || local upload_result=$?
# [[ $upload_result -eq 1 ]] || fail "multipart upload with overly large range should have failed"
# [[ $upload_part_copy_error == *"Range specified is not valid"* ]] || fail "unexpected error: $upload_part_copy_error"
#
# multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 "bytes=0-16000000" || local upload_two_result=$?
# [[ $upload_two_result -eq 0 ]] || fail "range should be valid"
#
# delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
# delete_test_files $bucket_file
#}

@test "test-presigned-url-utf8-chars" {
test_common_presigned_url_utf8_chars "aws"
}
@@ -691,6 +719,11 @@ legal_hold_retention_setup() {
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
}

@test "test_head_bucket_invalid_name" {
head_bucket "aws" "" || local head_result=$?
[[ $head_result -ne 0 ]] || fail "able to get bucket info for invalid name"
}

@test "test_head_bucket_doesnt_exist" {
setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
@@ -726,9 +759,6 @@ legal_hold_retention_setup() {
[[ $value == "\"$test_value\"" ]] || fail "values doesn't match (expected $value, actual \"$test_value\")"
}

@test "test_delete_object_tagging" {
test_common_delete_object_tagging "aws"
}

@test "test_get_bucket_location" {
test_common_get_bucket_location "aws"
@@ -62,32 +62,25 @@ test_common_copy_object() {
fail "copy object test requires command type"
fi
local object_name="test-object"
create_test_files "$object_name" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Error creating test file"
create_test_files "$object_name" || fail "error creating test file"
echo "test data" > "$test_file_folder/$object_name"

setup_bucket "$1" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket one"
setup_bucket "$1" "$BUCKET_TWO_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket two"
setup_bucket "$1" "$BUCKET_ONE_NAME" || fail "error setting up bucket one"
setup_bucket "$1" "$BUCKET_TWO_NAME" || fail "error setting up bucket two"

if [[ $1 == 's3' ]]; then
copy_object "$1" "$test_file_folder/$object_name" "$BUCKET_ONE_NAME" "$object_name" || local put_result=$?
copy_object "$1" "$test_file_folder/$object_name" "$BUCKET_ONE_NAME" "$object_name" || fail "failed to copy object to bucket one"
else
put_object "$1" "$test_file_folder/$object_name" "$BUCKET_ONE_NAME" "$object_name" || local put_result=$?
put_object "$1" "$test_file_folder/$object_name" "$BUCKET_ONE_NAME" "$object_name" || fail "failed to put object to bucket one"
fi
[[ $put_result -eq 0 ]] || fail "Failed to add object to bucket"
if [[ $1 == 's3' ]]; then
copy_object "$1" "s3://$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" || local copy_result_one=$?
copy_object "$1" "s3://$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" || fail "object not copied to bucket two"
else
copy_object "$1" "$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" || local copy_result_one=$?
copy_object "$1" "$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" || fail "object not copied to bucket two"
fi
[[ $copy_result_one -eq 0 ]] || fail "Object not added to bucket"
get_object "$1" "$BUCKET_TWO_NAME" "$object_name" "$test_file_folder/$object_name-copy" || local get_result=$?
[[ $get_result -eq 0 ]] || fail "failed to retrieve object"
get_object "$1" "$BUCKET_TWO_NAME" "$object_name" "$test_file_folder/$object_name-copy" || fail "failed to retrieve object"

compare_files "$test_file_folder/$object_name" "$test_file_folder/$object_name-copy" || local compare_result=$?
[[ $compare_result -eq 0 ]] || fail "files not the same"
compare_files "$test_file_folder/$object_name" "$test_file_folder/$object_name-copy" || fail "files not the same"

delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
delete_bucket_or_contents "$1" "$BUCKET_TWO_NAME"
@@ -274,18 +267,14 @@ test_common_set_get_delete_bucket_tags() {
local key="test_key"
local value="test_value"

setup_bucket "$1" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
setup_bucket "$1" "$BUCKET_ONE_NAME" || fail "Failed to create bucket '$BUCKET_ONE_NAME'"

get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || local get_result=$?
[[ $get_result -eq 0 ]] || fail "Error getting bucket tags first time"
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags first time"

check_bucket_tags_empty "$1" "$BUCKET_ONE_NAME" || local check_result=$?
[[ $check_result -eq 0 ]] || fail "error checking if bucket tags are empty"
check_bucket_tags_empty "$1" "$BUCKET_ONE_NAME" || fail "error checking if bucket tags are empty"

put_bucket_tag "$1" "$BUCKET_ONE_NAME" $key $value
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || local get_result_two=$?
[[ $get_result_two -eq 0 ]] || fail "Error getting bucket tags second time"
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags second time"

local tag_set_key
local tag_set_value
@@ -302,11 +291,9 @@ test_common_set_get_delete_bucket_tags() {
fi
delete_bucket_tags "$1" "$BUCKET_ONE_NAME"

get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || local get_result=$?
[[ $get_result -eq 0 ]] || fail "Error getting bucket tags third time"
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags third time"

check_bucket_tags_empty "$1" "$BUCKET_ONE_NAME" || local check_result=$?
[[ $check_result -eq 0 ]] || fail "error checking if bucket tags are empty"
check_bucket_tags_empty "$1" "$BUCKET_ONE_NAME" || fail "error checking if bucket tags are empty"
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
}
@@ -36,6 +36,15 @@ export RUN_MC=true
test_common_set_get_delete_bucket_tags "mc"
}

# delete-object - put-object tests

# delete-objects - test setup/teardown

# delete-object-tagging
@test "test_delete_object_tagging" {
test_common_delete_object_tagging "mc"
}

@test "test_put_object-with-data-mc" {
test_common_put_object_with_data "mc"
}
@@ -94,9 +103,6 @@ export RUN_MC=true
delete_bucket_or_contents "mc" "$BUCKET_ONE_NAME"
}

@test "test_delete_object_tagging" {
test_common_delete_object_tagging "mc"
}

@test "test_get_bucket_location" {
test_common_get_bucket_location "mc"
@@ -19,6 +19,10 @@ source ./tests/test_common.sh

# delete-bucket - test_create_delete_bucket

# delete-object - test_put_object

# delete-objects - tested with recursive bucket delete

@test "test_put_object" {
test_common_put_object_no_data "s3"
}
@@ -17,20 +17,28 @@ export RUN_S3CMD=true
}

# copy-object
@test "test_copy_object_with_data" {
test_common_put_object_with_data "s3cmd"
}

# copy-object
@test "test_copy_object_no_data" {
test_common_put_object_no_data "s3cmd"
}
#@test "test_copy_object" {
# test_common_copy_object "s3cmd"
#}

# create-bucket
@test "test_create_delete_bucket" {
test_common_create_delete_bucket "s3cmd"
}

@test "test_create_bucket_invalid_name_s3cmd" {
if [[ $RECREATE_BUCKETS != "true" ]]; then
return
fi

create_bucket_invalid_name "s3cmd" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Invalid name test failed"

[[ "$bucket_create_error" == *"just the bucket name"* ]] || fail "unexpected error: $bucket_create_error"

delete_bucket_or_contents "s3cmd" "$BUCKET_ONE_NAME"
}

# delete-bucket - test_create_delete_bucket

# delete-bucket-policy
@@ -38,6 +46,12 @@ export RUN_S3CMD=true
test_common_get_put_delete_bucket_policy "s3cmd"
}

# delete-object - test_put_object

# delete-objects - tested with cleanup before or after tests

# get-bucket-acl - test_put_bucket_acl

#@test "test_put_bucket_acl" {
# test_common_put_bucket_acl "s3cmd"
#}
@@ -59,18 +73,6 @@ export RUN_S3CMD=true
test_common_list_objects_file_count "s3cmd"
}

@test "test_create_bucket_invalid_name_s3cmd" {
if [[ $RECREATE_BUCKETS != "true" ]]; then
return
fi

create_bucket_invalid_name "s3cmd" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Invalid name test failed"

[[ "$bucket_create_error" == *"just the bucket name"* ]] || fail "unexpected error: $bucket_create_error"

delete_bucket_or_contents "s3cmd" "$BUCKET_ONE_NAME"
}

@test "test_get_bucket_info_s3cmd" {
setup_bucket "s3cmd" "$BUCKET_ONE_NAME" || local setup_result=$?
@@ -80,6 +82,15 @@ export RUN_S3CMD=true
delete_bucket_or_contents "s3cmd" "$BUCKET_ONE_NAME"
}

# put-object
@test "test_put_object_with_data" {
test_common_put_object_with_data "s3cmd"
}

@test "test_put_object_no_data" {
test_common_put_object_no_data "s3cmd"
}

@test "test_get_bucket_info_doesnt_exist_s3cmd" {
setup_bucket "s3cmd" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
@@ -1,6 +1,7 @@
#!/usr/bin/env bats

source ./tests/test_user_common.sh
source ./tests/util_users.sh

@test "test_admin_user_aws" {
test_admin_user "aws"
@@ -10,6 +11,12 @@ source ./tests/test_user_common.sh
test_create_user_already_exists "aws"
}

@test "test_delete_user_no_access_key" {
if delete_user ""; then
fail "delete user with empty access key succeeded"
fi
}

@test "test_user_user_aws" {
test_user_user "aws"
}
tests/util.sh | 131
@@ -14,6 +14,7 @@ source ./tests/commands/get_object_tagging.sh
source ./tests/commands/head_bucket.sh
source ./tests/commands/head_object.sh
source ./tests/commands/list_objects.sh
source ./tests/commands/upload_part_copy.sh

# recursively delete an AWS bucket
# param: bucket name
@@ -29,7 +30,7 @@ delete_bucket_recursive() {
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 rb s3://"$2" --force 2>&1) || exit_code="$?"
elif [[ $1 == "aws" ]] || [[ $1 == 's3api' ]]; then
delete_bucket_recursive_s3api "$2" 2>&1 || exit_code="$?"
delete_bucket_recursive_s3api "$2" || exit_code="$?"
elif [[ $1 == "s3cmd" ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rb s3://"$2" --recursive 2>&1) || exit_code="$?"
elif [[ $1 == "mc" ]]; then
@@ -55,16 +56,26 @@ delete_bucket_recursive_s3api() {
log 2 "delete bucket recursive command for s3api requires bucket name"
return 1
fi
list_objects 's3api' "$1" || local list_result=$?
if [[ $list_result -ne 0 ]]; then
if ! list_objects 's3api' "$1"; then
log 2 "error listing objects"
return 1
fi
# shellcheck disable=SC2154
for object in "${object_array[@]}"; do
delete_object 's3api' "$1" "$object" || local delete_object_result=$?
if [[ $delete_object_result -ne 0 ]]; then
if ! delete_object 's3api' "$1" "$object"; then
log 2 "error deleting object $object"
if [[ $delete_object_error == *"WORM"* ]]; then
log 5 "WORM protection found"
if ! put_object_legal_hold "$1" "$object" "OFF"; then
log 2 "error removing object legal hold"
return 1
fi
if ! delete_object 's3api' "$1" "$object"; then
log 2 "error deleting object after legal hold removal"
return 1
fi
continue
fi
return 1
fi
done
@@ -220,34 +231,6 @@ object_exists() {
return 2
fi
return $head_result

return 0
local exit_code=0
local error=""
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 ls "s3://$2/$3" 2>&1) || exit_code="$?"
elif [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
error=$(aws --no-verify-ssl s3api head-object --bucket "$2" --prefix "$3" 2>&1) || exit_code="$?"
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2/$3" 2>&1) || exit_code="$?"
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure ls "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 2
fi
if [ $exit_code -ne 0 ]; then
if [[ "$error" == "" ]] || [[ $error == *"InsecureRequestWarning"* ]]; then
return 1
else
echo "error checking if object exists: $error"
return 2
fi
# s3cmd, mc return empty when object doesn't exist, rather than error
elif [[ ( $1 == 's3cmd' ) || ( $1 == 'mc' ) ]] && [[ $error == "" ]]; then
return 1
fi
return 0
}

put_object_with_metadata() {
@@ -289,9 +272,9 @@ get_object_metadata() {
echo "error copying object to bucket: $error"
return 1
fi
log 5 "$metadata_struct"
log 5 "raw metadata: $metadata_struct"
metadata=$(echo "$metadata_struct" | jq '.Metadata')
echo $metadata
log 5 "metadata: $metadata"
export metadata
return 0
}
@@ -504,8 +487,7 @@ check_object_tags_empty() {
echo "bucket tags empty check requires command type, bucket, and key"
return 2
fi
get_object_tagging "$1" "$2" "$3" || get_result=$?
if [[ $get_result -ne 0 ]]; then
if ! get_object_tagging "$1" "$2" "$3"; then
echo "failed to get tags"
return 2
fi
@@ -518,8 +500,7 @@ check_bucket_tags_empty() {
echo "bucket tags empty check requires command type, bucket"
return 2
fi
get_bucket_tagging "$1" "$2" || get_result=$?
if [[ $get_result -ne 0 ]]; then
if ! get_bucket_tagging "$1" "$2"; then
echo "failed to get tags"
return 2
fi
@@ -738,7 +719,7 @@ multipart_upload_before_completion_custom() {
return 1
fi

# shellcheck disable=SC2048
# shellcheck disable=SC2086 disable=SC2048
create_multipart_upload_custom "$1" "$2" ${*:5} || local create_result=$?
if [[ $create_result -ne 0 ]]; then
log 2 "error creating multipart upload"
@@ -769,7 +750,7 @@ multipart_upload_custom() {
return 1
fi

# shellcheck disable=SC2048
# shellcheck disable=SC2086 disable=SC2048
multipart_upload_before_completion_custom "$1" "$2" "$3" "$4" ${*:5} || local result=$?
if [[ $result -ne 0 ]]; then
log 2 "error performing pre-completion multipart upload"
@@ -919,9 +900,6 @@ list_multipart_uploads() {
export uploads
}

# perform a multi-part upload within bucket
# params: bucket, key, file, number of parts
# return 0 for success, 1 for failure
multipart_upload_from_bucket() {
if [ $# -ne 4 ]; then
echo "multipart upload from bucket command missing bucket, copy source, key, and/or part count"
@@ -964,29 +942,62 @@ multipart_upload_from_bucket() {
parts+="]"

error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
if [[ $completed -ne 0 ]]; then
echo "Error completing upload: $error"
return 1
fi
return 0

parts+="]"
if [[ $completed -ne 0 ]]; then
echo "Error completing upload: $error"
return 1
fi
return 0
}

upload_part_copy() {
multipart_upload_from_bucket_range() {
if [ $# -ne 5 ]; then
echo "upload multipart part copy function must have bucket, key, upload ID, file name, part number"
echo "multipart upload from bucket with range command requires bucket, copy source, key, part count, and range"
return 1
fi
local etag_json
echo "$1 $2 $3 $4 $5"
etag_json=$(aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
if [[ $uploaded -ne 0 ]]; then
echo "Error uploading part $5: $etag_json"

split_file "$3" "$4" || local split_result=$?
if [[ $split_result -ne 0 ]]; then
echo "error splitting file"
return 1
fi
etag=$(echo "$etag_json" | jq '.CopyPartResult.ETag')
export etag

for ((i=0;i<$4;i++)) {
echo "key: $3"
log 5 "file info: $(ls -l "$3"-"$i")"
put_object "s3api" "$3-$i" "$1" "$2-$i" || local copy_result=$?
if [[ $copy_result -ne 0 ]]; then
echo "error copying object"
return 1
fi
}

create_multipart_upload "$1" "$2-copy" || local create_multipart_result=$?
if [[ $create_multipart_result -ne 0 ]]; then
echo "error running first multpart upload"
return 1
fi

parts="["
for ((i = 1; i <= $4; i++)); do
upload_part_copy_with_range "$1" "$2-copy" "$upload_id" "$2" "$i" "$5" || local upload_part_copy_result=$?
if [[ $upload_part_copy_result -ne 0 ]]; then
# shellcheck disable=SC2154
echo "error uploading part $i: $upload_part_copy_error"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
if [[ $i -ne $4 ]]; then
parts+=","
fi
done
parts+="]"

error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
if [[ $completed -ne 0 ]]; then
echo "Error completing upload: $error"
return 1
fi
return 0
}

create_presigned_url() {
@@ -29,6 +29,7 @@ abort_all_multipart_uploads() {

log 5 "$lines"
while read -r line; do
# shellcheck disable=SC2086
error=$(aws --no-verify-ssl s3api abort-multipart-upload --bucket "$1" $line 2>&1) || abort_result=$?
if [[ $abort_result -ne 0 ]]; then
echo "error aborting multipart upload: $error"
@@ -76,9 +76,12 @@ delete_user() {
echo "delete user command requires user ID"
return 1
fi
error=$($VERSITY_EXE admin --allow-insecure --access $AWS_ACCESS_KEY_ID --secret $AWS_SECRET_ACCESS_KEY --endpoint-url $AWS_ENDPOINT_URL delete-user --access "$1") || local delete_result=$?
error=$($VERSITY_EXE admin --allow-insecure --access "$AWS_ACCESS_KEY_ID" --secret "$AWS_SECRET_ACCESS_KEY" --endpoint-url "$AWS_ENDPOINT_URL" delete-user --access "$1") || local delete_result=$?

if [[ $delete_result -ne 0 ]]; then
echo "error deleting user: $error"
export error
return 1
fi
return 0
@@ -68,11 +68,11 @@ start_versity() {
echo "Warning: no .env file found in tests folder"
fi
else
# shellcheck source=./.env.default
# shellcheck source=./tests/.env.default
source "$VERSITYGW_TEST_ENV"
fi
if [ "$GITHUB_ACTIONS" != "true" ] && [ -r "$SECRETS_FILE" ]; then
# shellcheck source=/.secrets
# shellcheck source=./tests/.secrets
source "$SECRETS_FILE"
else
echo "Warning: no secrets file found"
@@ -107,6 +107,7 @@ start_versity_process() {
fi
base_command+=(">" "$test_file_folder/versity_log_$1.txt" "2>&1")
("${base_command[@]}") &
# shellcheck disable=SC2181
if [[ $? -ne 0 ]]; then
echo "error running versitygw command: $(cat "$test_file_folder/versity_log_$1.txt")"
return 1
@@ -117,7 +118,7 @@ start_versity_process() {
sleep 1

local proc_check
check_result=$(kill -0 $pid 2>&1) || proc_check=$?
check_result=$(kill -0 "$pid" 2>&1) || proc_check=$?
if [[ $proc_check -ne 0 ]]; then
echo "versitygw failed to start: $check_result"
echo "log data: $(cat "$test_file_folder/versity_log_$1.txt")"
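As an illustrative aside (not part of this change): kill -0 sends no signal at all; it only reports whether the process exists and can be signaled, which is why it serves as the liveness probe above. A standalone sketch:

# illustrative liveness check for a background process
sleep 30 &
pid=$!
if kill -0 "$pid" 2>/dev/null; then
  echo "process $pid is still running"
else
  echo "process $pid has exited"
fi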