Merge pull request #546 from versity/test_cmdline_get_put_copy

Test cmdline get put copy
Ben McClelland
2024-05-03 10:08:34 -07:00
committed by GitHub
24 changed files with 555 additions and 525 deletions


@@ -0,0 +1,15 @@
#!/usr/bin/env bash
abort_multipart_upload() {
if [ $# -ne 3 ]; then
echo "command to run abort requires bucket, key, upload ID"
return 1
fi
error=$(aws --no-verify-ssl s3api abort-multipart-upload --bucket "$1" --key "$2" --upload-id "$3") || local aborted=$?
if [[ $aborted -ne 0 ]]; then
echo "Error aborting upload: $error"
return 1
fi
return 0
}
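As a quick illustration (bucket, key, and file names here are hypothetical, not taken from the test suite), the upload ID passed to this helper would normally come from a prior create-multipart-upload call:

upload_id=$(aws --no-verify-ssl s3api create-multipart-upload --bucket "demo-bucket" --key "demo-key" --query 'UploadId' --output text)
abort_multipart_upload "demo-bucket" "demo-key" "$upload_id" || echo "abort failed"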


@@ -1,20 +1,22 @@
#!/usr/bin/env bash
copy_object() {
if [ $# -ne 3 ]; then
echo "copy object command requires command type, source, destination"
if [ $# -ne 4 ]; then
echo "copy object command requires command type, source, bucket, key"
return 1
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3 cp "$2" s3://"$3" 2>&1) || exit_code=$?
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 cp "$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api copy-object --copy-source "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate put "$2" s3://"$(dirname "$3")" 2>&1) || exit_code=$?
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate cp "s3://$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure cp "$2" "$MC_ALIAS"/"$(dirname "$3")" 2>&1) || exit_code=$?
error=$(mc --insecure cp "$2" "$MC_ALIAS/$3/$4" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
echo "'copy-object' not implemented for '$1'"
return 1
fi
log 5 "copy object exit code: $exit_code"
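For reference, a hypothetical call against the new four-argument signature (names are placeholders): with 's3api', the second argument is a copy source of the form "source-bucket/source-key", so a server-side copy would look like:

copy_object "s3api" "bucket-one/file-one" "bucket-two" "file-one" || echo "copy failed"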


@@ -0,0 +1,31 @@
#!/usr/bin/env bash
# create an AWS bucket
# params: command type, bucket name
# return 0 for success, 1 for failure
create_bucket() {
if [ $# -ne 2 ]; then
echo "create bucket missing command type, bucket name"
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 mb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == "aws" ]] || [[ $1 == 's3api' ]]; then
error=$(aws --no-verify-ssl s3api create-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate mb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == "mc" ]]; then
error=$(mc --insecure mb "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error creating bucket: $error"
return 1
fi
return 0
}
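A sketch of how the wrapper might be exercised for each supported client (bucket names are placeholders; S3CMD_OPTS and MC_ALIAS are assumed to be configured as elsewhere in these tests):

for client in s3 s3api s3cmd mc; do
  create_bucket "$client" "test-bucket-$client" || echo "create failed for $client"
done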


@@ -0,0 +1,35 @@
#!/usr/bin/env bash
# delete an AWS bucket
# params: command type, bucket name
# return 0 for success, 1 for failure
delete_bucket() {
if [ $# -ne 2 ]; then
echo "delete bucket missing command type, bucket name"
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 rb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
error=$(aws --no-verify-ssl s3api delete-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure rb "$MC_ALIAS/$2" 2>&1) || exit_code=$?
else
echo "Invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
if [[ "$error" == *"The specified bucket does not exist"* ]]; then
return 0
else
echo "error deleting bucket: $error"
return 1
fi
fi
return 0
}
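Because a "does not exist" error is treated as success, cleanup calls can be idempotent; a hypothetical example:

delete_bucket "s3api" "already-gone-bucket" && echo "ok even if the bucket was never created"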


@@ -0,0 +1,27 @@
#!/usr/bin/env bash
delete_object() {
if [ $# -ne 3 ]; then
echo "delete object command requires command type, bucket, key"
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 rm "s3://$2/$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api delete-object --bucket "$2" --key "$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm "s3://$2/$3" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure rm "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error deleting object: $error"
return 1
fi
return 0
}
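Unlike the older util.sh helper, which took a single "bucket/key" path, this wrapper takes the bucket and key separately; a hypothetical call:

delete_object "s3cmd" "demo-bucket" "path/to/key" || echo "delete failed"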


@@ -0,0 +1,28 @@
#!/usr/bin/env bash
get_object() {
if [ $# -ne 4 ]; then
echo "get object command requires command type, bucket, key, destination"
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 mv "s3://$2/$3" "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api get-object --bucket "$2" --key "$3" "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate get "s3://$2/$3" "$4" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure get "$MC_ALIAS/$2/$3" "$4" 2>&1) || exit_code=$?
else
echo "'get object' command not implemented for '$1'"
return 1
fi
log 5 "get object exit code: $exit_code"
if [ $exit_code -ne 0 ]; then
echo "error putting object into bucket: $error"
return 1
fi
return 0
}
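A hypothetical download with the helper above (note that the 's3' branch uses 'aws s3 mv', so it removes the source object after downloading, while the other branches leave it in place):

get_object "s3api" "demo-bucket" "remote-key" "/tmp/local-copy" || echo "get failed"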


@@ -0,0 +1,25 @@
#!/usr/bin/env bash
head_bucket() {
if [ $# -ne 2 ]; then
echo "head bucket command missing command type, bucket name"
return 1
fi
local exit_code=0
if [[ $1 == "aws" ]] || [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
bucket_info=$(aws --no-verify-ssl s3api head-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
bucket_info=$(s3cmd --no-check-certificate info "s3://$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
bucket_info=$(mc --insecure stat "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error getting bucket info: $bucket_info"
return 1
fi
export bucket_info
return 0
}
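head_bucket exports bucket_info rather than echoing it on success, so callers can inspect the raw output afterwards; a sketch with a placeholder bucket name:

if head_bucket "s3api" "demo-bucket"; then
  echo "bucket info: $bucket_info"
fi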


@@ -0,0 +1,29 @@
#!/usr/bin/env bash
head_object() {
if [ $# -ne 3 ]; then
echo "head-object missing command, bucket name, object name"
return 2
fi
local exit_code=0
local error=""
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3api head-object --bucket "$2" --key "$3" 2>&1) || exit_code="$?"
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate info s3://"$2/$3" 2>&1) || exit_code="$?"
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure stat "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 2
fi
if [ $exit_code -ne 0 ]; then
if [[ "$error" == *"404"* ]] || [[ "$error" == *"does not exist"* ]]; then
return 1
else
echo "error checking if object exists: $error"
return 2
fi
fi
return 0
}
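The three return codes (0 exists, 1 missing, 2 error) let callers tell a clean miss from a failure; an illustrative check with placeholder names:

head_object "s3api" "demo-bucket" "demo-key" || result=$?
case "${result:-0}" in
  0) echo "object exists" ;;
  1) echo "object does not exist" ;;
  *) echo "error checking object" ;;
esac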


@@ -0,0 +1,61 @@
#!/usr/bin/env bash
list_buckets() {
if [ $# -ne 1 ]; then
echo "list buckets command missing command type"
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
buckets=$(aws --no-verify-ssl s3 ls 2>&1 s3://) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
list_buckets_s3api || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
buckets=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3:// 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
buckets=$(mc --insecure ls "$MC_ALIAS" 2>&1) || exit_code=$?
else
echo "list buckets command not implemented for '$1'"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error listing buckets: $buckets"
return 1
fi
if [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
return 0
fi
bucket_array=()
while IFS= read -r line; do
bucket_name=$(echo "$line" | awk '{print $NF}')
bucket_array+=("${bucket_name%/}")
done <<< "$buckets"
export bucket_array
return 0
}
list_buckets_s3api() {
output=$(aws --no-verify-ssl s3api list-buckets 2>&1) || exit_code=$?
if [[ $exit_code -ne 0 ]]; then
echo "error listing buckets: $output"
return 1
fi
modified_output=""
while IFS= read -r line; do
if [[ $line != *InsecureRequestWarning* ]]; then
modified_output+="$line"
fi
done <<< "$output"
bucket_array=()
names=$(jq -r '.Buckets[].Name' <<<"$modified_output")
IFS=$'\n' read -rd '' -a bucket_array <<<"$names"
export bucket_array
return 0
}
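Both paths end by exporting bucket_array, so a test iterates the result the same way regardless of client; hypothetical usage:

list_buckets "s3api" || echo "listing failed"
for bucket in "${bucket_array[@]}"; do
  echo "found bucket: $bucket"
done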


@@ -0,0 +1,65 @@
#!/usr/bin/env bash
list_objects() {
if [ $# -ne 2 ]; then
echo "list objects command requires command type, and bucket or folder"
return 1
fi
local exit_code=0
local output
if [[ $1 == "aws" ]] || [[ $1 == 's3' ]]; then
output=$(aws --no-verify-ssl s3 ls s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]]; then
list_objects_s3api "$2" || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
output=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
output=$(mc --insecure ls "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error listing objects: $output"
return 1
fi
if [[ $1 == 's3api' ]]; then
return 0
fi
object_array=()
while IFS= read -r line; do
if [[ $line != *InsecureRequestWarning* ]]; then
object_name=$(echo "$line" | awk '{print $NF}')
object_array+=("$object_name")
fi
done <<< "$output"
export object_array
}
list_objects_s3api() {
if [[ $# -ne 1 ]]; then
echo "list objects s3api command requires bucket name"
return 1
fi
output=$(aws --no-verify-ssl s3api list-objects --bucket "$1" 2>&1) || local exit_code=$?
if [[ $exit_code -ne 0 ]]; then
echo "error listing objects: $output"
return 1
fi
modified_output=""
while IFS= read -r line; do
if [[ $line != *InsecureRequestWarning* ]]; then
modified_output+="$line"
fi
done <<< "$output"
object_array=()
keys=$(jq -r '.Contents[].Key' <<<"$modified_output")
IFS=$'\n' read -rd '' -a object_array <<<"$keys"
export object_array
}
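Usage mirrors list_buckets: after a successful call, object_array holds the keys; a sketch with a placeholder bucket:

list_objects "s3api" "demo-bucket" || echo "listing failed"
echo "object count: ${#object_array[@]}"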


@@ -0,0 +1,28 @@
#!/usr/bin/env bash
put_object() {
if [ $# -ne 4 ]; then
echo "put object command requires command type, source, destination bucket, destination key"
return 1
fi
local exit_code=0
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 mv "$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api put-object --body "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate put "$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure put "$2" "$MC_ALIAS/$3/$4" 2>&1) || exit_code=$?
else
echo "'put object' command not implemented for '$1'"
return 1
fi
log 5 "put object exit code: $exit_code"
if [ $exit_code -ne 0 ]; then
echo "error putting object into bucket: $error"
return 1
fi
return 0
}
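An illustrative upload (local path, bucket, and key are placeholders; as with get_object, the 's3' branch uses 'aws s3 mv', so the local source file is moved rather than copied):

put_object "s3api" "/tmp/local-file" "demo-bucket" "uploaded-key" || echo "put failed"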


@@ -11,4 +11,7 @@ log() {
return 0
fi
echo "$2"
if [[ -n "$TEST_LOG_FILE" ]]; then
echo "$2" >> "$TEST_LOG_FILE"
fi
}
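With this addition, pointing TEST_LOG_FILE at a writable path mirrors log output to that file as well as stdout; a hypothetical invocation (assuming the variables are picked up by check_params in setup.sh):

TEST_LOG_FILE=/tmp/versitygw-test.log LOG_LEVEL=5 ./tests/run.sh s3api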


@@ -6,7 +6,6 @@ show_help() {
echo " -h, --help Display this help message and exit"
echo " -s, --static Don't remove buckets between tests"
echo " aws Run tests with aws cli"
echo " aws-posix Run posix tests with aws cli"
echo " s3cmd Run tests with s3cmd utility"
echo " mc Run tests with mc utility"
}
@@ -20,7 +19,7 @@ handle_param() {
-s|--static)
export RECREATE_BUCKETS=false
;;
aws|aws-posix|s3cmd|mc|user)
s3|s3api|aws|s3cmd|mc|user)
set_command_type "$1"
;;
*) # Handle unrecognized options or positional arguments
@@ -65,16 +64,16 @@ if [[ $RECREATE_BUCKETS == false ]]; then
fi
case $command_type in
aws)
s3api|aws)
echo "Running aws tests ..."
"$HOME"/bin/bats ./tests/test_aws.sh || exit_code=$?
if [[ $exit_code -eq 0 ]]; then
"$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
fi
;;
aws-posix)
echo "Running aws posix-specific tests ..."
"$HOME"/bin/bats ./tests/test_aws_posix.sh || exit_code=$?
s3)
echo "Running s3 tests ..."
"$HOME"/bin/bats ./tests/test_s3.sh || exit_code=$?
;;
s3cmd)
echo "Running s3cmd tests ..."


@@ -12,7 +12,7 @@ export RECREATE_BUCKETS
if ! ./tests/run.sh aws; then
exit 1
fi
if ! ./tests/run.sh aws-posix; then
if ! ./tests/run.sh s3; then
exit 1
fi
if ! ./tests/run.sh s3cmd; then


@@ -17,6 +17,12 @@ setup() {
return 1
fi
log 4 "Running test $BATS_TEST_NAME"
if [[ $LOG_LEVEL -ge 5 ]]; then
start_time=$(date +%s)
export start_time
fi
if [[ $RUN_S3CMD == true ]]; then
S3CMD_OPTS=()
S3CMD_OPTS+=(-c "$S3CMD_CONFIG")
@@ -59,6 +65,9 @@ check_params() {
else
export LOG_LEVEL
fi
if [[ -n "$TEST_LOG_FILE" ]]; then
export TEST_LOG_FILE
fi
return 0
}
@@ -72,4 +81,8 @@ fail() {
# bats teardown function
teardown() {
stop_versity
if [[ $LOG_LEVEL -ge 5 ]]; then
end_time=$(date +%s)
log 4 "Total test time: $((end_time - start_time))"
fi
}


@@ -10,7 +10,9 @@ source ./tests/commands/copy_object.sh
source ./tests/commands/delete_bucket_policy.sh
source ./tests/commands/delete_object_tagging.sh
source ./tests/commands/get_bucket_policy.sh
source ./tests/commands/get_object.sh
source ./tests/commands/put_bucket_policy.sh
source ./tests/commands/put_object.sh
@test "test_abort_multipart_upload" {
local bucket_file="bucket-file"
@@ -22,10 +24,10 @@ source ./tests/commands/put_bucket_policy.sh
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || abort_result=$?
run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || abort_result=$?
[[ $abort_result -eq 0 ]] || fail "Abort failed"
object_exists "aws" "$BUCKET_ONE_NAME/$bucket_file" || exists=$?
object_exists "aws" "$BUCKET_ONE_NAME" "$bucket_file" || exists=$?
[[ $exists -eq 1 ]] || fail "Upload file exists after abort"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
@@ -53,20 +55,20 @@ source ./tests/commands/put_bucket_policy.sh
delete_test_files $bucket_file
}
@test "test_copy_object" {
@test "test_put_object" {
bucket_file="bucket_file"
create_test_files "$bucket_file" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating test files"
setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
setup_bucket "aws" "$BUCKET_TWO_NAME" || local setup_result_two=$?
setup_bucket "s3api" "$BUCKET_TWO_NAME" || local setup_result_two=$?
[[ $setup_result_two -eq 0 ]] || fail "Bucket two setup error"
copy_object "aws" "$test_file_folder"/"$bucket_file" "$BUCKET_ONE_NAME"/"$bucket_file" || local copy_result=$?
put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
error=$(aws --no-verify-ssl s3api copy-object --copy-source "$BUCKET_ONE_NAME"/"$bucket_file" --key "$bucket_file" --bucket "$BUCKET_TWO_NAME" 2>&1) || local copy_result=$?
error=$(aws --no-verify-ssl s3api copy-object --copy-source "$BUCKET_ONE_NAME/$bucket_file" --key "$bucket_file" --bucket "$BUCKET_TWO_NAME" 2>&1) || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Error copying file: $error"
copy_file "s3://$BUCKET_TWO_NAME"/"$bucket_file" "$test_file_folder/${bucket_file}_copy" || local copy_result=$?
copy_file "s3://$BUCKET_TWO_NAME/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
compare_files "$test_file_folder/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local compare_result=$?
[[ $compare_result -eq 0 ]] || fail "files don't match"
@@ -90,22 +92,20 @@ source ./tests/commands/put_bucket_policy.sh
[[ $create_result -eq 0 ]] || fail "Invalid name test failed"
[[ "$bucket_create_error" == *"Invalid bucket name "* ]] || fail "unexpected error: $bucket_create_error"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
}
# test adding and removing an object on versitygw
@test "test_copy_object_with_data" {
test_common_copy_object_with_data "aws"
@test "test_put_object_with_data" {
test_common_put_object_with_data "aws"
}
@test "test_copy_object_no_data" {
test_common_copy_object_no_data "aws"
@test "test_put_object_no_data" {
test_common_put_object_no_data "aws"
}
# test listing buckets on versitygw
@test "test_list_buckets" {
test_common_list_buckets "aws"
test_common_list_buckets "s3api"
}
# test listing a bucket's objects on versitygw
@@ -159,9 +159,9 @@ source ./tests/commands/put_bucket_policy.sh
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result_one=$?
[[ $result_one -eq 0 ]] || fail "Error creating bucket"
copy_object "aws" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME"/"$object_one" || local result_two=$?
put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local result_two=$?
[[ $result_two -eq 0 ]] || fail "Error adding object one"
copy_object "aws" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME"/"$object_two" || local result_three=$?
put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local result_three=$?
[[ $result_three -eq 0 ]] || fail "Error adding object two"
error=$(aws --no-verify-ssl s3api delete-objects --bucket "$BUCKET_ONE_NAME" --delete '{
@@ -172,9 +172,9 @@ source ./tests/commands/put_bucket_policy.sh
}') || local result=$?
[[ $result -eq 0 ]] || fail "Error deleting objects: $error"
object_exists "aws" "$BUCKET_ONE_NAME"/"$object_one" || local exists_one=$?
object_exists "aws" "$BUCKET_ONE_NAME" "$object_one" || local exists_one=$?
[[ $exists_one -eq 1 ]] || fail "Object one not deleted"
object_exists "aws" "$BUCKET_ONE_NAME"/"$object_two" || local exists_two=$?
object_exists "aws" "$BUCKET_ONE_NAME" "$object_two" || local exists_two=$?
[[ $exists_two -eq 1 ]] || fail "Object two not deleted"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
@@ -197,20 +197,22 @@ source ./tests/commands/put_bucket_policy.sh
printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two"
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
copy_object "aws" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME"/"$object_one" || local copy_result_one=$?
put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local copy_result_one=$?
[[ $copy_result_one -eq 0 ]] || fail "Failed to add object $object_one"
copy_object "aws" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME"/"$object_two" || local copy_result_two=$?
put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_result_two=$?
[[ $copy_result_two -eq 0 ]] || fail "Failed to add object $object_two"
sleep 1
list_objects_s3api_v1 "$BUCKET_ONE_NAME"
key_one=$(echo "$objects" | jq '.Contents[0].Key')
[[ $key_one == '"'$object_one'"' ]] || fail "Object one mismatch"
size_one=$(echo "$objects" | jq '.Contents[0].Size')
[[ $size_one -eq 0 ]] || fail "Object one size mismatch"
key_two=$(echo "$objects" | jq '.Contents[1].Key')
[[ $key_two == '"'$object_two'"' ]] || fail "Object two mismatch"
key_one=$(echo "$objects" | jq -r '.Contents[0].Key')
[[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)"
size_one=$(echo "$objects" | jq -r '.Contents[0].Size')
[[ $size_one -eq 0 ]] || fail "Object one size mismatch ($size_one, 0)"
key_two=$(echo "$objects" | jq -r '.Contents[1].Key')
[[ $key_two == "$object_two" ]] || fail "Object two mismatch ($key_two, $object_two)"
size_two=$(echo "$objects" | jq '.Contents[1].Size')
[[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch"
[[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch ($size_two, ${#object_two_data})"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$object_one" "$object_two"
@@ -227,20 +229,20 @@ source ./tests/commands/put_bucket_policy.sh
printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two"
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
copy_object "aws" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME"/"$object_one" || local copy_object_one=$?
put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local copy_object_one=$?
[[ $copy_object_one -eq 0 ]] || fail "Failed to add object $object_one"
copy_object "aws" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME"/"$object_two" || local copy_object_two=$?
put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_object_two=$?
[[ $copy_object_two -eq 0 ]] || fail "Failed to add object $object_two"
list_objects_s3api_v2 "$BUCKET_ONE_NAME"
key_one=$(echo "$objects" | jq '.Contents[0].Key')
[[ $key_one == '"'$object_one'"' ]] || fail "Object one mismatch"
size_one=$(echo "$objects" | jq '.Contents[0].Size')
[[ $size_one -eq 0 ]] || fail "Object one size mismatch"
key_two=$(echo "$objects" | jq '.Contents[1].Key')
[[ $key_two == '"'$object_two'"' ]] || fail "Object two mismatch"
size_two=$(echo "$objects" | jq '.Contents[1].Size')
[[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch"
key_one=$(echo "$objects" | jq -r '.Contents[0].Key')
[[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)"
size_one=$(echo "$objects" | jq -r '.Contents[0].Size')
[[ $size_one -eq 0 ]] || fail "Object one size mismatch ($size_one, 0)"
key_two=$(echo "$objects" | jq -r '.Contents[1].Key')
[[ $key_two == "$object_two" ]] || fail "Object two mismatch ($key_two, $object_two)"
size_two=$(echo "$objects" | jq -r '.Contents[1].Size')
[[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch ($size_two, ${#object_two_data})"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$object_one" "$object_two"
@@ -266,7 +268,7 @@ source ./tests/commands/put_bucket_policy.sh
[[ list_result -eq 0 ]] || fail "Listing multipart upload parts failed"
declare -a parts_map
for ((i=0;i<$4;i++)) {
for i in {0..3}; do
local part_number
local etag
part_number=$(echo "$parts" | jq ".[$i].PartNumber")
@@ -280,9 +282,10 @@ source ./tests/commands/put_bucket_policy.sh
return 1
fi
parts_map[$part_number]=$etag
}
done
[[ ${#parts_map[@]} -ne 0 ]] || fail "error loading multipart upload parts to check"
for ((i=0;i<$4;i++)) {
for i in {0..3}; do
local part_number
local etag
part_number=$(echo "$listed_parts" | jq ".Parts[$i].PartNumber")
@@ -291,9 +294,9 @@ source ./tests/commands/put_bucket_policy.sh
echo "error: etags don't match (part number: $part_number, etags ${parts_map[$part_number]},$etag)"
return 1
fi
}
done
run_abort_command "$BUCKET_ONE_NAME" "$bucket_file" $upload_id
run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file" 4
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files $bucket_file
}
@@ -347,7 +350,7 @@ source ./tests/commands/put_bucket_policy.sh
multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || upload_result=$?
[[ $upload_result -eq 0 ]] || fail "Error performing multipart upload"
copy_file "s3://$BUCKET_ONE_NAME/$bucket_file-copy" "$test_file_folder/$bucket_file-copy"
get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$test_file_folder/$bucket_file-copy"
compare_files "$test_file_folder"/$bucket_file-copy "$test_file_folder"/$bucket_file || compare_result=$?
[[ $compare_result -eq 0 ]] || fail "Data doesn't match"
@@ -370,7 +373,7 @@ source ./tests/commands/put_bucket_policy.sh
setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
copy_object "aws" "$test_file_folder"/"$folder_name"/"$object_name" "$BUCKET_ONE_NAME"/"$folder_name"/"$object_name" || local copy_result=$?
put_object "aws" "$test_file_folder/$folder_name/$object_name" "$BUCKET_ONE_NAME" "$folder_name/$object_name" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
list_objects_s3api_v1 "$BUCKET_ONE_NAME" "/"
@@ -386,9 +389,9 @@ source ./tests/commands/put_bucket_policy.sh
}
# ensure that object lists longer than 1000 entries (the pagination threshold) are returned properly
@test "test_list_objects_file_count" {
test_common_list_objects_file_count "aws"
}
#@test "test_list_objects_file_count" {
# test_common_list_objects_file_count "aws"
#}
#@test "test_filename_length" {
# file_name=$(printf "%0.sa" $(seq 1 1025))
@@ -433,12 +436,12 @@ source ./tests/commands/put_bucket_policy.sh
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
object="$test_file_folder"/"$object_one"
put_object_with_metadata "aws" "$object" "$BUCKET_ONE_NAME" "$test_key" "$test_value" || copy_result=$?
put_object_with_metadata "aws" "$object" "$BUCKET_ONE_NAME" "$object_one" "$test_key" "$test_value" || copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
object_exists "aws" "$object" || local exists_result_one=$?
object_exists "aws" "$BUCKET_ONE_NAME" "$object_one" || local exists_result_one=$?
[[ $exists_result_one -eq 0 ]] || fail "Object not added to bucket"
get_object_metadata "aws" "$BUCKET_ONE_NAME" "$object" || get_result=$?
get_object_metadata "aws" "$BUCKET_ONE_NAME" "$object_one" || get_result=$?
[[ $get_result -eq 0 ]] || fail "error getting object metadata"
key=$(echo "$metadata" | jq 'keys[]')
value=$(echo "$metadata" | jq '.[]')


@@ -1,95 +0,0 @@
#!/usr/bin/env bats
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_bucket_create.sh
source ./tests/util_file.sh
source ./tests/util_posix.sh
source ./tests/commands/copy_object.sh
# test that changes to local folders and files are reflected on S3
@test "test_local_creation_deletion" {
if [[ $RECREATE_BUCKETS != "true" ]]; then
return
fi
local object_name="test-object"
if [[ -e "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME" ]]; then
rm -rf "${LOCAL_FOLDER:?}"/"${BUCKET_ONE_NAME:?}"
fi
mkdir "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME"
local object="$BUCKET_ONE_NAME"/"$object_name"
touch "$LOCAL_FOLDER"/"$object"
bucket_exists_remote_and_local "$BUCKET_ONE_NAME" || local bucket_exists_two=$?
[[ $bucket_exists_two -eq 0 ]] || fail "Failed bucket existence check"
object_exists_remote_and_local "$object" || local object_exists_two=$?
[[ $object_exists_two -eq 0 ]] || fail "Failed object existence check"
rm "$LOCAL_FOLDER"/"$object"
sleep 1
object_not_exists_remote_and_local "$object" || local object_deleted=$?
[[ $object_deleted -eq 0 ]] || fail "Failed object deletion check"
rmdir "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME"
sleep 1
bucket_not_exists_remote_and_local "$BUCKET_ONE_NAME" || local bucket_deleted=$?
[[ $bucket_deleted -eq 0 ]] || fail "Failed bucket deletion check"
}
# test head-object command
@test "test_head_object" {
local bucket_name=$BUCKET_ONE_NAME
local object_name="object-one"
create_test_files $object_name
if [ -e "$LOCAL_FOLDER"/"$bucket_name"/$object_name ]; then
chmod 755 "$LOCAL_FOLDER"/"$bucket_name"/$object_name
fi
setup_bucket "aws" "$bucket_name" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating bucket"
copy_object "aws" "$test_file_folder"/"$object_name" "$bucket_name"/"$object_name" || local result="$?"
[[ result -eq 0 ]] || fail "Error adding object one"
chmod 000 "$LOCAL_FOLDER"/"$bucket_name"/$object_name
sleep 1
object_is_accessible "$bucket_name" $object_name || local accessible=$?
[[ $accessible -eq 1 ]] || fail "Object should be inaccessible"
chmod 755 "$LOCAL_FOLDER"/"$bucket_name"/$object_name
sleep 1
object_is_accessible "$bucket_name" $object_name || local accessible_two=$?
[[ $accessible_two -eq 0 ]] || fail "Object should be accessible"
delete_object "aws" "$bucket_name"/$object_name
delete_bucket_or_contents "aws" "$bucket_name"
delete_test_files $object_name
}
# check info, accessibility of bucket
@test "test_get_bucket_info" {
if [ -e "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME" ]; then
chmod 755 "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME"
sleep 1
else
setup_bucket "aws" "$BUCKET_ONE_NAME" || local created=$?
[[ $created -eq 0 ]] || fail "Error creating bucket"
fi
chmod 000 "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME"
sleep 1
bucket_is_accessible "$BUCKET_ONE_NAME" || local accessible=$?
[[ $accessible -eq 1 ]] || fail "Bucket should be inaccessible"
chmod 755 "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME"
sleep 1
bucket_is_accessible "$BUCKET_ONE_NAME" || local accessible_two=$?
[[ $accessible_two -eq 0 ]] || fail "Bucket should be accessible"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
}


@@ -1,5 +1,6 @@
#!/usr/bin/env bats
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_file.sh
source ./tests/util_policy.sh
@@ -7,6 +8,8 @@ source ./tests/commands/copy_object.sh
source ./tests/commands/delete_object_tagging.sh
source ./tests/commands/get_bucket_location.sh
source ./tests/commands/get_bucket_tagging.sh
source ./tests/commands/list_buckets.sh
source ./tests/commands/put_object.sh
test_common_multipart_upload() {
if [[ $# -ne 1 ]]; then
@@ -21,7 +24,7 @@ test_common_multipart_upload() {
setup_bucket "$1" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
copy_object "$1" "$test_file_folder"/$bucket_file "$BUCKET_ONE_NAME/$bucket_file" || local put_result=$?
put_object "$1" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local put_result=$?
[[ $put_result -eq 0 ]] || fail "failed to copy file"
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
@@ -50,7 +53,7 @@ test_common_create_delete_bucket() {
[[ $delete_result_two -eq 0 ]] || fail "Failed to delete bucket"
}
test_common_copy_object_with_data() {
test_common_put_object_with_data() {
if [[ $# -ne 1 ]]; then
fail "put object test requires command type"
fi
@@ -59,10 +62,10 @@ test_common_copy_object_with_data() {
create_test_files "$object_name" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Error creating test file"
echo "test data" > "$test_file_folder"/"$object_name"
test_common_copy_object "$1" "$object_name"
test_common_put_object "$1" "$object_name"
}
test_common_copy_object_no_data() {
test_common_put_object_no_data() {
if [[ $# -ne 1 ]]; then
fail "put object test requires command type"
fi
@@ -70,10 +73,10 @@ test_common_copy_object_no_data() {
local object_name="test-object"
create_test_files "$object_name" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Error creating test file"
test_common_copy_object "$1" "$object_name"
test_common_put_object "$1" "$object_name"
}
test_common_copy_object() {
test_common_put_object() {
if [[ $# -ne 2 ]]; then
fail "put object test requires command type, file"
fi
@@ -81,15 +84,14 @@ test_common_copy_object() {
setup_bucket "$1" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
object="$BUCKET_ONE_NAME"/"$2"
copy_object "$1" "$test_file_folder"/"$2" "$object" || local copy_result=$?
put_object "$1" "$test_file_folder/$2" "$BUCKET_ONE_NAME" "$2" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
object_exists "$1" "$object" || local exists_result_one=$?
object_exists "$1" "$BUCKET_ONE_NAME" "$2" || local exists_result_one=$?
[[ $exists_result_one -eq 0 ]] || fail "Object not added to bucket"
delete_object "$1" "$object" || local delete_result=$?
delete_object "$1" "$BUCKET_ONE_NAME" "$2" || local delete_result=$?
[[ $delete_result -eq 0 ]] || fail "Failed to delete object"
object_exists "$1" "$object" || local exists_result_two=$?
object_exists "$1" "$BUCKET_ONE_NAME" "$2" || local exists_result_two=$?
[[ $exists_result_two -eq 1 ]] || fail "Object not removed from bucket"
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
@@ -115,6 +117,7 @@ test_common_list_buckets() {
if [ -z "$bucket_array" ]; then
fail "bucket_array parameter not exported"
fi
log 5 "bucket array: ${bucket_array[*]}"
for bucket in "${bucket_array[@]}"; do
if [ "$bucket" == "$BUCKET_ONE_NAME" ] || [ "$bucket" == "s3://$BUCKET_ONE_NAME" ]; then
bucket_one_found=true
@@ -148,9 +151,9 @@ test_common_list_objects() {
echo "test data 2" > "$test_file_folder"/"$object_two"
setup_bucket "$1" "$BUCKET_ONE_NAME" || local result_one=$?
[[ result_one -eq 0 ]] || fail "Error creating bucket"
copy_object "$1" "$test_file_folder"/$object_one "$BUCKET_ONE_NAME"/"$object_one" || local result_two=$?
put_object "$1" "$test_file_folder"/$object_one "$BUCKET_ONE_NAME" "$object_one" || local result_two=$?
[[ result_two -eq 0 ]] || fail "Error adding object one"
copy_object "$1" "$test_file_folder"/$object_two "$BUCKET_ONE_NAME"/"$object_two" || local result_three=$?
put_object "$1" "$test_file_folder"/$object_two "$BUCKET_ONE_NAME" "$object_two" || local result_three=$?
[[ result_three -eq 0 ]] || fail "Error adding object two"
list_objects "$1" "$BUCKET_ONE_NAME"
@@ -231,8 +234,7 @@ test_common_set_get_object_tags() {
[[ $created -eq 0 ]] || fail "Error creating test files"
setup_bucket "$1" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
local object_path="$BUCKET_ONE_NAME"/"$bucket_file"
copy_object "$1" "$test_file_folder"/"$bucket_file" "$object_path" || local copy_result=$?
put_object "$1" "$test_file_folder"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket '$BUCKET_ONE_NAME'"
get_object_tags "$1" "$BUCKET_ONE_NAME" $bucket_file || local get_result=$?
@@ -245,13 +247,13 @@ test_common_set_get_object_tags() {
fi
put_object_tag "$1" "$BUCKET_ONE_NAME" $bucket_file $key $value
get_object_tags "$1" "$BUCKET_ONE_NAME" $bucket_file || local get_result_two=$?
get_object_tags "$1" "$BUCKET_ONE_NAME" "$bucket_file" || local get_result_two=$?
[[ $get_result_two -eq 0 ]] || fail "Error getting object tags"
if [[ $1 == 'aws' ]]; then
tag_set_key=$(echo "$tags" | jq '.TagSet[0].Key')
tag_set_value=$(echo "$tags" | jq '.TagSet[0].Value')
[[ $tag_set_key == '"'$key'"' ]] || fail "Key mismatch"
[[ $tag_set_value == '"'$value'"' ]] || fail "Value mismatch"
tag_set_key=$(echo "$tags" | jq -r '.TagSet[0].Key')
tag_set_value=$(echo "$tags" | jq -r '.TagSet[0].Value')
[[ $tag_set_key == "$key" ]] || fail "Key mismatch"
[[ $tag_set_value == "$value" ]] || fail "Value mismatch"
else
read -r tag_set_key tag_set_value <<< "$(echo "$tags" | awk 'NR==2 {print $1, $3}')"
[[ $tag_set_key == "$key" ]] || fail "Key mismatch"
@@ -263,7 +265,6 @@ test_common_set_get_object_tags() {
}
test_common_presigned_url_utf8_chars() {
if [[ $# -ne 1 ]]; then
echo "presigned url command missing command type"
return 1
@@ -278,7 +279,7 @@ test_common_presigned_url_utf8_chars() {
setup_bucket "$1" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
copy_object "$1" "$test_file_folder"/"$bucket_file" "$BUCKET_ONE_NAME"/"$bucket_file" || put_result=$?
put_object "$1" "$test_file_folder"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || put_result=$?
[[ $put_result -eq 0 ]] || fail "Failed to add object $bucket_file"
create_presigned_url "$1" "$BUCKET_ONE_NAME" "$bucket_file" || presigned_result=$?
@@ -320,7 +321,6 @@ test_common_list_objects_file_count() {
}
test_common_delete_object_tagging() {
[[ $# -eq 1 ]] || fail "test common delete object tagging requires command type"
bucket_file="bucket_file"
@@ -333,7 +333,7 @@ test_common_delete_object_tagging() {
setup_bucket "$1" "$BUCKET_ONE_NAME" || local setup_result=$?
[[ $setup_result -eq 0 ]] || fail "error setting up bucket"
copy_object "$1" "$test_file_folder"/"$bucket_file" "$BUCKET_ONE_NAME"/"$bucket_file" || local copy_result=$?
put_object "$1" "$test_file_folder"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$?
[[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket"
put_object_tag "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$tag_key" "$tag_value" || put_result=$?
@@ -348,7 +348,7 @@ test_common_delete_object_tagging() {
check_object_tags_empty "$1" "$BUCKET_ONE_NAME" "$bucket_file" || get_result=$?
[[ $get_result -eq 0 ]] || fail "failed to get tags"
delete_bucket_or_contents "aws" "$BUCKET_TWO_NAME"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$bucket_file"
}


@@ -19,11 +19,11 @@ export RUN_MC=true
}
@test "test_put_object-with-data-mc" {
test_common_copy_object_with_data "mc"
test_common_put_object_with_data "mc"
}
@test "test_put_object-no-data-mc" {
test_common_copy_object_no_data "mc"
test_common_put_object_no_data "mc"
}
@test "test_list_buckets_mc" {

tests/test_s3.sh (new executable file, 19 lines)

@@ -0,0 +1,19 @@
#!/usr/bin/env bats
source ./tests/test_common.sh
@test "test_multipart_upload" {
test_common_multipart_upload "s3"
}
@test "test_put_object" {
test_common_put_object_no_data "s3"
}
@test "test_list_buckets" {
test_common_list_buckets "s3"
}
@test "test_list_objects_file_count" {
test_common_list_objects_file_count "s3"
}


@@ -21,11 +21,11 @@ export RUN_S3CMD=true
# test s3cmd put object
@test "test_copy_object_with_data" {
test_common_copy_object_with_data "s3cmd"
test_common_put_object_with_data "s3cmd"
}
@test "test_copy_object_no_data" {
test_common_copy_object_no_data "s3cmd"
test_common_put_object_no_data "s3cmd"
}
# test listing buckets on versitygw


@@ -1,38 +1,16 @@
#!/usr/bin/env bash
source ./tests/util_bucket_create.sh
source ./tests/util_mc.sh
source ./tests/logger.sh
source ./tests/commands/abort_multipart_upload.sh
source ./tests/commands/create_bucket.sh
source ./tests/commands/delete_bucket.sh
source ./tests/commands/delete_object.sh
source ./tests/commands/get_bucket_tagging.sh
# delete an AWS bucket
# param: bucket name
# return 0 for success, 1 for failure
delete_bucket() {
if [ $# -ne 2 ]; then
echo "delete bucket missing command type, bucket name"
return 1
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3 rb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure rb "$MC_ALIAS/$2" 2>&1) || exit_code=$?
else
echo "Invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
if [[ "$error" == *"The specified bucket does not exist"* ]]; then
return 0
else
echo "error deleting bucket: $error"
return 1
fi
fi
return 0
}
source ./tests/commands/head_bucket.sh
source ./tests/commands/head_object.sh
source ./tests/commands/list_objects.sh
# recursively delete an AWS bucket
# param: bucket name
@@ -45,8 +23,10 @@ delete_bucket_recursive() {
local exit_code=0
local error
if [[ $1 == "aws" ]]; then
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 rb s3://"$2" --force 2>&1) || exit_code="$?"
elif [[ $1 == "aws" ]] || [[ $1 == 's3api' ]]; then
delete_bucket_recursive_s3api "$2" 2>&1 || exit_code="$?"
elif [[ $1 == "s3cmd" ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rb s3://"$2" --recursive 2>&1) || exit_code="$?"
elif [[ $1 == "mc" ]]; then
@@ -67,6 +47,33 @@ delete_bucket_recursive() {
return 0
}
delete_bucket_recursive_s3api() {
if [[ $# -ne 1 ]]; then
echo "delete bucket recursive command for s3api requires bucket name"
return 1
fi
list_objects 's3api' "$1" || list_result=$?
if [[ $list_result -ne 0 ]]; then
echo "error listing objects"
return 1
fi
# shellcheck disable=SC2154
for object in "${object_array[@]}"; do
delete_object 's3api' "$1" "$object" || delete_result=$?
if [[ $delete_result -ne 0 ]]; then
echo "error deleting object $object"
return 1
fi
done
delete_bucket 's3api' "$1" || delete_result=$?
if [[ $delete_result -ne 0 ]]; then
echo "error deleting bucket"
return 1
fi
return 0
}
# delete contents of a bucket
# param: command type, bucket name
# return 0 for success, 1 for failure
@@ -104,28 +111,14 @@ bucket_exists() {
return 2
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3 ls s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
# NOTE: s3cmd sometimes takes longer with direct connection
sleep 1
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure ls "$MC_ALIAS/$2" 2>&1) || exit_code=$?
else
echo "invalid command type: $1"
return 2
fi
if [ $exit_code -ne 0 ]; then
if [[ "$error" == *"does not exist"* ]] || [[ "$error" == *"Access Denied"* ]]; then
head_bucket "$1" "$2" || local check_result=$?
if [[ $check_result -ne 0 ]]; then
# shellcheck disable=SC2154
if [[ "$bucket_info" == *"404"* ]] || [[ "$bucket_info" == *"does not exist"* ]]; then
return 1
else
echo "error checking if bucket exists: $error"
return 2
fi
echo "error checking if bucket exists"
return 2
fi
return 0
}
@@ -198,18 +191,28 @@ setup_bucket() {
# param: command, object path
# return 0 for true, 1 for false, 2 for error
object_exists() {
if [ $# -ne 2 ]; then
echo "object exists check missing command, object name"
if [ $# -ne 3 ]; then
echo "object exists check missing command, bucket name, object name"
return 2
fi
head_object "$1" "$2" "$3" || head_result=$?
if [[ $head_result -eq 2 ]]; then
echo "error checking if object exists"
return 2
fi
return $head_result
return 0
local exit_code=0
local error=""
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3 ls s3://"$2" 2>&1) || exit_code="$?"
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 ls "s3://$2/$3" 2>&1) || exit_code="$?"
elif [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
error=$(aws --no-verify-ssl s3api head-object --bucket "$2" --prefix "$3" 2>&1) || exit_code="$?"
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2" 2>&1) || exit_code="$?"
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2/$3" 2>&1) || exit_code="$?"
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure ls "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
error=$(mc --insecure ls "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 2
@@ -229,15 +232,15 @@ object_exists() {
}
put_object_with_metadata() {
if [ $# -ne 5 ]; then
echo "put object command requires command type, source, destination, key, value"
if [ $# -ne 6 ]; then
echo "put object command requires command type, source, destination, key, metadata key, metadata value"
return 1
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api put-object --bucket "$3" --key "$2" --body "$2" --metadata "{\"$4\":\"$5\"}") || exit_code=$?
error=$(aws --no-verify-ssl s3api put-object --body "$2" --bucket "$3" --key "$4" --metadata "{\"$5\":\"$6\"}") || exit_code=$?
else
echo "invalid command type $1"
return 1
@@ -281,7 +284,7 @@ put_object_multiple() {
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
if [[ $1 == 'aws' ]] || [[ $1 == 's3' ]]; then
# shellcheck disable=SC2086
error=$(aws --no-verify-ssl s3 cp "$(dirname "$2")" s3://"$3" --recursive --exclude="*" --include="$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
@@ -307,11 +310,11 @@ put_object_multiple() {
# params: source file, destination copy location
# return 0 for success or already exists, 1 for failure
check_and_put_object() {
if [ $# -ne 2 ]; then
echo "check and put object function requires source, destination"
if [ $# -ne 3 ]; then
echo "check and put object function requires source, bucket, destination"
return 1
fi
object_exists "aws" "$2" || local exists_result=$?
object_exists "aws" "$2" "$3" || local exists_result=$?
if [ "$exists_result" -eq 2 ]; then
echo "error checking if object exists"
return 1
@@ -326,69 +329,6 @@ check_and_put_object() {
return 0
}
# delete object from versitygw
# param: object path, including bucket name
# return 0 for success, 1 for failure
delete_object() {
if [ $# -ne 2 ]; then
echo "delete object command requires command type, object parameter"
return 1
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3 rm s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure rm "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error deleting object: $error"
return 1
fi
return 0
}
# list buckets on versitygw
# params: format (aws, s3cmd)
# export bucket_array (bucket names) on success, return 1 for failure
list_buckets() {
if [[ $# -ne 1 ]]; then
echo "List buckets command missing format"
return 1
fi
local exit_code=0
local output
if [[ $1 == "aws" ]]; then
output=$(aws --no-verify-ssl s3 ls s3:// 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
output=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3:// 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
output=$(mc --insecure ls "$MC_ALIAS" 2>&1) || exit_code=$?
else
echo "invalid format: $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error listing buckets: $output"
return 1
fi
bucket_array=()
while IFS= read -r line; do
bucket_name=$(echo "$line" | awk '{print $NF}')
bucket_array+=("${bucket_name%/}")
done <<< "$output"
export bucket_array
}
list_buckets_with_user() {
if [[ $# -ne 3 ]]; then
echo "List buckets command missing format, user id, key"
@@ -418,42 +358,6 @@ list_buckets_with_user() {
export bucket_array
}
# list objects on versitygw, in bucket or folder
# param: path of bucket or folder
# export object_array (object names) on success, return 1 for failure
list_objects() {
if [ $# -ne 2 ]; then
echo "list objects command requires command type, and bucket or folder"
return 1
fi
local exit_code=0
local output
if [[ $1 == "aws" ]]; then
output=$(aws --no-verify-ssl s3 ls s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
output=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
output=$(mc --insecure ls "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error listing objects: $output"
return 1
fi
object_array=()
while IFS= read -r line; do
if [[ $line != *InsecureRequestWarning* ]]; then
object_name=$(echo "$line" | awk '{print $NF}')
object_array+=("$object_name")
fi
done <<< "$output"
export object_array
}
remove_insecure_request_warning() {
if [[ $# -ne 1 ]]; then
echo "remove insecure request warning requires input lines"
@@ -603,7 +507,7 @@ check_object_tags_empty() {
echo "failed to get tags"
return 2
fi
check_tags_empty "$1" || check_result=$?
check_tags_empty "$1" || local check_result=$?
return $check_result
}
@@ -617,7 +521,7 @@ check_bucket_tags_empty() {
echo "failed to get tags"
return 2
fi
check_tags_empty "$1" || check_result=$?
check_tags_empty "$1" || local check_result=$?
return $check_result
}
@@ -859,29 +763,12 @@ multipart_upload() {
return 0
}
# run the abort multipart command
# params: bucket, key, upload ID
# return 0 for success, 1 for failure
run_abort_command() {
if [ $# -ne 3 ]; then
echo "command to run abort requires bucket, key, upload ID"
return 1
fi
error=$(aws --no-verify-ssl s3api abort-multipart-upload --bucket "$1" --key "$2" --upload-id "$3") || local aborted=$?
if [[ $aborted -ne 0 ]]; then
echo "Error aborting upload: $error"
return 1
fi
return 0
}
# run upload, then abort it
# params: bucket, key, local file location, number of parts to split into before uploading
# return 0 for success, 1 for failure
abort_multipart_upload() {
run_then_abort_multipart_upload() {
if [ $# -ne 4 ]; then
echo "abort multipart upload command missing bucket, key, file, and/or part count"
echo "run then abort multipart upload command missing bucket, key, file, and/or part count"
return 1
fi
@@ -891,7 +778,7 @@ abort_multipart_upload() {
return 1
fi
run_abort_command "$1" "$2" "$upload_id"
abort_multipart_upload "$1" "$2" "$upload_id"
return $?
}
@@ -981,7 +868,8 @@ multipart_upload_from_bucket() {
fi
for ((i=0;i<$4;i++)) {
copy_object "aws" "$3"-"$i" "$1" || copy_result=$?
echo "key: $3"
put_object "s3api" "$3-$i" "$1" "$2-$i" || copy_result=$?
if [[ $copy_result -ne 0 ]]; then
echo "error copying object"
return 1
@@ -1024,6 +912,7 @@ upload_part_copy() {
return 1
fi
local etag_json
echo "$1 $2 $3 $4 $5"
etag_json=$(aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
if [[ $uploaded -ne 0 ]]; then
echo "Error uploading part $5: $etag_json"
@@ -1057,27 +946,3 @@ create_presigned_url() {
fi
export presigned_url
}
head_bucket() {
if [ $# -ne 2 ]; then
echo "head bucket command missing command type, bucket name"
return 1
fi
local exit_code=0
local error
if [[ $1 == "aws" ]]; then
bucket_info=$(aws --no-verify-ssl s3api head-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
bucket_info=$(s3cmd --no-check-certificate info "s3://$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
bucket_info=$(mc --insecure stat "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error getting bucket info: $bucket_info"
return 1
fi
export bucket_info
}


@@ -3,34 +3,6 @@
source ./tests/util_mc.sh
source ./tests/logger.sh
# create an AWS bucket
# param: bucket name
# return 0 for success, 1 for failure
create_bucket() {
if [ $# -ne 2 ]; then
echo "create bucket missing command type, bucket name"
return 1
fi
local exit_code=0
local error
if [[ $1 == "aws" ]]; then
error=$(aws --no-verify-ssl s3 mb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate mb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == "mc" ]]; then
error=$(mc --insecure mb "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error creating bucket: $error"
return 1
fi
return 0
}
create_bucket_with_user() {
if [ $# -ne 4 ]; then
echo "create bucket missing command type, bucket name, access, secret"
@@ -61,8 +33,10 @@ create_bucket_invalid_name() {
return 1
fi
local exit_code=0
if [[ $1 == "aws" ]]; then
if [[ $1 == "aws" ]] || [[ $1 == 's3' ]]; then
bucket_create_error=$(aws --no-verify-ssl s3 mb "s3://" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]]; then
bucket_create_error=$(aws --no-verify-ssl s3api create-bucket --bucket "s3://" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
bucket_create_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate mb "s3://" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then


@@ -1,97 +0,0 @@
#!/usr/bin/env bats
# check if object exists both on S3 and locally
# param: object path
# 0 for yes, 1 for no, 2 for error
object_exists_remote_and_local() {
if [ $# -ne 1 ]; then
echo "object existence check requires single name parameter"
return 2
fi
object_exists "aws" "$1" || local exist_result=$?
if [[ $exist_result -eq 2 ]]; then
echo "Error checking if object exists"
return 2
fi
if [[ $exist_result -eq 1 ]]; then
echo "Error: object doesn't exist remotely"
return 1
fi
if [[ ! -e "$LOCAL_FOLDER"/"$1" ]]; then
echo "Error: object doesn't exist locally"
return 1
fi
return 0
}
# check if object doesn't exist both on S3 and locally
# param: object path
# return 0 for doesn't exist, 1 for still exists, 2 for error
object_not_exists_remote_and_local() {
if [ $# -ne 1 ]; then
echo "object non-existence check requires single name parameter"
return 2
fi
object_exists "aws" "$1" || local exist_result=$?
if [[ $exist_result -eq 2 ]]; then
echo "Error checking if object doesn't exist"
return 2
fi
if [[ $exist_result -eq 0 ]]; then
echo "Error: object exists remotely"
return 1
fi
if [[ -e "$LOCAL_FOLDER"/"$1" ]]; then
echo "Error: object exists locally"
return 1
fi
return 0
}
# check if a bucket doesn't exist both on S3 and on gateway
# param: bucket name
# return: 0 for doesn't exist, 1 for does, 2 for error
bucket_not_exists_remote_and_local() {
if [ $# -ne 1 ]; then
echo "bucket existence check requires single name parameter"
return 2
fi
bucket_exists "aws" "$1" || local exist_result=$?
if [[ $exist_result -eq 2 ]]; then
echo "Error checking if bucket exists"
return 2
fi
if [[ $exist_result -eq 0 ]]; then
echo "Error: bucket exists remotely"
return 1
fi
if [[ -e "$LOCAL_FOLDER"/"$1" ]]; then
echo "Error: bucket exists locally"
return 1
fi
return 0
}
# check if a bucket exists both on S3 and on gateway
# param: bucket name
# return: 0 for yes, 1 for no, 2 for error
bucket_exists_remote_and_local() {
if [ $# -ne 1 ]; then
echo "bucket existence check requires single name parameter"
return 2
fi
bucket_exists "aws" "$1" || local exist_result=$?
if [[ $exist_result -eq 2 ]]; then
echo "Error checking if bucket exists"
return 2
fi
if [[ $exist_result -eq 1 ]]; then
echo "Error: bucket doesn't exist remotely"
return 1
fi
if [[ ! -e "$LOCAL_FOLDER"/"$1" ]]; then
echo "Error: bucket doesn't exist locally"
return 1
fi
return 0
}