versitygw (mirror of https://github.com/versity/versitygw.git)
commit: test: REST get/put/delete object, docker updates, time reduction
.github/workflows/docker-bats.yaml (vendored, 2 changed lines)
@@ -25,4 +25,4 @@ jobs:
        run: sudo apt-get install -y docker-compose

      - name: Run Docker Container
        run: docker-compose -f tests/docker-compose-bats.yml up --exit-code-from posix_backend posix_backend
        run: docker-compose -f tests/docker-compose-bats.yml up --exit-code-from s3api_only s3api_only

.github/workflows/system.yml (vendored, 1 changed line)
@@ -179,6 +179,7 @@ jobs:
          USERNAME_TWO: HIJKLMN
          PASSWORD_TWO: 8901234
          TEST_FILE_FOLDER: ${{ github.workspace }}/versity-gwtest-files
          REMOVE_TEST_FILE_FOLDER: true
        run: |
          make testbin
          export AWS_ACCESS_KEY_ID=ABCDEFGHIJKLMNOPQRST

@@ -86,4 +86,5 @@ RUN openssl genpkey -algorithm RSA -out versitygw-docker.pem -pkeyopt rsa_keygen
ENV WORKSPACE=.
ENV VERSITYGW_TEST_ENV=$CONFIG_FILE

CMD ["tests/run_all.sh"]
ENTRYPOINT ["tests/run.sh"]
CMD ["s3api,s3,s3cmd,mc,rest"]

tests/commands/create_presigned_url.sh (new file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/env bash

create_presigned_url() {
  if [[ $# -ne 3 ]]; then
    log 2 "create presigned url function requires command type, bucket, and filename"
    return 1
  fi

  local presign_result=0
  if [[ $1 == 'aws' ]]; then
    presigned_url=$(aws s3 presign "s3://$2/$3" --expires-in 900) || presign_result=$?
  elif [[ $1 == 's3cmd' ]]; then
    presigned_url=$(s3cmd --no-check-certificate "${S3CMD_OPTS[@]}" signurl "s3://$2/$3" "$(echo "$(date +%s)" + 900 | bc)") || presign_result=$?
  elif [[ $1 == 'mc' ]]; then
    presigned_url_data=$(mc --insecure share download --recursive "$MC_ALIAS/$2/$3") || presign_result=$?
    presigned_url="${presigned_url_data#*Share: }"
  else
    log 2 "unrecognized command type $1"
    return 1
  fi
  if [[ $presign_result -ne 0 ]]; then
    log 2 "error generating presigned url: $presigned_url"
    return 1
  fi
  export presigned_url
  return 0
}

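A hedged usage sketch of the new helper, using variable names that already appear in these tests; the $(( ... )) expiry arithmetic at the end is an equivalent alternative to the bc pipeline above, not what the script itself uses:

# create a URL with any of the three clients, then fetch it anonymously with curl
if ! create_presigned_url "s3cmd" "$BUCKET_ONE_NAME" "$bucket_file"; then
  log 2 "presigned url creation failure"
  return 1
fi
curl -k "$presigned_url" -o "$TEST_FILE_FOLDER/$bucket_file-copy"

# expiry computation without bc
expiry=$(( $(date +%s) + 900 ))
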
@@ -31,6 +31,8 @@ delete_object() {
|
||||
delete_object_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm "s3://$2/$3" 2>&1) || exit_code=$?
|
||||
elif [[ $1 == 'mc' ]]; then
|
||||
delete_object_error=$(mc --insecure rm "$MC_ALIAS/$2/$3" 2>&1) || exit_code=$?
|
||||
elif [[ $1 == 'rest' ]]; then
|
||||
delete_object_rest "$2" "$3" || exit_code=$?
|
||||
else
|
||||
log 2 "invalid command type $1"
|
||||
return 1
|
||||
@@ -79,4 +81,44 @@ delete_object_with_user() {
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
delete_object_rest() {
|
||||
if [ $# -ne 2 ]; then
|
||||
log 2 "'delete_object_rest' requires bucket name, object name"
|
||||
return 1
|
||||
fi
|
||||
|
||||
generate_hash_for_payload ""
|
||||
|
||||
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
|
||||
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
|
||||
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
|
||||
# shellcheck disable=SC2154
|
||||
canonical_request="DELETE
|
||||
/$1/$2
|
||||
|
||||
host:$aws_endpoint_url_address
|
||||
x-amz-content-sha256:UNSIGNED-PAYLOAD
|
||||
x-amz-date:$current_date_time
|
||||
|
||||
host;x-amz-content-sha256;x-amz-date
|
||||
UNSIGNED-PAYLOAD"
|
||||
|
||||
if ! generate_sts_string "$current_date_time" "$canonical_request"; then
|
||||
log 2 "error generating sts string"
|
||||
return 1
|
||||
fi
|
||||
get_signature
|
||||
# shellcheck disable=SC2154
|
||||
reply=$(curl -ks -w "%{http_code}" -X DELETE "$header://$aws_endpoint_url_address/$1/$2" \
|
||||
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
|
||||
-H "x-amz-content-sha256: UNSIGNED-PAYLOAD" \
|
||||
-H "x-amz-date: $current_date_time" \
|
||||
-o "$TEST_FILE_FOLDER"/delete_object_error.txt 2>&1)
|
||||
if [[ "$reply" != "204" ]]; then
|
||||
log 2 "delete object command returned error: $(cat "$TEST_FILE_FOLDER"/delete_object_error.txt)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
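delete_object_rest, get_object_rest, and put_object_rest all hand the canonical request above to generate_sts_string and get_signature (helpers defined elsewhere in the test utilities) to produce an AWS Signature Version 4 signature. As a reference point only, and not the repository's implementation, the standard SigV4 derivation can be sketched with sha256sum and openssl, assuming canonical_request, current_date_time, AWS_REGION, and AWS_SECRET_ACCESS_KEY are set as in the function above:

# the scope date is the YYYYMMDD prefix of the x-amz-date timestamp (the $ymd used in the Authorization header)
ymd="${current_date_time%%T*}"
# string to sign: algorithm, timestamp, credential scope, and the SHA-256 of the canonical request
string_to_sign="AWS4-HMAC-SHA256
$current_date_time
$ymd/$AWS_REGION/s3/aws4_request
$(printf '%s' "$canonical_request" | sha256sum | awk '{print $1}')"
# SigV4 key derivation: HMAC-SHA256 chain over date, region, service, and the literal "aws4_request"
hmac_sha256() { printf '%s' "$2" | openssl dgst -sha256 -mac HMAC -macopt "$1" | awk '{print $NF}'; }
date_key=$(hmac_sha256 "key:AWS4$AWS_SECRET_ACCESS_KEY" "$ymd")
region_key=$(hmac_sha256 "hexkey:$date_key" "$AWS_REGION")
service_key=$(hmac_sha256 "hexkey:$region_key" "s3")
signing_key=$(hmac_sha256 "hexkey:$service_key" "aws4_request")
signature=$(hmac_sha256 "hexkey:$signing_key" "$string_to_sign")

The credential scope in the curl Authorization header ($AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request) must match the scope line of the string to sign, which is why the helpers reuse the same $ymd and region values.
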
@@ -30,6 +30,8 @@ get_object() {
|
||||
get_object_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate get "s3://$2/$3" "$4" 2>&1) || exit_code=$?
|
||||
elif [[ $1 == 'mc' ]]; then
|
||||
get_object_error=$(mc --insecure get "$MC_ALIAS/$2/$3" "$4" 2>&1) || exit_code=$?
|
||||
elif [[ $1 == 'rest' ]]; then
|
||||
get_object_rest "$2" "$3" "$4" || exit_code=$?
|
||||
else
|
||||
log 2 "'get object' command not implemented for '$1'"
|
||||
return 1
|
||||
@@ -83,3 +85,45 @@ get_object_with_user() {
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_object_rest() {
|
||||
log 6 "get_object_rest"
|
||||
if [ $# -ne 3 ]; then
|
||||
log 2 "'get_object_rest' requires bucket name, object name, output file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
generate_hash_for_payload ""
|
||||
|
||||
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
|
||||
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
|
||||
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
|
||||
# shellcheck disable=SC2154
|
||||
canonical_request="GET
|
||||
/$1/$2
|
||||
|
||||
host:$aws_endpoint_url_address
|
||||
x-amz-content-sha256:UNSIGNED-PAYLOAD
|
||||
x-amz-date:$current_date_time
|
||||
|
||||
host;x-amz-content-sha256;x-amz-date
|
||||
UNSIGNED-PAYLOAD"
|
||||
|
||||
if ! generate_sts_string "$current_date_time" "$canonical_request"; then
|
||||
log 2 "error generating sts string"
|
||||
return 1
|
||||
fi
|
||||
get_signature
|
||||
# shellcheck disable=SC2154
|
||||
reply=$(curl -w "%{http_code}" -ks "$header://$aws_endpoint_url_address/$1/$2" \
|
||||
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
|
||||
-H "x-amz-content-sha256: UNSIGNED-PAYLOAD" \
|
||||
-H "x-amz-date: $current_date_time" \
|
||||
-o "$3" 2>&1)
|
||||
log 5 "reply: $reply"
|
||||
if [[ "$reply" != "200" ]]; then
|
||||
log 2 "get object command returned error: $(cat "$3")"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ get_object_tagging() {
|
||||
return 1
|
||||
fi
|
||||
local result
|
||||
if [[ $1 == 'aws' ]]; then
|
||||
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
|
||||
tags=$(aws --no-verify-ssl s3api get-object-tagging --bucket "$2" --key "$3" 2>&1) || result=$?
|
||||
elif [[ $1 == 'mc' ]]; then
|
||||
tags=$(mc --insecure tag list "$MC_ALIAS"/"$2"/"$3" 2>&1) || result=$?
|
||||
|
||||
@@ -33,6 +33,8 @@ put_object() {
|
||||
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate put "$2" s3://"$3/$4" 2>&1) || exit_code=$?
|
||||
elif [[ $1 == 'mc' ]]; then
|
||||
error=$(mc --insecure put "$2" "$MC_ALIAS/$3/$4" 2>&1) || exit_code=$?
|
||||
elif [[ $1 == 'rest' ]]; then
|
||||
put_object_rest "$2" "$3" "$4" || exit_code=$?
|
||||
else
|
||||
log 2 "'put object' command not implemented for '$1'"
|
||||
return 1
|
||||
@@ -66,3 +68,43 @@ put_object_with_user() {
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
put_object_rest() {
|
||||
if [ $# -ne 3 ]; then
|
||||
log 2 "'put_object_rest' requires local file, bucket name, key"
|
||||
return 1
|
||||
fi
|
||||
|
||||
generate_hash_for_payload_file "$1"
|
||||
|
||||
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
|
||||
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
|
||||
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
|
||||
# shellcheck disable=SC2154
|
||||
canonical_request="PUT
|
||||
/$2/$3
|
||||
|
||||
host:$aws_endpoint_url_address
|
||||
x-amz-content-sha256:$payload_hash
|
||||
x-amz-date:$current_date_time
|
||||
|
||||
host;x-amz-content-sha256;x-amz-date
|
||||
$payload_hash"
|
||||
|
||||
if ! generate_sts_string "$current_date_time" "$canonical_request"; then
|
||||
log 2 "error generating sts string"
|
||||
return 1
|
||||
fi
|
||||
get_signature
|
||||
# shellcheck disable=SC2154
|
||||
reply=$(curl -ks -w "%{http_code}" -X PUT "$header://$aws_endpoint_url_address/$2/$3" \
|
||||
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
|
||||
-H "x-amz-content-sha256: $payload_hash" \
|
||||
-H "x-amz-date: $current_date_time" \
|
||||
-T "$1" -o "$TEST_FILE_FOLDER"/put_object_error.txt 2>&1)
|
||||
if [[ "$reply" != "200" ]]; then
|
||||
log 2 "put object command returned error: $(cat "$TEST_FILE_FOLDER"/put_object_error.txt)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
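One behavioral difference among the three REST helpers is the payload hash: get_object_rest and delete_object_rest sign with the literal UNSIGNED-PAYLOAD, while put_object_rest places the file's SHA-256 in both the canonical request and the x-amz-content-sha256 header. A quick sanity check of the values involved (the file path is illustrative):

# hex SHA-256 of the uploaded body, matching what generate_hash_for_payload_file computes
payload_hash="$(sha256sum "$TEST_FILE_FOLDER/test_file" | awk '{print $1}')"
# SHA-256 of an empty body, the value a signed zero-length payload would carry instead of UNSIGNED-PAYLOAD
empty_hash="$(printf '' | sha256sum | awk '{print $1}')"  # e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
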
@@ -1,34 +1,42 @@
|
||||
services:
|
||||
no_certs:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile_test_bats
|
||||
context: ../
|
||||
dockerfile: tests/Dockerfile_test_bats
|
||||
args:
|
||||
- CONFIG_FILE=tests/.env.nocerts
|
||||
static_buckets:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile_test_bats
|
||||
context: ../
|
||||
dockerfile: tests/Dockerfile_test_bats
|
||||
args:
|
||||
- CONFIG_FILE=tests/.env.static
|
||||
posix_backend:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile_test_bats
|
||||
context: ../
|
||||
dockerfile: tests/Dockerfile_test_bats
|
||||
args:
|
||||
- CONFIG_FILE=tests/.env.default
|
||||
image: bats_test
|
||||
s3_backend:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile_test_bats
|
||||
context: ../
|
||||
dockerfile: tests/Dockerfile_test_bats
|
||||
args:
|
||||
- CONFIG_FILE=tests/.env.s3
|
||||
- SECRETS_FILE=tests/.secrets.s3
|
||||
s3api_only:
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: tests/Dockerfile_test_bats
|
||||
args:
|
||||
- CONFIG_FILE=tests/.env.default
|
||||
image: bats_test
|
||||
command: ["s3api"]
|
||||
direct:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile_direct
|
||||
context: ../
|
||||
dockerfile: tests/Dockerfile_direct
|
||||
volumes:
|
||||
- ./.env.direct:/home/tester/tests/.env.direct
|
||||
- ./.secrets.direct:/home/tester/tests/.secrets.direct
|
||||
|
||||
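With the build contexts moved up to the repository root (context: ../) and posix_backend and s3api_only sharing the bats_test image tag, individual services can be exercised on their own. A hedged local invocation mirroring the updated docker-bats workflow:

# run only the s3api suite, as the workflow now does (command: ["s3api"] overrides the image CMD)
docker-compose -f tests/docker-compose-bats.yml up --exit-code-from s3api_only s3api_only
# one build of the shared bats_test image serves both posix_backend and s3api_only
docker-compose -f tests/docker-compose-bats.yml build posix_backend
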
tests/run.sh (114 changed lines)
@@ -18,7 +18,7 @@
|
||||
show_help() {
|
||||
echo "Usage: $0 [option...]"
|
||||
echo " -h, --help Display this help message and exit"
|
||||
echo " -s, --static Don't remove buckets between tests"
|
||||
echo " Separate the below by comma"
|
||||
echo " s3api Run tests with s3api cli"
|
||||
echo " s3api-non-policy Run policy tests with s3api cli"
|
||||
echo " s3api-policy Run policy tests with s3api cli"
|
||||
@@ -36,7 +36,7 @@ handle_param() {
|
||||
exit 0
|
||||
;;
|
||||
s3|s3api|s3cmd|mc|s3api-user|rest|s3api-policy|s3api-non-policy)
|
||||
set_command_type "$1"
|
||||
run_suite "$1"
|
||||
;;
|
||||
*) # Handle unrecognized options or positional arguments
|
||||
echo "Unrecognized option: $1" >&2
|
||||
@@ -45,68 +45,64 @@ handle_param() {
|
||||
esac
|
||||
}
|
||||
|
||||
set_command_type() {
|
||||
if [[ -n $command_type ]]; then
|
||||
echo "Error: command type already set"
|
||||
run_suite() {
|
||||
exit_code=0
|
||||
case $1 in
|
||||
s3api)
|
||||
echo "Running all s3api tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3api.sh || exit_code=$?
|
||||
if [[ $exit_code -eq 0 ]]; then
|
||||
"$HOME"/bin/bats ./tests/test_s3api_policy.sh || exit_code=$?
|
||||
fi
|
||||
if [[ $exit_code -eq 0 ]]; then
|
||||
"$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
|
||||
fi
|
||||
;;
|
||||
s3api-policy)
|
||||
echo "Running s3api policy tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3api_policy.sh || exit_code=$?
|
||||
;;
|
||||
s3api-non-policy)
|
||||
echo "Running s3api non-policy tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3api.sh || exit_code=$?
|
||||
;;
|
||||
s3)
|
||||
echo "Running s3 tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3.sh || exit_code=$?
|
||||
;;
|
||||
s3cmd)
|
||||
echo "Running s3cmd tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3cmd.sh || exit_code=$?
|
||||
if [[ $exit_code -eq 0 ]]; then
|
||||
"$HOME"/bin/bats ./tests/test_user_s3cmd.sh || exit_code=$?
|
||||
fi
|
||||
;;
|
||||
mc)
|
||||
echo "Running mc tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_mc.sh || exit_code=$?
|
||||
;;
|
||||
rest)
|
||||
echo "Running rest tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_rest.sh || exit_code=$?
|
||||
;;
|
||||
s3api-user)
|
||||
echo "Running s3api user tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
|
||||
esac
|
||||
if [ $exit_code -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
command_type=$1
|
||||
export command_type
|
||||
}
|
||||
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
handle_param "$1"
|
||||
shift # past argument or value
|
||||
done
|
||||
|
||||
if [[ -z "$VERSITYGW_TEST_ENV" ]] && [[ $BYPASS_ENV_FILE != "true" ]]; then
|
||||
echo "Error: VERSITYGW_TEST_ENV parameter must be set, or BYPASS_ENV_FILE must be set to true"
|
||||
exit 1
|
||||
if [ $# -le 0 ]; then
|
||||
show_help
|
||||
exit 0
|
||||
fi
|
||||
|
||||
exit_code=0
|
||||
case $command_type in
|
||||
s3api)
|
||||
echo "Running all s3api tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3api.sh || exit_code=$?
|
||||
if [[ $exit_code -eq 0 ]]; then
|
||||
"$HOME"/bin/bats ./tests/test_s3api_policy.sh || exit_code=$?
|
||||
fi
|
||||
if [[ $exit_code -eq 0 ]]; then
|
||||
"$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
|
||||
fi
|
||||
;;
|
||||
s3api-policy)
|
||||
echo "Running s3api policy tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3api_policy.sh || exit_code=$?
|
||||
;;
|
||||
s3api-non-policy)
|
||||
echo "Running s3api non-policy tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3api.sh || exit_code=$?
|
||||
;;
|
||||
s3)
|
||||
echo "Running s3 tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3.sh || exit_code=$?
|
||||
;;
|
||||
s3cmd)
|
||||
echo "Running s3cmd tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_s3cmd.sh || exit_code=$?
|
||||
if [[ $exit_code -eq 0 ]]; then
|
||||
"$HOME"/bin/bats ./tests/test_user_s3cmd.sh || exit_code=$?
|
||||
fi
|
||||
;;
|
||||
mc)
|
||||
echo "Running mc tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_mc.sh || exit_code=$?
|
||||
;;
|
||||
rest)
|
||||
echo "Running rest tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_rest.sh || exit_code=$?
|
||||
;;
|
||||
s3api-user)
|
||||
echo "Running s3api user tests ..."
|
||||
"$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
|
||||
esac
|
||||
IFS=',' read -ra options <<< "$1"
|
||||
for option in "${options[@]}"; do
|
||||
handle_param "$option"
|
||||
done
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
exit $exit_code
|
||||
exit 0
|
||||
|
||||
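run.sh now accepts a single comma-separated suite list (split with IFS=',') instead of one flag per suite, and the Dockerfile's ENTRYPOINT/CMD split passes that list in as the container argument. A hedged usage sketch; the image tag comes from the compose file, and the standalone docker run assumes a suitable CONFIG_FILE was baked in at build time:

# locally: run two suites back to back; an unrecognized name is reported and the script exits
./tests/run.sh "s3api,rest"
# in Docker: the default CMD is "s3api,s3,s3cmd,mc,rest"; overriding it narrows the run
docker run --rm bats_test "s3cmd,mc"
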
@@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright 2024 Versity Software
|
||||
# This file is licensed under the Apache License, Version 2.0
|
||||
|
||||
@@ -101,14 +101,14 @@ test_create_multipart_upload_properties_aws_root() {
|
||||
"$expected_hold_status" \
|
||||
"$expected_retention_mode" \
|
||||
"$later" \
|
||||
"$expected_tag_key=$expected_tag_val" || fail "error performing multipart upload"
|
||||
"$expected_tag_key=$expected_tag_val"
|
||||
assert_success
|
||||
|
||||
run get_and_verify_metadata "$bucket_file" "$expected_content_type" "$expected_meta_key" "$expected_meta_val" \
|
||||
"$expected_hold_status" "$expected_retention_mode" "$later"
|
||||
assert_success
|
||||
|
||||
run get_and_check_bucket_tags "$BUCKET_ONE_NAME" "$expected_tag_key" "$expected_tag_val"
|
||||
run check_verify_object_tags "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$expected_tag_key" "$expected_tag_val"
|
||||
assert_success
|
||||
|
||||
run put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF"
|
||||
|
||||
@@ -16,8 +16,12 @@
|
||||
|
||||
source ./tests/setup.sh
|
||||
source ./tests/util.sh
|
||||
source ./tests/util_acl.sh
|
||||
source ./tests/util_bucket_location.sh
|
||||
source ./tests/util_file.sh
|
||||
source ./tests/util_list_buckets.sh
|
||||
source ./tests/util_policy.sh
|
||||
source ./tests/util_presigned_url.sh
|
||||
source ./tests/commands/copy_object.sh
|
||||
source ./tests/commands/delete_bucket_tagging.sh
|
||||
source ./tests/commands/delete_object_tagging.sh
|
||||
@@ -78,9 +82,11 @@ test_common_create_delete_bucket() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
bucket_exists "$1" "$BUCKET_ONE_NAME" || fail "failed bucket existence check"
|
||||
run bucket_exists "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME" || fail "failed to delete bucket"
|
||||
run delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_copy_object() {
|
||||
@@ -96,21 +102,21 @@ test_common_copy_object() {
|
||||
assert_success
|
||||
|
||||
if [[ $1 == 's3' ]]; then
|
||||
copy_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name" || fail "failed to copy object to bucket one"
|
||||
run copy_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name"
|
||||
assert_success
|
||||
else
|
||||
put_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name" || fail "failed to put object to bucket one"
|
||||
run put_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name"
|
||||
assert_success
|
||||
fi
|
||||
if [[ $1 == 's3' ]]; then
|
||||
copy_object "$1" "s3://$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" || fail "object not copied to bucket two"
|
||||
run copy_object "$1" "s3://$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name"
|
||||
assert_success
|
||||
else
|
||||
copy_object "$1" "$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name" || fail "object not copied to bucket two"
|
||||
run copy_object "$1" "$BUCKET_ONE_NAME/$object_name" "$BUCKET_TWO_NAME" "$object_name"
|
||||
assert_success
|
||||
fi
|
||||
run download_and_compare_file "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_TWO_NAME" "$object_name" "$TEST_FILE_FOLDER/$object_name-copy"
|
||||
assert_success
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_bucket_or_contents "$1" "$BUCKET_TWO_NAME"
|
||||
delete_test_files "$object_name" "$object_name-copy"
|
||||
}
|
||||
|
||||
# param: client
|
||||
@@ -167,9 +173,6 @@ test_common_put_object() {
|
||||
|
||||
run object_exists "$1" "$BUCKET_ONE_NAME" "$2"
|
||||
assert_failure 1
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_test_files "$2" "${2}-copy"
|
||||
}
|
||||
|
||||
test_common_put_get_object() {
|
||||
@@ -185,38 +188,17 @@ test_common_put_get_object() {
|
||||
assert_success
|
||||
|
||||
if [[ $1 == 's3' ]]; then
|
||||
copy_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name" || fail "failed to add object to bucket"
|
||||
run copy_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name"
|
||||
assert_success
|
||||
else
|
||||
put_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name" || fail "failed to add object to bucket"
|
||||
run put_object "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name"
|
||||
assert_success
|
||||
fi
|
||||
object_exists "$1" "$BUCKET_ONE_NAME" "$object_name" || fail "object not added to bucket"
|
||||
run object_exists "$1" "$BUCKET_ONE_NAME" "$object_name"
|
||||
assert_success
|
||||
|
||||
run download_and_compare_file "$1" "$TEST_FILE_FOLDER/$object_name" "$BUCKET_ONE_NAME" "$object_name" "$TEST_FILE_FOLDER/${2}-copy"
|
||||
assert_success
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_test_files "$object_name" "${object_name}-copy"
|
||||
}
|
||||
|
||||
test_common_get_set_versioning() {
|
||||
local object_name="test-object"
|
||||
|
||||
run create_test_files "$object_name"
|
||||
assert_success
|
||||
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
get_bucket_versioning "$1" "$BUCKET_ONE_NAME" || local get_result=$?
|
||||
[[ $get_result -eq 0 ]] || fail "error getting bucket versioning"
|
||||
|
||||
put_bucket_versioning "$1" "$BUCKET_ONE_NAME" "Enabled" || local put_result=$?
|
||||
[[ $put_result -eq 0 ]] || fail "error putting bucket versioning"
|
||||
|
||||
get_bucket_versioning "$1" "$BUCKET_ONE_NAME" || local get_result=$?
|
||||
[[ $get_result -eq 0 ]] || fail "error getting bucket versioning"
|
||||
|
||||
fail "test fail"
|
||||
}
|
||||
|
||||
# common test for listing buckets
|
||||
@@ -230,30 +212,8 @@ test_common_list_buckets() {
|
||||
run setup_buckets "$1" "$BUCKET_ONE_NAME" "$BUCKET_TWO_NAME"
|
||||
assert_success
|
||||
|
||||
list_buckets "$1"
|
||||
local bucket_one_found=false
|
||||
local bucket_two_found=false
|
||||
if [ -z "$bucket_array" ]; then
|
||||
fail "bucket_array parameter not exported"
|
||||
fi
|
||||
log 5 "bucket array: ${bucket_array[*]}"
|
||||
for bucket in "${bucket_array[@]}"; do
|
||||
if [ "$bucket" == "$BUCKET_ONE_NAME" ] || [ "$bucket" == "s3://$BUCKET_ONE_NAME" ]; then
|
||||
bucket_one_found=true
|
||||
elif [ "$bucket" == "$BUCKET_TWO_NAME" ] || [ "$bucket" == "s3://$BUCKET_TWO_NAME" ]; then
|
||||
bucket_two_found=true
|
||||
fi
|
||||
if [ $bucket_one_found == true ] && [ $bucket_two_found == true ]; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
echo $bucket_one_found $bucket_two_found
|
||||
if [ $bucket_one_found == false ] || [ $bucket_two_found == false ]; then
|
||||
fail "Not all buckets found"
|
||||
fi
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_bucket_or_contents "$1" "$BUCKET_TWO_NAME"
|
||||
run list_and_check_buckets "$1" "$BUCKET_ONE_NAME" "$BUCKET_TWO_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_list_objects() {
|
||||
@@ -268,41 +228,21 @@ test_common_list_objects() {
|
||||
run create_test_files $object_one $object_two
|
||||
assert_success
|
||||
|
||||
echo "test data" > "$TEST_FILE_FOLDER"/"$object_one"
|
||||
echo "test data 2" > "$TEST_FILE_FOLDER"/"$object_two"
|
||||
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_object "$1" "$TEST_FILE_FOLDER"/$object_one "$BUCKET_ONE_NAME" "$object_one" || local result_two=$?
|
||||
[[ result_two -eq 0 ]] || fail "Error adding object one"
|
||||
put_object "$1" "$TEST_FILE_FOLDER"/$object_two "$BUCKET_ONE_NAME" "$object_two" || local result_three=$?
|
||||
[[ result_three -eq 0 ]] || fail "Error adding object two"
|
||||
run put_object "$1" "$TEST_FILE_FOLDER"/$object_one "$BUCKET_ONE_NAME" "$object_one"
|
||||
assert_success
|
||||
|
||||
list_objects "$1" "$BUCKET_ONE_NAME"
|
||||
local object_one_found=false
|
||||
local object_two_found=false
|
||||
# shellcheck disable=SC2154
|
||||
for object in "${object_array[@]}"; do
|
||||
if [ "$object" == $object_one ] || [ "$object" == "s3://$BUCKET_ONE_NAME/$object_one" ]; then
|
||||
object_one_found=true
|
||||
elif [ "$object" == $object_two ] || [ "$object" == "s3://$BUCKET_ONE_NAME/$object_two" ]; then
|
||||
object_two_found=true
|
||||
fi
|
||||
done
|
||||
run put_object "$1" "$TEST_FILE_FOLDER"/$object_two "$BUCKET_ONE_NAME" "$object_two"
|
||||
assert_success
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_test_files $object_one $object_two
|
||||
|
||||
if [ $object_one_found != true ] || [ $object_two_found != true ]; then
|
||||
fail "$object_one and/or $object_two not listed (all objects: ${object_array[*]})"
|
||||
fi
|
||||
run list_check_objects_common "$1" "$BUCKET_ONE_NAME" "$object_one" "$object_two"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_set_get_delete_bucket_tags() {
|
||||
if [[ $# -ne 1 ]]; then
|
||||
fail "set/get bucket tags test requires command type"
|
||||
fi
|
||||
assert [ $# -eq 1 ]
|
||||
|
||||
local key="test_key"
|
||||
local value="test_value"
|
||||
@@ -310,40 +250,24 @@ test_common_set_get_delete_bucket_tags() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags first time"
|
||||
run verify_no_bucket_tags "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
check_bucket_tags_empty "$1" "$BUCKET_ONE_NAME" || fail "error checking if bucket tags are empty"
|
||||
run put_bucket_tagging "$1" "$BUCKET_ONE_NAME" $key $value
|
||||
assert_success
|
||||
|
||||
put_bucket_tagging "$1" "$BUCKET_ONE_NAME" $key $value || fail "error putting bucket tags"
|
||||
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags second time"
|
||||
run get_and_check_bucket_tags "$BUCKET_ONE_NAME" "$key" "$value"
|
||||
assert_success
|
||||
|
||||
local tag_set_key
|
||||
local tag_set_value
|
||||
if [[ $1 == 'aws' ]]; then
|
||||
log 5 "Post-export tags: $tags"
|
||||
tag_set_key=$(echo "$tags" | jq '.TagSet[0].Key')
|
||||
tag_set_value=$(echo "$tags" | jq '.TagSet[0].Value')
|
||||
[[ $tag_set_key == '"'$key'"' ]] || fail "Key mismatch"
|
||||
[[ $tag_set_value == '"'$value'"' ]] || fail "Value mismatch"
|
||||
else
|
||||
read -r tag_set_key tag_set_value <<< "$(echo "$tags" | awk 'NR==2 {print $1, $3}')"
|
||||
[[ $tag_set_key == "$key" ]] || fail "Key mismatch"
|
||||
[[ $tag_set_value == "$value" ]] || fail "Value mismatch"
|
||||
fi
|
||||
run delete_bucket_tagging "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags third time"
|
||||
|
||||
check_bucket_tags_empty "$1" "$BUCKET_ONE_NAME" || fail "error checking if bucket tags are empty"
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
run verify_no_bucket_tags "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_set_get_object_tags() {
|
||||
if [[ $# -ne 1 ]]; then
|
||||
echo "get/set object tags missing command type"
|
||||
return 1
|
||||
fi
|
||||
assert [ $# -eq 1 ]
|
||||
|
||||
local bucket_file="bucket-file"
|
||||
local key="test_key"
|
||||
@@ -355,31 +279,17 @@ test_common_set_get_object_tags() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_object "$1" "$TEST_FILE_FOLDER"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "Failed to add object to bucket '$BUCKET_ONE_NAME'"
|
||||
run put_object "$1" "$TEST_FILE_FOLDER"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file"
|
||||
assert_success
|
||||
|
||||
get_object_tagging "$1" "$BUCKET_ONE_NAME" $bucket_file || fail "Error getting object tags"
|
||||
if [[ $1 == 'aws' ]]; then
|
||||
tag_set=$(echo "$tags" | jq '.TagSet')
|
||||
[[ $tag_set == "[]" ]] || [[ $tag_set == "" ]] || fail "Error: tags not empty"
|
||||
elif [[ $tags != *"No tags found"* ]] && [[ $tags != "" ]]; then
|
||||
fail "no tags found (tags: $tags)"
|
||||
fi
|
||||
run verify_no_object_tags "$1" "$BUCKET_ONE_NAME" "$bucket_file"
|
||||
assert_success
|
||||
|
||||
put_object_tagging "$1" "$BUCKET_ONE_NAME" $bucket_file $key $value || fail "error putting object tagging"
|
||||
get_object_tagging "$1" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error getting object tags"
|
||||
if [[ $1 == 'aws' ]]; then
|
||||
tag_set_key=$(echo "$tags" | jq -r '.TagSet[0].Key')
|
||||
tag_set_value=$(echo "$tags" | jq -r '.TagSet[0].Value')
|
||||
[[ $tag_set_key == "$key" ]] || fail "Key mismatch"
|
||||
[[ $tag_set_value == "$value" ]] || fail "Value mismatch"
|
||||
else
|
||||
read -r tag_set_key tag_set_value <<< "$(echo "$tags" | awk 'NR==2 {print $1, $3}')"
|
||||
[[ $tag_set_key == "$key" ]] || fail "Key mismatch"
|
||||
[[ $tag_set_value == "$value" ]] || fail "Value mismatch"
|
||||
fi
|
||||
run put_object_tagging "$1" "$BUCKET_ONE_NAME" $bucket_file $key $value
|
||||
assert_success
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_test_files $bucket_file
|
||||
run check_verify_object_tags "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$key" "$value"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_presigned_url_utf8_chars() {
|
||||
@@ -398,47 +308,30 @@ test_common_presigned_url_utf8_chars() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_object "$1" "$TEST_FILE_FOLDER"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || put_result=$?
|
||||
[[ $put_result -eq 0 ]] || fail "Failed to add object $bucket_file"
|
||||
run put_object "$1" "$TEST_FILE_FOLDER"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file"
|
||||
assert_success
|
||||
|
||||
create_presigned_url "$1" "$BUCKET_ONE_NAME" "$bucket_file" || presigned_result=$?
|
||||
[[ $presigned_result -eq 0 ]] || fail "presigned url creation failure"
|
||||
run create_check_presigned_url "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file_copy"
|
||||
assert_success
|
||||
|
||||
error=$(curl -k -v "$presigned_url" -o "$TEST_FILE_FOLDER"/"$bucket_file_copy") || curl_result=$?
|
||||
if [[ $curl_result -ne 0 ]]; then
|
||||
fail "error downloading file with curl: $error"
|
||||
fi
|
||||
compare_files "$TEST_FILE_FOLDER"/"$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file_copy" || compare_result=$?
|
||||
if [[ $compare_result -ne 0 ]]; then
|
||||
echo "file one: $(cat "$TEST_FILE_FOLDER"/"$bucket_file")"
|
||||
echo "file two: $(cat "$TEST_FILE_FOLDER"/"$bucket_file_copy")"
|
||||
fail "files don't match"
|
||||
fi
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
delete_test_files "$bucket_file" "$bucket_file_copy"
|
||||
run compare_files "$TEST_FILE_FOLDER"/"$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file_copy"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_list_objects_file_count() {
|
||||
if [[ $# -ne 1 ]]; then
|
||||
echo "list objects greater than 1000 missing command type"
|
||||
return 1
|
||||
fi
|
||||
assert [ $# -eq 1 ]
|
||||
|
||||
run create_test_file_count 1001
|
||||
assert_success
|
||||
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_object_multiple "$1" "$TEST_FILE_FOLDER/file_*" "$BUCKET_ONE_NAME" || local put_result=$?
|
||||
[[ $put_result -eq 0 ]] || fail "Failed to copy files to bucket"
|
||||
list_objects "$1" "$BUCKET_ONE_NAME"
|
||||
if [[ $LOG_LEVEL -ge 5 ]]; then
|
||||
log 5 "Array: ${object_array[*]}"
|
||||
fi
|
||||
local file_count="${#object_array[@]}"
|
||||
[[ $file_count == 1001 ]] || fail "file count should be 1001, is $file_count"
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
run put_object_multiple "$1" "$TEST_FILE_FOLDER/file_*" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
run list_objects_check_file_count "$1" "$BUCKET_ONE_NAME" 1001
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_delete_object_tagging() {
|
||||
@@ -454,18 +347,20 @@ test_common_delete_object_tagging() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_object "$1" "$TEST_FILE_FOLDER"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "Failed to add object to bucket"
|
||||
run put_object "$1" "$TEST_FILE_FOLDER"/"$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file"
|
||||
assert_success
|
||||
|
||||
put_object_tagging "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$tag_key" "$tag_value" || fail "failed to add tags to object"
|
||||
run put_object_tagging "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$tag_key" "$tag_value"
|
||||
assert_success
|
||||
|
||||
get_and_verify_object_tags "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$tag_key" "$tag_value" || fail "failed to get tags"
|
||||
run get_and_verify_object_tags "$1" "$BUCKET_ONE_NAME" "$bucket_file" "$tag_key" "$tag_value"
|
||||
assert_success
|
||||
|
||||
delete_object_tagging "$1" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error deleting object tagging"
|
||||
run delete_object_tagging "$1" "$BUCKET_ONE_NAME" "$bucket_file"
|
||||
assert_success
|
||||
|
||||
check_object_tags_empty "$1" "$BUCKET_ONE_NAME" "$bucket_file" || fail "failed to get tags"
|
||||
|
||||
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
|
||||
delete_test_files "$bucket_file"
|
||||
run check_object_tags_empty "$1" "$BUCKET_ONE_NAME" "$bucket_file"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_get_bucket_location() {
|
||||
@@ -474,9 +369,8 @@ test_common_get_bucket_location() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
get_bucket_location "$1" "$BUCKET_ONE_NAME"
|
||||
# shellcheck disable=SC2154
|
||||
[[ $bucket_location == "null" ]] || [[ $bucket_location == "us-east-1" ]] || fail "wrong location: '$bucket_location'"
|
||||
run get_check_bucket_location "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_put_bucket_acl_s3cmd() {
|
||||
@@ -487,46 +381,28 @@ test_put_bucket_acl_s3cmd() {
|
||||
run setup_bucket "s3cmd" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred" || fail "error putting bucket ownership controls"
|
||||
run put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
|
||||
assert_success
|
||||
|
||||
username=$USERNAME_ONE
|
||||
if [[ $DIRECT != "true" ]]; then
|
||||
setup_user "$username" "HIJKLMN" "user" || fail "error creating user"
|
||||
run setup_user "$username" "HIJKLMN" "user"
|
||||
assert_success
|
||||
fi
|
||||
sleep 5
|
||||
|
||||
get_bucket_acl "s3cmd" "$BUCKET_ONE_NAME" || fail "error retrieving acl"
|
||||
log 5 "Initial ACLs: $acl"
|
||||
acl_line=$(echo "$acl" | grep "ACL")
|
||||
user_id=$(echo "$acl_line" | awk '{print $2}')
|
||||
if [[ $DIRECT == "true" ]]; then
|
||||
[[ $user_id == "$DIRECT_DISPLAY_NAME:" ]] || fail "ID mismatch ($user_id, $DIRECT_DISPLAY_NAME)"
|
||||
else
|
||||
[[ $user_id == "$AWS_ACCESS_KEY_ID:" ]] || fail "ID mismatch ($user_id, $AWS_ACCESS_KEY_ID)"
|
||||
fi
|
||||
permission=$(echo "$acl_line" | awk '{print $3}')
|
||||
[[ $permission == "FULL_CONTROL" ]] || fail "Permission mismatch ($permission)"
|
||||
run get_check_default_acl_s3cmd "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
if [[ $DIRECT == "true" ]]; then
|
||||
put_public_access_block_enable_public_acls "$BUCKET_ONE_NAME" || fail "error enabling public ACLs"
|
||||
run put_public_access_block_enable_public_acls "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
fi
|
||||
put_bucket_canned_acl_s3cmd "$BUCKET_ONE_NAME" "--acl-public" || fail "error putting canned s3cmd ACL"
|
||||
run put_bucket_canned_acl_s3cmd "$BUCKET_ONE_NAME" "--acl-public"
|
||||
assert_success
|
||||
|
||||
get_bucket_acl "s3cmd" "$BUCKET_ONE_NAME" || fail "error retrieving acl"
|
||||
log 5 "ACL after read put: $acl"
|
||||
acl_lines=$(echo "$acl" | grep "ACL")
|
||||
log 5 "ACL lines: $acl_lines"
|
||||
while IFS= read -r line; do
|
||||
lines+=("$line")
|
||||
done <<< "$acl_lines"
|
||||
log 5 "lines: ${lines[*]}"
|
||||
[[ ${#lines[@]} -eq 2 ]] || fail "unexpected number of ACL lines: ${#lines[@]}"
|
||||
anon_name=$(echo "${lines[1]}" | awk '{print $2}')
|
||||
anon_permission=$(echo "${lines[1]}" | awk '{print $3}')
|
||||
[[ $anon_name == "*anon*:" ]] || fail "unexpected anon name: $anon_name"
|
||||
[[ $anon_permission == "READ" ]] || fail "unexpected anon permission: $anon_permission"
|
||||
|
||||
delete_bucket_or_contents "s3cmd" "$BUCKET_ONE_NAME"
|
||||
run get_check_post_change_acl_s3cmd "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_put_bucket_acl() {
|
||||
@@ -539,23 +415,19 @@ test_common_put_bucket_acl() {
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred" || fail "error putting bucket ownership controls"
|
||||
run put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
|
||||
assert_success
|
||||
|
||||
username=$USERNAME_ONE
|
||||
setup_user "$username" "HIJKLMN" "user" || fail "error creating user"
|
||||
run setup_user "$username" "HIJKLMN" "user"
|
||||
assert_success
|
||||
|
||||
get_bucket_acl "$1" "$BUCKET_ONE_NAME" || fail "error retrieving acl"
|
||||
|
||||
log 5 "Initial ACLs: $acl"
|
||||
id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Owner.ID' 2>&1) || fail "error getting ID: $id"
|
||||
if [[ $id != "$AWS_ACCESS_KEY_ID" ]]; then
|
||||
# for direct, ID is canonical user ID rather than AWS_ACCESS_KEY_ID
|
||||
canonical_id=$(aws --no-verify-ssl s3api list-buckets --query 'Owner.ID' 2>&1) || fail "error getting canonical ID: $canonical_id"
|
||||
[[ $id == "$canonical_id" ]] || fail "acl ID doesn't match AWS key or canonical ID"
|
||||
fi
|
||||
run get_check_acl_id "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
acl_file="test-acl"
|
||||
create_test_files "$acl_file"
|
||||
run create_test_files "$acl_file"
|
||||
assert_success
|
||||
|
||||
if [[ $DIRECT == "true" ]]; then
|
||||
grantee="{\"Type\": \"Group\", \"URI\": \"http://acs.amazonaws.com/groups/global/AllUsers\"}"
|
||||
@@ -577,14 +449,11 @@ cat <<EOF > "$TEST_FILE_FOLDER"/"$acl_file"
|
||||
}
|
||||
EOF
|
||||
|
||||
log 6 "before 1st put acl"
|
||||
put_bucket_acl_s3api "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER"/"$acl_file" || fail "error putting first acl"
|
||||
get_bucket_acl "$1" "$BUCKET_ONE_NAME" || fail "error retrieving second ACL"
|
||||
run put_bucket_acl_s3api "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER"/"$acl_file"
|
||||
assert_success
|
||||
|
||||
log 5 "Acls after 1st put: $acl"
|
||||
public_grants=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Grants[1]' 2>&1) || fail "error getting public grants: $public_grants"
|
||||
permission=$(echo "$public_grants" | jq -r '.Permission' 2>&1) || fail "error getting permission: $permission"
|
||||
[[ $permission == "READ" ]] || fail "incorrect permission ($permission)"
|
||||
run get_check_acl_after_first_put "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
cat <<EOF > "$TEST_FILE_FOLDER"/"$acl_file"
|
||||
{
|
||||
@@ -603,21 +472,15 @@ cat <<EOF > "$TEST_FILE_FOLDER"/"$acl_file"
|
||||
}
|
||||
EOF
|
||||
|
||||
put_bucket_acl_s3api "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER"/"$acl_file" || fail "error putting second acl"
|
||||
get_bucket_acl "$1" "$BUCKET_ONE_NAME" || fail "error retrieving second ACL"
|
||||
run put_bucket_acl_s3api "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER"/"$acl_file"
|
||||
assert_success
|
||||
|
||||
log 5 "Acls after 2nd put: $acl"
|
||||
public_grants=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Grants' 2>&1) || fail "error retrieving public grants: $public_grants"
|
||||
public_grant_length=$(echo "$public_grants" | jq -r 'length' 2>&1) || fail "Error retrieving public grant length: $public_grant_length"
|
||||
[[ $public_grant_length -eq 2 ]] || fail "incorrect grant length for private ACL ($public_grant_length)"
|
||||
permission=$(echo "$public_grants" | jq -r '.[0].Permission' 2>&1) || fail "Error retrieving permission: $permission"
|
||||
[[ $permission == "FULL_CONTROL" ]] || fail "incorrect permission ($permission)"
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
run get_check_acl_after_second_put "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_get_put_delete_bucket_policy() {
|
||||
[[ $# -eq 1 ]] || fail "get/put/delete policy test requires command type"
|
||||
assert [ $# -eq 1 ]
|
||||
|
||||
policy_file="policy_file"
|
||||
|
||||
@@ -634,53 +497,27 @@ test_common_get_put_delete_bucket_policy() {
|
||||
action="s3:GetObject"
|
||||
resource="arn:aws:s3:::$BUCKET_ONE_NAME/*"
|
||||
|
||||
cat <<EOF > "$TEST_FILE_FOLDER"/$policy_file
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "$effect",
|
||||
"Principal": $principal,
|
||||
"Action": "$action",
|
||||
"Resource": "$resource"
|
||||
}
|
||||
]
|
||||
}
|
||||
EOF
|
||||
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "$effect" "$principal" "$action" "$resource"
|
||||
assert_success
|
||||
log 5 "POLICY: $(cat "$TEST_FILE_FOLDER/$policy_file")"
|
||||
|
||||
run setup_bucket "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
check_for_empty_policy "$1" "$BUCKET_ONE_NAME" || fail "policy not empty"
|
||||
run check_for_empty_policy "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
put_bucket_policy "$1" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER"/"$policy_file" || fail "error putting bucket policy"
|
||||
run put_bucket_policy "$1" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER"/"$policy_file"
|
||||
assert_success
|
||||
|
||||
get_bucket_policy "$1" "$BUCKET_ONE_NAME" || fail "error getting bucket policy after setting"
|
||||
run get_and_check_policy "$1" "$BUCKET_ONE_NAME" "$effect" "$principal" "$action" "$resource"
|
||||
assert_success
|
||||
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "POLICY: $bucket_policy"
|
||||
statement=$(echo "$bucket_policy" | jq -r '.Statement[0]' 2>&1) || fail "error getting statement value: $statement"
|
||||
returned_effect=$(echo "$statement" | jq -r '.Effect' 2>&1) || fail "error getting effect: $returned_effect"
|
||||
[[ $effect == "$returned_effect" ]] || fail "effect mismatch ($effect, $returned_effect)"
|
||||
returned_principal=$(echo "$statement" | jq -r '.Principal')
|
||||
if [[ -n $DIRECT ]] && arn=$(echo "$returned_principal" | jq -r '.AWS' 2>&1); then
|
||||
[[ $arn == "arn:aws:iam::$DIRECT_AWS_USER_ID:user/s3user" ]] || fail "arn mismatch"
|
||||
else
|
||||
[[ $principal == "\"$returned_principal\"" ]] || fail "principal mismatch ($principal, $returned_principal)"
|
||||
fi
|
||||
returned_action=$(echo "$statement" | jq -r '.Action')
|
||||
[[ $action == "$returned_action" ]] || fail "action mismatch ($action, $returned_action)"
|
||||
returned_resource=$(echo "$statement" | jq -r '.Resource')
|
||||
[[ $resource == "$returned_resource" ]] || fail "resource mismatch ($resource, $returned_resource)"
|
||||
run delete_bucket_policy "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
delete_bucket_policy "$1" "$BUCKET_ONE_NAME" || delete_result=$?
|
||||
[[ $delete_result -eq 0 ]] || fail "error deleting policy"
|
||||
|
||||
check_for_empty_policy "$1" "$BUCKET_ONE_NAME" || check_result=$?
|
||||
[[ $get_result -eq 0 ]] || fail "policy not empty after deletion"
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
run check_for_empty_policy "$1" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
}
|
||||
|
||||
test_common_ls_directory_object() {
|
||||
@@ -702,6 +539,4 @@ test_common_ls_directory_object() {
|
||||
|
||||
run list_and_check_directory_obj "$1" "$test_file"
|
||||
assert_success "error listing and checking directory object"
|
||||
|
||||
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
source ./tests/test_common.sh
|
||||
source ./tests/setup.sh
|
||||
source ./tests/util_create_bucket.sh
|
||||
source ./tests/util_tags.sh
|
||||
source ./tests/commands/delete_bucket_policy.sh
|
||||
source ./tests/commands/get_bucket_policy.sh
|
||||
source ./tests/commands/put_bucket_policy.sh
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
source ./tests/commands/get_object.sh
|
||||
source ./tests/commands/list_buckets.sh
|
||||
source ./tests/commands/put_object.sh
|
||||
source ./tests/logger.sh
|
||||
@@ -24,10 +25,34 @@ source ./tests/util_list_objects.sh
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "test_authorization_list_buckets" {
|
||||
@test "test_rest_list_buckets" {
|
||||
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
run list_check_buckets_rest
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "test_rest_delete_object" {
|
||||
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
|
||||
test_file="test_file"
|
||||
run create_test_files "$test_file"
|
||||
assert_success
|
||||
|
||||
run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
|
||||
assert_success
|
||||
|
||||
run get_object "rest" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
|
||||
assert_success
|
||||
|
||||
run compare_files "$TEST_FILE_FOLDER/$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
|
||||
assert_success
|
||||
|
||||
run delete_object "rest" "$BUCKET_ONE_NAME" "$test_file"
|
||||
assert_success
|
||||
|
||||
run get_object "rest" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
|
||||
assert_failure
|
||||
}
|
||||
|
||||
@@ -1176,31 +1176,6 @@ multipart_upload_from_bucket_range() {
|
||||
return 0
|
||||
}
|
||||
|
||||
create_presigned_url() {
|
||||
if [[ $# -ne 3 ]]; then
|
||||
echo "create presigned url function requires command type, bucket, and filename"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local presign_result=0
|
||||
if [[ $1 == 'aws' ]]; then
|
||||
presigned_url=$(aws s3 presign "s3://$2/$3" --expires-in 900) || presign_result=$?
|
||||
elif [[ $1 == 's3cmd' ]]; then
|
||||
presigned_url=$(s3cmd --no-check-certificate "${S3CMD_OPTS[@]}" signurl "s3://$2/$3" "$(echo "$(date +%s)" + 900 | bc)") || presign_result=$?
|
||||
elif [[ $1 == 'mc' ]]; then
|
||||
presigned_url_data=$(mc --insecure share download --recursive "$MC_ALIAS/$2/$3") || presign_result=$?
|
||||
presigned_url="${presigned_url_data#*Share: }"
|
||||
else
|
||||
echo "unrecognized command type $1"
|
||||
return 1
|
||||
fi
|
||||
if [[ $presign_result -ne 0 ]]; then
|
||||
echo "error generating presigned url: $presigned_url"
|
||||
return 1
|
||||
fi
|
||||
export presigned_url
|
||||
}
|
||||
|
||||
list_and_check_directory_obj() {
|
||||
#assert [ $# -eq 2 ]
|
||||
if [ $# -ne 2 ]; then
|
||||
|
||||
tests/util_acl.sh (new file, 151 lines)
@@ -0,0 +1,151 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
get_check_default_acl_s3cmd() {
|
||||
if [ $# -ne 1 ]; then
|
||||
log 2 "'get_check_acl_s3cmd' requires bucket name"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_acl "s3cmd" "$BUCKET_ONE_NAME"; then
|
||||
log 2 "error retrieving acl"
|
||||
return 1
|
||||
fi
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "Initial ACLs: $acl"
|
||||
acl_line=$(echo "$acl" | grep "ACL")
|
||||
user_id=$(echo "$acl_line" | awk '{print $2}')
|
||||
if [[ $DIRECT == "true" ]]; then
|
||||
if [[ $user_id != "$DIRECT_DISPLAY_NAME:" ]]; then
|
||||
log 2 "ID mismatch ($user_id, $DIRECT_DISPLAY_NAME)"
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
if [[ $user_id != "$AWS_ACCESS_KEY_ID:" ]]; then
|
||||
log 2 "ID mismatch ($user_id, $AWS_ACCESS_KEY_ID)"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
permission=$(echo "$acl_line" | awk '{print $3}')
|
||||
if [[ $permission != "FULL_CONTROL" ]]; then
|
||||
log 2 "Permission mismatch ($permission)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_check_post_change_acl_s3cmd() {
|
||||
if [ $# -ne 1 ]; then
|
||||
log 2 "'get_check_post_change_acl_s3cmd' requires bucket name"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_acl "s3cmd" "$1"; then
|
||||
log 2 "error retrieving acl"
|
||||
return 1
|
||||
fi
|
||||
log 5 "ACL after read put: $acl"
|
||||
acl_lines=$(echo "$acl" | grep "ACL")
|
||||
log 5 "ACL lines: $acl_lines"
|
||||
while IFS= read -r line; do
|
||||
lines+=("$line")
|
||||
done <<< "$acl_lines"
|
||||
log 5 "lines: ${lines[*]}"
|
||||
if [[ ${#lines[@]} -ne 2 ]]; then
|
||||
log 2 "unexpected number of ACL lines: ${#lines[@]}"
|
||||
return 1
|
||||
fi
|
||||
anon_name=$(echo "${lines[1]}" | awk '{print $2}')
|
||||
anon_permission=$(echo "${lines[1]}" | awk '{print $3}')
|
||||
if [[ $anon_name != "*anon*:" ]]; then
|
||||
log 2 "unexpected anon name: $anon_name"
|
||||
return 1
|
||||
fi
|
||||
if [[ $anon_permission != "READ" ]]; then
|
||||
log 2 "unexpected anon permission: $anon_permission"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_check_acl_id() {
|
||||
if [ $# -ne 2 ]; then
|
||||
log 2 "'get_check_acl_id' requires client, bucket"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_acl "$1" "$2"; then
|
||||
log 2 "error retrieving acl"
|
||||
return 1
|
||||
fi
|
||||
log 5 "Initial ACLs: $acl"
|
||||
if ! id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Owner.ID' 2>&1); then
|
||||
log 2 "error getting ID: $id"
|
||||
return 1
|
||||
fi
|
||||
if [[ $id != "$AWS_ACCESS_KEY_ID" ]]; then
|
||||
# for direct, ID is canonical user ID rather than AWS_ACCESS_KEY_ID
|
||||
if ! canonical_id=$(aws --no-verify-ssl s3api list-buckets --query 'Owner.ID' 2>&1); then
|
||||
log 2 "error getting canonical ID: $canonical_id"
|
||||
return 1
|
||||
fi
|
||||
if [[ $id != "$canonical_id" ]]; then
|
||||
log 2 "acl ID doesn't match AWS key or canonical ID"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_check_acl_after_first_put() {
|
||||
if [ $# -ne 2 ]; then
|
||||
log 2 "'get_check_acl_after_first_put' requires client, bucket"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_acl "$1" "$BUCKET_ONE_NAME"; then
|
||||
log 2 "error retrieving second ACL"
|
||||
return 1
|
||||
fi
|
||||
log 5 "Acls after 1st put: $acl"
|
||||
if ! public_grants=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Grants[1]' 2>&1); then
|
||||
log 2 "error getting public grants: $public_grants"
|
||||
return 1
|
||||
fi
|
||||
if ! permission=$(echo "$public_grants" | jq -r '.Permission' 2>&1); then
|
||||
log 2 "error getting permission: $permission"
|
||||
return 1
|
||||
fi
|
||||
if [[ $permission != "READ" ]]; then
|
||||
log 2 "incorrect permission ($permission)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_check_acl_after_second_put() {
|
||||
if [ $# -ne 2 ]; then
|
||||
log 2 "'get_check_acl_after_second_put' requires client, bucket"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_acl "$1" "$BUCKET_ONE_NAME"; then
|
||||
log 2 "error retrieving third ACL"
|
||||
return 1
|
||||
fi
|
||||
if ! public_grants=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Grants' 2>&1); then
|
||||
log 2 "error retrieving public grants: $public_grants"
|
||||
return 1
|
||||
fi
|
||||
if ! public_grant_length=$(echo "$public_grants" | jq -r 'length' 2>&1); then
|
||||
log 2 "Error retrieving public grant length: $public_grant_length"
|
||||
return 1
|
||||
fi
|
||||
if [[ $public_grant_length -ne 2 ]]; then
|
||||
log 2 "incorrect grant length for private ACL ($public_grant_length)"
|
||||
return 1
|
||||
fi
|
||||
if ! permission=$(echo "$public_grants" | jq -r '.[0].Permission' 2>&1); then
|
||||
log 2 "Error retrieving permission: $permission"
|
||||
return 1
|
||||
fi
|
||||
if [[ $permission != "FULL_CONTROL" ]]; then
|
||||
log 2 "incorrect permission ($permission)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
tests/util_bucket_location.sh (new file, 18 lines)
@@ -0,0 +1,18 @@
#!/usr/bin/env bash

get_check_bucket_location() {
  if [ $# -ne 2 ]; then
    log 2 "'get_check_bucket_location' requires client, bucket"
    return 1
  fi
  if ! get_bucket_location "$1" "$2"; then
    log 2 "error getting bucket location"
    return 1
  fi
  # shellcheck disable=SC2154
  if [[ $bucket_location != "null" ]] && [[ $bucket_location != "us-east-1" ]]; then
    log 2 "wrong location: '$bucket_location'"
    return 1
  fi
  return 0
}

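A hedged sketch of how the new helper is consumed from a bats test, following the run/assert_success pattern used throughout test_common.sh (the client name is illustrative):

run get_check_bucket_location "s3api" "$BUCKET_ONE_NAME"
assert_success
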
@@ -33,4 +33,39 @@ list_check_buckets_rest() {
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
list_and_check_buckets() {
|
||||
if [ $# -ne 3 ]; then
|
||||
log 2 "'list_and_check_buckets' requires client, two bucket names"
|
||||
return 1
|
||||
fi
|
||||
if ! list_buckets "$1"; then
|
||||
log 2 "error listing buckets"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local bucket_one_found=false
|
||||
local bucket_two_found=false
|
||||
if [ -z "$bucket_array" ]; then
|
||||
log 2 "bucket_array parameter not exported"
|
||||
return 1
|
||||
fi
|
||||
log 5 "bucket array: ${bucket_array[*]}"
|
||||
for bucket in "${bucket_array[@]}"; do
|
||||
if [ "$bucket" == "$2" ] || [ "$bucket" == "s3://$2" ]; then
|
||||
bucket_one_found=true
|
||||
elif [ "$bucket" == "$3" ] || [ "$bucket" == "s3://$3" ]; then
|
||||
bucket_two_found=true
|
||||
fi
|
||||
if [ $bucket_one_found == true ] && [ $bucket_two_found == true ]; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
echo $bucket_one_found $bucket_two_found
|
||||
if [ $bucket_one_found == false ] || [ $bucket_two_found == false ]; then
|
||||
log 2 "Not all buckets found"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
@@ -119,3 +119,50 @@ list_check_objects_rest() {
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
list_check_objects_common() {
|
||||
if [ $# -ne 4 ]; then
|
||||
log 2 "'list_check_objects_common' requires client, bucket, object one, object two"
|
||||
return 1
|
||||
fi
|
||||
if ! list_objects "$1" "$2"; then
|
||||
log 2 "error listing objects"
|
||||
return 1
|
||||
fi
|
||||
local object_one_found=false
|
||||
local object_two_found=false
|
||||
# shellcheck disable=SC2154
|
||||
for object in "${object_array[@]}"; do
|
||||
if [ "$object" == "$3" ] || [ "$object" == "s3://$2/$3" ]; then
|
||||
object_one_found=true
|
||||
elif [ "$object" == "$4" ] || [ "$object" == "s3://$2/$4" ]; then
|
||||
object_two_found=true
|
||||
fi
|
||||
done
|
||||
|
||||
if [ $object_one_found != true ] || [ $object_two_found != true ]; then
|
||||
log 2 "$3 and/or $4 not listed (all objects: ${object_array[*]})"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
list_objects_check_file_count() {
|
||||
if [ $# -ne 3 ]; then
|
||||
log 2 "'list_objects_check_file_count' requires client, bucket, count"
|
||||
return 1
|
||||
fi
|
||||
if ! list_objects "$1" "$2"; then
|
||||
log 2 "error listing objects"
|
||||
return 1
|
||||
fi
|
||||
if [[ $LOG_LEVEL -ge 5 ]]; then
|
||||
log 5 "Array: ${object_array[*]}"
|
||||
fi
|
||||
local file_count="${#object_array[@]}"
|
||||
if [[ $file_count != "$3" ]]; then
|
||||
log 2 "file count should be $3, is $file_count"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
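Both new list helpers replace the inline loops that test_common.sh previously carried; a hedged sketch of their use in a test (client name and object variables are illustrative):

run list_check_objects_common "s3api" "$BUCKET_ONE_NAME" "$object_one" "$object_two"
assert_success
run list_objects_check_file_count "s3api" "$BUCKET_ONE_NAME" 1001
assert_success
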
@@ -146,4 +146,62 @@ setup_policy_with_double_statement() {
|
||||
EOF"
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "policy data: $(cat "$1")"
|
||||
}
|
||||
}
|
||||
|
||||
get_and_check_policy() {
|
||||
if [ $# -ne 6 ]; then
|
||||
log 2 "'get_and_check_policy' requires client, bucket, expected effect, principal, action, resource"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_policy "$1" "$BUCKET_ONE_NAME"; then
|
||||
log 2 "error getting bucket policy after setting"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "POLICY: $bucket_policy"
|
||||
if ! statement=$(echo "$bucket_policy" | jq -r '.Statement[0]' 2>&1); then
|
||||
log 2 "error getting statement value: $statement"
|
||||
return 1
|
||||
fi
|
||||
if ! returned_effect=$(echo "$statement" | jq -r '.Effect' 2>&1); then
|
||||
log 2 "error getting effect: $returned_effect"
|
||||
return 1
|
||||
fi
|
||||
if [[ "$3" != "$returned_effect" ]]; then
|
||||
log 2 "effect mismatch ($3, $returned_effect)"
|
||||
return 1
|
||||
fi
|
||||
if ! returned_principal=$(echo "$statement" | jq -r '.Principal' 2>&1); then
|
||||
log 2 "error getting principal: $returned_principal"
|
||||
return 1
|
||||
fi
|
||||
if [[ -n $DIRECT ]] && arn=$(echo "$returned_principal" | jq -r '.AWS' 2>&1); then
|
||||
if [[ $arn != "arn:aws:iam::$DIRECT_AWS_USER_ID:user/s3user" ]]; then
|
||||
log 2 "arn mismatch"
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
if [[ "$4" != "\"$returned_principal\"" ]]; then
|
||||
log 2 "principal mismatch ($4, $returned_principal)"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
if ! returned_action=$(echo "$statement" | jq -r '.Action' 2>&1); then
|
||||
log 2 "error getting action: $returned_action"
|
||||
return 1
|
||||
fi
|
||||
if [[ "$5" != "$returned_action" ]]; then
|
||||
log 2 "action mismatch ($5, $returned_action)"
|
||||
return 1
|
||||
fi
|
||||
if ! returned_resource=$(echo "$statement" | jq -r '.Resource' 2>&1); then
|
||||
log 2 "error getting resource: $returned_resource"
|
||||
return 1
|
||||
fi
|
||||
if [[ "$6" != "$returned_resource" ]]; then
|
||||
log 2 "resource mismatch ($6, $returned_resource)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
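For reference, get_and_check_policy verifies the same single-statement document that setup_policy_with_single_statement writes. Its shape, taken from the heredoc this commit removes from test_common.sh (the values are the test's own variables):

cat <<EOF > "$TEST_FILE_FOLDER/$policy_file"
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "$effect",
      "Principal": $principal,
      "Action": "$action",
      "Resource": "$resource"
    }
  ]
}
EOF
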
tests/util_presigned_url.sh (new file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

source ./tests/commands/create_presigned_url.sh

create_check_presigned_url() {
  if [ $# -ne 4 ]; then
    log 2 "'create_check_presigned_url' requires client, bucket, key, save location"
    return 1
  fi
  if ! create_presigned_url "$1" "$2" "$3"; then
    log 2 "error creating presigned URL"
    return 1
  fi
  if ! error=$(curl -k -v "$presigned_url" -o "$4"); then
    log 2 "error downloading file with curl: $error"
    return 1
  fi
  return 0
}

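A hedged sketch of the helper in a presigned-URL test, mirroring how test_common_presigned_url_utf8_chars now calls it (the client name is illustrative):

run create_check_presigned_url "s3cmd" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy"
assert_success
run compare_files "$TEST_FILE_FOLDER/$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy"
assert_success
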
@@ -57,6 +57,14 @@ generate_hash_for_payload() {
|
||||
payload_hash="$(echo -n "$1" | sha256sum | awk '{print $1}')"
|
||||
}
|
||||
|
||||
generate_hash_for_payload_file() {
|
||||
if [ $# -ne 1 ]; then
|
||||
log 2 "'generate_hash_for_payload' requires filename"
|
||||
return 1
|
||||
fi
|
||||
payload_hash="$(sha256sum "$1" | awk '{print $1}')"
|
||||
}
|
||||
|
||||
get_creq_string_list_buckets() {
|
||||
|
||||
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
|
||||
|
||||
@@ -54,18 +54,23 @@ get_and_check_bucket_tags_with_user() {
|
||||
# params: bucket, expected tag key, expected tag value
|
||||
# fail on error
|
||||
get_and_check_bucket_tags() {
|
||||
assert [ $# -eq 3 ]
|
||||
run get_and_check_bucket_tags_with_user "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$1" "$2" "$3"
|
||||
assert_success "error getting and checking bucket tags"
|
||||
if [ $# -ne 3 ]; then
|
||||
log 2 "'get_and_check_bucket_tags' requires bucket, expected tag key, expected tag value"
|
||||
return 1
|
||||
fi
|
||||
if ! get_and_check_bucket_tags_with_user "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$1" "$2" "$3"; then
|
||||
log 2 "error getting and checking bucket tags with user"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
verify_no_bucket_tags() {
|
||||
if [ $# -ne 1 ]; then
|
||||
if [ $# -ne 2 ]; then
|
||||
log 2 "'verify_no_bucket_tags' requires bucket name"
|
||||
return 1
|
||||
fi
|
||||
if ! get_bucket_tagging "$1"; then
|
||||
if ! get_bucket_tagging "$1" "$2"; then
|
||||
log 2 "error retrieving bucket tagging"
|
||||
return 1
|
||||
fi
|
||||
@@ -76,3 +81,68 @@ verify_no_bucket_tags() {
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
verify_no_object_tags() {
|
||||
if [ $# -ne 3 ]; then
|
||||
log 2 "'verify_no_object_tags' requires client, bucket, object"
|
||||
return 1
|
||||
fi
|
||||
if ! get_object_tagging "$1" "$2" "$3"; then
|
||||
log 2 "error getting object tagging"
|
||||
return 1
|
||||
fi
|
||||
if [[ "$1" == 'aws' ]] || [ "$1" == 's3api' ]; then
|
||||
if ! tag_set=$(echo "$tags" | jq '.TagSet' 2>&1); then
|
||||
log 2 "error getting tag set: $tag_set"
|
||||
return 1
|
||||
fi
|
||||
if [[ $tag_set != "[]" ]] && [[ $tag_set != "" ]]; then
|
||||
log 2 "tags not empty ($tag_set)"
|
||||
return 1
|
||||
fi
|
||||
elif [[ $tags != *"No tags found"* ]] && [[ $tags != "" ]]; then
|
||||
log 2 "tags not empty (tags: $tags)"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
check_verify_object_tags() {
|
||||
if [ $# -ne 5 ]; then
|
||||
log 2 "'check_verify_object_tags' requires client, bucket, key, expected tag key, expected tag value"
|
||||
return 1
|
||||
fi
|
||||
if ! get_object_tagging "$1" "$2" "$3"; then
|
||||
log 2 "error getting object tags"
|
||||
return 1
|
||||
fi
|
||||
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
|
||||
if ! tag_set_key=$(echo "$tags" | jq -r '.TagSet[0].Key' 2>&1); then
|
||||
log 2 "error retrieving tag key: $tag_set_key"
|
||||
return 1
|
||||
fi
|
||||
if ! tag_set_value=$(echo "$tags" | jq -r '.TagSet[0].Value' 2>&1); then
|
||||
log 2 "error retrieving tag value: $tag_set_value"
|
||||
return 1
|
||||
fi
|
||||
if [[ $tag_set_key != "$4" ]]; then
|
||||
log 2 "key mismatch ($tag_set_key, $4)"
|
||||
return 1
|
||||
fi
|
||||
if [[ $tag_set_value != "$5" ]]; then
|
||||
log 2 "value mismatch ($tag_set_value, $5)"
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
read -r tag_set_key tag_set_value <<< "$(echo "$tags" | awk 'NR==2 {print $1, $3}')"
|
||||
if [[ $tag_set_key != "$4" ]]; then
|
||||
log 2 "Key mismatch ($tag_set_key, $4)"
|
||||
return 1
|
||||
fi
|
||||
if [[ $tag_set_value != "$5" ]]; then
|
||||
log 2 "Value mismatch ($tag_set_value, $5)"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||