Merge pull request #831 from versity/test_cmdline_rest_object_tagging

Test cmdline rest object tagging
This commit is contained in:
Ben McClelland
2024-09-24 08:31:22 -07:00
committed by GitHub
15 changed files with 698 additions and 449 deletions

View File

@@ -25,4 +25,4 @@ jobs:
run: sudo apt-get install -y docker-compose
- name: Run Docker Container
run: docker-compose -f tests/docker-compose-bats.yml up --exit-code-from s3api_only s3api_only
run: docker-compose -f tests/docker-compose-bats.yml up --exit-code-from s3api_np_only s3api_np_only

View File

@@ -30,7 +30,7 @@ jobs:
RECREATE_BUCKETS: "true"
PORT: 7071
BACKEND: "posix"
- set: "s3api, posix"
- set: "s3api non-policy, posix"
LOCAL_FOLDER: /tmp/gw3
BUCKET_ONE_NAME: versity-gwtest-bucket-one-3
BUCKET_TWO_NAME: versity-gwtest-bucket-two-3
@@ -74,7 +74,7 @@ jobs:
RECREATE_BUCKETS: "false"
PORT: 7075
BACKEND: "posix"
- set: "s3api, s3 backend"
- set: "s3api non-policy, s3 backend"
LOCAL_FOLDER: /tmp/gw7
BUCKET_ONE_NAME: versity-gwtest-bucket-one-7
BUCKET_TWO_NAME: versity-gwtest-bucket-two-7
@@ -118,6 +118,28 @@ jobs:
RECREATE_BUCKETS: "false"
PORT: 7079
BACKEND: "posix"
- set: "s3api policy and user, posix"
LOCAL_FOLDER: /tmp/gw11
BUCKET_ONE_NAME: versity-gwtest-bucket-one-10
BUCKET_TWO_NAME: versity-gwtest-bucket-two-10
IAM_TYPE: folder
USERS_FOLDER: /tmp/iam11
AWS_ENDPOINT_URL: https://127.0.0.1:7080
RUN_SET: "s3api-policy,s3api-user"
RECREATE_BUCKETS: "true"
PORT: 7080
BACKEND: "posix"
- set: "s3api policy and user, s3 backend"
LOCAL_FOLDER: /tmp/gw12
BUCKET_ONE_NAME: versity-gwtest-bucket-one-11
BUCKET_TWO_NAME: versity-gwtest-bucket-two-11
IAM_TYPE: folder
USERS_FOLDER: /tmp/iam12
AWS_ENDPOINT_URL: https://127.0.0.1:7081
RUN_SET: "s3api-policy,s3api-user"
RECREATE_BUCKETS: "true"
PORT: 7081
BACKEND: "s3"
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v4
@@ -152,7 +174,7 @@ jobs:
run: |
sudo apt-get install libxml2-utils
- name: Build and run, posix backend
- name: Build and run
env:
LOCAL_FOLDER: ${{ matrix.LOCAL_FOLDER }}
BUCKET_ONE_NAME: ${{ matrix.BUCKET_ONE_NAME }}

View File

@@ -20,10 +20,13 @@ delete_object_tagging() {
echo "delete object tagging command missing command type, bucket, key"
return 1
fi
delete_result=0
if [[ $1 == 'aws' ]]; then
error=$(aws --no-verify-ssl s3api delete-object-tagging --bucket "$2" --key "$3" 2>&1) || delete_result=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure tag remove "$MC_ALIAS/$2/$3") || delete_result=$?
elif [ "$1" == 'rest' ]; then
delete_object_tagging_rest "$2" "$3" || delete_result=$?
else
echo "delete-object-tagging command not implemented for '$1'"
return 1
@@ -33,4 +36,46 @@ delete_object_tagging() {
return 1
fi
return 0
}
}
delete_object_tagging_rest() {
if [ $# -ne 2 ]; then
log 2 "'delete_object_tagging' requires bucket, key"
return 1
fi
generate_hash_for_payload ""
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# shellcheck disable=SC2154
canonical_request="DELETE
/$1/$2
tagging=
host:$aws_endpoint_url_address
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-date
$payload_hash"
if ! generate_sts_string "$current_date_time" "$canonical_request"; then
log 2 "error generating sts string"
return 1
fi
get_signature
# shellcheck disable=SC2154
reply=$(curl -ks -w "%{http_code}" -X DELETE "$header://$aws_endpoint_url_address/$1/$2?tagging" \
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
-H "x-amz-content-sha256: $payload_hash" \
-H "x-amz-date: $current_date_time" \
-d "$tagging" -o "$TEST_FILE_FOLDER"/delete_tagging_error.txt 2>&1)
log 5 "reply status code: $reply"
if [[ "$reply" != "204" ]]; then
log 2 "reply error: $reply"
log 2 "put object tagging command returned error: $(cat "$TEST_FILE_FOLDER"/delete_tagging_error.txt)"
return 1
fi
return 0
}

View File

@@ -50,8 +50,7 @@ get_object_with_range() {
log 2 "'get object with range' requires bucket, key, range, outfile"
return 1
fi
get_object_error=$(aws --no-verify-ssl s3api get-object --bucket "$1" --key "$2" --range "$3" "$4" 2>&1) || local exit_code=$?
if [[ $exit_code -ne 0 ]]; then
if ! get_object_error=$(aws --no-verify-ssl s3api get-object --bucket "$1" --key "$2" --range "$3" "$4" 2>&1); then
log 2 "error getting object with range: $get_object_error"
return 1
fi

View File

@@ -21,10 +21,12 @@ get_object_tagging() {
return 1
fi
local result
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
if [[ "$1" == 'aws' ]] || [[ $1 == 's3api' ]]; then
tags=$(aws --no-verify-ssl s3api get-object-tagging --bucket "$2" --key "$3" 2>&1) || result=$?
elif [[ $1 == 'mc' ]]; then
elif [[ "$1" == 'mc' ]]; then
tags=$(mc --insecure tag list "$MC_ALIAS"/"$2"/"$3" 2>&1) || result=$?
elif [ "$1" == 'rest' ]; then
get_object_tagging_rest "$2" "$3" || result=$?
else
log 2 "invalid command type $1"
return 1
@@ -41,4 +43,50 @@ get_object_tagging() {
tags=$(echo "$tags" | grep -v "InsecureRequestWarning")
fi
export tags
}
# Retrieve an object's tag set via a signed (SigV4) REST GetObjectTagging call.
# Writes the XML response body to "$TEST_FILE_FOLDER/object_tags.txt".
# params: bucket, key
# return: 0 on HTTP 200, 1 on 404, 2 on any other error
get_object_tagging_rest() {
if [ $# -ne 2 ]; then
log 2 "'get_object_tagging' requires bucket, key"
return 1
fi
# GET has no request body, so the signed payload hash is for the empty string
generate_hash_for_payload ""
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
# host:port without the scheme, for the Host header
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
# scheme (http or https), reused to rebuild the URL below
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# canonical request for SigV4; payload_hash is set by generate_hash_for_payload
# shellcheck disable=SC2154
canonical_request="GET
/$1/$2
tagging=
host:$aws_endpoint_url_address
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-date
$payload_hash"
if ! generate_sts_string "$current_date_time" "$canonical_request"; then
log 2 "error generating sts string"
return 1
fi
# computes $signature; $ymd is presumably set by generate_sts_string — TODO confirm
get_signature
# -w "%{http_code}" appends the status code to $reply; response body goes to the -o file
# shellcheck disable=SC2154
reply=$(curl -ks -w "%{http_code}" "$header://$aws_endpoint_url_address/$1/$2?tagging" \
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
-H "x-amz-content-sha256: $payload_hash" \
-H "x-amz-date: $current_date_time" \
-o "$TEST_FILE_FOLDER"/object_tags.txt 2>&1)
log 5 "reply status code: $reply"
if [[ "$reply" != "200" ]]; then
# 404 is distinguished so callers can treat "no such object/tags" separately
if [ "$reply" == "404" ]; then
return 1
fi
log 2 "reply error: $reply"
log 2 "get object tagging command returned error: $(cat "$TEST_FILE_FOLDER"/object_tags.txt)"
return 2
fi
log 5 "object tags: $(cat "$TEST_FILE_FOLDER"/object_tags.txt)"
return 0
}

View File

@@ -16,7 +16,7 @@
put_object_tagging() {
if [ $# -ne 5 ]; then
log 2 "'put-object-tagging' command missing command type, object name, file, key, and/or value"
log 2 "'put-object-tagging' command missing command type, bucket, object name, file, key, and/or value"
return 1
fi
local error
@@ -26,6 +26,8 @@ put_object_tagging() {
error=$(aws --no-verify-ssl s3api put-object-tagging --bucket "$2" --key "$3" --tagging "TagSet=[{Key=$4,Value=$5}]" 2>&1) || result=$?
elif [[ $1 == 'mc' ]]; then
error=$(mc --insecure tag set "$MC_ALIAS"/"$2"/"$3" "$4=$5" 2>&1) || result=$?
elif [[ $1 == 'rest' ]]; then
put_object_tagging_rest "$2" "$3" "$4" "$5" || result=$?
else
log 2 "invalid command type $1"
return 1
@@ -35,4 +37,56 @@ put_object_tagging() {
return 1
fi
return 0
}
}
# Add a single key/value tag to an object via a signed (SigV4) REST
# PutObjectTagging call.
# params: bucket, key, tag key, tag value
# return: 0 on HTTP 200, 1 on error
put_object_tagging_rest() {
if [ $# -ne 4 ]; then
log 2 "'put_object_tagging' requires bucket, key, tag key, tag value"
return 1
fi
# request body: Tagging XML document holding the one tag to set
tagging="<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<Tagging xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">
<TagSet>
<Tag>
<Key>$3</Key>
<Value>$4</Value>
</Tag>
</TagSet>
</Tagging>"
# hash of the XML body, used both in the canonical request and the
# x-amz-content-sha256 header
generate_hash_for_payload "$tagging"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
# host:port without the scheme, for the Host header
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
# scheme (http or https), reused to rebuild the URL below
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# canonical request for SigV4; payload_hash is set by generate_hash_for_payload
# shellcheck disable=SC2154
canonical_request="PUT
/$1/$2
tagging=
host:$aws_endpoint_url_address
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-date
$payload_hash"
if ! generate_sts_string "$current_date_time" "$canonical_request"; then
log 2 "error generating sts string"
return 1
fi
# computes $signature; $ymd is presumably set by generate_sts_string — TODO confirm
get_signature
# -w "%{http_code}" appends the status code to $reply; error body goes to the -o file
# shellcheck disable=SC2154
reply=$(curl -ks -w "%{http_code}" -X PUT "$header://$aws_endpoint_url_address/$1/$2?tagging" \
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
-H "x-amz-content-sha256: $payload_hash" \
-H "x-amz-date: $current_date_time" \
-d "$tagging" -o "$TEST_FILE_FOLDER"/put_tagging_error.txt 2>&1)
log 5 "reply status code: $reply"
if [[ "$reply" != "200" ]]; then
log 2 "reply error: $reply"
log 2 "put object tagging command returned error: $(cat "$TEST_FILE_FOLDER"/put_tagging_error.txt)"
return 1
fi
return 0
}

View File

@@ -25,14 +25,14 @@ services:
args:
- CONFIG_FILE=tests/.env.s3
- SECRETS_FILE=tests/.secrets.s3
s3api_only:
s3api_np_only:
build:
context: ../
dockerfile: tests/Dockerfile_test_bats
args:
- CONFIG_FILE=tests/.env.default
image: bats_test
command: ["s3api"]
command: ["s3api-non-policy"]
direct:
build:
context: ../

View File

@@ -49,29 +49,40 @@ log_mask() {
echo "mask and log requires level, string"
return 1
fi
local masked_args=() # Initialize an array to hold the masked arguments
masked_args=() # Initialize an array to hold the masked arguments
IFS=' ' read -r -a array <<< "$2"
mask_next=false
for arg in "${array[@]}"; do
if [[ $mask_next == true ]]; then
masked_args+=("********")
mask_next=false
elif [[ "$arg" == --secret_key=* ]]; then
masked_args+=("--secret_key=********")
elif [[ "$arg" == --secret=* ]]; then
masked_args+=("--secret=********")
else
if [[ "$arg" == "--secret_key" ]] || [[ "$arg" == "--secret" ]] || [[ "$arg" == "--s3-iam-secret" ]]; then
mask_next=true
fi
masked_args+=("$arg")
if ! check_arg_for_mask "$arg"; then
echo "error checking arg for mask"
return 1
fi
done
log_message "$log_level" "${masked_args[*]}"
}
check_arg_for_mask() {
  # Append one command-line argument to the global masked_args array, replacing
  # secret values with "********".
  # Reads/writes globals: mask_next, masked_args.
  # param: argument to check
  # return: 0 on success, 1 for usage error
  if [ $# -ne 1 ]; then
    echo "'check_arg_for_mask' requires arg"
    return 1
  fi
  # use the validated parameter rather than relying on the caller's loop
  # variable 'arg' happening to be in scope
  local arg=$1
  if [[ $mask_next == true ]]; then
    # previous argument was a secret flag, so this one is the secret value
    masked_args+=("********")
    mask_next=false
  elif [[ "$arg" == --secret_key=* ]]; then
    masked_args+=("--secret_key=********")
  elif [[ "$arg" == --secret=* ]]; then
    masked_args+=("--secret=********")
  else
    if [[ "$arg" == "--secret_key" ]] || [[ "$arg" == "--secret" ]] || [[ "$arg" == "--s3-iam-secret" ]]; then
      # the *next* argument will carry the secret value
      mask_next=true
    fi
    masked_args+=("$arg")
  fi
  return 0
}
log_message() {
if [[ $# -ne 2 ]]; then
echo "log message requires level, message"

View File

@@ -17,6 +17,7 @@
source ./tests/test_common.sh
source ./tests/setup.sh
source ./tests/util_create_bucket.sh
source ./tests/util_head_bucket.sh
source ./tests/util_tags.sh
source ./tests/commands/delete_bucket_policy.sh
source ./tests/commands/get_bucket_policy.sh
@@ -47,7 +48,8 @@ export RUN_MC=true
run setup_bucket "mc" "$BUCKET_ONE_NAME"
assert_success
delete_bucket "mc" "$BUCKET_ONE_NAME" || fail "error deleting bucket"
run delete_bucket "mc" "$BUCKET_ONE_NAME"
assert_success
}
# delete-bucket-policy
@@ -119,31 +121,24 @@ export RUN_MC=true
return
fi
create_bucket_invalid_name "mc" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Invalid name test failed"
[[ "$bucket_create_error" == *"Bucket name cannot be empty"* ]] || fail "unexpected error: $bucket_create_error"
delete_bucket_or_contents "mc" "$BUCKET_ONE_NAME"
run create_and_check_bucket_invalid_name "mc"
assert_success
}
@test "test_get_bucket_info_mc" {
run setup_bucket "mc" "$BUCKET_ONE_NAME"
assert_success
head_bucket "mc" "$BUCKET_ONE_NAME"
[[ $bucket_info == *"$BUCKET_ONE_NAME"* ]] || fail "failure to retrieve correct bucket info: $bucket_info"
delete_bucket_or_contents "mc" "$BUCKET_ONE_NAME"
run bucket_info_contains_bucket "mc" "$BUCKET_ONE_NAME"
assert_success
}
@test "test_get_bucket_info_doesnt_exist_mc" {
run setup_bucket "mc" "$BUCKET_ONE_NAME"
assert_success
head_bucket "mc" "$BUCKET_ONE_NAME"a || local info_result=$?
[[ $info_result -eq 1 ]] || fail "bucket info for non-existent bucket returned"
[[ $bucket_info == *"does not exist"* ]] || fail "404 not returned for non-existent bucket info"
delete_bucket_or_contents "mc" "$BUCKET_ONE_NAME"
run head_bucket "mc" "$BUCKET_ONE_NAME"a
assert_failure 1
}
@test "test_ls_directory_object" {

View File

@@ -1,14 +1,17 @@
#!/usr/bin/env bats
source ./tests/commands/delete_object_tagging.sh
source ./tests/commands/get_object.sh
source ./tests/commands/list_buckets.sh
source ./tests/commands/put_object.sh
source ./tests/commands/put_object_tagging.sh
source ./tests/logger.sh
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_rest.sh
source ./tests/util_list_buckets.sh
source ./tests/util_list_objects.sh
source ./tests/util_rest.sh
source ./tests/util_tags.sh
@test "test_rest_list_objects" {
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
@@ -56,3 +59,30 @@ source ./tests/util_list_objects.sh
run get_object "rest" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_failure
}
@test "test_rest_tagging" {
test_file="test_file"
test_key="TestKey"
test_value="TestValue"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run create_test_files "$test_file"
assert_success
run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run put_object_tagging "rest" "$BUCKET_ONE_NAME" "$test_file" "$test_key" "$test_value"
assert_success
run check_verify_object_tags "rest" "$BUCKET_ONE_NAME" "$test_file" "$test_key" "$test_value"
assert_success
run delete_object_tagging "rest" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run verify_no_object_tags "rest" "$BUCKET_ONE_NAME" "$test_file"
assert_success
}

View File

@@ -16,6 +16,7 @@
source ./tests/util_create_bucket.sh
source ./tests/util_mc.sh
source ./tests/util_multipart.sh
source ./tests/logger.sh
source ./tests/commands/abort_multipart_upload.sh
source ./tests/commands/complete_multipart_upload.sh
@@ -26,6 +27,7 @@ source ./tests/commands/delete_bucket_policy.sh
source ./tests/commands/delete_object.sh
source ./tests/commands/get_bucket_acl.sh
source ./tests/commands/get_bucket_ownership_controls.sh
source ./tests/commands/get_bucket_policy.sh
source ./tests/commands/get_bucket_tagging.sh
source ./tests/commands/get_object_lock_configuration.sh
source ./tests/commands/get_object_tagging.sh
@@ -375,47 +377,6 @@ bucket_exists() {
return 1
}
# Abort every in-progress multipart upload in a bucket.
# param: bucket name
# return 0 for success, 1 for error
abort_all_multipart_uploads() {
if [ $# -ne 1 ]; then
log 2 "'abort_all_multipart_uploads' requires bucket name"
return 1
fi
if ! list_multipart_uploads "$1"; then
log 2 "error listing multipart uploads"
return 1
fi
# uploads is presumably exported by list_multipart_uploads — TODO confirm
# shellcheck disable=SC2154
log 5 "UPLOADS: $uploads"
if ! upload_set=$(echo "$uploads" | grep -v "InsecureRequestWarning" | jq -c '.Uploads[]' 2>&1); then
# jq reports "Cannot iterate over null" when .Uploads is absent, i.e. there
# are no in-progress uploads — that counts as success, not an error
if [[ $upload_set == *"Cannot iterate over null"* ]]; then
return 0
else
log 2 "error getting upload set: $upload_set"
return 1
fi
fi
log 5 "UPLOAD SET: $upload_set"
# NOTE(review): word-splitting $upload_set assumes no whitespace inside each
# compact JSON object, i.e. keys without spaces — TODO confirm
for upload in $upload_set; do
log 5 "UPLOAD: $upload"
if ! upload_id=$(echo "$upload" | jq -r ".UploadId" 2>&1); then
log 2 "error getting upload ID: $upload_id"
return 1
fi
log 5 "upload ID: $upload_id"
if ! key=$(echo "$upload" | jq -r ".Key" 2>&1); then
log 2 "error getting key: $key"
return 1
fi
log 5 "Aborting multipart upload for key: $key, UploadId: $upload_id"
if ! abort_multipart_upload "$1" "$key" "$upload_id"; then
log 2 "error aborting multipart upload"
return 1
fi
done
}
# param: bucket name
# return 1 for failure, 0 for success
get_object_ownership_rule_and_update_acl() {
@@ -815,202 +776,6 @@ get_and_verify_object_tags() {
return 0
}
# perform all parts of a multipart upload before completion command
# params: bucket, key, file to split and upload, number of file parts to upload
# return: 0 for success, 1 for failure
multipart_upload_before_completion() {
if [ $# -ne 4 ]; then
log 2 "multipart upload pre-completion command missing bucket, key, file, and/or part count"
return 1
fi
if ! split_file "$3" "$4"; then
log 2 "error splitting file"
return 1
fi
if ! create_multipart_upload "$1" "$2"; then
log 2 "error creating multpart upload"
return 1
fi
parts="["
for ((i = 1; i <= $4; i++)); do
# shellcheck disable=SC2154
if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
echo "error uploading part $i"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
if [[ $i -ne $4 ]]; then
parts+=","
fi
done
parts+="]"
export parts
}
multipart_upload_before_completion_with_params() {
if [ $# -ne 10 ]; then
log 2 "multipart upload command missing bucket, key, file, part count, content type, metadata, hold status, lock mode, retain until date, tagging"
return 1
fi
split_file "$3" "$4" || split_result=$?
if [[ $split_result -ne 0 ]]; then
log 2 "error splitting file"
return 1
fi
create_multipart_upload_params "$1" "$2" "$5" "$6" "$7" "$8" "$9" "${10}" || local create_result=$?
if [[ $create_result -ne 0 ]]; then
log 2 "error creating multpart upload"
return 1
fi
parts="["
for ((i = 1; i <= $4; i++)); do
upload_part "$1" "$2" "$upload_id" "$3" "$i" || local upload_result=$?
if [[ $upload_result -ne 0 ]]; then
log 2 "error uploading part $i"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
if [[ $i -ne $4 ]]; then
parts+=","
fi
done
parts+="]"
export parts
}
multipart_upload_before_completion_custom() {
if [ $# -lt 4 ]; then
log 2 "multipart upload custom command missing bucket, key, file, part count, and/or optional params"
return 1
fi
split_file "$3" "$4" || local split_result=$?
if [[ $split_result -ne 0 ]]; then
log 2 "error splitting file"
return 1
fi
# shellcheck disable=SC2086 disable=SC2048
create_multipart_upload_custom "$1" "$2" ${*:5} || local create_result=$?
if [[ $create_result -ne 0 ]]; then
log 2 "error creating multipart upload"
return 1
fi
log 5 "upload ID: $upload_id"
parts="["
for ((i = 1; i <= $4; i++)); do
upload_part "$1" "$2" "$upload_id" "$3" "$i" || local upload_result=$?
if [[ $upload_result -ne 0 ]]; then
log 2 "error uploading part $i"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
if [[ $i -ne $4 ]]; then
parts+=","
fi
done
parts+="]"
export parts
}
multipart_upload_custom() {
if [ $# -lt 4 ]; then
log 2 "multipart upload custom command missing bucket, key, file, part count, and/or optional additional params"
return 1
fi
# shellcheck disable=SC2086 disable=SC2048
multipart_upload_before_completion_custom "$1" "$2" "$3" "$4" ${*:5} || local result=$?
if [[ $result -ne 0 ]]; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
log 5 "upload ID: $upload_id, parts: $parts"
complete_multipart_upload "$1" "$2" "$upload_id" "$parts" || local completed=$?
if [[ $completed -ne 0 ]]; then
log 2 "Error completing upload"
return 1
fi
return 0
}
multipart_upload() {
if [ $# -ne 4 ]; then
log 2 "multipart upload command missing bucket, key, file, and/or part count"
return 1
fi
multipart_upload_before_completion "$1" "$2" "$3" "$4" || local result=$?
if [[ $result -ne 0 ]]; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
complete_multipart_upload "$1" "$2" "$upload_id" "$parts" || local completed=$?
if [[ $completed -ne 0 ]]; then
log 2 "Error completing upload"
return 1
fi
return 0
}
# perform a multi-part upload
# params: bucket, key, source file location, number of parts
# return 0 for success, 1 for failure
multipart_upload_with_params() {
if [ $# -ne 10 ]; then
log 2 "multipart upload command requires bucket, key, file, part count, content type, metadata, hold status, lock mode, retain until date, tagging"
return 1
fi
log 5 "1: $1, 2: $2, 3: $3, 4: $4, 5: $5, 6: $6, 7: $7, 8: $8, 9: $9, 10: ${10}"
multipart_upload_before_completion_with_params "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}" || result=$?
if [[ $result -ne 0 ]]; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
log 5 "Upload parts: $parts"
complete_multipart_upload "$1" "$2" "$upload_id" "$parts" || local completed=$?
if [[ $completed -ne 0 ]]; then
log 2 "Error completing upload"
return 1
fi
return 0
}
# run upload, then abort it
# params: bucket, key, local file location, number of parts to split into before uploading
# return 0 for success, 1 for failure
run_then_abort_multipart_upload() {
if [ $# -ne 4 ]; then
log 2 "run then abort multipart upload command missing bucket, key, file, and/or part count"
return 1
fi
if ! multipart_upload_before_completion "$1" "$2" "$3" "$4"; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
if ! abort_multipart_upload "$1" "$2" "$upload_id"; then
log 2 "error aborting multipart upload"
return 1
fi
return 0
}
# copy a file to/from S3
# params: source, destination
# return 0 for success, 1 for failure
@@ -1029,153 +794,6 @@ copy_file() {
return 0
}
# list parts of an unfinished multipart upload
# params: bucket, key, local file location, and parts to split into before upload
# export parts on success, return 1 for error
start_multipart_upload_and_list_parts() {
if [ $# -ne 4 ]; then
log 2 "list multipart upload parts command requires bucket, key, file, and part count"
return 1
fi
if ! multipart_upload_before_completion "$1" "$2" "$3" "$4"; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
if ! list_parts "$1" "$2" "$upload_id"; then
log 2 "Error listing multipart upload parts: $listed_parts"
return 1
fi
export listed_parts
}
# list unfinished multipart uploads
# params: bucket, key one, key two
# export current two uploads on success, return 1 for error
create_and_list_multipart_uploads() {
if [ $# -ne 3 ]; then
log 2 "list multipart uploads command requires bucket and two keys"
return 1
fi
if ! create_multipart_upload "$1" "$2"; then
log 2 "error creating multpart upload"
return 1
fi
if ! create_multipart_upload "$1" "$3"; then
log 2 "error creating multpart upload two"
return 1
fi
if ! list_multipart_uploads "$1"; then
echo "error listing uploads"
return 1
fi
return 0
}
multipart_upload_from_bucket() {
if [ $# -ne 4 ]; then
echo "multipart upload from bucket command missing bucket, copy source, key, and/or part count"
return 1
fi
split_file "$3" "$4" || split_result=$?
if [[ $split_result -ne 0 ]]; then
echo "error splitting file"
return 1
fi
for ((i=0;i<$4;i++)) {
echo "key: $3"
put_object "s3api" "$3-$i" "$1" "$2-$i" || copy_result=$?
if [[ $copy_result -ne 0 ]]; then
echo "error copying object"
return 1
fi
}
create_multipart_upload "$1" "$2-copy" || upload_result=$?
if [[ $upload_result -ne 0 ]]; then
echo "error running first multpart upload"
return 1
fi
parts="["
for ((i = 1; i <= $4; i++)); do
upload_part_copy "$1" "$2-copy" "$upload_id" "$2" "$i" || local upload_result=$?
if [[ $upload_result -ne 0 ]]; then
echo "error uploading part $i"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
if [[ $i -ne $4 ]]; then
parts+=","
fi
done
parts+="]"
error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
if [[ $completed -ne 0 ]]; then
echo "Error completing upload: $error"
return 1
fi
return 0
}
multipart_upload_from_bucket_range() {
if [ $# -ne 5 ]; then
echo "multipart upload from bucket with range command requires bucket, copy source, key, part count, and range"
return 1
fi
split_file "$3" "$4" || local split_result=$?
if [[ $split_result -ne 0 ]]; then
echo "error splitting file"
return 1
fi
for ((i=0;i<$4;i++)) {
echo "key: $3"
log 5 "file info: $(ls -l "$3"-"$i")"
put_object "s3api" "$3-$i" "$1" "$2-$i" || local copy_result=$?
if [[ $copy_result -ne 0 ]]; then
echo "error copying object"
return 1
fi
}
create_multipart_upload "$1" "$2-copy" || local create_multipart_result=$?
if [[ $create_multipart_result -ne 0 ]]; then
echo "error running first multpart upload"
return 1
fi
parts="["
for ((i = 1; i <= $4; i++)); do
upload_part_copy_with_range "$1" "$2-copy" "$upload_id" "$2" "$i" "$5" || local upload_part_copy_result=$?
if [[ $upload_part_copy_result -ne 0 ]]; then
# shellcheck disable=SC2154
echo "error uploading part $i: $upload_part_copy_error"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
if [[ $i -ne $4 ]]; then
parts+=","
fi
done
parts+="]"
error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
if [[ $completed -ne 0 ]]; then
echo "Error completing upload: $error"
return 1
fi
return 0
}
list_and_check_directory_obj() {
#assert [ $# -eq 2 ]
if [ $# -ne 2 ]; then

View File

@@ -30,7 +30,7 @@ create_bucket_invalid_name() {
elif [[ $1 == 's3cmd' ]]; then
bucket_create_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate mb "s3://" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
bucket_create_error=$(mc --insecure mb "$MC_ALIAS" 2>&1) || exit_code=$?
bucket_create_error=$(mc --insecure mb "$MC_ALIAS/." 2>&1) || exit_code=$?
else
log 2 "invalid command type $1"
return 1
@@ -53,7 +53,7 @@ create_and_check_bucket_invalid_name() {
fi
# shellcheck disable=SC2154
if [[ "$bucket_create_error" != *"Invalid bucket name "* ]]; then
if [[ "$bucket_create_error" != *"Invalid bucket name "* ]] && [[ "$bucket_create_error" != *"Bucket name cannot"* ]]; then
log 2 "unexpected error: $bucket_create_error"
return 1
fi

18
tests/util_head_bucket.sh Normal file
View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
bucket_info_contains_bucket() {
  # Check that head_bucket output for the given bucket mentions its name.
  # params: client (aws/s3api/mc/...), bucket name
  # return: 0 if found, 1 on error or mismatch
  if [ $# -ne 2 ]; then
    log 2 "'bucket_info_contains_bucket' requires client, bucket"
    return 1
  fi
  # use the passed-in client and bucket rather than hardcoding "mc" and
  # "$BUCKET_ONE_NAME", so the helper works for any caller
  if ! head_bucket "$1" "$2"; then
    log 2 "error getting bucket info"
    return 1
  fi
  # bucket_info is set by head_bucket
  # shellcheck disable=SC2154
  if [[ "$bucket_info" != *"$2"* ]]; then
    return 1
  fi
  return 0
}

View File

@@ -137,3 +137,387 @@ compare_parts_to_listed_parts() {
return 1
fi
}
# Begin a multipart upload (without completing it), then list its parts.
# params: bucket, key, local file location, and parts to split into before upload
# export listed_parts on success, return 1 for error
start_multipart_upload_and_list_parts() {
  [ $# -eq 4 ] || {
    log 2 "list multipart upload parts command requires bucket, key, file, and part count"
    return 1
  }
  multipart_upload_before_completion "$1" "$2" "$3" "$4" || {
    log 2 "error performing pre-completion multipart upload"
    return 1
  }
  list_parts "$1" "$2" "$upload_id" || {
    log 2 "Error listing multipart upload parts: $listed_parts"
    return 1
  }
  export listed_parts
}
# Start two multipart uploads, then list the bucket's in-progress uploads.
# params: bucket, key one, key two
# export current two uploads on success, return 1 for error
create_and_list_multipart_uploads() {
  if [ $# -ne 3 ]; then
    log 2 "list multipart uploads command requires bucket and two keys"
    return 1
  fi
  create_multipart_upload "$1" "$2" || {
    log 2 "error creating multpart upload"
    return 1
  }
  create_multipart_upload "$1" "$3" || {
    log 2 "error creating multpart upload two"
    return 1
  }
  list_multipart_uploads "$1" || {
    echo "error listing uploads"
    return 1
  }
  return 0
}
multipart_upload_from_bucket() {
  # Copy split-file parts into a bucket, then multipart-copy them into "$2-copy".
  # params: bucket, copy source, key, part count
  # return 0 for success, 1 for failure
  if [ $# -ne 4 ]; then
    log 2 "multipart upload from bucket command missing bucket, copy source, key, and/or part count"
    return 1
  fi
  if ! split_file "$3" "$4"; then
    log 2 "error splitting file"
    return 1
  fi
  local i
  for ((i = 0; i < $4; i++)); do
    log 5 "key: $3"
    if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
      log 2 "error copying object"
      return 1
    fi
  done
  if ! create_multipart_upload "$1" "$2-copy"; then
    log 2 "error running first multipart upload"
    return 1
  fi
  parts="["
  for ((i = 1; i <= $4; i++)); do
    # upload_id / etag are set by create_multipart_upload / upload_part_copy
    if ! upload_part_copy "$1" "$2-copy" "$upload_id" "$2" "$i"; then
      log 2 "error uploading part $i"
      return 1
    fi
    parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
    if [[ $i -ne $4 ]]; then
      parts+=","
    fi
  done
  parts+="]"
  # 'if !' avoids the stale-global pattern where '|| completed=$?' leaves the
  # previous call's status in place on success
  if ! error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}'); then
    log 2 "Error completing upload: $error"
    return 1
  fi
  return 0
}
multipart_upload_from_bucket_range() {
  # Copy split-file parts into a bucket, then multipart-copy byte ranges of
  # them into "$2-copy".
  # params: bucket, copy source, key, part count, range (e.g. "bytes=0-100")
  # return 0 for success, 1 for failure
  if [ $# -ne 5 ]; then
    log 2 "multipart upload from bucket with range command requires bucket, copy source, key, part count, and range"
    return 1
  fi
  if ! split_file "$3" "$4"; then
    log 2 "error splitting file"
    return 1
  fi
  local i
  for ((i = 0; i < $4; i++)); do
    log 5 "key: $3"
    log 5 "file info: $(ls -l "$3"-"$i")"
    if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
      log 2 "error copying object"
      return 1
    fi
  done
  if ! create_multipart_upload "$1" "$2-copy"; then
    log 2 "error running first multipart upload"
    return 1
  fi
  parts="["
  for ((i = 1; i <= $4; i++)); do
    if ! upload_part_copy_with_range "$1" "$2-copy" "$upload_id" "$2" "$i" "$5"; then
      # upload_part_copy_error is set by upload_part_copy_with_range
      # shellcheck disable=SC2154
      log 2 "error uploading part $i: $upload_part_copy_error"
      return 1
    fi
    parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
    if [[ $i -ne $4 ]]; then
      parts+=","
    fi
  done
  parts+="]"
  # 'if !' avoids reading a stale $completed from an earlier call on success
  if ! error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}'); then
    log 2 "Error completing upload: $error"
    return 1
  fi
  return 0
}
# perform all parts of a multipart upload before completion command
# params: bucket, key, file to split and upload, number of file parts to upload
# return: 0 for success, 1 for failure; exports $parts on success
multipart_upload_before_completion() {
  if [ $# -ne 4 ]; then
    log 2 "multipart upload pre-completion command missing bucket, key, file, and/or part count"
    return 1
  fi
  if ! split_file "$3" "$4"; then
    log 2 "error splitting file"
    return 1
  fi
  if ! create_multipart_upload "$1" "$2"; then
    log 2 "error creating multipart upload"
    return 1
  fi
  parts="["
  for ((i = 1; i <= $4; i++)); do
    # upload_id is set by create_multipart_upload
    # shellcheck disable=SC2154
    if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
      # log 2 (not echo) for consistency with the rest of the file
      log 2 "error uploading part $i"
      return 1
    fi
    # etag is set by upload_part; build the JSON Parts array as we go
    parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
    if [[ $i -ne $4 ]]; then
      parts+=","
    fi
  done
  parts+="]"
  export parts
}
multipart_upload_before_completion_with_params() {
  # Multipart upload up to (not including) completion, passing object
  # parameters through to create-multipart-upload.
  # params: bucket, key, file, part count, content type, metadata, hold status,
  #         lock mode, retain until date, tagging
  # return: 0 for success, 1 for failure; exports $parts on success
  if [ $# -ne 10 ]; then
    log 2 "multipart upload command missing bucket, key, file, part count, content type, metadata, hold status, lock mode, retain until date, tagging"
    return 1
  fi
  # 'if !' checks the command directly; the old '|| split_result=$?' pattern
  # read a stale global on success and could report a false failure
  if ! split_file "$3" "$4"; then
    log 2 "error splitting file"
    return 1
  fi
  if ! create_multipart_upload_params "$1" "$2" "$5" "$6" "$7" "$8" "$9" "${10}"; then
    log 2 "error creating multipart upload"
    return 1
  fi
  parts="["
  for ((i = 1; i <= $4; i++)); do
    # upload_id is set by create_multipart_upload_params
    if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
      log 2 "error uploading part $i"
      return 1
    fi
    # etag is set by upload_part
    parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
    if [[ $i -ne $4 ]]; then
      parts+=","
    fi
  done
  parts+="]"
  export parts
}
multipart_upload_before_completion_custom() {
  # Multipart upload up to (not including) completion, forwarding any extra
  # arguments to create-multipart-upload.
  # params: bucket, key, file, part count, optional additional params
  # return: 0 for success, 1 for failure; exports $parts on success
  if [ $# -lt 4 ]; then
    log 2 "multipart upload custom command missing bucket, key, file, part count, and/or optional params"
    return 1
  fi
  # 'if !' replaces '|| local split_result=$?', which only declared the local
  # on failure and so read a stale global on success
  if ! split_file "$3" "$4"; then
    log 2 "error splitting file"
    return 1
  fi
  # extra args intentionally unquoted so each word becomes its own parameter
  # shellcheck disable=SC2086 disable=SC2048
  if ! create_multipart_upload_custom "$1" "$2" ${*:5}; then
    log 2 "error creating multipart upload"
    return 1
  fi
  log 5 "upload ID: $upload_id"
  parts="["
  for ((i = 1; i <= $4; i++)); do
    if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
      log 2 "error uploading part $i"
      return 1
    fi
    parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
    if [[ $i -ne $4 ]]; then
      parts+=","
    fi
  done
  parts+="]"
  export parts
}
# perform a full multipart upload (create, upload parts, complete), forwarding
# any optional additional arguments to the creation step
# params: bucket, key, file, part count, optional extra creation params
# return 0 for success, 1 for failure
multipart_upload_custom() {
  if [ $# -lt 4 ]; then
    log 2 "multipart upload custom command missing bucket, key, file, part count, and/or optional additional params"
    return 1
  fi
  # intentional word-splitting of the optional trailing params
  # shellcheck disable=SC2086 disable=SC2048
  if ! multipart_upload_before_completion_custom "$1" "$2" "$3" "$4" ${*:5}; then
    log 2 "error performing pre-completion multipart upload"
    return 1
  fi
  log 5 "upload ID: $upload_id, parts: $parts"
  if ! complete_multipart_upload "$1" "$2" "$upload_id" "$parts"; then
    log 2 "Error completing upload"
    return 1
  fi
  return 0
}
# perform a full multipart upload (create, upload parts, complete)
# params: bucket, key, file, part count
# return 0 for success, 1 for failure
multipart_upload() {
  if [ $# -ne 4 ]; then
    log 2 "multipart upload command missing bucket, key, file, and/or part count"
    return 1
  fi
  if ! multipart_upload_before_completion "$1" "$2" "$3" "$4"; then
    log 2 "error performing pre-completion multipart upload"
    return 1
  fi
  if ! complete_multipart_upload "$1" "$2" "$upload_id" "$parts"; then
    log 2 "Error completing upload"
    return 1
  fi
  return 0
}
# perform a multi-part upload with extra object-creation parameters
# params: bucket, key, source file location, number of parts, content type,
#         metadata, hold status, lock mode, retain until date, tagging
# return 0 for success, 1 for failure
multipart_upload_with_params() {
  if [ $# -ne 10 ]; then
    log 2 "multipart upload command requires bucket, key, file, part count, content type, metadata, hold status, lock mode, retain until date, tagging"
    return 1
  fi
  log 5 "1: $1, 2: $2, 3: $3, 4: $4, 5: $5, 6: $6, 7: $7, 8: $8, 9: $9, 10: ${10}"
  if ! multipart_upload_before_completion_with_params "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}"; then
    log 2 "error performing pre-completion multipart upload"
    return 1
  fi
  log 5 "Upload parts: $parts"
  if ! complete_multipart_upload "$1" "$2" "$upload_id" "$parts"; then
    log 2 "Error completing upload"
    return 1
  fi
  return 0
}
# start a multipart upload, then abort it rather than completing it
# params: bucket, key, local file location, number of parts to split into before uploading
# return 0 for success, 1 for failure
run_then_abort_multipart_upload() {
  if [ $# -ne 4 ]; then
    log 2 "run then abort multipart upload command missing bucket, key, file, and/or part count"
    return 1
  fi
  local bucket=$1 key=$2 file=$3 part_count=$4
  if ! multipart_upload_before_completion "$bucket" "$key" "$file" "$part_count"; then
    log 2 "error performing pre-completion multipart upload"
    return 1
  fi
  # 'upload_id' is set globally by multipart_upload_before_completion
  if ! abort_multipart_upload "$bucket" "$key" "$upload_id"; then
    log 2 "error aborting multipart upload"
    return 1
  fi
  return 0
}
# abort every in-progress multipart upload in a bucket
# param: bucket name
# return 0 for success, 1 for error
abort_all_multipart_uploads() {
  if [ $# -ne 1 ]; then
    log 2 "'abort_all_multipart_uploads' requires bucket name"
    return 1
  fi
  if ! list_multipart_uploads "$1"; then
    log 2 "error listing multipart uploads"
    return 1
  fi
  # 'uploads' is set globally by list_multipart_uploads
  # shellcheck disable=SC2154
  log 5 "UPLOADS: $uploads"
  if ! upload_set=$(echo "$uploads" | grep -v "InsecureRequestWarning" | jq -c '.Uploads[]' 2>&1); then
    if [[ $upload_set == *"Cannot iterate over null"* ]]; then
      # a missing/null .Uploads array means there is nothing to abort
      return 0
    fi
    log 2 "error getting upload set: $upload_set"
    return 1
  fi
  log 5 "UPLOAD SET: $upload_set"
  # iterate line-by-line: jq -c emits one JSON object per line, and keys may
  # contain spaces, so an unquoted 'for' loop would word-split the JSON
  while IFS= read -r upload; do
    [[ -n "$upload" ]] || continue
    log 5 "UPLOAD: $upload"
    if ! upload_id=$(echo "$upload" | jq -r ".UploadId" 2>&1); then
      log 2 "error getting upload ID: $upload_id"
      return 1
    fi
    log 5 "upload ID: $upload_id"
    if ! key=$(echo "$upload" | jq -r ".Key" 2>&1); then
      log 2 "error getting key: $key"
      return 1
    fi
    log 5 "Aborting multipart upload for key: $key, UploadId: $upload_id"
    if ! abort_multipart_upload "$1" "$key" "$upload_id"; then
      log 2 "error aborting multipart upload"
      return 1
    fi
  done <<< "$upload_set"
  return 0
}

View File

@@ -87,7 +87,12 @@ verify_no_object_tags() {
log 2 "'verify_no_object_tags' requires client, bucket, object"
return 1
fi
if ! get_object_tagging "$1" "$2" "$3"; then
result=0
get_object_tagging "$1" "$2" "$3" || result=$?
if [ $result == 1 ]; then
if [ "$1" == 'rest' ]; then
return 0
fi
log 2 "error getting object tagging"
return 1
fi
@@ -117,32 +122,52 @@ check_verify_object_tags() {
return 1
fi
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
if ! tag_set_key=$(echo "$tags" | jq -r '.TagSet[0].Key' 2>&1); then
log 2 "error retrieving tag key: $tag_set_key"
if ! parse_object_tags_s3api; then
log 2 "error parsing object tags"
return 1
fi
if ! tag_set_value=$(echo "$tags" | jq -r '.TagSet[0].Value' 2>&1); then
log 2 "error retrieving tag value: $tag_set_value"
elif [ "$1" == 'rest' ]; then
if ! parse_object_tags_rest; then
log 2 "error parsing object tags"
return 1
fi
if [[ $tag_set_key != "$4" ]]; then
log 2 "key mismatch ($tag_set_key, $4)"
return 1
fi
if [[ $tag_set_value != "$5" ]]; then
log 2 "value mismatch ($tag_set_value, $5)"
return 1
fi
else
elif [[ $1 == 'mc' ]]; then
read -r tag_set_key tag_set_value <<< "$(echo "$tags" | awk 'NR==2 {print $1, $3}')"
if [[ $tag_set_key != "$4" ]]; then
log 2 "Key mismatch ($tag_set_key, $4)"
return 1
fi
if [[ $tag_set_value != "$5" ]]; then
log 2 "Value mismatch ($tag_set_value, $5)"
return 1
fi
else
log 2 "unrecognized client for check_verify_object_tags: $1"
return 1
fi
if [[ $tag_set_key != "$4" ]]; then
log 2 "Key mismatch ($tag_set_key, $4)"
return 1
fi
if [[ $tag_set_value != "$5" ]]; then
log 2 "Value mismatch ($tag_set_value, $5)"
return 1
fi
return 0
}
# extract the first tag's key and value from the s3api JSON held in the
# global 'tags' variable
# sets globals: tag_set_key, tag_set_value
# return 0 for success, 1 for a jq parsing failure
parse_object_tags_s3api() {
  tag_set_key=$(echo "$tags" | jq -r '.TagSet[0].Key' 2>&1) || {
    log 2 "error retrieving tag key: $tag_set_key"
    return 1
  }
  tag_set_value=$(echo "$tags" | jq -r '.TagSet[0].Value' 2>&1) || {
    log 2 "error retrieving tag value: $tag_set_value"
    return 1
  }
  return 0
}
# extract the first tag's key and value from the REST XML response saved at
# $TEST_FILE_FOLDER/object_tags.txt (local-name() xpath ignores namespaces)
# sets globals: tag_set_key, tag_set_value
# return 0 for success, 1 for an xmllint failure
parse_object_tags_rest() {
  if ! tag_set_key=$(xmllint --xpath '//*[local-name()="Key"]/text()' "$TEST_FILE_FOLDER/object_tags.txt" 2>&1); then
    log 2 "error getting key: $tag_set_key"
    return 1
  fi
  if ! tag_set_value=$(xmllint --xpath '//*[local-name()="Value"]/text()' "$TEST_FILE_FOLDER/object_tags.txt" 2>&1); then
    # bug fix: error message previously referenced the never-set '$value'
    log 2 "error getting value: $tag_set_value"
    return 1
  fi
  return 0
}