test: PutObject/ListObjects/GetObject/HeadObject encodings
.github/workflows/system.yml (vendored) | 4
@@ -21,9 +21,9 @@ jobs:
           RECREATE_BUCKETS: "true"
           DELETE_BUCKETS_AFTER_TEST: "true"
           BACKEND: "posix"
-        - set: "REST, posix, non-static, base|acl, folder IAM"
+        - set: "REST, posix, non-static, base|acl|multipart, folder IAM"
           IAM_TYPE: folder
-          RUN_SET: "rest-base,rest-acl"
+          RUN_SET: "rest-base,rest-acl,rest-multipart"
           RECREATE_BUCKETS: "true"
           DELETE_BUCKETS_AFTER_TEST: "true"
           BACKEND: "posix"
@@ -72,11 +72,6 @@ delete_object_bypass_retention() {
     log 2 "error deleting object: $result"
     return 1
   fi
-  if [ "$result" != "204" ]; then
-    delete_object_error=$(cat "$TEST_FILE_FOLDER/result.txt")
-    log 2 "expected '204', was '$result' ($delete_object_error)"
-    return 1
-  fi
   return 0
 }
@@ -26,13 +26,25 @@ check_param_count() {
   return 0
 }
 
-assert_param_count() {
-  if [ $# -ne 4 ]; then
-    log 2 "'assert_param_count' requires function name, params list, expected, actual"
+check_param_count_v2() {
+  if [ $# -ne 3 ]; then
+    log 2 "'check_param_count' requires params list, expected, actual"
     return 1
   fi
-  if [ "$3" -ne "$4" ]; then
-    log_with_stack_ref 2 "function $1 requires $2" 4
+  if [ "$2" -ne "$3" ]; then
+    log_with_stack_ref 2 "function '${FUNCNAME[1]}' requires $1" 2
     return 1
   fi
   return 0
+}
+
+assert_param_count() {
+  if [ $# -ne 3 ]; then
+    log 2 "'assert_param_count' requires params list, expected, actual"
+    return 1
+  fi
+  if [ "$2" -ne "$3" ]; then
+    log_with_stack_ref 2 "function '${FUNCNAME[3]}' requires $1" 4
+    return 1
+  fi
+  return 0
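For reference, a minimal sketch of the new calling convention (the caller's name now comes from FUNCNAME instead of being passed explicitly; the helper and test names below are hypothetical, not part of the commit):

# plain helper: only the parameter description, expected count, and actual count are passed
get_bucket_info() {
  if ! check_param_count_v2 "bucket name" 1 $#; then
    return 1
  fi
  log 5 "bucket: $1"
}

# bats-style helper, matching the updated calls in test_common.sh further down
test_common_example() {
  run assert_param_count "client type" 1 "$#"
  assert_success
}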
tests/drivers/get_object_lock_config/get_object_lock_config_rest.sh (new file) | 34

@@ -0,0 +1,34 @@
#!/usr/bin/env bash

# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

check_object_lock_config() {
  log 6 "check_object_lock_config"
  if ! check_param_count "check_object_lock_config" "bucket" 1 $#; then
    return 1
  fi
  lock_config_exists=true
  if ! get_object_lock_configuration "rest" "$1"; then
    # shellcheck disable=SC2154
    if [[ "$get_object_lock_config_err" == *"does not exist"* ]]; then
      # shellcheck disable=SC2034
      lock_config_exists=false
    else
      log 2 "error getting object lock config"
      return 1
    fi
  fi
  return 0
}
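For reference, a hedged usage sketch (hypothetical caller; assumes the helpers above are sourced): the function fails only on transport errors and records a missing lock configuration in the lock_config_exists global instead of failing.

# probe whether the bucket already has an object lock configuration
if ! check_object_lock_config "$BUCKET_ONE_NAME"; then
  log 2 "error checking object lock config"
  return 1
fi
# shellcheck disable=SC2154
if [ "$lock_config_exists" == "false" ]; then
  log 5 "bucket has no object lock configuration"
fi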
tests/drivers/list_objects/list_objects_rest.sh (new file) | 42
@@ -0,0 +1,42 @@
#!/usr/bin/env bash

# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# param: bucket name
# return 0 for success, 1 for failure
list_and_delete_objects() {
  log 6 "list_and_delete_objects"
  if ! check_param_count "list_and_delete_objects" "bucket" 1 $#; then
    return 1
  fi
  if ! list_objects 'rest' "$1"; then
    log 2 "error getting object list"
    return 1
  fi
  # shellcheck disable=SC2154
  log 5 "objects: ${object_array[*]}"
  for object in "${object_array[@]}"; do
    if ! clear_object_in_bucket "$1" "$object"; then
      log 2 "error deleting object $object"
      return 1
    fi
  done

  if ! delete_old_versions "$1"; then
    log 2 "error deleting old version"
    return 1
  fi
  return 0
}
@@ -21,7 +21,7 @@ source ./tests/rest_scripts/rest.sh
 # shellcheck disable=SC2153
 bucket_name="$BUCKET_NAME"
 # shellcheck disable=SC2153
-key="$OBJECT_KEY"
+key="$(echo -n "$OBJECT_KEY" | jq -sRr 'split("/") | map(@uri) | join("/")')"
 # shellcheck disable=SC2153
 checksum_mode="${CHECKSUM_MODE:=false}"
 # shellcheck disable=SC2153
@@ -44,9 +44,10 @@ build_canonical_request "${canonical_request_data[@]}"
 # shellcheck disable=SC2119
 create_canonical_hash_sts_and_signature
 
-curl_command+=(curl -ks -w "\"%{http_code}\"" "$AWS_ENDPOINT_URL/$bucket_name/$key"
+output_file_esc="$(echo -n "$OUTPUT_FILE" | sed -e 's/[][`"$^]/\\&/g')"
+curl_command+=(curl -ks -w "\"%{http_code}\"" "\"$AWS_ENDPOINT_URL/$bucket_name/$key\""
 -H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
 curl_command+=("${header_fields[@]}")
-curl_command+=(-o "$OUTPUT_FILE")
+curl_command+=(-o "\"$output_file_esc\"")
 # shellcheck disable=SC2154
 eval "${curl_command[*]}" 2>&1
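For reference, a hedged sketch of what the added jq pipeline does to an object key (hypothetical key; assumes jq 1.6+ for @uri): each slash-separated segment is percent-encoded while the path separators are preserved, so keys with spaces or reserved characters survive the signed REST request.

$ echo -n 'my dir/file name?.txt' | jq -sRr 'split("/") | map(@uri) | join("/")'
my%20dir/file%20name%3F.txt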
@@ -21,7 +21,7 @@ source ./tests/rest_scripts/rest.sh
 # shellcheck disable=SC2153
 bucket_name="$BUCKET_NAME"
 # shellcheck disable=SC2154
-key="$OBJECT_KEY"
+key="$(echo -n "$OBJECT_KEY" | jq -sRr 'split("/") | map(@uri) | join("/")')"
 # shellcheck disable=SC2153
 version_id="$VERSION_ID"
@@ -23,7 +23,7 @@ data_file="$DATA_FILE"
 # shellcheck disable=SC2153
 bucket_name="$BUCKET_NAME"
 # shellcheck disable=SC2153
-key="$OBJECT_KEY"
+key="$(echo -n "$OBJECT_KEY" | jq -sRr 'split("/") | map(@uri) | join("/")')"
 # shellcheck disable=SC2153,SC2154
 checksum_type="$CHECKSUM_TYPE"
 # shellcheck disable=SC2153
@@ -38,8 +38,10 @@ checksum_hash="$CHECKSUM"
 fake_signature="$SIGNATURE"
 
 current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
+data_file_esc="$(echo -n "$data_file" | sed -e 's/[][`"$^{}]/\\&/g')"
+log_rest 5 "sha256sum: $(sha256sum "$data_file")"
 if [ "$payload" == "" ]; then
-  payload_hash="$(sha256sum "$data_file" | awk '{print $1}')"
+  payload_hash="$(sha256sum "$data_file" | awk '{print $1}' | sed 's/\\//g' )"
 else
   payload_hash="$payload"
 fi
@@ -66,9 +68,11 @@ if [ "$fake_signature" != "" ]; then
   signature="$fake_signature"
 fi
 
-curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "$AWS_ENDPOINT_URL/$bucket_name/$key")
+curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "\"$AWS_ENDPOINT_URL/$bucket_name/$key\"")
 curl_command+=(-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=$param_list,Signature=$signature\"")
 curl_command+=("${header_fields[@]}")
-curl_command+=(-T "$data_file" -o "$OUTPUT_FILE")
+curl_command+=(-T "\"$data_file_esc\"" -o "$OUTPUT_FILE")
 # shellcheck disable=SC2154
 log_rest 5 "curl command: ${curl_command[*]}"
 eval "${curl_command[*]}" 2>&1
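For reference, a hedged sketch of what the added sed escaping does to a data file path before it is spliced into the eval'd curl command (hypothetical path): characters that are special inside the quoted command string are backslash-escaped.

$ echo -n 'uploads/file "$name" {v1}.txt' | sed -e 's/[][`"$^{}]/\\&/g'
uploads/file \"\$name\" \{v1\}.txt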
@@ -55,6 +55,8 @@ load_parameters() {
     trailer="$TRAILER"
     # shellcheck disable=SC2153
     checksum="$CHECKSUM"
+    # shellcheck disable=SC2153
+    invalid_checksum_type="${INVALID_CHECKSUM_TYPE:=false}"
   fi
 
 readonly initial_sts_data="AWS4-HMAC-SHA256-PAYLOAD
@@ -152,7 +154,7 @@ get_file_size_and_content_length() {
 calculate_checksum() {
   checksum_type="${trailer/x-amz-checksum-/}"
   log_rest 5 "checksum type: $checksum_type"
-  if [ "$CHECKSUM" == "" ]; then
+  if [ "$CHECKSUM" == "" ] && [ "$invalid_checksum_type" != "true" ]; then
     if ! checksum=$(DATA_FILE="$data_file" CHECKSUM_TYPE="$checksum_type" ./tests/rest_scripts/calculate_checksum.sh 2>&1); then
       log_rest 2 "error getting checksum: $checksum"
       return 1
tests/run.sh | 17
@@ -38,6 +38,7 @@ show_help() {
   echo "  rest-acl           Run REST ACL tests"
   echo "  rest-chunked       Run REST chunked upload tests"
   echo "  rest-checksum      Run REST checksum tests"
+  echo "  rest-multipart     Run REST multipart tests"
   echo "  rest-versioning    Run REST versioning tests"
   echo "  rest-bucket        Run REST bucket tests"
 }
@@ -48,7 +49,7 @@ handle_param() {
     show_help
     exit 0
     ;;
-  s3|s3-file-count|s3-non-file-count|s3api|s3cmd|s3cmd-user|s3cmd-non-user|s3cmd-file-count|mc|mc-non-file-count|mc-file-count|s3api-user|rest|s3api-policy|s3api-bucket|s3api-object|s3api-multipart|rest-base|rest-acl|rest-chunked|rest-checksum|rest-versioning|rest-bucket)
+  s3|s3-file-count|s3-non-file-count|s3api|s3cmd|s3cmd-user|s3cmd-non-user|s3cmd-file-count|mc|mc-non-file-count|mc-file-count|s3api-user|rest|s3api-policy|s3api-bucket|s3api-object|s3api-multipart|rest-base|rest-acl|rest-chunked|rest-checksum|rest-versioning|rest-bucket|rest-multipart)
     run_suite "$1"
     ;;
   *) # Handle unrecognized options or positional arguments
@@ -155,6 +156,8 @@ run_suite() {
       exit_code=1
     elif ! "$HOME"/bin/bats ./tests/test_rest_checksum.sh; then
       exit_code=1
+    elif ! "$HOME"/bin/bats ./tests/test_rest_multipart.sh; then
+      exit_code=1
     elif ! "$HOME"/bin/bats ./tests/test_rest_versioning.sh; then
       exit_code=1
     elif ! "$HOME"/bin/bats ./tests/test_rest_bucket.sh; then
@@ -169,6 +172,10 @@ run_suite() {
       echo "Running REST ACL tests ..."
       "$HOME"/bin/bats ./tests/test_rest_acl.sh || exit_code=$?
       ;;
+    rest-bucket)
+      echo "Running REST bucket tests ..."
+      "$HOME"/bin/bats ./tests/test_rest_bucket.sh || exit_code=$?
+      ;;
     rest-chunked)
       echo "Running REST chunked upload tests ..."
       "$HOME"/bin/bats ./tests/test_rest_chunked.sh || exit_code=$?
@@ -177,14 +184,14 @@ run_suite() {
       echo "Running REST checksum tests ..."
       "$HOME"/bin/bats ./tests/test_rest_checksum.sh || exit_code=$?
       ;;
+    rest-multipart)
+      echo "Running REST multipart tests ..."
+      "$HOME"/bin/bats ./tests/test_rest_multipart.sh || exit_code=$?
+      ;;
     rest-versioning)
       echo "Running REST versioning tests ..."
       "$HOME"/bin/bats ./tests/test_rest_versioning.sh || exit_code=$?
       ;;
-    rest-bucket)
-      echo "Running REST bucket tests ..."
-      "$HOME"/bin/bats ./tests/test_rest_bucket.sh || exit_code=$?
-      ;;
     s3api-user)
       echo "Running s3api user tests ..."
       "$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
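For reference, a hedged sketch of how the new set can be selected (the exact entry point is an assumption based on handle_param above; the workflow change at the top passes the same name through RUN_SET as "rest-base,rest-acl,rest-multipart"):

./tests/run.sh rest-multipart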
@@ -42,7 +42,7 @@ source ./tests/drivers/drivers.sh
 # param: command type
 # fail on test failure
 test_common_multipart_upload() {
-  run assert_param_count "test_common_multipart_upload" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   bucket_file="largefile"
@@ -74,7 +74,7 @@ test_common_create_delete_bucket() {
     return
   fi
 
-  run assert_param_count "test_common_create_delete_bucket" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
@@ -91,7 +91,7 @@ test_common_create_delete_bucket() {
 }
 
 test_common_copy_object() {
-  run assert_param_count "test_common_copy_object" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local object_name="test-object"
@@ -122,7 +122,7 @@ test_common_copy_object() {
 # param: client
 # fail on error
 test_common_put_object_with_data() {
-  run assert_param_count "test_common_put_object_with_data" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local object_name="test-object"
@@ -135,7 +135,7 @@ test_common_put_object_with_data() {
 # param: client
 # fail on error
 test_common_put_object_no_data() {
-  run assert_param_count "test_common_put_object_no_data" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local object_name="test-object"
@@ -148,7 +148,7 @@ test_common_put_object_no_data() {
 # params: client, filename
 # fail on test failure
 test_common_put_object() {
-  run assert_param_count "test_common_put_object" "client type, file" 2 "$#"
+  run assert_param_count "client type, file" 2 "$#"
   assert_success
 
   run setup_bucket "$BUCKET_ONE_NAME"
@@ -179,7 +179,7 @@ test_common_put_object() {
 }
 
 test_common_put_get_object() {
-  run assert_param_count "test_common_put_get_object" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local object_name="test-object"
@@ -207,7 +207,7 @@ test_common_put_get_object() {
 # param: "aws" or "s3cmd"
 # pass if buckets are properly listed, fail if not
 test_common_list_buckets() {
-  run assert_param_count "test_common_list_buckets" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   run setup_buckets "$BUCKET_ONE_NAME" "$BUCKET_TWO_NAME"
@@ -218,7 +218,7 @@ test_common_list_buckets() {
 }
 
 test_common_list_objects() {
-  run assert_param_count "test_common_list_objects" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   object_one="test-file-one"
@@ -237,7 +237,7 @@ test_common_list_objects() {
 }
 
 test_common_set_get_delete_bucket_tags() {
-  run assert_param_count "test_common_set_get_delete_bucket_tags" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local key="test_key"
@@ -263,7 +263,7 @@ test_common_set_get_delete_bucket_tags() {
 }
 
 test_common_set_get_object_tags() {
-  run assert_param_count "test_common_set_get_object_tags" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local bucket_file="bucket-file"
@@ -287,7 +287,7 @@ test_common_set_get_object_tags() {
 }
 
 test_common_presigned_url_utf8_chars() {
-  run assert_param_count "test_common_presigned_url_utf8_chars" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   local bucket_file="my-$%^&*;"
@@ -312,7 +312,7 @@ test_common_presigned_url_utf8_chars() {
 }
 
 test_common_list_objects_file_count() {
-  run assert_param_count "test_common_list_objects_file_count" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   run create_test_file_count 1001
@@ -329,7 +329,7 @@ test_common_list_objects_file_count() {
 }
 
 test_common_delete_object_tagging() {
-  run assert_param_count "test_common_delete_object_tagging" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   bucket_file="bucket_file"
@@ -356,7 +356,7 @@ test_common_delete_object_tagging() {
 }
 
 test_common_get_bucket_location() {
-  run assert_param_count "test_common_get_bucket_location" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   run setup_bucket "$BUCKET_ONE_NAME"
@@ -367,7 +367,7 @@ test_common_get_bucket_location() {
 }
 
 test_common_get_put_delete_bucket_policy() {
-  run assert_param_count "test_common_get_put_delete_bucket_policy" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   policy_file="policy_file"
@@ -404,7 +404,7 @@ test_common_get_put_delete_bucket_policy() {
 }
 
 test_common_ls_directory_object() {
-  run assert_param_count "test_common_ls_directory_object" "client type" 1 "$#"
+  run assert_param_count "client type" 1 "$#"
   assert_success
 
   test_file="a"
@@ -146,29 +146,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - multipart upload create then abort" {
-  run setup_bucket "$BUCKET_ONE_NAME"
-  assert_success
-
-  run create_abort_multipart_upload_rest "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-}
-
-@test "REST - multipart upload create, list parts" {
-  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run split_file "$TEST_FILE_FOLDER/$test_file" 4
-  assert_success
-
-  run upload_check_parts "$BUCKET_ONE_NAME" "$test_file" \
-    "$TEST_FILE_FOLDER/$test_file-0" "$TEST_FILE_FOLDER/$test_file-1" "$TEST_FILE_FOLDER/$test_file-2" "$TEST_FILE_FOLDER/$test_file-3"
-  assert_success
-
-  run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
-  assert_success
-}
-
 @test "REST - get object attributes" {
   if [ "$DIRECT" != "true" ]; then
     skip "https://github.com/versity/versitygw/issues/1001"
@@ -208,36 +185,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - get policy w/o policy" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/959"
-  fi
-
-  run setup_bucket "$BUCKET_ONE_NAME"
-  assert_success
-
-  run get_and_check_no_policy_error "$BUCKET_ONE_NAME"
-  assert_success
-}
-
-@test "REST - put policy" {
-  run setup_bucket "$BUCKET_ONE_NAME"
-  assert_success
-
-  run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
-  assert_success
-  log 5 "username: ${lines[1]}"
-  log 5 "password: ${lines[2]}"
-
-  sleep 5
-
-  run setup_policy_with_single_statement "$TEST_FILE_FOLDER/policy_file.txt" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
-  assert_success
-
-  run put_and_check_policy_rest "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/policy_file.txt" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
-  assert_success
-}
-
 @test "REST - list objects v2 - invalid continuation token" {
   if [ "$DIRECT" != "true" ]; then
     skip "https://github.com/versity/versitygw/issues/993"
@@ -283,28 +230,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - complete upload - invalid part" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1008"
-  fi
-  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run create_upload_finish_wrong_etag "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-}
-
-@test "REST - upload part copy (UploadPartCopy)" {
-  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run create_upload_part_copy_rest "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file"
-  assert_success
-
-  run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
-  assert_success
-}
-
 @test "REST - head object" {
   run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
   assert_success
@@ -320,25 +245,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - HeadObject - default crc64nvme checksum" {
-  run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run check_default_checksum "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file"
-  assert_success
-}
-
-@test "REST - POST call on root endpoint" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1036"
-  fi
-  run delete_object_empty_bucket_check_error
-  assert_success
-}
-
 @test "REST - delete objects - no content-md5 header" {
   if [ "$DIRECT" != "true" ]; then
     skip "https://github.com/versity/versitygw/issues/1040"
@@ -385,58 +291,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - HeadObject does not return 405 with versioning, after file deleted" {
-  if [ "$RECREATE_BUCKETS" == "false" ] || [[ ( -z "$VERSIONING_DIR" ) && ( "$DIRECT" != "true" ) ]]; then
-    skip "test isn't valid for this configuration"
-  fi
-  run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
-  assert_success
-
-  # in static bucket config, bucket will still exist
-  if ! bucket_exists "$BUCKET_ONE_NAME"; then
-    run create_bucket_object_lock_enabled "$BUCKET_ONE_NAME"
-    assert_success
-  fi
-
-  run create_test_files "$test_file"
-  assert_success
-
-  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run delete_object "s3api" "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run verify_object_not_found "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-}
-
-@test "REST - HeadObject returns 405 when querying DeleteMarker" {
-  if [ "$RECREATE_BUCKETS" == "false" ] || [[ ( -z "$VERSIONING_DIR" ) && ( "$DIRECT" != "true" ) ]]; then
-    skip "test isn't valid for this configuration"
-  fi
-  run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
-  assert_success
-
-  # in static bucket config, bucket will still exist
-  if ! bucket_exists "$BUCKET_ONE_NAME"; then
-    run create_bucket_object_lock_enabled "$BUCKET_ONE_NAME"
-    assert_success
-  fi
-
-  run create_test_files "$test_file"
-  assert_success
-
-  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run delete_object "s3api" "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run get_delete_marker_and_verify_405 "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-}
-
 @test "REST - invalid 'Expires' parameter" {
   run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
   assert_success
@@ -541,85 +395,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - UploadPartCopy w/o upload ID" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1226"
-  fi
-  run upload_part_copy_without_upload_id_or_part_number "$BUCKET_ONE_NAME" "$test_file" "1" "" \
-    400 "InvalidArgument" "This operation does not accept partNumber without uploadId"
-  assert_success
-}
-
-@test "REST - UploadPartCopy w/o part number" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1229"
-  fi
-  run upload_part_copy_without_upload_id_or_part_number "$BUCKET_ONE_NAME" "$test_file" "" "dummy" \
-    405 "MethodNotAllowed" "The specified method is not allowed against this resource"
-  assert_success
-}
-
-@test "REST - UploadPartCopy - ETag is quoted" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1235"
-  fi
-  run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run upload_part_copy_check_etag_header "$BUCKET_ONE_NAME" "$test_file"-mp "$BUCKET_ONE_NAME/$test_file"
-  assert_success
-}
-
-@test "REST - UploadPart - ETag is quoted" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1233"
-  fi
-  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run split_file "$TEST_FILE_FOLDER/$test_file" 4
-  assert_success
-
-  run create_multipart_upload_rest "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-  # shellcheck disable=SC2030
-  upload_id=$output
-
-  run upload_part_check_etag_header "$BUCKET_ONE_NAME" "$test_file" "$upload_id"
-  assert_success
-}
-
-@test "REST - UploadPart w/o part number" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1236"
-  fi
-  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run split_file "$TEST_FILE_FOLDER/$test_file" 4
-  assert_success
-
-  run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-}
-
-@test "REST - UploadPart w/o upload ID" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1237"
-  fi
-  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-
-  run split_file "$TEST_FILE_FOLDER/$test_file" 4
-  assert_success
-
-  run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
-  assert_success
-}
-
 @test "REST - copy object w/invalid copy source" {
   run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
   assert_success
@@ -656,20 +431,6 @@ test_file="test_file"
   assert_success
 }
 
-@test "REST - create bucket test" {
-  if [ "$RECREATE_BUCKETS" == "false" ]; then
-    skip "invalid test for static buckets"
-  fi
-  run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
-  assert_success
-
-  run create_bucket_rest "$BUCKET_ONE_NAME"
-  assert_success
-
-  run list_check_buckets_rest
-  assert_success
-}
-
 @test "REST - put object, missing Content-Length" {
   if [ "$DIRECT" != "true" ]; then
     skip "https://github.com/versity/versitygw/issues/1321"
@@ -680,3 +441,21 @@ test_file="test_file"
   run put_object_without_content_length "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file"
   assert_success
 }
+
+@test "REST - put, get object, encoded name" {
+  file_name=" \"<>\\^\`{}|+&?%"
+  run setup_bucket_and_file "$BUCKET_ONE_NAME" "$file_name"
+  assert_success
+
+  run put_object_rest "$TEST_FILE_FOLDER/$file_name" "$BUCKET_ONE_NAME" "$file_name/$file_name"
+  assert_success
+
+  run list_check_single_object "$BUCKET_ONE_NAME" "$file_name/$file_name"
+  assert_success
+
+  run download_and_compare_file "$TEST_FILE_FOLDER/$file_name" "$BUCKET_ONE_NAME" "$file_name/$file_name" "$TEST_FILE_FOLDER/${file_name}-copy"
+  assert_success
+
+  run delete_object_rest "$BUCKET_ONE_NAME" "$file_name/$file_name"
+  assert_success
+}
@@ -29,6 +29,8 @@ source ./tests/util/util_ownership.sh
 source ./tests/util/util_rest.sh
 source ./tests/util/util_tags.sh
 
+export RUN_USERS=true
+
 @test "REST - HeadBucket" {
   run setup_bucket "$BUCKET_ONE_NAME"
   assert_success
@@ -139,3 +141,55 @@ source ./tests/util/util_tags.sh
   run put_object_lock_config_without_content_md5 "$BUCKET_ONE_NAME"
   assert_success
 }
+
+@test "REST - get policy w/o policy" {
+  if [ "$DIRECT" != "true" ]; then
+    skip "https://github.com/versity/versitygw/issues/959"
+  fi
+
+  run setup_bucket "$BUCKET_ONE_NAME"
+  assert_success
+
+  run get_and_check_no_policy_error "$BUCKET_ONE_NAME"
+  assert_success
+}
+
+@test "REST - put policy" {
+  run setup_bucket "$BUCKET_ONE_NAME"
+  assert_success
+
+  run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
+  assert_success
+  log 5 "username: ${lines[1]}"
+  log 5 "password: ${lines[2]}"
+
+  sleep 5
+
+  run setup_policy_with_single_statement "$TEST_FILE_FOLDER/policy_file.txt" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
+  assert_success
+
+  run put_and_check_policy_rest "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/policy_file.txt" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
+  assert_success
+}
+
+@test "REST - create bucket test" {
+  if [ "$RECREATE_BUCKETS" == "false" ]; then
+    skip "invalid test for static buckets"
+  fi
+  run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
+  assert_success
+
+  run create_bucket_rest "$BUCKET_ONE_NAME"
+  assert_success
+
+  run list_check_buckets_rest
+  assert_success
+}
+
+@test "REST - POST call on root endpoint" {
+  if [ "$DIRECT" != "true" ]; then
+    skip "https://github.com/versity/versitygw/issues/1036"
+  fi
+  run delete_object_empty_bucket_check_error
+  assert_success
+}
@@ -139,3 +139,14 @@ test_file="test_file"
   run head_object_without_and_with_checksum "$BUCKET_ONE_NAME" "$test_file"
   assert_success
 }
+
+@test "REST - HeadObject - default crc64nvme checksum" {
+  run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+
+  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+
+  run check_default_checksum "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file"
+  assert_success
+}
@@ -47,9 +47,6 @@ source ./tests/util/util_setup.sh
 }
 
 @test "REST - chunked upload, final signature error" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1147"
-  fi
   run setup_bucket "$BUCKET_ONE_NAME"
   assert_success
 
@@ -147,9 +144,6 @@ source ./tests/util/util_setup.sh
 }
 
 @test "test - REST chunked upload - invalid trailer" {
-  if [ "$DIRECT" != "true" ]; then
-    skip "https://github.com/versity/versitygw/issues/1161"
-  fi
   test_file="test-file"
   run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
   assert_success
tests/test_rest_multipart.sh (new executable file) | 148
@@ -0,0 +1,148 @@
#!/usr/bin/env bats

# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

load ./bats-support/load
load ./bats-assert/load

source ./tests/setup.sh
source ./tests/util/util_list_parts.sh
source ./tests/util/util_setup.sh

test_file="test_file"

@test "REST - multipart upload create then abort" {
  run setup_bucket "$BUCKET_ONE_NAME"
  assert_success

  run create_abort_multipart_upload_rest "$BUCKET_ONE_NAME" "$test_file"
  assert_success
}

@test "REST - multipart upload create, list parts" {
  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run split_file "$TEST_FILE_FOLDER/$test_file" 4
  assert_success

  run upload_check_parts "$BUCKET_ONE_NAME" "$test_file" \
    "$TEST_FILE_FOLDER/$test_file-0" "$TEST_FILE_FOLDER/$test_file-1" "$TEST_FILE_FOLDER/$test_file-2" "$TEST_FILE_FOLDER/$test_file-3"
  assert_success

  run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
  assert_success
}

@test "REST - complete upload - invalid part" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1008"
  fi
  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run create_upload_finish_wrong_etag "$BUCKET_ONE_NAME" "$test_file"
  assert_success
}

@test "REST - upload part copy (UploadPartCopy)" {
  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run create_upload_part_copy_rest "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file"
  assert_success

  run download_and_compare_file "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
  assert_success
}

@test "REST - UploadPartCopy w/o upload ID" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1226"
  fi
  run upload_part_copy_without_upload_id_or_part_number "$BUCKET_ONE_NAME" "$test_file" "1" "" \
    400 "InvalidArgument" "This operation does not accept partNumber without uploadId"
  assert_success
}

@test "REST - UploadPartCopy w/o part number" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1229"
  fi
  run upload_part_copy_without_upload_id_or_part_number "$BUCKET_ONE_NAME" "$test_file" "" "dummy" \
    405 "MethodNotAllowed" "The specified method is not allowed against this resource"
  assert_success
}

@test "REST - UploadPartCopy - ETag is quoted" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1235"
  fi
  run setup_bucket_and_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run upload_part_copy_check_etag_header "$BUCKET_ONE_NAME" "$test_file"-mp "$BUCKET_ONE_NAME/$test_file"
  assert_success
}

@test "REST - UploadPart - ETag is quoted" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1233"
  fi
  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run split_file "$TEST_FILE_FOLDER/$test_file" 4
  assert_success

  run create_multipart_upload_rest "$BUCKET_ONE_NAME" "$test_file"
  assert_success
  # shellcheck disable=SC2030
  upload_id=$output

  run upload_part_check_etag_header "$BUCKET_ONE_NAME" "$test_file" "$upload_id"
  assert_success
}

@test "REST - UploadPart w/o part number" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1236"
  fi
  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run split_file "$TEST_FILE_FOLDER/$test_file" 4
  assert_success

  run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
  assert_success
}

@test "REST - UploadPart w/o upload ID" {
  if [ "$DIRECT" != "true" ]; then
    skip "https://github.com/versity/versitygw/issues/1237"
  fi
  run setup_bucket_and_large_file "$BUCKET_ONE_NAME" "$test_file"
  assert_success

  run split_file "$TEST_FILE_FOLDER/$test_file" 4
  assert_success

  run upload_part_without_upload_id "$BUCKET_ONE_NAME" "$test_file"
  assert_success
}
@@ -103,3 +103,55 @@ test_file="test_file"
   run get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
   assert_failure
 }
+
+@test "REST - HeadObject does not return 405 with versioning, after file deleted" {
+  if [ "$RECREATE_BUCKETS" == "false" ] || [[ ( -z "$VERSIONING_DIR" ) && ( "$DIRECT" != "true" ) ]]; then
+    skip "test isn't valid for this configuration"
+  fi
+  run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
+  assert_success
+
+  # in static bucket config, bucket will still exist
+  if ! bucket_exists "$BUCKET_ONE_NAME"; then
+    run create_bucket_object_lock_enabled "$BUCKET_ONE_NAME"
+    assert_success
+  fi
+
+  run create_test_files "$test_file"
+  assert_success
+
+  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+
+  run delete_object "s3api" "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+
+  run verify_object_not_found "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+}
+
+@test "REST - HeadObject returns 405 when querying DeleteMarker" {
+  if [ "$RECREATE_BUCKETS" == "false" ] || [[ ( -z "$VERSIONING_DIR" ) && ( "$DIRECT" != "true" ) ]]; then
+    skip "test isn't valid for this configuration"
+  fi
+  run bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"
+  assert_success
+
+  # in static bucket config, bucket will still exist
+  if ! bucket_exists "$BUCKET_ONE_NAME"; then
+    run create_bucket_object_lock_enabled "$BUCKET_ONE_NAME"
+    assert_success
+  fi
+
+  run create_test_files "$test_file"
+  assert_success
+
+  run put_object "rest" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+
+  run delete_object "s3api" "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+
+  run get_delete_marker_and_verify_405 "$BUCKET_ONE_NAME" "$test_file"
+  assert_success
+}
@@ -1,6 +1,8 @@
 #!/usr/bin/env bash
 
 source ./tests/drivers/drivers.sh
+source ./tests/drivers/get_object_lock_config/get_object_lock_config_rest.sh
+source ./tests/drivers/list_objects/list_objects_rest.sh
 source ./tests/util/util_acl.sh
 source ./tests/util/util_multipart_abort.sh
 source ./tests/util/util_policy.sh
@@ -112,7 +112,7 @@ attempt_chunked_upload_with_bad_final_signature() {
     log 2 "expected code '403', was '$response_code'"
     return 1
   fi
-  response_data="$(echo "$result" | grep "<")"
+  response_data="$(echo "$result" | grep "<Error>" | sed 's/---//g')"
   log 5 "response data: $response_data"
   log 5 "END"
   if ! check_xml_element <(echo "$response_data") "SignatureDoesNotMatch" "Error" "Code"; then
@@ -158,7 +158,10 @@ put_chunked_upload_trailer_invalid() {
   AWS_ENDPOINT_URL="$AWS_ENDPOINT_URL" \
   DATA_FILE="$1" \
   BUCKET_NAME="$2" \
-  OBJECT_KEY="$3" CHUNK_SIZE=8192 TEST_MODE=false TRAILER="x-amz-checksum-sha10" TEST_FILE_FOLDER="$TEST_FILE_FOLDER" COMMAND_FILE="$TEST_FILE_FOLDER/command.txt" ./tests/rest_scripts/put_object_openssl_chunked_trailer_example.sh 2>&1); then
+  OBJECT_KEY="$3" CHUNK_SIZE=8192 TEST_MODE=false \
+  TRAILER="x-amz-checksum-sha10" \
+  INVALID_CHECKSUM_TYPE="true" CHECKSUM="abc" \
+  TEST_FILE_FOLDER="$TEST_FILE_FOLDER" COMMAND_FILE="$TEST_FILE_FOLDER/command.txt" ./tests/rest_scripts/put_object_openssl_chunked_trailer_example.sh 2>&1); then
     log 2 "error creating command: $result"
     return 1
   fi
@@ -172,7 +175,7 @@ put_chunked_upload_trailer_invalid() {
     log 2 "expected response '400', was '$response_code'"
     return 1
   fi
-  error_data="$(echo "$result" | grep "<Error>")"
+  error_data="$(echo "$result" | grep "<Error>" | sed 's/---//g')"
   echo -n "$error_data" > "$TEST_FILE_FOLDER/error-data.txt"
   if ! check_xml_error_contains "$TEST_FILE_FOLDER/error-data.txt" "InvalidRequest" "The value specified in the x-amz-trailer header is not supported"; then
     log 2 "error checking xml error, message"
@@ -176,15 +176,29 @@ compare_files() {
     log 2 "file comparison requires two files"
     return 2
   fi
+  log 5 "comparing files '$1' and '$2'"
   os=$(uname)
 
   if [[ $os == "Darwin" ]]; then
-    file_one_md5=$(md5 -q "$1")
-    file_two_md5=$(md5 -q "$2")
+    if ! file_one_md5=$(md5 -q "$1" 2>&1); then
+      log 2 "error getting md5 for '$1': $file_one_md5"
+      return 2
+    fi
+    if ! file_two_md5=$(md5 -q "$2" 2>&1); then
+      log 2 "error getting md5 for '$2': $file_two_md5"
+      return 2
+    fi
   else
-    file_one_md5=$(md5sum "$1" | cut -d " " -f 1)
-    file_two_md5=$(md5sum "$2" | cut -d " " -f 1)
+    if ! file_one_md5=$(md5sum "$1" | cut -d " " -f 1 2>&1); then
+      log 2 "error getting md5 for '$1': $file_one_md5"
+      return 2
+    fi
+    if ! file_two_md5=$(md5sum "$2" | cut -d " " -f 1 2>&1); then
+      log 2 "error getting md5 for '$2': $file_two_md5"
+      return 2
+    fi
   fi
-  if [[ $file_one_md5 == "$file_two_md5" ]]; then
+  if [[ "$file_one_md5" == "$file_two_md5" ]]; then
     return 0
   fi
   return 1
@@ -28,10 +28,32 @@ parse_objects_list_rest() {
     log 2 "error getting object list: $object_list"
     return 1
   fi
-  while read -r object; do
+  log 5 "object list: '$object_list'"
+  while IFS= read -r object; do
+    log 5 "parsed key: '$object'"
     object_array+=("$(echo -n "$object" | xmlstarlet unesc)")
   done <<< "$object_list"
   log 5 "object array: ${object_array[*]}"
   return 0
 }
 
+list_check_single_object() {
+  if ! check_param_count "list_check_single_object" "bucket, key" 2 $#; then
+    return 1
+  fi
+  if ! list_objects "rest" "$1"; then
+    log 2 "error listing objects"
+    return 1
+  fi
+  if [ ${#object_array[@]} -ne "1" ]; then
+    log 2 "expected one object, found ${#object_array[@]}"
+    return 1
+  fi
+  if [ "${object_array[0]}" != "$2" ]; then
+    log 2 "expected '$2', was '${object_array[0]}'"
+    return 1
+  fi
+  return 0
+}
+
 list_check_objects_v1() {
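For reference, a hedged sketch of the XML unescaping step above (hypothetical key): ListObjects returns keys with XML entities escaped, and xmlstarlet unesc converts them back before the parsed keys are compared against the original names.

$ xmlstarlet unesc 'a &amp;b &lt;c&gt;.txt'
a &b <c>.txt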
@@ -118,6 +118,7 @@ upload_check_parts() {
     log 2 "error checking part list before part upload"
     return 1
   fi
+  sleep 5
   parts_payload=""
   if ! upload_check_part "$1" "$2" "$upload_id" 1 "$3"; then
     log 2 "error uploading and checking first part"
@@ -14,6 +14,8 @@
 # specific language governing permissions and limitations
 # under the License.
 
+source ./tests/commands/put_object.sh
+
 multipart_upload_s3api_complete_from_bucket() {
   if ! check_param_count "multipart_upload_s3api_complete_from_bucket" "bucket, copy source, part count" 3 $#; then
     return 1
@@ -1,5 +1,21 @@
 #!/usr/bin/env bash
 
+# Copyright 2024 Versity Software
+# This file is licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
 source ./tests/util/util_multipart_before_completion.sh
 
 check_abort_access_denied() {
   if [ $# -ne 5 ]; then
     log 2 "'check_abort_access_denied' requires bucket, file, username, password"
@@ -48,52 +48,6 @@ source ./tests/commands/upload_part_copy.sh
 source ./tests/commands/upload_part.sh
 source ./tests/util/util_users.sh
 
-# param: bucket name
-# return 0 for success, 1 for failure
-list_and_delete_objects() {
-  log 6 "list_and_delete_objects"
-  if ! check_param_count "list_and_delete_objects" "bucket" 1 $#; then
-    return 1
-  fi
-  if ! list_objects 'rest' "$1"; then
-    log 2 "error getting object list"
-    return 1
-  fi
-  # shellcheck disable=SC2154
-  log 5 "objects: ${object_array[*]}"
-  for object in "${object_array[@]}"; do
-    if ! clear_object_in_bucket "$1" "$object"; then
-      log 2 "error deleting object $object"
-      return 1
-    fi
-  done
-
-  if ! delete_old_versions "$1"; then
-    log 2 "error deleting old version"
-    return 1
-  fi
-  return 0
-}
-
-check_object_lock_config() {
-  log 6 "check_object_lock_config"
-  if ! check_param_count "check_object_lock_config" "bucket" 1 $#; then
-    return 1
-  fi
-  lock_config_exists=true
-  if ! get_object_lock_configuration "rest" "$1"; then
-    # shellcheck disable=SC2154
-    if [[ "$get_object_lock_config_err" == *"does not exist"* ]]; then
-      # shellcheck disable=SC2034
-      lock_config_exists=false
-    else
-      log 2 "error getting object lock config"
-      return 1
-    fi
-  fi
-  return 0
-}
-
 # params: bucket, object name
 # return 0 for success, 1 for error
 clear_object_in_bucket() {
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
 
+source ./tests/drivers/drivers.sh
+
 setup_bucket_and_file() {
   if ! check_param_count "setup_bucket_and_file" "bucket, file name" 2 $#; then
     return 1
@@ -72,6 +72,7 @@ delete_object_version_with_or_without_retention() {
       return 1
     fi
   fi
+  log 5 "successfully deleted version with key '${version_keys[$idx]}', id '${version_ids[$idx]}'"
   return 0
 }