https://github.com/versity/versitygw.git
test: REST GetObjectAttributes, cleanup

@@ -74,36 +74,12 @@ put_object_rest() {
log 2 "'put_object_rest' requires local file, bucket name, key"
return 1
fi

generate_hash_for_payload_file "$1"

current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
aws_endpoint_url_address=${AWS_ENDPOINT_URL#*//}
header=$(echo "$AWS_ENDPOINT_URL" | awk -F: '{print $1}')
# shellcheck disable=SC2154
canonical_request="PUT
/$2/$3

host:$aws_endpoint_url_address
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time

host;x-amz-content-sha256;x-amz-date
$payload_hash"

if ! generate_sts_string "$current_date_time" "$canonical_request"; then
log 2 "error generating sts string"
if ! result=$(COMMAND_LOG="$COMMAND_LOG" DATA_FILE="$1" BUCKET_NAME="$2" OBJECT_KEY="$3" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/put_object.sh); then
log 2 "error sending object file: $result"
return 1
fi
get_signature
# shellcheck disable=SC2154
reply=$(send_command curl -ks -w "%{http_code}" -X PUT "$header://$aws_endpoint_url_address/$2/$3" \
-H "Authorization: AWS4-HMAC-SHA256 Credential=$AWS_ACCESS_KEY_ID/$ymd/$AWS_REGION/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature" \
-H "x-amz-content-sha256: $payload_hash" \
-H "x-amz-date: $current_date_time" \
-T "$1" -o "$TEST_FILE_FOLDER"/put_object_error.txt 2>&1)
if [[ "$reply" != "200" ]]; then
log 2 "put object command returned error: $(cat "$TEST_FILE_FOLDER"/put_object_error.txt)"
if [ "$result" != "200" ]; then
log 2 "expected response code of '200', was '$result' (output: $(cat "$TEST_FILE_FOLDER/result.txt")"
|
||||
return 1
fi
return 0

tests/rest_scripts/get_object_attributes.sh (Executable file, 53 lines)
@@ -0,0 +1,53 @@
#!/usr/bin/env bash

# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

source ./tests/rest_scripts/rest.sh
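# rest.sh is assumed to supply the shared signing helpers and variables used below
# (host, aws_access_key_id, aws_region, year_month_day, curl_command, and
# create_canonical_hash_sts_and_signature).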

# Fields

# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
# shellcheck disable=SC2154
key="$OBJECT_KEY"
# shellcheck disable=SC2153,SC2154
attributes="$ATTRIBUTES"

current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")

#x-amz-object-attributes:ETag
canonical_request="GET
/$bucket_name/$key
attributes=
host:$host
x-amz-content-sha256:UNSIGNED-PAYLOAD
x-amz-date:$current_date_time
x-amz-object-attributes:$attributes

host;x-amz-content-sha256;x-amz-date;x-amz-object-attributes
UNSIGNED-PAYLOAD"
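# The string above follows the SigV4 canonical-request layout: HTTP method, canonical URI,
# canonical query string, canonical headers (terminated by a blank line), the signed-header
# list, and the payload hash; UNSIGNED-PAYLOAD is the placeholder used when the body is not hashed.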

create_canonical_hash_sts_and_signature

curl_command+=(curl -ks -w "\"%{http_code}\"" "$AWS_ENDPOINT_URL/$bucket_name/$key?attributes="
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-object-attributes,Signature=$signature\""
-H "\"x-amz-content-sha256: UNSIGNED-PAYLOAD\""
-H "\"x-amz-date: $current_date_time\""
-H "\"x-amz-object-attributes: $attributes\""
-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

tests/rest_scripts/put_object.sh (Executable file, 72 lines)
@@ -0,0 +1,72 @@
#!/usr/bin/env bash

# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

source ./tests/rest_scripts/rest.sh

# Fields

# shellcheck disable=SC2153
data_file="$DATA_FILE"
# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
# shellcheck disable=SC2153
key="$OBJECT_KEY"
# shellcheck disable=SC2153,SC2154
checksum="$CHECKSUM"

current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
payload_hash="$(sha256sum "$data_file" | awk '{print $1}')"
checksum_hash="$(echo -n "$payload_hash" | xxd -r -p | base64)"
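# x-amz-content-sha256 carries the hex-encoded SHA-256 of the payload, while x-amz-checksum-sha256
# expects the same digest base64-encoded, so the hex digest is converted back to raw bytes
# (xxd -r -p) before base64 encoding.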

if [ "$CHECKSUM" == "true" ]; then
canonical_request="PUT
/$bucket_name/$key

host:$host
x-amz-checksum-sha256:$checksum_hash
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time

host;x-amz-checksum-sha256;x-amz-content-sha256;x-amz-date
$payload_hash"
else
canonical_request="PUT
/$bucket_name/$key

host:$host
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time

host;x-amz-content-sha256;x-amz-date
$payload_hash"
fi

create_canonical_hash_sts_and_signature

curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "$AWS_ENDPOINT_URL/$bucket_name/$key")
if [ "$CHECKSUM" == "true" ]; then
curl_command+=(-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-checksum-sha256,Signature=$signature\"")
else
curl_command+=(-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\"")
fi
curl_command+=(-H "\"x-amz-content-sha256: $payload_hash\""
-H "\"x-amz-date: $current_date_time\"")
if [ "$checksum" == "true" ]; then
curl_command+=(-H "\"x-amz-checksum-sha256: $checksum_hash\"")
fi
curl_command+=(-T "$data_file" -o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1
@@ -29,6 +29,7 @@ source ./tests/commands/put_object_tagging.sh
source ./tests/logger.sh
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_attributes.sh
source ./tests/util_legal_hold.sh
source ./tests/util_list_buckets.sh
source ./tests/util_list_objects.sh
@@ -315,3 +316,60 @@ source ./tests/util_versioning.sh
run compare_files "$TEST_FILE_FOLDER/$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
}

@test "REST - get object attributes" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/916"
fi
test_file="test_file"

run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success

run create_large_file "$test_file"
assert_success

# shellcheck disable=SC2034
file_size=$(stat -c %s "$TEST_FILE_FOLDER/$test_file" 2>/dev/null || stat -f %z "$TEST_FILE_FOLDER/$test_file" 2>/dev/null)

run split_file "$TEST_FILE_FOLDER/$test_file" 4
assert_success

run upload_and_check_attributes "$test_file" "$file_size"
assert_success
}

@test "REST - attributes - invalid param" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/917"
fi
test_file="test_file"

run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success

run create_test_file "$test_file"
assert_success

run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success

run check_attributes_invalid_param "$test_file"
assert_success
}

@test "REST - attributes - checksum" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/928"
fi
test_file="test_file"

run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success

run create_test_file "$test_file"
assert_success

run add_and_check_checksum "$TEST_FILE_FOLDER/$test_file" "$test_file"
assert_success
}

@@ -272,19 +272,8 @@ export RUN_USERS=true
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success

range_max=$((5*1024*1024-1))
multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4 "bytes=0-$range_max" || fail "upload failure"

get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$TEST_FILE_FOLDER/$bucket_file-copy" || fail "error retrieving object after upload"
if [[ $(uname) == 'Darwin' ]]; then
object_size=$(stat -f%z "$TEST_FILE_FOLDER/$bucket_file-copy")
else
object_size=$(stat --format=%s "$TEST_FILE_FOLDER/$bucket_file-copy")
fi
[[ object_size -eq $((range_max*4+4)) ]] || fail "object size mismatch ($object_size, $((range_max*4+4)))"

bucket_cleanup "aws" "$BUCKET_ONE_NAME"
delete_test_files $bucket_file
run run_and_verify_multipart_upload_with_valid_range "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file"
assert_success
}

@test "test-presigned-url-utf8-chars" {

tests/util_attributes.sh (Normal file, 113 lines)
@@ -0,0 +1,113 @@
#!/usr/bin/env bash

upload_and_check_attributes() {
if [ $# -ne 2 ]; then
log 2 "'upload_and_check_attributes' requires test file, file size"
return 1
fi
if ! perform_multipart_upload_rest "$BUCKET_ONE_NAME" "$1" "$TEST_FILE_FOLDER/$1-0" "$TEST_FILE_FOLDER/$1-1" \
"$TEST_FILE_FOLDER/$1-2" "$TEST_FILE_FOLDER/$1-3"; then
log 2 "error uploading and checking parts"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$BUCKET_ONE_NAME" OBJECT_KEY="$1" ATTRIBUTES="ETag,StorageClass,ObjectParts,ObjectSize" OUTPUT_FILE="$TEST_FILE_FOLDER/attributes.txt" ./tests/rest_scripts/get_object_attributes.sh); then
log 2 "error listing object attributes: $result"
return 1
fi
if ! check_attributes_after_upload "$2"; then
log 2 "error checking attributes after upload"
return 1
fi
}

check_attributes_after_upload() {
if [ $# -ne 1 ]; then
log 2 "'check_attributes_after_upload' requires file size"
return 1
fi
if ! object_size=$(xmllint --xpath '//*[local-name()="ObjectSize"]/text()' "$TEST_FILE_FOLDER/attributes.txt" 2>&1); then
log 2 "error getting checksum: $object_size"
|
||||
return 1
fi
# shellcheck disable=SC2154
if [ "$object_size" != "$1" ]; then
log 2 "expected file size of '$file_size', was '$object_size'"
return 1
fi
if ! error=$(xmllint --xpath '//*[local-name()="StorageClass"]/text()' "$TEST_FILE_FOLDER/attributes.txt" 2>&1); then
log 2 "error getting storage class: $error"
return 1
fi
if ! etag=$(xmllint --xpath '//*[local-name()="ETag"]/text()' "$TEST_FILE_FOLDER/attributes.txt" 2>&1); then
log 2 "error getting etag: $etag"
return 1
fi
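# A completed multipart upload reports an ETag of the form <32-hex-digit digest>-<part count>,
# so a four-part upload is expected to end in "-4".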
if ! [[ $etag =~ ^[a-fA-F0-9]{32}-4$ ]]; then
log 2 "unexpected etag pattern ($etag)"
return 1
fi
if ! parts_count=$(xmllint --xpath '//*[local-name()="PartsCount"]/text()' "$TEST_FILE_FOLDER/attributes.txt" 2>&1); then
log 2 "error getting parts_count: $parts_count"
return 1
fi
if [[ $parts_count != 4 ]]; then
log 2 "unexpected parts count, expected 4, was $parts_count"
return 1
fi
}

check_attributes_invalid_param() {
if [ "$1" -ne 1 ]; then
|
||||
log 2 "'check_attributes_invalid_param' requires test file"
|
||||
return 1
|
||||
fi
|
||||
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$BUCKET_ONE_NAME" OBJECT_KEY="$1" ATTRIBUTES="ETags" OUTPUT_FILE="$TEST_FILE_FOLDER/attributes.txt" ./tests/rest_scripts/get_object_attributes.sh); then
|
||||
log 2 "error listing object attributes: $result"
|
||||
return 1
|
||||
fi
|
||||
if [ "$result" != "400" ]; then
|
||||
log 2 "expected response code of '400', was '$result'"
|
||||
return 1
|
||||
fi
|
||||
log 5 "attributes: $(cat "$TEST_FILE_FOLDER/attributes.txt")"
|
||||
if ! code=$(xmllint --xpath '//*[local-name()="Code"]/text()' "$TEST_FILE_FOLDER/attributes.txt" 2>&1); then
|
||||
log 2 "error getting code: $code"
|
||||
return 1
|
||||
fi
|
||||
if [ "$code" != "InvalidArgument" ]; then
|
||||
log 2 "expected 'InvalidArgument', was '$code'"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
add_and_check_checksum() {
|
||||
if [ $# -ne 2 ]; then
|
||||
log 2 "'add_and_check_checksum' requires data file, key"
|
||||
return 1
|
||||
fi
|
||||
if ! result=$(COMMAND_LOG="$COMMAND_LOG" DATA_FILE="$1" BUCKET_NAME="$BUCKET_ONE_NAME" OBJECT_KEY="$2" CHECKSUM="true" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/put_object.sh); then
|
||||
log 2 "error sending object file: $result"
|
||||
return 1
|
||||
fi
|
||||
if [ "$result" != "200" ]; then
|
||||
log 2 "expected response code of '200', was '$result' (output: $(cat "$TEST_FILE_FOLDER/result.txt")"
|
||||
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$BUCKET_ONE_NAME" OBJECT_KEY="$2" ATTRIBUTES="Checksum" OUTPUT_FILE="$TEST_FILE_FOLDER/attributes.txt" ./tests/rest_scripts/get_object_attributes.sh); then
log 2 "error listing object attributes: $result (output: $(cat "$TEST_FILE_FOLDER/attributes.txt")"
|
||||
return 1
fi
if [ "$result" != "200" ]; then
log 2 "expected response code of '200', was '$result'"
return 1
fi
log 5 "attributes: $(cat "$TEST_FILE_FOLDER/attributes.txt")"
if ! checksum=$(xmllint --xpath '//*[local-name()="ChecksumSHA256"]/text()' "$TEST_FILE_FOLDER/attributes.txt" 2>&1); then
log 2 "error getting checksum: $checksum"
return 1
fi
if [ "$checksum" == "" ]; then
log 2 "empty checksum"
return 1
fi
}
@@ -51,6 +51,48 @@ check_part_list_rest() {
return 0
}

perform_multipart_upload_rest() {
if [ $# -ne 6 ]; then
log 2 "'upload_check_parts' requires bucket, key, part list"
|
||||
return 1
fi
if ! create_upload_and_get_id_rest "$1" "$2"; then
log 2 "error creating upload"
return 1
fi
# shellcheck disable=SC2154
if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 1 "$3"; then
log 2 "error uploading part 1"
return 1
fi
# shellcheck disable=SC2154
parts_payload="<Part><ETag>$etag</ETag><PartNumber>1</PartNumber></Part>"
if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 2 "$4"; then
log 2 "error uploading part 2"
return 1
fi
parts_payload+="<Part><ETag>$etag</ETag><PartNumber>2</PartNumber></Part>"
if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 3 "$5"; then
log 2 "error uploading part 3"
return 1
fi
parts_payload+="<Part><ETag>$etag</ETag><PartNumber>3</PartNumber></Part>"
if ! upload_part_and_get_etag_rest "$1" "$2" "$upload_id" 4 "$6"; then
log 2 "error uploading part 4"
return 1
fi
parts_payload+="<Part><ETag>$etag</ETag><PartNumber>4</PartNumber></Part>"
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$upload_id" PARTS="$parts_payload" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/complete_multipart_upload.sh); then
log 2 "error completing multipart upload: $result"
return 1
fi
if [ "$result" != "200" ]; then
log 2 "complete multipart upload returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
return 1
fi
return 0
}

upload_check_parts() {
if [ $# -ne 6 ]; then
log 2 "'upload_check_parts' requires bucket, key, part list"

@@ -265,35 +265,27 @@ multipart_upload_from_bucket_range() {
echo "multipart upload from bucket with range command requires bucket, copy source, key, part count, and range"
return 1
fi

split_file "$3" "$4" || local split_result=$?
if [[ $split_result -ne 0 ]]; then
echo "error splitting file"
if ! split_file "$3" "$4"; then
log 2 "error splitting file"
return 1
fi

for ((i=0;i<$4;i++)) {
echo "key: $3"
log 5 "file info: $(ls -l "$3"-"$i")"
put_object "s3api" "$3-$i" "$1" "$2-$i" || local copy_result=$?
if [[ $copy_result -ne 0 ]]; then
echo "error copying object"
log 5 "key: $3, file info: $(ls -l "$3"-"$i")"
if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
log 2 "error copying object"
return 1
fi
}

create_multipart_upload "$1" "$2-copy" || local create_multipart_result=$?
if [[ $create_multipart_result -ne 0 ]]; then
echo "error running first multpart upload"
|
||||
if ! create_multipart_upload "$1" "$2-copy"; then
|
||||
log 2 "error running first multpart upload"
|
||||
return 1
fi

parts="["
for ((i = 1; i <= $4; i++)); do
upload_part_copy_with_range "$1" "$2-copy" "$upload_id" "$2" "$i" "$5" || local upload_part_copy_result=$?
if [[ $upload_part_copy_result -ne 0 ]]; then
if ! upload_part_copy_with_range "$1" "$2-copy" "$upload_id" "$2" "$i" "$5"; then
# shellcheck disable=SC2154
echo "error uploading part $i: $upload_part_copy_error"
log 2 "error uploading part $i: $upload_part_copy_error"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
@@ -302,10 +294,8 @@ multipart_upload_from_bucket_range() {
fi
done
parts+="]"

error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
if [[ $completed -ne 0 ]]; then
echo "Error completing upload: $error"
if ! error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}'); then
log 2 "Error completing upload: $error"
return 1
fi
return 0
@@ -663,3 +653,29 @@ list_and_check_upload() {
fi
return 0
}

run_and_verify_multipart_upload_with_valid_range() {
if [ $# -ne 3 ]; then
log 2 "'run_and_verify_multipart_upload_with_valid_range' requires bucket, key, 5MB file"
return 1
fi
range_max=$((5*1024*1024-1))
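# "bytes=0-$range_max" copies range_max+1 = 5 MiB per part, so four parts should total range_max*4+4 bytes (20 MiB).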
if ! multipart_upload_from_bucket_range "$1" "$2" "$3" 4 "bytes=0-$range_max"; then
log 2 "error with multipart upload"
return 1
fi
if ! get_object "s3api" "$1" "$2-copy" "$3-copy"; then
log 2 "error getting object"
return 1
fi
if [[ $(uname) == 'Darwin' ]]; then
object_size=$(stat -f%z "$3-copy")
else
object_size=$(stat --format=%s "$3-copy")
fi
if [[ $object_size -ne $((range_max*4+4)) ]]; then
log 2 "object size mismatch ($object_size, $((range_max*4+4)))"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
@@ -90,9 +90,8 @@ put_user_policy_userplus() {
log 2 "unable to create test file folder"
return 1
fi
#"Resource": "arn:aws:s3:::${aws:username}-*"

cat <<EOF > "$TEST_FILE_FOLDER"/user_policy_file
{
"Version": "2012-10-17",
"Statement": [

@@ -25,18 +25,7 @@ start_versity_process() {
log 1 "error creating test log folder"
exit 1
fi
IFS=' ' read -r -a full_command <<< "${base_command[@]}"
log 5 "versity command: ${full_command[*]}"
if [ -n "$COMMAND_LOG" ]; then
mask_args "${full_command[*]}"
# shellcheck disable=SC2154
echo "${masked_args[@]}" >> "$COMMAND_LOG"
fi
if [ -n "$VERSITY_LOG_FILE" ]; then
"${full_command[@]}" >> "$VERSITY_LOG_FILE" 2>&1 &
else
"${full_command[@]}" 2>&1 &
fi
build_run_and_log_command
# shellcheck disable=SC2181
if [[ $? -ne 0 ]]; then
sleep 1
@@ -66,6 +55,21 @@ start_versity_process() {
export versitygw_pid_"$1"
}

build_run_and_log_command() {
IFS=' ' read -r -a full_command <<< "${base_command[@]}"
log 5 "versity command: ${full_command[*]}"
if [ -n "$COMMAND_LOG" ]; then
mask_args "${full_command[*]}"
# shellcheck disable=SC2154
echo "${masked_args[@]}" >> "$COMMAND_LOG"
fi
if [ -n "$VERSITY_LOG_FILE" ]; then
"${full_command[@]}" >> "$VERSITY_LOG_FILE" 2>&1 &
else
"${full_command[@]}" 2>&1 &
fi
}

run_versity_app_posix() {
if [[ $# -ne 3 ]]; then
log 1 "run versity app w/posix command requires access ID, secret key, process number"
@@ -147,14 +151,16 @@ run_versity_app() {
log 1 "unrecognized backend type $BACKEND"
exit 1
fi
if [[ $IAM_TYPE == "s3" ]]; then
if ! bucket_exists "s3api" "$USERS_BUCKET"; then
if ! create_bucket "s3api" "$USERS_BUCKET"; then
log 1 "error creating IAM bucket"
teardown
exit 1
fi
fi
if [[ $IAM_TYPE != "s3" ]]; then
return 0
fi
if bucket_exists "s3api" "$USERS_BUCKET"; then
return 0
fi
if ! create_bucket "s3api" "$USERS_BUCKET"; then
log 1 "error creating IAM bucket"
teardown
exit 1
fi
}