mirror of https://github.com/versity/versitygw.git (synced 2026-01-08 12:41:10 +00:00)

Merge pull request #403 from versity/test_cmdline_upload_part_copy
Test cmdline upload part copy

.github/workflows/system.yml (vendored, 21 lines changed)
@@ -29,14 +29,25 @@ jobs:
           git clone https://github.com/bats-core/bats-core.git
           cd bats-core && ./install.sh $HOME

-      - name: Build and Run
+      - name: Build
         run: |
           make testbin
           export AWS_ACCESS_KEY_ID=user
           export AWS_SECRET_ACCESS_KEY=pass
           aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile versity
           aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile versity
           export VERSITY_EXE=./versitygw
           mkdir /tmp/gw
-          VERSITYGW_TEST_ENV=$GITHUB_WORKSPACE/tests/.env.default $HOME/bin/bats ./tests/s3_bucket_tests.sh
-          VERSITYGW_TEST_ENV=$GITHUB_WORKSPACE/tests/.env.default $HOME/bin/bats ./tests/posix_tests.sh
+      - name: Run tests
+        run: |
+          export AWS_ACCESS_KEY_ID=user
+          export AWS_SECRET_ACCESS_KEY=pass
+          export WORKSPACE=$GITHUB_WORKSPACE
+          ./tests/run.sh
+
+      - name: Run tests with static buckets
+        run: |
+          export AWS_ACCESS_KEY_ID=user
+          export AWS_SECRET_ACCESS_KEY=pass
+          export WORKSPACE=$GITHUB_WORKSPACE
+          ./tests/run_static.sh
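
To mirror these workflow steps on a local checkout (a minimal sketch; it assumes a Go toolchain for make testbin, bats installed to $HOME/bin, and the aws CLI profile configured as in the Build step above):

    make testbin
    export AWS_ACCESS_KEY_ID=user AWS_SECRET_ACCESS_KEY=pass
    export WORKSPACE=$PWD        # the workflow uses $GITHUB_WORKSPACE here
    ./tests/run.sh               # default suites
    ./tests/run_static.sh        # static-bucket suites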

tests/.env.static (new file, 9 lines)
@@ -0,0 +1,9 @@
+AWS_REGION=us-east-1
+AWS_PROFILE=versity
+VERSITY_EXE=./versitygw
+BACKEND=posix
+LOCAL_FOLDER=/tmp/gw
+AWS_ENDPOINT_URL=http://127.0.0.1:7070
+BUCKET_ONE_NAME=versity-gwtest-bucket-one-static
+BUCKET_TWO_NAME=versity-gwtest-bucket-two-static
+RECREATE_BUCKETS=false

tests/run.sh (new executable file, 4 lines)
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+VERSITYGW_TEST_ENV=$WORKSPACE/tests/.env.default "$HOME"/bin/bats ./tests/s3_bucket_tests.sh
+VERSITYGW_TEST_ENV=$WORKSPACE/tests/.env.default "$HOME"/bin/bats ./tests/posix_tests.sh

tests/run_static.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+export VERSITYGW_TEST_ENV=$WORKSPACE/tests/.env.static
+# shellcheck source=./.env.static
+source "$VERSITYGW_TEST_ENV"
+export AWS_PROFILE AWS_REGION BUCKET_ONE_NAME BUCKET_TWO_NAME AWS_ENDPOINT_URL
+aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID"
+aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY"
+./tests/setup_static.sh
+"$HOME"/bin/bats ./tests/s3_bucket_tests.sh
+"$HOME"/bin/bats ./tests/posix_tests.sh
+./tests/teardown_static.sh
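
The bare export list after the source matters: sourcing .env.static sets the variables only in the current shell, while exporting them makes them visible to child processes such as bats and the aws CLI. A small illustration (assuming a recent aws CLI, which also honors AWS_ENDPOINT_URL from the environment):

    source tests/.env.static    # variables exist in this shell only
    export AWS_PROFILE AWS_REGION AWS_ENDPOINT_URL
    aws s3 ls                   # the CLI child process now inherits profile, region, and endpoint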

(The next four hunks belong to a bats test file whose per-file header did not survive the mirror; each carries the context line "source ./tests/util.sh".)

@@ -123,6 +123,28 @@ source ./tests/util.sh
   delete_bucket_or_contents "$BUCKET_ONE_NAME"
 }

+# test ability to retrieve object ACLs
+#@test "test_get_object_acl" {
+
+#  object_one="test-file-one"
+
+#  setup_bucket "$BUCKET_ONE_NAME" || local created=$?
+#  [[ $created -eq 0 ]] || fail "Error creating bucket"
+#  create_test_files "$object_one" || local created=$?
+#  [[ $created -eq 0 ]] || fail "Error creating test file"
+#  put_object "$test_file_folder"/$object_one "$BUCKET_ONE_NAME"/"$object_one" || local result=$?
+#  [[ result -eq 0 ]] || fail "Error adding object one"
+
+#  get_object_acl "$BUCKET_ONE_NAME" "$object_one" || local result=$?
+#  [[ $result -eq 0 ]] || fail "Error retrieving acl"
+
+#  id=$(echo "$acl" | jq '.Owner.ID')
+#  [[ $id == '"'"$AWS_ACCESS_KEY_ID"'"' ]] || fail "Acl mismatch"
+
+#  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+#}
+
 # test ability to delete multiple objects from bucket
 @test "test_delete_objects" {
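
The odd-looking comparison in the commented-out check ('"'"$AWS_ACCESS_KEY_ID"'"') exists because jq without -r prints JSON strings with their surrounding quotes, so the expected value has to be wrapped in literal quote characters:

    acl='{"Owner":{"ID":"user"}}'
    echo "$acl" | jq '.Owner.ID'      # prints "user", quotes included
    echo "$acl" | jq -r '.Owner.ID'   # prints user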

@@ -363,6 +385,7 @@ source ./tests/util.sh
   fi
 }

+  run_abort_command "$BUCKET_ONE_NAME" "$bucket_file" $upload_id
   delete_bucket_or_contents "$BUCKET_ONE_NAME"
   delete_test_files $bucket_file
 }

@@ -383,14 +406,11 @@ source ./tests/util.sh

   local key_one
   local key_two
   echo $uploads
   key_one=$(echo "$uploads" | jq '.Uploads[0].Key')
   key_two=$(echo "$uploads" | jq '.Uploads[1].Key')
   key_one=${key_one//\"/}
   key_two=${key_two//\"/}
-  echo "$test_file_folder/${bucket_file_one}abc"
-  echo "${key_one}abc"
-  echo "Length of test_file_folder/bucket_file_one: ${#test_file_folder}/${#bucket_file_one}"
-  echo "Length of key_one: ${#key_one}"
   if [[ "$test_file_folder/$bucket_file_one" != *"$key_one" ]]; then
     fail "Key mismatch ($test_file_folder/$bucket_file_one, $key_one)"
   fi

@@ -401,3 +421,24 @@ source ./tests/util.sh
   delete_bucket_or_contents "$BUCKET_ONE_NAME"
   delete_test_files "$bucket_file_one" "$bucket_file_two"
 }
+
+@test "test-multipart-upload-from-bucket" {
+  local bucket_file="bucket-file"
+  bucket_file_data="test file\n"
+
+  create_test_files "$bucket_file" || local created=$?
+  printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file
+  [[ $created -eq 0 ]] || fail "Error creating test files"
+  setup_bucket "$BUCKET_ONE_NAME" || local result=$?
+  [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
+
+  multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || upload_result=$?
+  [[ $upload_result -eq 0 ]] || fail "Error performing multipart upload"
+
+  copy_file "s3://$BUCKET_ONE_NAME/$bucket_file-copy" "$test_file_folder/$bucket_file-copy"
+  copy_data=$(<"$test_file_folder"/$bucket_file-copy)
+  [[ $bucket_file_data == "$copy_data" ]] || fail "Data doesn't match"
+
+  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_test_files $bucket_file
+}
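
copy_file is a repo helper; judging from its arguments it wraps a plain CLI copy, so the download-and-compare step is roughly equivalent to (an assumption, not the helper's actual body):

    aws s3 cp "s3://$BUCKET_ONE_NAME/$bucket_file-copy" "$test_file_folder/$bucket_file-copy"
    diff <(printf "%s" "$bucket_file_data") "$test_file_folder/$bucket_file-copy"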

tests/setup_static.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+source ./tests/setup.sh
+setup
+aws s3 mb s3://"$BUCKET_ONE_NAME"
+aws s3 mb s3://"$BUCKET_TWO_NAME"
+teardown

tests/teardown_static.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+source ./tests/setup.sh
+setup
+aws s3 rb s3://"$BUCKET_ONE_NAME"
+aws s3 rb s3://"$BUCKET_TWO_NAME"
+teardown
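
Both scripts lean on setup/teardown from tests/setup.sh (presumably starting and stopping the gateway around the bucket calls). To pre-create the static buckets by hand against an already-running gateway instead, something like this should be equivalent (values taken from tests/.env.static):

    export AWS_PROFILE=versity
    export AWS_ENDPOINT_URL=http://127.0.0.1:7070
    aws s3 mb s3://versity-gwtest-bucket-one-static
    aws s3 mb s3://versity-gwtest-bucket-two-static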

tests/util.sh (127 lines changed)
@@ -78,7 +78,7 @@ delete_bucket_contents() {
   local error
   error=$(aws s3 rm s3://"$1" --recursive 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
-    echo "error deleting bucket: $error"
+    echo "error deleting bucket contents: $error"
     return 1
   fi
   return 0

@@ -93,6 +93,7 @@ bucket_exists() {
     return 2
   fi

+  echo "checking bucket $1"
   local exit_code=0
   local error
   error=$(aws s3 ls s3://"$1" 2>&1) || exit_code="$?"

@@ -116,7 +117,7 @@ delete_bucket_or_contents() {
     echo "delete bucket or contents function requires bucket name"
     return 1
   fi
-  if [[ $RECREATE_BUCKETS != "true" ]]; then
+  if [[ $RECREATE_BUCKETS == "false" ]]; then
     delete_bucket_contents "$1" || local delete_result=$?
     if [[ $delete_result -ne 0 ]]; then
       echo "error deleting bucket contents"

@@ -141,6 +142,7 @@ setup_bucket() {
     echo "bucket creation function requires bucket name"
     return 1
   fi
+  echo "$1"
   local exists_result
   bucket_exists "$1" || exists_result=$?
   if [[ $exists_result -eq 2 ]]; then

@@ -153,8 +155,11 @@ setup_bucket() {
       echo "error deleting bucket or contents"
       return 1
     fi
+    if [[ $RECREATE_BUCKETS == "false" ]]; then
+      return 0
+    fi
   fi
-  if [[ $RECREATE_BUCKETS != "true" ]]; then
+  if [[ $exists_result -eq 1 ]] && [[ $RECREATE_BUCKETS == "false" ]]; then
     echo "When RECREATE_BUCKETS isn't set to \"true\", buckets should be pre-created by user"
     return 1
   fi
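
Taken together, the two setup_bucket hunks implement this policy (a distilled sketch, not the verbatim function; bucket_exists is assumed to return 1 when the bucket is missing):

    setup_bucket_sketch() {
      local name=$1 exists=0
      bucket_exists "$name" || exists=$?
      if [[ $exists -eq 0 ]]; then
        delete_bucket_or_contents "$name" || return 1   # static mode only empties the bucket
        [[ $RECREATE_BUCKETS == "false" ]] && return 0  # pre-created bucket is ready to use
      elif [[ $RECREATE_BUCKETS == "false" ]]; then
        echo "When RECREATE_BUCKETS isn't set to \"true\", buckets should be pre-created by user"
        return 1
      fi
      aws s3 mb s3://"$name"   # recreate mode: build the bucket fresh
    }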

@@ -354,6 +359,23 @@ get_bucket_acl() {
   export acl
 }

+# get object acl
+# params: bucket name, object key
+# export acl for success, return 1 for error
+get_object_acl() {
+  if [ $# -ne 2 ]; then
+    echo "object ACL command missing bucket and/or object name"
+    return 1
+  fi
+  local exit_code=0
+  acl=$(aws s3api get-object-acl --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
+  if [ $exit_code -ne 0 ]; then
+    echo "Error getting object ACLs: $acl"
+    return 1
+  fi
+  export acl
+}
+
 # add tags to bucket
 # params: bucket, key, value
 # return: 0 for success, 1 for error
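
For reference, the same check can be run directly against the gateway (bucket and key here are illustrative):

    aws s3api get-object-acl --bucket versity-gwtest-bucket-one-static --key test-file-one | jq '.Owner.ID'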

@@ -545,23 +567,14 @@ upload_part() {
 # params: bucket, key, file to split and upload, number of file parts to upload
 # return: 0 for success, 1 for failure
 multipart_upload_before_completion() {

   if [ $# -ne 4 ]; then
     echo "multipart upload pre-completion command missing bucket, key, file, and/or part count"
     return 1
   fi
-
-  file_size=$(stat -c %s "$3" 2>/dev/null || stat -f %z "$3" 2>/dev/null)
-  part_size=$((file_size / $4))
-  remainder=$((file_size % $4))
-  if [[ remainder -ne 0 ]]; then
-    part_size=$((part_size+1))
-  fi
-  local error
-  local split_result
-  error=$(split -a 1 -d -b "$part_size" "$3" "$3"-) || split_result=$?
+  split_file "$3" "$4" || split_result=$?
   if [[ $split_result -ne 0 ]]; then
-    echo "error splitting file: $error"
+    echo "error splitting file"
     return 1
   fi

@@ -592,7 +605,6 @@ multipart_upload_before_completion() {
 # params: bucket, key, source file location, number of parts
 # return 0 for success, 1 for failure
 multipart_upload() {
-
   if [ $# -ne 4 ]; then
     echo "multipart upload command missing bucket, key, file, and/or part count"
     return 1

@@ -718,3 +730,88 @@ list_multipart_uploads() {
   export uploads
 }

+# perform a multi-part upload within bucket
+# params: bucket, key, file, number of parts
+# return 0 for success, 1 for failure
+multipart_upload_from_bucket() {
+  if [ $# -ne 4 ]; then
+    echo "multipart upload from bucket command missing bucket, copy source, key, and/or part count"
+    return 1
+  fi
+
+  split_file "$3" "$4" || split_result=$?
+  if [[ $split_result -ne 0 ]]; then
+    echo "error splitting file"
+    return 1
+  fi
+
+  for ((i=0;i<$4;i++)) {
+    put_object "$3"-"$i" "$1" || put_result=$?
+    if [[ $put_result -ne 0 ]]; then
+      echo "error putting object"
+      return 1
+    fi
+  }
+
+  create_multipart_upload "$1" "$2-copy" || upload_result=$?
+  if [[ $upload_result -ne 0 ]]; then
+    echo "error running first multipart upload"
+    return 1
+  fi
+
+  parts="["
+  for ((i = 1; i <= $4; i++)); do
+    upload_part_copy "$1" "$2-copy" "$upload_id" "$2" "$i" || local upload_result=$?
+    if [[ $upload_result -ne 0 ]]; then
+      echo "error uploading part $i"
+      return 1
+    fi
+    parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
+    if [[ $i -ne $4 ]]; then
+      parts+=","
+    fi
+  done
+  parts+="]"
+
+  error=$(aws s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
+  if [[ $completed -ne 0 ]]; then
+    echo "Error completing upload: $error"
+    return 1
+  fi
+  return 0
+
+  parts+="]"
+}
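A hypothetical interactive run of the new helper, assuming a gateway is listening and tests/util.sh has been sourced (names are illustrative):

    source tests/util.sh
    multipart_upload_from_bucket my-bucket my-key /tmp/my-file 4 \
      && echo "multipart copy completed as my-key-copy"

The helper first uploads the split chunks as ordinary objects, then drives the multipart upload with upload-part-copy against them, so the part data is copied server-side rather than re-sent from the client.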
+
+upload_part_copy() {
+  if [ $# -ne 5 ]; then
+    echo "upload multipart part copy function must have bucket, key, upload ID, file name, part number"
+    return 1
+  fi
+  local etag_json
+  etag_json=$(aws s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
+  if [[ $uploaded -ne 0 ]]; then
+    echo "Error uploading part $5: $etag_json"
+    return 1
+  fi
+  etag=$(echo "$etag_json" | jq '.CopyPartResult.ETag')
+  export etag
+}
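The --copy-source argument maps the 1-based part number to the 0-based suffixes that split -d produces: part 1 copies "$4-0", part 2 copies "$4-1", and so on. Spelled out with illustrative names:

    aws s3api upload-part-copy --bucket my-bucket --key my-key-copy \
      --upload-id "$upload_id" --part-number 2 --copy-source "my-bucket/my-file-1"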
+
+split_file() {
+  file_size=$(stat -c %s "$1" 2>/dev/null || stat -f %z "$1" 2>/dev/null)
+  part_size=$((file_size / $2))
+  remainder=$((file_size % $2))
+  if [[ remainder -ne 0 ]]; then
+    part_size=$((part_size+1))
+  fi
+
+  local error
+  local split_result
+  error=$(split -a 1 -d -b "$part_size" "$1" "$1"-) || split_result=$?
+  if [[ $split_result -ne 0 ]]; then
+    echo "error splitting file: $error"
+    return 1
+  fi
+  return 0
+}
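
A worked example of the ceiling-division sizing (hypothetical file, run with tests/util.sh sourced): a 10-byte file split 4 ways gives part_size 10/4 = 2 with remainder 2, bumped to 3, so split emits chunks of 3, 3, 3, and 1 bytes:

    printf '0123456789' > /tmp/my-file
    split_file /tmp/my-file 4
    ls -l /tmp/my-file-*    # my-file-0 .. my-file-3

The stat fallback ("-c %s" then "-f %z") keeps the helper portable between GNU/Linux and BSD/macOS variants of stat.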