diff --git a/.github/workflows/system.yml b/.github/workflows/system.yml
index cffc991..18005dc 100644
--- a/.github/workflows/system.yml
+++ b/.github/workflows/system.yml
@@ -40,4 +40,6 @@ jobs:
           aws configure set aws_region $AWS_REGION --profile versity
           mkdir /tmp/gw
           export WORKSPACE=$GITHUB_WORKSPACE
+          openssl genpkey -algorithm RSA -out versitygw.pem -pkeyopt rsa_keygen_bits:2048
+          openssl req -new -x509 -key versitygw.pem -out cert.pem -days 365 -subj "/C=US/ST=California/L=San Francisco/O=Versity/OU=Software/CN=versity.com"
           ./tests/run_all.sh
diff --git a/Dockerfile_test_bats b/Dockerfile_test_bats
index f6c947d..8a439d7 100644
--- a/Dockerfile_test_bats
+++ b/Dockerfile_test_bats
@@ -41,9 +41,10 @@ RUN git clone https://github.com/bats-core/bats-core.git && \
     ./install.sh /home/tester
 
 USER tester
-COPY . /home/tester
+COPY --chown=tester:tester . /home/tester
 WORKDIR /home/tester
+RUN cp tests/.env.docker tests/.env
 RUN make
 
 RUN . tests/.secrets && \
@@ -54,6 +55,10 @@ RUN . tests/.secrets && \
 
 RUN mkdir /tmp/gw
 
+RUN openssl genpkey -algorithm RSA -out versitygw-docker.pem -pkeyopt rsa_keygen_bits:2048 && \
+    openssl req -new -x509 -key versitygw-docker.pem -out cert-docker.pem -days 365 \
+    -subj "/C=US/ST=California/L=San Francisco/O=Versity/OU=Software/CN=versity.com"
+
 ENV WORKSPACE=.
 
 CMD ["tests/run_all.sh"]
\ No newline at end of file
diff --git a/tests/.env.default b/tests/.env.default
index eab33de..2a76142 100644
--- a/tests/.env.default
+++ b/tests/.env.default
@@ -1,8 +1,11 @@
 AWS_PROFILE=versity
-AWS_ENDPOINT_URL=http://127.0.0.1:7070
+AWS_ENDPOINT_URL=https://127.0.0.1:7070
 VERSITY_EXE=./versitygw
 BACKEND=posix
 LOCAL_FOLDER=/tmp/gw
 BUCKET_ONE_NAME=versity-gwtest-bucket-one
 BUCKET_TWO_NAME=versity-gwtest-bucket-two
-RECREATE_BUCKETS=true
\ No newline at end of file
+RECREATE_BUCKETS=true
+CERT=$PWD/cert.pem
+KEY=$PWD/versitygw.pem
+S3CMD_CONFIG=./s3cfg.local
\ No newline at end of file
diff --git a/tests/README.md b/tests/README.md
index aa0897c..9f3c586 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -17,7 +17,13 @@ aws configure set aws_region $AWS_REGION --profile $AWS_PROFILE
 ```
 6. Create an environment file (`.env`) similar to the ones in this folder, setting the `AWS_PROFILE` parameter to the name of the profile you created.
-7. In the root repo folder, run with `VERSITYGW_TEST_ENV= tests/run_all.sh`.
+7. If using SSL, create a local private key and certificate, such as with the commands below. Afterwards, set the `KEY` and `CERT` fields in the `.env` file to the key and certificate paths, respectively.
+```
+   openssl genpkey -algorithm RSA -out versitygw.pem -pkeyopt rsa_keygen_bits:2048
+   openssl req -new -x509 -key versitygw.pem -out cert.pem -days 365
+```
+8. Set `BUCKET_ONE_NAME` and `BUCKET_TWO_NAME` to the desired names of your buckets. If you don't want them to be created each time, set `RECREATE_BUCKETS` to `false`.
+9. In the root repo folder, run with `VERSITYGW_TEST_ENV= tests/run_all.sh`.
 
 ## Instructions - Running With Docker
diff --git a/tests/posix_tests.sh b/tests/posix_tests.sh
index 71339ab..61e2887 100755
--- a/tests/posix_tests.sh
+++ b/tests/posix_tests.sh
@@ -2,6 +2,7 @@
 
 source ./tests/setup.sh
 source ./tests/util.sh
+source ./tests/util_file.sh
 source ./tests/util_posix.sh
 
 # test that changes to local folders and files are reflected on S3
@@ -47,7 +48,7 @@
   if [ -e "$LOCAL_FOLDER"/"$bucket_name"/$object_name ]; then
     chmod 755 "$LOCAL_FOLDER"/"$bucket_name"/$object_name
   fi
-  setup_bucket "$bucket_name" || local created=$?
+  setup_bucket "aws" "$bucket_name" || local created=$?
   [[ $created -eq 0 ]] || fail "Error creating bucket"
   put_object "$test_file_folder"/"$object_name" "$bucket_name"/"$object_name" || local result="$?"
   [[ result -eq 0 ]] || fail "Error adding object one"
@@ -63,7 +64,7 @@
   [[ $accessible_two -eq 0 ]] || fail "Object should be accessible"
 
   delete_object "$bucket_name"/$object_name
-  delete_bucket_or_contents "$bucket_name"
+  delete_bucket_or_contents "aws" "$bucket_name"
 
   delete_test_files $object_name
 }
@@ -74,7 +75,7 @@
     chmod 755 "$LOCAL_FOLDER"/"$BUCKET_ONE_NAME"
     sleep 1
   else
-    setup_bucket "$BUCKET_ONE_NAME" || local created=$?
+    setup_bucket "aws" "$BUCKET_ONE_NAME" || local created=$?
     [[ $created -eq 0 ]] || fail "Error creating bucket"
   fi
@@ -88,5 +89,5 @@
   bucket_is_accessible "$BUCKET_ONE_NAME" || local accessible_two=$?
   [[ $accessible_two -eq 0 ]] || fail "Bucket should be accessible"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
 }
diff --git a/tests/s3_bucket_tests.sh b/tests/s3_bucket_tests.sh
index 9214a5a..4a80f92 100755
--- a/tests/s3_bucket_tests.sh
+++ b/tests/s3_bucket_tests.sh
@@ -2,30 +2,19 @@
 
 source ./tests/setup.sh
 source ./tests/util.sh
+source ./tests/util_file.sh
+source ./tests/test_common.sh
 
 # test creation and deletion of bucket on versitygw
-@test "test_create_delete_bucket" {
-
-  if [[ $RECREATE_BUCKETS != "true" ]]; then
-    return
-  fi
-
-  setup_bucket "$BUCKET_ONE_NAME" || local create_result=$?
-  [[ $create_result -eq 0 ]] || fail "Failed to create bucket"
-
-  bucket_exists "$BUCKET_ONE_NAME" || local exists_three=$?
-  [[ $exists_three -eq 0 ]] || fail "Failed bucket existence check"
-
-  delete_bucket_or_contents "$BUCKET_ONE_NAME" || local delete_result_two=$?
-  [[ $delete_result_two -eq 0 ]] || fail "Failed to delete bucket"
+@test "test_create_delete_bucket_aws" {
+  test_common_create_delete_bucket "aws"
 }
 
 # test adding and removing an object on versitygw
 @test "test_put_object" {
-
   local object_name="test-object"
-  setup_bucket "$BUCKET_ONE_NAME" || local setup_result=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
   [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
 
   create_test_files "$object_name" || local create_result=$?
@@ -41,38 +30,13 @@
   object_exists "$object" || local exists_result_two=$?
   [[ $exists_result_two -eq 1 ]] || fail "Object not removed from bucket"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files "$object_name"
 }
 
 # test listing buckets on versitygw
 @test "test_list_buckets" {
-
-  setup_bucket "$BUCKET_ONE_NAME" || local setup_result_one=$?
-  [[ $setup_result_one -eq 0 ]] || fail "Bucket one setup error"
-  setup_bucket "$BUCKET_TWO_NAME" || local setup_result_two=$?
-  [[ $setup_result_two -eq 0 ]] || fail "Bucket two setup error"
-
-  list_buckets
-  local bucket_one_found=false
-  local bucket_two_found=false
-  for bucket in "${bucket_array[@]}"; do
-    if [ "$bucket" == "$BUCKET_ONE_NAME" ]; then
-      bucket_one_found=true
-    elif [ "$bucket" == "$BUCKET_TWO_NAME" ]; then
-      bucket_two_found=true
-    fi
-    if [ $bucket_one_found == true ] && [ $bucket_two_found == true ]; then
-      break
-    fi
-  done
-
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
-  delete_bucket_or_contents "$BUCKET_TWO_NAME"
-
-  if [ $bucket_one_found != true ] || [ $bucket_two_found != true ]; then
-    fail "'$BUCKET_ONE_NAME' and/or '$BUCKET_TWO_NAME' not listed (all buckets: ${bucket_array[*]})"
-  fi
+  test_common_list_buckets "aws"
 }
 
 # test listing a bucket's objects on versitygw
@@ -82,7 +46,7 @@
   object_two="test-file-two"
 
   create_test_files $object_one $object_two
-  setup_bucket "$BUCKET_ONE_NAME" || local result_one=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result_one=$?
   [[ result_one -eq 0 ]] || fail "Error creating bucket"
   put_object "$test_file_folder"/$object_one "$BUCKET_ONE_NAME"/"$object_one" || local result_two=$?
   [[ result_two -eq 0 ]] || fail "Error adding object one"
@@ -100,7 +64,7 @@
     fi
   done
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files $object_one $object_two
 
   if [ $object_one_found != true ] || [ $object_two_found != true ]; then
@@ -111,16 +75,16 @@
 
 # test ability to retrieve bucket ACLs
 @test "test_get_bucket_acl" {
-  setup_bucket "$BUCKET_ONE_NAME" || local created=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local created=$?
   [[ $created -eq 0 ]] || fail "Error creating bucket"
 
   get_bucket_acl "$BUCKET_ONE_NAME" || local result=$?
   [[ $result -eq 0 ]] || fail "Error retrieving acl"
 
-  id=$(echo "$acl" | jq '.Owner.ID')
+  id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq '.Owner.ID')
   [[ $id == '"'"$AWS_ACCESS_KEY_ID"'"' ]] || fail "Acl mismatch"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
 }
 
 # test ability to retrieve object ACLs
@@ -153,7 +117,7 @@
   create_test_files "$object_one" "$object_two" || local created=$?
   [[ $created -eq 0 ]] || fail "Error creating test files"
 
-  setup_bucket "$BUCKET_ONE_NAME" || local result_one=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result_one=$?
   [[ $result_one -eq 0 ]] || fail "Error creating bucket"
 
   put_object "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME"/"$object_one" || local result_two=$?
@@ -161,7 +125,7 @@
   put_object "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME"/"$object_two" || local result_three=$?
   [[ $result_three -eq 0 ]] || fail "Error adding object two"
-  error=$(aws s3api delete-objects --bucket "$BUCKET_ONE_NAME" --delete '{
+  error=$(aws --no-verify-ssl s3api delete-objects --bucket "$BUCKET_ONE_NAME" --delete '{
     "Objects": [
       {"Key": "test-file-one"},
       {"Key": "test-file-two"}
     ]
@@ -174,7 +138,7 @@
   object_exists "$BUCKET_ONE_NAME"/"$object_two" || local exists_two=$?
   [[ $exists_two -eq 1 ]] || fail "Object two not deleted"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files "$object_one" "$object_two"
 }
 
@@ -184,7 +148,7 @@
   local key="test_key"
   local value="test_value"
 
-  setup_bucket "$BUCKET_ONE_NAME" || local result=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
   [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
 
   get_bucket_tags "$BUCKET_ONE_NAME" || local get_result=$?
@@ -200,7 +164,7 @@
   [[ $tag_set_key == '"'$key'"' ]] || fail "Key mismatch"
   [[ $tag_set_value == '"'$value'"' ]] || fail "Value mismatch"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
 }
 
 # test v1 s3api list objects command
@@ -213,7 +177,7 @@
   create_test_files "$object_one" "$object_two" || local created=$?
   [[ $created -eq 0 ]] || fail "Error creating test files"
   printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two"
-  setup_bucket "$BUCKET_ONE_NAME" || local result=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
   [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
   put_object "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME"/"$object_one" || local put_object_one=$?
   [[ $put_object_one -eq 0 ]] || fail "Failed to add object $object_one"
@@ -230,7 +194,7 @@
   size_two=$(echo "$objects" | jq '.Contents[1].Size')
   [[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files "$object_one" "$object_two"
 }
 
@@ -244,7 +208,7 @@
   create_test_files "$object_one" "$object_two" || local created=$?
   [[ $created -eq 0 ]] || fail "Error creating test files"
   printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two"
-  setup_bucket "$BUCKET_ONE_NAME" || local result=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
   [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
   put_object "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME"/"$object_one" || local put_object_one=$?
   [[ $put_object_one -eq 0 ]] || fail "Failed to add object $object_one"
@@ -261,7 +225,7 @@
   size_two=$(echo "$objects" | jq '.Contents[1].Size')
   [[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files "$object_one" "$object_two"
 }
 
@@ -274,7 +238,7 @@
   create_test_files "$bucket_file" || local created=$?
   [[ $created -eq 0 ]] || fail "Error creating test files"
 
-  setup_bucket "$BUCKET_ONE_NAME" || local result=$?
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
   [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
   local object_path="$BUCKET_ONE_NAME"/"$bucket_file"
   put_object "$test_file_folder"/"$bucket_file" "$object_path" || local put_object=$?
@@ -293,7 +257,7 @@
   [[ $tag_set_key == '"'$key'"' ]] || fail "Key mismatch"
   [[ $tag_set_value == '"'$value'"' ]] || fail "Value mismatch"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files $bucket_file
 }
 
@@ -306,7 +270,7 @@
   create_test_files "$bucket_file" || local created=$?
   printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file
   [[ $created -eq 0 ]] || fail "Error creating test files"
-  setup_bucket "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || upload_result=$? @@ -316,7 +280,7 @@ source ./tests/util.sh compare_files "$test_file_folder/$bucket_file-copy" "$test_file_folder"/$bucket_file || compare_result=$? [[ $compare_result -eq 0 ]] || fail "Files do not match" - delete_bucket_or_contents "$BUCKET_ONE_NAME" + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" delete_test_files $bucket_file } @@ -329,7 +293,7 @@ source ./tests/util.sh create_test_files "$bucket_file" || local created=$? printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file [[ $created -eq 0 ]] || fail "Error creating test files" - setup_bucket "$BUCKET_ONE_NAME" || local result=$? + setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || abort_result=$? @@ -338,7 +302,7 @@ source ./tests/util.sh object_exists "$BUCKET_ONE_NAME/$bucket_file" || exists=$? [[ $exists -eq 1 ]] || fail "Upload file exists after abort" - delete_bucket_or_contents "$BUCKET_ONE_NAME" + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" delete_test_files $bucket_file } @@ -351,7 +315,7 @@ source ./tests/util.sh create_test_files "$bucket_file" || local created=$? [[ $created -eq 0 ]] || fail "Error creating test files" printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file - setup_bucket "$BUCKET_ONE_NAME" || local result=$? + setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" list_parts "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || list_result=$? @@ -386,7 +350,7 @@ source ./tests/util.sh } run_abort_command "$BUCKET_ONE_NAME" "$bucket_file" $upload_id - delete_bucket_or_contents "$BUCKET_ONE_NAME" + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" delete_test_files $bucket_file } @@ -398,7 +362,7 @@ source ./tests/util.sh create_test_files "$bucket_file_one" "$bucket_file_two" || local created=$? [[ $created -eq 0 ]] || fail "Error creating test files" - setup_bucket "$BUCKET_ONE_NAME" || local result=$? + setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" list_multipart_uploads "$BUCKET_ONE_NAME" "$test_file_folder"/"$bucket_file_one" "$test_file_folder"/"$bucket_file_two" @@ -418,7 +382,7 @@ source ./tests/util.sh fail "Key mismatch ($test_file_folder/$bucket_file_two, $key_two)" fi - delete_bucket_or_contents "$BUCKET_ONE_NAME" + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" delete_test_files "$bucket_file_one" "$bucket_file_two" } @@ -429,7 +393,7 @@ source ./tests/util.sh create_test_files "$bucket_file" || local created=$? printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file [[ $created -eq 0 ]] || fail "Error creating test files" - setup_bucket "$BUCKET_ONE_NAME" || local result=$? + setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || upload_result=$? @@ -439,6 +403,6 @@ source ./tests/util.sh compare_files "$test_file_folder"/$bucket_file-copy "$test_file_folder"/$bucket_file || compare_result=$? 
   [[ $compare_result -eq 0 ]] || fail "Data doesn't match"
 
-  delete_bucket_or_contents "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files $bucket_file
 }
diff --git a/tests/s3cmd_tests.sh b/tests/s3cmd_tests.sh
new file mode 100755
index 0000000..cd6c70e
--- /dev/null
+++ b/tests/s3cmd_tests.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bats
+
+source ./tests/setup.sh
+source ./tests/test_common.sh
+source ./tests/util.sh
+
+# test s3cmd bucket creation/deletion
+@test "test_create_delete_bucket_s3cmd" {
+  test_common_create_delete_bucket "s3cmd"
+}
+
+# test listing buckets on versitygw
+@test "test_list_buckets_s3cmd" {
+  test_common_list_buckets "s3cmd"
+}
\ No newline at end of file
diff --git a/tests/setup.sh b/tests/setup.sh
index d00af89..ee7415c 100644
--- a/tests/setup.sh
+++ b/tests/setup.sh
@@ -1,7 +1,7 @@
-#!/usr/bin/env bats
+#!/usr/bin/env bash
 
+# bats setup function
 setup() {
-
   if [ "$GITHUB_ACTIONS" != "true" ] && [ -r tests/.secrets ]; then
     source tests/.secrets
   else
@@ -19,6 +19,22 @@ setup() {
     source "$VERSITYGW_TEST_ENV"
   fi
 
+  check_params
+
+  base_command="ROOT_ACCESS_KEY=$AWS_ACCESS_KEY_ID ROOT_SECRET_KEY=$AWS_SECRET_ACCESS_KEY $VERSITY_EXE"
+  if [ -n "$CERT" ] && [ -n "$KEY" ]; then
+    base_command+=" --cert $CERT --key $KEY"
+  fi
+  base_command+=" $BACKEND $LOCAL_FOLDER &"
+  eval "$base_command"
+
+  versitygw_pid=$!
+  export versitygw_pid AWS_PROFILE AWS_ENDPOINT_URL LOCAL_FOLDER BUCKET_ONE_NAME BUCKET_TWO_NAME S3CMD_CONFIG
+}
+
+# make sure required environment variables are defined properly
+# return 0 for yes, 1 for no
+check_params() {
   if [ -z "$AWS_ACCESS_KEY_ID" ]; then
     echo "No AWS access key set"
     return 1
@@ -53,27 +69,16 @@ setup() {
     echo "RECREATE_BUCKETS must be 'true' or 'false'"
     return 1
   fi
-  key_len=${#AWS_ACCESS_KEY_ID}
-  secret_len=${#AWS_SECRET_ACCESS_KEY}
-  echo "$key_len $secret_len $VERSITY_EXE $BACKEND $LOCAL_FOLDER $AWS_ENDPOINT_URL $AWS_PROFILE $BUCKET_ONE_NAME $BUCKET_TWO_NAME"
-
-  ROOT_ACCESS_KEY="$AWS_ACCESS_KEY_ID" ROOT_SECRET_KEY="$AWS_SECRET_ACCESS_KEY" "$VERSITY_EXE" "$BACKEND" "$LOCAL_FOLDER" &
-
-  export AWS_PROFILE
-  export AWS_ENDPOINT_URL
-  export LOCAL_FOLDER
-  export BUCKET_ONE_NAME
-  export BUCKET_TWO_NAME
-
-  versitygw_pid=$!
-  export versitygw_pid
 }
 
+# fail a test
+# param: error message
 fail() {
   echo "$1"
   return 1
 }
 
+# bats teardown function
 teardown() {
   if [ -n "$versitygw_pid" ]; then
     if ps -p "$versitygw_pid" > /dev/null; then
diff --git a/tests/test_common.sh b/tests/test_common.sh
new file mode 100644
index 0000000..b9232c4
--- /dev/null
+++ b/tests/test_common.sh
@@ -0,0 +1,61 @@
+#!/usr/bin/env bats
+
+# common test for creating, deleting buckets
+# param: "aws" or "s3cmd"
+# pass if bucket is properly created and deleted, fail if not
+test_common_create_delete_bucket() {
+  if [[ $RECREATE_BUCKETS != "true" ]]; then
+    return
+  fi
+
+  if [[ $# -ne 1 ]]; then
+    fail "create/delete bucket test requires command type"
+  fi
+
+  setup_bucket "$1" "$BUCKET_ONE_NAME" || local create_result=$?
+  [[ $create_result -eq 0 ]] || fail "Failed to create bucket"
+
+  bucket_exists "$1" "$BUCKET_ONE_NAME" || local exists_three=$?
+  [[ $exists_three -eq 0 ]] || fail "Failed bucket existence check"
+
+  delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME" || local delete_result_two=$?
+  [[ $delete_result_two -eq 0 ]] || fail "Failed to delete bucket"
+}
+
+# common test for listing buckets
+# param: "aws" or "s3cmd"
+# pass if buckets are properly listed, fail if not
+test_common_list_buckets() {
+  if [[ $# -ne 1 ]]; then
+    fail "List buckets test requires one argument"
+  fi
+
+  setup_bucket "$1" "$BUCKET_ONE_NAME" || local setup_result_one=$?
+  [[ $setup_result_one -eq 0 ]] || fail "Bucket one setup error"
+  setup_bucket "$1" "$BUCKET_TWO_NAME" || local setup_result_two=$?
+  [[ $setup_result_two -eq 0 ]] || fail "Bucket two setup error"
+
+  list_buckets "$1"
+  local bucket_one_found=false
+  local bucket_two_found=false
+  if [ -z "$bucket_array" ]; then
+    fail "bucket_array parameter not exported"
+  fi
+  for bucket in "${bucket_array[@]}"; do
+    if [ "$bucket" == "$BUCKET_ONE_NAME" ] || [ "$bucket" == "s3://$BUCKET_ONE_NAME" ]; then
+      bucket_one_found=true
+    elif [ "$bucket" == "$BUCKET_TWO_NAME" ] || [ "$bucket" == "s3://$BUCKET_TWO_NAME" ]; then
+      bucket_two_found=true
+    fi
+    if [ $bucket_one_found == true ] && [ $bucket_two_found == true ]; then
+      break
+    fi
+  done
+  echo $bucket_one_found $bucket_two_found
+  if [ $bucket_one_found == false ] || [ $bucket_two_found == false ]; then
+    fail "Not all buckets found"
+  fi
+
+  delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
+  delete_bucket_or_contents "$1" "$BUCKET_TWO_NAME"
+}
\ No newline at end of file
diff --git a/tests/util.sh b/tests/util.sh
index d9eef41..8dcdcfb 100644
--- a/tests/util.sh
+++ b/tests/util.sh
@@ -11,7 +11,7 @@ create_bucket() {
   local exit_code=0
   local error
-  error=$(aws s3 mb s3://"$1" 2>&1) || exit_code=$?
+  error=$(aws --no-verify-ssl s3 mb s3://"$1" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error creating bucket: $error"
     return 1
@@ -30,7 +30,7 @@ delete_bucket() {
   local exit_code=0
   local error
-  error=$(aws s3 rb s3://"$1" 2>&1) || exit_code="$?"
+  error=$(aws --no-verify-ssl s3 rb s3://"$1" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     if [[ "$error" == *"The specified bucket does not exist"* ]]; then
       return 0
@@ -46,14 +46,22 @@ delete_bucket() {
 # param: bucket name
 # return 0 for success, 1 for failure
 delete_bucket_recursive() {
-  if [ $# -ne 1 ]; then
-    echo "delete bucket missing bucket name"
+  if [ $# -ne 2 ]; then
+    echo "delete bucket missing command type, bucket name"
     return 1
   fi
   local exit_code=0
   local error
-  error=$(aws s3 rb s3://"$1" --force 2>&1) || exit_code="$?"
+  if [[ $1 == "aws" ]]; then
+    error=$(aws --no-verify-ssl s3 rb s3://"$2" --force 2>&1) || exit_code="$?"
+  elif [[ $1 == "s3cmd" ]]; then
+    error=$(s3cmd --no-check-certificate rb s3://"$2" --recursive 2>&1) || exit_code="$?"
+  else
+    echo "invalid command type '$1'"
+    return 1
+  fi
+
   if [ $exit_code -ne 0 ]; then
     if [[ "$error" == *"The specified bucket does not exist"* ]]; then
       return 0
@@ -66,17 +74,24 @@ delete_bucket_recursive() {
 }
 
 # delete contents of a bucket
-# param: bucket name
+# param: command type, bucket name
 # return 0 for success, 1 for failure
 delete_bucket_contents() {
-  if [ $# -ne 1 ]; then
+  if [ $# -ne 2 ]; then
     echo "delete bucket missing bucket name"
     return 1
   fi
   local exit_code=0
   local error
-  error=$(aws s3 rm s3://"$1" --recursive 2>&1) || exit_code="$?"
+  if [[ $1 == "aws" ]]; then
+    error=$(aws --no-verify-ssl s3 rm s3://"$2" --recursive 2>&1) || exit_code="$?"
+  elif [[ $1 == "s3cmd" ]]; then
+    error=$(s3cmd --no-check-certificate del s3://"$2" --recursive 2>&1) || exit_code="$?"
+  else
+    echo "invalid command type $1"
+    return 1
+  fi
   if [ $exit_code -ne 0 ]; then
     echo "error deleting bucket contents: $error"
     return 1
@@ -88,16 +103,22 @@
 # param: bucket name
 # return 0 for true, 1 for false, 2 for error
 bucket_exists() {
-  if [ $# -ne 1 ]; then
-    echo "bucket exists check missing bucket name"
+  if [ $# -ne 2 ]; then
+    echo "bucket exists check missing command type, bucket name"
     return 2
   fi
-  echo "checking bucket $1"
   local exit_code=0
   local error
-  error=$(aws s3 ls s3://"$1" 2>&1) || exit_code="$?"
-  echo "Exit code: $exit_code, error: $error"
+  if [[ $1 == 'aws' ]]; then
+    error=$(aws --no-verify-ssl s3 ls s3://"$2" 2>&1) || exit_code="$?"
+  elif [[ $1 == 's3cmd' ]]; then
+    error=$(s3cmd --no-check-certificate -c "$S3CMD_CONFIG" ls s3://"$2" 2>&1) || exit_code="$?"
+  else
+    echo "invalid command type: $1"
+    return 2
+  fi
+
   if [ $exit_code -ne 0 ]; then
     if [[ "$error" == *"The specified bucket does not exist"* ]] || [[ "$error" == *"Access Denied"* ]]; then
       return 1
@@ -110,22 +131,22 @@
 }
 
 # delete buckets or just the contents depending on RECREATE_BUCKETS parameter
-# param: bucket name
+# params: command type, bucket name
 # return: 0 for success, 1 for failure
 delete_bucket_or_contents() {
-  if [ $# -ne 1 ]; then
-    echo "delete bucket or contents function requires bucket name"
+  if [ $# -ne 2 ]; then
+    echo "delete bucket or contents function requires command type, bucket name"
     return 1
   fi
   if [[ $RECREATE_BUCKETS == "false" ]]; then
-    delete_bucket_contents "$1" || local delete_result=$?
+    delete_bucket_contents "$1" "$2" || local delete_result=$?
     if [[ $delete_result -ne 0 ]]; then
      echo "error deleting bucket contents"
      return 1
    fi
    return 0
  fi
-  delete_bucket_recursive "$1" || local delete_result=$?
+  delete_bucket_recursive "$1" "$2" || local delete_result=$?
   if [[ $delete_result -ne 0 ]]; then
     echo "Bucket deletion error"
     return 1
@@ -138,19 +159,18 @@
 # param: bucket name
 # return 0 for success, 1 for failure
 setup_bucket() {
-  if [ $# -ne 1 ]; then
-    echo "bucket creation function requires bucket name"
+  if [ $# -ne 2 ]; then
+    echo "bucket creation function requires command type, bucket name"
     return 1
   fi
-  echo "$1"
   local exists_result
-  bucket_exists "$1" || exists_result=$?
+  bucket_exists "$1" "$2" || exists_result=$?
   if [[ $exists_result -eq 2 ]]; then
     echo "Bucket existence check error"
     return 1
   fi
   if [[ $exists_result -eq 0 ]]; then
-    delete_bucket_or_contents "$1" || delete_result=$?
+    delete_bucket_or_contents "$1" "$2" || delete_result=$?
     if [[ delete_result -ne 0 ]]; then
       echo "error deleting bucket or contents"
       return 1
@@ -164,7 +184,7 @@
     return 1
   fi
   local create_result
-  create_bucket "$1" || create_result=$?
+  create_bucket "$2" || create_result=$?
   if [[ $create_result -ne 0 ]]; then
     echo "Error creating bucket"
     return 1
@@ -183,9 +203,9 @@
   fi
   local exit_code=0
   local error
-  error=$(aws s3 ls s3://"$1" 2>&1) || exit_code="$?"
+  error=$(aws --no-verify-ssl s3 ls s3://"$1" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
-    if [[ "$error" == "" ]]; then
+    if [[ "$error" == "" ]] || [[ $error == *"InsecureRequestWarning"* ]]; then
       return 1
     else
       echo "error checking if object exists: $error"
@@ -205,7 +225,7 @@
   fi
   local exit_code=0
   local error
-  error=$(aws s3 cp "$1" s3://"$2" 2>&1) || exit_code=$?
+  error=$(aws --no-verify-ssl s3 cp "$1" s3://"$2" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error copying object to bucket: $error"
     return 1
@@ -246,7 +266,7 @@
   fi
   local exit_code=0
   local error
-  error=$(aws s3 rm s3://"$1" 2>&1) || exit_code=$?
+  error=$(aws --no-verify-ssl s3 rm s3://"$1" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error deleting object: $error"
     return 1
@@ -255,12 +275,25 @@
 }
 
 # list buckets on versitygw
-# no params
+# params: format (aws, s3cmd)
 # export bucket_array (bucket names) on success, return 1 for failure
 list_buckets() {
+  if [[ $# -ne 1 ]]; then
+    echo "List buckets command missing format"
+    return 1
+  fi
+
   local exit_code=0
   local output
-  output=$(aws s3 ls 2>&1) || exit_code=$?
+  if [[ $1 == "aws" ]]; then
+    output=$(aws --no-verify-ssl s3 ls s3:// 2>&1) || exit_code=$?
+  elif [[ $1 == "s3cmd" ]]; then
+    output=$(s3cmd --no-check-certificate -c "$S3CMD_CONFIG" ls s3://) || exit_code=$?
+  else
+    echo "invalid format: $1"
+    return 1
+  fi
+
   if [ $exit_code -ne 0 ]; then
     echo "error listing buckets: $output"
     return 1
@@ -285,7 +318,7 @@
   fi
   local exit_code=0
   local output
-  output=$(aws s3 ls s3://"$1" 2>&1) || exit_code=$?
+  output=$(aws --no-verify-ssl s3 ls s3://"$1" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error listing objects: $output"
     return 1
@@ -310,7 +343,7 @@
   fi
   local exit_code=0
   local error
-  error=$(aws s3api head-bucket --bucket "$1" 2>&1) || exit_code="$?"
+  error=$(aws --no-verify-ssl s3api head-bucket --bucket "$1" 2>&1) || exit_code="$?"
   if [ $exit_code -eq 0 ]; then
     return 0
   fi
@@ -330,12 +363,12 @@
     return 2
   fi
   local exit_code=0
-  object_data=$(aws s3api head-object --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
+  object_data=$(aws --no-verify-ssl s3api head-object --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     echo "Error obtaining object data: $object_data"
     return 2
   fi
-  etag=$(echo "$object_data" | jq '.ETag')
+  etag=$(echo "$object_data" | grep -v "InsecureRequestWarning" | jq '.ETag')
   if [[ "$etag" == '""' ]]; then
     return 1
   fi
@@ -351,7 +384,7 @@
     return 1
   fi
   local exit_code=0
-  acl=$(aws s3api get-bucket-acl --bucket "$1" 2>&1) || exit_code="$?"
+  acl=$(aws --no-verify-ssl s3api get-bucket-acl --bucket "$1" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     echo "Error getting bucket ACLs: $acl"
     return 1
@@ -368,7 +401,7 @@
     return 1
   fi
   local exit_code=0
-  acl=$(aws s3api get-object-acl --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
+  acl=$(aws --no-verify-ssl s3api get-object-acl --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     echo "Error getting object ACLs: $acl"
     return 1
@@ -386,7 +419,7 @@
   fi
   local error
   local result
-  error=$(aws s3api put-bucket-tagging --bucket "$1" --tagging "TagSet=[{Key=$2,Value=$3}]") || result=$?
+  error=$(aws --no-verify-ssl s3api put-bucket-tagging --bucket "$1" --tagging "TagSet=[{Key=$2,Value=$3}]") || result=$?
   if [[ $result -ne 0 ]]; then
     echo "Error adding bucket tag: $error"
     return 1
@@ -403,7 +436,7 @@
     return 1
   fi
   local result
-  tags=$(aws s3api get-bucket-tagging --bucket "$1") || result=$?
+  tags=$(aws --no-verify-ssl s3api get-bucket-tagging --bucket "$1") || result=$?
   if [[ $result -ne 0 ]]; then
     echo "error getting bucket tags: $tags"
     return 1
@@ -421,7 +454,7 @@
   fi
   local error
   local result
-  error=$(aws s3api put-object-tagging --bucket "$1" --key "$2" --tagging "TagSet=[{Key=$3,Value=$4}]") || result=$?
+  error=$(aws --no-verify-ssl s3api put-object-tagging --bucket "$1" --key "$2" --tagging "TagSet=[{Key=$3,Value=$4}]") || result=$?
   if [[ $result -ne 0 ]]; then
     echo "Error adding object tag: $error"
     return 1
@@ -438,7 +471,7 @@
     return 1
   fi
   local result
-  tags=$(aws s3api get-object-tagging --bucket "$1" --key "$2") || result=$?
+  tags=$(aws --no-verify-ssl s3api get-object-tagging --bucket "$1" --key "$2") || result=$?
   if [[ $result -ne 0 ]]; then
     echo "error getting object tags: $tags"
     return 1
@@ -446,52 +479,6 @@
   export tags
 }
 
-# create a test file and export folder. do so in temp folder
-# params: filename
-# export test file folder on success, return 1 for error
-create_test_files() {
-  if [ $# -lt 1 ]; then
-    echo "create test files command missing filename"
-    return 1
-  fi
-  test_file_folder=.
-  if [[ -z "$GITHUB_ACTIONS" ]]; then
-    test_file_folder=${TMPDIR}versity-gwtest
-    mkdir -p "$test_file_folder" || local mkdir_result=$?
-    if [[ $mkdir_result -ne 0 ]]; then
-      echo "error creating test file folder"
-    fi
-  fi
-  for name in "$@"; do
-    touch "$test_file_folder"/"$name" || local touch_result=$?
-    if [[ $touch_result -ne 0 ]]; then
-      echo "error creating file $name"
-    fi
-  done
-  export test_file_folder
-}
-
-# delete a test file
-# params: filename
-# return: 0 for success, 1 for error
-delete_test_files() {
-  if [ $# -lt 1 ]; then
-    echo "delete test files command missing filenames"
-    return 1
-  fi
-  if [ -z "$test_file_folder" ]; then
-    echo "no test file folder defined, not deleting"
-    return 1
-  fi
-  for name in "$@"; do
-    rm "$test_file_folder"/"$name" || rm_result=$?
-    if [[ $rm_result -ne 0 ]]; then
-      echo "error deleting file $name"
-    fi
-  done
-  return 0
-}
-
 # list objects in bucket, v1
 # param: bucket
 # export objects on success, return 1 for failure
@@ -500,7 +487,7 @@
     echo "list objects command missing bucket"
     return 1
   fi
-  objects=$(aws s3api list-objects --bucket "$1") || local result=$?
+  objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1") || local result=$?
   if [[ $result -ne 0 ]]; then
     echo "error listing objects: $objects"
     return 1
@@ -516,7 +503,7 @@
     echo "list objects command missing bucket and/or path"
     return 1
   fi
-  objects=$(aws s3api list-objects-v2 --bucket "$1") || local result=$?
+  objects=$(aws --no-verify-ssl s3api list-objects-v2 --bucket "$1") || local result=$?
   if [[ $result -ne 0 ]]; then
     echo "error listing objects: $objects"
     return 1
@@ -534,7 +521,7 @@
   fi
 
   local multipart_data
-  multipart_data=$(aws s3api create-multipart-upload --bucket "$1" --key "$2") || local created=$?
+  multipart_data=$(aws --no-verify-ssl s3api create-multipart-upload --bucket "$1" --key "$2") || local created=$?
   if [[ $created -ne 0 ]]; then
     echo "Error creating multipart upload: $upload_id"
     return 1
@@ -554,7 +541,7 @@
     return 1
   fi
   local etag_json
-  etag_json=$(aws s3api upload-part --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --body "$4-$(($5-1))") || local uploaded=$?
+  etag_json=$(aws --no-verify-ssl s3api upload-part --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --body "$4-$(($5-1))") || local uploaded=$?
   if [[ $uploaded -ne 0 ]]; then
     echo "Error uploading part $5: $etag_json"
     return 1
@@ -616,7 +603,7 @@
     return 1
   fi
 
-  error=$(aws s3api complete-multipart-upload --bucket "$1" --key "$2" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
+  error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
   if [[ $completed -ne 0 ]]; then
     echo "Error completing upload: $error"
     return 1
@@ -633,7 +620,7 @@
     return 1
   fi
 
-  error=$(aws s3api abort-multipart-upload --bucket "$1" --key "$2" --upload-id "$3") || local aborted=$?
+  error=$(aws --no-verify-ssl s3api abort-multipart-upload --bucket "$1" --key "$2" --upload-id "$3") || local aborted=$?
   if [[ $aborted -ne 0 ]]; then
     echo "Error aborting upload: $error"
     return 1
@@ -670,7 +657,7 @@
   fi
 
   local result
-  error=$(aws s3 cp "$1" "$2") || result=$?
+  error=$(aws --no-verify-ssl s3 cp "$1" "$2") || result=$?
   if [[ $result -ne 0 ]]; then
     echo "error copying file: $error"
     return 1
@@ -693,7 +680,7 @@
     return 1
   fi
 
-  listed_parts=$(aws s3api list-parts --bucket "$1" --key "$2" --upload-id "$upload_id") || local listed=$?
+  listed_parts=$(aws --no-verify-ssl s3api list-parts --bucket "$1" --key "$2" --upload-id "$upload_id") || local listed=$?
   if [[ $listed -ne 0 ]]; then
     echo "Error aborting upload: $parts"
     return 1
@@ -722,7 +709,7 @@
     return 1
   fi
 
-  uploads=$(aws s3api list-multipart-uploads --bucket "$1") || local list_result=$?
+  uploads=$(aws --no-verify-ssl s3api list-multipart-uploads --bucket "$1") || local list_result=$?
   if [[ $list_result -ne 0 ]]; then
     echo "error listing uploads: $uploads"
     return 1
@@ -773,7 +760,7 @@
   done
   parts+="]"
 
-  error=$(aws s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
+  error=$(aws --no-verify-ssl s3api complete-multipart-upload --bucket "$1" --key "$2-copy" --upload-id "$upload_id" --multipart-upload '{"Parts": '"$parts"'}') || local completed=$?
   if [[ $completed -ne 0 ]]; then
     echo "Error completing upload: $error"
     return 1
@@ -789,7 +776,7 @@
     return 1
   fi
   local etag_json
-  etag_json=$(aws s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
+  etag_json=$(aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
   if [[ $uploaded -ne 0 ]]; then
     echo "Error uploading part $5: $etag_json"
     return 1
@@ -797,37 +784,3 @@
   etag=$(echo "$etag_json" | jq '.CopyPartResult.ETag')
   export etag
 }
-
-split_file() {
-  file_size=$(stat -c %s "$1" 2>/dev/null || stat -f %z "$1" 2>/dev/null)
-  part_size=$((file_size / $2))
-  remainder=$((file_size % $2))
-  if [[ remainder -ne 0 ]]; then
-    part_size=$((part_size+1))
-  fi
-
-  local error
-  local split_result
-  error=$(split -a 1 -d -b "$part_size" "$1" "$1"-) || split_result=$?
-  if [[ $split_result -ne 0 ]]; then
-    echo "error splitting file: $error"
-    return 1
-  fi
-  return 0
-}
-
-# compare files
-# input: two files
-# return 0 for same data, 1 for different data, 2 for error
-compare_files() {
-  if [ $# -ne 2 ]; then
-    echo "file comparison requires two files"
-    return 2
-  fi
-  file_one_md5=$(md5 -q "$1")
-  file_two_md5=$(md5 -q "$2")
-  if [[ $file_one_md5 == "$file_two_md5" ]]; then
-    return 0
-  fi
-  return 1
-}
\ No newline at end of file
diff --git a/tests/util_config.sh b/tests/util_config.sh
new file mode 100644
index 0000000..751c3c2
--- /dev/null
+++ b/tests/util_config.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+setup_two_buckets() {
+  setup_bucket "$BUCKET_ONE_NAME" || local setup_result_one=$?
+  if [[ $setup_result_one -eq 0 ]]; then
+    return 1
+  fi
+  setup_bucket "$BUCKET_TWO_NAME" || local setup_result_two=$?
+  if [[ $setup_result_two -eq 0 ]]; then
+    return 1
+  fi
+  return 0
+}
\ No newline at end of file
diff --git a/tests/util_file.sh b/tests/util_file.sh
new file mode 100644
index 0000000..887fed7
--- /dev/null
+++ b/tests/util_file.sh
@@ -0,0 +1,84 @@
+#!/usr/bin/env bats
+
+# create a test file and export folder. do so in temp folder
+# params: filename
+# export test file folder on success, return 1 for error
+create_test_files() {
+  if [ $# -lt 1 ]; then
+    echo "create test files command missing filename"
+    return 1
+  fi
+  test_file_folder=.
+  if [[ -z "$GITHUB_ACTIONS" ]]; then
+    test_file_folder=${TMPDIR}versity-gwtest
+    mkdir -p "$test_file_folder" || local mkdir_result=$?
+    if [[ $mkdir_result -ne 0 ]]; then
+      echo "error creating test file folder"
+    fi
+  fi
+  for name in "$@"; do
+    touch "$test_file_folder"/"$name" || local touch_result=$?
+    if [[ $touch_result -ne 0 ]]; then
+      echo "error creating file $name"
+    fi
+  done
+  export test_file_folder
+}
+
+# delete a test file
+# params: filename
+# return: 0 for success, 1 for error
+delete_test_files() {
+  if [ $# -lt 1 ]; then
+    echo "delete test files command missing filenames"
+    return 1
+  fi
+  if [ -z "$test_file_folder" ]; then
+    echo "no test file folder defined, not deleting"
+    return 1
+  fi
+  for name in "$@"; do
+    rm "$test_file_folder"/"$name" || rm_result=$?
+    if [[ $rm_result -ne 0 ]]; then
+      echo "error deleting file $name"
+    fi
+  done
+  return 0
+}
+
+# split file into pieces to test multipart upload
+# param: file location
+# return 0 for success, 1 for error
+split_file() {
+  file_size=$(stat -c %s "$1" 2>/dev/null || stat -f %z "$1" 2>/dev/null)
+  part_size=$((file_size / $2))
+  remainder=$((file_size % $2))
+  if [[ remainder -ne 0 ]]; then
+    part_size=$((part_size+1))
+  fi
+
+  local error
+  local split_result
+  error=$(split -a 1 -d -b "$part_size" "$1" "$1"-) || split_result=$?
+  if [[ $split_result -ne 0 ]]; then
+    echo "error splitting file: $error"
+    return 1
+  fi
+  return 0
+}
+
+# compare files
+# input: two files
+# return 0 for same data, 1 for different data, 2 for error
+compare_files() {
+  if [ $# -ne 2 ]; then
+    echo "file comparison requires two files"
+    return 2
+  fi
+  file_one_md5=$(md5 -q "$1")
+  file_two_md5=$(md5 -q "$2")
+  if [[ $file_one_md5 == "$file_two_md5" ]]; then
+    return 0
+  fi
+  return 1
+}
diff --git a/tests/util_posix.sh b/tests/util_posix.sh
index 614ddab..ab1ac56 100644
--- a/tests/util_posix.sh
+++ b/tests/util_posix.sh
@@ -56,7 +56,7 @@ bucket_not_exists_remote_and_local() {
     echo "bucket existence check requires single name parameter"
     return 2
   fi
-  bucket_exists "$1" || local exist_result=$?
+  bucket_exists "aws" "$1" || local exist_result=$?
   if [[ $exist_result -eq 2 ]]; then
     echo "Error checking if bucket exists"
     return 2
@@ -80,7 +80,7 @@
     echo "bucket existence check requires single name parameter"
     return 2
   fi
-  bucket_exists "$1" || local exist_result=$?
+  bucket_exists "aws" "$1" || local exist_result=$?
   if [[ $exist_result -eq 2 ]]; then
     echo "Error checking if bucket exists"
     return 2