diff --git a/.github/workflows/system.yml b/.github/workflows/system.yml
index feec058..67bc336 100644
--- a/.github/workflows/system.yml
+++ b/.github/workflows/system.yml
@@ -12,6 +12,7 @@ jobs:
           LOCAL_FOLDER: /tmp/gw1
           BUCKET_ONE_NAME: versity-gwtest-bucket-one-1
           BUCKET_TWO_NAME: versity-gwtest-bucket-two-1
+          IAM_TYPE: folder
           USERS_FOLDER: /tmp/iam1
           AWS_ENDPOINT_URL: https://127.0.0.1:7070
           RUN_SET: "s3cmd"
@@ -20,6 +21,7 @@ jobs:
           LOCAL_FOLDER: /tmp/gw2
           BUCKET_ONE_NAME: versity-gwtest-bucket-one-2
           BUCKET_TWO_NAME: versity-gwtest-bucket-two-2
+          IAM_TYPE: folder
           USERS_FOLDER: /tmp/iam2
           AWS_ENDPOINT_URL: https://127.0.0.1:7071
           RUN_SET: "s3"
@@ -28,6 +30,7 @@ jobs:
           LOCAL_FOLDER: /tmp/gw3
           BUCKET_ONE_NAME: versity-gwtest-bucket-one-3
           BUCKET_TWO_NAME: versity-gwtest-bucket-two-3
+          IAM_TYPE: folder
           USERS_FOLDER: /tmp/iam3
           AWS_ENDPOINT_URL: https://127.0.0.1:7072
           RUN_SET: "s3api"
@@ -36,21 +39,25 @@ jobs:
           LOCAL_FOLDER: /tmp/gw4
           BUCKET_ONE_NAME: versity-gwtest-bucket-one-4
           BUCKET_TWO_NAME: versity-gwtest-bucket-two-4
+          IAM_TYPE: folder
           USERS_FOLDER: /tmp/iam4
           AWS_ENDPOINT_URL: https://127.0.0.1:7073
           RUN_SET: "mc"
           PORT: 7073
+        - set: 5
+          LOCAL_FOLDER: /tmp/gw4
+          BUCKET_ONE_NAME: versity-gwtest-bucket-one-4
+          BUCKET_TWO_NAME: versity-gwtest-bucket-two-4
+          IAM_TYPE: s3
+          USERS_BUCKET: versity-gwtest-iam
+          AWS_ENDPOINT_URL: https://127.0.0.1:7074
+          RUN_SET: "aws-user"
+          PORT: 7074

     steps:
       - name: Check out code into the Go module directory
         uses: actions/checkout@v4

-      - name: Install ShellCheck and md5
-        run: sudo apt-get install shellcheck
-
-      - name: Run ShellCheck
-        run: shellcheck -S warning ./tests/*.sh
-
       - name: Set up Go
         uses: actions/setup-go@v5
         with:
@@ -81,6 +88,8 @@ jobs:
         BUCKET_ONE_NAME: ${{ matrix.BUCKET_ONE_NAME }}
         BUCKET_TWO_NAME: ${{ matrix.BUCKET_TWO_NAME }}
         USERS_FOLDER: ${{ matrix.USERS_FOLDER }}
+        USERS_BUCKET: ${{ matrix.USERS_BUCKET }}
+        IAM_TYPE: ${{ matrix.IAM_TYPE }}
         AWS_ENDPOINT_URL: ${{ matrix.AWS_ENDPOINT_URL }}
         RUN_SET: ${{ matrix.RUN_SET }}
         PORT: ${{ matrix.PORT }}
diff --git a/tests/.env.default b/tests/.env.default
index cc711c5..c856e31 100644
--- a/tests/.env.default
+++ b/tests/.env.default
@@ -14,4 +14,7 @@ SECRETS_FILE=./tests/.secrets
 MC_ALIAS=versity
 LOG_LEVEL=2
 GOCOVERDIR=$PWD/cover
-USERS_FOLDER=$PWD/iam
\ No newline at end of file
+USERS_FOLDER=$PWD/iam
+#TEST_LOG_FILE=test.log
+#VERSITY_LOG_FILE=versity.log
+IAM_TYPE=folder
\ No newline at end of file
diff --git a/tests/commands/create_bucket.sh b/tests/commands/create_bucket.sh
index 54a0b63..2ba87bf 100644
--- a/tests/commands/create_bucket.sh
+++ b/tests/commands/create_bucket.sh
@@ -5,7 +5,7 @@
 # return 0 for success, 1 for failure
 create_bucket() {
   if [ $# -ne 2 ]; then
-    echo "create bucket missing command type, bucket name"
+    log 2 "create bucket missing command type, bucket name"
     return 1
   fi

@@ -22,11 +22,11 @@ create_bucket() {
   elif [[ $1 == "mc" ]]; then
     error=$(mc --insecure mb "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
   else
-    echo "invalid command type $1"
+    log 2 "invalid command type $1"
     return 1
   fi
   if [ $exit_code -ne 0 ]; then
-    echo "error creating bucket: $error"
+    log 2 "error creating bucket: $error"
     return 1
   fi
   return 0
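Reviewer note: the new matrix set 5 can be approximated locally by exporting the same values by hand. This is only a sketch — the values are copied from the matrix entry above, the remaining settings (VERSITY_EXE, BACKEND, credentials, and so on) are assumed to come from tests/.env and tests/.secrets, and it assumes bats is installed where run.sh expects it.

# Rough local equivalent of CI matrix set 5 (aws-user tests with s3-backed IAM).
# Everything not exported here is assumed to be provided by tests/.env / tests/.secrets.
export IAM_TYPE=s3 USERS_BUCKET=versity-gwtest-iam
export BUCKET_ONE_NAME=versity-gwtest-bucket-one-4 BUCKET_TWO_NAME=versity-gwtest-bucket-two-4
export LOCAL_FOLDER=/tmp/gw4 AWS_ENDPOINT_URL=https://127.0.0.1:7074 PORT=7074
./tests/run.sh aws-user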
diff --git a/tests/commands/get_bucket_policy.sh b/tests/commands/get_bucket_policy.sh
index 97fc495..ebea32d 100644
--- a/tests/commands/get_bucket_policy.sh
+++ b/tests/commands/get_bucket_policy.sh
@@ -30,9 +30,8 @@ get_bucket_policy_aws() {
     return 1
   fi
   policy_json=$(aws --no-verify-ssl s3api get-bucket-policy --bucket "$1" 2>&1) || get_result=$?
-  if [[ $policy_json == *"InsecureRequestWarning"* ]]; then
-    policy_json=$(awk 'NR>2' <<< "$policy_json")
-  fi
+  policy_json=$(echo "$policy_json" | grep -v "InsecureRequestWarning")
+  log 5 "$policy_json"
   if [[ $get_result -ne 0 ]]; then
     if [[ "$policy_json" == *"(NoSuchBucketPolicy)"* ]]; then
       bucket_policy=
@@ -41,7 +40,7 @@ get_bucket_policy_aws() {
       return 1
     fi
   else
-    bucket_policy=$(echo "{$policy_json}" | jq -r '.Policy')
+    bucket_policy=$(echo "$policy_json" | jq -r '.Policy')
   fi
   export bucket_policy
   return 0
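The warning handling in get_bucket_policy_aws above is easier to see on canned output. The warning line below is made up purely for illustration — real urllib3 warnings vary in wording and line count, which is why matching the substring is more robust than the old awk 'NR>2' line-skip:

# Illustration only: strip warning noise from captured aws-cli output, then parse with jq.
policy_json='connectionpool.py:1099: InsecureRequestWarning: Unverified HTTPS request is being made to host 127.0.0.1
{
    "Policy": "{\"Version\":\"2012-10-17\",\"Statement\":[]}"
}'
policy_json=$(echo "$policy_json" | grep -v "InsecureRequestWarning")
echo "$policy_json" | jq -r '.Policy'
# -> {"Version":"2012-10-17","Statement":[]}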
diff --git a/tests/commands/upload_part_copy.sh b/tests/commands/upload_part_copy.sh
index 7fffff6..4c08f58 100644
--- a/tests/commands/upload_part_copy.sh
+++ b/tests/commands/upload_part_copy.sh
@@ -22,7 +22,7 @@ upload_part_copy_with_range() {
     return 1
   fi
   local etag_json
-  log 5 "bucket: $1, key: $2, upload ID: $3, file name: $4, range: $5"
+  log 5 "bucket: $1, key: $2, upload ID: $3, file name: $4, range: $5, copy source range: $6"
   etag_json=$(aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))" --copy-source-range "$6" 2>&1) || local uploaded=$?
   if [[ $uploaded -ne 0 ]]; then
     log 2 "Error uploading part $5: $etag_json"
diff --git a/tests/env.sh b/tests/env.sh
new file mode 100644
index 0000000..53d8ddb
--- /dev/null
+++ b/tests/env.sh
@@ -0,0 +1,155 @@
+#!/usr/bin/env bash
+
+check_env_vars() {
+  if ! check_universal_vars; then
+    log 2 "error checking universal params"
+    return 1
+  fi
+  if [[ $RUN_VERSITYGW == "true" ]]; then
+    if ! check_versity_vars; then
+      log 2 "error checking versity params"
+      return 1
+    fi
+  fi
+  if [[ $RUN_S3CMD == "true" ]]; then
+    if [[ -z "$S3CMD_CONFIG" ]]; then
+      log 2 "running s3cmd commands requires S3CMD_CONFIG param"
+      return 1
+    fi
+    export S3CMD_CONFIG
+  fi
+  if [[ $RUN_MC == "true" ]]; then
+    if [ -z "$MC_ALIAS" ]; then
+      log 2 "running mc tests requires MC_ALIAS param"
+      return 1
+    fi
+    export MC_ALIAS
+  fi
+  return 0
+}
+
+check_universal_vars() {
+  if [ -z "$VERSITYGW_TEST_ENV" ]; then
+    if [ -r tests/.env ]; then
+      source tests/.env
+    else
+      log 3 "Warning: no .env file found in tests folder"
+    fi
+  elif [[ $BYPASS_ENV_FILE != "true" ]]; then
+    # shellcheck source=./tests/.env.default
+    source "$VERSITYGW_TEST_ENV"
+  fi
+  if [ "$GITHUB_ACTIONS" != "true" ] && [ -r "$SECRETS_FILE" ]; then
+    # shellcheck source=./tests/.secrets
+    source "$SECRETS_FILE"
+  else
+    log 3 "Warning: no secrets file found"
+  fi
+  if [[ -n "$LOG_LEVEL" ]]; then
+    export LOG_LEVEL_INT=$LOG_LEVEL
+  fi
+  if [ -z "$AWS_ACCESS_KEY_ID" ]; then
+    log 2 "No AWS access key set"
+    return 1
+  elif [ -z "$AWS_SECRET_ACCESS_KEY" ]; then
+    log 2 "No AWS secret access key set"
+    return 1
+  elif [ -z "$AWS_PROFILE" ]; then
+    log 2 "No AWS profile set"
+    return 1
+  elif [ -z "$AWS_ENDPOINT_URL" ]; then
+    log 2 "No AWS endpoint URL set"
+    return 1
+  elif [[ $RUN_VERSITYGW != "true" ]] && [[ $RUN_VERSITYGW != "false" ]]; then
+    log 2 "RUN_VERSITYGW must be 'true' or 'false'"
+    return 1
+  elif [ -z "$BUCKET_ONE_NAME" ]; then
+    log 2 "No bucket one name set"
+    return 1
+  elif [ -z "$BUCKET_TWO_NAME" ]; then
+    log 2 "No bucket two name set"
+    return 1
+  elif [ -z "$RECREATE_BUCKETS" ]; then
+    log 2 "No recreate buckets parameter set"
+    return 1
+  elif [[ $RECREATE_BUCKETS != "true" ]] && [[ $RECREATE_BUCKETS != "false" ]]; then
+    log 2 "RECREATE_BUCKETS must be 'true' or 'false'"
+    return 1
+  fi
+  export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_PROFILE AWS_ENDPOINT_URL RUN_VERSITYGW \
+    BUCKET_ONE_NAME BUCKET_TWO_NAME RECREATE_BUCKETS
+  if [[ -n "$TEST_LOG_FILE" ]]; then
+    export TEST_LOG_FILE
+  fi
+  if [[ -n "$VERSITY_LOG_FILE" ]]; then
+    export VERSITY_LOG_FILE
+  fi
+}
+
+check_versity_vars() {
+  if [ -z "$LOCAL_FOLDER" ]; then
+    log 2 "No local storage folder set"
+    return 1
+  elif [ -z "$VERSITY_EXE" ]; then
+    log 2 "No versity executable location set"
+    return 1
+  elif [ -z "$BACKEND" ]; then
+    log 2 "No backend parameter set (options: 'posix', 's3')"
+    return 1
+  fi
+  export LOCAL_FOLDER VERSITY_EXE BACKEND
+  if [ "$BACKEND" == 's3' ]; then
+    if [ -z "$AWS_ACCESS_KEY_ID_TWO" ]; then
+      log 2 "missing second AWS access key ID for s3 backend"
+      return 1
+    fi
+    if [ -z "$AWS_SECRET_ACCESS_KEY_TWO" ]; then
+      log 2 "missing second AWS secret access key for s3 backend"
+      return 1
+    fi
+    export AWS_ACCESS_KEY_ID_TWO AWS_SECRET_ACCESS_KEY_TWO
+  fi
+  if [[ -r $GOCOVERDIR ]]; then
+    export GOCOVERDIR=$GOCOVERDIR
+  fi
+  if [[ $RUN_USERS == "true" ]]; then
+    if ! check_user_vars; then
+      log 2 "error setting user vars"
+      return 1
+    fi
+  fi
+}
+
+check_user_vars() {
+  if [[ -z "$IAM_TYPE" ]]; then
+    export IAM_TYPE="folder"
+  fi
+  if [[ "$IAM_TYPE" == "folder" ]]; then
+    if [[ -z "$USERS_FOLDER" ]]; then
+      log 2 "if IAM type is folder (or not set), USERS_FOLDER parameter is required"
+      return 1
+    fi
+    if [ ! -d "$USERS_FOLDER" ]; then
+      if ! mkdir_error=$(mkdir "$USERS_FOLDER" 2>&1); then
+        log 2 "error creating users folder: $mkdir_error"
+        return 1
+      fi
+    fi
+    IAM_PARAMS="--iam-dir=$USERS_FOLDER"
+    export IAM_PARAMS
+    return 0
+  fi
+  if [[ $IAM_TYPE == "s3" ]]; then
+    if [[ -z "$USERS_BUCKET" ]]; then
+      log 2 "if IAM type is s3, USERS_BUCKET is required"
+      return 1
+    fi
+    IAM_PARAMS="--s3-iam-access $AWS_ACCESS_KEY_ID --s3-iam-secret $AWS_SECRET_ACCESS_KEY \
+      --s3-iam-region us-east-1 --s3-iam-bucket $USERS_BUCKET --s3-iam-endpoint $AWS_ENDPOINT_URL \
+      --s3-iam-noverify"
+    export IAM_PARAMS
+    return 0
+  fi
+  log 2 "unrecognized IAM_TYPE value: $IAM_TYPE"
+  return 1
+}
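A quick way to exercise the folder branch of check_user_vars in isolation — a sketch that assumes it is run from the repository root (so the relative source paths resolve) and uses /tmp/iam-check as an arbitrary scratch directory:

# Sketch: verify that check_user_vars builds the folder-based IAM flag.
source ./tests/logger.sh            # provides log()
source ./tests/env.sh
export IAM_TYPE=folder USERS_FOLDER=/tmp/iam-check
if check_user_vars; then
  echo "IAM flags: $IAM_PARAMS"     # expected: --iam-dir=/tmp/iam-check
else
  echo "check_user_vars failed"
fi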
create_bucket "s3api" "$USERS_BUCKET"; then + log 2 "error creating bucket" + return 1 + fi + log 4 "bucket create successful" + else + log 4 "bucket exists" + fi + iam_params="--s3-iam-access $AWS_ACCESS_KEY_ID --s3-iam-secret $AWS_SECRET_ACCESS_KEY \ + --s3-iam-region us-east-1 --s3-iam-bucket $USERS_BUCKET --s3-iam-endpoint $AWS_ENDPOINT_URL \ + --s3-iam-noverify" + export iam_params + return 0 + fi + log 2 "unrecognized IAM_TYPE value: $IAM_TYPE" + return 1 +} \ No newline at end of file diff --git a/tests/logger.sh b/tests/logger.sh index f9c616d..7d65a8e 100644 --- a/tests/logger.sh +++ b/tests/logger.sh @@ -2,13 +2,15 @@ # levels: 1 - crit, 2 - err, 3 - warn, 4 - info, 5 - debug, 6 - trace +export LOG_LEVEL_INT=4 + log() { if [[ $# -ne 2 ]]; then echo "log function requires level, message" return 1 fi # shellcheck disable=SC2153 - if [[ $1 -gt $LOG_LEVEL ]]; then + if [[ $1 -gt $LOG_LEVEL_INT ]]; then return 0 fi log_level="" @@ -21,7 +23,7 @@ log() { 6) log_level="TRACE";; *) echo "invalid log level $1"; return 1 esac - if [[ "$2" == *"--secret"* ]]; then + if [[ "$2" == *"secret"* ]]; then log_mask "$log_level" "$2" return 0 fi @@ -47,7 +49,7 @@ log_mask() { elif [[ "$arg" == --secret=* ]]; then masked_args+=("--secret=********") else - if [[ "$arg" == "--secret_key" ]] || [[ "$arg" == "--secret" ]]; then + if [[ "$arg" == "--secret_key" ]] || [[ "$arg" == "--secret" ]] || [[ "$arg" == "--s3-iam-secret" ]]; then mask_next=true fi masked_args+=("$arg") diff --git a/tests/run.sh b/tests/run.sh index a5826cd..a5b3d6f 100755 --- a/tests/run.sh +++ b/tests/run.sh @@ -5,9 +5,12 @@ show_help() { echo "Usage: $0 [option...]" echo " -h, --help Display this help message and exit" echo " -s, --static Don't remove buckets between tests" - echo " aws Run tests with aws cli" + echo " aws Run tests with aws (s3api) cli" + echo " s3api Run tests with s3api cli" + echo " s3 Run tests with s3 cli" echo " s3cmd Run tests with s3cmd utility" echo " mc Run tests with mc utility" + echo " aws-user Run user tests with aws cli" } handle_param() { @@ -19,7 +22,7 @@ handle_param() { -s|--static) export RECREATE_BUCKETS=false ;; - s3|s3api|aws|s3cmd|mc) + s3|s3api|aws|s3cmd|mc|aws-user) set_command_type "$1" ;; *) # Handle unrecognized options or positional arguments @@ -86,6 +89,9 @@ case $command_type in echo "Running mc tests ..." "$HOME"/bin/bats ./tests/test_mc.sh || exit_code=$? ;; + aws-user) + echo "Running aws user tests ..." + "$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$? esac # shellcheck disable=SC2086 diff --git a/tests/setup.sh b/tests/setup.sh index d365b0a..9661cc8 100644 --- a/tests/setup.sh +++ b/tests/setup.sh @@ -1,20 +1,20 @@ #!/usr/bin/env bash +source ./tests/env.sh source ./tests/setup_mc.sh source ./tests/versity.sh # bats setup function setup() { - start_versity || start_result=$? - if [[ $start_result -ne 0 ]]; then - echo "error starting versity executable" + if ! check_env_vars; then + log 2 "error checking env values" return 1 fi - - check_params || check_result=$? - if [[ $check_result -ne 0 ]]; then - echo "parameter check failed" - return 1 + if [ "$RUN_VERSITYGW" == "true" ]; then + if ! run_versity_app; then + log 2 "error starting versity apps" + return 1 + fi fi log 4 "Running test $BATS_TEST_NAME" @@ -32,9 +32,8 @@ setup() { fi if [[ $RUN_MC == true ]]; then - check_add_mc_alias || check_result=$? - if [[ $check_result -ne 0 ]]; then - echo "mc alias check/add failed" + if ! 
diff --git a/tests/logger.sh b/tests/logger.sh
index f9c616d..7d65a8e 100644
--- a/tests/logger.sh
+++ b/tests/logger.sh
@@ -2,13 +2,15 @@

 # levels: 1 - crit, 2 - err, 3 - warn, 4 - info, 5 - debug, 6 - trace

+export LOG_LEVEL_INT=4
+
 log() {
   if [[ $# -ne 2 ]]; then
     echo "log function requires level, message"
     return 1
   fi
   # shellcheck disable=SC2153
-  if [[ $1 -gt $LOG_LEVEL ]]; then
+  if [[ $1 -gt $LOG_LEVEL_INT ]]; then
     return 0
   fi
   log_level=""
@@ -21,7 +23,7 @@ log() {
     6) log_level="TRACE";;
     *) echo "invalid log level $1"; return 1
   esac
-  if [[ "$2" == *"--secret"* ]]; then
+  if [[ "$2" == *"secret"* ]]; then
     log_mask "$log_level" "$2"
     return 0
   fi
@@ -47,7 +49,7 @@ log_mask() {
   elif [[ "$arg" == --secret=* ]]; then
     masked_args+=("--secret=********")
   else
-    if [[ "$arg" == "--secret_key" ]] || [[ "$arg" == "--secret" ]]; then
+    if [[ "$arg" == "--secret_key" ]] || [[ "$arg" == "--secret" ]] || [[ "$arg" == "--s3-iam-secret" ]]; then
       mask_next=true
     fi
     masked_args+=("$arg")
diff --git a/tests/run.sh b/tests/run.sh
index a5826cd..a5b3d6f 100755
--- a/tests/run.sh
+++ b/tests/run.sh
@@ -5,9 +5,12 @@ show_help() {
   echo "Usage: $0 [option...]"
   echo " -h, --help    Display this help message and exit"
   echo " -s, --static  Don't remove buckets between tests"
-  echo " aws           Run tests with aws cli"
+  echo " aws           Run tests with aws (s3api) cli"
+  echo " s3api         Run tests with s3api cli"
+  echo " s3            Run tests with s3 cli"
   echo " s3cmd         Run tests with s3cmd utility"
   echo " mc            Run tests with mc utility"
+  echo " aws-user      Run user tests with aws cli"
 }

 handle_param() {
@@ -19,7 +22,7 @@ handle_param() {
     -s|--static)
       export RECREATE_BUCKETS=false
       ;;
-    s3|s3api|aws|s3cmd|mc)
+    s3|s3api|aws|s3cmd|mc|aws-user)
       set_command_type "$1"
       ;;
     *) # Handle unrecognized options or positional arguments
@@ -86,6 +89,9 @@ case $command_type in
     echo "Running mc tests ..."
    "$HOME"/bin/bats ./tests/test_mc.sh || exit_code=$?
     ;;
+  aws-user)
+    echo "Running aws user tests ..."
+    "$HOME"/bin/bats ./tests/test_user_aws.sh || exit_code=$?
 esac

 # shellcheck disable=SC2086
diff --git a/tests/setup.sh b/tests/setup.sh
index d365b0a..9661cc8 100644
--- a/tests/setup.sh
+++ b/tests/setup.sh
@@ -1,20 +1,20 @@
 #!/usr/bin/env bash

+source ./tests/env.sh
 source ./tests/setup_mc.sh
 source ./tests/versity.sh

 # bats setup function
 setup() {
-  start_versity || start_result=$?
-  if [[ $start_result -ne 0 ]]; then
-    echo "error starting versity executable"
+  if ! check_env_vars; then
+    log 2 "error checking env values"
     return 1
   fi
-
-  check_params || check_result=$?
-  if [[ $check_result -ne 0 ]]; then
-    echo "parameter check failed"
-    return 1
+  if [ "$RUN_VERSITYGW" == "true" ]; then
+    if ! run_versity_app; then
+      log 2 "error starting versity apps"
+      return 1
+    fi
   fi

   log 4 "Running test $BATS_TEST_NAME"
@@ -32,9 +32,8 @@ setup() {
   fi

   if [[ $RUN_MC == true ]]; then
-    check_add_mc_alias || check_result=$?
-    if [[ $check_result -ne 0 ]]; then
-      echo "mc alias check/add failed"
+    if ! check_add_mc_alias; then
+      log 2 "mc alias check/add failed"
       return 1
     fi
   fi
@@ -44,33 +43,6 @@ setup() {
     BUCKET_TWO_NAME
 }

-# make sure required environment variables for tests are defined properly
-# return 0 for yes, 1 for no
-check_params() {
-  if [ -z "$BUCKET_ONE_NAME" ]; then
-    echo "No bucket one name set"
-    return 1
-  elif [ -z "$BUCKET_TWO_NAME" ]; then
-    echo "No bucket two name set"
-    return 1
-  elif [ -z "$RECREATE_BUCKETS" ]; then
-    echo "No recreate buckets parameter set"
-    return 1
-  elif [[ $RECREATE_BUCKETS != "true" ]] && [[ $RECREATE_BUCKETS != "false" ]]; then
-    echo "RECREATE_BUCKETS must be 'true' or 'false'"
-    return 1
-  fi
-  if [[ -z "$LOG_LEVEL" ]]; then
-    export LOG_LEVEL=2
-  else
-    export LOG_LEVEL
-  fi
-  if [[ -n "$TEST_LOG_FILE" ]]; then
-    export TEST_LOG_FILE
-  fi
-  return 0
-}
-
 # fail a test
 # param: error message
 fail() {
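On the logger change above: LOG_LEVEL_INT now defaults to 4 and check_universal_vars copies LOG_LEVEL into it, so level gating behaves as sketched below (the exact output formatting lives in the unchanged part of logger.sh):

# Sketch of level gating: messages above the configured level return early.
source ./tests/logger.sh      # exports LOG_LEVEL_INT=4 by default
log 2 "an error message"      # emitted (2 <= 4)
log 5 "a debug message"       # suppressed (5 > 4)
export LOG_LEVEL_INT=5
log 5 "a debug message"       # now emitted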
diff --git a/tests/test_aws.sh b/tests/test_aws.sh
index 8ca5f66..0c96aa5 100755
--- a/tests/test_aws.sh
+++ b/tests/test_aws.sh
@@ -28,6 +28,8 @@ source ./tests/commands/put_object_legal_hold.sh
 source ./tests/commands/put_object_retention.sh
 source ./tests/commands/select_object_content.sh

+export RUN_USERS=true
+
 # abort-multipart-upload
 @test "test_abort_multipart_upload" {
   local bucket_file="bucket-file"
@@ -103,10 +105,10 @@
   os_name="$(uname)"
   if [[ "$os_name" == "Darwin" ]]; then
     now=$(date -u +"%Y-%m-%dT%H:%M:%S")
-    five_seconds_later=$(date -j -v +5S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S")
+    five_seconds_later=$(date -j -v +10S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S")
   else
     now=$(date +"%Y-%m-%dT%H:%M:%S")
-    five_seconds_later=$(date -d "$now 5 seconds" +"%Y-%m-%dT%H:%M:%S")
+    five_seconds_later=$(date -d "$now 10 seconds" +"%Y-%m-%dT%H:%M:%S")
   fi

   create_test_files "$bucket_file" || fail "error creating test file"
@@ -152,7 +154,7 @@
   get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "error getting object"
   compare_files "$test_file_folder/$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "files not equal"

-  sleep 2
+  sleep 10

   delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_test_files $bucket_file
@@ -236,22 +238,18 @@
 #  [[ $get_result -ne 0 ]] || fail "Get object with zero range returned no error"
 #}

-#@test "test_get_object_full_range" {
-#  bucket_file="bucket_file"
-#
-#  create_test_files "$bucket_file" || local created=$?
-#  [[ $created -eq 0 ]] || fail "Error creating test files"
-#  echo -n "0123456789" > "$test_file_folder/$bucket_file"
-#  setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$?
-#  [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
-#  put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error putting object"
-#  get_object_with_range "$BUCKET_ONE_NAME" "$bucket_file" "bytes=9-15" "$test_file_folder/$bucket_file-range" || fail "error getting range"
-#  cat "$test_file_folder/$bucket_file"
-#  cat "$test_file_folder/$bucket_file-range"
-#  ls -l "$test_file_folder/$bucket_file"
-#  ls -l "$test_file_folder/$bucket_file-range"
-#  compare_files "$test_file_folder/$bucket_file" "$test_file_folder/$bucket_file-range" || fail "files not equal"
-#}
+@test "test_get_object_full_range" {
+  bucket_file="bucket_file"
+
+  create_test_files "$bucket_file" || local created=$?
+  [[ $created -eq 0 ]] || fail "Error creating test files"
+  echo -n "0123456789" > "$test_file_folder/$bucket_file"
+  setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$?
+  [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
+  put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error putting object"
+  get_object_with_range "$BUCKET_ONE_NAME" "$bucket_file" "bytes=9-15" "$test_file_folder/$bucket_file-range" || fail "error getting range"
+  [[ "$(cat "$test_file_folder/$bucket_file-range")" == "9" ]] || fail "byte range not copied properly"
+}

 @test "test_put_object" {
   bucket_file="bucket_file"
@@ -490,8 +488,6 @@ legal_hold_retention_setup() {
   put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_result_two=$?
   [[ $copy_result_two -eq 0 ]] || fail "Failed to add object $object_two"

-  sleep 1
-
   list_objects_s3api_v1 "$BUCKET_ONE_NAME"
   key_one=$(echo "$objects" | jq -r '.Contents[0].Key')
   [[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)"
@@ -646,27 +642,41 @@ legal_hold_retention_setup() {
   delete_test_files $bucket_file
 }

-#@test "test_multipart_upload_from_bucket_range" {
-#  local bucket_file="bucket-file"
-#
-#  create_large_file "$bucket_file" || error creating file "$bucket_file"
-#  setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
-#
-#  multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 "bytes=0-1000000000" || local upload_result=$?
-#  [[ $upload_result -eq 1 ]] || fail "multipart upload with overly large range should have failed"
-#  [[ $upload_part_copy_error == *"Range specified is not valid"* ]] || fail "unexpected error: $upload_part_copy_error"
-#
-#  range_max=$((5*1024*1024-1))
-#  multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 "bytes=0-$range_max" || local upload_two_result=$?
-#  [[ $upload_two_result -eq 0 ]] || fail "range should be valid"
-#
-#  get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$test_file_folder/$bucket_file-copy" || fail "error retrieving object after upload"
-#  object_size=$(stat -f%z "$test_file_folder/$bucket_file-copy")
-#  [[ object_size -eq $((range_max*4+4)) ]] || fail "object size mismatch ($object_size, $((range_max*4+4)))"
-#
-#  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
-#  delete_test_files $bucket_file
-#}
+@test "test_multipart_upload_from_bucket_range_too_large" {
+  local bucket_file="bucket-file"
+
+  create_large_file "$bucket_file" || fail "error creating file $bucket_file"
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
+
+  multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 "bytes=0-1000000000" || local upload_result=$?
+  [[ $upload_result -eq 1 ]] || fail "multipart upload with overly large range should have failed"
+  log 5 "error: $upload_part_copy_error"
+  [[ $upload_part_copy_error == *"Range specified is not valid"* ]] || [[ $upload_part_copy_error == *"InvalidRange"* ]] || fail "unexpected error: $upload_part_copy_error"
+
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
+  delete_test_files $bucket_file
+}
+
+@test "test_multipart_upload_from_bucket_range_valid" {
+  local bucket_file="bucket-file"
+
+  create_large_file "$bucket_file" || fail "error creating file $bucket_file"
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
+
+  range_max=$((5*1024*1024-1))
+  multipart_upload_from_bucket_range "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 "bytes=0-$range_max" || fail "upload failure"
+
+  get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$test_file_folder/$bucket_file-copy" || fail "error retrieving object after upload"
+  if [[ $(uname) == 'Darwin' ]]; then
+    object_size=$(stat -f%z "$test_file_folder/$bucket_file-copy")
+  else
+    object_size=$(stat --format=%s "$test_file_folder/$bucket_file-copy")
+  fi
+  [[ object_size -eq $((range_max*4+4)) ]] || fail "object size mismatch ($object_size, $((range_max*4+4)))"
+
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
+  delete_test_files $bucket_file
+}

 @test "test-presigned-url-utf8-chars" {
   test_common_presigned_url_utf8_chars "aws"
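The re-enabled range test expects a bytes=9-15 request against the ten-byte object 0123456789 to return only the final byte, since S3 clamps the range end to the object size; the expectation itself can be checked with plain bash string slicing:

# Local illustration of the expected byte-range result (no S3 call involved).
data="0123456789"
echo "${data:9}"   # prints 9 -- offsets 9-15 clamp to the last available byte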
diff --git a/tests/test_common.sh b/tests/test_common.sh
index a56d910..d75733d 100644
--- a/tests/test_common.sh
+++ b/tests/test_common.sh
@@ -461,6 +461,7 @@ test_common_put_bucket_acl() {
   fi

   acl_file="test-acl"
+  create_test_files "$acl_file"

   cat <<EOF > "$test_file_folder"/"$acl_file"
{
@@ -562,6 +563,7 @@ EOF

   get_bucket_policy "$1" "$BUCKET_ONE_NAME" || local get_result=$?
   [[ $get_result -eq 0 ]] || fail "error getting bucket policy after setting"
+  log 5 "$bucket_policy"
   returned_effect=$(echo "$bucket_policy" | jq -r '.Statement[0].Effect')
   [[ $effect == "$returned_effect" ]] || fail "effect mismatch ($effect, $returned_effect)"
   returned_principal=$(echo "$bucket_policy" | jq -r '.Statement[0].Principal')
diff --git a/tests/test_user_aws.sh b/tests/test_user_aws.sh
index c44c521..5e6a834 100755
--- a/tests/test_user_aws.sh
+++ b/tests/test_user_aws.sh
@@ -3,6 +3,8 @@
 source ./tests/test_user_common.sh
 source ./tests/util_users.sh

+export RUN_USERS=true
+
 @test "test_admin_user_aws" {
   test_admin_user "aws"
 }
diff --git a/tests/test_user_s3cmd.sh b/tests/test_user_s3cmd.sh
index dbe3a68..838af32 100755
--- a/tests/test_user_s3cmd.sh
+++ b/tests/test_user_s3cmd.sh
@@ -2,6 +2,9 @@

 source ./tests/test_user_common.sh

+export RUN_S3CMD=true
+export RUN_USERS=true
+
 @test "test_admin_user_s3cmd" {
   test_admin_user "s3cmd"
 }
diff --git a/tests/util.sh b/tests/util.sh
index 18d9f39..87fc47e 100644
--- a/tests/util.sh
+++ b/tests/util.sh
@@ -125,8 +125,7 @@ bucket_exists() {
     return 2
   fi

-  head_bucket "$1" "$2" || local check_result=$?
-  if [[ $check_result -ne 0 ]]; then
+  if ! head_bucket "$1" "$2"; then
     # shellcheck disable=SC2154
     bucket_info=$(echo "$bucket_info" | grep -v "InsecureRequestWarning")
     log 5 "$bucket_info"
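The bucket_exists change above keeps its three-way contract (the return 2 path is visible at the top of the hunk), and callers like get_iam_parameters depend on telling "missing" apart from "check failed". A sketch of consuming it — the 0/1/2 meanings are inferred from this diff, since the rest of bucket_exists is unchanged and not shown:

# Sketch: react differently to "exists", "missing", and "could not check".
bucket_exists "s3api" "$USERS_BUCKET"
case $? in
  0) log 4 "bucket exists" ;;
  1) create_bucket "s3api" "$USERS_BUCKET" || log 2 "error creating bucket" ;;
  *) log 2 "error checking if users bucket exists" ;;
esac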
head_bucket "$1" "$2"; then # shellcheck disable=SC2154 bucket_info=$(echo "$bucket_info" | grep -v "InsecureRequestWarning") log 5 "$bucket_info" diff --git a/tests/util_file.sh b/tests/util_file.sh index b25e1e1..74efd1a 100644 --- a/tests/util_file.sh +++ b/tests/util_file.sh @@ -157,6 +157,7 @@ create_test_file_count() { return 1 fi } + # shellcheck disable=SC2153 if [[ $LOG_LEVEL -ge 5 ]]; then ls_result=$(ls "$test_file_folder"/file_*) log 5 "$ls_result" diff --git a/tests/versity.sh b/tests/versity.sh index f361cd4..452512f 100644 --- a/tests/versity.sh +++ b/tests/versity.sh @@ -1,128 +1,45 @@ -#!/bin/bash +#!/usr/bin/env bash source ./tests/util_file.sh - -check_exe_params_versity() { - if [ -z "$LOCAL_FOLDER" ]; then - echo "No local storage folder set" - return 1 - elif [ -z "$VERSITY_EXE" ]; then - echo "No versity executable location set" - return 1 - elif [ -z "$BACKEND" ]; then - echo "No backend parameter set (options: 'posix')" - return 1 - fi - if [ "$BACKEND" == 's3' ]; then - if [ -z "$AWS_ACCESS_KEY_ID_TWO" ]; then - echo "missing second AWS access key ID for s3 backend" - return 1 - fi - if [ -z "$AWS_SECRET_ACCESS_KEY_TWO" ]; then - echo "missing second AWS secret access key for s3 backend" - return 1 - fi - fi -} - -check_exe_params() { - if [ -z "$AWS_ACCESS_KEY_ID" ]; then - echo "No AWS access key set" - return 1 - elif [ -z "$AWS_SECRET_ACCESS_KEY" ]; then - echo "No AWS secret access key set" - return 1 - elif [ -z "$AWS_PROFILE" ]; then - echo "No AWS profile set" - return 1 - elif [ -z "$AWS_ENDPOINT_URL" ]; then - echo "No AWS endpoint URL set" - return 1 - elif [ -z "$MC_ALIAS" ]; then - echo "No mc alias set" - return 1 - elif [[ $RUN_VERSITYGW != "true" ]] && [[ $RUN_VERSITYGW != "false" ]]; then - echo "RUN_VERSITYGW must be 'true' or 'false'" - return 1 - elif [ -z "$USERS_FOLDER" ]; then - echo "No users folder parameter set" - return 1 - fi - if [[ -r $GOCOVERDIR ]]; then - export GOCOVERDIR=$GOCOVERDIR - fi - if [[ $RUN_VERSITYGW == "true" ]]; then - local check_result - check_exe_params_versity || check_result=$? - if [[ $check_result -ne 0 ]]; then - return 1 - fi - fi -} - -start_versity() { - if [ -z "$VERSITYGW_TEST_ENV" ]; then - if [ -r tests/.env ]; then - source tests/.env - else - echo "Warning: no .env file found in tests folder" - fi - elif [[ $BYPASS_ENV_FILE != "true" ]]; then - # shellcheck source=./tests/.env.default - source "$VERSITYGW_TEST_ENV" - fi - if [ "$GITHUB_ACTIONS" != "true" ] && [ -r "$SECRETS_FILE" ]; then - # shellcheck source=./tests/.secrets - source "$SECRETS_FILE" - else - echo "Warning: no secrets file found" - fi - - check_exe_params || check_result=$? - if [[ $check_result -ne 0 ]]; then - echo "error checking for parameters" - return 1 - fi - - if [ "$RUN_VERSITYGW" == "true" ]; then - run_versity_app || run_result=$? - if [[ $run_result -ne 0 ]]; then - echo "error starting versity apps" - return 1 - fi - fi - - export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_REGION AWS_PROFILE AWS_ENDPOINT_URL VERSITY_EXE -} +source ./tests/iam.sh start_versity_process() { if [[ $# -ne 1 ]]; then - echo "start versity process function requires number" + log 2 "start versity process function requires number" return 1 fi - create_test_file_folder || create_result=$? - if [[ $create_result -ne 0 ]]; then - echo "error creating test log folder" + if ! 
diff --git a/tests/versity.sh b/tests/versity.sh
index f361cd4..452512f 100644
--- a/tests/versity.sh
+++ b/tests/versity.sh
@@ -1,128 +1,45 @@
-#!/bin/bash
+#!/usr/bin/env bash

 source ./tests/util_file.sh
-
-check_exe_params_versity() {
-  if [ -z "$LOCAL_FOLDER" ]; then
-    echo "No local storage folder set"
-    return 1
-  elif [ -z "$VERSITY_EXE" ]; then
-    echo "No versity executable location set"
-    return 1
-  elif [ -z "$BACKEND" ]; then
-    echo "No backend parameter set (options: 'posix')"
-    return 1
-  fi
-  if [ "$BACKEND" == 's3' ]; then
-    if [ -z "$AWS_ACCESS_KEY_ID_TWO" ]; then
-      echo "missing second AWS access key ID for s3 backend"
-      return 1
-    fi
-    if [ -z "$AWS_SECRET_ACCESS_KEY_TWO" ]; then
-      echo "missing second AWS secret access key for s3 backend"
-      return 1
-    fi
-  fi
-}
-
-check_exe_params() {
-  if [ -z "$AWS_ACCESS_KEY_ID" ]; then
-    echo "No AWS access key set"
-    return 1
-  elif [ -z "$AWS_SECRET_ACCESS_KEY" ]; then
-    echo "No AWS secret access key set"
-    return 1
-  elif [ -z "$AWS_PROFILE" ]; then
-    echo "No AWS profile set"
-    return 1
-  elif [ -z "$AWS_ENDPOINT_URL" ]; then
-    echo "No AWS endpoint URL set"
-    return 1
-  elif [ -z "$MC_ALIAS" ]; then
-    echo "No mc alias set"
-    return 1
-  elif [[ $RUN_VERSITYGW != "true" ]] && [[ $RUN_VERSITYGW != "false" ]]; then
-    echo "RUN_VERSITYGW must be 'true' or 'false'"
-    return 1
-  elif [ -z "$USERS_FOLDER" ]; then
-    echo "No users folder parameter set"
-    return 1
-  fi
-  if [[ -r $GOCOVERDIR ]]; then
-    export GOCOVERDIR=$GOCOVERDIR
-  fi
-  if [[ $RUN_VERSITYGW == "true" ]]; then
-    local check_result
-    check_exe_params_versity || check_result=$?
-    if [[ $check_result -ne 0 ]]; then
-      return 1
-    fi
-  fi
-}
-
-start_versity() {
-  if [ -z "$VERSITYGW_TEST_ENV" ]; then
-    if [ -r tests/.env ]; then
-      source tests/.env
-    else
-      echo "Warning: no .env file found in tests folder"
-    fi
-  elif [[ $BYPASS_ENV_FILE != "true" ]]; then
-    # shellcheck source=./tests/.env.default
-    source "$VERSITYGW_TEST_ENV"
-  fi
-  if [ "$GITHUB_ACTIONS" != "true" ] && [ -r "$SECRETS_FILE" ]; then
-    # shellcheck source=./tests/.secrets
-    source "$SECRETS_FILE"
-  else
-    echo "Warning: no secrets file found"
-  fi
-
-  check_exe_params || check_result=$?
-  if [[ $check_result -ne 0 ]]; then
-    echo "error checking for parameters"
-    return 1
-  fi
-
-  if [ "$RUN_VERSITYGW" == "true" ]; then
-    run_versity_app || run_result=$?
-    if [[ $run_result -ne 0 ]]; then
-      echo "error starting versity apps"
-      return 1
-    fi
-  fi
-
-  export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_REGION AWS_PROFILE AWS_ENDPOINT_URL VERSITY_EXE
-}
+source ./tests/iam.sh

 start_versity_process() {
   if [[ $# -ne 1 ]]; then
-    echo "start versity process function requires number"
+    log 2 "start versity process function requires number"
     return 1
   fi
-  create_test_file_folder || create_result=$?
-  if [[ $create_result -ne 0 ]]; then
-    echo "error creating test log folder"
+  if ! create_test_file_folder; then
+    log 2 "error creating test log folder"
     return 1
   fi

-  base_command+=(">" "$test_file_folder/versity_log_$1.txt" "2>&1")
-  log 5 "versity command: ${base_command[*]}"
-  ("${base_command[@]}") &
+  IFS=' ' read -r -a full_command <<< "${base_command[@]}"
+  log 5 "versity command: ${full_command[*]}"
+  if [ -n "$VERSITY_LOG_FILE" ]; then
+    "${full_command[@]}" >> "$VERSITY_LOG_FILE" 2>&1 &
+  else
+    "${full_command[@]}" 2>&1 &
+  fi
   # shellcheck disable=SC2181
   if [[ $? -ne 0 ]]; then
-    echo "error running versitygw command: $(cat "$test_file_folder/versity_log_$1.txt")"
+    sleep 1
+    if [ -n "$VERSITY_LOG_FILE" ]; then
+      log 2 "error running versitygw command: $(cat "$VERSITY_LOG_FILE")"
+    fi
     return 1
   fi
   eval versitygw_pid_"$1"=$!
+  process_info="Versity process $1, PID $!"
+  if [ -n "$VERSITY_LOG_FILE" ]; then echo "$process_info" >> "$VERSITY_LOG_FILE"; fi
+  log 4 "$process_info"
   local pid
   eval pid=\$versitygw_pid_"$1"
   sleep 1
-  local proc_check
-  check_result=$(kill -0 "$pid" 2>&1) || proc_check=$?
-  if [[ $proc_check -ne 0 ]]; then
-    echo "versitygw failed to start: $check_result"
-    echo "log data: $(cat "$test_file_folder/versity_log_$1.txt")"
+  if ! check_result=$(kill -0 "$pid" 2>&1); then
+    log 2 "versitygw failed to start: $check_result"
+    if [ -n "$VERSITY_LOG_FILE" ]; then
+      log 2 "log data: $(cat "$VERSITY_LOG_FILE")"
+    fi
     return 1
   fi
   export versitygw_pid_"$1"
@@ -130,10 +47,15 @@ start_versity_process() {

 run_versity_app_posix() {
   if [[ $# -ne 3 ]]; then
-    echo "run versity app w/posix command requires access ID, secret key, process number"
+    log 2 "run versity app w/posix command requires access ID, secret key, process number"
     return 1
   fi
-  base_command=("$VERSITY_EXE" --access="$1" --secret="$2" --region="$AWS_REGION" --iam-dir="$USERS_FOLDER")
+  base_command=("$VERSITY_EXE" --access="$1" --secret="$2" --region="$AWS_REGION")
+  if [ -n "$RUN_USERS" ]; then
+    # shellcheck disable=SC2153
+    IFS=' ' read -r -a iam_array <<< "$IAM_PARAMS"
+  fi
+  base_command+=("${iam_array[@]}")
   if [ -n "$CERT" ] && [ -n "$KEY" ]; then
     base_command+=(--cert "$CERT" --key "$KEY")
   fi
@@ -143,10 +65,8 @@ run_versity_app_posix() {
   base_command+=(posix "$LOCAL_FOLDER")
   export base_command

-  local versity_result
-  start_versity_process "$3" || versity_result=$?
-  if [[ $versity_result -ne 0 ]]; then
-    echo "error starting versity process"
+  if ! start_versity_process "$3"; then
+    log 2 "error starting versity process"
     return 1
   fi
   return 0
@@ -154,20 +74,23 @@ run_versity_app_posix() {

 run_versity_app_s3() {
   if [[ $# -ne 1 ]]; then
-    echo "run versity app w/s3 command requires process number"
+    log 2 "run versity app w/s3 command requires process number"
     return 1
   fi
-  base_command=("$VERSITY_EXE" --port=":7071" --access="$AWS_ACCESS_KEY_ID" --secret="$AWS_SECRET_ACCESS_KEY")
+  base_command=("$VERSITY_EXE" --access="$AWS_ACCESS_KEY_ID" --secret="$AWS_SECRET_ACCESS_KEY")
   if [ -n "$CERT" ] && [ -n "$KEY" ]; then
     base_command+=(--cert "$CERT" --key "$KEY")
   fi
+  if [ -n "$PORT_TWO" ]; then
+    base_command+=(--port ":$PORT_TWO")
+  else
+    base_command+=(--port ":7071")
+  fi
   base_command+=(s3 --access="$AWS_ACCESS_KEY_ID_TWO" --secret="$AWS_SECRET_ACCESS_KEY_TWO" --region="$AWS_REGION" --endpoint=https://s3.amazonaws.com)
   export base_command

-  local versity_result
-  start_versity_process "$1" || versity_result=$?
-  if [[ $versity_result -ne 0 ]]; then
-    echo "error starting versity process"
+  if ! start_versity_process "$1"; then
+    log 2 "error starting versity process"
     return 1
   fi
   return 0
start_versity_process "$1"; then + log 2 "error starting versity process" return 1 fi return 0 @@ -175,38 +98,45 @@ run_versity_app_s3() { run_versity_app() { if [[ $BACKEND == 'posix' ]]; then - run_versity_app_posix "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "1" || result_one=$? - if [[ $result_one -ne 0 ]]; then - echo "error starting versity app" + if ! run_versity_app_posix "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "1"; then + log 2 "error starting versity app" return 1 fi elif [[ $BACKEND == 's3' ]]; then - run_versity_app_posix "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "1" || result_one=$? - if [[ $result_one -ne 0 ]]; then - echo "error starting versity app" + if ! run_versity_app_posix "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "1"; then + log 2 "error starting versity app" return 1 fi - run_versity_app_s3 "2" || result_two=$? - if [[ $result_two -ne 0 ]]; then - echo "error starting second versity app" + if ! run_versity_app_s3 "2"; then + log 2 "error starting second versity app" return 1 fi else - echo "unrecognized backend type $BACKEND" + log 2 "unrecognized backend type $BACKEND" return 1 fi + if [[ $IAM_TYPE == "s3" ]]; then + if ! bucket_exists "s3api" "$USERS_BUCKET"; then + if ! create_bucket "s3api" "$USERS_BUCKET"; then + log 2 "error creating IAM bucket" + return 1 + fi + fi + fi } stop_single_process() { if [[ $# -ne 1 ]]; then - echo "stop single process function requires process ID" + log 2 "stop single process function requires process ID" return 1 fi - if ps -p "$1" > /dev/null; then + log 5 "stop process with ID: $1" + # shellcheck disable=SC2086 + if ps_result=$(ps -p $1 2>&1) > /dev/null; then kill "$1" wait "$1" || true else - echo "Process with PID $1 does not exist." + log 3 "error stopping versity app: $ps_result" fi } @@ -214,18 +144,14 @@ stop_versity() { if [ "$RUN_VERSITYGW" == "false" ]; then return fi - local result_one - local result_two # shellcheck disable=SC2154 - stop_single_process "$versitygw_pid_1" || result_one=$? - if [[ $result_one -ne 0 ]]; then - echo "error stopping versity process" + if ! stop_single_process "$versitygw_pid_1"; then + log 2 "error stopping versity process" fi if [[ $BACKEND == 's3' ]]; then # shellcheck disable=SC2154 - stop_single_process "$versitygw_pid_2" || result_two=$? - if [[ $result_two -ne 0 ]]; then - echo "error stopping versity process two" + if ! stop_single_process "$versitygw_pid_2"; then + log 2 "error stopping versity process two" fi fi } \ No newline at end of file