From 17adbeca9ea60da147711b81325aaf2808959e8a Mon Sep 17 00:00:00 2001 From: Luke McCrone Date: Fri, 16 Jan 2026 14:34:46 -0300 Subject: [PATCH] test: S3 command coverage reporting --- .github/workflows/system.yml | 34 +++ tests/commands/command.sh | 12 +- tests/commands/get_bucket_location.sh | 1 + tests/drivers/file.sh | 12 +- .../get_bucket_location.sh | 10 +- tests/drivers/openssl.sh | 4 + tests/env.sh | 49 +++- tests/report.sh | 244 +++++++++++++++++- tests/setup.sh | 34 +-- tests/setup_unit.sh | 29 +++ tests/test_report.sh | 117 +++++++++ tests/versity.sh | 18 +- 12 files changed, 515 insertions(+), 49 deletions(-) create mode 100644 tests/setup_unit.sh create mode 100755 tests/test_report.sh diff --git a/.github/workflows/system.yml b/.github/workflows/system.yml index b46314a..2deb36e 100644 --- a/.github/workflows/system.yml +++ b/.github/workflows/system.yml @@ -104,6 +104,7 @@ jobs: AUTOGENERATE_USERS: true USER_AUTOGENERATION_PREFIX: github-actions-test- AWS_REGION: ${{ matrix.AWS_REGION }} + COVERAGE_LOG: coverage.log run: | make testbin export AWS_ACCESS_KEY_ID=ABCDEFGHIJKLMNOPQRST @@ -124,6 +125,23 @@ jobs: BYPASS_ENV_FILE=true ${{ github.workspace }}/tests/setup_static.sh fi BYPASS_ENV_FILE=true $HOME/bin/bats ${{ github.workspace }}/$RUN_SET + if [ -e "$COVERAGE_LOG" ]; then + cat $COVERAGE_LOG + fi + + - name: Ensure coverage file exists, and generate working name + run: | + touch coverage.log + run_set="${{ matrix.RUN_SET }}" + SAFE_RUN_SET="${run_set//\//-}" + echo "SAFE_RUN_SET=$SAFE_RUN_SET" >> $GITHUB_ENV + + - name: Upload command coverage log + uses: actions/upload-artifact@v4 + with: + name: coverage-${{ env.SAFE_RUN_SET }}-${{ matrix.RECREATE_BUCKETS }} + path: coverage.log + retention-days: 1 - name: Time report run: | @@ -134,3 +152,19 @@ jobs: - name: Coverage report run: | go tool covdata percent -i=cover + + print-coverage: + runs-on: ubuntu-latest + needs: [build] + steps: + - name: Download all S3 ops artifacts + uses: actions/download-artifact@v4 + with: + path: artifacts + + - name: Merge (sort | uniq) + run: | + find artifacts -type f -name coverage.log -exec sed '/^\s*$/d' {} \; | sort -u > combined-coverage.log + + echo "Command coverage:" + cat combined-coverage.log diff --git a/tests/commands/command.sh b/tests/commands/command.sh index af77b11..f11c930 100644 --- a/tests/commands/command.sh +++ b/tests/commands/command.sh @@ -15,18 +15,24 @@ # under the License. source ./tests/logger.sh +source ./tests/report.sh send_command() { if [ $# -eq 0 ]; then return 1 fi - if [ -n "$COMMAND_LOG" ]; then + if [ -n "$COMMAND_LOG" ] || [ -n "$COVERAGE_LOG" ]; then args=(AWS_ACCESS_KEY_ID="$AWS_ACCESS_KEY_ID" "$@") if ! mask_arg_array "${args[@]}"; then return 1 fi - # shellcheck disable=SC2154 - echo "${masked_args[*]}" >> "$COMMAND_LOG" + if [ -n "$COMMAND_LOG" ]; then + # shellcheck disable=SC2154 + echo "${masked_args[*]}" >> "$COMMAND_LOG" + fi + if [ -n "$COVERAGE_LOG" ]; then + record_command_v2 "${masked_args[*]}" + fi fi local command_result=0 "$@" || command_result=$? 
diff --git a/tests/commands/get_bucket_location.sh b/tests/commands/get_bucket_location.sh index c12da68..7b3f15f 100644 --- a/tests/commands/get_bucket_location.sh +++ b/tests/commands/get_bucket_location.sh @@ -67,6 +67,7 @@ get_bucket_location_s3cmd() { log 2 "error getting bucket location: $location" return 1 fi + log 5 "s3cmd bucket location info: $info" bucket_location=$(echo "$info" | grep -o 'Location:.*' | awk '{print $2}') return 0 } diff --git a/tests/drivers/file.sh b/tests/drivers/file.sh index 0d49577..ddd6523 100644 --- a/tests/drivers/file.sh +++ b/tests/drivers/file.sh @@ -153,9 +153,19 @@ chunked_upload_trailer_success() { } get_file_name() { + if ! get_file_name_with_prefix "test-file"; then + return 1 + fi + return 0 +} + +get_file_name_with_prefix() { + if ! check_param_count_v2 "prefix" 1 $#; then + return 1 + fi if ! uuid=$(uuidgen 2>&1); then log 2 "error getting UUID: $uuid" return 1 fi - echo "test-file-${uuid}" + echo "$1-${uuid}" } diff --git a/tests/drivers/get_bucket_location/get_bucket_location.sh b/tests/drivers/get_bucket_location/get_bucket_location.sh index a56a913..3712f32 100644 --- a/tests/drivers/get_bucket_location/get_bucket_location.sh +++ b/tests/drivers/get_bucket_location/get_bucket_location.sh @@ -24,8 +24,14 @@ get_check_bucket_location_various() { fi # shellcheck disable=SC2154 if [ "$AWS_REGION" == "us-east-1" ]; then - if [ "$bucket_location" != "null" ]; then - log 2 "expected 'null' for 'us-east-1' region, got : '$bucket_location'" + # s3cmd returns 'us-east-1' here, the others return null + if [ "$1" == "s3cmd" ]; then + expected_location="us-east-1" + else + expected_location="null" + fi + if [ "$bucket_location" != "$expected_location" ]; then + log 2 "expected '$expected_location' for 'us-east-1' region, got : '$bucket_location'" return 1 fi elif [ "$AWS_REGION" != "$bucket_location" ]; then diff --git a/tests/drivers/openssl.sh b/tests/drivers/openssl.sh index 45698e4..ac64fe6 100644 --- a/tests/drivers/openssl.sh +++ b/tests/drivers/openssl.sh @@ -15,6 +15,7 @@ # under the License. source ./tests/drivers/xml.sh +source ./tests/report.sh write_openssl_command_to_command_log() { if ! check_param_count_v2 "command file" 1 $#; then @@ -54,6 +55,9 @@ send_via_openssl() { if [ -n "$COMMAND_LOG" ]; then write_openssl_command_to_command_log "$1" fi + if ! record_openssl_command "$1"; then + log 3 "error recording openssl command" + fi if ! result=$(openssl s_client -connect "$host" -ign_eof < "$1" 2>&1); then log 2 "error sending openssl command: $result" return 1 diff --git a/tests/env.sh b/tests/env.sh index 4f4ed57..7d74fec 100644 --- a/tests/env.sh +++ b/tests/env.sh @@ -18,8 +18,29 @@ source ./tests/versity.sh base_setup() { check_env_vars - if [ "$RUN_VERSITYGW" == "true" ]; then - run_versity_app + if [ "$RUN_VERSITYGW" == "true" ] && [ "$UNIT_TEST" != "true" ]; then + if ! run_versity_app; then + exit 1 + fi + fi +} + +setup_test_log_file() { + if [ -n "$TEST_LOG_FILE" ]; then + if ! error=$(touch "$TEST_LOG_FILE.tmp" 2>&1); then + log 2 "error creating log file: $error" + exit 1 + fi + export TEST_LOG_FILE + fi +} + +remove_test_log_file_if_desired() { + if [ "$REMOVE_TEST_FILE_FOLDER" == "true" ]; then + log 6 "removing test file folder" + if ! 
error=$(rm -rf "${TEST_FILE_FOLDER:?}" 2>&1); then + log 3 "unable to remove test file folder: $error" + fi fi } @@ -140,6 +161,9 @@ check_universal_vars() { if [ -n "$MAX_FILE_DOWNLOAD_CHUNK_SIZE" ]; then export MAX_FILE_DOWNLOAD_CHUNK_SIZE fi + if [ -n "$COVERAGE_LOG" ]; then + export COVERAGE_LOG + fi check_aws_vars @@ -289,3 +313,24 @@ check_user_vars() { log 1 "unrecognized IAM_TYPE value: $IAM_TYPE" exit 1 } + +log_cleanup() { + if [ -e "$TEST_LOG_FILE.tmp" ]; then + if ! error=$(cat "$TEST_LOG_FILE.tmp" >> "$TEST_LOG_FILE" 2>&1); then + log 3 "error appending temp log to main log: $error" + fi + if ! delete_temp_log_if_exists; then + log 3 "error deleting temp log" + fi + fi +} + +delete_temp_log_if_exists() { + if [ -e "$TEST_LOG_FILE.tmp" ]; then + if ! error=$(rm "$TEST_LOG_FILE.tmp" 2>&1); then + log 2 "error deleting temp log: $error" + return 1 + fi + fi + return 0 +} diff --git a/tests/report.sh b/tests/report.sh index 01049cf..b4dcfad 100644 --- a/tests/report.sh +++ b/tests/report.sh @@ -14,6 +14,8 @@ # specific language governing permissions and limitations # under the License. +source ./tests/drivers/params.sh + check_and_create_database() { # Define SQL commands to create a table SQL_CREATE_TABLE="CREATE TABLE IF NOT EXISTS entries ( @@ -107,4 +109,244 @@ EOF sqlite3 "$COVERAGE_DB" "DROP TABLE entries;" log 5 "Database '$COVERAGE_DB' and table 'entries' created successfully." -} \ No newline at end of file +} + +get_curl_method() { + if ! check_param_count_v2 "command string" 1 $#; then + return 1 + fi + local method + if [[ "$1" =~ (^|[[:space:]])-([^-[:space:]]*)I([^-[:space:]]*) ]]; then + method="HEAD" + elif [[ "$1" =~ (^|[[:space:]])-X[[:space:]]*([^[:space:]]+) ]]; then + method="${BASH_REMATCH[2]}" + else + method="GET" + fi + echo "$method" +} + +parse_path_and_get_route() { + if ! check_param_count_v2 "string" 1 $#; then + return 1 + fi + + local url path + url="$(echo "$1" | grep -oE 'https?://[^" ]+' | head -n 1)" + + # Only accept http/https URLs with a path + if [ -z "$url" ]; then + echo "UNKNOWN" + return 0 + fi + + # Strip protocol + host + port + path="$(echo "$url" | sed -E 's|https?://[^/]+||')" + + # Normalize: remove leading/trailing slashes + path="${path#/}" + path="${path%/}" + + if ! get_route "$path"; then + log 2 "error getting route" + return 1 + fi + return 0 +} + +get_route() { + if ! check_param_count_v2 "string" 1 $#; then + return 1 + fi + + if [ "$1" == '/' ]; then + echo "MAIN" + return 0 + fi + + # Split path on '/' + local route_parts + IFS='/' read -r -a route_parts <<< "$1" + + if [[ -z "$1" ]]; then + echo "MAIN" + elif [[ "${#route_parts[@]}" -eq 1 ]]; then + echo "BUCKET" + else + echo "OBJECT" + fi + return 0 +} + +get_query() { + # Extract query string (everything after '?') + local query + query="${1#*\?}" + # No query present + if [[ "$query" == "$1" ]]; then + echo "" + return 0 + fi + + # Remove fragment if present + query="${query%%#*}" + + local query_keys=() + while [[ $query ]]; do + key="${query%%=*}" # Extract key + query_keys+=("$key") + + # If no more keys + if [[ "$query" != *"&"* ]]; then + break + fi + + query="${query#*&}" # Remove extracted part from query + done + + echo "${query_keys[*]}" +} + +check_for_copy_source() { + if ! 
check_param_count_v2 "'OPENSSL' or 'CURL', string or file" 2 $#; then + return 2 + fi + if [ "$1" == "CURL" ]; then + if [[ "$2" == *"x-amz-copy-source"* ]]; then + return 0 + fi + return 1 + elif [ "$1" == "OPENSSL" ]; then + if grep -qi 'x-amz-copy-source' "$2"; then + return 0 + fi + return 1 + fi + log 2 "invalid type param: $1" + return 2 +} + +parse_path_and_get_query() { + if ! check_param_count_v2 "string" 1 $#; then + return 1 + fi + + local url + url="$(echo "$1" | grep -oE 'https?://[^" ]+' | head -n 1)" + + # Must look like a URL + if [ -z "$url" ]; then + echo "" + return 0 + fi + + get_query "$url" +} + +parse_curl_rest_command() { + if ! check_param_count_v2 "command string" 1 $#; then + return 1 + fi + local method route query + if ! method=$(get_curl_method "$1" 2>&1); then + echo "error retrieving method: $method" + return 1 + fi + if ! route=$(parse_path_and_get_route "$1" 2>&1); then + echo "error retrieving route: $route" + return 1 + fi + if ! query=$(parse_path_and_get_query "$1" 2>&1); then + echo "error retrieving query: $query" + return 1 + fi + output_string="$method $route $query" + if [[ "$output_string" == "PUT OBJECT"* ]] && check_for_copy_source "CURL" "$1"; then + output_string+=" x-amz-copy-source" + fi + log 5 "output string: $output_string" + echo "$output_string" + return 0 +} + +get_openssl_method_route_queries() { + if ! check_param_count_v2 "command file" 1 $#; then + return 1 + fi + + local method route_string route query + + method=$(awk 'NR==1{print $1}' "$1") + route_string=$(awk 'NR==1{print $2}' "$1") + route=$(get_route "$route_string") + query=$(get_query "$route_string") + + echo "$method $route $query" + return 0 +} + +write_to_coverage_log() { + if ! check_param_count_v2 "string" 1 $#; then + return 1 + fi + echo "$1" >> "$COVERAGE_LOG" + sort "$COVERAGE_LOG" | uniq > "${COVERAGE_LOG}.tmp" + mv "${COVERAGE_LOG}.tmp" "$COVERAGE_LOG" +} + +record_openssl_command() { + if [ -z "$COVERAGE_LOG" ]; then + return 0 + fi + if ! check_param_count_v2 "command file" 1 $#; then + return 1 + fi + if ! command_info=$(get_openssl_method_route_queries "$1" 2>&1); then + log 2 "error getting command info: $command_info" + return 1 + fi + if [[ "$command_info" == "PUT OBJECT"* ]] && check_for_copy_source "OPENSSL" "$1"; then + command_info+=" x-amz-copy-source" + fi + if ! write_to_coverage_log "$command_info"; then + log 2 "error writing to coverage log" + return 1 + fi + return 0 +} + +parse_command_info() { + if ! check_param_count_v2 "command string" 1 $#; then + return 1 + fi + if [[ "$1" == *"curl "* ]]; then + if ! command_info=$(parse_curl_rest_command "$1" 2>&1); then + echo "error parsing rest command: $command_info" + return 1 + fi + else + command_info="OTHER" + fi +} + +record_command_v2() { + if [ -z "$COVERAGE_LOG" ]; then + log 5 "no coverage log set" + return 0 + fi + if ! check_param_count_v2 "command string" 1 $#; then + return 1 + fi + log 5 "parsing command '$1'" + if ! parse_command_info "$1"; then + log 2 "error parsing command info" + return 1 + fi + if [ "$command_info" == "OTHER" ]; then + return 0 + fi + if ! write_to_coverage_log "$command_info"; then + log 2 "error writing to coverage log" + return 1 + fi +} diff --git a/tests/setup.sh b/tests/setup.sh index 046199e..4b22538 100644 --- a/tests/setup.sh +++ b/tests/setup.sh @@ -66,13 +66,7 @@ static_user_versitygw_setup() { setup() { base_setup - if [ -n "$TEST_LOG_FILE" ]; then - if ! 
error=$(touch "$TEST_LOG_FILE.tmp" 2>&1); then - log 2 "error creating log file: $error" - exit 1 - fi - export TEST_LOG_FILE - fi + setup_test_log_file if [ "$RUN_USERS" == "true" ] && [ "$DIRECT" != "true" ] && [ "$CREATE_STATIC_USERS_IF_NONEXISTENT" == "true" ]; then if ! static_user_versitygw_setup; then @@ -104,16 +98,6 @@ setup() { log 4 "********** END SETUP **********" } -delete_temp_log_if_exists() { - if [ -e "$TEST_LOG_FILE.tmp" ]; then - if ! error=$(rm "$TEST_LOG_FILE.tmp" 2>&1); then - log 2 "error deleting temp log: $error" - return 1 - fi - fi - return 0 -} - post_versity_cleanup() { if [[ $LOG_LEVEL -ge 5 ]] || [[ -n "$TIME_LOG" ]]; then end_time=$(date +%s) @@ -140,14 +124,7 @@ post_versity_cleanup() { if ! delete_command_log; then log 3 "error deleting command log" fi - if [ -e "$TEST_LOG_FILE.tmp" ]; then - if ! error=$(cat "$TEST_LOG_FILE.tmp" >> "$TEST_LOG_FILE" 2>&1); then - log 3 "error appending temp log to main log: $error" - fi - if ! delete_temp_log_if_exists; then - log 3 "error deleting temp log" - fi - fi + log_cleanup } # bats teardown function @@ -173,12 +150,7 @@ teardown() { if [ "$AUTOGENERATE_USERS" == "true" ] && ! delete_autogenerated_users; then log 3 "error deleting autocreated users" fi - if [ "$REMOVE_TEST_FILE_FOLDER" == "true" ]; then - log 6 "removing test file folder" - if ! error=$(rm -rf "${TEST_FILE_FOLDER:?}" 2>&1); then - log 3 "unable to remove test file folder: $error" - fi - fi + remove_test_log_file_if_desired stop_versity post_versity_cleanup } diff --git a/tests/setup_unit.sh b/tests/setup_unit.sh new file mode 100644 index 0000000..a42386d --- /dev/null +++ b/tests/setup_unit.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bats + +# Copyright 2026 Versity Software +# This file is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +source ./tests/env.sh + +export UNIT_TEST=true + +setup() { + base_setup + setup_test_log_file +} + +teardown() { + remove_test_log_file_if_desired + log_cleanup +} \ No newline at end of file diff --git a/tests/test_report.sh b/tests/test_report.sh new file mode 100755 index 0000000..67c49ee --- /dev/null +++ b/tests/test_report.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bats + +# Copyright 2026 Versity Software +# This file is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+
+load ./bats-support/load
+load ./bats-assert/load
+
+source ./tests/logger.sh
+source ./tests/report.sh
+source ./tests/setup_unit.sh
+
+@test "reporting - parse curl method" {
+  tests=(" -Iks" "" " -X PUT" " -X DELETE")
+  expected_results=("HEAD" "GET" "PUT" "DELETE")
+
+  for ((i=0; i<${#tests[@]}; i++)); do
+    echo "test: ${tests[$i]}, expected result: ${expected_results[$i]}"
+    run get_curl_method "${tests[$i]}"
+    assert_output "${expected_results[$i]}"
+  done
+}
+
+@test "reporting - parse curl route" {
+  tests=("http://localhost:7070/bucket_name" "http://localhost:7070/bucket_name/file_name" "http://localhost:7070/" "")
+  expected_results=("BUCKET" "OBJECT" "MAIN" "UNKNOWN")
+
+  for ((i=0; i<${#tests[@]}; i++)); do
+    echo "test: ${tests[$i]}, expected result: ${expected_results[$i]}"
+    run parse_path_and_get_route "${tests[$i]}"
+    assert_output "${expected_results[$i]}"
+  done
+}
+
+@test "reporting - get query" {
+  tests=("https://localhost:7070/?query1=" "https://localhost/bucket?another=" "https://1.2.3.4/" "http://localhost/bucket/file?third")
+  expected_results=("query1" "another" "" "third")
+
+  for ((i=0; i<${#tests[@]}; i++)); do
+    echo "test: ${tests[$i]}, expected result: ${expected_results[$i]}"
+    run get_query "${tests[$i]}"
+    assert_output "${expected_results[$i]}"
+  done
+}
+
+@test "reporting - parse curl rest command" {
+  tests=("curl -iks https://localhost:7070/versity-gwtest-bucket-one-1-20260127113351?location= -H Authorization: AWS4-HMAC-SHA256 Credential=AKIA6****/20260127/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=68c0b96180a5791be8a10335c10d302d31d358c4bc6028aec94faf502f3a185e -H host: localhost:7070 -H x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 -H x-amz-date: 20260127T143355Z")
+  expected_command=("GET BUCKET location")
+
+  for ((i=0; i<${#tests[@]}; i++)); do
+    run parse_curl_rest_command "${tests[$i]}"
+    assert_output "${expected_command[$i]}"
+  done
+}
+
+@test "openssl - get method, route, and queries" {
+  tests=("GET / HTTP/1.1
+  Authorization: AWS4-HMAC-SHA256 Credential=AKIAQJVWFRZQNI6LF3W7/20250911/us-east-1/s3/aws4_request,SignedHeaders=x-amz-content-sha256;x-amz-date,Signature=86ffbe2317caddcac569b25aa9b8e8db4a613a639b2a402cf4a9dc0e975ba997
+  x-amz-content-sha256:UNSIGNED-PAYLOAD"
+  "PUT /bucket/file?prefix=dummy HTTP/1.1
+  Authorization: AWS4-HMAC-SHA256 Credential=AKIAQJVWFRZQNI6LF3W7/20250911/us-east-1/s3/aws4_request,SignedHeaders=x-amz-content-sha256;x-amz-date,Signature=86ffbe2317caddcac569b25aa9b8e8db4a613a639b2a402cf4a9dc0e975ba997
+  x-amz-content-sha256:UNSIGNED-PAYLOAD")
+  expected_output=("GET MAIN " "PUT OBJECT prefix")
+
+  for ((i=0; i<${#tests[@]}; i++)); do
+    if ! file_name=$(get_file_name_with_prefix "openssl" 2>&1); then
+      return 1
+    fi
+    echo "${tests[$i]}" > "$file_name"
+    run get_openssl_method_route_queries "$file_name"
+    assert_output "${expected_output[$i]}"
+  done
+}
+
+@test "report - check for copy header value" {
+  test_clients=("OPENSSL" "OPENSSL" "CURL" "CURL" "CUR")
+  test_data=("GET / HTTP/1.1
+  Authorization: AWS4-HMAC-SHA256 Credential=AKIAQJVWFRZQNI6LF3W7/20250911/us-east-1/s3/aws4_request,SignedHeaders=x-amz-content-sha256;x-amz-date,Signature=86ffbe2317caddcac569b25aa9b8e8db4a613a639b2a402cf4a9dc0e975ba997
+  x-amz-content-sha256:UNSIGNED-PAYLOAD"
+  "PUT /bucket/file?prefix=dummy HTTP/1.1
+  Authorization: AWS4-HMAC-SHA256 
Credential=AKIAQJVWFRZQNI6LF3W7/20250911/us-east-1/s3/aws4_request,SignedHeaders=x-amz-content-sha256;x-amz-date,Signature=86ffbe2317caddcac569b25aa9b8e8db4a613a639b2a402cf4a9dc0e975ba997
+  x-amz-copy-source:something"
+  "curl -ks -w %{http_code} -X PUT https://localhost:7070/versity-gwtest-bucket-one-1-20260129133816/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29-copy -H Authorization: AWS4-HMAC-SHA256 Credential=AKIA6****/20260129/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-copy-source;x-amz-date,Signature=59091238ab6f297fa79201c90c2e77707177942ef1ba1c78ba31ec735f109477 -H host: localhost:7070 -H x-amz-content-sha256: UNSIGNED-PAYLOAD -H x-amz-copy-source: versity-gwtest-bucket-one-1-20260129133816/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29 -H x-amz-date: 20260129T163817Z -o /Users/lukemccrone/devel/versitygw/versity-gwtest-files/result.txt -T /Users/lukemccrone/devel/versitygw/versity-gwtest-files/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29"
+  "curl -ks -w %{http_code} -X PUT https://localhost:7070/versity-gwtest-bucket-one-1-20260129133816/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29 -H Authorization: AWS4-HMAC-SHA256 Credential=AKIA6****/20260129/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=3f0d83d829b502ed3e5d7c66de109151df10ce76e866def1ccdd46e48bde66ca -H host: localhost:7070 -H x-amz-content-sha256: 778e1535066c2e3def76239d1326c019f5548480d68fd13a1d68942b1eb1b6c5 -H x-amz-date: 20260129T163817Z -T /Users/lukemccrone/devel/versitygw/versity-gwtest-files/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29 -o /Users/lukemccrone/devel/versitygw/versity-gwtest-files/output.txt"
+  "curl -ks -w %{http_code} -X PUT https://localhost:7070/versity-gwtest-bucket-one-1-20260129133816/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29 -H Authorization: AWS4-HMAC-SHA256 Credential=AKIA6****/20260129/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=3f0d83d829b502ed3e5d7c66de109151df10ce76e866def1ccdd46e48bde66ca -H host: localhost:7070 -H x-amz-content-sha256: 778e1535066c2e3def76239d1326c019f5548480d68fd13a1d68942b1eb1b6c5 -H x-amz-date: 20260129T163817Z -T /Users/lukemccrone/devel/versitygw/versity-gwtest-files/test-file-ED302D34-1A3F-47D5-B3B7-78DF01943C29 -o /Users/lukemccrone/devel/versitygw/versity-gwtest-files/output.txt")
+  expected_responses=(1 0 0 1 2)
+
+  for ((i=0; i<${#test_clients[@]}; i++)); do
+    echo "test $i"
+    if [ "${test_clients[$i]}" == "OPENSSL" ]; then
+      if ! file_name=$(get_file_name_with_prefix "openssl" 2>&1); then
+        return 1
+      fi
+      echo "${test_data[$i]}" > "$file_name"
+      data_param=$file_name
+    else
+      data_param=${test_data[$i]}
+    fi
+    run check_for_copy_source "${test_clients[$i]}" "$data_param"
+    if [ "${expected_responses[$i]}" -eq 0 ]; then
+      assert_success
+    else
+      assert_failure "${expected_responses[$i]}"
+    fi
+  done
+}
diff --git a/tests/versity.sh b/tests/versity.sh
index 5a7d938..ff9e3fb 100644
--- a/tests/versity.sh
+++ b/tests/versity.sh
@@ -26,9 +26,9 @@ start_versity_process() {
     sleep 1
     if [ -n "$VERSITY_LOG_FILE" ]; then
       log 1 "error running versitygw command: $(cat "$VERSITY_LOG_FILE")"
-      exit 1
+      return 1
     fi
-    exit 1
+    return 1
   fi
   eval versitygw_pid_"$1"=$!
   if [ -n "$VERSITY_LOG_FILE" ]; then
@@ -45,7 +45,7 @@ start_versity_process() {
     if [ -n "$VERSITY_LOG_FILE" ]; then
       log 1 "log data: $(cat "$VERSITY_LOG_FILE")"
     fi
-    exit 1
+    return 1
   fi
   export versitygw_pid_"$1"
 }
@@ -67,7 +67,7 @@ build_run_and_log_command() {
 
 run_versity_app_posix() {
   if ! 
check_param_count "run_versity_app_posix" "access ID, secret key, versityid app index" 3 $#; then - exit 1 + return 1 fi base_command=("$VERSITY_EXE" --access="$1" --secret="$2" --region="$AWS_REGION") if [ -n "$RUN_USERS" ]; then @@ -94,7 +94,7 @@ run_versity_app_posix() { run_versity_app_scoutfs() { if ! check_param_count "run_versity_app_scoutfs" "access ID, secret key, versityid app index" 3 $#; then - exit 1 + return 1 fi base_command=("$VERSITY_EXE" --access="$1" --secret="$2" --region="$AWS_REGION" --iam-dir="$USERS_FOLDER") if [ -n "$CERT" ] && [ -n "$KEY" ]; then @@ -112,7 +112,7 @@ run_versity_app_scoutfs() { run_versity_app_s3() { if ! check_param_count "run_versity_app_s3" "versityid app index" 1 $#; then - exit 1 + return 1 fi base_command=("$VERSITY_EXE" --access="$AWS_ACCESS_KEY_ID" --secret="$AWS_SECRET_ACCESS_KEY" --region="$AWS_REGION") if [ -n "$CERT" ] && [ -n "$KEY" ]; then @@ -140,7 +140,7 @@ run_versity_app() { run_versity_app_s3 "2" else log 1 "unrecognized backend type $BACKEND" - exit 1 + return 1 fi if [[ $IAM_TYPE != "s3" ]]; then return 0 @@ -151,13 +151,13 @@ run_versity_app() { if ! create_bucket "s3api" "$USERS_BUCKET"; then log 1 "error creating IAM bucket" teardown - exit 1 + return 1 fi } stop_single_process() { if ! check_param_count "stop_single_process" "versitygw PID" 1 $#; then - exit 1 + return 1 fi log 5 "stop process with ID: $1" # shellcheck disable=SC2086