test: tags, metadata tests, docker, test config cleanup
.github/workflows/system.yml (vendored, 2 lines changed)
@@ -66,7 +66,7 @@ jobs:
 #          export AWS_ACCESS_KEY_ID_TWO=ABCDEFGHIJKLMNOPQRST
 #          export AWS_SECRET_ACCESS_KEY_TWO=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmn
 #          export WORKSPACE=$GITHUB_WORKSPACE
-#          VERSITYGW_TEST_ENV=./tests/.env.s3.default GOCOVERDIR=/tmp/cover ./tests/run_all.sh
+#          VERSITYGW_TEST_ENV=./tests/.env.s3 GOCOVERDIR=/tmp/cover ./tests/run_all.sh

      - name: Coverage report
        run: |
.gitignore (vendored, 14 lines changed)
@@ -47,5 +47,15 @@ tests/.secrets*
 users.json

 # env files for testing
-.env*
-!.env.default
+**/.env*
+!**/.env.default
+
+# s3cmd config files (testing)
+tests/s3cfg.local*
+!tests/s3cfg.local.default
+
+# keys
+*.pem
+
+# patches
+*.patch
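(Editorial note: in `.gitignore`, the `!` negation only works as the first character of a pattern, so the re-include rules above are written `!**/.env.default` and `!tests/s3cfg.local.default`. The broadened `**/` rules can be checked with `git check-ignore`; a small sketch, with hypothetical file paths:)

```sh
# -v prints the rule that decides each path and its source line
git check-ignore -v tests/.env.s3        # should report a match from **/.env*
git check-ignore -v tests/.env.default   # should report the !**/.env.default re-include
```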
@@ -61,8 +61,6 @@ USER tester
 COPY --chown=tester:tester . /home/tester

 WORKDIR /home/tester
-#RUN cp tests/.env.docker.s3.default tests/.env.docker.s3
-RUN cp tests/s3cfg.local.default tests/s3cfg.local
 RUN make

 RUN . $SECRETS_FILE && \
docker-compose-bats.yml (new file, 35 lines)
@@ -0,0 +1,35 @@
+version: '3'
+
+services:
+  no_certs:
+    build:
+      context: .
+      dockerfile: Dockerfile_test_bats
+      args:
+        - CONFIG_FILE=tests/.env.nocerts
+  static_buckets:
+    build:
+      context: .
+      dockerfile: Dockerfile_test_bats
+      args:
+        - CONFIG_FILE=tests/.env.static
+  posix_backend:
+    build:
+      context: .
+      dockerfile: Dockerfile_test_bats
+      args:
+        - CONFIG_FILE=tests/.env.default
+  s3_backend:
+    build:
+      context: .
+      dockerfile: Dockerfile_test_bats
+      args:
+        - CONFIG_FILE=tests/.env.s3
+        - SECRETS_FILE=tests/.secrets.s3
+  direct:
+    build:
+      context: .
+      dockerfile: Dockerfile_test_bats
+      args:
+        - CONFIG_FILE=tests/.env.direct
+        - SECRETS_FILE=tests/.secrets.direct
@@ -6,7 +6,7 @@ BACKEND=posix
 LOCAL_FOLDER=/tmp/gw
 BUCKET_ONE_NAME=versity-gwtest-bucket-one
 BUCKET_TWO_NAME=versity-gwtest-bucket-two
-#RECREATE_BUCKETS=true
+RECREATE_BUCKETS=true
 CERT=$PWD/cert.pem
 KEY=$PWD/versitygw.pem
 S3CMD_CONFIG=./tests/s3cfg.local.default
@@ -1,14 +0,0 @@
-AWS_PROFILE=versity_s3
-AWS_ENDPOINT_URL=https://127.0.0.1:7070
-VERSITY_EXE=./versitygw
-RUN_VERSITYGW=true
-BACKEND=s3
-LOCAL_FOLDER=/tmp/gw
-BUCKET_ONE_NAME=versity-gwtest-bucket-one
-BUCKET_TWO_NAME=versity-gwtest-bucket-two
-#RECREATE_BUCKETS=true
-CERT=$PWD/cert.pem
-KEY=$PWD/versitygw.pem
-S3CMD_CONFIG=./tests/s3cfg.local.default
-SECRETS_FILE=./tests/.secrets.s3
-MC_ALIAS=versity_s3
@@ -36,12 +36,38 @@ Instructions are mostly the same; however, testing with the S3 backend requires

 To set up the latter:
+1. Create a new AWS profile with ID and key values set to dummy 20-char all-caps and 40-char alphabetical values, respectively.
-1. In the `.secrets` file being used, create the fields `AWS_ACCESS_KEY_ID_TWO` and `AWS_SECRET_ACCESS_KEY_TWO`. Set these values to the actual AWS ID and key.
-2. Set the values for `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to the same dummy values set in the AWS profile, and set `AWS_PROFILE` to the profile you just created.
-3. Create a new AWS profile with these dummy values. In the `.env` file being used, set the `AWS_PROFILE` parameter to the name of this new profile, and the ID and key fields to the dummy values.
-4. Set `BACKEND` to `s3`. Also, change the `MC_ALIAS` value if testing **mc** in this configuration.
+2. In the `.secrets` file being used, create the fields `AWS_ACCESS_KEY_ID_TWO` and `AWS_SECRET_ACCESS_KEY_TWO`. Set these values to the actual AWS ID and key.
+3. Set the values for `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to the same dummy values set in the AWS profile, and set `AWS_PROFILE` to the profile you just created.
+4. Create a new AWS profile with these dummy values. In the `.env` file being used, set the `AWS_PROFILE` parameter to the name of this new profile, and the ID and key fields to the dummy values.
+5. Set `BACKEND` to `s3`. Also, change the `MC_ALIAS` value if testing **mc** in this configuration.
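(For illustration, a `.secrets.s3` file assembled per these steps might look like the sketch below. The profile name and credential values are hypothetical placeholders; the dummy ID/key shapes mirror the commented examples in the workflow file above.)

```sh
# tests/.secrets.s3 — hypothetical sketch, not real credentials
AWS_PROFILE=versity_s3                                          # profile holding the dummy values
AWS_ACCESS_KEY_ID=ABCDEFGHIJKLMNOPQRST                          # 20-char all-caps dummy
AWS_SECRET_ACCESS_KEY=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmn  # 40-char alphabetical dummy
AWS_ACCESS_KEY_ID_TWO=<actual AWS access key ID>
AWS_SECRET_ACCESS_KEY_TWO=<actual AWS secret access key>
```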
+
+### Direct Mode
+
+To communicate directly with S3, in order to compare the gateway results to direct results:
+1. Create an AWS profile with the direct connection info. Set `AWS_PROFILE` to this profile.
+2. Set `RUN_VERSITYGW` to `false`.
+3. Set `AWS_ENDPOINT_URL` to the typical endpoint location (usually `https://s3.amazonaws.com`).
+4. If testing **s3cmd**, create a new `s3cfg.local` file with `host_base` and `host_bucket` set to `s3.amazonaws.com`.
+5. If testing **mc**, change the `MC_ALIAS` value to a new value such as `versity-direct`.
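(A direct-mode `.env` would then differ from the gateway defaults roughly as sketched here; the field names are taken from the now-deleted `.env.s3` shown earlier, and the profile name is hypothetical.)

```sh
# hypothetical tests/.env.direct sketch
AWS_PROFILE=versity-direct                # profile with the real connection info
AWS_ENDPOINT_URL=https://s3.amazonaws.com
RUN_VERSITYGW=false                       # talk to S3 directly; do not start the gateway
BACKEND=s3
MC_ALIAS=versity-direct                   # fresh alias if testing mc
```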

 ## Instructions - Running With Docker

-1. Create a `.secrets` file in the `tests` folder, and add the `AWS_PROFILE`, `AWS_ACCESS_KEY_ID`, and `AWS_SECRET_ACCESS_KEY` fields.
-2. Build and run the `Dockerfile_test_bats` file. Change the `SECRETS_FILE` and `CONFIG_FILE` parameters to point to an S3-backend-friendly config. Example: `docker build -t <tag> -f Dockerfile_test_bats --build-arg="SECRETS_FILE=<file>" --build-arg="CONFIG_FILE=<file>" .`.
+1. Create a `.secrets` file in the `tests` folder, and add the `AWS_PROFILE`, `AWS_ACCESS_KEY_ID`, and `AWS_SECRET_ACCESS_KEY` fields, as well as the additional S3 fields explained in the **S3 Backend** section above if running with the s3 backend.
+2. Build and run the `Dockerfile_test_bats` file. Change the `SECRETS_FILE` and `CONFIG_FILE` parameters to point to your secrets and config files, respectively. Example: `docker build -t <tag> -f Dockerfile_test_bats --build-arg="SECRETS_FILE=<file>" --build-arg="CONFIG_FILE=<file>" .`.
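(A concrete invocation following step 2 might look like the sketch below; the image tag is arbitrary, and the run step assumes the image's default entrypoint launches the test suite.)

```sh
# build with a posix-backend config (file paths as used in docker-compose-bats.yml)
docker build -t versitygw-bats -f Dockerfile_test_bats \
  --build-arg="SECRETS_FILE=tests/.secrets" \
  --build-arg="CONFIG_FILE=tests/.env.default" .
docker run --rm versitygw-bats
```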
+## Instructions - Running with docker-compose
+
+A file named `docker-compose-bats.yml` is provided in the root folder. Five configurations are provided:
+* insecure (without certificates), with creation/removal of buckets
+* secure, posix backend, with static buckets
+* secure, posix backend, with creation/removal of buckets
+* secure, s3 backend, with creation/removal of buckets
+* direct mode
+
+To use each of these, creating a separate `.env` file for each is suggested; how to do so is explained below.
+
+To run in insecure mode, comment out the `CERT` and `KEY` parameters in the `.env` file, and change the prefix for the `AWS_ENDPOINT_URL` parameter to `http://`. Also, set `S3CMD_CONFIG` to point to a copy of the default s3cmd config file that has `use_https` set to false. Finally, change `MC_ALIAS` to something new to avoid overwriting the secure `MC_ALIAS` values.
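(Put together, an insecure-mode `.env` might change these lines relative to the default; the values below are hypothetical and simply mirror the instructions above.)

```sh
# hypothetical tests/.env.nocerts sketch (cf. CONFIG_FILE=tests/.env.nocerts in docker-compose-bats.yml)
AWS_ENDPOINT_URL=http://127.0.0.1:7070    # http://, not https://
#CERT=$PWD/cert.pem                       # certificates commented out
#KEY=$PWD/versitygw.pem
S3CMD_CONFIG=./tests/s3cfg.local.nocerts  # copy of the default config with use_https = False
MC_ALIAS=versity_nocerts                  # new alias so the secure one is not overwritten
```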
+To use static buckets, set the `RECREATE_BUCKETS` value to `false`.
+
+For the s3 backend, see the **S3 Backend** instructions above.
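(To run one of the five configurations, something like the following should work; the service names come from `docker-compose-bats.yml` above.)

```sh
# build and run a single configuration, e.g. the secure posix backend
docker-compose -f docker-compose-bats.yml up --build posix_backend

# or the s3-backend configuration (requires tests/.secrets.s3 and tests/.env.s3)
docker-compose -f docker-compose-bats.yml up --build s3_backend
```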
tests/s3cfg.local.nocerts (new file, 9 lines)
@@ -0,0 +1,9 @@
+# Setup endpoint
+host_base = 127.0.0.1:7070
+host_bucket = 127.0.0.1:7070
+bucket_location = us-east-1
+use_https = False
+signurl_use_https = False
+
+# Enable S3 v4 signature APIs
+signature_v2 = False
@@ -114,8 +114,8 @@ source ./tests/test_common.sh
 }

 # test ability to set and retrieve bucket tags
-@test "test-set-get-bucket-tags" {
-  test_common_set_get_bucket_tags "aws"
+@test "test-set-get-delete-bucket-tags" {
+  test_common_set_get_delete_bucket_tags "aws"
 }

 # test v1 s3api list objects command
@@ -417,4 +417,30 @@ source ./tests/test_common.sh
   delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
   delete_bucket_or_contents "aws" "$BUCKET_TWO_NAME"
   delete_test_files "$bucket_file"
 }
+
+@test "test_add_object_metadata" {
+
+  object_one="object-one"
+  test_key="x-test-data"
+  test_value="test-value"
+
+  create_test_files "$object_one" || local created=$?
+  [[ $created -eq 0 ]] || fail "Error creating test files"
+
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
+  [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
+
+  object="$test_file_folder"/"$object_one"
+  put_object_with_metadata "aws" "$object" "$BUCKET_ONE_NAME" "$test_key" "$test_value" || put_object=$?
+  [[ $put_object -eq 0 ]] || fail "Failed to add object to bucket"
+  object_exists "aws" "$object" || local exists_result_one=$?
+  [[ $exists_result_one -eq 0 ]] || fail "Object not added to bucket"
+
+  get_object_metadata "aws" "$BUCKET_ONE_NAME" "$object" || get_result=$?
+  [[ $get_result -eq 0 ]] || fail "error getting object metadata"
+  key=$(echo "$metadata" | jq 'keys[]')
+  value=$(echo "$metadata" | jq '.[]')
+  [[ $key == "\"$test_key\"" ]] || fail "key doesn't match (expected \"$test_key\", actual $key)"
+  [[ $value == "\"$test_value\"" ]] || fail "value doesn't match (expected \"$test_value\", actual $value)"
+}
@@ -148,7 +148,7 @@ test_common_list_objects() {
   fi
 }

-test_common_set_get_bucket_tags() {
+test_common_set_get_delete_bucket_tags() {
   if [[ $# -ne 1 ]]; then
     fail "set/get bucket tags test requires command type"
   fi
@@ -160,20 +160,14 @@ test_common_set_get_bucket_tags() {
   [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"

   get_bucket_tags "$1" "$BUCKET_ONE_NAME" || local get_result=$?
-  [[ $get_result -eq 0 ]] || fail "Error getting bucket tags"
+  [[ $get_result -eq 0 ]] || fail "Error getting bucket tags first time"

-  if [[ $1 == 'aws' ]]; then
-    if [[ $tags != "" ]]; then
-      tag_set=$(echo "$tags" | jq '.TagSet')
-      [[ $tag_set == "[]" ]] || fail "Error: tags not empty: $tags"
-    fi
-  else
-    [[ $tags == "" ]] || [[ $tags =~ "No tags found" ]] || fail "Error: tags not empty: $tags"
-  fi
+  check_bucket_tags_empty "$1" || local check_result=$?
+  [[ $check_result -eq 0 ]] || fail "error checking if bucket tags are empty"

   put_bucket_tag "$1" "$BUCKET_ONE_NAME" $key $value
   get_bucket_tags "$1" "$BUCKET_ONE_NAME" || local get_result_two=$?
-  [[ $get_result_two -eq 0 ]] || fail "Error getting bucket tags"
+  [[ $get_result_two -eq 0 ]] || fail "Error getting bucket tags second time"

   local tag_set_key
   local tag_set_value
@@ -189,6 +183,12 @@ test_common_set_get_bucket_tags() {
     [[ $tag_set_value == "$value" ]] || fail "Value mismatch"
   fi
+  delete_bucket_tags "$1" "$BUCKET_ONE_NAME"
+
+  get_bucket_tags "$1" "$BUCKET_ONE_NAME" || local get_result=$?
+  [[ $get_result -eq 0 ]] || fail "Error getting bucket tags third time"
+
+  check_bucket_tags_empty "$1" || local check_result=$?
+  [[ $check_result -eq 0 ]] || fail "error checking if bucket tags are empty"
   delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
 }
@@ -28,7 +28,7 @@ export RUN_MC=true
 }

 @test "test_set_get_bucket_tags_mc" {
-  test_common_set_get_bucket_tags "mc"
+  test_common_set_get_delete_bucket_tags "mc"
 }

 @test "test_set_get_object_tags_mc" {
@@ -255,6 +255,52 @@ put_object() {
   return 0
 }

+put_object_with_metadata() {
+  if [ $# -ne 5 ]; then
+    echo "put object command requires command type, source, destination, key, value"
+    return 1
+  fi
+
+  local exit_code=0
+  local error
+  if [[ $1 == 'aws' ]]; then
+    error=$(aws --no-verify-ssl s3api put-object --bucket "$3" --key "$2" --body "$2" --metadata "{\"$4\":\"$5\"}") || exit_code=$?
+  else
+    echo "invalid command type $1"
+    return 1
+  fi
+  log 5 "put object exit code: $exit_code"
+  if [ $exit_code -ne 0 ]; then
+    echo "error copying object to bucket: $error"
+    return 1
+  fi
+  return 0
+}
+
+get_object_metadata() {
+  if [ $# -ne 3 ]; then
+    echo "get object metadata command requires command type, bucket, key"
+    return 1
+  fi
+
+  local exit_code=0
+  if [[ $1 == 'aws' ]]; then
+    metadata_struct=$(aws --no-verify-ssl s3api head-object --bucket "$2" --key "$3") || exit_code=$?
+  else
+    echo "invalid command type $1"
+    return 1
+  fi
+  if [ $exit_code -ne 0 ]; then
+    echo "error getting object metadata"
+    return 1
+  fi
+  log 5 "$metadata_struct"
+  metadata=$(echo "$metadata_struct" | jq '.Metadata')
+  echo "$metadata"
+  export metadata
+  return 0
+}
+
 put_object_multiple() {
   if [ $# -ne 3 ]; then
     echo "put object command requires command type, source, destination"
@@ -435,6 +481,20 @@ list_objects() {
   export object_array
 }

+remove_insecure_request_warning() {
+  if [[ $# -ne 1 ]]; then
+    echo "remove insecure request warning requires input lines"
+    return 1
+  fi
+  parsed_output=()
+  while IFS= read -r line; do
+    if [[ $line != *InsecureRequestWarning* ]]; then
+      parsed_output+=("$line")
+    fi
+  done <<< "$1"
+  export parsed_output
+}
+
 # check if bucket info can be retrieved
 # param: path of bucket or folder
 # return 0 for yes, 1 for no, 2 for error
@@ -566,6 +626,28 @@ get_bucket_tags() {
   export tags
 }

+check_bucket_tags_empty() {
+  if [[ $# -ne 1 ]]; then
+    echo "bucket tags empty check requires command type"
+    return 2
+  fi
+  if [[ $1 == 'aws' ]]; then
+    if [[ $tags != "" ]]; then
+      tag_set=$(echo "$tags" | jq '.TagSet')
+      if [[ $tag_set != "[]" ]]; then
+        echo "error: tags not empty: $tags"
+        return 1
+      fi
+    fi
+  else
+    if [[ $tags != "" ]] && [[ $tags != *"No tags found"* ]]; then
+      echo "Error: tags not empty: $tags"
+      return 1
+    fi
+  fi
+  return 0
+}
+
 delete_bucket_tags() {
   if [ $# -ne 2 ]; then
     echo "delete bucket tag command missing command type, bucket name"