Merge pull request #481 from versity/test_cmdline_readme

Test cmdline readme
This commit is contained in:
Ben McClelland
2024-04-02 15:58:45 -07:00
committed by GitHub
13 changed files with 238 additions and 19 deletions

View File

@@ -61,7 +61,6 @@ USER tester
COPY --chown=tester:tester . /home/tester
WORKDIR /home/tester
RUN cp ${CONFIG_FILE}.default $CONFIG_FILE
#RUN cp tests/.env.docker.s3.default tests/.env.docker.s3
RUN cp tests/s3cfg.local.default tests/s3cfg.local
RUN make

View File

@@ -11,4 +11,5 @@ CERT=$PWD/cert.pem
KEY=$PWD/versitygw.pem
S3CMD_CONFIG=./tests/s3cfg.local.default
SECRETS_FILE=./tests/.secrets
MC_ALIAS=versity
MC_ALIAS=versity
LOG_LEVEL=2

View File

@@ -11,4 +11,4 @@ CERT=$PWD/cert.pem
KEY=$PWD/versitygw.pem
S3CMD_CONFIG=./tests/s3cfg.local.default
SECRETS_FILE=./tests/.secrets.s3
MC_ALIAS=versity
MC_ALIAS=versity_s3

View File

@@ -2,6 +2,8 @@
## Instructions - Running Locally
### Posix Backend
1. Build the `versitygw` binary.
2. Install any command-line interfaces you want to test that are not already available on your machine.
* **aws cli**: Instructions are [here](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
@@ -28,7 +30,18 @@
8. Set `BUCKET_ONE_NAME` and `BUCKET_TWO_NAME` to the desired names of your buckets. If you don't want them to be created each time, set `RECREATE_BUCKETS` to `false`.
9. In the root repo folder, run single test group with `VERSITYGW_TEST_ENV=<env file> tests/run.sh <options>`. To print options, run `tests/run.sh -h`. To run all tests, run `VERSITYGW_TEST_ENV=<env file> tests/run_all.sh`.
### S3 Backend
Instructions are mostly the same; however, testing with the S3 backend requires two S3 accounts. Ideally, these are two real accounts, but one can also be a dummy account that versity uses internally.
To set up the latter:
1. Create a new AWS profile with ID and key values set to dummy 20-char allcaps and 40-char alphabetical values respectively.
1. In the `.secrets` file being used, create the fields `AWS_ACCESS_KEY_ID_TWO` and `AWS_SECRET_ACCESS_KEY_TWO`. Set these values to the actual AWS ID and key.
2. Set the values for `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` the same dummy values set in the AWS profile, and set `AWS_PROFILE` to the profile you just created.
3. Create a new AWS profile with these dummy values. In the `.env` file being used, set the `AWS_PROFILE` parameter to the name of this new profile, and the ID and key fields to the dummy values.
4. Set `BACKEND` to `s3`. Also, change the `MC_ALIAS` value if testing **mc** in this configuration.
## Instructions - Running With Docker
1. Create a `.secrets` file in the `tests` folder, and add the `AWS_PROFILE`, `AWS_ACCESS_KEY_ID`, and `AWS_SECRET_ACCESS_KEY` fields.
2. Build and run the `Dockerfile_test_bats` file.
2. Build and run the `Dockerfile_test_bats` file. Change the `SECRETS_FILE` and `CONFIG_FILE` parameters to point to an S3-backend-friendly config. Example: `docker build -t <tag> -f Dockerfile_test_bats --build-arg="SECRETS_FILE=<file>" --build-arg="CONFIG_FILE=<file>" .`.

14
tests/logger.sh Normal file
View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# levels: 1 - crit, 2 - err, 3 - warn, 4 - info, 5 - debug, 6 - trace

# log <level> <message>
#   Print <message> to stdout when <level> is at or below $LOG_LEVEL.
#   Returns 1 (with a usage message) unless called with exactly two arguments.
log() {
  if [[ $# -ne 2 ]]; then
    echo "log function requires level, message"
    return 1
  fi
  local level=$1
  local message=$2
  # suppress messages more verbose than the configured threshold
  if [[ $level -le $LOG_LEVEL ]]; then
    echo "$message"
  fi
}

View File

@@ -54,6 +54,11 @@ check_params() {
echo "RECREATE_BUCKETS must be 'true' or 'false'"
return 1
fi
if [[ -z "$LOG_LEVEL" ]]; then
export LOG_LEVEL=2
else
export LOG_LEVEL
fi
return 0
}

View File

@@ -10,6 +10,19 @@ source ./tests/test_common.sh
test_common_create_delete_bucket "aws"
}
# ensure creating a bucket with an empty/invalid name fails with the expected
# error; only meaningful when this run owns bucket creation
@test "test_create_bucket_invalid_name" {
if [[ $RECREATE_BUCKETS != "true" ]]; then
# buckets are pre-existing in this configuration, so skip
return
fi
create_bucket_invalid_name "aws" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Invalid name test failed"
# create_bucket_invalid_name exports the CLI error text as bucket_create_error
[[ "$bucket_create_error" == *"Invalid bucket name "* ]] || fail "unexpected error: $bucket_create_error"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
}
# test adding and removing an object on versitygw
@test "test_put_object-with-data" {
test_common_put_object_with_data "aws"
@@ -300,8 +313,8 @@ source ./tests/test_common.sh
bucket_file_data="test file\n"
create_test_files "$bucket_file" || local created=$?
printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file
[[ $created -eq 0 ]] || fail "Error creating test files"
printf "%s" "$bucket_file_data" > "$test_file_folder"/$bucket_file
setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
@@ -319,3 +332,48 @@ source ./tests/test_common.sh
# ensure presigned URLs work with UTF-8 characters in object names (aws cli)
@test "test-presigned-url-utf8-chars" {
test_common_presigned_url_utf8_chars "aws"
}
# verify that listing objects with a "/" delimiter groups keys under
# CommonPrefixes, and that a non-matching delimiter returns the full key
@test "test-list-objects-delimiter" {
  folder_name="two"
  object_name="three"
  create_test_folder "$folder_name" || local created=$?
  [[ $created -eq 0 ]] || fail "error creating folder"
  create_test_files "$folder_name"/"$object_name" || created=$?
  [[ $created -eq 0 ]] || fail "error creating file"
  setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
  [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
  put_object "aws" "$test_file_folder"/"$folder_name"/"$object_name" "$BUCKET_ONE_NAME"/"$folder_name"/"$object_name" || local put_object=$?
  [[ $put_object -eq 0 ]] || fail "Failed to add object to bucket"
  # with "/" as delimiter the folder should be reported as a common prefix
  list_objects_s3api_v1 "$BUCKET_ONE_NAME" "/"
  prefix=$(echo "${objects[@]}" | jq ".CommonPrefixes[0].Prefix")
  # fixed: expected/actual values were swapped in the original failure message
  [[ $prefix == "\""$folder_name/"\"" ]] || fail "prefix doesn't match (expected $folder_name/, actual $prefix)"
  # with a delimiter that matches nothing, the full key should be in Contents
  list_objects_s3api_v1 "$BUCKET_ONE_NAME" "#"
  key=$(echo "${objects[@]}" | jq ".Contents[0].Key")
  # fixed: this check validates the key, not the prefix
  [[ $key == "\""$folder_name/$object_name"\"" ]] || fail "key doesn't match (expected $folder_name/$object_name, actual $key)"
  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
  delete_test_files $folder_name
}
# ensure that lists of files greater than a size of 1000 (pagination) are returned properly
@test "test_list_objects_file_count" {
test_common_list_objects_file_count "aws"
}
#@test "test_filename_length" {
# file_name=$(printf "%0.sa" $(seq 1 1025))
# echo "$file_name"
# create_test_files "$file_name" || created=$?
# [[ $created -eq 0 ]] || fail "error creating file"
# setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
# [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
# put_object "aws" "$test_file_folder"/"$file_name" "$BUCKET_ONE_NAME"/"$file_name" || local put_object=$?
# [[ $put_object -eq 0 ]] || fail "Failed to add object to bucket"
#}

View File

@@ -148,7 +148,6 @@ test_common_list_objects() {
}
test_common_set_get_bucket_tags() {
if [[ $# -ne 1 ]]; then
fail "set/get bucket tags test requires command type"
fi
@@ -192,7 +191,6 @@ test_common_set_get_bucket_tags() {
}
test_common_set_get_object_tags() {
if [[ $# -ne 1 ]]; then
echo "get/set object tags missing command type"
return 1
@@ -293,3 +291,23 @@ test_common_presigned_url_utf8_chars() {
delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
delete_test_files "$bucket_file" "$bucket_file_copy"
}
# Verify that listing a bucket holding more than 1000 objects (one S3
# pagination page) returns every object.
# param: command type (aws, s3cmd, or mc)
# fails the surrounding bats test on any error
test_common_list_objects_file_count() {
  if [[ $# -ne 1 ]]; then
    echo "list objects greater than 1000 missing command type"
    return 1
  fi
  local create_rv=0
  create_test_file_count 1001 || create_rv=$?
  [[ $create_rv -eq 0 ]] || fail "error creating test files"
  local bucket_rv=0
  setup_bucket "$1" "$BUCKET_ONE_NAME" || bucket_rv=$?
  [[ $bucket_rv -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
  local copy_rv=0
  put_object_multiple "$1" "$test_file_folder/file_*" "$BUCKET_ONE_NAME" || copy_rv=$?
  [[ $copy_rv -eq 0 ]] || fail "Failed to copy files to bucket"
  # list_objects exports the result in object_array
  list_objects "$1" "$BUCKET_ONE_NAME"
  # only stringify the (large) array when debug logging is on
  [[ $LOG_LEVEL -lt 5 ]] || log 5 "Array: ${object_array[*]}"
  local total="${#object_array[@]}"
  [[ $total == 1001 ]] || fail "file count should be 1001, is $total"
  delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
}

View File

@@ -41,3 +41,7 @@ export RUN_MC=true
# ensure presigned URLs work with UTF-8 characters in object names (mc)
@test "test_presigned_url_utf8_chars_mc" {
test_common_presigned_url_utf8_chars "mc"
}
# ensure file lists larger than one pagination page (1000) are returned (mc)
@test "test_list_objects_file_count" {
test_common_list_objects_file_count "mc"
}

View File

@@ -35,4 +35,8 @@ export RUN_S3CMD=true
#@test "test_presigned_url_utf8_chars_s3cmd" {
# test_common_presigned_url_utf8_chars "s3cmd"
#}
#}
# ensure file lists larger than one pagination page (1000) are returned (s3cmd)
@test "test_list_objects_file_count" {
test_common_list_objects_file_count "s3cmd"
}

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env bats
source ./tests/util_mc.sh
source ./tests/logger.sh
# create an AWS bucket
# param: bucket name
@@ -30,6 +31,26 @@ create_bucket() {
return 0
}
# attempt to create a bucket with an invalid (empty) name and capture the error
# param: command type (currently only "aws" is supported)
# exports bucket_create_error (CLI error text) when creation fails as expected;
# returns 1 if arguments are wrong, the type is unsupported, or creation succeeds
create_bucket_invalid_name() {
  if [ $# -ne 1 ]; then
    echo "create bucket w/invalid name missing command type"
    return 1
  fi
  local exit_code=0
  if [[ $1 == "aws" ]]; then
    # "s3://" with no bucket name should be rejected by the gateway
    bucket_create_error=$(aws --no-verify-ssl s3 mb "s3://" 2>&1) || exit_code=$?
  else
    # fixed: original echoed undefined "$i" instead of the actual argument
    echo "invalid command type $1"
    return 1
  fi
  if [ $exit_code -eq 0 ]; then
    echo "error: bucket should have not been created but was"
    return 1
  fi
  export bucket_create_error
}
# delete an AWS bucket
# param: bucket name
# return 0 for success, 1 for failure
@@ -281,6 +302,35 @@ put_object() {
return 0
}
# copy multiple files matching a pattern to a bucket in a single operation
# params: command type (aws/s3cmd/mc), source file pattern, destination bucket
# return 0 for success, 1 for failure
put_object_multiple() {
  if [ $# -ne 3 ]; then
    echo "put object command requires command type, source, destination"
    return 1
  fi
  local rv=0
  local output
  case "$1" in
    aws)
      # pattern is split into dirname + --include so the glob matches remotely
      # shellcheck disable=SC2086
      output=$(aws --debug --no-verify-ssl s3 cp "$(dirname "$2")" s3://"$3" --recursive --exclude="*" --include="$2" 2>&1) || rv=$?
      ;;
    s3cmd)
      # $2 intentionally unquoted so the shell expands the glob
      # shellcheck disable=SC2086
      output=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate put $2 "s3://$3/" 2>&1) || rv=$?
      ;;
    mc)
      # $2 intentionally unquoted so the shell expands the glob
      # shellcheck disable=SC2086
      output=$(mc --insecure cp $2 "$MC_ALIAS"/"$3" 2>&1) || rv=$?
      ;;
    *)
      echo "invalid command type $1"
      return 1
      ;;
  esac
  if [ $rv -ne 0 ]; then
    echo "error copying object to bucket: $output"
    return 1
  fi
  log 5 "$output"
  return 0
}
# add object to versitygw if it doesn't exist
# params: source file, destination copy location
# return 0 for success or already exists, 1 for failure
@@ -394,8 +444,10 @@ list_objects() {
object_array=()
while IFS= read -r line; do
object_name=$(echo "$line" | awk '{print $NF}')
object_array+=("$object_name")
if [[ $line != *InsecureRequestWarning* ]]; then
object_name=$(echo "$line" | awk '{print $NF}')
object_array+=("$object_name")
fi
done <<< "$output"
export object_array
@@ -600,11 +652,15 @@ get_object_tags() {
# param: bucket
# export objects on success, return 1 for failure
list_objects_s3api_v1() {
if [ $# -ne 1 ]; then
echo "list objects command missing bucket"
if [ $# -lt 1 ] || [ $# -gt 2 ]; then
echo "list objects command requires bucket, (optional) delimiter"
return 1
fi
objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1") || local result=$?
if [ "$2" == "" ]; then
objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1") || local result=$?
else
objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1" --delimiter "$2") || local result=$?
fi
if [[ $result -ne 0 ]]; then
echo "error listing objects: $objects"
return 1

View File

@@ -1,5 +1,7 @@
#!/usr/bin/env bats
source ./tests/logger.sh
# create a test file and export folder. do so in temp folder
# params: filename
# export test file folder on success, return 1 for error
@@ -8,7 +10,7 @@ create_test_files() {
echo "create test files command missing filename"
return 1
fi
test_file_folder=.
test_file_folder=$PWD
if [[ -z "$GITHUB_ACTIONS" ]]; then
create_test_file_folder
fi
@@ -21,6 +23,23 @@ create_test_files() {
export test_file_folder
}
# create one or more test folders inside the test file folder
# params: folder name(s)
# exports test_file_folder on success; returns 0 for success, 1 for error
create_test_folder() {
  if [ $# -lt 1 ]; then
    echo "create test folder command missing folder name"
    return 1
  fi
  test_file_folder=$PWD
  if [[ -z "$GITHUB_ACTIONS" ]]; then
    create_test_file_folder
  fi
  local mkdir_result
  for name in "$@"; do
    # reset per iteration so an earlier failure doesn't mask a later success
    mkdir_result=0
    mkdir -p "$test_file_folder"/"$name" || mkdir_result=$?
    if [[ $mkdir_result -ne 0 ]]; then
      # fixed: report "folder" (not "file") and propagate the failure instead
      # of silently returning success
      echo "error creating folder $name"
      return 1
    fi
  done
  # exported for callers, matching create_test_files
  export test_file_folder
}
# delete a test file
# params: filename
# return: 0 for success, 1 for error
@@ -34,7 +53,7 @@ delete_test_files() {
return 1
fi
for name in "$@"; do
rm "$test_file_folder"/"$name" || rm_result=$?
rm -rf "${test_file_folder:?}"/"${name:?}" || rm_result=$?
if [[ $rm_result -ne 0 ]]; then
echo "error deleting file $name"
fi
@@ -80,7 +99,11 @@ compare_files() {
}
create_test_file_folder() {
test_file_folder=${TMPDIR}versity-gwtest
if [[ -v $TMPDIR ]]; then
test_file_folder=${TMPDIR}versity-gwtest
else
test_file_folder=$PWD/versity-gwtest
fi
mkdir -p "$test_file_folder" || local mkdir_result=$?
if [[ $mkdir_result -ne 0 ]]; then
echo "error creating test file folder"
@@ -97,16 +120,40 @@ create_large_file() {
return 1
fi
test_file_folder=.
test_file_folder=$PWD
if [[ -z "$GITHUB_ACTIONS" ]]; then
create_test_file_folder
fi
filesize=$((160*1024*1024))
error=$(dd if=/dev/urandom of=$test_file_folder/"$1" bs=1024 count=$((filesize/1024))) || dd_result=$?
error=$(dd if=/dev/urandom of="$test_file_folder"/"$1" bs=1024 count=$((filesize/1024))) || dd_result=$?
if [[ $dd_result -ne 0 ]]; then
echo "error creating file: $error"
return 1
fi
return 0
}
# create a given number of empty test files named file_1 .. file_N in the
# test file folder
# param: file count
# returns 0 for success, 1 for error
create_test_file_count() {
  if [[ $# -ne 1 ]]; then
    # fixed: this function takes only a count, not a bucket name
    echo "create test file count function missing count"
    return 1
  fi
  test_file_folder=$PWD
  if [[ -z "$GITHUB_ACTIONS" ]]; then
    create_test_file_folder
  fi
  local touch_result
  local i
  for ((i = 1; i <= $1; i++)); do
    # reset per iteration so a stale failure status can't leak forward
    touch_result=0
    error=$(touch "$test_file_folder/file_$i") || touch_result=$?
    if [[ $touch_result -ne 0 ]]; then
      echo "error creating file_$i: $error"
      return 1
    fi
  done
  # only enumerate the (potentially large) file list when debug logging is on
  if [[ $LOG_LEVEL -ge 5 ]]; then
    ls_result=$(ls "$test_file_folder"/file_*)
    log 5 "$ls_result"
  fi
  return 0
}

View File

@@ -21,4 +21,4 @@ delete_bucket_recursive_mc() {
return 1
fi
return 0
}
}