Merge pull request #653 from versity/test_cmdline_policy_more_tests

Test cmdline policy more tests
This commit is contained in:
Ben McClelland
2024-06-25 09:44:29 -07:00
committed by GitHub
6 changed files with 203 additions and 24 deletions

View File

@@ -18,6 +18,7 @@ jobs:
RUN_SET: "s3cmd"
RECREATE_BUCKETS: "true"
PORT: 7070
BACKEND: "posix"
- set: 2
LOCAL_FOLDER: /tmp/gw2
BUCKET_ONE_NAME: versity-gwtest-bucket-one-2
@@ -28,6 +29,7 @@ jobs:
RUN_SET: "s3"
RECREATE_BUCKETS: "true"
PORT: 7071
BACKEND: "posix"
- set: 3
LOCAL_FOLDER: /tmp/gw3
BUCKET_ONE_NAME: versity-gwtest-bucket-one-3
@@ -38,6 +40,7 @@ jobs:
RUN_SET: "s3api"
RECREATE_BUCKETS: "true"
PORT: 7072
BACKEND: "posix"
- set: 4
LOCAL_FOLDER: /tmp/gw4
BUCKET_ONE_NAME: versity-gwtest-bucket-one-4
@@ -48,6 +51,7 @@ jobs:
RUN_SET: "mc"
RECREATE_BUCKETS: "true"
PORT: 7073
BACKEND: "posix"
- set: 5
LOCAL_FOLDER: /tmp/gw5
BUCKET_ONE_NAME: versity-gwtest-bucket-one-5
@@ -58,6 +62,7 @@ jobs:
RUN_SET: "aws-user"
RECREATE_BUCKETS: "true"
PORT: 7074
BACKEND: "posix"
- set: 6
LOCAL_FOLDER: /tmp/gw6
BUCKET_ONE_NAME: versity-gwtest-bucket-one-6
@@ -68,6 +73,18 @@ jobs:
RUN_SET: "aws"
RECREATE_BUCKETS: "false"
PORT: 7075
BACKEND: "posix"
- set: 7
LOCAL_FOLDER: /tmp/gw7
BUCKET_ONE_NAME: versity-gwtest-bucket-one-7
BUCKET_TWO_NAME: versity-gwtest-bucket-two-7
IAM_TYPE: folder
USERS_FOLDER: /tmp/iam7
AWS_ENDPOINT_URL: https://127.0.0.1:7076
RUN_SET: "aws"
RECREATE_BUCKETS: "true"
PORT: 7076
BACKEND: "s3"
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v4
@@ -110,7 +127,7 @@ jobs:
AWS_PROFILE: versity
VERSITY_EXE: ${{ github.workspace }}/versitygw
RUN_VERSITYGW: true
BACKEND: posix
BACKEND: ${{ matrix.BACKEND }}
RECREATE_BUCKETS: ${{ matrix.RECREATE_BUCKETS }}
CERT: ${{ github.workspace }}/cert.pem
KEY: ${{ github.workspace }}/versitygw.pem
@@ -123,6 +140,8 @@ jobs:
export AWS_ACCESS_KEY_ID=ABCDEFGHIJKLMNOPQRST
export AWS_SECRET_ACCESS_KEY=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmn
export AWS_REGION=us-east-1
export AWS_ACCESS_KEY_ID_TWO=user
export AWS_SECRET_ACCESS_KEY_TWO=pass
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile versity
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile versity
aws configure set aws_region $AWS_REGION --profile versity
@@ -136,20 +155,6 @@ jobs:
fi
BYPASS_ENV_FILE=true ${{ github.workspace }}/tests/run.sh $RUN_SET
#- name: Build and run, s3 backend
# run: |
# make testbin
# export AWS_ACCESS_KEY_ID=ABCDEFGHIJKLMNOPQRST
# export AWS_SECRET_ACCESS_KEY=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmn
# export AWS_REGION=us-east-1
# aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile versity_s3
# aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile versity_s3
# aws configure set aws_region $AWS_REGION --profile versity_s3
# export AWS_ACCESS_KEY_ID_TWO=ABCDEFGHIJKLMNOPQRST
# export AWS_SECRET_ACCESS_KEY_TWO=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmn
# export WORKSPACE=$GITHUB_WORKSPACE
# VERSITYGW_TEST_ENV=./tests/.env.s3 GOCOVERDIR=/tmp/cover ./tests/run_all.sh
- name: Coverage report
run: |
go tool covdata percent -i=cover

View File

@@ -31,6 +31,10 @@
8. Set `BUCKET_ONE_NAME` and `BUCKET_TWO_NAME` to the desired names of your buckets. If you don't want them to be created each time, set `RECREATE_BUCKETS` to `false`.
9. In the root repo folder, run single test group with `VERSITYGW_TEST_ENV=<env file> tests/run.sh <options>`. To print options, run `tests/run.sh -h`. To run all tests, run `VERSITYGW_TEST_ENV=<env file> tests/run_all.sh`.
### Static Bucket Mode
To preserve buckets while running tests, set `RECREATE_BUCKETS` to `false`. Two utility scripts are included to create and delete these buckets if needed: `tests/setup_static.sh` and `tests/remove_static.sh`.
### S3 Backend
Instructions are mostly the same; however, testing with the S3 backend requires two S3 accounts. Ideally, these are two real accounts, but one can also be a dummy account that versity uses internally.

View File

@@ -2,7 +2,7 @@
put_bucket_acl() {
if [[ $# -ne 3 ]]; then
log 2 "put bucket acl command requires command type, bucket name, acls"
log 2 "put bucket acl command requires command type, bucket name, acls or username"
return 1
fi
local error=""
@@ -11,7 +11,7 @@ put_bucket_acl() {
log 5 "bucket name: $2, acls: $3"
error=$(aws --no-verify-ssl s3api put-bucket-acl --bucket "$2" --access-control-policy "file://$3" 2>&1) || put_result=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate setacl "s3://$2" --acl-grant=read:ABCDEFG 2>&1) || put_result=$?
error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate setacl "s3://$2" --acl-grant=read:"$3" 2>&1) || put_result=$?
else
log 2 "put_bucket_acl not implemented for '$1'"
return 1

View File

@@ -60,8 +60,7 @@ export RUN_USERS=true
multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || fail "error performing multipart upload"
copy_file "s3://$BUCKET_ONE_NAME/$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "error copying file"
compare_files "$test_file_folder/$bucket_file-copy" "$test_file_folder"/$bucket_file || fail "files do not match"
download_and_compare_file "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "error downloading and comparing file"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files $bucket_file
@@ -971,11 +970,151 @@ EOF
put_object "s3api" "$test_file_folder/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" || fail "error copying object to bucket"
get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$test_file_folder/$test_file" "$username" "$password" || fail "error getting object one after permissions"
download_and_compare_file_with_user "s3api" "$test_file_folder/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$test_file_folder/$test_file-copy" "$username" "$password" || fail "error downloading and comparing file"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$test_folder/$test_file" "$policy_file"
}
# Verify that an explicit Deny overrides an Allow when both statements target
# the same principal, action, and resource in a bucket policy.
@test "test_policy_allow_deny" {
policy_file="policy_file"
test_file="test_file"
username="ABCDEFG"
password="HIJKLMN"
create_test_files "$policy_file" "$test_file" || fail "error creating policy file"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_file"
# Write a policy containing conflicting Deny and Allow statements for the
# identical principal/action/resource; policy evaluation must favor the Deny.
cat <<EOF > "$test_file_folder"/$policy_file
{
"Statement": [
{
"Effect": "Deny",
"Principal": "$principal",
"Action": "$action",
"Resource": "$resource"
},
{
"Effect": "Allow",
"Principal": "$principal",
"Action": "$action",
"Resource": "$resource"
}
]
}
EOF
# Recreate the user so the password is known for this run.
if user_exists "$username"; then
delete_user "$username" || fail "failed to delete user '$username'"
fi
create_user "$username" "$password" "user" || fail "error creating user"
setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$test_file_folder/$policy_file" || fail "error putting policy"
put_object "s3api" "$test_file_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "error copying object to bucket"
# The Deny statement should block the user's GetObject request even though an
# Allow statement for the same resource is also present.
if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password"; then
fail "able to get object despite deny statement"
fi
# get_object_with_user is expected to record its failure in $get_object_error.
[[ "$get_object_error" == *"Access Denied"* ]] || fail "invalid get object error: $get_object_error"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$test_file" "$test_file-copy" "$policy_file"
}
# Verify that an object-specific Deny statement overrides a wildcard Allow:
# the user may fetch file one (covered only by the Allow on "*") but not file
# two (explicitly denied).
@test "test_policy_deny" {
# TODO (https://github.com/versity/versitygw/issues/637)
if [[ $RECREATE_BUCKETS == "false" ]]; then
return 0
fi
policy_file="policy_file"
test_file_one="test_file_one"
test_file_two="test_file_two"
username="ABCDEFG"
password="HIJKLMN"
create_test_files "$test_file_one" "$test_file_two" "$policy_file" || fail "error creating policy file, test file"
# Deny GetObject on test_file_two specifically, while allowing GetObject on
# every key in the bucket via the "*" resource.
cat <<EOF > "$test_file_folder"/$policy_file
{
"Statement": [
{
"Effect": "Deny",
"Principal": "$username",
"Action": "s3:GetObject",
"Resource": "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file_two"
},
{
"Effect": "Allow",
"Principal": "$username",
"Action": "s3:GetObject",
"Resource": "arn:aws:s3:::$BUCKET_ONE_NAME/*"
}
]
}
EOF
# Recreate the user so the password is known for this run.
if user_exists "$username"; then
delete_user "$username" || fail "failed to delete user '$username'"
fi
create_user "$username" "$password" "user" || fail "error creating user"
setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
log 5 "Policy: $(cat "$test_file_folder/$policy_file")"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$test_file_folder/$policy_file" || fail "error putting policy"
put_object "s3api" "$test_file_folder/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_one" || fail "error copying object one"
put_object "s3api" "$test_file_folder/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_two" || fail "error copying object two"
# Object one is only covered by the wildcard Allow, so the get must succeed.
get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_one" "$test_file_folder/$test_file_one-copy" "$username" "$password" || fail "error getting object"
# Object two is explicitly denied, so the get must fail despite the Allow.
if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$test_file_folder/$test_file_two-copy" "$username" "$password"; then
fail "able to get object despite deny statement"
fi
[[ "$get_object_error" == *"Access Denied"* ]] || fail "invalid get object error: $get_object_error"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$test_file_one" "$test_file_two" "$test_file_one-copy" "$test_file_two-copy" "$policy_file"
}
# Verify wildcard (prefix) resource matching in bucket policies: a user
# granted s3:PutObject on "<bucket>/test_folder/*" can write keys under that
# prefix but is denied writes to keys outside it.
@test "test_policy_put_wildcard" {
# TODO (https://github.com/versity/versitygw/issues/637)
if [[ $RECREATE_BUCKETS == "false" ]]; then
return 0
fi
policy_file="policy_file"
test_folder="test_folder"
test_file="test_file"
username="ABCDEFG"
password="HIJKLMN"
create_test_folder "$test_folder" || fail "error creating test folder"
create_test_files "$test_folder/$test_file" "$policy_file" || fail "error creating policy file, test file"
# Make the file content unique to this test so the compare step is meaningful.
echo "$BATS_TEST_NAME" >> "$test_file_folder/$test_folder/$test_file"
effect="Allow"
principal="$username"
action="s3:PutObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_folder/*"
# Recreate the user so the password is known for this run.
if user_exists "$username"; then
delete_user "$username" || fail "failed to delete user '$username'"
fi
create_user "$username" "$password" "user" || fail "error creating user"
setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
setup_policy_with_single_statement "$test_file_folder/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
# Log the policy only after setup_policy_with_single_statement has written it
# (previously this was logged before the file was populated, printing nothing).
log 5 "Policy: $(cat "$test_file_folder/$policy_file")"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$test_file_folder/$policy_file" || fail "error putting policy"
# A key outside the allowed "$test_folder/" prefix must be denied.
if put_object_with_user "s3api" "$test_file_folder/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"; then
fail "able to put object despite not being allowed"
fi
[[ "$put_object_error" == *"Access Denied"* ]] || fail "invalid put object error: $put_object_error"
# A key under the allowed prefix must succeed and round-trip intact.
put_object_with_user "s3api" "$test_file_folder/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$username" "$password" || fail "error putting file despite policy permissions"
download_and_compare_file "s3api" "$test_file_folder/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$test_file_folder/$test_file-copy" || fail "files don't match"
delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
delete_test_files "$test_folder/$test_file" "$test_file-copy" "$policy_file"
}
# ensure that lists of files greater than a size of 1000 (pagination) are returned properly
#@test "test_list_objects_file_count" {
# test_common_list_objects_file_count "aws"

View File

@@ -473,10 +473,15 @@ cat <<EOF > "$test_file_folder"/"$acl_file"
}
EOF
put_bucket_acl "$1" "$BUCKET_ONE_NAME" "$test_file_folder"/"$acl_file" || fail "error putting acl"
log 6 "before 1st put acl"
if [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
put_bucket_acl "$1" "$BUCKET_ONE_NAME" "$test_file_folder"/"$acl_file" || fail "error putting first acl"
else
put_bucket_acl "$1" "$BUCKET_ONE_NAME" "ABCDEFG" || fail "error putting first acl"
fi
get_bucket_acl "$1" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Error retrieving acl"
[[ $result -eq 0 ]] || fail "Error retrieving second acl"
log 5 "Acls after 1st put: $acl"
public_grants=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Grants[0]')
@@ -501,10 +506,10 @@ cat <<EOF > "$test_file_folder"/"$acl_file"
EOF
put_bucket_acl "$1" "$BUCKET_ONE_NAME" "$test_file_folder"/"$acl_file" || local put_result=$?
[[ $put_result -eq 0 ]] || fail "Error putting acl"
[[ $put_result -eq 0 ]] || fail "Error putting second acl"
get_bucket_acl "$1" "$BUCKET_ONE_NAME" || local result=$?
[[ $result -eq 0 ]] || fail "Error retrieving acl"
[[ $result -eq 0 ]] || fail "Error retrieving second acl"
log 5 "Acls after 2nd put: $acl"
public_grants=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Grants')

View File

@@ -164,3 +164,29 @@ create_test_file_count() {
fi
return 0
}
# Download an object and compare it against the original file, authenticating
# with the default credentials from the environment.
# Arguments: $1 command type, $2 original file, $3 bucket, $4 key, $5 local file
# Returns:   0 when the downloaded file matches the original, non-zero otherwise
download_and_compare_file() {
  if [[ $# -ne 5 ]]; then
    log 2 "'download and compare file' requires command type, original file, bucket, key, local file"
    return 1
  fi
  # Delegate to the user-aware variant; its status propagates as ours.
  download_and_compare_file_with_user "$1" "$2" "$3" "$4" "$5" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY"
}
# Download an object as a specific user and compare it against the original.
# Arguments: $1 command type, $2 original file, $3 bucket, $4 key,
#            $5 local file, $6 user (access key), $7 password (secret key)
# Returns:   0 when the files match, non-zero on download or comparison failure
download_and_compare_file_with_user() {
  if [[ $# -ne 7 ]]; then
    log 2 "'download and compare file with user' command requires command type, original file, bucket, key, local file, user, password"
    return 1
  fi
  # Fetch the object to the local path using the supplied credentials.
  get_object_with_user "$1" "$3" "$4" "$5" "$6" "$7" || {
    log 2 "error retrieving file"
    return 1
  }
  log 5 "files: $2, $5"
  # Compare the downloaded copy with the original byte-for-byte.
  compare_files "$2" "$5" || {
    log 2 "files don't match"
    return 1
  }
  return 0
}