mirror of https://github.com/versity/versitygw.git (synced 2026-04-24 22:50:28 +00:00)
test: delete tagging test, dockerfile
.github/workflows/docker-bats.yaml (new file, 28 lines, vendored)
@@ -0,0 +1,28 @@
+name: docker bats tests
+
+on: pull_request
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Build Docker Image
+        run: |
+          mv tests/.env.docker.default tests/.env.docker
+          mv tests/.secrets.default tests/.secrets
+          docker build --build-arg="GO_LIBRARY=go1.21.7.linux-amd64.tar.gz" \
+            --build-arg="AWS_CLI=awscli-exe-linux-x86_64.zip" --build-arg="MC_FOLDER=linux-amd64" \
+            --progress=plain -f Dockerfile_test_bats -t bats_test .
+
+      - name: Set up Docker Compose
+        run: sudo apt-get install -y docker-compose
+
+      - name: Run Docker Container
+        run: docker-compose -f docker-compose-bats.yml up posix_backend
@@ -1,8 +1,11 @@
-FROM --platform=linux/arm64 ubuntu:latest
+FROM ubuntu:latest
 
 ARG DEBIAN_FRONTEND=noninteractive
 ARG SECRETS_FILE=tests/.secrets
 ARG CONFIG_FILE=tests/.env.docker
+ARG GO_LIBRARY=go1.21.7.linux-arm64.tar.gz
+ARG AWS_CLI=awscli-exe-linux-aarch64.zip
+ARG MC_FOLDER=linux-arm64
 
 ENV TZ=Etc/UTC
 RUN apt-get update && \
@@ -24,20 +27,20 @@ RUN apt-get update && \
 WORKDIR /tmp
 
 # Install AWS cli
-RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o "awscliv2.zip" && unzip awscliv2.zip && ./aws/install
+RUN curl "https://awscli.amazonaws.com/${AWS_CLI}" -o "awscliv2.zip" && unzip awscliv2.zip && ./aws/install
 
 # Install mc
-RUN curl https://dl.min.io/client/mc/release/linux-arm64/mc \
+RUN curl https://dl.min.io/client/mc/release/${MC_FOLDER}/mc \
   --create-dirs \
   -o /usr/local/minio-binaries/mc && \
   chmod -R 755 /usr/local/minio-binaries
 ENV PATH="/usr/local/minio-binaries":${PATH}
 
 # Download Go 1.21 (adjust the version and platform as needed)
-RUN wget https://golang.org/dl/go1.21.7.linux-arm64.tar.gz
+RUN wget https://golang.org/dl/${GO_LIBRARY}
 
 # Extract the downloaded archive
-RUN tar -xvf go1.21.7.linux-arm64.tar.gz -C /usr/local
+RUN tar -xvf $GO_LIBRARY -C /usr/local
 
 # Set Go environment variables
 ENV PATH="/usr/local/go/bin:${PATH}"
@@ -60,6 +63,10 @@ RUN git clone https://github.com/bats-core/bats-core.git && \
 USER tester
 COPY --chown=tester:tester . /home/tester
 
+# add bats support libraries
+RUN git clone https://github.com/bats-core/bats-support.git && rm -rf /home/tester/tests/bats-support && mv bats-support /home/tester/tests
+RUN git clone https://github.com/ztombol/bats-assert.git && rm -rf /home/tester/tests/bats-assert && mv bats-assert /home/tester/tests
+
 WORKDIR /home/tester
 RUN make
 
@@ -19,6 +19,7 @@ services:
       dockerfile: Dockerfile_test_bats
       args:
+        - CONFIG_FILE=tests/.env.default
     image: bats_test
   s3_backend:
     build:
       context: .
tests/.env.docker.default (new file, 27 lines)
@@ -0,0 +1,27 @@
+AWS_PROFILE=versity
+AWS_ENDPOINT_URL=https://127.0.0.1:7070
+VERSITY_EXE=./versitygw
+RUN_VERSITYGW=true
+BACKEND=posix
+LOCAL_FOLDER=/tmp/gw
+BUCKET_ONE_NAME=versity-gwtest-bucket-one
+BUCKET_TWO_NAME=versity-gwtest-bucket-two
+CERT=$PWD/cert-docker.pem
+KEY=$PWD/versitygw-docker.pem
+S3CMD_CONFIG=./tests/s3cfg.local.default
+SECRETS_FILE=./tests/.secrets
+MC_ALIAS=versity
+LOG_LEVEL=2
+USERS_FOLDER=$PWD/iam
+#TEST_LOG_FILE=test.log
+#VERSITY_LOG_FILE=versity.log
+IAM_TYPE=folder
+DIRECT=false
+#DIRECT_DISPLAY_NAME=
+#COVERAGE_DB=coverage.sql
+USERNAME_ONE=ABCDEFG
+PASSWORD_ONE=HIJKLMN
+USERNAME_TWO=HIJKLMN
+PASSWORD_TWO=OPQRSTU
+TEST_FILE_FOLDER=$PWD/versity-gwtest-files
+RECREATE_BUCKETS=true
tests/.secrets.default (new file, 5 lines)
@@ -0,0 +1,5 @@
+# change to your account attributes
+AWS_ACCESS_KEY_ID=ABCDEFGHIJKLMNOPQRST
+AWS_SECRET_ACCESS_KEY=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmn
+AWS_REGION=us-east-1
+AWS_PROFILE=versity
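As an illustration (not part of this commit), the `versity` profile these defaults reference could be created from the same values with `aws configure set`:

    # hypothetical profile setup using the .secrets values sourced above
    aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID" --profile versity
    aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY" --profile versity
    aws configure set region "$AWS_REGION" --profile versity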
@@ -58,8 +58,9 @@ To communicate directly with s3, in order to compare the gateway results to dire
 
 ## Instructions - Running With Docker
 
-1. Create a `.secrets` file in the `tests` folder, and add the `AWS_PROFILE`, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and the `AWS_PROFILE` fields, as well as the additional s3 fields explained in the **S3 Backend** section above if running with the s3 backend.
-2. Build and run the `Dockerfile_test_bats` file. Change the `SECRETS_FILE` and `CONFIG_FILE` parameters to point to your secrets and config file, respectively. Example: `docker build -t <tag> -f Dockerfile_test_bats --build-arg="SECRETS_FILE=<file>" --build-arg="CONFIG_FILE=<file>" .`.
+1. Copy `.secrets.default` to `.secrets` in the `tests` folder and change the parameters; if running with the s3 backend, also add the additional s3 fields explained in the **S3 Backend** section above.
+2. By default, the dockerfile targets the **arm** architecture (typical of modern Macs). If using **amd** (typical of older Macs, or Linux), you can either replace the corresponding `ARG` values directly or override them at build time with `--build-arg="<param>=<amd library or folder>"`; see the example build below. You can determine which architecture your OS uses with `uname -a`.
+3. Build and run the `Dockerfile_test_bats` file. Change the `SECRETS_FILE` and `CONFIG_FILE` parameters to point to your secrets and config file, respectively, if not using the defaults. Example: `docker build -t <tag> -f Dockerfile_test_bats --build-arg="SECRETS_FILE=<file>" --build-arg="CONFIG_FILE=<file>" .`.
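As a concrete example of step 2 (values taken from the CI workflow above; the tag is arbitrary), an x86_64 build would look like:

    docker build -t bats_test -f Dockerfile_test_bats \
      --build-arg="GO_LIBRARY=go1.21.7.linux-amd64.tar.gz" \
      --build-arg="AWS_CLI=awscli-exe-linux-x86_64.zip" \
      --build-arg="MC_FOLDER=linux-amd64" .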
 
 ## Instructions - Running with docker-compose
 
@@ -77,3 +78,7 @@ To run in insecure mode, comment out the `CERT` and `KEY` parameters in the `.en
 To use static buckets set the `RECREATE_BUCKETS` value to `false`.
 
 For the s3 backend, see the **S3 Backend** instructions above.
+
+If using AMD rather than ARM architecture, add the corresponding **args** values matching those in the Dockerfile for the **amd** libraries.
+
+A single instance can be run with `docker-compose -f docker-compose-bats.yml up <service name>`, as shown below.
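For example, using the service names visible in this diff (the amd64 override is a sketch mirroring the Dockerfile `ARG`s, not a command from this commit):

    # run a single suite against the posix backend
    docker-compose -f docker-compose-bats.yml up posix_backend

    # sketch: rebuild with amd64 libraries before running
    docker-compose -f docker-compose-bats.yml build \
      --build-arg GO_LIBRARY=go1.21.7.linux-amd64.tar.gz \
      --build-arg AWS_CLI=awscli-exe-linux-x86_64.zip \
      --build-arg MC_FOLDER=linux-amd64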
@@ -21,3 +21,17 @@ delete_bucket_tagging() {
   fi
   return 0
 }
+
+delete_bucket_tagging_with_user() {
+  log 6 "delete_bucket_tagging_with_user"
+  record_command "delete-bucket-tagging" "client:s3api"
+  if [ $# -ne 3 ]; then
+    log 2 "delete bucket tagging command missing username, password, bucket name"
+    return 1
+  fi
+  if ! error=$(AWS_ACCESS_KEY_ID="$1" AWS_SECRET_ACCESS_KEY="$2" aws --no-verify-ssl s3api delete-bucket-tagging --bucket "$3" 2>&1); then
+    log 2 "error deleting bucket tagging with user: $error"
+    return 1
+  fi
+  return 0
+}
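The `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` assignments are scoped to the single `aws` invocation, so the helper acts as the named user without modifying the caller's environment. A sketch of how a bats test might call it (assumed usage; this exact call does not appear in the diff):

    run delete_bucket_tagging_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME"
    assert_success "error deleting bucket tagging as user"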
@@ -64,4 +64,51 @@ list_objects_s3api() {
     IFS=$'\n' read -rd '' -a object_array <<<"$keys"
   fi
   return 0
 }
+
+# list objects in bucket, v1
+# param: bucket
+# export objects on success, return 1 for failure
+list_objects_s3api_v1() {
+  if [ $# -lt 1 ] || [ $# -gt 2 ]; then
+    echo "list objects command requires bucket, (optional) delimiter"
+    return 1
+  fi
+  if [ "$2" == "" ]; then
+    objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1") || local result=$?
+  else
+    objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1" --delimiter "$2") || local result=$?
+  fi
+  if [[ $result -ne 0 ]]; then
+    echo "error listing objects: $objects"
+    return 1
+  fi
+  export objects
+}
+
+list_objects_with_prefix() {
+  if [ $# -ne 3 ]; then
+    log 2 "'list_objects_with_prefix' command requires client, bucket, prefix"
+    return 1
+  fi
+  local result=0
+  if [ "$1" == 's3' ]; then
+    objects=$(aws --no-verify-ssl s3 ls s3://"$2/$3" 2>&1) || result=$?
+  elif [ "$1" == 's3api' ]; then
+    objects=$(aws --no-verify-ssl s3api list-objects --bucket "$2" --prefix "$3" 2>&1) || result=$?
+  elif [ "$1" == 's3cmd' ]; then
+    objects=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3://"$2/$3" 2>&1) || result=$?
+  elif [[ "$1" == 'mc' ]]; then
+    objects=$(mc --insecure ls "$MC_ALIAS/$2/$3" 2>&1) || result=$?
+  else
+    log 2 "invalid command type '$1'"
+    return 1
+  fi
+  if [ $result -ne 0 ]; then
+    log 2 "error listing objects: $objects"
+    return 1
+  fi
+  log 5 "output: $objects"
+  export objects
+  return 0
+}
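`list_objects_with_prefix` reports results by exporting the raw client output in `$objects` rather than echoing it. Callers use it the way `list_and_check_directory_obj` does later in this diff, e.g.:

    if ! list_objects_with_prefix "s3api" "$BUCKET_ONE_NAME" "prefix/"; then
      log 2 "error listing objects with prefix"
      return 1
    fi
    log 5 "$objects"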
@@ -10,4 +10,16 @@ list_parts() {
     log 2 "Error listing multipart upload parts: $listed_parts"
     return 1
   fi
 }
+
+list_parts_with_user() {
+  if [ $# -ne 5 ]; then
+    log 2 "'list_parts_with_user' requires username, password, bucket, key, upload ID"
+    return 1
+  fi
+  record_command 'list-parts' 'client:s3api'
+  if ! listed_parts=$(AWS_ACCESS_KEY_ID="$1" AWS_SECRET_ACCESS_KEY="$2" aws --no-verify-ssl s3api list-parts --bucket "$3" --key "$4" --upload-id "$5" 2>&1); then
+    log 2 "Error listing multipart upload parts: $listed_parts"
+    return 1
+  fi
+}
tests/iam.sh (deleted, 50 lines)
@@ -1,50 +0,0 @@
-#!/usr/bin/env bash
-
-get_iam_parameters() {
-  if [[ -z "$IAM_TYPE" ]]; then
-    export IAM_TYPE="folder"
-  fi
-  if [[ "$IAM_TYPE" == "folder" ]]; then
-    if [[ -z "$USERS_FOLDER" ]]; then
-      log 2 "if IAM type is folder (or not set), USERS_FOLDER parameter is required"
-      return 1
-    fi
-    if [ ! -d "$USERS_FOLDER" ]; then
-      if mkdir_error=$(mkdir "$USERS_FOLDER" 2>&1); then
-        log 2 "error creating users folder: $mkdir_error"
-        return 1
-      fi
-    fi
-    iam_params="--iam-dir=$USERS_FOLDER"
-    export iam_params
-    return 0
-  fi
-  if [[ $IAM_TYPE == "s3" ]]; then
-    if [[ -z "$USERS_BUCKET" ]]; then
-      log 2 "if IAM type is s3, USERS_BUCKET is required"
-      return 1
-    fi
-    log 4 "$USERS_BUCKET"
-    if ! bucket_exists "s3api" "$USERS_BUCKET"; then
-      log 4 "bucket doesn't exist"
-      if [[ $? == 2 ]]; then
-        log 2 "error checking if users bucket exists"
-        return 1
-      fi
-      if ! create_bucket "s3api" "$USERS_BUCKET"; then
-        log 2 "error creating bucket"
-        return 1
-      fi
-      log 4 "bucket create successful"
-    else
-      log 4 "bucket exists"
-    fi
-    iam_params="--s3-iam-access $AWS_ACCESS_KEY_ID --s3-iam-secret $AWS_SECRET_ACCESS_KEY \
-      --s3-iam-region us-east-1 --s3-iam-bucket $USERS_BUCKET --s3-iam-endpoint $AWS_ENDPOINT_URL \
-      --s3-iam-noverify"
-    export iam_params
-    return 0
-  fi
-  log 2 "unrecognized IAM_TYPE value: $IAM_TYPE"
-  return 1
-}
@@ -611,3 +611,26 @@ EOF
 
   delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
 }
+
+test_common_ls_directory_object() {
+  test_file="a"
+
+  run create_test_files "$test_file"
+  assert_success "error creating file"
+
+  run setup_bucket "$1" "$BUCKET_ONE_NAME"
+  assert_success "error setting up bucket"
+
+  if [ "$1" == 's3cmd' ]; then
+    put_object_client="s3api"
+  else
+    put_object_client="$1"
+  fi
+  run put_object "$put_object_client" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file/"
+  assert_success "error putting test file folder"
+
+  run list_and_check_directory_obj "$1" "$test_file"
+  assert_success "error listing and checking directory object"
+
+  delete_bucket_or_contents "$1" "$BUCKET_ONE_NAME"
+}
@@ -122,4 +122,6 @@ export RUN_MC=true
   delete_bucket_or_contents "mc" "$BUCKET_ONE_NAME"
 }
 
+@test "test_ls_directory_object" {
+  test_common_ls_directory_object "mc"
+}
@@ -1,6 +1,7 @@
 #!/usr/bin/env bats
 
 source ./tests/test_common.sh
+source ./tests/util_file.sh
 
 # complete-multipart-upload
 @test "test_complete_multipart_upload" {
@@ -47,3 +48,7 @@ source ./tests/test_common.sh
   setup_bucket "s3" "$BUCKET_ONE_NAME"
   delete_bucket "s3" "$BUCKET_ONE_NAME" || fail "error deleting bucket"
 }
+
+@test "test_ls_directory_object" {
+  test_common_ls_directory_object "s3"
+}
@@ -433,6 +433,10 @@ export RUN_USERS=true
   test_s3api_policy_get_bucket_tagging
 }
 
+@test "test_policy_list_upload_parts" {
+  test_s3api_policy_list_upload_parts
+}
+
 @test "test_policy_put_acl" {
   test_s3api_policy_put_acl
 }
@@ -456,3 +460,7 @@ export RUN_USERS=true
   delete_bucket_or_contents "aws" "$bucket_name"
 }
+
+@test "test_ls_directory_object" {
+  test_common_ls_directory_object "s3api"
+}
@@ -1,5 +1,6 @@
 #!/usr/bin/env bats
 
+source ./tests/util_multipart.sh
 source ./tests/util_tags.sh
 source ./tests/commands/get_bucket_tagging.sh
 source ./tests/commands/put_bucket_tagging.sh
@@ -537,7 +538,7 @@ test_s3api_policy_put_bucket_tagging() {
   run setup_policy_with_single_statement "$test_file_folder/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
   assert_success "error setting up policy"
   run put_bucket_tagging_with_user "$BUCKET_ONE_NAME" "$tag_key" "$tag_value" "$USERNAME_ONE" "$PASSWORD_ONE"
-  assert_failure "able to put bucket tagging despite lack of permissions"
+  assert_failure
   run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$test_file_folder/$policy_file"
   assert_success "error putting policy"
   run put_bucket_tagging_with_user "$BUCKET_ONE_NAME" "$tag_key" "$tag_value" "$USERNAME_ONE" "$PASSWORD_ONE"
@@ -586,6 +587,7 @@ test_s3api_policy_put_acl() {
     [[ $id == "all-users" ]] || fail "unexpected ID: $id"
   fi
   delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
+  delete_test_files "$policy_file"
 }
 
 test_s3api_policy_get_bucket_tagging() {
@@ -609,7 +611,7 @@ test_s3api_policy_get_bucket_tagging() {
   assert_success "unable to put bucket tagging"
 
   run get_bucket_tagging_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME"
-  assert_failure "able to get bucket tagging despite lack of permissions"
+  assert_failure
 
   run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$test_file_folder/$policy_file"
   assert_success "error putting policy"
@@ -617,4 +619,36 @@ test_s3api_policy_get_bucket_tagging() {
   assert_success "get and check bucket tags failed"
 
   delete_bucket_or_contents "s3api" "$BUCKET_ONE_NAME"
-}
+  delete_test_files "$policy_file"
+}
+
+test_s3api_policy_list_upload_parts() {
+  policy_file="policy_file"
+  test_file="test_file"
+  tag_key="TestKey"
+  tag_value="TestValue"
+
+  run create_test_files "$policy_file"
+  assert_success "error creating test files"
+
+  run create_large_file "$test_file"
+  assert_success "error creating large file"
+
+  run setup_bucket "s3api" "$BUCKET_ONE_NAME"
+  assert_success "error setting up bucket"
+
+  run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
+  assert_success "error creating user '$USERNAME_ONE'"
+
+  run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
+  assert_success "error setting up policy"
+
+  run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
+  assert_success "error putting policy"
+
+  run create_upload_and_test_parts_listing "$test_file" "$policy_file"
+  assert_success "error creating upload and testing parts listing"
+
+  delete_bucket_or_contents "s3api" "$BUCKET_ONE_NAME"
+  delete_test_files "$policy_file" "$test_file"
+}
@@ -109,3 +109,6 @@ export RUN_USERS=true
   delete_bucket_or_contents "s3cmd" "$BUCKET_ONE_NAME"
 }
+
+@test "test_ls_directory_object" {
+  test_common_ls_directory_object "s3cmd"
+}
@@ -426,6 +426,20 @@ delete_bucket_or_contents_if_exists() {
   return 0
 }
 
+setup_buckets() {
+  if [ $# -lt 1 ]; then
+    log 2 "'setup_buckets' command requires bucket names"
+    return 1
+  fi
+  for name in "$@"; do
+    if ! setup_bucket "$name"; then
+      log 2 "error setting up bucket $name"
+      return 1
+    fi
+  done
+  return 0
+}
+
 # params: client, bucket name
 # fail if bucket is not properly set up
 setup_bucket() {
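`setup_buckets` simply loops `setup_bucket` over its arguments, so a test needing both default buckets could call it as follows (assumed usage, not shown in this diff):

    if ! setup_buckets "$BUCKET_ONE_NAME" "$BUCKET_TWO_NAME"; then
      log 2 "error setting up buckets"
      return 1
    fi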
@@ -727,26 +741,6 @@ get_and_verify_object_tags() {
   return 0
 }
 
-# list objects in bucket, v1
-# param: bucket
-# export objects on success, return 1 for failure
-list_objects_s3api_v1() {
-  if [ $# -lt 1 ] || [ $# -gt 2 ]; then
-    echo "list objects command requires bucket, (optional) delimiter"
-    return 1
-  fi
-  if [ "$2" == "" ]; then
-    objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1") || local result=$?
-  else
-    objects=$(aws --no-verify-ssl s3api list-objects --bucket "$1" --delimiter "$2") || local result=$?
-  fi
-  if [[ $result -ne 0 ]]; then
-    echo "error listing objects: $objects"
-    return 1
-  fi
-  export objects
-}
-
 # perform all parts of a multipart upload before completion command
 # params: bucket, key, file to split and upload, number of file parts to upload
 # return: 0 for success, 1 for failure
@@ -1132,3 +1126,34 @@ create_presigned_url() {
   fi
   export presigned_url
 }
+
+list_and_check_directory_obj() {
+  #assert [ $# -eq 2 ]
+  if [ $# -ne 2 ]; then
+    log 2 "'list_and_check_directory_obj' requires client, file name"
+    return 1
+  fi
+  if ! list_objects_with_prefix "$1" "$BUCKET_ONE_NAME" "$2/"; then
+    log 2 "error listing objects with prefix"
+    return 1
+  fi
+  if [ "$1" == "s3api" ]; then
+    # shellcheck disable=SC2154
+    if ! key=$(echo "$objects" | grep -v "InsecureRequestWarning" | jq -r ".Contents[0].Key" 2>&1); then
+      log 2 "error getting key: $key"
+      return 1
+    fi
+    if [ "$key" != "$2/" ]; then
+      log 2 "key mismatch ($key, $2)"
+      return 1
+    fi
+  elif [ "$1" == "s3" ]; then
+    log 5 "$objects"
+    filename=$(echo "$objects" | grep -v "InsecureRequestWarning" | awk '{print $4}')
+    if [ "$filename" != "$2" ]; then
+      log 2 "filename mismatch ($filename, $2)"
+      return 1
+    fi
+  fi
+  return 0
+}
tests/util_multipart.sh (new file, 34 lines)
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+create_upload_and_test_parts_listing() {
+  if [ $# -ne 2 ]; then
+    log 2 "'create_upload_and_test_parts_listing' requires test file, policy file"
+    return 1
+  fi
+  if ! create_multipart_upload_with_user "$BUCKET_ONE_NAME" "$1" "$USERNAME_ONE" "$PASSWORD_ONE"; then
+    log 2 "error creating multipart upload with user"
+    return 1
+  fi
+
+  # shellcheck disable=SC2154
+  if list_parts_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME" "$1" "$upload_id"; then
+    log 2 "list parts with user succeeded despite lack of policy permissions"
+    return 1
+  fi
+
+  if ! setup_policy_with_single_statement "$TEST_FILE_FOLDER/$2" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:ListMultipartUploadParts" "arn:aws:s3:::$BUCKET_ONE_NAME/*"; then
+    log 2 "error setting up policy"
+    return 1
+  fi
+
+  if ! put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$2"; then
+    log 2 "error putting policy"
+    return 1
+  fi
+
+  if ! list_parts_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME" "$1" "$upload_id"; then
+    log 2 "error listing parts after policy add"
+    return 1
+  fi
+  return 0
+}
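The SC2154 directive marks the helper's implicit contract: `create_multipart_upload_with_user` (defined elsewhere in the suite) is assumed to export `$upload_id` for the new upload, which both `list_parts_with_user` calls then consume. A sketch of that assumed contract:

    # assumption: create_multipart_upload_with_user exports $upload_id (not shown in this diff)
    create_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$USERNAME_ONE" "$PASSWORD_ONE"
    log 5 "created multipart upload: $upload_id"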
@@ -45,3 +45,20 @@ get_and_check_bucket_tags() {
   assert_success "error getting and checking bucket tags"
   return 0
 }
+
+verify_no_bucket_tags() {
+  if [ $# -ne 1 ]; then
+    log 2 "'verify_no_bucket_tags' requires bucket name"
+    return 1
+  fi
+  if ! get_bucket_tagging "$1"; then
+    log 2 "error retrieving bucket tagging"
+    return 1
+  fi
+  # shellcheck disable=SC2154
+  if [[ "$tags" != "" ]]; then
+    log 2 "tags should be empty, but are: $tags"
+    return 1
+  fi
+  return 0
+}
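`verify_no_bucket_tags` reads the `$tags` variable exported by `get_bucket_tagging`, so it can back a delete-tagging assertion such as the following (assumed usage, not shown in this diff):

    run delete_bucket_tagging_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME"
    assert_success "error deleting bucket tagging"

    run verify_no_bucket_tags "$BUCKET_ONE_NAME"
    assert_success "bucket tags still present after delete"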
@@ -1,7 +1,6 @@
 #!/usr/bin/env bash
 
 source ./tests/util_file.sh
-source ./tests/iam.sh
 
 start_versity_process() {
   if [[ $# -ne 1 ]]; then