mirror of
https://github.com/versity/versitygw.git
synced 2026-04-30 17:25:43 +00:00
test: initial bats test tagging system, shellcheck fix
This commit is contained in:
2
.github/workflows/docker-bats.yml
vendored
2
.github/workflows/docker-bats.yml
vendored
@@ -26,3 +26,5 @@ jobs:
|
||||
run: |
|
||||
docker compose -f tests/docker-compose-bats.yml --project-directory . \
|
||||
up --exit-code-from s3api_np_only s3api_np_only
|
||||
docker compose -f tests/docker-compose-bats.yml --project-directory . \
|
||||
up --exit-code-from openssl_only openssl_only
|
||||
|
||||
12
.github/workflows/shellcheck.yml
vendored
12
.github/workflows/shellcheck.yml
vendored
@@ -14,4 +14,14 @@ jobs:
|
||||
- name: Run checks
|
||||
run: |
|
||||
shellcheck --version
|
||||
shellcheck -e SC1091 tests/*.sh tests/*/*.sh
|
||||
overall_rc=0
|
||||
while IFS= read -r -d '' f; do
|
||||
rc=0
|
||||
echo "CHECKING $f"
|
||||
shellcheck -S error -e SC1091 "$f" || rc=$?
|
||||
echo "exit=$rc file=$f"
|
||||
if [ "$rc" -ne 0 ]; then
|
||||
overall_rc="$rc"
|
||||
fi
|
||||
done < <(find . \( -path './tests/*.sh' -o -path './tests/*/*.sh' \) -print0)
|
||||
exit "$overall_rc"
|
||||
|
||||
@@ -28,6 +28,13 @@ services:
|
||||
- CONFIG_FILE=tests/.env.default
|
||||
image: bats_test
|
||||
command: ["s3api-bucket,s3api-object"]
|
||||
openssl_only:
|
||||
build:
|
||||
dockerfile: tests/Dockerfile_test_bats
|
||||
args:
|
||||
- CONFIG_FILE=tests/.env.default
|
||||
image: bats_test
|
||||
command: ["--tags","openssl"]
|
||||
direct:
|
||||
build:
|
||||
dockerfile: tests/Dockerfile_direct
|
||||
|
||||
@@ -20,7 +20,7 @@ calculate_multipart_checksum() {
|
||||
fi
|
||||
log 5 "checksums: ${*:4}"
|
||||
if [ "$1" == "COMPOSITE" ]; then
|
||||
if ! calculate_composite_checksum "$lowercase_checksum_algorithm" ${@:4}; then
|
||||
if ! calculate_composite_checksum "$lowercase_checksum_algorithm" "${@:4}"; then
|
||||
log 2 "error calculating checksum"
|
||||
return 1
|
||||
fi
|
||||
@@ -51,7 +51,7 @@ complete_multipart_upload_with_checksum() {
|
||||
fi
|
||||
log 5 "parts payload: $parts_payload"
|
||||
log 5 "checksums: ${checksums[*]}"
|
||||
if ! calculate_multipart_checksum "$6" "$5" "$3" ${checksums[@]}; then
|
||||
if ! calculate_multipart_checksum "$6" "$5" "$3" "${checksums[@]}"; then
|
||||
log 2 "error calculating multipart checksum"
|
||||
return 1
|
||||
fi
|
||||
@@ -76,7 +76,7 @@ calculate_composite_checksum() {
|
||||
return 1
|
||||
fi
|
||||
log 5 "checksums: ${*:2}"
|
||||
for checksum in ${@:2}; do
|
||||
for checksum in "${@:2}"; do
|
||||
if ! printf '%s' "$checksum" | base64 -d >> "$TEST_FILE_FOLDER/all_checksums.bin"; then
|
||||
log 2 "error calculating binary checksum and adding to file"
|
||||
return 1
|
||||
@@ -113,7 +113,7 @@ test_multipart_upload_with_checksum() {
|
||||
log 2 "error performing multipart upload with checksum before completion"
|
||||
return 1
|
||||
fi
|
||||
if ! calculate_multipart_checksum "$1" 2 "$TEST_FILE_FOLDER/$mp_file_name" ${checksums[@]}; then
|
||||
if ! calculate_multipart_checksum "$1" 2 "$TEST_FILE_FOLDER/$mp_file_name" "${checksums[@]}"; then
|
||||
log 2 "error calculating multipart checksum"
|
||||
return 1
|
||||
fi
|
||||
@@ -158,7 +158,7 @@ test_complete_multipart_upload_incorrect_checksum() {
|
||||
log 2 "error performing multipart upload with checksum before completion"
|
||||
return 1
|
||||
fi
|
||||
if ! calculate_multipart_checksum "$1" 2 "$TEST_FILE_FOLDER/$mp_file_name" ${checksums[@]}; then
|
||||
if ! calculate_multipart_checksum "$1" 2 "$TEST_FILE_FOLDER/$mp_file_name" "${checksums[@]}"; then
|
||||
log 2 "error calculating multipart checksum"
|
||||
return 1
|
||||
fi
|
||||
|
||||
@@ -29,7 +29,6 @@ setup_and_create_bucket_and_check_acl() {
|
||||
if ! check_param_count_v2 "grant env val" 1 $#; then
|
||||
return 1
|
||||
fi
|
||||
test_file="$test_file"
|
||||
if ! bucket_cleanup_if_bucket_exists "$BUCKET_ONE_NAME"; then
|
||||
log 2 "error cleaning up bucket"
|
||||
return 1
|
||||
@@ -46,6 +45,7 @@ setup_and_create_bucket_and_check_acl() {
|
||||
id="$user_canonical_id"
|
||||
fi
|
||||
log 5 "owner: $AWS_ACCESS_KEY_ID"
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "username=$username, password=$password"
|
||||
envs="$1=$id OBJECT_OWNERSHIP=BucketOwnerPreferred"
|
||||
log 5 "envs: $envs"
|
||||
|
||||
@@ -63,6 +63,7 @@ delete_old_versions_base64() {
|
||||
return 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "base64 versions: ${base64_pairs[*]}"
|
||||
for pair in "${base64_pairs[@]}"; do
|
||||
log 5 "pair: $pair"
|
||||
@@ -171,6 +172,7 @@ delete_delete_marker() {
|
||||
echo "error parsing delete marker ID"
|
||||
return 1
|
||||
fi
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "version or marker ID: $version_or_marker_id"
|
||||
if ! delete_object_version_rest "$bucket_name" "$object_key" "$version_or_marker_id"; then
|
||||
log 2 "error deleting delete marker"
|
||||
|
||||
@@ -23,6 +23,7 @@ get_check_acl_id() {
|
||||
log 2 "error retrieving acl"
|
||||
return 1
|
||||
fi
|
||||
# shellcheck disable=SC2154
|
||||
log 5 "Initial ACLs: $acl"
|
||||
if ! id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq -r '.Owner.ID' 2>&1); then
|
||||
log 2 "error getting ID: $id"
|
||||
|
||||
@@ -35,8 +35,8 @@ get_bucket_cors_check_404_header_and_bucket_name() {
|
||||
if ! check_param_count_v2 "bucket name" 1 $#; then
|
||||
return 1
|
||||
fi
|
||||
argument_name="BucketName"
|
||||
argument_value="$1"
|
||||
# shellcheck disable=SC2034
|
||||
argument_name="BucketName" argument_value="$1"
|
||||
if ! send_rest_go_command_expect_error_callback "404" "NoSuchCORSConfiguration" "The CORS configuration does not exist" \
|
||||
"check_cors_404_content_type_header_and_bucket_name" "-bucketName" "$1" "-query" "cors"; then
|
||||
log 2 "error sending get cors command and checking result"
|
||||
|
||||
@@ -29,6 +29,7 @@ get_and_verify_object_tags() {
|
||||
return 1
|
||||
fi
|
||||
if [[ $1 == 'aws' ]]; then
|
||||
# shellcheck disable=SC2154
|
||||
tag_set_key=$(echo "$tags" | jq '.TagSet[0].Key')
|
||||
tag_set_value=$(echo "$tags" | jq '.TagSet[0].Value')
|
||||
if [[ $tag_set_key != '"'$4'"' ]]; then
|
||||
|
||||
@@ -106,6 +106,7 @@ add_version_tags_check_version_id() {
|
||||
log 2 "error tagging old version"
|
||||
return 1
|
||||
fi
|
||||
# shellcheck disable=SC2154
|
||||
if ! send_rest_go_command_callback "200" "check_header_version_id" "-bucketName" "$1" "-objectKey" "$2" "-debug" "-logFile" "signature.log" \
|
||||
"-method" "GET" "-query" "tagging=&versionId=$version_id" "-tagKey" "key" "-tagValue" "value" "-contentMD5" "--" "$version_id"; then
|
||||
log 2 "error tagging object"
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
# under the License.
|
||||
|
||||
parse_object_tags_s3api() {
|
||||
# shellcheck disable=SC2154
|
||||
if ! tag_set_key=$(echo "$tags" | jq -r '.TagSet[0].Key' 2>&1); then
|
||||
log 2 "error retrieving tag key: $tag_set_key"
|
||||
return 1
|
||||
|
||||
@@ -99,6 +99,7 @@ check_object_versions_before_deletion() {
|
||||
log 2 "error parsing versions"
|
||||
return 1
|
||||
fi
|
||||
# shellcheck disable=SC2154
|
||||
if [ "${#version_ids[@]}" -ne 1 ]; then
|
||||
log 2 "expected version ID count of 1, was '${#version_ids[@]}'"
|
||||
return 1
|
||||
@@ -133,6 +134,7 @@ check_object_versions_after_deletion() {
|
||||
log 2 "expected version ID of '$2', was '${version_ids[0]}'"
|
||||
return 1
|
||||
fi
|
||||
# shellcheck disable=SC2154
|
||||
if [ "${version_islatests[0]}" != "false" ]; then
|
||||
log 2 "expected 'IsLatest' of version ID to be false, was '${version_islatests[0]}'"
|
||||
return 1
|
||||
|
||||
@@ -394,3 +394,29 @@ verify_owner_info_exists() {
|
||||
done
|
||||
return 0
|
||||
}
|
||||
|
||||
list_objects_delimiter() {
|
||||
run assert_param_count "ListObjects version" 1 $#
|
||||
assert_success
|
||||
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
# shellcheck disable=SC2154
|
||||
local bucket_name="$output"
|
||||
|
||||
file_names=("a-b-1.txt" "a-b-2.txt" "a-b/c-1.txt" "a-b/c-2.txt" "a-b/d.txt" "a/c.txt")
|
||||
local prefix="a-"
|
||||
run create_test_files_and_folders "${file_names[@]}"
|
||||
assert_success
|
||||
|
||||
run setup_bucket_v2 "$bucket_name"
|
||||
assert_success
|
||||
|
||||
for file_name in "${file_names[@]}"; do
|
||||
run put_object "rest" "$TEST_FILE_FOLDER/$file_name" "$bucket_name" "$file_name"
|
||||
assert_success
|
||||
done
|
||||
|
||||
run list_objects_with_prefix_and_delimiter_check_results "$bucket_name" "2" "$prefix" "/" "a-b/" "--" "a-b-1.txt" "a-b-2.txt"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
source ./tests/logger.sh
|
||||
|
||||
check_param_count() {
|
||||
if [ $# -ne 4 ]; then
|
||||
log 2 "'check_param_count' requires function name, params list, expected, actual"
|
||||
|
||||
14
tests/run.sh
14
tests/run.sh
@@ -71,6 +71,13 @@ run_set_if_matching() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Run all bats tests matching the given tag list via the tag runner script.
# Arguments: $1 - run sets (comma-separated tags)
# Exits:     1 on bad parameter count; otherwise propagates get_tests.sh status
handle_tags() {
  if ! check_param_count_v2 "run sets, separated by comma" 1 $#; then
    exit 1
  fi
  ./tests/tags/get_tests.sh --run "$1"
}
|
||||
|
||||
handle_param() {
|
||||
if ! check_param_count_v2 "run sets, separated by comma" 1 $#; then
|
||||
exit 1
|
||||
@@ -100,6 +107,13 @@ if [ $# -le 0 ] || [ "$1" == "-h" ] || [ "$1" == "--help" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ "$1" == "--tags" ]; then
|
||||
if ! handle_tags "${@:2}"; then
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
fi
|
||||
|
||||
IFS=',' read -ra options <<< "$1"
|
||||
for option in "${options[@]}"; do
|
||||
handle_param "$option"
|
||||
|
||||
57
tests/tags/get_test_info.py
Normal file
57
tests/tags/get_test_info.py
Normal file
@@ -0,0 +1,57 @@
|
||||
import re, sys
|
||||
|
||||
|
||||
def main():
    """CLI entry point: argv = [mode, tag-list, test files...].

    Tags are split on commas/whitespace and lowercased before matching.
    """
    match_mode = sys.argv[1]
    raw_tags = sys.argv[2].strip()
    wanted = [tag.lower() for tag in re.split(r"[,\s]+", raw_tags) if tag]
    for test_file in sys.argv[3:]:
        print_file_test_info_matching(test_file, match_mode, wanted)
|
||||
|
||||
|
||||
def print_file_test_info_matching(path: str, mode: str, want: list[str]):
    """Scan one bats file and print every @test whose preceding '# tags:'
    comment satisfies `want` under `mode` ("any" = OR; otherwise AND).

    Unreadable files are reported (the OSError text is printed) and skipped
    rather than aborting the whole scan.
    """
    try:
        lines = open(path, "r", encoding="utf-8", errors="replace").read().splitlines()
    except OSError as e:
        print(str(e))
        return

    # A '# tags:' comment applies only to the next @test that follows it.
    pending_tags = None
    for i, line in enumerate(lines, 1):
        # pending_tags = add_test_info(line, pending_tags)
        m = re.match(r"^\s*#\s*tags?\s*:\s*(.+)\s*$", line, flags=re.I)
        if m:
            pending_tags = norm_tags(line)
            continue

        m = re.match(r'^\s*@test\s+"([^"]+)"', line)
        if m:
            test_name = m.group(1)
            tags = pending_tags or []
            # Consume the pending tags so they do not leak onto a later test.
            pending_tags = None

            print_test_if_match(path, i, test_name, tags, want, mode)
|
||||
|
||||
|
||||
def norm_tags(s: str):
    """Turn a '# tags: a, b c' comment line into a list of tag strings."""
    without_prefix = re.sub(r"^\s*#\s*tags?\s*:\s*", "", s.strip(), flags=re.I)
    return [piece for piece in re.split(r"[,\s]+", without_prefix) if piece]
|
||||
|
||||
|
||||
def print_test_if_match(file: str, line_num: int, name: str, tags, want: list[str], mode: str):
    """Print 'file:line<TAB>[tags]<TAB>name' when the test's tags satisfy
    the requested tag list; an empty `want` matches every test."""
    if not want:
        matched = True
    else:
        have = [t.lower() for t in set(tags)]
        combine = any if mode == "any" else all
        matched = combine(t in have for t in want)
    if matched:
        print(f"{file}:{line_num}\t[{','.join(tags)}]\t{name}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
150
tests/tags/get_tests.sh
Executable file
150
tests/tags/get_tests.sh
Executable file
@@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright 2026 Versity Software
|
||||
# This file is licensed under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
source ./tests/drivers/file.sh
|
||||
source ./tests/drivers/params.sh
|
||||
|
||||
# Print CLI usage/help text to stderr.
# The quoted heredoc delimiter ('EOF') prevents any variable expansion.
usage() {
  cat >&2 <<'EOF'
Usage:
  tests/tags/get_tests.sh [--any] [--list] [--count] [--run] <tag1,tag2,...>
  tests/tags/get_tests.sh --list-tags <tag>

Options:
  --any Match any of the provided tags (OR). Default is ALL tags (AND).
  --list Print matching tests (one per line).
  --count Print count of matching tests.
  --run Execute matching tests via bats.
  --list-tags Print descriptions for tags (from tests/tags/tags.yaml).
  -h, --help Show this help.

Notes:
  - Requires VERSITYGW_TEST_ENV to be set.
  - Tags are passed as a single comma-separated argument.

Examples:
  VERSITYGW_TEST_ENV=./tests/.env.default tests/tags/get_tests.sh --list ListBuckets
  VERSITYGW_TEST_ENV=./tests/.env.default tests/tags/get_tests.sh --any --count "openssl,ListBuckets"
  tests/tags/get_tests.sh --list-tags ListBuckets
EOF
}
|
||||
|
||||
# List tests matching the given tags across all git-tracked test files.
# Arguments: $1 - matching mode ("any" or "all"); $2 - comma-separated tags
# Outputs:   one "file:line<TAB>[tags]<TAB>name" row per matching test
list_tests_by_tags() {
  if ! check_param_count_gt "tag matching mode, comma-separated tags (optional)" 1 $#; then
    return 1
  fi
  # Only tracked files are scanned, so stray/untracked scripts are ignored.
  mapfile -t files < <(git ls-files 'tests/**/*.sh' 'tests/**/*.bats' 'tests/*.sh' 'tests/*.bats')
  python3 ./tests/tags/get_test_info.py "$1" "$2" "${files[@]}"
}
|
||||
|
||||
# Run each test listed in $1 (rows of "file:line<TAB>[tags]<TAB>name")
# one at a time via bats, exiting non-zero on the first failure.
# Arguments: $1 - newline-separated test rows (output of list_tests_by_tags)
# Globals:   VERSITYGW_TEST_ENV (read), HOME (read, locates the bats binary)
run_tests() {
  if ! check_param_count_v2 "test lines" 1 $#; then
    return 1
  fi

  local i=0 files=() names=() tags=()

  # Parse tab-separated rows; '|| [ -n "$loc" ]' keeps a final row that
  # lacks a trailing newline.
  while IFS=$'\t' read -r loc t name || [ -n "$loc" ]; do
    [ -n "${loc:-}" ] || continue
    file="${loc%%:*}"  # strip the ":line" suffix from "file:line"
    files+=("$file")
    tags+=("${t:-}")
    names+=("${name:-}")
    i=$((i+1))
  done <<< "$1"

  if [ "${#files[@]}" -eq 0 ]; then
    echo "no matching tests"
    exit 0
  fi

  # Run each matched test by exact (regex-escaped) name filter.
  for ((j=0;j<${#files[@]};j++)); do
    file="${files[$j]}"
    name="${names[$j]}"
    name_re="$(escape_regex "$name")"
    cmd=("$HOME/bin/bats" -f "$name_re" "$file")
    if ! VERSITYGW_TEST_ENV=$VERSITYGW_TEST_ENV "${cmd[@]}"; then
      echo "error running test"
      exit 1
    fi
  done
}
|
||||
|
||||
# Print $1 with all regex metacharacters escaped (delegates to Python's
# re.escape so bats -f treats the test name as a literal match).
escape_regex() {
  python3 -c 'import re, sys; print(re.escape(sys.argv[1]))' "$1"
}
|
||||
|
||||
# Dispatch the list/count/run actions for tests matching the given tags.
# Arguments: $1 - matching mode ("any"/"all"); $2 - list flag (0/1);
#            $3 - count flag (0/1); $4 - run flag (0/1); $5.. - tags
# Exits:     0 after performing the requested actions; returns 1 on bad args
list_and_run_tests() {
  if ! check_param_count_gt "matching mode, list flag, count flag, run flag, tags" 4 $#; then
    return 1
  fi
  # At least one of --list / --count / --run must have been chosen.
  if [ "$2" -eq 0 ] && [ "$3" -eq 0 ] && [ "$4" -eq 0 ]; then
    usage
    return 1
  fi
  tests=$(list_tests_by_tags "$1" "${@:5}")
  if [ "$2" -eq 1 ]; then
    echo "$tests"
  fi

  if [ "$3" -eq 1 ]; then
    # NOTE(review): reports 1 even when $tests is empty, because echo
    # always emits one newline — confirm whether 0 should be special-cased.
    count=$(wc -l <(echo "$tests") | awk '{print $1}')
    echo "$count tests matching tags"
  fi

  if [ "$4" -eq 1 ]; then
    run_tests "$tests"
  fi
  exit 0
}
|
||||
|
||||
# --- CLI entry -------------------------------------------------------------
# Parse flags, then either print tag descriptions (--list-tags) or
# list/count/run the tests matching the forwarded tag arguments.
#
# Bug fix: the previous loop used `shift; tag_type="$1"` inside
# `for arg in "$@"`, which reads the FIRST remaining positional — not the
# argument following --list-tags — whenever --list-tags is not the first
# flag, and also leaked the tag value into forwarded_args. A state flag
# now captures the very next loop argument instead.
count_flag=0 list_flag=0 tag_matching_mode="all" list_tags_flag=0 run_flag=0 tag_type="" forwarded_args=()
expect_tag_type=0
for arg in "$@"; do
  # The argument immediately following --list-tags is the tag to describe.
  if [ "$expect_tag_type" -eq 1 ]; then
    tag_type="$arg"
    expect_tag_type=0
    continue
  fi
  case "$arg" in
    --count) count_flag=1 ;;
    --list) list_flag=1 ;;
    --any) tag_matching_mode="any" ;;
    -h|--help) usage; exit 0 ;;
    --list-tags) list_tags_flag=1
      expect_tag_type=1
      ;;
    --run) run_flag=1 ;;
    *) forwarded_args+=("$arg") ;;
  esac
done

if [ "$list_tags_flag" -eq 1 ]; then
  # --list-tags is standalone; combining it with other actions is an error.
  if [ "$count_flag" -eq 1 ] || [ "$list_flag" -eq 1 ] || [ "$run_flag" -eq 1 ]; then
    echo "only --list-tags flag should be set when used"
    usage
    exit 1
  fi
  python3 ./tests/tags/list_tags.py "$tag_type"
  exit 0
fi

if [ -z "$VERSITYGW_TEST_ENV" ]; then
  usage
  echo "VERSITYGW_TEST_ENV must be defined" >&2
  exit 1
fi

list_and_run_tests "$tag_matching_mode" "$list_flag" "$count_flag" "$run_flag" "${forwarded_args[@]}"
|
||||
|
||||
140
tests/tags/list_tags.py
Normal file
140
tests/tags/list_tags.py
Normal file
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import ast
|
||||
import re
|
||||
from typing import TextIO
|
||||
|
||||
|
||||
def load_description_string(m: re.Match[str]) -> str:
    """Return the desc value captured in group 1, unquoting simple YAML
    scalars ('...' or "...") when they parse as Python literals."""
    value = m.group(1)
    if not value.startswith(("\"", "'")):
        return value
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError):
        # Malformed quoting: fall back to trimming the quote characters.
        return value.strip("\"'")
|
||||
|
||||
|
||||
def load_tags_from_file(f: TextIO) -> dict:
    """Parse this project's tags.yaml structure from an open text stream.

    Recognizes only the fixed indentation-based shape:
      tags:
        <group>:          (2-space indent)
          <tag>:          (4-space indent)
            desc: ...     (6-space indent)
    Returns {"tags": {group: {tag: {"desc": str}}}}; anything before the
    'tags:' line, blank lines, and comments are ignored.
    """
    in_tags = False
    current_group = None
    current_tag = None
    data: dict = {"tags": {}}

    for raw_line in f:
        line = raw_line.rstrip("\n")
        # Skip blanks and comment lines.
        if not line.strip() or line.lstrip().startswith("#"):
            continue

        # Top-level 'tags:' opens the section and resets parse state.
        if re.match(r"^tags:\s*$", line):
            in_tags = True
            current_group = None
            current_tag = None
            continue

        if not in_tags:
            continue

        # 2-space indent: group
        m = re.match(r"^\s{2}([^:\s]+):\s*$", line)
        if m:
            current_group = m.group(1)
            data["tags"].setdefault(current_group, {})
            current_tag = None
            continue

        # 4-space indent: tag
        m = re.match(r"^\s{4}([^:\s]+):\s*$", line)
        if m and current_group is not None:
            current_tag = m.group(1)
            data["tags"][current_group].setdefault(current_tag, {})
            continue

        # 6-space indent: desc (only valid under a group and a tag)
        m = re.match(r"^\s{6}desc:\s*(.*)\s*$", line)
        if m and current_group is not None and current_tag is not None:
            data["tags"][current_group][current_tag]["desc"] = load_description_string(m)
    return data
|
||||
|
||||
|
||||
def load_tags(path: str) -> dict:
    """Load tag metadata from the tags.yaml file at `path`.

    Minimal YAML reader for this project's tags.yaml structure:
      tags:
        group:
          tag:
            desc: "..."

    Dead locals (in_tags/current_group/current_tag) left over from the
    refactor into load_tags_from_file have been removed — all parse state
    lives in that helper now.
    """
    with open(path, "r", encoding="utf-8") as f:
        return load_tags_from_file(f)
|
||||
|
||||
|
||||
def list_all_tag_descriptions(data: dict) -> None:
    """Print every tag group and its tags with descriptions, sorted."""
    all_groups = data.get("tags", {})
    for group_name in sorted(all_groups):
        print(f"{group_name} tags:")
        tags_in_group = all_groups.get(group_name, {}) or {}
        for tag_name in sorted(tags_in_group):
            tag_desc = (tags_in_group.get(tag_name, {}) or {}).get("desc", "")
            print(f"\t{tag_name}: {tag_desc}")
|
||||
|
||||
|
||||
def print_tag_description(data: dict, tag: str) -> int:
    """Print '<tag>: <desc>' for the first group containing `tag`
    (case-insensitive lookup). Returns 0 if found, 1 otherwise.

    Bug fix: the description was previously fetched from the
    original-cased `group_tags` using the LOWERCASED key, so any
    mixed-case tag (e.g. "ListBuckets") matched but printed an empty
    description. The desc is now read from the lowercased mapping.
    """
    tags = data.get("tags", {})
    tag_lower = tag.lower()
    for g in tags:
        group_tags = tags.get(g, {}) or {}
        # Lowercased view: keys for matching, values preserved for reading.
        g_lower = {k.lower(): v for k, v in group_tags.items()}
        if tag_lower in g_lower:
            desc = (g_lower.get(tag_lower) or {}).get("desc", "")
            print(f"{tag}: {desc}")
            return 0
    return 1
|
||||
|
||||
|
||||
def usage() -> None:
    """Print command-line usage for this script to stderr."""
    prog = os.path.basename(sys.argv[0])
    print(
        "\n".join(
            [
                f"Usage:",
                f"  {prog} [yaml] # print all",
                f"  {prog} [yaml] <tag> # searches all groups",
                f"  {prog} [-h|--help] # print help",
            ]
        ),
        file=sys.stderr,
    )
|
||||
|
||||
|
||||
def main() -> int:
    """Entry point for the tag-description CLI.

    argv shapes: [] (print all) | [yaml] | [yaml, tag] | [tag] | [-h|--help]
    Returns 0 on success, 1 when a requested tag is not found.

    Bug fix: -h/--help previously called usage() but fell through, loading
    the yaml and treating "-h" as a tag lookup; it now returns immediately.
    """
    script_dir = os.path.dirname(os.path.abspath(__file__))
    default_path = os.path.join(script_dir, "tags.yaml")

    args = sys.argv[1:]
    if args and (args[0] == "-h" or args[0] == "--help"):
        usage()
        return 0

    # An optional leading yaml path overrides the default tags.yaml.
    path = default_path
    if args and args[0].endswith((".yml", ".yaml")):
        path = args[0]
        args = args[1:]

    data = load_tags(path)

    if not args or args[0] == "":
        list_all_tag_descriptions(data)
        return 0

    return print_tag_description(data, args[0])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
40
tests/tags/tags.yaml
Normal file
40
tests/tags/tags.yaml
Normal file
@@ -0,0 +1,40 @@
|
||||
version: 1
|
||||
|
||||
tags:
|
||||
feature:
|
||||
invalid-method:
|
||||
desc: "Tests for invalid methods"
|
||||
malformed-message:
|
||||
desc: "Tests for severely malformed messages"
|
||||
minimal-request:
|
||||
desc: "Tests that do not use any optional query or header values"
|
||||
required-headers:
|
||||
desc: "Required headers such as 'Authorization' and 'x-amz-date'"
|
||||
user:
|
||||
desc: "versitygw user tests"
|
||||
client:
|
||||
curl:
|
||||
desc: "REST curl commands"
|
||||
openssl:
|
||||
desc: "REST OpenSSL commands"
|
||||
command:
|
||||
ListBuckets:
|
||||
desc: "ListBuckets command"
|
||||
header:
|
||||
Authorization:
|
||||
desc: "Authorization header"
|
||||
host:
|
||||
desc: "host header"
|
||||
x-amz-content-sha256:
|
||||
desc: "x-amz-content-sha256 header"
|
||||
x-amz-date:
|
||||
desc: "x-amz-date header"
|
||||
query:
|
||||
bucket-region:
|
||||
desc: "ListBuckets bucket-region query"
|
||||
continuation-token:
|
||||
desc: "ListBuckets/ListObjects continuation token query"
|
||||
max-buckets:
|
||||
desc: "Maximum amount of buckets returned by ListBuckets command"
|
||||
prefix:
|
||||
desc: "bucket or object prefix query"
|
||||
@@ -26,6 +26,7 @@ source ./tests/setup.sh
|
||||
|
||||
export RUN_USERS=true
|
||||
|
||||
# tags: openssl,malformed-message
|
||||
@test "REST - empty message" {
|
||||
test_file="test_file"
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
@@ -43,6 +44,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: malformed-message,openssl
|
||||
@test "REST - deformed message" {
|
||||
test_file="test_file"
|
||||
echo -en "abcdefg\r\n\r\n" > "$TEST_FILE_FOLDER/deformed.txt"
|
||||
@@ -50,6 +52,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization
|
||||
@test "REST - invalid authorization scheme" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1705"
|
||||
@@ -58,21 +61,25 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization
|
||||
@test "REST - very invalid credential string" {
|
||||
run send_rest_go_command_expect_error "400" "AuthorizationHeaderMalformed" "the Credential is mal-formed" "-incorrectCredential" "Credentials"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization
|
||||
@test "REST - nonexistent key ID" {
|
||||
run send_rest_go_command_expect_error "403" "InvalidAccessKeyId" "does not exist" "-awsAccessKeyId" "dummy"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization,x-amz-date
|
||||
@test "REST - invalid year/month/day" {
|
||||
run send_rest_go_command_expect_error "400" "AuthorizationHeaderMalformed" "incorrect date format" "-invalidYearMonthDay"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization,x-amz-date
|
||||
@test "REST - incorrect year/month/day" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1514"
|
||||
@@ -81,21 +88,25 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization
|
||||
@test "REST - invalid region" {
|
||||
run send_rest_go_command_expect_error "400" "AuthorizationHeaderMalformed" "us-eest-1" "-awsRegion" "us-eest-1"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization
|
||||
@test "REST - invalid service name" {
|
||||
run send_rest_go_command_expect_error "400" "AuthorizationHeaderMalformed" "incorrect service" "-serviceName" "s2"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,authorization
|
||||
@test "REST - incorrect signature" {
|
||||
run send_rest_go_command_expect_error "403" "SignatureDoesNotMatch" "does not match" "-incorrectSignature"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: openssl,ListBuckets,required-headers,host
|
||||
@test "REST - missing host parameter" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1530"
|
||||
@@ -104,6 +115,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,minimal-request
|
||||
@test "test_rest_list_buckets" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -116,6 +128,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,max-buckets
|
||||
@test "REST - list buckets - continuation token isn't bucket name" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1399"
|
||||
@@ -124,6 +137,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
bucket_name="$output"
|
||||
|
||||
# shellcheck disable=SC2153
|
||||
run get_bucket_name "$BUCKET_TWO_NAME"
|
||||
assert_success
|
||||
bucket_two_name="$output"
|
||||
@@ -135,6 +149,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,max-buckets,continuation-token
|
||||
@test "REST - list buckets - success (multiple pages)" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -151,6 +166,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,prefix
|
||||
@test "REST - list buckets w/prefix" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -173,6 +189,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,minimal-request,user
|
||||
@test "REST - ListBuckets - correct buckets show up" {
|
||||
if [ "$SKIP_USERS_TESTS" == "true" ]; then
|
||||
skip "skip versitygw-specific users tests"
|
||||
@@ -204,7 +221,8 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - ListBuckets - invalid POST route" {
|
||||
# tags: curl,invalid-method
|
||||
@test "REST - service route - invalid POST route" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1810"
|
||||
fi
|
||||
@@ -223,7 +241,8 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "REST - ListBuckets - invalid method" {
|
||||
# tags: curl,invalid-method
|
||||
@test "REST - service route - invalid method" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1846"
|
||||
fi
|
||||
@@ -231,11 +250,13 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,x-amz-date
|
||||
@test "REST - ListBuckets - error Content-Type is application/xml" {
|
||||
run send_rest_go_command_check_header_key_and_value "403" "Content-Type" "application/xml" "-method" "GET" "-omitDate"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,bucket-region
|
||||
@test "REST - ListBuckets - invalid bucket-region query" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1923"
|
||||
@@ -254,6 +275,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,bucket-region
|
||||
@test "REST - ListBuckets - incorrect bucket region" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1930"
|
||||
@@ -278,6 +300,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,bucket-region
|
||||
@test "REST - ListBuckets - correct bucket region" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -290,6 +313,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,x-amz-content-sha256
|
||||
@test "REST - ListBuckets - missing sha256 hash" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -302,6 +326,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,x-amz-content-sha256
|
||||
@test "REST - ListBuckets - invalid hash type" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -314,6 +339,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,x-amz-content-sha256
|
||||
@test "REST - ListBuckets - non-matching hash type" {
|
||||
run get_bucket_name "$BUCKET_ONE_NAME"
|
||||
assert_success
|
||||
@@ -326,6 +352,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,x-amz-date
|
||||
@test "REST - ListBuckets - omit date" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1934"
|
||||
@@ -341,6 +368,7 @@ export RUN_USERS=true
|
||||
assert_success
|
||||
}
|
||||
|
||||
# tags: curl,ListBuckets,required-headers,x-amz-date
|
||||
@test "REST - ListBuckets - invalid date" {
|
||||
if [ "$DIRECT" != "true" ]; then
|
||||
skip "https://github.com/versity/versitygw/issues/1934"
|
||||
|
||||
@@ -94,7 +94,6 @@ echo_versions() {
|
||||
else
|
||||
keys="$response"
|
||||
fi
|
||||
|
||||
unescaped_keys=$(echo -n "$keys" | xmlstarlet unesc)
|
||||
log 5 "keys to append: ${unescaped_keys[*]}"
|
||||
echo "${unescaped_keys[*]}"
|
||||
|
||||
Reference in New Issue
Block a user