Compare commits

...

24 Commits

Author SHA1 Message Date
Ben McClelland
47a6152e84 Merge pull request #972 from versity/test/rest_get_acl
Test/rest get acl
2024-12-06 19:18:01 -08:00
Luke McCrone
94fc70f5a7 test: GetBucketAcl, cleanup 2024-12-04 22:54:37 -03:00
Ben McClelland
c77604b545 Merge pull request #968 from versity/dependabot/go_modules/dev-dependencies-9a54799178
chore(deps): bump the dev-dependencies group across 1 directory with 16 updates
2024-12-04 15:09:56 -08:00
dependabot[bot]
adb69ed041 chore(deps): bump the dev-dependencies group across 1 directory with 16 updates
Bumps the dev-dependencies group with 9 updates in the / directory:

| Package | From | To |
| --- | --- | --- |
| [github.com/aws/aws-sdk-go-v2](https://github.com/aws/aws-sdk-go-v2) | `1.32.5` | `1.32.6` |
| [github.com/aws/aws-sdk-go-v2/service/s3](https://github.com/aws/aws-sdk-go-v2) | `1.67.1` | `1.70.0` |
| [github.com/aws/aws-sdk-go-v2/feature/ec2/imds](https://github.com/aws/aws-sdk-go-v2) | `1.16.20` | `1.16.21` |
| [github.com/aws/aws-sdk-go-v2/service/sso](https://github.com/aws/aws-sdk-go-v2) | `1.24.6` | `1.24.7` |
| [github.com/aws/aws-sdk-go-v2/service/ssooidc](https://github.com/aws/aws-sdk-go-v2) | `1.28.5` | `1.28.6` |
| [github.com/aws/aws-sdk-go-v2/service/sts](https://github.com/aws/aws-sdk-go-v2) | `1.33.1` | `1.33.2` |
| [github.com/nats-io/nkeys](https://github.com/nats-io/nkeys) | `0.4.7` | `0.4.8` |
| [github.com/aws/aws-sdk-go-v2/config](https://github.com/aws/aws-sdk-go-v2) | `1.28.5` | `1.28.6` |
| [github.com/aws/aws-sdk-go-v2/feature/s3/manager](https://github.com/aws/aws-sdk-go-v2) | `1.17.39` | `1.17.42` |

Updates `github.com/aws/aws-sdk-go-v2` from 1.32.5 to 1.32.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.32.5...v1.32.6)

Updates `github.com/aws/aws-sdk-go-v2/service/s3` from 1.67.1 to 1.70.0
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/s3/v1.67.1...service/s3/v1.70.0)

Updates `github.com/aws/aws-sdk-go-v2/feature/ec2/imds` from 1.16.20 to 1.16.21
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/ram/v1.16.20...service/ram/v1.16.21)

Updates `github.com/aws/aws-sdk-go-v2/service/sso` from 1.24.6 to 1.24.7
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/pi/v1.24.6...service/pi/v1.24.7)

Updates `github.com/aws/aws-sdk-go-v2/service/ssooidc` from 1.28.5 to 1.28.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.28.5...config/v1.28.6)

Updates `github.com/aws/aws-sdk-go-v2/service/sts` from 1.33.1 to 1.33.2
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/s3/v1.33.1...service/sfn/v1.33.2)

Updates `github.com/nats-io/nkeys` from 0.4.7 to 0.4.8
- [Release notes](https://github.com/nats-io/nkeys/releases)
- [Changelog](https://github.com/nats-io/nkeys/blob/main/.goreleaser.yml)
- [Commits](https://github.com/nats-io/nkeys/compare/v0.4.7...v0.4.8)

Updates `github.com/aws/aws-sdk-go-v2/config` from 1.28.5 to 1.28.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.28.5...config/v1.28.6)

Updates `github.com/aws/aws-sdk-go-v2/credentials` from 1.17.46 to 1.17.47
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/credentials/v1.17.46...credentials/v1.17.47)

Updates `github.com/aws/aws-sdk-go-v2/feature/s3/manager` from 1.17.39 to 1.17.42
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/credentials/v1.17.39...credentials/v1.17.42)

Updates `github.com/aws/aws-sdk-go-v2/internal/configsources` from 1.3.24 to 1.3.25
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/ini/v1.3.24...internal/ini/v1.3.25)

Updates `github.com/aws/aws-sdk-go-v2/internal/endpoints/v2` from 2.6.24 to 2.6.25
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/endpoints/v2.6.24...internal/endpoints/v2.6.25)

Updates `github.com/aws/aws-sdk-go-v2/internal/v4a` from 1.3.24 to 1.3.25
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/ini/v1.3.24...internal/ini/v1.3.25)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/checksum` from 1.4.5 to 1.4.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/m2/v1.4.5...service/m2/v1.4.6)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/presigned-url` from 1.12.5 to 1.12.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Changelog](https://github.com/aws/aws-sdk-go-v2/blob/credentials/v1.12.6/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/sso/v1.12.5...credentials/v1.12.6)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/s3shared` from 1.18.5 to 1.18.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Changelog](https://github.com/aws/aws-sdk-go-v2/blob/config/v1.18.6/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.18.5...config/v1.18.6)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go-v2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/s3
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/feature/ec2/imds
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/sso
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/ssooidc
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/sts
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/nats-io/nkeys
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/config
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/credentials
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/feature/s3/manager
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/configsources
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/endpoints/v2
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/v4a
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/checksum
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/presigned-url
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/s3shared
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-12-04 14:48:03 -08:00
Ben McClelland
c65a355bd9 Merge pull request #967 from versity/test/rest_put_bucket_policy
Test/rest put bucket policy
2024-12-04 10:00:07 -08:00
Luke McCrone
a43eec0ae7 test: REST PutBucketPolicy, logging, user changes 2024-12-04 12:34:54 -03:00
Ben McClelland
57d344a8f0 Merge pull request #962 from versity/test/rest_get_bucket_policy
Test/rest get bucket policy
2024-12-03 09:31:14 -08:00
Ben McClelland
7218926ac5 Merge pull request #969 from versity/ben/azure_admin_list_buckets
fix: azure admin list-buckets
2024-12-03 09:00:19 -08:00
Ben McClelland
d9591f694e fix: azure admin list-buckets
There were two issues preventing correct behavior here. One was that we need to specifically request the container metadata when listing containers, and the other was that we need to handle the case where the container does not include the ACL metadata.

This fixes both cases by adding the metadata request option to this container listing and by returning a default ACL when one is not provided in the container metadata.

Fixes #948
2024-12-02 16:57:49 -08:00
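For reference, a minimal Go sketch of the behavior this message describes, assuming the azblob `service` package already in this repo's dependencies; the `"Acl"` metadata key and the string ACL values are illustrative stand-ins, and the authoritative change is the Azure backend diff further down this page.

```go
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/service"
)

// listContainerACLs sketches the fix: ask for container metadata while
// listing, and fall back to a default (empty) ACL when a container carries
// no ACL metadata. The "Acl" key and string values are assumptions for
// illustration only.
func listContainerACLs(ctx context.Context, client *service.Client) (map[string]string, error) {
	opts := &service.ListContainersOptions{
		// The metadata request option added by the fix.
		Include: service.ListContainersInclude{Metadata: true},
	}
	acls := make(map[string]string)
	pager := client.NewListContainersPager(opts)
	for pager.More() {
		resp, err := pager.NextPage(ctx)
		if err != nil {
			return nil, err
		}
		for _, c := range resp.ContainerItems {
			acl := "" // default ACL when the container has no ACL metadata
			if v, ok := c.Metadata["Acl"]; ok && v != nil {
				acl = *v
			}
			acls[*c.Name] = acl
		}
	}
	return acls, nil
}
```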
Luke McCrone
0f2c727990 test: GetBucketPolicy - test w/o policy 2024-11-22 13:09:54 +01:00
Ben McClelland
80b316fccf Merge pull request #958 from versity/test/rest_get_bucket_ownership_controls
Test/rest get bucket ownership controls
2024-11-21 21:30:46 -08:00
Luke McCrone
a1aef5d559 test: REST Get/PutBucketOwnershipControls 2024-11-21 10:34:11 +01:00
Ben McClelland
1e5c1780c9 Merge pull request #956 from versity/ben/list-buckets-empty
feat: add list-buckets test for no buckets
2024-11-19 08:16:27 -08:00
Ben McClelland
3f2de08549 Merge pull request #955 from versity/ben/list-invalid-dir
fix: listobjects internal server error for prefix not directory
2024-11-19 08:16:08 -08:00
Ben McClelland
568f8346bf feat: add list-buckets test for no buckets
We need to check that list-buckets returns a correct empty list when no buckets have been created.
2024-11-18 21:35:19 -08:00
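A hedged sketch of the kind of check this adds, using the aws-sdk-go-v2 S3 client already in this repo's dependencies; the real test lives in the project's test suite and may be structured differently.

```go
package example

import (
	"context"
	"testing"

	"github.com/aws/aws-sdk-go-v2/service/s3"
)

// checkListBucketsEmpty asserts that ListBuckets against a gateway with no
// buckets returns a well-formed, empty bucket list rather than an error.
func checkListBucketsEmpty(t *testing.T, ctx context.Context, client *s3.Client) {
	out, err := client.ListBuckets(ctx, &s3.ListBucketsInput{})
	if err != nil {
		t.Fatalf("list buckets: %v", err)
	}
	if len(out.Buckets) != 0 {
		t.Errorf("expected no buckets, got %d", len(out.Buckets))
	}
}
```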
Ben McClelland
a6d61e1dde fix: listobjects internal server error for prefix not directory
We need to treat a prefix whose parent component exists as a file (rather than a directory) in the filesystem as a non-existent prefix instead of returning an internal server error. The error was caused by not handling ENOTDIR within the fs walk.

Fixes #949
2024-11-18 21:15:16 -08:00
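To make the failure mode concrete, a small self-contained Go sketch (not the gateway's actual walk code): when a path component of the prefix is a regular file, the stat fails with ENOTDIR, and that should be treated like a missing prefix rather than surfaced as an internal error.

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"syscall"
)

func main() {
	// Error handling elided for brevity in this demo.
	dir, _ := os.MkdirTemp("", "walk")
	defer os.RemoveAll(dir)

	// Create "obj" as a regular file, then stat a prefix "obj/a/b" beneath it.
	obj := filepath.Join(dir, "obj")
	_ = os.WriteFile(obj, []byte("x"), 0o644)

	_, err := os.Stat(filepath.Join(obj, "a", "b"))
	switch {
	case errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR):
		// Same outcome as a prefix that simply has no objects under it.
		fmt.Println("empty listing, no error")
	case err != nil:
		fmt.Println("internal error:", err) // the old behavior for ENOTDIR
	}
}
```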
Ben McClelland
3e9c5b883f Merge pull request #953 from versity/dependabot/go_modules/dev-dependencies-bcf9a5f4ca
chore(deps): bump the dev-dependencies group with 20 updates
2024-11-18 14:42:07 -08:00
dependabot[bot]
b9e464bbd0 chore(deps): bump the dev-dependencies group with 20 updates
Bumps the dev-dependencies group with 20 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/Azure/azure-sdk-for-go/sdk/storage/azblob](https://github.com/Azure/azure-sdk-for-go) | `1.4.1` | `1.5.0` |
| [github.com/aws/aws-sdk-go-v2](https://github.com/aws/aws-sdk-go-v2) | `1.32.4` | `1.32.5` |
| [github.com/aws/aws-sdk-go-v2/service/s3](https://github.com/aws/aws-sdk-go-v2) | `1.66.3` | `1.67.1` |
| [github.com/aws/smithy-go](https://github.com/aws/smithy-go) | `1.22.0` | `1.22.1` |
| [github.com/AzureAD/microsoft-authentication-library-for-go](https://github.com/AzureAD/microsoft-authentication-library-for-go) | `1.3.1` | `1.3.2` |
| [github.com/aws/aws-sdk-go-v2/feature/ec2/imds](https://github.com/aws/aws-sdk-go-v2) | `1.16.19` | `1.16.20` |
| [github.com/aws/aws-sdk-go-v2/service/sso](https://github.com/aws/aws-sdk-go-v2) | `1.24.5` | `1.24.6` |
| [github.com/aws/aws-sdk-go-v2/service/ssooidc](https://github.com/aws/aws-sdk-go-v2) | `1.28.4` | `1.28.5` |
| [github.com/aws/aws-sdk-go-v2/service/sts](https://github.com/aws/aws-sdk-go-v2) | `1.32.4` | `1.33.1` |
| [github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream](https://github.com/aws/aws-sdk-go-v2) | `1.6.6` | `1.6.7` |
| [github.com/aws/aws-sdk-go-v2/config](https://github.com/aws/aws-sdk-go-v2) | `1.28.3` | `1.28.5` |
| [github.com/aws/aws-sdk-go-v2/credentials](https://github.com/aws/aws-sdk-go-v2) | `1.17.44` | `1.17.46` |
| [github.com/aws/aws-sdk-go-v2/feature/s3/manager](https://github.com/aws/aws-sdk-go-v2) | `1.17.37` | `1.17.39` |
| [github.com/aws/aws-sdk-go-v2/internal/configsources](https://github.com/aws/aws-sdk-go-v2) | `1.3.23` | `1.3.24` |
| [github.com/aws/aws-sdk-go-v2/internal/endpoints/v2](https://github.com/aws/aws-sdk-go-v2) | `2.6.23` | `2.6.24` |
| [github.com/aws/aws-sdk-go-v2/internal/v4a](https://github.com/aws/aws-sdk-go-v2) | `1.3.23` | `1.3.24` |
| [github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding](https://github.com/aws/aws-sdk-go-v2) | `1.12.0` | `1.12.1` |
| [github.com/aws/aws-sdk-go-v2/service/internal/checksum](https://github.com/aws/aws-sdk-go-v2) | `1.4.4` | `1.4.5` |
| [github.com/aws/aws-sdk-go-v2/service/internal/presigned-url](https://github.com/aws/aws-sdk-go-v2) | `1.12.4` | `1.12.5` |
| [github.com/aws/aws-sdk-go-v2/service/internal/s3shared](https://github.com/aws/aws-sdk-go-v2) | `1.18.4` | `1.18.5` |


Updates `github.com/Azure/azure-sdk-for-go/sdk/storage/azblob` from 1.4.1 to 1.5.0
- [Release notes](https://github.com/Azure/azure-sdk-for-go/releases)
- [Changelog](https://github.com/Azure/azure-sdk-for-go/blob/main/documentation/release.md)
- [Commits](https://github.com/Azure/azure-sdk-for-go/compare/sdk/storage/azblob/v1.4.1...sdk/azcore/v1.5.0)

Updates `github.com/aws/aws-sdk-go-v2` from 1.32.4 to 1.32.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.32.4...v1.32.5)

Updates `github.com/aws/aws-sdk-go-v2/service/s3` from 1.66.3 to 1.67.1
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/s3/v1.66.3...service/s3/v1.67.1)

Updates `github.com/aws/smithy-go` from 1.22.0 to 1.22.1
- [Release notes](https://github.com/aws/smithy-go/releases)
- [Changelog](https://github.com/aws/smithy-go/blob/main/CHANGELOG.md)
- [Commits](https://github.com/aws/smithy-go/compare/v1.22.0...v1.22.1)

Updates `github.com/AzureAD/microsoft-authentication-library-for-go` from 1.3.1 to 1.3.2
- [Release notes](https://github.com/AzureAD/microsoft-authentication-library-for-go/releases)
- [Changelog](https://github.com/AzureAD/microsoft-authentication-library-for-go/blob/main/changelog.md)
- [Commits](https://github.com/AzureAD/microsoft-authentication-library-for-go/compare/v1.3.1...v1.3.2)

Updates `github.com/aws/aws-sdk-go-v2/feature/ec2/imds` from 1.16.19 to 1.16.20
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/ram/v1.16.19...service/ram/v1.16.20)

Updates `github.com/aws/aws-sdk-go-v2/service/sso` from 1.24.5 to 1.24.6
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/pi/v1.24.5...service/pi/v1.24.6)

Updates `github.com/aws/aws-sdk-go-v2/service/ssooidc` from 1.28.4 to 1.28.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.28.4...config/v1.28.5)

Updates `github.com/aws/aws-sdk-go-v2/service/sts` from 1.32.4 to 1.33.1
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Changelog](https://github.com/aws/aws-sdk-go-v2/blob/service/s3/v1.33.1/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.32.4...service/s3/v1.33.1)

Updates `github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream` from 1.6.6 to 1.6.7
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/rum/v1.6.6...service/rum/v1.6.7)

Updates `github.com/aws/aws-sdk-go-v2/config` from 1.28.3 to 1.28.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.28.3...config/v1.28.5)

Updates `github.com/aws/aws-sdk-go-v2/credentials` from 1.17.44 to 1.17.46
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/credentials/v1.17.44...credentials/v1.17.46)

Updates `github.com/aws/aws-sdk-go-v2/feature/s3/manager` from 1.17.37 to 1.17.39
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/credentials/v1.17.37...credentials/v1.17.39)

Updates `github.com/aws/aws-sdk-go-v2/internal/configsources` from 1.3.23 to 1.3.24
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/ini/v1.3.23...internal/ini/v1.3.24)

Updates `github.com/aws/aws-sdk-go-v2/internal/endpoints/v2` from 2.6.23 to 2.6.24
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/endpoints/v2.6.23...internal/endpoints/v2.6.24)

Updates `github.com/aws/aws-sdk-go-v2/internal/v4a` from 1.3.23 to 1.3.24
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/ini/v1.3.23...internal/ini/v1.3.24)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding` from 1.12.0 to 1.12.1
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.12.0...service/m2/v1.12.1)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/checksum` from 1.4.4 to 1.4.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/m2/v1.4.4...service/m2/v1.4.5)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/presigned-url` from 1.12.4 to 1.12.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/m2/v1.12.4...service/sso/v1.12.5)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/s3shared` from 1.18.4 to 1.18.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Changelog](https://github.com/aws/aws-sdk-go-v2/blob/config/v1.18.5/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.18.4...config/v1.18.5)

---
updated-dependencies:
- dependency-name: github.com/Azure/azure-sdk-for-go/sdk/storage/azblob
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/s3
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/smithy-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/AzureAD/microsoft-authentication-library-for-go
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/feature/ec2/imds
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/sso
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/ssooidc
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/sts
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/config
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/credentials
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/feature/s3/manager
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/configsources
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/endpoints/v2
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/v4a
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/checksum
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/presigned-url
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/s3shared
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-11-18 22:17:01 +00:00
Ben McClelland
86440ec7da Merge pull request #947 from versity/ben/dir_not_empty 2024-11-18 07:34:27 -08:00
Ben McClelland
db305142f1 fix: return better error when trying to delete non empty directory object
The posix backend will return ENOTEMPTY when trying to delete a
directory that is not empty. This would normally succeed on object
systems, so we need to create another non-standard error for this
case. We mainly just don't want to return InternalError here.

Fixes #946
2024-11-14 22:33:08 -08:00
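A hedged sketch of the error mapping this commit describes; the sentinel errors below stand in for the gateway's s3err values (the real change adds an ErrDirectoryNotEmpty API error, visible in the posix diff further down).

```go
package example

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
	"syscall"
)

// Stand-ins for the gateway's s3err API errors.
var (
	errNoSuchKey         = errors.New("NoSuchKey")
	errDirectoryNotEmpty = errors.New("DirectoryNotEmpty")
)

// removeDirObject maps filesystem errors from deleting a directory-backed
// object: a missing path becomes NoSuchKey, ENOTEMPTY becomes the dedicated
// directory-not-empty error, and only genuinely unexpected failures bubble
// up as internal errors.
func removeDirObject(path string) error {
	err := os.Remove(path)
	switch {
	case err == nil:
		return nil
	case errors.Is(err, fs.ErrNotExist):
		return errNoSuchKey
	case errors.Is(err, syscall.ENOTEMPTY):
		return errDirectoryNotEmpty
	default:
		return fmt.Errorf("delete object: %w", err)
	}
}
```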
Ben McClelland
a26f069c53 Merge pull request #945 from versity/dependabot/go_modules/dev-dependencies-918f12f149
chore(deps): bump the dev-dependencies group with 22 updates
2024-11-11 19:07:10 -08:00
dependabot[bot]
8fb6227e31 chore(deps): bump the dev-dependencies group with 22 updates
Bumps the dev-dependencies group with 22 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/aws/aws-sdk-go-v2](https://github.com/aws/aws-sdk-go-v2) | `1.32.3` | `1.32.4` |
| [github.com/aws/aws-sdk-go-v2/service/s3](https://github.com/aws/aws-sdk-go-v2) | `1.66.2` | `1.66.3` |
| [golang.org/x/sync](https://github.com/golang/sync) | `0.8.0` | `0.9.0` |
| [golang.org/x/sys](https://github.com/golang/sys) | `0.26.0` | `0.27.0` |
| [github.com/AzureAD/microsoft-authentication-library-for-go](https://github.com/AzureAD/microsoft-authentication-library-for-go) | `1.2.3` | `1.3.1` |
| [github.com/aws/aws-sdk-go-v2/feature/ec2/imds](https://github.com/aws/aws-sdk-go-v2) | `1.16.18` | `1.16.19` |
| [github.com/aws/aws-sdk-go-v2/service/sso](https://github.com/aws/aws-sdk-go-v2) | `1.24.3` | `1.24.5` |
| [github.com/aws/aws-sdk-go-v2/service/ssooidc](https://github.com/aws/aws-sdk-go-v2) | `1.28.3` | `1.28.4` |
| [github.com/aws/aws-sdk-go-v2/service/sts](https://github.com/aws/aws-sdk-go-v2) | `1.32.3` | `1.32.4` |
| [golang.org/x/crypto](https://github.com/golang/crypto) | `0.28.0` | `0.29.0` |
| [golang.org/x/net](https://github.com/golang/net) | `0.30.0` | `0.31.0` |
| [golang.org/x/text](https://github.com/golang/text) | `0.19.0` | `0.20.0` |
| [golang.org/x/time](https://github.com/golang/time) | `0.7.0` | `0.8.0` |
| [github.com/aws/aws-sdk-go-v2/config](https://github.com/aws/aws-sdk-go-v2) | `1.28.1` | `1.28.3` |
| [github.com/aws/aws-sdk-go-v2/credentials](https://github.com/aws/aws-sdk-go-v2) | `1.17.42` | `1.17.44` |
| [github.com/aws/aws-sdk-go-v2/feature/s3/manager](https://github.com/aws/aws-sdk-go-v2) | `1.17.35` | `1.17.37` |
| [github.com/aws/aws-sdk-go-v2/internal/configsources](https://github.com/aws/aws-sdk-go-v2) | `1.3.22` | `1.3.23` |
| [github.com/aws/aws-sdk-go-v2/internal/endpoints/v2](https://github.com/aws/aws-sdk-go-v2) | `2.6.22` | `2.6.23` |
| [github.com/aws/aws-sdk-go-v2/internal/v4a](https://github.com/aws/aws-sdk-go-v2) | `1.3.22` | `1.3.23` |
| [github.com/aws/aws-sdk-go-v2/service/internal/checksum](https://github.com/aws/aws-sdk-go-v2) | `1.4.3` | `1.4.4` |
| [github.com/aws/aws-sdk-go-v2/service/internal/presigned-url](https://github.com/aws/aws-sdk-go-v2) | `1.12.3` | `1.12.4` |
| [github.com/aws/aws-sdk-go-v2/service/internal/s3shared](https://github.com/aws/aws-sdk-go-v2) | `1.18.3` | `1.18.4` |


Updates `github.com/aws/aws-sdk-go-v2` from 1.32.3 to 1.32.4
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.32.3...v1.32.4)

Updates `github.com/aws/aws-sdk-go-v2/service/s3` from 1.66.2 to 1.66.3
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/s3/v1.66.2...service/s3/v1.66.3)

Updates `golang.org/x/sync` from 0.8.0 to 0.9.0
- [Commits](https://github.com/golang/sync/compare/v0.8.0...v0.9.0)

Updates `golang.org/x/sys` from 0.26.0 to 0.27.0
- [Commits](https://github.com/golang/sys/compare/v0.26.0...v0.27.0)

Updates `github.com/AzureAD/microsoft-authentication-library-for-go` from 1.2.3 to 1.3.1
- [Release notes](https://github.com/AzureAD/microsoft-authentication-library-for-go/releases)
- [Changelog](https://github.com/AzureAD/microsoft-authentication-library-for-go/blob/main/changelog.md)
- [Commits](https://github.com/AzureAD/microsoft-authentication-library-for-go/compare/v1.2.3...v1.3.1)

Updates `github.com/aws/aws-sdk-go-v2/feature/ec2/imds` from 1.16.18 to 1.16.19
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/ram/v1.16.18...service/ram/v1.16.19)

Updates `github.com/aws/aws-sdk-go-v2/service/sso` from 1.24.3 to 1.24.5
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/pi/v1.24.3...service/pi/v1.24.5)

Updates `github.com/aws/aws-sdk-go-v2/service/ssooidc` from 1.28.3 to 1.28.4
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.28.3...service/pi/v1.28.4)

Updates `github.com/aws/aws-sdk-go-v2/service/sts` from 1.32.3 to 1.32.4
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.32.3...v1.32.4)

Updates `golang.org/x/crypto` from 0.28.0 to 0.29.0
- [Commits](https://github.com/golang/crypto/compare/v0.28.0...v0.29.0)

Updates `golang.org/x/net` from 0.30.0 to 0.31.0
- [Commits](https://github.com/golang/net/compare/v0.30.0...v0.31.0)

Updates `golang.org/x/text` from 0.19.0 to 0.20.0
- [Release notes](https://github.com/golang/text/releases)
- [Commits](https://github.com/golang/text/compare/v0.19.0...v0.20.0)

Updates `golang.org/x/time` from 0.7.0 to 0.8.0
- [Commits](https://github.com/golang/time/compare/v0.7.0...v0.8.0)

Updates `github.com/aws/aws-sdk-go-v2/config` from 1.28.1 to 1.28.3
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.28.1...config/v1.28.3)

Updates `github.com/aws/aws-sdk-go-v2/credentials` from 1.17.42 to 1.17.44
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/credentials/v1.17.42...credentials/v1.17.44)

Updates `github.com/aws/aws-sdk-go-v2/feature/s3/manager` from 1.17.35 to 1.17.37
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/credentials/v1.17.35...credentials/v1.17.37)

Updates `github.com/aws/aws-sdk-go-v2/internal/configsources` from 1.3.22 to 1.3.23
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/ini/v1.3.22...internal/ini/v1.3.23)

Updates `github.com/aws/aws-sdk-go-v2/internal/endpoints/v2` from 2.6.22 to 2.6.23
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/endpoints/v2.6.22...internal/endpoints/v2.6.23)

Updates `github.com/aws/aws-sdk-go-v2/internal/v4a` from 1.3.22 to 1.3.23
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/internal/ini/v1.3.22...internal/ini/v1.3.23)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/checksum` from 1.4.3 to 1.4.4
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/m2/v1.4.3...service/m2/v1.4.4)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/presigned-url` from 1.12.3 to 1.12.4
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/service/mq/v1.12.3...service/m2/v1.12.4)

Updates `github.com/aws/aws-sdk-go-v2/service/internal/s3shared` from 1.18.3 to 1.18.4
- [Release notes](https://github.com/aws/aws-sdk-go-v2/releases)
- [Changelog](https://github.com/aws/aws-sdk-go-v2/blob/config/v1.18.4/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.18.3...config/v1.18.4)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go-v2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/s3
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: golang.org/x/sync
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: golang.org/x/sys
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/AzureAD/microsoft-authentication-library-for-go
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/feature/ec2/imds
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/sso
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/ssooidc
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/sts
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: golang.org/x/crypto
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: golang.org/x/net
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: golang.org/x/text
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: golang.org/x/time
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/config
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/credentials
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/feature/s3/manager
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/configsources
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/endpoints/v2
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/internal/v4a
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/checksum
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/presigned-url
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
- dependency-name: github.com/aws/aws-sdk-go-v2/service/internal/s3shared
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: dev-dependencies
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-11-11 21:18:10 +00:00
Ben McClelland
6cf73eaabb Merge pull request #943 from versity/ben/err_not_dir 2024-11-11 07:57:24 -08:00
Ben McClelland
0312a1e3dc fix: internal server error when object parent dir is a file
The filesystem will return ENOTDIR if we try to access a file path
where a parent directory within the path already exists as a file.
In this case we need to return a standard 404 NoSuchKey, since
the requested object does not exist within the filesystem.

Fixes #942
2024-11-08 08:21:14 -08:00
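A minimal sketch of the mapping described here, with a sentinel error standing in for the gateway's s3err.ErrNoSuchKey; it only illustrates treating ENOTDIR the same as a missing path when resolving an object.

```go
package example

import (
	"errors"
	"io/fs"
	"os"
	"syscall"
)

// errNoSuchKey stands in for s3err.GetAPIError(s3err.ErrNoSuchKey) (404).
var errNoSuchKey = errors.New("NoSuchKey")

// statObject maps filesystem errors for an object path onto S3 semantics:
// both "does not exist" and "a parent path component is a file" (ENOTDIR)
// mean the requested object does not exist, so return NoSuchKey instead of
// an internal error.
func statObject(path string) (os.FileInfo, error) {
	fi, err := os.Stat(path)
	if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
		return nil, errNoSuchKey
	}
	if err != nil {
		return nil, err
	}
	return fi, nil
}
```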
97 changed files with 2824 additions and 1657 deletions


@@ -196,7 +196,6 @@ func (az *Azure) CreateBucket(ctx context.Context, input *s3.CreateBucketInput,
}
func (az *Azure) ListBuckets(ctx context.Context, input s3response.ListBucketsInput) (s3response.ListAllMyBucketsResult, error) {
fmt.Printf("%+v\n", input)
pager := az.client.NewListContainersPager(
&service.ListContainersOptions{
Include: service.ListContainersInclude{
@@ -1459,7 +1458,10 @@ func (az *Azure) ChangeBucketOwner(ctx context.Context, bucket string, acl []byt
// The action actually returns the containers owned by the user, who initialized the gateway
// TODO: Not sure if there's a way to list all the containers and owners?
func (az *Azure) ListBucketsAndOwners(ctx context.Context) (buckets []s3response.Bucket, err error) {
pager := az.client.NewListContainersPager(nil)
opts := &service.ListContainersOptions{
Include: service.ListContainersInclude{Metadata: true},
}
pager := az.client.NewListContainersPager(opts)
for pager.More() {
resp, err := pager.NextPage(ctx)
@@ -1735,9 +1737,11 @@ func (az *Azure) deleteContainerMetaData(ctx context.Context, bucket, key string
}
func getAclFromMetadata(meta map[string]*string, key key) (*auth.ACL, error) {
var acl auth.ACL
data, ok := meta[string(key)]
if !ok {
return nil, s3err.GetAPIError(s3err.ErrInternalError)
return &acl, nil
}
value, err := decodeString(*data)
@@ -1745,7 +1749,6 @@ func getAclFromMetadata(meta map[string]*string, key key) (*auth.ACL, error) {
return nil, err
}
var acl auth.ACL
if len(value) == 0 {
return &acl, nil
}


@@ -198,7 +198,7 @@ func (p *Posix) doesBucketAndObjectExist(bucket, object string) error {
}
_, err = os.Stat(filepath.Join(bucket, object))
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil {
@@ -761,7 +761,7 @@ func getBoolPtr(b bool) *bool {
// Check if the given object is a delete marker
func (p *Posix) isObjDeleteMarker(bucket, object string) (bool, error) {
_, err := p.meta.RetrieveAttribute(nil, bucket, object, deleteMarkerKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return false, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if errors.Is(err, meta.ErrNoSuchKey) {
@@ -2434,7 +2434,7 @@ func (p *Posix) DeleteObject(ctx context.Context, input *s3.DeleteObjectInput) (
if getString(input.VersionId) == "" {
// if the versionId is not specified, make the current version a delete marker
fi, err := os.Stat(objpath)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
// AWS returns success if the object does not exist
return &s3.DeleteObjectOutput{}, nil
}
@@ -2601,7 +2601,7 @@ func (p *Posix) DeleteObject(ctx context.Context, input *s3.DeleteObjectInput) (
if errors.Is(err, syscall.ENAMETOOLONG) {
return nil, s3err.GetAPIError(s3err.ErrKeyTooLong)
}
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
if err != nil {
@@ -2621,7 +2621,7 @@ func (p *Posix) DeleteObject(ctx context.Context, input *s3.DeleteObjectInput) (
if errors.Is(err, syscall.ENAMETOOLONG) {
return nil, s3err.GetAPIError(s3err.ErrKeyTooLong)
}
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
// AWS returns success if the object does not exist
return &s3.DeleteObjectOutput{}, nil
}
@@ -2646,6 +2646,9 @@ func (p *Posix) DeleteObject(ctx context.Context, input *s3.DeleteObjectInput) (
if errors.Is(err, fs.ErrNotExist) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if errors.Is(err, syscall.ENOTEMPTY) {
return nil, s3err.GetAPIError(s3err.ErrDirectoryNotEmpty)
}
if err != nil {
return nil, fmt.Errorf("delete object: %w", err)
}
@@ -2768,7 +2771,7 @@ func (p *Posix) GetObject(_ context.Context, input *s3.GetObjectInput) (*s3.GetO
object := *input.Key
if versionId != "" {
vId, err := p.meta.RetrieveAttribute(nil, bucket, object, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -2787,7 +2790,7 @@ func (p *Posix) GetObject(_ context.Context, input *s3.GetObjectInput) (*s3.GetO
objPath := filepath.Join(bucket, object)
fi, err := os.Stat(objPath)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if versionId != "" {
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
@@ -3025,7 +3028,7 @@ func (p *Posix) HeadObject(ctx context.Context, input *s3.HeadObjectInput) (*s3.
if versionId != "" {
vId, err := p.meta.RetrieveAttribute(nil, bucket, object, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -3045,7 +3048,7 @@ func (p *Posix) HeadObject(ctx context.Context, input *s3.HeadObjectInput) (*s3.
objPath := filepath.Join(bucket, object)
fi, err := os.Stat(objPath)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if versionId != "" {
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
@@ -3206,7 +3209,7 @@ func (p *Posix) CopyObject(ctx context.Context, input *s3.CopyObjectInput) (*s3.
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
vId, err := p.meta.RetrieveAttribute(nil, srcBucket, srcObject, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -3229,7 +3232,7 @@ func (p *Posix) CopyObject(ctx context.Context, input *s3.CopyObjectInput) (*s3.
objPath := filepath.Join(srcBucket, srcObject)
f, err := os.Open(objPath)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if p.versioningEnabled() && vEnabled {
return nil, s3err.GetAPIError(s3err.ErrNoSuchVersion)
}
@@ -3284,7 +3287,7 @@ func (p *Posix) CopyObject(ctx context.Context, input *s3.CopyObjectInput) (*s3.
b, _ := p.meta.RetrieveAttribute(nil, dstBucket, dstObject, etagkey)
etag = string(b)
vId, _ := p.meta.RetrieveAttribute(nil, dstBucket, dstObject, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
version = backend.GetPtrFromString(string(vId))
@@ -3612,7 +3615,7 @@ func (p *Posix) GetObjectTagging(_ context.Context, bucket, object string) (map[
func (p *Posix) getAttrTags(bucket, object string) (map[string]string, error) {
tags := make(map[string]string)
b, err := p.meta.RetrieveAttribute(nil, bucket, object, tagHdr)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if errors.Is(err, meta.ErrNoSuchKey) {
@@ -3641,7 +3644,7 @@ func (p *Posix) PutObjectTagging(_ context.Context, bucket, object string, tags
if tags == nil {
err = p.meta.DeleteAttribute(bucket, object, tagHdr)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if errors.Is(err, meta.ErrNoSuchKey) {
@@ -3659,7 +3662,7 @@ func (p *Posix) PutObjectTagging(_ context.Context, bucket, object string, tags
}
err = p.meta.StoreAttribute(nil, bucket, object, tagHdr, b)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil {
@@ -3831,7 +3834,7 @@ func (p *Posix) PutObjectLegalHold(_ context.Context, bucket, object, versionId
return s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
vId, err := p.meta.RetrieveAttribute(nil, bucket, object, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -3845,7 +3848,7 @@ func (p *Posix) PutObjectLegalHold(_ context.Context, bucket, object, versionId
}
err = p.meta.StoreAttribute(nil, bucket, object, objectLegalHoldKey, statusData)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if versionId != "" {
return s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
@@ -3874,7 +3877,7 @@ func (p *Posix) GetObjectLegalHold(_ context.Context, bucket, object, versionId
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
vId, err := p.meta.RetrieveAttribute(nil, bucket, object, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -3888,7 +3891,7 @@ func (p *Posix) GetObjectLegalHold(_ context.Context, bucket, object, versionId
}
data, err := p.meta.RetrieveAttribute(nil, bucket, object, objectLegalHoldKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if versionId != "" {
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
@@ -3922,7 +3925,7 @@ func (p *Posix) PutObjectRetention(_ context.Context, bucket, object, versionId
return s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
vId, err := p.meta.RetrieveAttribute(nil, bucket, object, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -3936,7 +3939,7 @@ func (p *Posix) PutObjectRetention(_ context.Context, bucket, object, versionId
}
objectLockCfg, err := p.meta.RetrieveAttribute(nil, bucket, object, objectRetentionKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if versionId != "" {
return s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
@@ -3994,7 +3997,7 @@ func (p *Posix) GetObjectRetention(_ context.Context, bucket, object, versionId
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}
vId, err := p.meta.RetrieveAttribute(nil, bucket, object, versionIdKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if err != nil && !errors.Is(err, meta.ErrNoSuchKey) {
@@ -4008,7 +4011,7 @@ func (p *Posix) GetObjectRetention(_ context.Context, bucket, object, versionId
}
data, err := p.meta.RetrieveAttribute(nil, bucket, object, objectRetentionKey)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
if versionId != "" {
return nil, s3err.GetAPIError(s3err.ErrInvalidVersionId)
}


@@ -490,7 +490,7 @@ func (s *ScoutFS) HeadObject(ctx context.Context, input *s3.HeadObjectInput) (*s
objPath := filepath.Join(bucket, object)
fi, err := os.Stat(objPath)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if errors.Is(err, syscall.ENAMETOOLONG) {
@@ -614,7 +614,7 @@ func (s *ScoutFS) GetObject(_ context.Context, input *s3.GetObjectInput) (*s3.Ge
objPath := filepath.Join(bucket, object)
fi, err := os.Stat(objPath)
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return nil, s3err.GetAPIError(s3err.ErrNoSuchKey)
}
if errors.Is(err, syscall.ENAMETOOLONG) {


@@ -21,6 +21,7 @@ import (
"io/fs"
"sort"
"strings"
"syscall"
"github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/versity/versitygw/s3response"
@@ -231,7 +232,7 @@ func Walk(ctx context.Context, fileSystem fs.FS, prefix, delimiter, marker strin
})
if err != nil {
// suppress file not found caused by user's prefix
if errors.Is(err, fs.ErrNotExist) {
if errors.Is(err, fs.ErrNotExist) || errors.Is(err, syscall.ENOTDIR) {
return WalkResults{}, nil
}
return WalkResults{}, err

go.mod (54 changed lines)

@@ -5,11 +5,11 @@ go 1.21.0
require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.1
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0
github.com/DataDog/datadog-go/v5 v5.5.0
github.com/aws/aws-sdk-go-v2 v1.32.3
github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2
github.com/aws/smithy-go v1.22.0
github.com/aws/aws-sdk-go-v2 v1.32.6
github.com/aws/aws-sdk-go-v2/service/s3 v1.70.0
github.com/aws/smithy-go v1.22.1
github.com/go-ldap/ldap/v3 v3.4.8
github.com/gofiber/fiber/v2 v2.52.5
github.com/google/go-cmp v0.6.0
@@ -23,20 +23,20 @@ require (
github.com/urfave/cli/v2 v2.27.5
github.com/valyala/fasthttp v1.57.0
github.com/versity/scoutfs-go v0.0.0-20240325223134-38eb2f5f7d44
golang.org/x/sync v0.8.0
golang.org/x/sys v0.26.0
golang.org/x/sync v0.9.0
golang.org/x/sys v0.27.0
)
require (
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.3 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.21 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.24.3 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.32.3 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.24.7 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.6 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.33.2 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.7 // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
@@ -45,30 +45,30 @@ require (
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/nats-io/nkeys v0.4.7 // indirect
github.com/nats-io/nkeys v0.4.8 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/ryanuber/go-glob v1.0.0 // indirect
golang.org/x/crypto v0.28.0 // indirect
golang.org/x/net v0.30.0 // indirect
golang.org/x/text v0.19.0 // indirect
golang.org/x/time v0.7.0 // indirect
golang.org/x/crypto v0.29.0 // indirect
golang.org/x/net v0.31.0 // indirect
golang.org/x/text v0.20.0 // indirect
golang.org/x/time v0.8.0 // indirect
)
require (
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect
github.com/aws/aws-sdk-go-v2/config v1.28.1
github.com/aws/aws-sdk-go-v2/credentials v1.17.42
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.35
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7 // indirect
github.com/aws/aws-sdk-go-v2/config v1.28.6
github.com/aws/aws-sdk-go-v2/credentials v1.17.47
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.42
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.25 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.25 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.25 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.6 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.6 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.6 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect

go.sum (108 changed lines)

@@ -8,14 +8,14 @@ github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xP
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.1 h1:cf+OIKbkmMHBaC3u78AXomweqM0oxQSgBXRZf3WH4yM=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.1/go.mod h1:ap1dmS6vQKJxSMNiGJcq4QuUQkOynyD93gLw6MDF7ek=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0 h1:mlmW46Q0B79I+Aj4azKC6xDMFN9a9SyZWESlGWYXbFs=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0/go.mod h1:PXe2h+LKcWTX9afWdZoHyODqR4fBa5boUM/8uJfZ0Jo=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM=
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.3 h1:6LyjnnaLpcOKK0fbYisI+mb8CE7iNe7i89nMNQxFxs8=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.3/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/DataDog/datadog-go/v5 v5.5.0 h1:G5KHeB8pWBNXT4Jtw0zAkhdxEAWSpWH00geHI6LDrKU=
github.com/DataDog/datadog-go/v5 v5.5.0/go.mod h1:K9kcYBlxkcPP8tvvjZZKs/m1edNAUFzBbdpTUKfCsuw=
github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84=
@@ -25,44 +25,44 @@ github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7V
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/aws/aws-sdk-go-v2 v1.32.3 h1:T0dRlFBKcdaUPGNtkBSwHZxrtis8CQU17UpNBZYd0wk=
github.com/aws/aws-sdk-go-v2 v1.32.3/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 h1:pT3hpW0cOHRJx8Y0DfJUEQuqPild8jRGmSFmBgvydr0=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6/go.mod h1:j/I2++U0xX+cr44QjHay4Cvxj6FUbnxrgmqN3H1jTZA=
github.com/aws/aws-sdk-go-v2/config v1.28.1 h1:oxIvOUXy8x0U3fR//0eq+RdCKimWI900+SV+10xsCBw=
github.com/aws/aws-sdk-go-v2/config v1.28.1/go.mod h1:bRQcttQJiARbd5JZxw6wG0yIK3eLeSCPdg6uqmmlIiI=
github.com/aws/aws-sdk-go-v2/credentials v1.17.42 h1:sBP0RPjBU4neGpIYyx8mkU2QqLPl5u9cmdTWVzIpHkM=
github.com/aws/aws-sdk-go-v2/credentials v1.17.42/go.mod h1:FwZBfU530dJ26rv9saAbxa9Ej3eF/AK0OAY86k13n4M=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 h1:68jFVtt3NulEzojFesM/WVarlFpCaXLKaBxDpzkQ9OQ=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18/go.mod h1:Fjnn5jQVIo6VyedMc0/EhPpfNlPl7dHV916O6B+49aE=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.35 h1:ihPPdcCVSN0IvBByXwqVp28/l4VosBZ6sDulcvU2J7w=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.35/go.mod h1:JkgEhs3SVF51Dj3m1Bj+yL8IznpxzkwlA3jLg3x7Kls=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22 h1:Jw50LwEkVjuVzE1NzkhNKkBf9cRN7MtE1F/b2cOKTUM=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22/go.mod h1:Y/SmAyPcOTmpeVaWSzSKiILfXTVJwrGmYZhcRbhWuEY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22 h1:981MHwBaRZM7+9QSR6XamDzF/o7ouUGxFzr+nVSIhrs=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22/go.mod h1:1RA1+aBEfn+CAB/Mh0MB6LsdCYCnjZm7tKXtnk499ZQ=
github.com/aws/aws-sdk-go-v2 v1.32.6 h1:7BokKRgRPuGmKkFMhEg/jSul+tB9VvXhcViILtfG8b4=
github.com/aws/aws-sdk-go-v2 v1.32.6/go.mod h1:P5WJBrYqqbWVaOxgH0X/FYYD47/nooaPOZPlQdmiN2U=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7 h1:lL7IfaFzngfx0ZwUGOZdsFFnQ5uLvR0hWqqhyE7Q9M8=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7/go.mod h1:QraP0UcVlQJsmHfioCrveWOC1nbiWUl3ej08h4mXWoc=
github.com/aws/aws-sdk-go-v2/config v1.28.6 h1:D89IKtGrs/I3QXOLNTH93NJYtDhm8SYa9Q5CsPShmyo=
github.com/aws/aws-sdk-go-v2/config v1.28.6/go.mod h1:GDzxJ5wyyFSCoLkS+UhGB0dArhb9mI+Co4dHtoTxbko=
github.com/aws/aws-sdk-go-v2/credentials v1.17.47 h1:48bA+3/fCdi2yAwVt+3COvmatZ6jUDNkDTIsqDiMUdw=
github.com/aws/aws-sdk-go-v2/credentials v1.17.47/go.mod h1:+KdckOejLW3Ks3b0E3b5rHsr2f9yuORBum0WPnE5o5w=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.21 h1:AmoU1pziydclFT/xRV+xXE/Vb8fttJCLRPv8oAkprc0=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.21/go.mod h1:AjUdLYe4Tgs6kpH4Bv7uMZo7pottoyHMn4eTcIcneaY=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.42 h1:vEnk9vtjJ62OO2wOhEmgKMZgNcn1w0aF7XCiNXO5rK0=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.42/go.mod h1:GUOPbPJWRZsdt1OJ355upCrry4d3ZFgdX6rhT7gtkto=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.25 h1:s/fF4+yDQDoElYhfIVvSNyeCydfbuTKzhxSXDXCPasU=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.25/go.mod h1:IgPfDv5jqFIzQSNbUEMoitNooSMXjRSDkhXv8jiROvU=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.25 h1:ZntTCl5EsYnhN/IygQEUugpdwbhdkom9uHcbCftiGgA=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.25/go.mod h1:DBdPrgeocww+CSl1C8cEV8PN1mHMBhuCDLpXezyvWkE=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22 h1:yV+hCAHZZYJQcwAaszoBNwLbPItHvApxT0kVIw6jRgs=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22/go.mod h1:kbR1TL8llqB1eGnVbybcA4/wgScxdylOdyAd51yxPdw=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 h1:TToQNkvGguu209puTojY/ozlqy2d/SFNcoLIqTFi42g=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3 h1:kT6BcZsmMtNkP/iYMcRG+mIEA/IbeiUimXtGmqF39y0=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3/go.mod h1:Z8uGua2k4PPaGOYn66pK02rhMrot3Xk3tpBuUFPomZU=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3 h1:qcxX0JYlgWH3hpPUnd6U0ikcl6LLA9sLkXE2w1fpMvY=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3/go.mod h1:cLSNEmI45soc+Ef8K/L+8sEA3A3pYFEYf5B5UI+6bH4=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3 h1:ZC7Y/XgKUxwqcdhO5LE8P6oGP1eh6xlQReWNKfhvJno=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3/go.mod h1:WqfO7M9l9yUAw0HcHaikwRd/H6gzYdz7vjejCA5e2oY=
github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2 h1:p9TNFL8bFUMd+38YIpTAXpoxyz0MxC7FlbFEH4P4E1U=
github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2/go.mod h1:fNjyo0Coen9QTwQLWeV6WO2Nytwiu+cCcWaTdKCAqqE=
github.com/aws/aws-sdk-go-v2/service/sso v1.24.3 h1:UTpsIf0loCIWEbrqdLb+0RxnTXfWh2vhw4nQmFi4nPc=
github.com/aws/aws-sdk-go-v2/service/sso v1.24.3/go.mod h1:FZ9j3PFHHAR+w0BSEjK955w5YD2UwB/l/H0yAK3MJvI=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3 h1:2YCmIXv3tmiItw0LlYf6v7gEHebLY45kBEnPezbUKyU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3/go.mod h1:u19stRyNPxGhj6dRm+Cdgu6N75qnbW7+QN0q0dsAk58=
github.com/aws/aws-sdk-go-v2/service/sts v1.32.3 h1:wVnQ6tigGsRqSWDEEyH6lSAJ9OyFUsSnbaUWChuSGzs=
github.com/aws/aws-sdk-go-v2/service/sts v1.32.3/go.mod h1:VZa9yTFyj4o10YGsmDO4gbQJUvvhY72fhumT8W4LqsE=
github.com/aws/smithy-go v1.22.0 h1:uunKnWlcoL3zO7q+gG2Pk53joueEOsnNB28QdMsmiMM=
github.com/aws/smithy-go v1.22.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.25 h1:r67ps7oHCYnflpgDy2LZU0MAQtQbYIOqNNnqGO6xQkE=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.25/go.mod h1:GrGY+Q4fIokYLtjCVB/aFfCVL6hhGUFl8inD18fDalE=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1 h1:iXtILhvDxB6kPvEXgsDhGaZCSC6LQET5ZHSdJozeI0Y=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1/go.mod h1:9nu0fVANtYiAePIBh2/pFUSwtJ402hLnp854CNoDOeE=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.6 h1:HCpPsWqmYQieU7SS6E9HXfdAMSud0pteVXieJmcpIRI=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.6/go.mod h1:ngUiVRCco++u+soRRVBIvBZxSMMvOVMXA4PJ36JLfSw=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.6 h1:50+XsN70RS7dwJ2CkVNXzj7U2L1HKP8nqTd3XWEXBN4=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.6/go.mod h1:WqgLmwY7so32kG01zD8CPTJWVWM+TzJoOVHwTg4aPug=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.6 h1:BbGDtTi0T1DYlmjBiCr/le3wzhA37O8QTC5/Ab8+EXk=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.6/go.mod h1:hLMJt7Q8ePgViKupeymbqI0la+t9/iYFBjxQCFwuAwI=
github.com/aws/aws-sdk-go-v2/service/s3 v1.70.0 h1:HrHFR8RoS4l4EvodRMFcJMYQ8o3UhmALn2nbInXaxZA=
github.com/aws/aws-sdk-go-v2/service/s3 v1.70.0/go.mod h1:sT/iQz8JK3u/5gZkT+Hmr7GzVZehUMkRZpOaAwYXeGY=
github.com/aws/aws-sdk-go-v2/service/sso v1.24.7 h1:rLnYAfXQ3YAccocshIH5mzNNwZBkBo+bP6EhIxak6Hw=
github.com/aws/aws-sdk-go-v2/service/sso v1.24.7/go.mod h1:ZHtuQJ6t9A/+YDuxOLnbryAmITtr8UysSny3qcyvJTc=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.6 h1:JnhTZR3PiYDNKlXy50/pNeix9aGMo6lLpXwJ1mw8MD4=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.6/go.mod h1:URronUEGfXZN1VpdktPSD1EkAL9mfrV+2F4sjH38qOY=
github.com/aws/aws-sdk-go-v2/service/sts v1.33.2 h1:s4074ZO1Hk8qv65GqNXqDjmkf4HSQqJukaLuuW0TpDA=
github.com/aws/aws-sdk-go-v2/service/sts v1.33.2/go.mod h1:mVggCnIWoM09jP71Wh+ea7+5gAp53q+49wDFs1SW5z8=
github.com/aws/smithy-go v1.22.1 h1:/HPHZQ0g7f4eUeK6HKglFz8uwVfZKgoI25rb/J+dnro=
github.com/aws/smithy-go v1.22.1/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
@@ -135,8 +135,8 @@ github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/nats-io/nats.go v1.37.0 h1:07rauXbVnnJvv1gfIyghFEo6lUcYRY0WXc3x7x0vUxE=
github.com/nats-io/nats.go v1.37.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc=
github.com/nats-io/nkeys v0.4.8 h1:+wee30071y3vCZAYRsnrmIPaOe47A/SkK/UBDPdIV70=
github.com/nats-io/nkeys v0.4.8/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/oklog/ulid/v2 v2.1.0 h1:+9lhoxAP56we25tyYETBBY1YLA2SaoLvUFgrP2miPJU=
@@ -206,8 +206,8 @@ golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
@@ -223,14 +223,14 @@ golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -250,8 +250,8 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@@ -267,10 +267,10 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.7.0 h1:ntUhktv3OPE6TgYxXWv9vKvUSJyIFJlyohwbkEwPrKQ=
golang.org/x/time v0.7.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=


@@ -144,6 +144,7 @@ const (
ErrExistingObjectIsDirectory
ErrObjectParentIsFile
ErrDirectoryObjectContainsData
ErrDirectoryNotEmpty
ErrQuotaExceeded
ErrVersioningNotConfigured
@@ -593,6 +594,11 @@ var errorCodeResponse = map[ErrorCode]APIError{
Description: "Directory object contains data payload.",
HTTPStatusCode: http.StatusBadRequest,
},
ErrDirectoryNotEmpty: {
Code: "ErrDirectoryNotEmpty",
Description: "Directory object not empty.",
HTTPStatusCode: http.StatusBadRequest,
},
ErrQuotaExceeded: {
Code: "QuotaExceeded",
Description: "Your request was denied due to quota exceeded.",


@@ -16,7 +16,7 @@
copy_object() {
if [ $# -ne 4 ]; then
echo "copy object command requires command type, source, bucket, key"
log 2 "copy object command requires command type, source, bucket, key"
return 1
fi
local exit_code=0
@@ -24,7 +24,7 @@ copy_object() {
record_command "copy-object" "client:$1"
if [[ $1 == 's3' ]]; then
error=$(send_command aws --no-verify-ssl s3 cp "$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
elif [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api copy-object --copy-source "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
log 5 "s3cmd ${S3CMD_OPTS[*]} --no-check-certificate cp s3://$2 s3://$3/$4"
@@ -32,12 +32,12 @@ copy_object() {
elif [[ $1 == 'mc' ]]; then
error=$(send_command mc --insecure cp "$MC_ALIAS/$2" "$MC_ALIAS/$3/$4" 2>&1) || exit_code=$?
else
echo "'copy-object' not implemented for '$1'"
log 2 "'copy-object' not implemented for '$1'"
return 1
fi
log 5 "copy object exit code: $exit_code"
if [ $exit_code -ne 0 ]; then
echo "error copying object to bucket: $error"
log 2 "error copying object to bucket: $error"
return 1
fi
return 0


@@ -31,7 +31,7 @@ create_bucket() {
log 6 "create bucket"
if [[ $1 == 's3' ]]; then
error=$(send_command aws --no-verify-ssl s3 mb s3://"$2" 2>&1) || exit_code=$?
elif [[ $1 == "aws" ]] || [[ $1 == 's3api' ]]; then
elif [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api create-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
log 5 "s3cmd ${S3CMD_OPTS[*]} --no-check-certificate mb s3://$2"


@@ -54,6 +54,7 @@ create_multipart_upload_with_user() {
return 1
fi
upload_id="${upload_id//\"/}"
echo "$upload_id"
return 0
}


@@ -7,7 +7,7 @@ create_presigned_url() {
fi
local presign_result=0
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
presigned_url=$(send_command aws s3 presign "s3://$2/$3" --expires-in 900) || presign_result=$?
elif [[ $1 == 's3cmd' ]]; then
presigned_url=$(send_command s3cmd --no-check-certificate "${S3CMD_OPTS[@]}" signurl "s3://$2/$3" "$(echo "$(date +%s)" + 900 | bc)") || presign_result=$?


@@ -32,7 +32,7 @@ delete_bucket() {
exit_code=0
if [[ $1 == 's3' ]]; then
error=$(send_command aws --no-verify-ssl s3 rb s3://"$2") || exit_code=$?
elif [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
elif [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api delete-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rb s3://"$2" 2>&1) || exit_code=$?


@@ -21,7 +21,7 @@ delete_bucket_policy() {
return 1
fi
local delete_result=0
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
if [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
error=$(send_command aws --no-verify-ssl s3api delete-bucket-policy --bucket "$2" 2>&1) || delete_result=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate delpolicy "s3://$2" 2>&1) || delete_result=$?


@@ -21,7 +21,7 @@ delete_bucket_tagging() {
return 1
fi
local result
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
tags=$(send_command aws --no-verify-ssl s3api delete-bucket-tagging --bucket "$2" 2>&1) || result=$?
elif [[ $1 == 'mc' ]]; then
tags=$(send_command mc --insecure tag remove "$MC_ALIAS"/"$2" 2>&1) || result=$?


@@ -25,7 +25,7 @@ delete_object() {
local exit_code=0
if [[ $1 == 's3' ]]; then
delete_object_error=$(send_command aws --no-verify-ssl s3 rm "s3://$2/$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
elif [[ $1 == 's3api' ]]; then
delete_object_error=$(send_command aws --no-verify-ssl s3api delete-object --bucket "$2" --key "$3" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
delete_object_error=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate rm "s3://$2/$3" 2>&1) || exit_code=$?


@@ -17,22 +17,22 @@
delete_object_tagging() {
record_command "delete-object-tagging" "client:$1"
if [[ $# -ne 3 ]]; then
echo "delete object tagging command missing command type, bucket, key"
log 2 "delete object tagging command missing command type, bucket, key"
return 1
fi
delete_result=0
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api delete-object-tagging --bucket "$2" --key "$3" 2>&1) || delete_result=$?
elif [[ $1 == 'mc' ]]; then
error=$(send_command mc --insecure tag remove "$MC_ALIAS/$2/$3") || delete_result=$?
elif [ "$1" == 'rest' ]; then
delete_object_tagging_rest "$2" "$3" || delete_result=$?
else
echo "delete-object-tagging command not implemented for '$1'"
log 2 "delete-object-tagging command not implemented for '$1'"
return 1
fi
if [[ $delete_result -ne 0 ]]; then
echo "error deleting object tagging: $error"
log 2 "error deleting object tagging: $error"
return 1
fi
return 0


@@ -21,7 +21,7 @@ get_bucket_acl() {
return 1
fi
local exit_code=0
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
if [[ $1 == 's3api' ]]; then
acl=$(send_command aws --no-verify-ssl s3api get-bucket-acl --bucket "$2" 2>&1) || exit_code="$?"
elif [[ $1 == 's3cmd' ]]; then
acl=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate info "s3://$2" 2>&1) || exit_code="$?"


@@ -17,17 +17,18 @@
get_bucket_location() {
record_command "get-bucket-location" "client:$1"
if [[ $# -ne 2 ]]; then
echo "get bucket location command requires command type, bucket name"
log 2 "get bucket location command requires command type, bucket name"
return 1
fi
if [[ $1 == 'aws' ]]; then
get_result=0
if [[ $1 == 's3api' ]]; then
get_bucket_location_aws "$2" || get_result=$?
elif [[ $1 == 's3cmd' ]]; then
get_bucket_location_s3cmd "$2" || get_result=$?
elif [[ $1 == 'mc' ]]; then
get_bucket_location_mc "$2" || get_result=$?
else
echo "command type '$1' not implemented for get_bucket_location"
log 2 "command type '$1' not implemented for get_bucket_location"
return 1
fi
if [[ $get_result -ne 0 ]]; then
@@ -39,7 +40,7 @@ get_bucket_location() {
get_bucket_location_aws() {
record_command "get-bucket-location" "client:s3api"
if [[ $# -ne 1 ]]; then
echo "get bucket location (aws) requires bucket name"
log 2 "get bucket location (aws) requires bucket name"
return 1
fi
location_json=$(send_command aws --no-verify-ssl s3api get-bucket-location --bucket "$1") || location_result=$?
@@ -59,7 +60,7 @@ get_bucket_location_s3cmd() {
fi
info=$(send_command s3cmd --no-check-certificate info "s3://$1") || results=$?
if [[ $results -ne 0 ]]; then
echo "error getting s3cmd info: $info"
log 2 "error getting bucket location: $location"
return 1
fi
bucket_location=$(echo "$info" | grep -o 'Location:.*' | awk '{print $2}')
@@ -69,12 +70,12 @@ get_bucket_location_s3cmd() {
get_bucket_location_mc() {
record_command "get-bucket-location" "client:mc"
if [[ $# -ne 1 ]]; then
echo "get bucket location (mc) requires bucket name"
log 2 "get bucket location (mc) requires bucket name"
return 1
fi
info=$(send_command mc --insecure stat "$MC_ALIAS/$1") || results=$?
if [[ $results -ne 0 ]]; then
echo "error getting s3cmd info: $info"
log 2 "error getting s3cmd info: $info"
return 1
fi
# shellcheck disable=SC2034


@@ -21,7 +21,7 @@ get_bucket_policy() {
return 1
fi
local get_bucket_policy_result=0
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
if [[ $1 == 's3api' ]]; then
get_bucket_policy_aws "$2" || get_bucket_policy_result=$?
elif [[ $1 == 's3cmd' ]]; then
get_bucket_policy_s3cmd "$2" || get_bucket_policy_result=$?
@@ -97,41 +97,57 @@ get_bucket_policy_s3cmd() {
policy_brackets=false
# NOTE: versitygw sends policies back in multiple lines here, direct in single line
while IFS= read -r line; do
if [[ $policy_brackets == false ]]; then
policy_line=$(echo "$line" | grep 'Policy: ')
if [[ $policy_line != "" ]]; then
if [[ $policy_line != *'{'* ]]; then
break
fi
if [[ $policy_line == *'}'* ]]; then
log 5 "policy on single line"
bucket_policy=${policy_line//Policy:/}
break
else
policy_brackets=true
bucket_policy+="{"
fi
fi
else
bucket_policy+=$line
if [[ $line == "" ]]; then
break
fi
if check_and_load_policy_info; then
break
fi
done <<< "$info"
log 5 "bucket policy: $bucket_policy"
return 0
}
# return 0 for no policy, single-line policy, or loading complete, 1 for still searching or loading
check_and_load_policy_info() {
if [[ $policy_brackets == false ]]; then
if search_for_first_policy_line_or_full_policy; then
return 0
fi
else
bucket_policy+=$line
if [[ $line == "}" ]]; then
return 0
fi
fi
return 1
}
# return 0 for empty or single-line policy, 1 for other cases
search_for_first_policy_line_or_full_policy() {
policy_line=$(echo "$line" | grep 'Policy: ')
if [[ $policy_line != "" ]]; then
if [[ $policy_line != *'{'* ]]; then
return 0
fi
if [[ $policy_line == *'}'* ]]; then
log 5 "policy on single line"
bucket_policy=${policy_line//Policy:/}
return 0
else
policy_brackets=true
bucket_policy+="{"
fi
fi
return 1
}
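The refactor above splits the old inline loop into two helpers, and the NOTE earlier in this hunk records why: versitygw prints the policy JSON across several lines after the "Policy:" label, while other servers print the whole document on that single line. A minimal Go restatement of the accumulation contract those helpers implement, purely as an illustration (the function name is made up; the bash above remains the actual implementation):

package integration

import (
	"bufio"
	"strings"
)

// extractPolicy restates the bash helpers: locate the "Policy:" line, return
// the JSON directly when it fits on that line, and otherwise keep appending
// following lines until the closing brace is reached.
func extractPolicy(info string) string {
	var policy strings.Builder
	inPolicy := false
	sc := bufio.NewScanner(strings.NewReader(info))
	for sc.Scan() {
		line := sc.Text()
		if !inPolicy {
			if !strings.Contains(line, "Policy:") {
				continue
			}
			if !strings.Contains(line, "{") {
				return "" // a "Policy:" line with no JSON means no policy is set
			}
			if strings.Contains(line, "}") {
				// whole policy printed on a single line
				return strings.TrimSpace(strings.TrimPrefix(strings.TrimSpace(line), "Policy:"))
			}
			inPolicy = true
			policy.WriteString("{")
			continue
		}
		policy.WriteString(line)
		if strings.TrimSpace(line) == "}" {
			break
		}
	}
	return policy.String()
}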
get_bucket_policy_mc() {
record_command "get-bucket-policy" "client:mc"
if [[ $# -ne 1 ]]; then
echo "aws 'get bucket policy' command requires bucket"
log 2 "aws 'get bucket policy' command requires bucket"
return 1
fi
bucket_policy=$(send_command mc --insecure anonymous get-json "$MC_ALIAS/$1") || get_result=$?
if [[ $get_result -ne 0 ]]; then
echo "error getting policy: $bucket_policy"
log 2 "error getting policy: $bucket_policy"
return 1
fi
return 0


@@ -21,7 +21,7 @@ get_bucket_tagging() {
assert [ $# -eq 2 ]
record_command "get-bucket-tagging" "client:$1"
local result
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
tags=$(send_command aws --no-verify-ssl s3api get-bucket-tagging --bucket "$2" 2>&1) || result=$?
elif [[ $1 == 'mc' ]]; then
tags=$(send_command mc --insecure tag list "$MC_ALIAS"/"$2" 2>&1) || result=$?
@@ -35,7 +35,7 @@ get_bucket_tagging() {
export tags=
return 0
fi
echo "error getting bucket tags: $tags"
log 2 "error getting bucket tags: $tags"
return 1
fi
export tags


@@ -24,7 +24,7 @@ get_object() {
local exit_code=0
if [[ $1 == 's3' ]]; then
get_object_error=$(send_command aws --no-verify-ssl s3 mv "s3://$2/$3" "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
elif [[ $1 == 's3api' ]]; then
get_object_error=$(send_command aws --no-verify-ssl s3api get-object --bucket "$2" --key "$3" "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
get_object_error=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate get "s3://$2/$3" "$4" 2>&1) || exit_code=$?


@@ -21,7 +21,7 @@ get_object_tagging() {
return 1
fi
local result
if [[ "$1" == 'aws' ]] || [[ $1 == 's3api' ]]; then
if [[ $1 == 's3api' ]]; then
tags=$(send_command aws --no-verify-ssl s3api get-object-tagging --bucket "$2" --key "$3" 2>&1) || result=$?
elif [[ "$1" == 'mc' ]]; then
tags=$(send_command mc --insecure tag list "$MC_ALIAS"/"$2"/"$3" 2>&1) || result=$?


@@ -29,21 +29,22 @@ head_bucket() {
return 1
fi
local exit_code=0
if [[ $1 == "aws" ]] || [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
if [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
bucket_info=$(send_command aws --no-verify-ssl s3api head-bucket --bucket "$2" 2>&1) || exit_code=$?
elif [[ $1 == "s3cmd" ]]; then
bucket_info=$(send_command s3cmd --no-check-certificate info "s3://$2" 2>&1) || exit_code=$?
elif [[ $1 == 'mc' ]]; then
bucket_info=$(send_command mc --insecure stat "$MC_ALIAS"/"$2" 2>&1) || exit_code=$?
else
fail "invalid command type $1"
log 2 "invalid command type $1"
fi
if [ $exit_code -ne 0 ]; then
log 2 "error getting bucket info: $bucket_info"
if [[ "$bucket_info" == *"404"* ]] || [[ "$bucket_info" == *"does not exist"* ]]; then
return 1
fi
log 2 "error getting bucket info: $bucket_info"
return 2
fi
echo "$bucket_info"
return 0
}


@@ -21,7 +21,7 @@ head_object() {
return 2
fi
local exit_code=0
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
if [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
metadata=$(send_command aws --no-verify-ssl s3api head-object --bucket "$2" --key "$3" 2>&1) || exit_code="$?"
elif [[ $1 == 's3cmd' ]]; then
metadata=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate info s3://"$2/$3" 2>&1) || exit_code="$?"


@@ -18,14 +18,14 @@ list_buckets() {
log 6 "list_buckets"
record_command "list-buckets" "client:$1"
if [ $# -ne 1 ]; then
echo "list buckets command missing command type"
log 2 "list buckets command missing command type"
return 1
fi
local exit_code=0
if [[ $1 == 's3' ]]; then
buckets=$(send_command aws --no-verify-ssl s3 ls 2>&1 s3://) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
elif [[ $1 == 's3api' ]]; then
list_buckets_s3api "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
buckets=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate ls s3:// 2>&1) || exit_code=$?
@@ -34,11 +34,11 @@ list_buckets() {
elif [[ $1 == 'rest' ]]; then
list_buckets_rest || exit_code=$?
else
echo "list buckets command not implemented for '$1'"
log 2 "list buckets command not implemented for '$1'"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error listing buckets: $buckets"
log 2 "error listing buckets: $buckets"
return 1
fi
@@ -57,7 +57,7 @@ list_buckets() {
list_buckets_with_user() {
record_command "list-buckets" "client:$1"
if [ $# -ne 3 ]; then
echo "'list buckets as user' command missing command type, username, password"
log 2 "'list buckets as user' command missing command type, username, password"
return 1
fi
@@ -71,11 +71,11 @@ list_buckets_with_user() {
elif [[ $1 == 'mc' ]]; then
buckets=$(send_command mc --insecure ls "$MC_ALIAS" 2>&1) || exit_code=$?
else
echo "list buckets command not implemented for '$1'"
log 2 "list buckets command not implemented for '$1'"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error listing buckets: $buckets"
log 2 "error listing buckets: $buckets"
return 1
fi
@@ -97,7 +97,7 @@ list_buckets_s3api() {
return 1
fi
if ! output=$(AWS_ACCESS_KEY_ID="$1" AWS_SECRET_ACCESS_KEY="$2" send_command aws --no-verify-ssl s3api list-buckets 2>&1); then
echo "error listing buckets: $output"
log 2 "error listing buckets: $output"
return 1
fi
log 5 "bucket data: $output"


@@ -1,6 +1,6 @@
#!/usr/bin/env bash
source ./tests/util_list_objects.sh
source ./tests/util/util_list_objects.sh
source ./tests/commands/command.sh
# Copyright 2024 Versity Software
@@ -29,7 +29,7 @@ list_objects() {
local output
local result=0
if [[ $1 == "aws" ]] || [[ $1 == 's3' ]]; then
if [[ $1 == 's3' ]]; then
output=$(send_command aws --no-verify-ssl s3 ls s3://"$2" 2>&1) || result=$?
elif [[ $1 == 's3api' ]]; then
list_objects_s3api "$2" || result=$?
@@ -91,7 +91,7 @@ list_objects_s3api() {
# export objects on success, return 1 for failure
list_objects_s3api_v1() {
if [ $# -lt 1 ] || [ $# -gt 2 ]; then
echo "list objects command requires bucket, (optional) delimiter"
log 2 "list objects command requires bucket, (optional) delimiter"
return 1
fi
if [ "$2" == "" ]; then
@@ -100,7 +100,7 @@ list_objects_s3api_v1() {
objects=$(send_command aws --no-verify-ssl s3api list-objects --bucket "$1" --delimiter "$2") || local result=$?
fi
if [[ $result -ne 0 ]]; then
echo "error listing objects: $objects"
log 2 "error listing objects: $objects"
return 1
fi
export objects


@@ -19,13 +19,13 @@
# export objects on success, return 1 for failure
list_objects_v2() {
if [ $# -ne 1 ]; then
echo "list objects command missing bucket and/or path"
log 2 "list objects command missing bucket and/or path"
return 1
fi
record_command "list-objects-v2 client:s3api"
objects=$(send_command aws --no-verify-ssl s3api list-objects-v2 --bucket "$1") || local result=$?
if [[ $result -ne 0 ]]; then
echo "error listing objects: $objects"
log 2 "error listing objects: $objects"
return 1
fi
}


@@ -14,7 +14,7 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/util_file.sh
source ./tests/util/util_file.sh
source ./tests/commands/command.sh
put_bucket_acl_s3api() {


@@ -21,7 +21,7 @@ put_bucket_policy() {
return 1
fi
local put_policy_result=0
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
if [[ $1 == 's3api' ]]; then
policy=$(send_command aws --no-verify-ssl s3api put-bucket-policy --bucket "$2" --policy "file://$3" 2>&1) || put_policy_result=$?
elif [[ $1 == 's3cmd' ]]; then
policy=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate setpolicy "$3" "s3://$2" 2>&1) || put_policy_result=$?


@@ -23,7 +23,7 @@ put_bucket_tagging() {
local error
local result=0
record_command "put-bucket-tagging" "client:$1"
if [[ $1 == 'aws' ]] || [[ $1 == 's3api' ]]; then
if [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api put-bucket-tagging --bucket "$2" --tagging "TagSet=[{Key=$3,Value=$4}]") || result=$?
elif [[ $1 == 'mc' ]]; then
error=$(send_command mc --insecure tag set "$MC_ALIAS"/"$2" "$3=$4" 2>&1) || result=$?


@@ -27,7 +27,7 @@ put_object() {
local error
if [[ $1 == 's3' ]]; then
error=$(send_command aws --no-verify-ssl s3 mv "$2" s3://"$3/$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then
elif [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api put-object --body "$2" --bucket "$3" --key "$4" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
error=$(send_command s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate put "$2" s3://"$3/$4" 2>&1) || exit_code=$?


@@ -22,7 +22,7 @@ put_object_tagging() {
local error
local result
record_command "put-object-tagging" "client:$1"
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
error=$(send_command aws --no-verify-ssl s3api put-object-tagging --bucket "$2" --key "$3" --tagging "TagSet=[{Key=$4,Value=$5}]" 2>&1) || result=$?
elif [[ $1 == 'mc' ]]; then
error=$(send_command mc --insecure tag set "$MC_ALIAS"/"$2"/"$3" "$4=$5" 2>&1) || result=$?


@@ -17,14 +17,14 @@
upload_part_copy() {
record_command "upload-part-copy" "client:s3api"
if [ $# -ne 5 ]; then
echo "upload multipart part copy function must have bucket, key, upload ID, file name, part number"
log 2 "upload multipart part copy function must have bucket, key, upload ID, file name, part number"
return 1
fi
local etag_json
echo "$1 $2 $3 $4 $5"
log 5 "parameters: $1 $2 $3 $4 $5"
etag_json=$(send_command aws --no-verify-ssl s3api upload-part-copy --bucket "$1" --key "$2" --upload-id "$3" --part-number "$5" --copy-source "$1/$4-$(($5-1))") || local uploaded=$?
if [[ $uploaded -ne 0 ]]; then
echo "Error uploading part $5: $etag_json"
log 2 "Error uploading part $5: $etag_json"
return 1
fi
etag=$(echo "$etag_json" | jq '.CopyPartResult.ETag')


@@ -63,13 +63,7 @@ check_universal_vars() {
source_config_file
fi
if [ -n "$COMMAND_LOG" ]; then
if [ -e "$COMMAND_LOG" ]; then
if ! error=$(rm "$COMMAND_LOG"); then
log 3 "error removing command log: $error"
return 1
fi
fi
echo "******** $(date +"%Y-%m-%d %H:%M:%S") $BATS_TEST_NAME COMMANDS ********" >> "$COMMAND_LOG"
init_command_log
fi
if [ "$GITHUB_ACTIONS" != "true" ] && [ -r "$SECRETS_FILE" ]; then
@@ -80,6 +74,10 @@ check_universal_vars() {
fi
if [[ -n "$LOG_LEVEL" ]]; then
if [[ $LOG_LEVEL -lt 2 ]]; then
log 1 "log level must be 2 or greater"
exit 1
fi
export LOG_LEVEL_INT=$LOG_LEVEL
fi
@@ -139,6 +137,22 @@ check_universal_vars() {
export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_REGION AWS_PROFILE AWS_ENDPOINT_URL
}
delete_command_log() {
if [ -e "$COMMAND_LOG" ]; then
if ! error=$(rm "$COMMAND_LOG"); then
log 2 "error removing command log: $error"
return 1
fi
fi
}
init_command_log() {
if ! delete_command_log; then
exit 1
fi
echo "******** $(date +"%Y-%m-%d %H:%M:%S") $BATS_TEST_NAME COMMANDS ********" >> "$COMMAND_LOG"
}
check_versity_vars() {
if [ -z "$LOCAL_FOLDER" ]; then
log 1 "LOCAL_FOLDER missing"


@@ -86,6 +86,7 @@ func TestListBuckets(s *S3Conf) {
ListBuckets_invalid_max_buckets(s)
ListBuckets_truncated(s)
ListBuckets_success(s)
ListBuckets_empty_success(s)
}
func TestDeleteBucket(s *S3Conf) {
@@ -152,6 +153,7 @@ func TestHeadObject(s *S3Conf) {
HeadObject_directory_object_noslash(s)
HeadObject_non_existing_dir_object(s)
HeadObject_with_contenttype(s)
HeadObject_invalid_parent_dir(s)
HeadObject_success(s)
}
@@ -159,6 +161,7 @@ func TestGetObjectAttributes(s *S3Conf) {
GetObjectAttributes_non_existing_bucket(s)
GetObjectAttributes_non_existing_object(s)
GetObjectAttributes_invalid_attrs(s)
GetObjectAttributes_invalid_parent(s)
GetObjectAttributes_empty_attrs(s)
GetObjectAttributes_existing_object(s)
}
@@ -167,6 +170,7 @@ func TestGetObject(s *S3Conf) {
GetObject_non_existing_key(s)
GetObject_directory_object_noslash(s)
GetObject_invalid_ranges(s)
GetObject_invalid_parent(s)
GetObject_with_meta(s)
GetObject_success(s)
GetObject_directory_success(s)
@@ -198,6 +202,7 @@ func TestListObjectsV2(s *S3Conf) {
ListObjectsV2_truncated_common_prefixes(s)
ListObjectsV2_all_objs_max_keys(s)
ListObjectsV2_list_all_objs(s)
ListObjectsV2_invalid_parent_prefix(s)
}
// VD stands for Versioning Disabled
@@ -239,6 +244,7 @@ func TestPutObjectTagging(s *S3Conf) {
func TestGetObjectTagging(s *S3Conf) {
GetObjectTagging_non_existing_object(s)
GetObjectTagging_unset_tags(s)
GetObjectTagging_invalid_parent(s)
GetObjectTagging_success(s)
}
@@ -515,6 +521,7 @@ func TestPosix(s *S3Conf) {
PutObject_name_too_long(s)
HeadObject_name_too_long(s)
DeleteObject_name_too_long(s)
DeleteObject_directory_not_empty(s)
// posix specific versioning tests
if !s.versioningEnabled {
TestVersioningDisabled(s)
@@ -527,6 +534,7 @@ func TestIAM(s *S3Conf) {
IAM_userplus_CreateBucket(s)
IAM_admin_ChangeBucketOwner(s)
IAM_ChangeBucketOwner_back_to_root(s)
IAM_ListBuckets(s)
}
func TestAccessControl(s *S3Conf) {
@@ -567,6 +575,7 @@ func TestVersioning(s *S3Conf) {
Versioning_CopyObject_special_chars(s)
// HeadObject action
Versioning_HeadObject_invalid_versionId(s)
Versioning_HeadObject_invalid_parent(s)
Versioning_HeadObject_success(s)
Versioning_HeadObject_without_versionId(s)
Versioning_HeadObject_delete_marker(s)
@@ -690,6 +699,7 @@ func GetIntTests() IntTests {
"ListBuckets_invalid_max_buckets": ListBuckets_invalid_max_buckets,
"ListBuckets_truncated": ListBuckets_truncated,
"ListBuckets_success": ListBuckets_success,
"ListBuckets_empty_success": ListBuckets_empty_success,
"DeleteBucket_non_existing_bucket": DeleteBucket_non_existing_bucket,
"DeleteBucket_non_empty_bucket": DeleteBucket_non_empty_bucket,
"DeleteBucket_success_status_code": DeleteBucket_success_status_code,
@@ -725,15 +735,18 @@ func GetIntTests() IntTests {
"HeadObject_non_existing_dir_object": HeadObject_non_existing_dir_object,
"HeadObject_name_too_long": HeadObject_name_too_long,
"HeadObject_with_contenttype": HeadObject_with_contenttype,
"HeadObject_invalid_parent_dir": HeadObject_invalid_parent_dir,
"HeadObject_success": HeadObject_success,
"GetObjectAttributes_non_existing_bucket": GetObjectAttributes_non_existing_bucket,
"GetObjectAttributes_non_existing_object": GetObjectAttributes_non_existing_object,
"GetObjectAttributes_invalid_attrs": GetObjectAttributes_invalid_attrs,
"GetObjectAttributes_invalid_parent": GetObjectAttributes_invalid_parent,
"GetObjectAttributes_empty_attrs": GetObjectAttributes_empty_attrs,
"GetObjectAttributes_existing_object": GetObjectAttributes_existing_object,
"GetObject_non_existing_key": GetObject_non_existing_key,
"GetObject_directory_object_noslash": GetObject_directory_object_noslash,
"GetObject_invalid_ranges": GetObject_invalid_ranges,
"GetObject_invalid_parent": GetObject_invalid_parent,
"GetObject_with_meta": GetObject_with_meta,
"GetObject_success": GetObject_success,
"GetObject_directory_success": GetObject_directory_success,
@@ -759,9 +772,11 @@ func GetIntTests() IntTests {
"ListObjectsV2_truncated_common_prefixes": ListObjectsV2_truncated_common_prefixes,
"ListObjectsV2_all_objs_max_keys": ListObjectsV2_all_objs_max_keys,
"ListObjectsV2_list_all_objs": ListObjectsV2_list_all_objs,
"ListObjectsV2_invalid_parent_prefix": ListObjectsV2_invalid_parent_prefix,
"ListObjectVersions_VD_success": ListObjectVersions_VD_success,
"DeleteObject_non_existing_object": DeleteObject_non_existing_object,
"DeleteObject_directory_object_noslash": DeleteObject_directory_object_noslash,
"DeleteObject_directory_not_empty": DeleteObject_directory_not_empty,
"DeleteObject_name_too_long": DeleteObject_name_too_long,
"DeleteObject_non_existing_dir_object": DeleteObject_non_existing_dir_object,
"DeleteObject_success": DeleteObject_success,
@@ -782,6 +797,7 @@ func GetIntTests() IntTests {
"PutObjectTagging_success": PutObjectTagging_success,
"GetObjectTagging_non_existing_object": GetObjectTagging_non_existing_object,
"GetObjectTagging_unset_tags": GetObjectTagging_unset_tags,
"GetObjectTagging_invalid_parent": GetObjectTagging_invalid_parent,
"GetObjectTagging_success": GetObjectTagging_success,
"DeleteObjectTagging_non_existing_object": DeleteObjectTagging_non_existing_object,
"DeleteObjectTagging_success_status": DeleteObjectTagging_success_status,
@@ -941,6 +957,7 @@ func GetIntTests() IntTests {
"IAM_userplus_CreateBucket": IAM_userplus_CreateBucket,
"IAM_admin_ChangeBucketOwner": IAM_admin_ChangeBucketOwner,
"IAM_ChangeBucketOwner_back_to_root": IAM_ChangeBucketOwner_back_to_root,
"IAM_ListBuckets": IAM_ListBuckets,
"AccessControl_default_ACL_user_access_denied": AccessControl_default_ACL_user_access_denied,
"AccessControl_default_ACL_userplus_access_denied": AccessControl_default_ACL_userplus_access_denied,
"AccessControl_default_ACL_admin_successful_access": AccessControl_default_ACL_admin_successful_access,
@@ -969,6 +986,7 @@ func GetIntTests() IntTests {
"Versioning_CopyObject_from_an_object_version": Versioning_CopyObject_from_an_object_version,
"Versioning_CopyObject_special_chars": Versioning_CopyObject_special_chars,
"Versioning_HeadObject_invalid_versionId": Versioning_HeadObject_invalid_versionId,
"Versioning_HeadObject_invalid_parent": Versioning_HeadObject_invalid_parent,
"Versioning_HeadObject_success": Versioning_HeadObject_success,
"Versioning_HeadObject_without_versionId": Versioning_HeadObject_without_versionId,
"Versioning_HeadObject_delete_marker": Versioning_HeadObject_delete_marker,


@@ -2160,6 +2160,7 @@ func ListBuckets_with_prefix(s *S3Conf) error {
return nil
})
}
func ListBuckets_invalid_max_buckets(s *S3Conf) error {
testName := "ListBuckets_invalid_max_buckets"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -2255,6 +2256,24 @@ func ListBuckets_truncated(s *S3Conf) error {
})
}
func ListBuckets_empty_success(s *S3Conf) error {
testName := "ListBuckets_empty_success"
return actionHandlerNoSetup(s, testName, func(s3client *s3.Client, bucket string) error {
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
out, err := s3client.ListBuckets(ctx, &s3.ListBucketsInput{})
cancel()
if err != nil {
return err
}
if len(out.Buckets) > 0 {
return fmt.Errorf("expected list buckets result to be %v, instead got %v", []types.Bucket{}, out.Buckets)
}
return nil
})
}
func ListBuckets_success(s *S3Conf) error {
testName := "ListBuckets_success"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -2967,9 +2986,15 @@ func PutObject_missing_object_lock_retention_config(s *S3Conf) error {
ObjectLockMode: types.ObjectLockModeCompliance,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLockInvalidHeaders)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLockInvalidHeaders)); err != nil {
// return err
// }
retainDate := time.Now().Add(time.Hour * 48)
@@ -2980,9 +3005,15 @@ func PutObject_missing_object_lock_retention_config(s *S3Conf) error {
ObjectLockRetainUntilDate: &retainDate,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLockInvalidHeaders)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLockInvalidHeaders)); err != nil {
// return err
// }
return nil
})
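The checkSdkApiErr(err, "InvalidRequest") fallback above compares only the SDK-reported error code, because the linked aws-sdk-go-v2 regression (issue 2921) can drop the detailed error body that checkApiErr inspects. The checkSdkApiErr helper itself is not part of this diff, so the sketch below is only a guess at that kind of code-only comparison, built on the public smithy.APIError interface and using a made-up helper name:

package integration

import (
	"errors"
	"fmt"

	"github.com/aws/smithy-go"
)

// matchSdkErrorCode is a hypothetical stand-in for checkSdkApiErr: while the
// SDK regression hides the full error message, only the error code reported
// by the service is compared.
func matchSdkErrorCode(err error, wantCode string) error {
	if err == nil {
		return fmt.Errorf("expected an API error with code %q, got nil", wantCode)
	}
	var apiErr smithy.APIError
	if !errors.As(err, &apiErr) {
		return fmt.Errorf("expected a smithy API error, got: %w", err)
	}
	if apiErr.ErrorCode() != wantCode {
		return fmt.Errorf("expected error code %q, got %q", wantCode, apiErr.ErrorCode())
	}
	return nil
}

Once the SDK issue is resolved, the commented-out checkApiErr calls can again assert the full code, message, and status.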
@@ -3340,6 +3371,34 @@ func HeadObject_with_contenttype(s *S3Conf) error {
})
}
func HeadObject_invalid_parent_dir(s *S3Conf) error {
testName := "HeadObject_invalid_parent_dir"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
obj, dataLen := "not-a-dir", int64(1)
_, err := putObjectWithData(dataLen, &s3.PutObjectInput{
Bucket: &bucket,
Key: &obj,
}, s3client)
if err != nil {
return err
}
obj = "not-a-dir/bad-obj"
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
_, err = s3client.HeadObject(ctx, &s3.HeadObjectInput{
Bucket: &bucket,
Key: &obj,
})
defer cancel()
if err := checkSdkApiErr(err, "NotFound"); err != nil {
return err
}
return nil
})
}
func HeadObject_success(s *S3Conf) error {
testName := "HeadObject_success"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -3456,6 +3515,34 @@ func GetObjectAttributes_invalid_attrs(s *S3Conf) error {
})
}
func GetObjectAttributes_invalid_parent(s *S3Conf) error {
testName := "GetObjectAttributes_invalid_parent"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
obj := "not-a-dir"
_, err := putObjects(s3client, []string{obj}, bucket)
if err != nil {
return err
}
obj = "not-a-dir/bad-obj"
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
_, err = s3client.GetObjectAttributes(ctx, &s3.GetObjectAttributesInput{
Bucket: &bucket,
Key: &obj,
ObjectAttributes: []types.ObjectAttributes{
types.ObjectAttributesEtag,
},
})
cancel()
var bae *types.NoSuchKey
if !errors.As(err, &bae) {
return err
}
return nil
})
}
func GetObjectAttributes_empty_attrs(s *S3Conf) error {
testName := "GetObjectAttributes_empty_attrs"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -3642,6 +3729,33 @@ func GetObject_invalid_ranges(s *S3Conf) error {
})
}
func GetObject_invalid_parent(s *S3Conf) error {
testName := "GetObject_invalid_parent"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
dataLength, obj := int64(1234567), "not-a-dir"
_, err := putObjectWithData(dataLength, &s3.PutObjectInput{
Bucket: &bucket,
Key: &obj,
}, s3client)
if err != nil {
return err
}
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
_, err = s3client.GetObject(ctx, &s3.GetObjectInput{
Bucket: &bucket,
Key: getPtr("not-a-dir/bad-obj"),
})
cancel()
var bae *types.NoSuchKey
if !errors.As(err, &bae) {
return err
}
return nil
})
}
func GetObject_with_meta(s *S3Conf) error {
testName := "GetObject_with_meta"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -4573,6 +4687,45 @@ func ListObjectsV2_list_all_objs(s *S3Conf) error {
})
}
func ListObjectsV2_invalid_parent_prefix(s *S3Conf) error {
testName := "ListObjectsV2_invalid_parent_prefix"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
_, err := putObjects(s3client, []string{"file"}, bucket)
if err != nil {
return err
}
delim, maxKeys := "/", int32(100)
prefix := "file/file/file"
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
out, err := s3client.ListObjectsV2(ctx, &s3.ListObjectsV2Input{
Bucket: &bucket,
Delimiter: &delim,
MaxKeys: &maxKeys,
Prefix: &prefix,
})
cancel()
if err != nil {
return err
}
if len(out.CommonPrefixes) > 0 {
return fmt.Errorf("expected the common prefixes to be %v, instead got %v", []string{""}, out.CommonPrefixes)
}
if *out.MaxKeys != maxKeys {
return fmt.Errorf("expected the max-keys to be %v, instead got %v", maxKeys, *out.MaxKeys)
}
if *out.Delimiter != delim {
return fmt.Errorf("expected the delimiter to be %v, instead got %v", delim, *out.Delimiter)
}
if len(out.Contents) > 0 {
return fmt.Errorf("expected the objects to be %v, instead got %v", []types.Object{}, out.Contents)
}
return nil
})
}
func ListObjectVersions_VD_success(s *S3Conf) error {
testName := "ListObjectVersions_VD_success"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -4666,6 +4819,37 @@ func DeleteObject_directory_object_noslash(s *S3Conf) error {
})
}
func DeleteObject_directory_not_empty(s *S3Conf) error {
testName := "DeleteObject_directory_not_empty"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
obj := "dir/my-obj"
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
_, err := s3client.PutObject(ctx, &s3.PutObjectInput{
Bucket: &bucket,
Key: &obj,
})
cancel()
if err != nil {
return err
}
obj = "dir/"
ctx, cancel = context.WithTimeout(context.Background(), shortTimeout)
_, err = s3client.DeleteObject(ctx, &s3.DeleteObjectInput{
Bucket: &bucket,
Key: &obj,
})
cancel()
// object servers will return no error, but the posix backend returns
// a non-standard directory not empty. This test is a posix only test
// to validate the specific error response.
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrDirectoryNotEmpty)); err != nil {
return err
}
return nil
})
}
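The comment above is why this check is posix-only: on the posix backend a "dir/" object is a real directory, removing it is an rmdir(2), and a directory that still has children fails with ENOTEMPTY, which the backend is expected to surface as the ErrDirectoryNotEmpty response (HTTP 400) registered earlier in this change set. A hypothetical sketch of that mapping, not the gateway's actual backend code:

package integration

import (
	"errors"
	"fmt"
	"os"
	"syscall"
)

// removeDirObject illustrates, under the assumptions stated above, why the
// error is posix-only: deleting a directory object is an rmdir, and a
// non-empty directory fails with ENOTEMPTY, which would be translated into
// the ErrDirectoryNotEmpty API error.
func removeDirObject(path string) error {
	err := os.Remove(path) // rmdir(2) for directory objects
	if errors.Is(err, syscall.ENOTEMPTY) {
		return fmt.Errorf("ErrDirectoryNotEmpty: directory object %q is not empty", path)
	}
	return err
}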
func DeleteObject_non_existing_dir_object(s *S3Conf) error {
testName := "DeleteObject_non_existing_dir_object"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -5349,6 +5533,29 @@ func GetObjectTagging_unset_tags(s *S3Conf) error {
})
}
func GetObjectTagging_invalid_parent(s *S3Conf) error {
testName := "GetObjectTagging_invalid_parent"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
obj := "not-a-dir"
_, err := putObjects(s3client, []string{obj}, bucket)
if err != nil {
return err
}
obj = "not-a-dir/bad-obj"
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
_, err = s3client.GetObjectTagging(ctx, &s3.GetObjectTaggingInput{
Bucket: &bucket,
Key: &obj,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrNoSuchKey)); err != nil {
return err
}
return nil
})
}
func GetObjectTagging_success(s *S3Conf) error {
testName := "PutObjectTagging_success"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -10133,9 +10340,15 @@ func WORMProtection_object_lock_legal_hold_locked(s *S3Conf) error {
}
_, err = putObjects(s3client, []string{object}, bucket)
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
if err := changeBucketObjectLockStatus(s3client, bucket, false); err != nil {
return err
@@ -10865,6 +11078,18 @@ func IAM_ChangeBucketOwner_back_to_root(s *S3Conf) error {
})
}
func IAM_ListBuckets(s *S3Conf) error {
testName := "IAM_ListBuckets"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
err := listBuckets(s)
if err != nil {
return err
}
return nil
})
}
// Posix related tests
func PutObject_overwrite_dir_obj(s *S3Conf) error {
testName := "PutObject_overwrite_dir_obj"
@@ -11516,6 +11741,34 @@ func Versioning_HeadObject_invalid_versionId(s *S3Conf) error {
})
}
func Versioning_HeadObject_invalid_parent(s *S3Conf) error {
testName := "Versioning_HeadObject_invalid_parent"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
dLen := int64(2000)
obj := "not-a-dir"
r, err := putObjectWithData(dLen, &s3.PutObjectInput{
Bucket: &bucket,
Key: &obj,
}, s3client)
if err != nil {
return err
}
obj = "not-a-dir/bad-obj"
ctx, cancel := context.WithTimeout(context.Background(), shortTimeout)
_, err = s3client.HeadObject(ctx, &s3.HeadObjectInput{
Bucket: &bucket,
Key: &obj,
VersionId: r.res.VersionId,
})
cancel()
if err := checkSdkApiErr(err, "NotFound"); err != nil {
return err
}
return nil
})
}
func Versioning_HeadObject_success(s *S3Conf) error {
testName := "Versioning_HeadObject_success"
return actionHandler(s, testName, func(s3client *s3.Client, bucket string) error {
@@ -13249,9 +13502,15 @@ func Versioning_WORM_obj_version_locked_with_legal_hold(s *S3Conf) error {
VersionId: version.VersionId,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
if err := changeBucketObjectLockStatus(s3client, bucket, false); err != nil {
return err
@@ -13294,9 +13553,15 @@ func Versioning_WORM_obj_version_locked_with_governance_retention(s *S3Conf) err
VersionId: version.VersionId,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
if err := changeBucketObjectLockStatus(s3client, bucket, false); err != nil {
return err
@@ -13339,9 +13604,15 @@ func Versioning_WORM_obj_version_locked_with_compliance_retention(s *S3Conf) err
VersionId: version.VersionId,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
if err := changeBucketObjectLockStatus(s3client, bucket, false); err != nil {
return err


@@ -213,6 +213,21 @@ func actionHandler(s *S3Conf, testName string, handler func(s3client *s3.Client,
return handlerErr
}
func actionHandlerNoSetup(s *S3Conf, testName string, handler func(s3client *s3.Client, bucket string) error, _ ...setupOpt) error {
runF(testName)
client := s3.NewFromConfig(s.Config())
handlerErr := handler(client, "")
if handlerErr != nil {
failF("%v: %v", testName, handlerErr)
}
if handlerErr == nil {
passF(testName)
}
return handlerErr
}
type authConfig struct {
testName string
path string
@@ -788,6 +803,18 @@ func changeBucketsOwner(s *S3Conf, buckets []string, owner string) error {
return nil
}
func listBuckets(s *S3Conf) error {
out, err := execCommand("admin", "-a", s.awsID, "-s", s.awsSecret, "-er", s.endpoint, "list-buckets")
if err != nil {
return err
}
if strings.Contains(string(out), adminErrorPrefix) {
return fmt.Errorf("failed to list buckets, %s", out)
}
return nil
}
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
func genRandString(length int) string {
@@ -895,9 +922,15 @@ func checkWORMProtection(client *s3.Client, bucket, object string) error {
Key: &object,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
ctx, cancel = context.WithTimeout(context.Background(), shortTimeout)
_, err = client.DeleteObject(ctx, &s3.DeleteObjectInput{
@@ -905,9 +938,15 @@ func checkWORMProtection(client *s3.Client, bucket, object string) error {
Key: &object,
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
ctx, cancel = context.WithTimeout(context.Background(), shortTimeout)
_, err = client.DeleteObjects(ctx, &s3.DeleteObjectsInput{
@@ -921,9 +960,15 @@ func checkWORMProtection(client *s3.Client, bucket, object string) error {
},
})
cancel()
if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
if err := checkSdkApiErr(err, "InvalidRequest"); err != nil {
return err
}
// client sdk regression issue prevents getting full error message,
// change back to below once this is fixed:
// https://github.com/aws/aws-sdk-go-v2/issues/2921
// if err := checkApiErr(err, s3err.GetAPIError(s3err.ErrObjectLocked)); err != nil {
// return err
// }
return nil
}


@@ -129,8 +129,10 @@ log_message() {
return 1
fi
now="$(date "+%Y-%m-%d %H:%M:%S")"
echo "$now $1 $2"
if [[ ( "$1" == "CRIT" ) || ( "$1" == "ERROR" ) ]]; then
echo "$now $1 $2" >&2
fi
if [[ -n "$TEST_LOG_FILE" ]]; then
echo "$now $1 $2" >> "$TEST_LOG_FILE"
echo "$now $1 $2" >> "$TEST_LOG_FILE.tmp"
fi
}


@@ -15,7 +15,7 @@
# under the License.
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util/util.sh
delete_bucket_if_exists() {
if [[ $# -ne 2 ]]; then


@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/rest_scripts/rest.sh
# Fields
# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
canonical_request="GET
/$bucket_name
acl=
host:$host
x-amz-content-sha256:UNSIGNED-PAYLOAD
x-amz-date:$current_date_time

host;x-amz-content-sha256;x-amz-date
UNSIGNED-PAYLOAD"
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" "$AWS_ENDPOINT_URL/$bucket_name?acl="
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
-H "\"x-amz-content-sha256: UNSIGNED-PAYLOAD\""
-H "\"x-amz-date: $current_date_time\""
-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1


@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/rest_scripts/rest.sh
# Fields
# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
canonical_request="GET
/$bucket_name
ownershipControls=
host:$host
x-amz-content-sha256:UNSIGNED-PAYLOAD
x-amz-date:$current_date_time

host;x-amz-content-sha256;x-amz-date
UNSIGNED-PAYLOAD"
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" "$AWS_ENDPOINT_URL/$bucket_name?ownershipControls="
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
-H "\"x-amz-content-sha256: UNSIGNED-PAYLOAD\""
-H "\"x-amz-date: $current_date_time\""
-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/rest_scripts/rest.sh
# Fields
# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
canonical_request="GET
/$bucket_name
policy=
host:$host
x-amz-content-sha256:UNSIGNED-PAYLOAD
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-date
UNSIGNED-PAYLOAD"
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" "$AWS_ENDPOINT_URL/$bucket_name?policy="
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
-H "\"x-amz-content-sha256: UNSIGNED-PAYLOAD\""
-H "\"x-amz-date: $current_date_time\""
-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/rest_scripts/rest.sh
# Fields
# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
# shellcheck disable=SC2153
ownership="$OWNERSHIP"
payload="<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<OwnershipControls xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">
<Rule>
<ObjectOwnership>$ownership</ObjectOwnership>
</Rule>
</OwnershipControls>"
content_md5=$(echo -n "$payload" | openssl dgst -binary -md5 | openssl base64)
payload_hash="$(echo -n "$payload" | sha256sum | awk '{print $1}')"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
canonical_request="PUT
/$bucket_name
ownershipControls=
content-md5:$content_md5
host:$host
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time
content-md5;host;x-amz-content-sha256;x-amz-date
$payload_hash"
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "$AWS_ENDPOINT_URL/$bucket_name?ownershipControls="
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=content-md5;host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
-H "\"Content-MD5: $content_md5\""
-H "\"x-amz-content-sha256: $payload_hash\""
-H "\"x-amz-date: $current_date_time\""
-d "\"${payload//\"/\\\"}\""
-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

View File

@@ -0,0 +1,51 @@
#!/usr/bin/env bash
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/rest_scripts/rest.sh
# Fields
# shellcheck disable=SC2153
bucket_name="$BUCKET_NAME"
# shellcheck disable=SC2153
policy_file="$POLICY_FILE"
payload="$(cat "$policy_file")"
payload_hash="$(echo -n "$payload" | sha256sum | awk '{print $1}')"
current_date_time=$(date -u +"%Y%m%dT%H%M%SZ")
canonical_request="PUT
/$bucket_name
policy=
host:$host
x-amz-content-sha256:$payload_hash
x-amz-date:$current_date_time
host;x-amz-content-sha256;x-amz-date
$payload_hash"
create_canonical_hash_sts_and_signature
curl_command+=(curl -ks -w "\"%{http_code}\"" -X PUT "$AWS_ENDPOINT_URL/$bucket_name?policy="
-H "\"Authorization: AWS4-HMAC-SHA256 Credential=$aws_access_key_id/$year_month_day/$aws_region/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=$signature\""
-H "\"x-amz-content-sha256: $payload_hash\""
-H "\"x-amz-date: $current_date_time\""
-d "\"${payload//\"/\\\"}\""
-o "$OUTPUT_FILE")
# shellcheck disable=SC2154
eval "${curl_command[*]}" 2>&1

View File

@@ -20,13 +20,20 @@ load ./bats-assert/load
source ./tests/env.sh
source ./tests/report.sh
source ./tests/setup_mc.sh
source ./tests/util.sh
source ./tests/util/util.sh
source ./tests/versity.sh
# bats setup function
setup() {
base_setup
if [ -n "$TEST_LOG_FILE" ]; then
if ! error=$(touch "$TEST_LOG_FILE.tmp" 2>&1); then
log 2 "error creating log file: $error"
exit 1
fi
fi
log 4 "Running test $BATS_TEST_NAME"
if [[ $LOG_LEVEL -ge 5 ]] || [[ -n "$TIME_LOG" ]]; then
start_time=$(date +%s)
@@ -48,19 +55,18 @@ setup() {
export AWS_PROFILE
}
# fail a test
# param: error message
#fail() {
# log 1 "$1"
# exit 1
#}
delete_temp_log_if_exists() {
if [ -e "$TEST_LOG_FILE.tmp" ]; then
if ! error=$(rm "$TEST_LOG_FILE.tmp" 2>&1); then
log 2 "error deleting temp log: $error"
return 1
fi
fi
return 0
}
# bats teardown function
teardown() {
if [[ ( "$BATS_TEST_COMPLETED" -ne 1 ) && ( -e "$COMMAND_LOG" ) ]]; then
cat "$COMMAND_LOG"
echo "**********************************************************************************"
fi
# shellcheck disable=SC2154
if ! bucket_cleanup_if_bucket_exists "s3api" "$BUCKET_ONE_NAME"; then
log 3 "error deleting bucket $BUCKET_ONE_NAME or contents"
@@ -68,12 +74,40 @@ teardown() {
if ! bucket_cleanup_if_bucket_exists "s3api" "$BUCKET_TWO_NAME"; then
log 3 "error deleting bucket $BUCKET_TWO_NAME or contents"
fi
if user_exists "$USERNAME_ONE" && ! delete_user "$USERNAME_ONE"; then
log 3 "error deleting user $USERNAME_ONE"
fi
if user_exists "$USERNAME_TWO" && ! delete_user "$USERNAME_TWO"; then
log 3 "error deleting user $USERNAME_TWO"
fi
if [ "$REMOVE_TEST_FILE_FOLDER" == "true" ]; then
log 6 "removing test file folder"
if ! error=$(rm -rf "${TEST_FILE_FOLDER:?}" 2>&1); then
log 3 "unable to remove test file folder: $error"
fi
fi
if [[ "$BATS_TEST_COMPLETED" -ne 1 ]]; then
if [[ -e "$COMMAND_LOG" ]]; then
cat "$COMMAND_LOG"
echo "**********************************************************************************"
fi
if [[ -e "$TEST_LOG_FILE.tmp" ]]; then
echo "********************************** LOG *******************************************"
cat "$TEST_LOG_FILE.tmp"
echo "**********************************************************************************"
fi
fi
if ! delete_command_log; then
log 3 "error deleting command log"
fi
if [ -e "$TEST_LOG_FILE.tmp" ]; then
if ! error=$(cat "$TEST_LOG_FILE.tmp" >> "$TEST_LOG_FILE" 2>&1); then
log 2 "error appending temp log to main log: $error"
fi
if ! delete_temp_log_if_exists; then
log 2 "error deleting temp log"
fi
fi
stop_versity
if [[ $LOG_LEVEL -ge 5 ]] || [[ -n "$TIME_LOG" ]]; then
end_time=$(date +%s)

View File

@@ -18,7 +18,7 @@ check_for_alias() {
local alias_result
aliases=$(mc alias list 2>&1) || alias_result=$?
if [[ $alias_result -ne 0 ]]; then
echo "error checking for aliases: $aliases"
log 2 "error checking for aliases: $aliases"
return 2
fi
while IFS= read -r line; do
@@ -32,7 +32,7 @@ check_for_alias() {
check_add_mc_alias() {
check_for_alias || alias_result=$?
if [[ $alias_result -eq 2 ]]; then
echo "error checking for aliases"
log 2 "error checking for aliases"
return 1
fi
if [[ $alias_result -eq 0 ]]; then
@@ -41,7 +41,7 @@ check_add_mc_alias() {
local set_result
error=$(mc alias set --insecure "$MC_ALIAS" "$AWS_ENDPOINT_URL" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY") || set_result=$?
if [[ $set_result -ne 0 ]]; then
echo "error setting alias: $error"
log 2 "error setting alias: $error"
return 1
fi
return 0

View File

@@ -15,7 +15,7 @@
# under the License.
source ./tests/env.sh
source ./tests/util.sh
source ./tests/util/util.sh
source ./tests/commands/create_bucket.sh
create_bucket_if_not_exists() {

View File

@@ -15,7 +15,7 @@
# under the License.
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util/util.sh
if ! base_setup; then
log 2 "error starting versity to set up static buckets"

View File

@@ -15,13 +15,13 @@
# under the License.
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_acl.sh
source ./tests/util_bucket_location.sh
source ./tests/util_file.sh
source ./tests/util_list_buckets.sh
source ./tests/util_policy.sh
source ./tests/util_presigned_url.sh
source ./tests/util/util.sh
source ./tests/util/util_acl.sh
source ./tests/util/util_bucket_location.sh
source ./tests/util/util_file.sh
source ./tests/util/util_list_buckets.sh
source ./tests/util/util_policy.sh
source ./tests/util/util_presigned_url.sh
source ./tests/commands/copy_object.sh
source ./tests/commands/delete_bucket_tagging.sh
source ./tests/commands/delete_object_tagging.sh
@@ -218,7 +218,7 @@ test_common_list_buckets() {
test_common_list_objects() {
if [[ $# -ne 1 ]]; then
echo "common test function for listing objects requires command type"
log 2 "common test function for listing objects requires command type"
return 1
fi
@@ -294,7 +294,7 @@ test_common_set_get_object_tags() {
test_common_presigned_url_utf8_chars() {
if [[ $# -ne 1 ]]; then
echo "presigned url command missing command type"
log 2 "presigned url command missing command type"
return 1
fi
@@ -382,12 +382,7 @@ test_common_get_put_delete_bucket_policy() {
assert_success
effect="Allow"
#principal="*"
if [[ $DIRECT == "true" ]]; then
principal="{\"AWS\": \"arn:aws:iam::$DIRECT_AWS_USER_ID:user/s3user\"}"
else
principal="\"*\""
fi
principal="*"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/*"

View File

@@ -15,13 +15,11 @@
# under the License.
test_put_bucket_acl_s3cmd() {
if [[ $DIRECT != "true" ]]; then
skip "https://github.com/versity/versitygw/issues/695"
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/963"
fi
run setup_bucket "s3cmd" "$BUCKET_ONE_NAME"
assert_success
run put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
run setup_bucket "s3cmd" "$BUCKET_ONE_NAME"
assert_success
username=$USERNAME_ONE
@@ -46,9 +44,6 @@ test_put_bucket_acl_s3cmd() {
}
test_common_put_bucket_acl() {
if [[ $RECREATE_BUCKETS == "false" ]]; then
skip "https://github.com/versity/versitygw/issues/716"
fi
assert [ $# -eq 1 ]
run setup_bucket "$1" "$BUCKET_ONE_NAME"

View File

@@ -16,9 +16,9 @@
source ./tests/test_common.sh
source ./tests/setup.sh
source ./tests/util_create_bucket.sh
source ./tests/util_head_bucket.sh
source ./tests/util_tags.sh
source ./tests/util/util_create_bucket.sh
source ./tests/util/util_head_bucket.sh
source ./tests/util/util_tags.sh
source ./tests/commands/delete_bucket_policy.sh
source ./tests/commands/get_bucket_policy.sh
source ./tests/commands/put_bucket_policy.sh

View File

@@ -28,17 +28,22 @@ source ./tests/commands/put_object_retention.sh
source ./tests/commands/put_object_tagging.sh
source ./tests/logger.sh
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_attributes.sh
source ./tests/util_legal_hold.sh
source ./tests/util_list_buckets.sh
source ./tests/util_list_objects.sh
source ./tests/util_list_parts.sh
source ./tests/util_lock_config.sh
source ./tests/util_rest.sh
source ./tests/util_tags.sh
source ./tests/util_time.sh
source ./tests/util_versioning.sh
source ./tests/util/util.sh
source ./tests/util/util_acl.sh
source ./tests/util/util_attributes.sh
source ./tests/util/util_legal_hold.sh
source ./tests/util/util_list_buckets.sh
source ./tests/util/util_list_objects.sh
source ./tests/util/util_list_parts.sh
source ./tests/util/util_lock_config.sh
source ./tests/util/util_ownership.sh
source ./tests/util/util_policy.sh
source ./tests/util/util_rest.sh
source ./tests/util/util_tags.sh
source ./tests/util/util_time.sh
source ./tests/util/util_versioning.sh
export RUN_USERS=true
@test "test_rest_list_objects" {
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
@@ -185,7 +190,6 @@ source ./tests/util_versioning.sh
}
@test "test_rest_versioning" {
skip "https://github.com/versity/versitygw/issues/864"
test_file="test_file"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
@@ -218,7 +222,6 @@ source ./tests/util_versioning.sh
}
@test "versioning - add version, then delete and check for marker" {
skip "https://github.com/versity/versitygw/issues/864"
test_file="test_file"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
@@ -394,4 +397,59 @@ source ./tests/util_versioning.sh
run add_verify_bucket_tags_rest "$BUCKET_ONE_NAME" "$test_key" "$test_value"
assert_success
}
@test "REST - get, put bucket ownership controls" {
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run get_and_check_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerEnforced"
assert_success
run put_bucket_ownership_controls_rest "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
assert_success
run get_and_check_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
assert_success
}
@test "REST - get policy w/o policy" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/959"
fi
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run get_and_check_no_policy_error "$BUCKET_ONE_NAME"
assert_success
}
@test "REST - put policy" {
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
assert_success
log 5 "username: ${lines[0]}"
log 5 "password: ${lines[1]}"
sleep 5
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/policy_file.txt" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success
run put_and_check_policy_rest "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/policy_file.txt" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success
}
@test "REST - get ACL" {
if [ "$DIRECT" != "true" ]; then
skip "https://github.com/versity/versitygw/issues/971"
fi
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run get_and_check_acl_rest "$BUCKET_ONE_NAME"
assert_success
}

View File

@@ -15,7 +15,7 @@
# under the License.
source ./tests/test_common.sh
source ./tests/util_file.sh
source ./tests/util/util_file.sh
# complete-multipart-upload
@test "test_complete_multipart_upload" {

View File

@@ -15,7 +15,7 @@
# under the License.
source ./tests/test_common.sh
source ./tests/util_file.sh
source ./tests/util/util_file.sh
@test "test_list_objects_file_count" {
test_common_list_objects_file_count "s3"

View File

@@ -15,14 +15,14 @@
# under the License.
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_aws.sh
source ./tests/util_create_bucket.sh
source ./tests/util_file.sh
source ./tests/util_lock_config.sh
source ./tests/util_tags.sh
source ./tests/util_users.sh
source ./tests/test_aws_root_inner.sh
source ./tests/util/util.sh
source ./tests/util/util_create_bucket.sh
source ./tests/util/util_file.sh
source ./tests/util/util_head_bucket.sh
source ./tests/util/util_lock_config.sh
source ./tests/util/util_tags.sh
source ./tests/util/util_users.sh
source ./tests/test_s3api_root_inner.sh
source ./tests/test_common.sh
source ./tests/test_common_acl.sh
source ./tests/commands/copy_object.sh
@@ -50,46 +50,67 @@ source ./tests/commands/select_object_content.sh
export RUN_USERS=true
# create-bucket
@test "test_create_delete_bucket_aws" {
test_common_create_delete_bucket "aws"
}
@test "test_create_bucket_invalid_name" {
test_create_bucket_invalid_name_aws_root
if [[ $RECREATE_BUCKETS != "true" ]]; then
return
fi
run create_and_check_bucket_invalid_name "s3api"
assert_success
}
# delete-bucket - test_create_delete_bucket_aws
# create-bucket
@test "test_create_delete_bucket_s3api" {
test_common_create_delete_bucket "s3api"
}
# delete-bucket - test_create_delete_bucket_s3api
# delete-bucket-policy
@test "test_get_put_delete_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_common_get_put_delete_bucket_policy "aws"
test_common_get_put_delete_bucket_policy "s3api"
}
# delete-bucket-tagging
@test "test-set-get-delete-bucket-tags" {
test_common_set_get_delete_bucket_tags "aws"
}
# get-bucket-acl
@test "test_get_bucket_acl" {
test_get_bucket_acl_aws_root
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run get_bucket_acl_and_check_owner "s3api" "$BUCKET_ONE_NAME"
assert_success
}
# get-bucket-location
@test "test_get_bucket_location" {
test_common_get_bucket_location "aws"
test_common_get_bucket_location "s3api"
}
# get-bucket-policy - test_get_put_delete_bucket_policy
# get-bucket-tagging - test_set_get_delete_bucket_tags
@test "test_head_bucket" {
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run check_for_empty_region "$BUCKET_ONE_NAME"
assert_success
}
@test "test_head_bucket_doesnt_exist" {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "skip test for static buckets"
fi
run bucket_info_without_bucket
assert_success
}
@test "test_head_bucket_invalid_name" {
if head_bucket "aws" ""; then
if head_bucket "s3api" ""; then
fail "able to get bucket info for invalid name"
fi
}
@@ -103,23 +124,7 @@ export RUN_USERS=true
test_common_put_bucket_acl "s3api"
}
@test "test_head_bucket" {
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
head_bucket "aws" "$BUCKET_ONE_NAME" || fail "error getting bucket info"
log 5 "INFO: $bucket_info"
region=$(echo "$bucket_info" | grep -v "InsecureRequestWarning" | jq -r ".BucketRegion" 2>&1) || fail "error getting bucket region: $region"
[[ $region != "" ]] || fail "empty bucket region"
bucket_cleanup "aws" "$BUCKET_ONE_NAME"
}
@test "test_head_bucket_doesnt_exist" {
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
head_bucket "aws" "$BUCKET_ONE_NAME"a || local info_result=$?
[[ $info_result -eq 1 ]] || fail "bucket info for non-existent bucket returned"
[[ $bucket_info == *"404"* ]] || fail "404 not returned for non-existent bucket info"
bucket_cleanup "aws" "$BUCKET_ONE_NAME"
# delete-bucket-tagging
@test "test-set-get-delete-bucket-tags" {
test_common_set_get_delete_bucket_tags "s3api"
}

View File

@@ -15,51 +15,112 @@
# under the License.
source ./tests/setup.sh
source ./tests/test_aws_root_inner.sh
source ./tests/util_file.sh
source ./tests/util_multipart.sh
source ./tests/util_tags.sh
source ./tests/test_s3api_root_inner.sh
source ./tests/util/util_file.sh
source ./tests/util/util_multipart.sh
source ./tests/util/util_multipart_abort.sh
source ./tests/util/util_tags.sh
source ./tests/commands/get_object.sh
source ./tests/commands/put_object.sh
source ./tests/commands/list_multipart_uploads.sh
# abort-multipart-upload
@test "test_abort_multipart_upload" {
test_abort_multipart_upload_aws_root
local bucket_file="bucket-file"
run create_test_file "$bucket_file"
assert_success
# shellcheck disable=SC2154
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
assert_success
run object_exists "s3api" "$BUCKET_ONE_NAME" "$bucket_file"
assert_failure 1
}
# complete-multipart-upload
@test "test_complete_multipart_upload" {
test_complete_multipart_upload_aws_root
local bucket_file="bucket-file"
run create_test_files "$bucket_file"
assert_success
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
assert_success
run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy"
assert_success
}
# create-multipart-upload
@test "test_create_multipart_upload_properties" {
test_create_multipart_upload_properties_aws_root
}
local bucket_file="bucket-file"
# test multi-part upload list parts command
@test "test-multipart-upload-list-parts" {
test_multipart_upload_list_parts_aws_root
}
local expected_content_type="application/zip"
local expected_meta_key="testKey"
local expected_meta_val="testValue"
local expected_hold_status="ON"
local expected_retention_mode="GOVERNANCE"
local expected_tag_key="TestTag"
local expected_tag_val="TestTagVal"
# test listing of active uploads
@test "test-multipart-upload-list-uploads" {
local bucket_file_one="bucket-file-one"
local bucket_file_two="bucket-file-two"
os_name="$(uname)"
if [[ "$os_name" == "Darwin" ]]; then
now=$(date -u +"%Y-%m-%dT%H:%M:%S")
later=$(date -j -v +15S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S")
else
now=$(date +"%Y-%m-%dT%H:%M:%S")
later=$(date -d "$now 15 seconds" +"%Y-%m-%dT%H:%M:%S")
fi
if [[ $RECREATE_BUCKETS == false ]]; then
run abort_all_multipart_uploads "$BUCKET_ONE_NAME"
run create_test_files "$bucket_file"
assert_success
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run bucket_cleanup_if_bucket_exists "s3api" "$BUCKET_ONE_NAME"
assert_success
# in static bucket config, bucket will still exist
if ! bucket_exists "s3api" "$BUCKET_ONE_NAME"; then
run create_bucket_object_lock_enabled "$BUCKET_ONE_NAME"
assert_success
fi
run create_test_files "$bucket_file_one" "$bucket_file_two"
run multipart_upload_with_params "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4 \
"$expected_content_type" \
"{\"$expected_meta_key\": \"$expected_meta_val\"}" \
"$expected_hold_status" \
"$expected_retention_mode" \
"$later" \
"$expected_tag_key=$expected_tag_val"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run get_and_verify_metadata "$bucket_file" "$expected_content_type" "$expected_meta_key" "$expected_meta_val" \
"$expected_hold_status" "$expected_retention_mode" "$later"
assert_success
run create_list_check_multipart_uploads "$BUCKET_ONE_NAME" "$bucket_file_one" "$bucket_file_two"
run check_verify_object_tags "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$expected_tag_key" "$expected_tag_val"
assert_success
run put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF"
assert_success
run get_and_check_legal_hold "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "OFF"
assert_success
run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy"
assert_success
}
@@ -72,7 +133,7 @@ source ./tests/commands/list_multipart_uploads.sh
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
@@ -90,7 +151,7 @@ source ./tests/commands/list_multipart_uploads.sh
run create_large_file "$bucket_file"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run multipart_upload_range_too_large "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file"
@@ -102,9 +163,49 @@ source ./tests/commands/list_multipart_uploads.sh
run create_large_file "$bucket_file"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run run_and_verify_multipart_upload_with_valid_range "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file"
assert_success
}
# test multi-part upload list parts command
@test "test-multipart-upload-list-parts" {
local bucket_file="bucket-file"
run create_test_file "$bucket_file" 0
assert_success
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run start_multipart_upload_list_check_parts "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file"
assert_success
run run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file" 4
assert_success
}
# test listing of active uploads
@test "test-multipart-upload-list-uploads" {
local bucket_file_one="bucket-file-one"
local bucket_file_two="bucket-file-two"
if [[ $RECREATE_BUCKETS == false ]]; then
run abort_all_multipart_uploads "$BUCKET_ONE_NAME"
assert_success
fi
run create_test_files "$bucket_file_one" "$bucket_file_two"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run create_list_check_multipart_uploads "$BUCKET_ONE_NAME" "$bucket_file_one" "$bucket_file_two"
assert_success
}

View File

@@ -15,14 +15,13 @@
# under the License.
source ./tests/setup.sh
source ./tests/util.sh
source ./tests/util_aws.sh
source ./tests/util_create_bucket.sh
source ./tests/util_file.sh
source ./tests/util_lock_config.sh
source ./tests/util_tags.sh
source ./tests/util_users.sh
source ./tests/test_aws_root_inner.sh
source ./tests/util/util.sh
source ./tests/util/util_create_bucket.sh
source ./tests/util/util_file.sh
source ./tests/util/util_lock_config.sh
source ./tests/util/util_tags.sh
source ./tests/util/util_users.sh
source ./tests/test_s3api_root_inner.sh
source ./tests/test_common.sh
source ./tests/test_common_acl.sh
source ./tests/commands/copy_object.sh
@@ -56,14 +55,15 @@ export RUN_USERS=true
}
@test "test_copy_object_empty" {
copy_object_empty || fail "copy objects with no parameters test failure"
run copy_object_empty
assert_success
}
# delete-object - tested with bucket cleanup before or after tests
# delete-object-tagging
@test "test_delete_object_tagging" {
test_common_delete_object_tagging "aws"
test_common_delete_object_tagging "s3api"
}
# delete-objects
@@ -71,72 +71,34 @@ export RUN_USERS=true
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "https://github.com/versity/versitygw/issues/888"
fi
test_delete_objects_aws_root
test_delete_objects_s3api_root
}
# get-object
@test "test_get_object_full_range" {
test_get_object_full_range_aws_root
test_get_object_full_range_s3api_root
}
@test "test_get_object_invalid_range" {
test_get_object_invalid_range_aws_root
test_get_object_invalid_range_s3api_root
}
# get-object-attributes
@test "test_get_object_attributes" {
test_get_object_attributes_aws_root
test_get_object_attributes_s3api_root
}
@test "test_put_object" {
test_put_object_aws_root
@test "test_get_put_object_legal_hold" {
test_get_put_object_legal_hold_s3api_root
}
# test adding and removing an object on versitygw
@test "test_put_object_with_data" {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "https://github.com/versity/versitygw/issues/888"
fi
test_common_put_object_with_data "aws"
}
@test "test_put_object_no_data" {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "https://github.com/versity/versitygw/issues/888"
fi
test_common_put_object_no_data "aws"
@test "test_get_put_object_retention" {
test_get_put_object_retention_s3api_root
}
# test listing a bucket's objects on versitygw
@test "test_list_objects" {
test_common_list_objects "aws"
}
@test "test_get_put_object_legal_hold" {
test_get_put_object_legal_hold_aws_root
}
@test "test_get_put_object_retention" {
test_get_put_object_retention_aws_root
}
# test v1 s3api list objects command
@test "test-s3api-list-objects-v1" {
test_s3api_list_objects_v1_aws_root
}
# test v2 s3api list objects command
@test "test-s3api-list-objects-v2" {
test_s3api_list_objects_v2_aws_root
}
# test ability to set and retrieve object tags
@test "test-set-get-object-tags" {
test_common_set_get_object_tags "aws"
}
@test "test-presigned-url-utf8-chars" {
test_common_presigned_url_utf8_chars "aws"
test_common_list_objects "s3api"
}
@test "test-list-objects-delimiter" {
@@ -149,67 +111,37 @@ export RUN_USERS=true
run create_test_file "$folder_name"/"$object_name"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_object "aws" "$TEST_FILE_FOLDER/$folder_name/$object_name" "$BUCKET_ONE_NAME" "$folder_name/$object_name"
run put_object "s3api" "$TEST_FILE_FOLDER/$folder_name/$object_name" "$BUCKET_ONE_NAME" "$folder_name/$object_name"
assert_success
run check_object_listing_with_prefixes "$BUCKET_ONE_NAME" "$folder_name" "$object_name"
assert_success
}
# ensure that lists of more than 1000 files (pagination) are returned properly
#@test "test_list_objects_file_count" {
# test_common_list_objects_file_count "aws"
#}
# ensure that lists of more than 1000 files (pagination) are returned properly
#@test "test_list_objects_file_count" {
# test_common_list_objects_file_count "aws"
#}
#@test "test_filename_length" {
# file_name=$(printf "%0.sa" $(seq 1 1025))
# echo "$file_name"
# create_test_files "$file_name" || created=$?
# [[ $created -eq 0 ]] || fail "error creating file"
# setup_bucket "aws" "$BUCKET_ONE_NAME" || local setup_result=$?
# [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
# put_object "aws" "$TEST_FILE_FOLDER"/"$file_name" "$BUCKET_ONE_NAME"/"$file_name" || local put_object=$?
# [[ $put_object -eq 0 ]] || fail "Failed to add object to bucket"
#}
@test "test_retention_bypass" {
test_retention_bypass_aws_root
@test "test_put_object" {
test_put_object_s3api_root
}
@test "test_add_object_metadata" {
object_one="object-one"
test_key="x-test-data"
test_value="test-value"
# test adding and removing an object on versitygw
@test "test_put_object_with_data" {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "https://github.com/versity/versitygw/issues/888"
fi
test_common_put_object_with_data "s3api"
}
run create_test_files "$object_one"
assert_success
@test "test_put_object_no_data" {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "https://github.com/versity/versitygw/issues/888"
fi
test_common_put_object_no_data "s3api"
}
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
object="$TEST_FILE_FOLDER"/"$object_one"
put_object_with_metadata "aws" "$object" "$BUCKET_ONE_NAME" "$object_one" "$test_key" "$test_value" || fail "failed to add object to bucket"
object_exists "aws" "$BUCKET_ONE_NAME" "$object_one" || fail "object not found after being added to bucket"
get_object_metadata "aws" "$BUCKET_ONE_NAME" "$object_one" || fail "error getting object metadata"
key=$(echo "$metadata" | jq -r 'keys[]' 2>&1) || fail "error getting key from metadata: $key"
value=$(echo "$metadata" | jq -r '.[]' 2>&1) || fail "error getting value from metadata: $value"
[[ $key == "$test_key" ]] || fail "keys doesn't match (expected $key, actual \"$test_key\")"
[[ $value == "$test_value" ]] || fail "values doesn't match (expected $value, actual \"$test_value\")"
bucket_cleanup "aws" "$BUCKET_ONE_NAME"
delete_test_files "$object_one"
@test "test-presigned-url-utf8-chars" {
test_common_presigned_url_utf8_chars "s3api"
}
@test "test_put_object_lock_configuration" {
@@ -229,10 +161,74 @@ export RUN_USERS=true
run get_and_check_object_lock_config "$bucket_name" "$enabled" "$governance" "$days"
assert_success "error getting and checking object lock config"
bucket_cleanup "aws" "$bucket_name"
}
@test "test_put_object_metadata" {
object_one="object-one"
test_key="x-test-data"
test_value="test-value"
run create_test_files "$object_one"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
object="$TEST_FILE_FOLDER"/"$object_one"
run put_object_with_metadata "s3api" "$object" "$BUCKET_ONE_NAME" "$object_one" "$test_key" "$test_value"
assert_success
run object_exists "s3api" "$BUCKET_ONE_NAME" "$object_one"
assert_success
run get_object_metadata_and_check_keys "$BUCKET_ONE_NAME" "$object_one" "$test_key" "$test_value"
assert_success
}
@test "test_retention_bypass" {
test_retention_bypass_s3api_root
}
# test v1 s3api list objects command
@test "test-s3api-list-objects-v1" {
test_s3api_list_objects_v1_s3api_root
}
# test v2 s3api list objects command
@test "test-s3api-list-objects-v2" {
test_s3api_list_objects_v2_s3api_root
}
# test ability to set and retrieve object tags
@test "test-set-get-object-tags" {
test_common_set_get_object_tags "s3api"
}
# ensure that lists of more than 1000 files (pagination) are returned properly
#@test "test_list_objects_file_count" {
# test_common_list_objects_file_count "s3api"
#}
# ensure that lists of more than 1000 files (pagination) are returned properly
#@test "test_list_objects_file_count" {
# test_common_list_objects_file_count "s3api"
#}
#@test "test_filename_length" {
# file_name=$(printf "%0.sa" $(seq 1 1025))
# echo "$file_name"
# create_test_files "$file_name" || created=$?
# [[ $created -eq 0 ]] || fail "error creating file"
# setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$?
# [[ $setup_result -eq 0 ]] || fail "error setting up bucket"
# put_object "s3api" "$TEST_FILE_FOLDER"/"$file_name" "$BUCKET_ONE_NAME"/"$file_name" || local put_object=$?
# [[ $put_object -eq 0 ]] || fail "Failed to add object to bucket"
#}
@test "test_ls_directory_object" {
test_common_ls_directory_object "s3api"
}

View File

@@ -16,11 +16,15 @@
source ./tests/logger.sh
source ./tests/setup.sh
source ./tests/util_multipart.sh
source ./tests/util_file.sh
source ./tests/util_policy.sh
source ./tests/util_tags.sh
source ./tests/util_users.sh
source ./tests/test_s3api_policy_bucket.sh
source ./tests/test_s3api_policy_multipart.sh
source ./tests/test_s3api_policy_object.sh
source ./tests/util/util_multipart.sh
source ./tests/util/util_multipart_abort.sh
source ./tests/util/util_file.sh
source ./tests/util/util_policy.sh
source ./tests/util/util_tags.sh
source ./tests/util/util_users.sh
source ./tests/commands/get_bucket_policy.sh
source ./tests/commands/get_bucket_tagging.sh
source ./tests/commands/get_object.sh
@@ -30,25 +34,60 @@ source ./tests/commands/put_object.sh
export RUN_USERS=true
@test "test_put_policy_invalid_action" {
@test "test_policy_abort_multipart_upload" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_invalid_action
test_s3api_policy_abort_multipart_upload
}
@test "test_policy_get_object_with_user" {
@test "test_policy_allow_deny" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_object_with_user
test_s3api_policy_allow_deny
}
@test "test_policy_get_object_specific_file" {
@test "test_policy_delete" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_object_specific_file
test_s3api_policy_delete
}
@test "test_policy_delete_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_delete_bucket_policy
}
@test "test_policy_deny" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_deny
}
@test "test_policy_get_bucket_acl" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_bucket_acl
}
@test "test_policy_get_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_bucket_policy
}
@test "test_policy_get_bucket_tagging" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_bucket_tagging
}
@test "test_policy_get_object_file_wildcard" {
@@ -65,39 +104,18 @@ export RUN_USERS=true
test_s3api_policy_get_object_folder_wildcard
}
@test "test_policy_allow_deny" {
@test "test_policy_get_object_specific_file" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_allow_deny
test_s3api_policy_get_object_specific_file
}
@test "test_policy_deny" {
@test "test_policy_get_object_with_user" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_deny
}
@test "test_policy_put_wildcard" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_put_wildcard
}
@test "test_policy_delete" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_delete
}
@test "test_policy_get_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_bucket_policy
test_s3api_policy_get_object_with_user
}
@test "test_policy_list_multipart_uploads" {
@@ -107,55 +125,6 @@ export RUN_USERS=true
test_s3api_policy_list_multipart_uploads
}
@test "test_policy_put_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_put_bucket_policy
}
@test "test_policy_delete_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_delete_bucket_policy
}
@test "test_policy_get_bucket_acl" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_bucket_acl
}
@test "test_policy_abort_multipart_upload" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_abort_multipart_upload
}
@test "test_policy_two_principals" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_two_principals
}
@test "test_policy_put_bucket_tagging" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_put_bucket_tagging
}
@test "test_policy_get_bucket_tagging" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
test_s3api_policy_get_bucket_tagging
}
@test "test_policy_list_upload_parts" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
@@ -170,673 +139,37 @@ export RUN_USERS=true
test_s3api_policy_put_acl
}
test_s3api_policy_invalid_action() {
policy_file="policy_file"
run create_test_file "$policy_file"
assert_success
effect="Allow"
principal="*"
action="s3:GetObjectt"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/*"
# shellcheck disable=SC2154
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run check_for_empty_policy "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_and_check_for_malformed_policy "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
}
test_s3api_policy_get_object_with_user() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
test_file="test_file"
log 5 "username: $USERNAME_ONE, password: $PASSWORD_ONE"
run create_test_files "$test_file" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_file"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run setup_user "$username" "$password" "user"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_specific_file() {
policy_file="policy_file"
test_file="test_file"
test_file_two="test_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$policy_file" "$test_file" "$test_file_two"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/test_file"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "error copying object"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file_two" "$BUCKET_ONE_NAME" "$test_file_two" || fail "error copying object"
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$TEST_FILE_FOLDER/$test_file_two-copy" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_file_wildcard() {
policy_file="policy_file_one"
policy_file_two="policy_file_two"
policy_file_three="policy_fil"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$policy_file" "$policy_file_two" "$policy_file_three"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/policy_file*"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$policy_file" "$BUCKET_ONE_NAME" "$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$policy_file_two" "$BUCKET_ONE_NAME" "$policy_file_two"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$policy_file_three" "$BUCKET_ONE_NAME" "$policy_file_three"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$policy_file" "$BUCKET_ONE_NAME" "$policy_file" "$TEST_FILE_FOLDER/$policy_file-copy" "$username" "$password"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$policy_file_two" "$BUCKET_ONE_NAME" "$policy_file_two" "$TEST_FILE_FOLDER/$policy_file_two-copy" "$username" "$password"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$policy_file_three" "$TEST_FILE_FOLDER/$policy_file_three" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_folder_wildcard() {
policy_file="policy_file"
test_folder="test_folder"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_folder "$test_folder"
assert_success
run create_test_files "$test_folder/$test_file" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_folder/*"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
test_s3api_policy_allow_deny() {
policy_file="policy_file"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$policy_file" "$test_file"
assert_success
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_double_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" \
"Deny" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file" \
"Allow" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
test_s3api_policy_deny() {
policy_file="policy_file"
test_file_one="test_file_one"
test_file_two="test_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$test_file_one" "$test_file_two" "$policy_file"
assert_success
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
setup_policy_with_double_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" \
"Deny" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file_two" \
"Allow" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
log 5 "Policy: $(cat "$TEST_FILE_FOLDER/$policy_file")"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_one" || fail "error copying object one"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_two" || fail "error copying object two"
get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_one" "$TEST_FILE_FOLDER/$test_file_one-copy" "$username" "$password" || fail "error getting object"
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$TEST_FILE_FOLDER/$test_file_two-copy" "$username" "$password"
assert_success
}
test_s3api_policy_put_wildcard() {
policy_file="policy_file"
test_folder="test_folder"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_folder "$test_folder"
assert_success
run create_test_files "$test_folder/$test_file" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:PutObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_folder/*"
setup_user "$username" "$password" "user" || fail "error creating user"
setup_bucket "s3api" "$BUCKET_ONE_NAME"
log 5 "Policy: $(cat "$TEST_FILE_FOLDER/$policy_file")"
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
if put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"; then
fail "able to put object despite not being allowed"
@test "test_policy_put_bucket_policy" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
# shellcheck disable=SC2154
[[ "$put_object_error" == *"Access Denied"* ]] || fail "invalid put object error: $put_object_error"
put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$username" "$password" || fail "error putting file despite policy permissions"
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$test_folder/$test_file-copy" "$username" "$password"
assert_success
download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" || fail "files don't match"
test_s3api_policy_put_bucket_policy
}
test_s3api_policy_delete() {
policy_file="policy_file"
test_file_one="test_file_one"
test_file_two="test_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$test_file_one" "$test_file_two" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:DeleteObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_file_two"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
log 5 "Policy: $(cat "$TEST_FILE_FOLDER/$policy_file")"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_one" || fail "error copying object one"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file_two" "$BUCKET_ONE_NAME" "$test_file_two" || fail "error copying object two"
if delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_one" "$username" "$password"; then
fail "able to delete object despite lack of permissions"
@test "test_policy_put_bucket_tagging" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
# shellcheck disable=SC2154
[[ "$delete_object_error" == *"Access Denied"* ]] || fail "invalid delete object error: $delete_object_error"
delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$username" "$password" || fail "error deleting object despite permissions"
test_s3api_policy_put_bucket_tagging
}
test_s3api_policy_get_bucket_policy() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:GetBucketPolicy"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
if get_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password"; then
fail "able to retrieve bucket policy despite lack of permissions"
@test "test_policy_two_principals" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
get_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password" || fail "error getting bucket policy despite permissions"
# shellcheck disable=SC2154
echo "$bucket_policy" > "$TEST_FILE_FOLDER/$policy_file-copy"
log 5 "ORIG: $(cat "$TEST_FILE_FOLDER/$policy_file")"
log 5 "COPY: $(cat "$TEST_FILE_FOLDER/$policy_file-copy")"
compare_files "$TEST_FILE_FOLDER/$policy_file" "$TEST_FILE_FOLDER/$policy_file-copy" || fail "policies not equal"
test_s3api_policy_two_principals
}
test_s3api_policy_list_multipart_uploads() {
policy_file="policy_file"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file"
assert_success
run create_large_file "$test_file"
assert_success
effect="Allow"
principal="$username"
action="s3:ListBucketMultipartUploads"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
get_bucket_policy "s3api" "$BUCKET_ONE_NAME" || fail "error getting bucket policy"
log 5 "BUCKET POLICY: $bucket_policy"
get_bucket_acl "s3api" "$BUCKET_ONE_NAME" || fail "error getting bucket ACL"
# shellcheck disable=SC2154
log 5 "ACL: $acl"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success "failed to set up policy"
run create_multipart_upload "$BUCKET_ONE_NAME" "$test_file"
assert_success "failed to create multipart upload"
if list_multipart_uploads_with_user "$BUCKET_ONE_NAME" "$username" "$password"; then
fail "able to list multipart uploads despite lack of permissions"
@test "test_policy_put_wildcard" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
# shellcheck disable=SC2154
[[ "$list_multipart_uploads_error" == *"Access Denied"* ]] || fail "invalid list multipart uploads error: $list_multipart_uploads_error"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
list_multipart_uploads_with_user "$BUCKET_ONE_NAME" "$username" "$password" || fail "error listing multipart uploads"
# shellcheck disable=SC2154
log 5 "$uploads"
upload_key=$(echo "$uploads" | grep -v "InsecureRequestWarning" | jq -r ".Uploads[0].Key" 2>&1) || fail "error parsing upload key from uploads message: $upload_key"
[[ $upload_key == "$test_file" ]] || fail "upload key doesn't match file marked as being uploaded"
test_s3api_policy_put_wildcard
}
test_s3api_policy_put_bucket_policy() {
policy_file="policy_file"
policy_file_two="policy_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
effect="Allow"
principal="$username"
action="s3:PutBucketPolicy"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
if put_bucket_policy_with_user "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" "$username" "$password"; then
fail "able to retrieve bucket policy despite lack of permissions"
@test "test_put_policy_invalid_action" {
if [[ -n $SKIP_POLICY ]]; then
skip "will not test policy actions with SKIP_POLICY set"
fi
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file_two" "dummy" "$effect" "$principal" "s3:GetBucketPolicy" "$resource" || fail "failed to set up policy"
put_bucket_policy_with_user "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file_two" "$username" "$password" || fail "error putting bucket policy despite permissions"
get_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password" || fail "error getting bucket policy despite permissions"
# shellcheck disable=SC2154
echo "$bucket_policy" > "$TEST_FILE_FOLDER/$policy_file-copy"
log 5 "ORIG: $(cat "$TEST_FILE_FOLDER/$policy_file_two")"
log 5 "COPY: $(cat "$TEST_FILE_FOLDER/$policy_file-copy")"
compare_files "$TEST_FILE_FOLDER/$policy_file_two" "$TEST_FILE_FOLDER/$policy_file-copy" || fail "policies not equal"
}
test_s3api_policy_delete_bucket_policy() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
effect="Allow"
principal="$username"
action="s3:DeleteBucketPolicy"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if delete_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password"; then
fail "able to delete bucket policy with user $username without right permissions"
fi
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
delete_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password" || fail "unable to delete bucket policy"
}
test_s3api_policy_get_bucket_acl() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
effect="Allow"
principal="$username"
action="s3:GetBucketAcl"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
setup_user "$username" "$password" "user" || fail "error creating user"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if get_bucket_acl_with_user "$BUCKET_ONE_NAME" "$username" "$password"; then
fail "user able to get bucket ACLs despite permissions"
fi
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource" || fail "failed to set up policy"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
get_bucket_acl_with_user "$BUCKET_ONE_NAME" "$username" "$password" || fail "error getting bucket ACL despite permissions"
}
test_s3api_policy_abort_multipart_upload() {
policy_file="policy_file"
test_file="test_file"
username=$USERNAME_ONE
run create_test_file "$policy_file"
assert_success
run create_large_file "$test_file"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if [[ $DIRECT == "true" ]]; then
setup_user_direct "$username" "user" "$BUCKET_ONE_NAME" || fail "error setting up direct user $username"
principal="{\"AWS\": \"arn:aws:iam::$DIRECT_AWS_USER_ID:user/$username\"}"
# shellcheck disable=SC2154
username=$key_id
# shellcheck disable=SC2154
password=$secret_key
else
password=$PASSWORD_ONE
setup_user "$username" "$password" "user" || fail "error setting up user $username"
principal="\"$username\""
fi
setup_policy_with_double_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" \
"Allow" "$principal" "s3:PutObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*" \
"Deny" "$principal" "s3:AbortMultipartUpload" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting first policy"
create_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$username" "$password" || fail "error creating multipart upload"
# shellcheck disable=SC2154
if abort_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$upload_id" "$username" "$password"; then
fail "abort multipart upload succeeded despite lack of permissions"
fi
# shellcheck disable=SC2154
[[ "$abort_multipart_upload_error" == *"AccessDenied"* ]] || fail "unexpected abort error: $abort_multipart_upload_error"
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$principal" "s3:AbortMultipartUpload" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
abort_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$upload_id" "$username" "$password" || fail "error aborting multipart upload despite permissions"
}
test_s3api_policy_two_principals() {
policy_file="policy_file"
test_file="test_file"
run create_test_files "$test_file" "$policy_file"
assert_success "error creating test files"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success "error setting up bucket $BUCKET_ONE_NAME"
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error setting up user $USERNAME_ONE"
run setup_user "$USERNAME_TWO" "$PASSWORD_TWO" "user"
assert_success "error setting up user $USERNAME_TWO"
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success "error adding object to bucket"
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_one" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_failure "able to get object with user $USERNAME_ONE despite lack of permission"
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_two" "$USERNAME_TWO" "$PASSWORD_TWO"
assert_failure "able to get object with user $USERNAME_TWO despite lack of permission"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "Allow" "[\"$USERNAME_ONE\", \"$USERNAME_TWO\"]" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success "error setting up policy"
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_one" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_success "error getting object with user $USERNAME_ONE"
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_two" "$USERNAME_TWO" "$PASSWORD_TWO"
assert_success "error getting object with user $USERNAME_TWO"
}
test_s3api_policy_put_bucket_tagging() {
policy_file="policy_file"
tag_key="TestKey"
tag_value="TestValue"
run create_test_files "$policy_file"
assert_success "error creating test files"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success "error setting up bucket"
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error setting up user"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success "error setting up policy"
run put_bucket_tagging_with_user "$BUCKET_ONE_NAME" "$tag_key" "$tag_value" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_failure
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run put_bucket_tagging_with_user "$BUCKET_ONE_NAME" "$tag_key" "$tag_value" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_success "unable to put bucket tagging despite user permissions"
run get_and_check_bucket_tags "$BUCKET_ONE_NAME" "$tag_key" "$tag_value"
assert_success
}
test_s3api_policy_put_acl() {
policy_file="policy_file"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
run create_large_file "$test_file"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred" || fail "error putting bucket ownership controls"
setup_user "$username" "$password" "user" || fail "error setting up user $username"
setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$username" "s3:PutBucketAcl" "arn:aws:s3:::$BUCKET_ONE_NAME"
if [[ $DIRECT == "true" ]]; then
put_public_access_block_enable_public_acls "$BUCKET_ONE_NAME" || fail "error enabling public ACLs"
fi
put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" || fail "error putting policy"
put_bucket_canned_acl_with_user "$BUCKET_ONE_NAME" "public-read" "$username" "$password" || fail "error putting canned acl"
get_bucket_acl "s3api" "$BUCKET_ONE_NAME" || fail "error getting bucket acl"
# shellcheck disable=SC2154
log 5 "ACL: $acl"
second_grant=$(echo "$acl" | jq -r ".Grants[1]" 2>&1) || fail "error getting second grant: $second_grant"
second_grantee=$(echo "$second_grant" | jq -r ".Grantee" 2>&1) || fail "error getting second grantee: $second_grantee"
permission=$(echo "$second_grant" | jq -r ".Permission" 2>&1) || fail "error getting permission: $permission"
log 5 "second grantee: $second_grantee"
[[ $permission == "READ" ]] || fail "incorrect permission: $permission"
if [[ $DIRECT == "true" ]]; then
uri=$(echo "$second_grantee" | jq -r ".URI" 2>&1) || fail "error getting uri: $uri"
[[ $uri == "http://acs.amazonaws.com/groups/global/AllUsers" ]] || fail "unexpected URI: $uri"
else
id=$(echo "$second_grantee" | jq -r ".ID" 2>&1) || fail "error getting ID: $id"
[[ $id == "all-users" ]] || fail "unexpected ID: $id"
fi
}
test_s3api_policy_get_bucket_tagging() {
policy_file="policy_file"
test_file="test_file"
tag_key="TestKey"
tag_value="TestValue"
run create_test_files "$policy_file"
assert_success "error creating test files"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error creating user '$USERNAME_ONE'"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:GetBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success "error setting up policy"
run put_bucket_tagging "s3api" "$BUCKET_ONE_NAME" "$tag_key" "$tag_value"
assert_success "unable to put bucket tagging"
run get_bucket_tagging_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME"
assert_failure
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run get_and_check_bucket_tags_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME" "$tag_key" "$tag_value"
assert_success "get and check bucket tags failed"
}
test_s3api_policy_list_upload_parts() {
policy_file="policy_file"
test_file="test_file"
tag_key="TestKey"
tag_value="TestValue"
run create_test_files "$policy_file"
assert_success "error creating test files"
run create_large_file "$test_file"
assert_success "error creating large file"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success "error setting up bucket"
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error creating user '$USERNAME_ONE'"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success "error setting up policy"
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run create_upload_and_test_parts_listing "$test_file" "$policy_file"
assert_success "error creating upload and testing parts listing"
}
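# The setup_policy_with_single_statement helper used throughout these tests is
# defined elsewhere; for reference, a minimal sketch of the kind of policy
# document it presumably writes, and how such a file can be applied by hand, is
# below. The function name write_single_statement_policy and the exact JSON
# layout are illustrative assumptions, not the helper's implementation.
write_single_statement_policy() {
  local file="$1" version="$2" effect="$3" principal="$4" action="$5" resource="$6"
  cat > "$file" <<EOF
{
  "Version": "$version",
  "Statement": [
    {
      "Effect": "$effect",
      "Principal": {"AWS": ["$principal"]},
      "Action": ["$action"],
      "Resource": ["$resource"]
    }
  ]
}
EOF
}
# Example (hypothetical values), applied with the standard AWS CLI:
#   write_single_statement_policy policy.json "2012-10-17" "Allow" "testuser" "s3:GetBucketAcl" "arn:aws:s3:::my-bucket"
#   aws --no-verify-ssl s3api put-bucket-policy --bucket my-bucket --policy "file://policy.json"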

View File

@@ -0,0 +1,244 @@
#!/usr/bin/env bats
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/util/util_acl.sh
test_s3api_policy_delete_bucket_policy() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
effect="Allow"
principal="$username"
action="s3:DeleteBucketPolicy"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run delete_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password"
assert_failure
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run delete_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password"
assert_success
}
test_s3api_policy_get_bucket_acl() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
effect="Allow"
principal="$username"
action="s3:GetBucketAcl"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run get_bucket_acl_with_user "$BUCKET_ONE_NAME" "$username" "$password"
assert_failure
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run get_bucket_acl_with_user "$BUCKET_ONE_NAME" "$username" "$password"
assert_success
}
test_s3api_policy_get_bucket_policy() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:GetBucketPolicy"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run get_bucket_policy_with_user "$BUCKET_ONE_NAME" "$username" "$password"
assert_failure
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run get_and_compare_policy_with_file "$BUCKET_ONE_NAME" "$username" "$password" "$TEST_FILE_FOLDER/$policy_file"
assert_success
}
test_s3api_policy_get_bucket_tagging() {
policy_file="policy_file"
test_file="test_file"
tag_key="TestKey"
tag_value="TestValue"
run create_test_files "$policy_file"
assert_success "error creating test files"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error creating user '$USERNAME_ONE'"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:GetBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success "error setting up policy"
run put_bucket_tagging "s3api" "$BUCKET_ONE_NAME" "$tag_key" "$tag_value"
assert_success "unable to put bucket tagging"
run get_bucket_tagging_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME"
assert_failure
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run get_and_check_bucket_tags_with_user "$USERNAME_ONE" "$PASSWORD_ONE" "$BUCKET_ONE_NAME" "$tag_key" "$tag_value"
assert_success "get and check bucket tags failed"
}
test_s3api_policy_put_acl() {
policy_file="policy_file"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
run create_large_file "$test_file"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
assert_success
run setup_user "$username" "$password" "user"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$username" "s3:PutBucketAcl" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success
if [[ $DIRECT == "true" ]]; then
run put_public_access_block_enable_public_acls "$BUCKET_ONE_NAME"
assert_success
fi
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_bucket_canned_acl_with_user "$BUCKET_ONE_NAME" "public-read" "$username" "$password"
assert_success
run get_check_acl_after_policy "$BUCKET_ONE_NAME"
assert_success
}
test_s3api_policy_put_bucket_policy() {
policy_file="policy_file"
policy_file_two="policy_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_file "$policy_file" 0
assert_success
effect="Allow"
principal="$username"
action="s3:PutBucketPolicy"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy_with_user "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file" "$username" "$password"
assert_failure
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file_two" "dummy" "$effect" "$principal" "s3:GetBucketPolicy" "$resource"
assert_success
run put_bucket_policy_with_user "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file_two" "$username" "$password"
assert_success
run get_and_compare_policy_with_file "$BUCKET_ONE_NAME" "$username" "$password" "$TEST_FILE_FOLDER/$policy_file_two"
assert_success
}
test_s3api_policy_put_bucket_tagging() {
policy_file="policy_file"
tag_key="TestKey"
tag_value="TestValue"
run create_test_files "$policy_file"
assert_success "error creating test files"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success "error setting up bucket"
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error setting up user"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutBucketTagging" "arn:aws:s3:::$BUCKET_ONE_NAME"
assert_success "error setting up policy"
run put_bucket_tagging_with_user "$BUCKET_ONE_NAME" "$tag_key" "$tag_value" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_failure
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run put_bucket_tagging_with_user "$BUCKET_ONE_NAME" "$tag_key" "$tag_value" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_success "unable to put bucket tagging despite user permissions"
run get_and_check_bucket_tags "$BUCKET_ONE_NAME" "$tag_key" "$tag_value"
assert_success
}
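# Every *_with_user helper above runs an s3api call under a test user's
# credentials rather than root's. A minimal sketch of that pattern, assuming the
# key pair is simply exported for the single invocation (get_bucket_tagging_as
# is an illustrative name, not a helper from this suite):
get_bucket_tagging_as() {
  local bucket="$1" user="$2" pass="$3"
  AWS_ACCESS_KEY_ID="$user" AWS_SECRET_ACCESS_KEY="$pass" \
    aws --no-verify-ssl s3api get-bucket-tagging --bucket "$bucket"
}
# Before the policy is attached, a call like this is expected to exit non-zero
# with an access-denied error, which is what the assert_failure checks above
# rely on; after put_bucket_policy it should succeed.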

View File

@@ -0,0 +1,126 @@
#!/usr/bin/env bats
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/util/util_multipart_abort.sh
test_s3api_policy_abort_multipart_upload() {
policy_file="policy_file"
test_file="test_file"
run create_large_file "$test_file"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
assert_success
# shellcheck disable=SC2154
username=${lines[0]}
password=${lines[1]}
run setup_policy_with_double_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" \
"Allow" "$USERNAME_ONE" "s3:PutObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*" \
"Deny" "$USERNAME_ONE" "s3:AbortMultipartUpload" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success
# shellcheck disable=SC2154
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run create_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success
# shellcheck disable=SC2154
upload_id="$output"
run check_abort_access_denied "$BUCKET_ONE_NAME" "$test_file" "$upload_id" "$username" "$password"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:AbortMultipartUpload" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run abort_multipart_upload_with_user "$BUCKET_ONE_NAME" "$test_file" "$upload_id" "$username" "$password"
assert_success
}
test_s3api_policy_list_multipart_uploads() {
policy_file="policy_file"
test_file="test_file"
run create_test_file "$policy_file"
assert_success
run create_large_file "$test_file"
assert_success
effect="Allow"
principal="$USERNAME_ONE"
action="s3:ListBucketMultipartUploads"
resource="arn:aws:s3:::$BUCKET_ONE_NAME"
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
assert_success
username=${lines[0]}
password=${lines[1]}
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run create_multipart_upload "$BUCKET_ONE_NAME" "$test_file"
assert_success
run list_multipart_uploads_with_user "$BUCKET_ONE_NAME" "$username" "$password"
assert_failure
assert_output -p "Access Denied"
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run list_check_multipart_upload_key "$BUCKET_ONE_NAME" "$username" "$password" "$test_file"
assert_success
}
test_s3api_policy_list_upload_parts() {
policy_file="policy_file"
test_file="test_file"
run create_test_files "$policy_file"
assert_success "error creating test files"
run create_large_file "$test_file"
assert_success "error creating large file"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success "error setting up bucket"
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success "error creating user '$USERNAME_ONE'"
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "Allow" "$USERNAME_ONE" "s3:PutObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success "error setting up policy"
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success "error putting policy"
run create_upload_and_test_parts_listing "$test_file" "$policy_file"
assert_success "error creating upload and testing parts listing"
}
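# The abort test above first attaches a two-statement policy: Allow s3:PutObject
# plus Deny s3:AbortMultipartUpload on the same resource. A sketch of what such
# a document looks like (the exact JSON layout produced by
# setup_policy_with_double_statement is assumed, not quoted):
#   {"Statement": [
#     {"Effect": "Allow", "Principal": {"AWS": ["testuser"]},
#      "Action": ["s3:PutObject"],            "Resource": ["arn:aws:s3:::bucket/*"]},
#     {"Effect": "Deny",  "Principal": {"AWS": ["testuser"]},
#      "Action": ["s3:AbortMultipartUpload"], "Resource": ["arn:aws:s3:::bucket/*"]}
#   ]}
# The call the Deny statement blocks maps onto the standard CLI operation:
#   aws --no-verify-ssl s3api abort-multipart-upload \
#     --bucket "$BUCKET_ONE_NAME" --key "$test_file" --upload-id "$upload_id"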

View File

@@ -0,0 +1,385 @@
#!/usr/bin/env bats
# Copyright 2024 Versity Software
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
source ./tests/util/util_delete_object.sh
test_s3api_policy_allow_deny() {
policy_file="policy_file"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$policy_file" "$test_file"
assert_success
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_double_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" \
"Deny" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file" \
"Allow" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
test_s3api_policy_delete() {
policy_file="policy_file"
test_file_one="test_file_one"
test_file_two="test_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$test_file_one" "$test_file_two" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:DeleteObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_file_two"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
log 5 "Policy: $(cat "$TEST_FILE_FOLDER/$policy_file")"
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_one"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file_two" "$BUCKET_ONE_NAME" "$test_file_two"
assert_success
run block_delete_object_without_permission "$BUCKET_ONE_NAME" "$test_file_one" "$username" "$password"
assert_success
run delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$username" "$password"
assert_success
}
test_s3api_policy_deny() {
policy_file="policy_file"
test_file_one="test_file_one"
test_file_two="test_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$test_file_one" "$test_file_two" "$policy_file"
assert_success
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_double_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" \
"Deny" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/$test_file_two" \
"Allow" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success
log 5 "Policy: $(cat "$TEST_FILE_FOLDER/$policy_file")"
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_one"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file_one" "$BUCKET_ONE_NAME" "$test_file_two"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file_one" "$TEST_FILE_FOLDER/$test_file_one-copy" "$username" "$password"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$TEST_FILE_FOLDER/$test_file_two-copy" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_file_wildcard() {
policy_file="policy_file_one"
policy_file_two="policy_file_two"
policy_file_three="policy_fil"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$policy_file" "$policy_file_two" "$policy_file_three"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/policy_file*"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$policy_file" "$BUCKET_ONE_NAME" "$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$policy_file_two" "$BUCKET_ONE_NAME" "$policy_file_two"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$policy_file_three" "$BUCKET_ONE_NAME" "$policy_file_three"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$policy_file" "$BUCKET_ONE_NAME" "$policy_file" "$TEST_FILE_FOLDER/$policy_file-copy" "$username" "$password"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$policy_file_two" "$BUCKET_ONE_NAME" "$policy_file_two" "$TEST_FILE_FOLDER/$policy_file_two-copy" "$username" "$password"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$policy_file_three" "$TEST_FILE_FOLDER/$policy_file_three" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_folder_wildcard() {
policy_file="policy_file"
test_folder="test_folder"
test_file="test_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_folder "$test_folder"
assert_success
run create_test_files "$test_folder/$test_file" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_folder/*"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_specific_file() {
policy_file="policy_file"
test_file="test_file"
test_file_two="test_file_two"
username=$USERNAME_ONE
password=$PASSWORD_ONE
run create_test_files "$policy_file" "$test_file" "$test_file_two"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/test_file"
run setup_user "$username" "$password" "user"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file_two" "$BUCKET_ONE_NAME" "$test_file_two"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file_two" "$TEST_FILE_FOLDER/$test_file_two-copy" "$username" "$password"
assert_success
}
test_s3api_policy_get_object_with_user() {
policy_file="policy_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
test_file="test_file"
log 5 "username: $USERNAME_ONE, password: $PASSWORD_ONE"
run create_test_files "$test_file" "$policy_file"
assert_success
effect="Allow"
principal="$username"
action="s3:GetObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_file"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run setup_user "$username" "$password" "user"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "2012-10-17" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run download_and_compare_file_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
test_s3api_policy_invalid_action() {
policy_file="policy_file"
run create_test_file "$policy_file"
assert_success
effect="Allow"
principal="*"
action="s3:GetObjectt"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/*"
# shellcheck disable=SC2154
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run check_for_empty_policy "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_and_check_for_malformed_policy "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
}
test_s3api_policy_put_wildcard() {
policy_file="policy_file"
test_folder="test_folder"
test_file="test_file"
run create_test_folder "$test_folder"
assert_success
run create_test_files "$test_folder/$test_file" "$policy_file"
assert_success
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
assert_success
# shellcheck disable=SC2154
username=${lines[0]}
password=${lines[1]}
effect="Allow"
principal="$username"
action="s3:PutObject"
resource="arn:aws:s3:::$BUCKET_ONE_NAME/$test_folder/*"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "$effect" "$principal" "$action" "$resource"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_failure
# shellcheck disable=SC2154
assert_output -p "Access Denied"
run put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$username" "$password"
assert_success
run verify_user_cant_get_object "s3api" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$test_folder/$test_file-copy" "$username" "$password"
assert_success
run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$test_folder/$test_file" "$BUCKET_ONE_NAME" "$test_folder/$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
}
test_s3api_policy_two_principals() {
policy_file="policy_file"
test_file="test_file"
run create_test_files "$test_file" "$policy_file"
assert_success
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run setup_user "$USERNAME_ONE" "$PASSWORD_ONE" "user"
assert_success
run setup_user "$USERNAME_TWO" "$PASSWORD_TWO" "user"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_one" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_failure
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_two" "$USERNAME_TWO" "$PASSWORD_TWO"
assert_failure
run setup_policy_with_single_statement "$TEST_FILE_FOLDER/$policy_file" "dummy" "Allow" "$USERNAME_ONE,$USERNAME_TWO" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success
run put_bucket_policy "s3api" "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/$policy_file"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_one" "$USERNAME_ONE" "$PASSWORD_ONE"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/copy_two" "$USERNAME_TWO" "$PASSWORD_TWO"
assert_success
}
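# verify_user_cant_get_object is used above to assert both that a download fails
# and that it fails for the right reason. A minimal sketch of that check,
# assuming a plain CLI call and an AccessDenied match on stderr (verify_get_denied
# is an illustrative name, not a helper from this suite):
verify_get_denied() {
  local bucket="$1" key="$2" dest="$3" user="$4" pass="$5"
  if AWS_ACCESS_KEY_ID="$user" AWS_SECRET_ACCESS_KEY="$pass" \
      aws --no-verify-ssl s3api get-object --bucket "$bucket" --key "$key" "$dest" 2>"$dest.err"; then
    return 1  # the download unexpectedly succeeded
  fi
  grep -q "AccessDenied" "$dest.err"
}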

View File

@@ -17,111 +17,14 @@
source ./tests/commands/delete_objects.sh
source ./tests/commands/list_objects_v2.sh
source ./tests/commands/list_parts.sh
source ./tests/util_get_bucket_acl.sh
source ./tests/util_get_object_attributes.sh
source ./tests/util_get_object_retention.sh
source ./tests/util_head_object.sh
source ./tests/util_legal_hold.sh
source ./tests/util_list_objects.sh
source ./tests/util/util_get_bucket_acl.sh
source ./tests/util/util_get_object_attributes.sh
source ./tests/util/util_get_object_retention.sh
source ./tests/util/util_head_object.sh
source ./tests/util/util_legal_hold.sh
source ./tests/util/util_list_objects.sh
test_abort_multipart_upload_aws_root() {
local bucket_file="bucket-file"
run create_test_file "$bucket_file"
assert_success
# shellcheck disable=SC2154
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
run run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
assert_success
run object_exists "aws" "$BUCKET_ONE_NAME" "$bucket_file"
assert_failure 1
}
test_complete_multipart_upload_aws_root() {
local bucket_file="bucket-file"
run create_test_files "$bucket_file"
assert_success
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
run multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4
assert_success
run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy"
assert_success
}
test_create_multipart_upload_properties_aws_root() {
local bucket_file="bucket-file"
local expected_content_type="application/zip"
local expected_meta_key="testKey"
local expected_meta_val="testValue"
local expected_hold_status="ON"
local expected_retention_mode="GOVERNANCE"
local expected_tag_key="TestTag"
local expected_tag_val="TestTagVal"
os_name="$(uname)"
if [[ "$os_name" == "Darwin" ]]; then
now=$(date -u +"%Y-%m-%dT%H:%M:%S")
later=$(date -j -v +15S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S")
else
now=$(date +"%Y-%m-%dT%H:%M:%S")
later=$(date -d "$now 15 seconds" +"%Y-%m-%dT%H:%M:%S")
fi
run create_test_files "$bucket_file"
assert_success
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run bucket_cleanup_if_bucket_exists "s3api" "$BUCKET_ONE_NAME"
assert_success
# in static bucket config, bucket will still exist
if ! bucket_exists "s3api" "$BUCKET_ONE_NAME"; then
run create_bucket_object_lock_enabled "$BUCKET_ONE_NAME"
assert_success
fi
run multipart_upload_with_params "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file" 4 \
"$expected_content_type" \
"{\"$expected_meta_key\": \"$expected_meta_val\"}" \
"$expected_hold_status" \
"$expected_retention_mode" \
"$later" \
"$expected_tag_key=$expected_tag_val"
assert_success
run get_and_verify_metadata "$bucket_file" "$expected_content_type" "$expected_meta_key" "$expected_meta_val" \
"$expected_hold_status" "$expected_retention_mode" "$later"
assert_success
run check_verify_object_tags "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$expected_tag_key" "$expected_tag_val"
assert_success
run put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF"
assert_success
run get_and_check_legal_hold "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "OFF"
assert_success
run download_and_compare_file "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file-copy" || fail "error getting object"
assert_success
}
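# The retain-until timestamp computed above ("$later") feeds the multipart
# upload's object-lock parameters. With the plain AWS CLI that presumably
# corresponds to flags like the following (a sketch, not the
# multipart_upload_with_params helper itself):
#   aws --no-verify-ssl s3api create-multipart-upload \
#     --bucket "$BUCKET_ONE_NAME" --key "$bucket_file" \
#     --content-type "$expected_content_type" \
#     --metadata "{\"$expected_meta_key\":\"$expected_meta_val\"}" \
#     --object-lock-legal-hold-status "$expected_hold_status" \
#     --object-lock-mode "$expected_retention_mode" \
#     --object-lock-retain-until-date "$later" \
#     --tagging "$expected_tag_key=$expected_tag_val"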
test_delete_objects_aws_root() {
test_delete_objects_s3api_root() {
local object_one="test-file-one"
local object_two="test-file-two"
@@ -147,19 +50,7 @@ test_delete_objects_aws_root() {
assert_failure 1
}
test_get_bucket_acl_aws_root() {
# TODO remove when able to assign bucket ownership back to root
if [[ $RECREATE_BUCKETS == "false" ]]; then
skip
fi
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
run get_bucket_acl_and_check_owner "s3api" "$BUCKET_ONE_NAME"
assert_success
}
test_get_object_full_range_aws_root() {
test_get_object_full_range_s3api_root() {
bucket_file="bucket_file"
run create_test_files "$bucket_file" 0
@@ -178,7 +69,7 @@ test_get_object_full_range_aws_root() {
assert [ "$(cat "$TEST_FILE_FOLDER/$bucket_file-range")" == "9" ]
}
test_get_object_invalid_range_aws_root() {
test_get_object_invalid_range_s3api_root() {
bucket_file="bucket_file"
run create_test_files "$bucket_file"
assert_success
@@ -193,7 +84,7 @@ test_get_object_invalid_range_aws_root() {
assert_success
}
test_put_object_aws_root() {
test_put_object_s3api_root() {
bucket_file="bucket_file"
run create_test_files "$bucket_file"
@@ -212,16 +103,7 @@ test_put_object_aws_root() {
assert_success
}
test_create_bucket_invalid_name_aws_root() {
if [[ $RECREATE_BUCKETS != "true" ]]; then
return
fi
run create_and_check_bucket_invalid_name "aws"
assert_success
}
test_get_object_attributes_aws_root() {
test_get_object_attributes_s3api_root() {
bucket_file="bucket_file"
run create_test_file "$bucket_file"
assert_success
@@ -236,12 +118,7 @@ test_get_object_attributes_aws_root() {
assert_success
}
test_get_put_object_legal_hold_aws_root() {
if [[ $RECREATE_BUCKETS == "false" ]]; then
# https://github.com/versity/versitygw/issues/716
skip
fi
test_get_put_object_legal_hold_s3api_root() {
bucket_file="bucket_file"
username=$USERNAME_ONE
password=$PASSWORD_ONE
@@ -260,9 +137,8 @@ test_get_put_object_legal_hold_aws_root() {
echo "fdkljafajkfs" > "$TEST_FILE_FOLDER/$bucket_file"
run put_object_with_user "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password"
assert_failure 1
# shellcheck disable=SC2154
#[[ $put_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $put_object_error"
assert_failure
assert_output --partial "Object is WORM protected and cannot be overwritten"
run delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password"
assert_failure 1
@@ -276,16 +152,11 @@ test_get_put_object_legal_hold_aws_root() {
assert_success
}
test_get_put_object_retention_aws_root() {
test_get_put_object_retention_s3api_root() {
bucket_file="bucket_file"
username=$USERNAME_ONE
secret_key=$PASSWORD_ONE
if [[ $RECREATE_BUCKETS == "false" ]]; then
# https://github.com/versity/versitygw/issues/716
skip
fi
run legal_hold_retention_setup "$username" "$secret_key" "$bucket_file"
assert_success
@@ -307,21 +178,17 @@ test_get_put_object_retention_aws_root() {
echo "fdkljafajkfs" > "$TEST_FILE_FOLDER/$bucket_file"
run put_object_with_user "s3api" "$TEST_FILE_FOLDER/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key"
assert_failure 1
assert_failure
# shellcheck disable=SC2154
assert_output --partial "Object is WORM protected and cannot be overwritten"
run delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key"
assert_failure 1
assert_failure
# shellcheck disable=SC2154
assert_output --partial "Object is WORM protected and cannot be overwritten"
}
test_retention_bypass_aws_root() {
if [[ $RECREATE_BUCKETS == "false" ]]; then
# https://github.com/versity/versitygw/issues/716
skip
fi
test_retention_bypass_s3api_root() {
bucket_file="bucket_file"
username=$USERNAME_ONE
secret_key=$PASSWORD_ONE
@@ -382,14 +249,14 @@ legal_hold_retention_setup() {
assert_success
}
test_s3api_list_objects_v1_aws_root() {
test_s3api_list_objects_v1_s3api_root() {
local object_one="test-file-one"
local object_two="test-file-two"
run create_test_files "$object_one" "$object_two"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER"/"$object_one" "$BUCKET_ONE_NAME" "$object_one"
@@ -402,14 +269,14 @@ test_s3api_list_objects_v1_aws_root() {
assert_success
}
test_s3api_list_objects_v2_aws_root() {
test_s3api_list_objects_v2_s3api_root() {
local object_one="test-file-one"
local object_two="test-file-two"
run create_test_files "$object_one" "$object_two"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER"/"$object_one" "$BUCKET_ONE_NAME" "$object_one"
@@ -421,21 +288,3 @@ test_s3api_list_objects_v2_aws_root() {
run list_check_objects_v2 "$BUCKET_ONE_NAME" "$object_one" 10 "$object_two" 10
assert_success
}
test_multipart_upload_list_parts_aws_root() {
local bucket_file="bucket-file"
run create_test_file "$bucket_file" 0
assert_success
run dd if=/dev/urandom of="$TEST_FILE_FOLDER/$bucket_file" bs=5M count=1
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
assert_success
run start_multipart_upload_list_check_parts "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER"/"$bucket_file"
assert_success
run run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$TEST_FILE_FOLDER/$bucket_file" 4
assert_success
}

View File

@@ -17,9 +17,9 @@
source ./tests/setup.sh
source ./tests/test_common.sh
source ./tests/test_common_acl.sh
source ./tests/util.sh
source ./tests/util_create_bucket.sh
source ./tests/util_users.sh
source ./tests/util/util.sh
source ./tests/util/util_create_bucket.sh
source ./tests/util/util_users.sh
source ./tests/commands/delete_bucket_policy.sh
source ./tests/commands/get_bucket_policy.sh
source ./tests/commands/put_bucket_policy.sh
@@ -47,12 +47,9 @@ export RUN_USERS=true
return
fi
create_bucket_invalid_name "s3cmd" || local create_result=$?
[[ $create_result -eq 0 ]] || fail "Invalid name test failed"
[[ "$bucket_create_error" == *"just the bucket name"* ]] || fail "unexpected error: $bucket_create_error"
bucket_cleanup "s3cmd" "$BUCKET_ONE_NAME"
run create_bucket_invalid_name "s3cmd"
assert_success
assert_output -p "just the bucket name"
}
# delete-bucket - test_create_delete_bucket
@@ -93,6 +90,7 @@ export RUN_USERS=true
}
@test "test_put_bucket_acl" {
skip "https://github.com/versity/versitygw/issues/963"
test_put_bucket_acl_s3cmd
}
@@ -113,19 +111,18 @@ export RUN_USERS=true
run setup_bucket "s3cmd" "$BUCKET_ONE_NAME"
assert_success
head_bucket "s3cmd" "$BUCKET_ONE_NAME"
[[ $bucket_info == *"s3://$BUCKET_ONE_NAME"* ]] || fail "failure to retrieve correct bucket info: $bucket_info"
bucket_cleanup "s3cmd" "$BUCKET_ONE_NAME"
run head_bucket "s3cmd" "$BUCKET_ONE_NAME"
assert_success
assert_output -p "s3://$BUCKET_ONE_NAME"
}
@test "test_get_bucket_info_doesnt_exist_s3cmd" {
run setup_bucket "s3cmd" "$BUCKET_ONE_NAME"
assert_success
head_bucket "s3cmd" "$BUCKET_ONE_NAME"a || local info_result=$?
[[ $info_result -eq 1 ]] || fail "bucket info for non-existent bucket returned"
[[ $bucket_info == *"404"* ]] || fail "404 not returned for non-existent bucket info"
bucket_cleanup "s3cmd" "$BUCKET_ONE_NAME"
run head_bucket "s3cmd" "$BUCKET_ONE_NAME"a
assert_failure 1
assert_output -p "404"
}
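# head_bucket with the s3cmd client presumably wraps something along the lines
# of `s3cmd info`; a sketch of the two cases the assertions above cover:
#   s3cmd --no-check-certificate info "s3://$BUCKET_ONE_NAME"    # succeeds and prints "s3://<bucket>..."
#   s3cmd --no-check-certificate info "s3://$BUCKET_ONE_NAME"a   # fails with a 404 for a nonexistent bucket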
@test "test_ls_directory_object" {

View File

@@ -15,7 +15,7 @@
# under the License.
source ./tests/test_user_common.sh
source ./tests/util_users.sh
source ./tests/util/util_users.sh
source ./tests/commands/get_object.sh
source ./tests/commands/put_object.sh
@@ -44,11 +44,12 @@ export RUN_USERS=true
}
@test "test_user_get_object" {
username="$USERNAME_ONE"
password="$USERNAME_ONE"
test_file="test_file"
setup_user "$username" "$password" "user" || fail "error creating user if nonexistent"
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
assert_success
username=${lines[0]}
password=${lines[1]}
run create_test_file "$test_file"
assert_success
@@ -56,20 +57,26 @@ export RUN_USERS=true
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"; then
fail "able to get object despite not being bucket owner"
fi
change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "failed to add object to bucket"
get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" || fail "error getting object"
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_failure
run change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
@test "test_userplus_get_object" {
username="$USERNAME_ONE"
password="$PASSWORD_ONE"
test_file="test_file"
setup_user "$username" "$password" "admin" || fail "error creating user if nonexistent"
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "userplus" "$BUCKET_ONE_NAME"
assert_success
username=${lines[0]}
password=${lines[1]}
run create_test_file "$test_file"
assert_success
@@ -77,20 +84,26 @@ export RUN_USERS=true
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"; then
fail "able to get object despite not being bucket owner"
fi
change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "failed to add object to bucket"
get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" || fail "error getting object"
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_failure
run change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username"
assert_success
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
}
@test "test_user_delete_object" {
username="$USERNAME_ONE"
password="$PASSWORD_ONE"
test_file="test_file"
setup_user "$username" "$password" "user" || fail "error creating user if nonexistent"
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "user" "$BUCKET_ONE_NAME"
assert_success
username=${lines[0]}
password=${lines[1]}
run create_test_file "$test_file"
assert_success
@@ -98,23 +111,29 @@ export RUN_USERS=true
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"; then
fail "able to get object despite not being bucket owner"
fi
change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "failed to add object to bucket"
delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password" || fail "error deleting object"
run put_object "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file"
assert_success
run delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_failure
run change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username"
assert_success
run delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success
}
@test "test_admin_put_get_object" {
if [ "$RECREATE_BUCKETS" == "false" ]; then
skip "https://github.com/versity/versitygw/issues/888"
fi
username="$USERNAME_ONE"
password="$PASSWORD_ONE"
test_file="test_file"
setup_user "$username" "$password" "admin" || fail "error creating user if nonexistent"
run setup_user_versitygw_or_direct "$USERNAME_ONE" "$PASSWORD_ONE" "admin" "$BUCKET_ONE_NAME"
assert_success
username=${lines[0]}
password=${lines[1]}
run create_test_file "$test_file"
assert_success
@@ -122,30 +141,30 @@ export RUN_USERS=true
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password" || fail "failed to add object to bucket"
get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password" || fail "error getting object"
compare_files "$TEST_FILE_FOLDER/$test_file" "$TEST_FILE_FOLDER/$test_file-copy" || fail "files don't match"
list_object_versions "$BUCKET_ONE_NAME"
log 5 "versions: $versions"
run put_object_with_user "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success
run get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy" "$username" "$password"
assert_success
run compare_files "$TEST_FILE_FOLDER/$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_success
run delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success
list_object_versions "$BUCKET_ONE_NAME"
log 5 "versions: $versions"
if get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"; then
fail "file not successfully deleted"
fi
# shellcheck disable=SC2154
[[ "$get_object_error" == *"NoSuchKey"* ]] || fail "unexpected error message: $get_object_error"
bucket_cleanup "s3api" "$BUCKET_ONE_NAME"
delete_test_files "$test_file" "$test_file-copy"
run get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file-copy"
assert_failure
assert_output -p "NoSuchKey"
}
@test "test_user_create_multipart_upload" {
username="$USERNAME_ONE"
password="$PASSWORD_ONE"
test_file="test_file"
setup_user "$username" "$password" "user" || fail "error creating user if nonexistent"
run setup_user_versitygw_or_direct "$USERNAME_TWO" "$PASSWORD_TWO" "user" "$BUCKET_ONE_NAME"
assert_success
username=${lines[0]}
password=${lines[1]}
run create_large_file "$test_file"
assert_success
@@ -153,6 +172,12 @@ export RUN_USERS=true
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
create_multipart_upload_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password" || fail "unable to create multipart upload"
run create_multipart_upload_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password"
assert_failure
run change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username"
assert_success
run create_multipart_upload_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password"
assert_success
}
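# Sketch (assumption) of the call create_multipart_upload_with_user issues under
# the test user's credentials; until change_bucket_owner hands the bucket to that
# user it is expected to fail, which is what the first assert_failure checks:
#   AWS_ACCESS_KEY_ID="$username" AWS_SECRET_ACCESS_KEY="$password" \
#     aws --no-verify-ssl s3api create-multipart-upload \
#     --bucket "$BUCKET_ONE_NAME" --key "dummy"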

View File

@@ -15,10 +15,10 @@
# under the License.
source ./tests/setup.sh
source ./tests/util_users.sh
source ./tests/util.sh
source ./tests/util_create_bucket.sh
source ./tests/util_list_buckets.sh
source ./tests/util/util.sh
source ./tests/util/util_create_bucket.sh
source ./tests/util/util_list_buckets.sh
source ./tests/util/util_users.sh
source ./tests/commands/list_buckets.sh
test_admin_user() {
@@ -41,7 +41,7 @@ test_admin_user() {
run create_user_with_user "$admin_username" "$admin_password" "$user_username" "$user_password" "user"
assert_success
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if [ "$RECREATE_BUCKETS" == "true" ]; then
@@ -52,7 +52,7 @@ test_admin_user() {
assert_success
fi
run list_and_check_buckets_with_user "aws" "$BUCKET_ONE_NAME" "$BUCKET_TWO_NAME" "$admin_username" "$admin_password"
run list_and_check_buckets_with_user "s3api" "$BUCKET_ONE_NAME" "$BUCKET_TWO_NAME" "$admin_username" "$admin_password"
assert_success
run change_bucket_owner "$admin_username" "$admin_password" "$BUCKET_TWO_NAME" "$user_username"
@@ -89,25 +89,25 @@ test_user_user() {
password="$PASSWORD_ONE"
setup_user "$username" "$password" "user" || fail "error setting up user"
bucket_cleanup_if_bucket_exists "aws" "versity-gwtest-user-bucket"
bucket_cleanup_if_bucket_exists "s3api" "versity-gwtest-user-bucket"
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
if create_bucket_with_user "aws" "versity-gwtest-user-bucket" "$username" "$password"; then
if create_bucket_with_user "s3api" "versity-gwtest-user-bucket" "$username" "$password"; then
fail "creating bucket with 'user' account failed to return error"
fi
# shellcheck disable=SC2154
[[ $error == *"Access Denied"* ]] || fail "error message '$error' doesn't contain 'Access Denied'"
create_bucket "aws" "versity-gwtest-user-bucket" || fail "error creating bucket"
create_bucket "s3api" "versity-gwtest-user-bucket" || fail "error creating bucket"
change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "versity-gwtest-user-bucket" "$username" || fail "error changing bucket owner"
if change_bucket_owner "$username" "$password" "versity-gwtest-user-bucket" "admin"; then
fail "user shouldn't be able to change bucket owner"
fi
list_buckets_with_user "aws" "$username" "$password" || fail "error listing buckets with user '$username'"
list_buckets_with_user "s3api" "$username" "$password" || fail "error listing buckets with user '$username'"
bucket_found=false
for bucket in "${bucket_array[@]}"; do
if [ "$bucket" == "$BUCKET_ONE_NAME" ]; then
@@ -120,7 +120,7 @@ test_user_user() {
fail "user-owned bucket not found in user list"
fi
run delete_bucket "aws" "versity-gwtest-user-bucket"
run delete_bucket "s3api" "versity-gwtest-user-bucket"
assert_success "failed to delete bucket"
delete_user "$username"
}
@@ -133,15 +133,15 @@ test_userplus_operation() {
username="$USERNAME_ONE"
password="$PASSWORD_ONE"
bucket_cleanup_if_bucket_exists "aws" "versity-gwtest-userplus-bucket"
bucket_cleanup_if_bucket_exists "s3api" "versity-gwtest-userplus-bucket"
setup_user "$username" "$password" "userplus" || fail "error creating user '$username'"
run setup_bucket "aws" "$BUCKET_ONE_NAME"
run setup_bucket "s3api" "$BUCKET_ONE_NAME"
assert_success
create_bucket_with_user "aws" "versity-gwtest-userplus-bucket" "$username" "$password" || fail "error creating bucket with user '$username'"
create_bucket_with_user "s3api" "versity-gwtest-userplus-bucket" "$username" "$password" || fail "error creating bucket with user '$username'"
list_buckets_with_user "aws" "$username" "$password" || fail "error listing buckets with user '$username'"
list_buckets_with_user "s3api" "$username" "$password" || fail "error listing buckets with user '$username'"
bucket_found=false
for bucket in "${bucket_array[@]}"; do
if [ "$bucket" == "$BUCKET_ONE_NAME" ]; then
@@ -158,7 +158,7 @@ test_userplus_operation() {
fail "userplus shouldn't be able to change bucket owner"
fi
run delete_bucket "aws" "versity-gwtest-admin-bucket"
run delete_bucket "s3api" "versity-gwtest-admin-bucket"
assert_success "failed to delete bucket"
delete_user "$username"
}

View File

@@ -14,11 +14,11 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/util_bucket.sh
source ./tests/util_create_bucket.sh
source ./tests/util_mc.sh
source ./tests/util_multipart.sh
source ./tests/util_versioning.sh
source ./tests/util/util_bucket.sh
source ./tests/util/util_create_bucket.sh
source ./tests/util/util_mc.sh
source ./tests/util/util_multipart.sh
source ./tests/util/util_versioning.sh
source ./tests/logger.sh
source ./tests/commands/abort_multipart_upload.sh
source ./tests/commands/complete_multipart_upload.sh
@@ -45,7 +45,7 @@ source ./tests/commands/put_object_legal_hold.sh
source ./tests/commands/put_object_lock_configuration.sh
source ./tests/commands/upload_part_copy.sh
source ./tests/commands/upload_part.sh
source ./tests/util_users.sh
source ./tests/util/util_users.sh
# params: bucket name
# return 0 for success, 1 for error
@@ -269,21 +269,21 @@ object_exists() {
put_object_with_metadata() {
if [ $# -ne 6 ]; then
echo "put object command requires command type, source, destination, key, metadata key, metadata value"
log 2 "put object command requires command type, source, destination, key, metadata key, metadata value"
return 1
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
error=$(aws --no-verify-ssl s3api put-object --body "$2" --bucket "$3" --key "$4" --metadata "{\"$5\":\"$6\"}") || exit_code=$?
else
echo "invalid command type $1"
log 2 "invalid command type $1"
return 1
fi
log 5 "put object exit code: $exit_code"
if [ $exit_code -ne 0 ]; then
echo "error copying object to bucket: $error"
log 2 "error copying object to bucket: $error"
return 1
fi
return 0
@@ -291,19 +291,19 @@ put_object_with_metadata() {
get_object_metadata() {
if [ $# -ne 3 ]; then
echo "get object metadata command requires command type, bucket, key"
log 2 "get object metadata command requires command type, bucket, key"
return 1
fi
local exit_code=0
if [[ $1 == 'aws' ]]; then
if [[ $1 == 's3api' ]]; then
metadata_struct=$(aws --no-verify-ssl s3api head-object --bucket "$2" --key "$3") || exit_code=$?
else
echo "invalid command type $1"
log 2 "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error copying object to bucket: $error"
log 2 "error copying object to bucket: $error"
return 1
fi
log 5 "raw metadata: $metadata_struct"
@@ -315,12 +315,12 @@ get_object_metadata() {
put_object_multiple() {
if [ $# -ne 3 ]; then
echo "put object command requires command type, source, destination"
log 2 "put object command requires command type, source, destination"
return 1
fi
local exit_code=0
local error
if [[ $1 == 'aws' ]] || [[ $1 == 's3' ]]; then
if [[ $1 == 's3api' ]] || [[ $1 == 's3' ]]; then
# shellcheck disable=SC2086
error=$(aws --no-verify-ssl s3 cp "$(dirname "$2")" s3://"$3" --recursive --exclude="*" --include="$2" 2>&1) || exit_code=$?
elif [[ $1 == 's3cmd' ]]; then
@@ -330,11 +330,11 @@ put_object_multiple() {
# shellcheck disable=SC2086
error=$(mc --insecure cp $2 "$MC_ALIAS"/"$3" 2>&1) || exit_code=$?
else
echo "invalid command type $1"
log 2 "invalid command type $1"
return 1
fi
if [ $exit_code -ne 0 ]; then
echo "error copying object to bucket: $error"
log 2 "error copying object to bucket: $error"
return 1
else
log 5 "$error"
@@ -347,18 +347,18 @@ put_object_multiple() {
# return 0 for success or already exists, 1 for failure
check_and_put_object() {
if [ $# -ne 3 ]; then
echo "check and put object function requires source, bucket, destination"
log 2 "check and put object function requires source, bucket, destination"
return 1
fi
object_exists "aws" "$2" "$3" || local exists_result=$?
object_exists "s3api" "$2" "$3" || local exists_result=$?
if [ "$exists_result" -eq 2 ]; then
echo "error checking if object exists"
log 2 "error checking if object exists"
return 1
fi
if [ "$exists_result" -eq 1 ]; then
copy_object "$1" "$2" || local copy_result=$?
if [ "$copy_result" -ne 0 ]; then
echo "error adding object"
log 2 "error adding object"
return 1
fi
fi
@@ -367,7 +367,7 @@ check_and_put_object() {
remove_insecure_request_warning() {
if [[ $# -ne 1 ]]; then
echo "remove insecure request warning requires input lines"
log 2 "remove insecure request warning requires input lines"
return 1
fi
parsed_output=()
@@ -384,13 +384,13 @@ remove_insecure_request_warning() {
# return 0 for yes, 1 for no, 2 for error
object_is_accessible() {
if [ $# -ne 2 ]; then
echo "object accessibility check missing bucket and/or key"
log 2 "object accessibility check missing bucket and/or key"
return 2
fi
local exit_code=0
object_data=$(aws --no-verify-ssl s3api head-object --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
if [ $exit_code -ne 0 ]; then
echo "Error obtaining object data: $object_data"
log 2 "Error obtaining object data: $object_data"
return 2
fi
etag=$(echo "$object_data" | grep -v "InsecureRequestWarning" | jq '.ETag')
@@ -405,13 +405,13 @@ object_is_accessible() {
# export acl for success, return 1 for error
get_object_acl() {
if [ $# -ne 2 ]; then
echo "object ACL command missing object name"
log 2 "object ACL command missing object name"
return 1
fi
local exit_code=0
acl=$(aws --no-verify-ssl s3api get-object-acl --bucket "$1" --key "$2" 2>&1) || exit_code="$?"
if [ $exit_code -ne 0 ]; then
echo "Error getting object ACLs: $acl"
log 2 "Error getting object ACLs: $acl"
return 1
fi
export acl
@@ -422,14 +422,14 @@ get_object_acl() {
# return 0 for success, 1 for failure
copy_file() {
if [ $# -ne 2 ]; then
echo "copy file command requires src and dest"
log 2 "copy file command requires src and dest"
return 1
fi
local result
error=$(aws --no-verify-ssl s3 cp "$1" "$2") || result=$?
if [[ $result -ne 0 ]]; then
echo "error copying file: $error"
log 2 "error copying file: $error"
return 1
fi
return 0

View File

@@ -44,6 +44,7 @@ get_check_post_change_acl_s3cmd() {
log 5 "ACL after read put: $acl"
acl_lines=$(echo "$acl" | grep "ACL")
log 5 "ACL lines: $acl_lines"
lines=()
while IFS= read -r line; do
lines+=("$line")
done <<< "$acl_lines"
@@ -149,3 +150,96 @@ get_check_acl_after_second_put() {
fi
return 0
}
get_check_acl_after_policy() {
if [ $# -ne 1 ]; then
log 2 "'get_check_acl_after_policy' requires bucket name"
return 1
fi
if ! get_bucket_acl "s3api" "$1"; then
log 2 "error getting bucket acl"
return 1
fi
# shellcheck disable=SC2154
log 5 "ACL: $acl"
if ! second_grant=$(echo "$acl" | jq -r ".Grants[1]" 2>&1); then
log 2 "error getting second grant: $second_grant"
return 1
fi
if ! second_grantee=$(echo "$second_grant" | jq -r ".Grantee" 2>&1); then
log 2 "error getting second grantee: $second_grantee"
return 1
fi
if ! permission=$(echo "$second_grant" | jq -r ".Permission" 2>&1); then
log 2 "error getting permission: $permission"
return 1
fi
log 5 "second grantee: $second_grantee"
if [[ $permission != "READ" ]]; then
log 2 "incorrect permission: $permission"
return 1
fi
if [[ $DIRECT == "true" ]]; then
if ! uri=$(echo "$second_grantee" | jq -r ".URI" 2>&1); then
log 2 "error getting uri: $uri"
return 1
fi
if [[ $uri != "http://acs.amazonaws.com/groups/global/AllUsers" ]]; then
log 2 "unexpected URI: $uri"
return 1
fi
else
if ! id=$(echo "$second_grantee" | jq -r ".ID" 2>&1); then
log 2 "error getting ID: $id"
return 1
fi
if [[ $id != "all-users" ]]; then
log 2 "unexpected ID: $id"
return 1
fi
fi
}
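A minimal usage sketch for the helper above (hypothetical bats snippet), assuming the bucket exists and a public-read policy was just applied:
# verify the second ACL grant reflects the policy's READ permission
run get_check_acl_after_policy "$BUCKET_ONE_NAME"
assert_success "ACL does not contain the READ grant added by the policy"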
get_and_check_acl_rest() {
if [ $# -ne 1 ]; then
log 2 "'get_and_check_acl_rest' requires bucket name"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OUTPUT_FILE="$TEST_FILE_FOLDER/acl.txt" ./tests/rest_scripts/get_bucket_acl.sh); then
log 2 "error attempting to get bucket ACL response: $result"
return 1
fi
if [ "$result" != "200" ]; then
log 2 "get acl returned code '$result' (message: $(cat "$TEST_FILE_FOLDER/acl.txt"))"
return 1
fi
log 5 "acl: $(cat "$TEST_FILE_FOLDER/acl.txt")"
if ! access_control_policy=$(xmllint --xpath '//*[local-name()="AccessControlPolicy"]' "$TEST_FILE_FOLDER/acl.txt" 2>&1); then
log 2 "error getting access control policy: $access_control_policy"
return 1
fi
if ! owner=$(echo "$access_control_policy" | xmllint --xpath '//*[local-name()="Owner"]' - 2>&1); then
log 2 "error getting owner information: $owner"
return 1
fi
if [ "$DIRECT" == "true" ]; then
if ! display_name=$(echo "$owner" | xmllint --xpath '//*[local-name()="DisplayName"]/text()' - 2>&1); then
log 2 "error getting display name: $display_name"
return 1
fi
if [ "$display_name" != "$DIRECT_DISPLAY_NAME" ]; then
log 2 "display name mismatch (expected '$DIRECT_DISPLAY_NAME', actual '$display_name')"
return 1
fi
else
if ! id=$(echo "$owner" | xmllint --xpath '//*[local-name()="ID"]/text()' - 2>&1); then
log 2 "error getting ID: $id"
return 1
fi
if [ "$id" != "$AWS_ACCESS_KEY_ID" ]; then
log 2 "ID mismatch"
return 1
fi
fi
return 0
}
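A usage sketch, assuming COMMAND_LOG, TEST_FILE_FOLDER, and (for direct runs) DIRECT_DISPLAY_NAME are exported by the test harness:
# check that the REST GetBucketAcl owner matches the expected account
run get_and_check_acl_rest "$BUCKET_ONE_NAME"
assert_success "REST GetBucketAcl owner check failed"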

View File

@@ -16,13 +16,13 @@
abort_all_multipart_uploads() {
if [[ $# -ne 1 ]]; then
echo "abort all multipart uploads command missing bucket name"
log 2 "abort all multipart uploads command missing bucket name"
return 1
fi
upload_list=$(aws --no-verify-ssl s3api list-multipart-uploads --bucket "$1" 2>&1) || list_result=$?
if [[ $list_result -ne 0 ]]; then
echo "error listing multipart uploads: $upload_list"
log 2 "error listing multipart uploads: $upload_list"
return 1
fi
log 5 "$upload_list"
@@ -46,7 +46,7 @@ abort_all_multipart_uploads() {
while read -r line; do
# shellcheck disable=SC2086
if ! error=$(aws --no-verify-ssl s3api abort-multipart-upload --bucket "$1" $line 2>&1); then
echo "error aborting multipart upload: $error"
log 2 "error aborting multipart upload: $error"
return 1
fi
done <<< "$lines"

View File

@@ -1,5 +1,7 @@
#!/usr/bin/env bash
source ./tests/util/util_multipart_abort.sh
# recursively delete an AWS bucket
# param: client, bucket name
# fail if error
@@ -14,7 +16,7 @@ delete_bucket_recursive() {
local error
if [[ $1 == 's3' ]]; then
error=$(aws --no-verify-ssl s3 rb s3://"$2" --force 2>&1) || exit_code="$?"
elif [[ $1 == "aws" ]] || [[ $1 == 's3api' ]]; then
elif [[ $1 == 's3api' ]]; then
if ! delete_bucket_recursive_s3api "$2"; then
log 2 "error deleting bucket recursively (s3api)"
return 1
@@ -123,7 +125,7 @@ delete_bucket_contents() {
local exit_code=0
local error
if [[ $1 == "aws" ]] || [[ $1 == 's3api' ]]; then
if [[ $1 == 's3api' ]]; then
if ! clear_bucket_s3api "$2"; then
log 2 "error clearing bucket (s3api)"
return 1
@@ -287,7 +289,7 @@ setup_bucket() {
# return 0 for yes, 1 for no, 2 for error
bucket_is_accessible() {
if [ $# -ne 1 ]; then
echo "bucket accessibility check missing bucket name"
log 2 "bucket accessibility check missing bucket name"
return 2
fi
local exit_code=0
@@ -299,6 +301,28 @@ bucket_is_accessible() {
if [[ "$error" == *"500"* ]]; then
return 1
fi
echo "Error checking bucket accessibility: $error"
log 2 "Error checking bucket accessibility: $error"
return 2
}
check_for_empty_region() {
if [ $# -ne 1 ]; then
log 2 "'check_for_empty_region' requires bucket name"
return 1
fi
if ! head_bucket "s3api" "$BUCKET_ONE_NAME"; then
log 2 "error getting bucket info"
return 1
fi
# shellcheck disable=SC2154
log 5 "INFO: $bucket_info"
if ! region=$(echo "$bucket_info" | grep -v "InsecureRequestWarning" | jq -r ".BucketRegion" 2>&1); then
log 2 "error getting region: $region"
return 1
fi
if [[ $region == "" ]]; then
log 2 "empty bucket region"
return 1
fi
return 0
}
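A usage sketch for the region check above (hypothetical snippet, assuming the bucket was already created):
if ! check_for_empty_region "$BUCKET_ONE_NAME"; then
  fail "HeadBucket returned an empty BucketRegion"
fi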

View File

@@ -14,7 +14,7 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/util_mc.sh
source ./tests/util/util_mc.sh
source ./tests/logger.sh
create_bucket_invalid_name() {
@@ -39,7 +39,7 @@ create_bucket_invalid_name() {
log 2 "error: bucket should have not been created but was"
return 1
fi
export bucket_create_error
echo "$bucket_create_error"
}
create_and_check_bucket_invalid_name() {

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
block_delete_object_without_permission() {
if [ $# -ne 4 ]; then
log 2 "'attempt_delete_object_without_permission' requires bucket, file, username, password"
return 1
fi
if delete_object_with_user "s3api" "$1" "$2" "$3" "$4"; then
log 2 "able to delete object despite lack of permissions"
return 1
fi
# shellcheck disable=SC2154
if [[ "$delete_object_error" != *"Access Denied"* ]]; then
log 2 "invalid delete object error: $delete_object_error"
return 1
fi
return 0
}
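A usage sketch (illustrative variable names), assuming the object and the unprivileged user were created earlier in the test:
run block_delete_object_without_permission "$BUCKET_ONE_NAME" "$test_file" "$username" "$password"
assert_success "user without permission was able to delete the object"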

View File

@@ -104,17 +104,17 @@ create_test_folder() {
# return: 0 for success, 1 for error
delete_test_files() {
if [ $# -lt 1 ]; then
echo "delete test files command missing filenames"
log 2 "delete test files command missing filenames"
return 1
fi
if [ -z "$TEST_FILE_FOLDER" ]; then
echo "no test file folder defined, not deleting"
log 2 "no test file folder defined, not deleting"
return 1
fi
for name in "$@"; do
rm -rf "${TEST_FILE_FOLDER:?}"/"${name:?}" || rm_result=$?
if [[ $rm_result -ne 0 ]]; then
echo "error deleting file $name"
log 2 "error deleting file $name"
fi
done
return 0
@@ -139,7 +139,7 @@ split_file() {
local split_result
error=$(split -a 1 -d -b "$part_size" "$1" "$1"-) || split_result=$?
if [[ $split_result -ne 0 ]]; then
echo "error splitting file: $error"
log 2 "error splitting file: $error"
return 1
fi
return 0
@@ -150,7 +150,7 @@ split_file() {
# return 0 for same data, 1 for different data, 2 for error
compare_files() {
if [ $# -ne 2 ]; then
echo "file comparison requires two files"
log 2 "file comparison requires two files"
return 2
fi
os=$(uname)

View File

@@ -41,4 +41,33 @@ get_and_check_object_size() {
return 1
fi
return 0
}
}
get_object_metadata_and_check_keys() {
if [ $# -ne 4 ]; then
log 2 "'get_object_metadata_and_check_keys' requires bucket, key, expected metadata key, value"
return 1
fi
if ! get_object_metadata "s3api" "$1" "$2"; then
log 2 "error getting object metadata"
return 1
fi
# shellcheck disable=SC2154
if ! key=$(echo "$metadata" | jq -r 'keys[]' 2>&1); then
log 2 "error getting key from metadata: $key"
return 1
fi
if ! value=$(echo "$metadata" | jq -r '.[]' 2>&1); then
log 2 "error getting value from metadata: $value"
return 1
fi
if [[ $key != "$3" ]]; then
log 2 "keys doesn't match (expected '$3', actual '$key')"
return 1
fi
if [[ $value != "$4" ]]; then
log 2 "values doesn't match (expected '$4', actual '$value')"
return 1
fi
return 0
}
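A usage sketch with illustrative metadata values, assuming the object is uploaded via put_object_with_metadata first:
run put_object_with_metadata "s3api" "$TEST_FILE_FOLDER/$test_file" "$BUCKET_ONE_NAME" "$test_file" "mdkey" "mdvalue"
assert_success "error putting object with metadata"
run get_object_metadata_and_check_keys "$BUCKET_ONE_NAME" "$test_file" "mdkey" "mdvalue"
assert_success "metadata key/value mismatch"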

View File

@@ -15,4 +15,16 @@ bucket_info_contains_bucket() {
return 1
fi
return 0
}
bucket_info_without_bucket() {
if head_bucket "s3api" "$BUCKET_ONE_NAME"; then
log 2 "able to get bucket info for non-existent bucket"
return 1
fi
if [[ $bucket_info != *"404"* ]]; then
log 2 "404 not returned for non-existent bucket info"
return 1
fi
return 0
}
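A usage sketch; the helper assumes BUCKET_ONE_NAME does not exist at the time of the call:
run bucket_info_without_bucket
assert_success "HeadBucket did not return 404 for the missing bucket"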

View File

@@ -62,7 +62,7 @@ list_and_check_buckets_with_user() {
break
fi
done
echo $bucket_one_found $bucket_two_found
log 5 "buckets found? one: $bucket_one_found, two: $bucket_two_found"
if [ $bucket_one_found == false ] || [ $bucket_two_found == false ]; then
log 2 "Not all buckets found"
return 1

View File

@@ -19,19 +19,19 @@
# return 0 for success, 1 for failure
delete_bucket_recursive_mc() {
if [[ $# -ne 1 ]]; then
echo "delete bucket recursive mc command requires bucket name"
log 2 "delete bucket recursive mc command requires bucket name"
return 1
fi
local exit_code=0
local error
error=$(mc --insecure rm --recursive --force "$MC_ALIAS"/"$1" 2>&1) || exit_code="$?"
if [[ $exit_code -ne 0 ]]; then
echo "error deleting bucket contents: $error"
log 2 "error deleting bucket contents: $error"
return 1
fi
error=$(mc --insecure rb "$MC_ALIAS"/"$1" 2>&1) || exit_code="$?"
if [[ $exit_code -ne 0 ]]; then
echo "error deleting bucket: $error"
log 2 "error deleting bucket: $error"
return 1
fi
return 0

View File

@@ -210,7 +210,7 @@ create_and_list_multipart_uploads() {
fi
if ! list_multipart_uploads "$1"; then
echo "error listing uploads"
log 2 "error listing uploads"
return 1
fi
return 0
@@ -228,7 +228,7 @@ multipart_upload_from_bucket() {
fi
for ((i=0;i<$4;i++)) {
echo "key: $3"
log 5 "key: $3"
if ! put_object "s3api" "$3-$i" "$1" "$2-$i"; then
log 2 "error copying object"
return 1
@@ -262,7 +262,7 @@ multipart_upload_from_bucket() {
multipart_upload_from_bucket_range() {
if [ $# -ne 5 ]; then
echo "multipart upload from bucket with range command requires bucket, copy source, key, part count, and range"
log 2 "multipart upload from bucket with range command requires bucket, copy source, key, part count, and range"
return 1
fi
if ! split_file "$3" "$4"; then
@@ -324,7 +324,7 @@ multipart_upload_before_completion() {
for ((i = 1; i <= $4; i++)); do
# shellcheck disable=SC2154
if ! upload_part "$1" "$2" "$upload_id" "$3" "$i"; then
echo "error uploading part $i"
log 2 "error uploading part $i"
return 1
fi
parts+="{\"ETag\": $etag, \"PartNumber\": $i}"
@@ -476,68 +476,6 @@ multipart_upload_with_params() {
return 0
}
# run upload, then abort it
# params: bucket, key, local file location, number of parts to split into before uploading
# return 0 for success, 1 for failure
run_then_abort_multipart_upload() {
if [ $# -ne 4 ]; then
log 2 "run then abort multipart upload command missing bucket, key, file, and/or part count"
return 1
fi
if ! multipart_upload_before_completion "$1" "$2" "$3" "$4"; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
if ! abort_multipart_upload "$1" "$2" "$upload_id"; then
log 2 "error aborting multipart upload"
return 1
fi
return 0
}
# param: bucket name
# return 0 for success, 1 for error
abort_all_multipart_uploads() {
if [ $# -ne 1 ]; then
log 2 "'abort_all_multipart_uploads' requires bucket name"
return 1
fi
if ! list_multipart_uploads "$1"; then
log 2 "error listing multipart uploads"
return 1
fi
# shellcheck disable=SC2154
log 5 "UPLOADS: $uploads"
if ! upload_set=$(echo "$uploads" | grep -v "InsecureRequestWarning" | jq -c '.Uploads[]' 2>&1); then
if [[ $upload_set == *"Cannot iterate over null"* ]]; then
return 0
else
log 2 "error getting upload set: $upload_set"
return 1
fi
fi
log 5 "UPLOAD SET: $upload_set"
for upload in $upload_set; do
log 5 "UPLOAD: $upload"
if ! upload_id=$(echo "$upload" | jq -r ".UploadId" 2>&1); then
log 2 "error getting upload ID: $upload_id"
return 1
fi
log 5 "upload ID: $upload_id"
if ! key=$(echo "$upload" | jq -r ".Key" 2>&1); then
log 2 "error getting key: $key"
return 1
fi
log 5 "Aborting multipart upload for key: $key, UploadId: $upload_id"
if ! abort_multipart_upload "$1" "$key" "$upload_id"; then
log 2 "error aborting multipart upload"
return 1
fi
done
}
create_upload_and_get_id_rest() {
if [ $# -ne 2 ]; then
log 2 "'create_upload_and_get_id_rest' requires bucket, key"
@@ -560,41 +498,6 @@ create_upload_and_get_id_rest() {
return 0
}
create_abort_multipart_upload_rest() {
if [ $# -ne 2 ]; then
log 2 "'create_abort_upload_rest' requires bucket, key"
return 1
fi
if ! list_and_check_upload "$1" "$2"; then
log 2 "error listing multipart uploads before creation"
return 1
fi
log 5 "uploads before upload: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
if ! create_upload_and_get_id_rest "$1" "$2"; then
log 2 "error creating upload"
return 1
fi
if ! list_and_check_upload "$1" "$2" "$upload_id"; then
log 2 "error listing multipart uploads after upload creation"
return 1
fi
log 5 "uploads after upload creation: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$upload_id" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/abort_multipart_upload.sh); then
log 2 "error aborting multipart upload: $result"
return 1
fi
if [ "$result" != "204" ]; then
log 2 "expected '204' response, actual was '$result' (error: $(cat "$TEST_FILE_FOLDER"/result.txt)"
return 1
fi
log 5 "final uploads: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
if ! list_and_check_upload "$1" "$2"; then
log 2 "error listing multipart uploads after abort"
return 1
fi
return 0
}
multipart_upload_range_too_large() {
if [ $# -ne 3 ]; then
log 2 "'multipart_upload_range_too_large' requires bucket name, key, file location"
@@ -679,3 +582,25 @@ run_and_verify_multipart_upload_with_valid_range() {
fi
return 0
}
list_check_multipart_upload_key() {
if [ $# -ne 4 ]; then
log 2 "'list_check_multipart_upload_key' requires bucket, username, password, expected key"
return 1
fi
if ! list_multipart_uploads_with_user "$1" "$2" "$3"; then
log 2 "error listing multipart uploads with user"
return 1
fi
# shellcheck disable=SC2154
log 5 "$uploads"
if ! upload_key=$(echo "$uploads" | grep -v "InsecureRequestWarning" | jq -r ".Uploads[0].Key" 2>&1); then
log 2 "error parsing upload key from uploads message: $upload_key"
return 1
fi
if [[ "$4" != "$upload_key" ]]; then
log 2 "upload key doesn't match file marked as being uploaded (expected: '$4', actual: '$upload_key')"
return 1
fi
return 0
}
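A usage sketch (illustrative names), assuming a multipart upload of "$test_file" was started by the given user:
run list_check_multipart_upload_key "$BUCKET_ONE_NAME" "$username" "$password" "$test_file"
assert_success "upload key missing from ListMultipartUploads results"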

View File

@@ -0,0 +1,115 @@
#!/usr/bin/env bash
check_abort_access_denied() {
if [ $# -ne 5 ]; then
log 2 "'check_abort_access_denied' requires bucket, file, username, password"
return 1
fi
if abort_multipart_upload_with_user "$1" "$2" "$3" "$4" "$5"; then
log 2 "abort multipart upload succeeded despite lack of permissions"
return 1
fi
# shellcheck disable=SC2154
if [[ "$abort_multipart_upload_error" != *"AccessDenied"* ]]; then
log 2 "unexpected abort error: $abort_multipart_upload_error"
return 1
fi
return 0
}
create_abort_multipart_upload_rest() {
if [ $# -ne 2 ]; then
log 2 "'create_abort_upload_rest' requires bucket, key"
return 1
fi
if ! list_and_check_upload "$1" "$2"; then
log 2 "error listing multipart uploads before creation"
return 1
fi
log 5 "uploads before upload: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
if ! create_upload_and_get_id_rest "$1" "$2"; then
log 2 "error creating upload"
return 1
fi
if ! list_and_check_upload "$1" "$2" "$upload_id"; then
log 2 "error listing multipart uploads after upload creation"
return 1
fi
log 5 "uploads after upload creation: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OBJECT_KEY="$2" UPLOAD_ID="$upload_id" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/abort_multipart_upload.sh); then
log 2 "error aborting multipart upload: $result"
return 1
fi
if [ "$result" != "204" ]; then
log 2 "expected '204' response, actual was '$result' (error: $(cat "$TEST_FILE_FOLDER"/result.txt)"
return 1
fi
log 5 "final uploads: $(cat "$TEST_FILE_FOLDER/uploads.txt")"
if ! list_and_check_upload "$1" "$2"; then
log 2 "error listing multipart uploads after abort"
return 1
fi
return 0
}
# param: bucket name
# return 0 for success, 1 for error
abort_all_multipart_uploads() {
if [ $# -ne 1 ]; then
log 2 "'abort_all_multipart_uploads' requires bucket name"
return 1
fi
if ! list_multipart_uploads "$1"; then
log 2 "error listing multipart uploads"
return 1
fi
# shellcheck disable=SC2154
log 5 "UPLOADS: $uploads"
if ! upload_set=$(echo "$uploads" | grep -v "InsecureRequestWarning" | jq -c '.Uploads[]' 2>&1); then
if [[ $upload_set == *"Cannot iterate over null"* ]]; then
return 0
else
log 2 "error getting upload set: $upload_set"
return 1
fi
fi
log 5 "UPLOAD SET: $upload_set"
for upload in $upload_set; do
log 5 "UPLOAD: $upload"
if ! upload_id=$(echo "$upload" | jq -r ".UploadId" 2>&1); then
log 2 "error getting upload ID: $upload_id"
return 1
fi
log 5 "upload ID: $upload_id"
if ! key=$(echo "$upload" | jq -r ".Key" 2>&1); then
log 2 "error getting key: $key"
return 1
fi
log 5 "Aborting multipart upload for key: $key, UploadId: $upload_id"
if ! abort_multipart_upload "$1" "$key" "$upload_id"; then
log 2 "error aborting multipart upload"
return 1
fi
done
}
# run upload, then abort it
# params: bucket, key, local file location, number of parts to split into before uploading
# return 0 for success, 1 for failure
run_then_abort_multipart_upload() {
if [ $# -ne 4 ]; then
log 2 "run then abort multipart upload command missing bucket, key, file, and/or part count"
return 1
fi
if ! multipart_upload_before_completion "$1" "$2" "$3" "$4"; then
log 2 "error performing pre-completion multipart upload"
return 1
fi
if ! abort_multipart_upload "$1" "$2" "$upload_id"; then
log 2 "error aborting multipart upload"
return 1
fi
return 0
}
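A usage sketch for the abort helpers above (hypothetical snippet; the 4-part split assumes the local file is large enough to divide):
run run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$test_file" "$TEST_FILE_FOLDER/$test_file" 4
assert_success "error creating and aborting multipart upload"
run abort_all_multipart_uploads "$BUCKET_ONE_NAME"
assert_success "error aborting leftover multipart uploads"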

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
get_and_check_ownership_controls() {
if [ $# -ne 2 ]; then
log 2 "'get_and_check_ownership_controls' missing bucket name, expected result"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$BUCKET_ONE_NAME" OUTPUT_FILE="$TEST_FILE_FOLDER/ownershipControls.txt" ./tests/rest_scripts/get_bucket_ownership_controls.sh); then
log 2 "error getting bucket ownership controls: $result"
return 1
fi
if [ "$result" != "200" ]; then
log 2 "GetBucketOwnershipControls returned response code: $result, reply: $(cat "$TEST_FILE_FOLDER/ownershipControls.txt")"
return 1
fi
log 5 "controls: $(cat "$TEST_FILE_FOLDER/ownershipControls.txt")"
if ! rule=$(xmllint --xpath '//*[local-name()="ObjectOwnership"]/text()' "$TEST_FILE_FOLDER/ownershipControls.txt" 2>&1); then
log 2 "error getting ownership rule: $rule"
return 1
fi
if [ "$rule" != "$2" ]; then
log 2 "rule mismatch (expected '$2', actual '$rule')"
return 1
fi
return 0
}
put_bucket_ownership_controls_rest() {
if [ $# -ne 2 ]; then
log 2 "'put_bucket_ownership_controls_rest' missing bucket name, ownership"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OWNERSHIP="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/put_bucket_ownership_controls.sh); then
log 2 "error putting bucket ownership controls: $result"
return 1
fi
if [ "$result" != "200" ]; then
log 2 "put bucket ownership controls returned code $result: $(cat "$TEST_FILE_FOLDER/result.txt")"
return 1
fi
return 0
}
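A usage sketch chaining the two REST helpers above; "BucketOwnerPreferred" is one of the ownership values S3 accepts:
run put_bucket_ownership_controls_rest "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
assert_success "error putting bucket ownership controls"
run get_and_check_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred"
assert_success "ownership rule was not persisted"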

View File

@@ -16,7 +16,7 @@
check_for_empty_policy() {
if [[ $# -ne 2 ]]; then
echo "check for empty policy command requires command type, bucket name"
log 2 "check for empty policy command requires command type, bucket name"
return 1
fi
@@ -36,7 +36,7 @@ check_for_empty_policy() {
statement=$(echo "$bucket_policy" | jq -r '.Statement[0]')
log 5 "statement: $statement"
if [[ "" != "$statement" ]] && [[ "null" != "$statement" ]]; then
echo "policy should be empty (actual value: '$statement')"
log 2 "policy should be empty (actual value: '$statement')"
return 1
fi
return 0
@@ -45,16 +45,33 @@ check_for_empty_policy() {
get_modified_principal() {
log 6 "get_modified_principal"
if [ $# -ne 1 ]; then
log 2 "'get_modified_principal' requires principal"
log 2 "'get_modified_principal' requires principal string"
return 1
fi
local first_char="${1:0:1}"
if [ "$first_char" != '{' ] && [ "$first_char" != '[' ] && [ "$first_char" != '"' ]; then
# shellcheck disable=SC2089
modified_principal="\"$1\""
else
modified_principal=$1
IFS=',' read -r -a principals <<< "$1"
modified_principal=""
if [ "${#principals[@]}" -gt 1 ]; then
modified_principal="["
fi
for ((idx=0; idx<${#principals[@]}; idx++)); do
if [ "$DIRECT" == "true" ]; then
if [ "${principals[$idx]}" == "*" ]; then
modified_principal+="{\"AWS\": \"arn:aws:iam::$DIRECT_AWS_USER_ID:user/$DIRECT_S3_ROOT_ACCOUNT_NAME\"}"
else
modified_principal+="{\"AWS\": \"arn:aws:iam::$DIRECT_AWS_USER_ID:user/${principals[$idx]}\"}"
fi
else
# shellcheck disable=SC2089
modified_principal+="\"${principals[$idx]}\""
fi
if [[ ( "${#principals[@]}" -gt 1 ) && ( $idx -lt ${#principals[@]}-1 ) ]]; then
modified_principal+=","
fi
done
if [ "${#principals[@]}" -gt 1 ]; then
modified_principal+="]"
fi
log 5 "modified principal: $modified_principal"
}
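For illustration, a comma-separated input expands to a JSON array when DIRECT is unset (a sketch of the expected result, not an assertion in the suite):
get_modified_principal "userone,usertwo"
# modified_principal is now: ["userone","usertwo"]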
get_modified_action() {
@@ -89,19 +106,17 @@ setup_policy_with_single_statement() {
log 2 "error getting modified action"
return 1
fi
bash -c "cat <<EOF > $1
{
\"Version\": \"$2\",
\"Statement\": [
printf '{
"Version": "%s",
"Statement": [
{
\"Effect\": \"$3\",
\"Principal\": $modified_principal,
\"Action\": $modified_action,
\"Resource\": \"$6\"
"Effect": "%s",
"Principal": %s,
"Action": %s,
"Resource": "%s"
}
]
}
EOF"
}' "$2" "$3" "$modified_principal" "$modified_action" "$6" > "$1"
# shellcheck disable=SC2154
#assert_success "failed to set up policy: $output"
log 5 "policy data: $(cat "$1")"
@@ -160,7 +175,20 @@ get_and_check_policy() {
# shellcheck disable=SC2154
log 5 "POLICY: $bucket_policy"
if ! statement=$(echo "$bucket_policy" | jq -r '.Statement[0]' 2>&1); then
if ! check_policy "$bucket_policy" "$3" "$4" "$5" "$6"; then
log 2 "error checking policy"
return 1
fi
return 0
}
check_policy() {
if [ $# -ne 5 ]; then
log 2 "'check_policy' requires policy, expected effect, policy, action, resource"
return 1
fi
log 5 "policy: $1"
if ! statement=$(echo -n "$1" | jq -r '.Statement[0]' 2>&1); then
log 2 "error getting statement value: $statement"
return 1
fi
@@ -168,8 +196,8 @@ get_and_check_policy() {
log 2 "error getting effect: $returned_effect"
return 1
fi
if [[ "$3" != "$returned_effect" ]]; then
log 2 "effect mismatch ($3, $returned_effect)"
if [[ "$2" != "$returned_effect" ]]; then
log 2 "effect mismatch (expected '$2', actual '$returned_effect')"
return 1
fi
if ! returned_principal=$(echo "$statement" | jq -r '.Principal' 2>&1); then
@@ -177,13 +205,13 @@ get_and_check_policy() {
return 1
fi
if [[ -n $DIRECT ]] && arn=$(echo "$returned_principal" | jq -r '.AWS' 2>&1); then
if [[ $arn != "arn:aws:iam::$DIRECT_AWS_USER_ID:user/s3user" ]]; then
log 2 "arn mismatch"
if [[ $arn != "$3" ]]; then
log 2 "arn mismatch (expected '$3', actual '$arn')"
return 1
fi
else
if [[ "$4" != "\"$returned_principal\"" ]]; then
log 2 "principal mismatch ($4, $returned_principal)"
if [[ "$3" != "$returned_principal" ]]; then
log 2 "principal mismatch (expected '$3', actual '$returned_principal')"
return 1
fi
fi
@@ -191,19 +219,19 @@ get_and_check_policy() {
log 2 "error getting action: $returned_action"
return 1
fi
if [[ "$5" != "$returned_action" ]]; then
log 2 "action mismatch ($5, $returned_action)"
if [[ "$4" != "$returned_action" ]]; then
log 2 "action mismatch (expected '$4', actual '$returned_action')"
return 1
fi
if ! returned_resource=$(echo "$statement" | jq -r '.Resource' 2>&1); then
log 2 "error getting resource: $returned_resource"
return 1
fi
if [[ "$6" != "$returned_resource" ]]; then
log 2 "resource mismatch ($6, $returned_resource)"
if [[ "$5" != "$returned_resource" ]]; then
log 2 "resource mismatch (expected '$5', actual '$returned_resource')"
return 1
fi
return 0
return 0
}
put_and_check_for_malformed_policy() {
@@ -222,3 +250,83 @@ put_and_check_for_malformed_policy() {
fi
return 0
}
get_and_check_no_policy_error() {
if [ $# -ne 1 ]; then
log 2 "'get_and_check_no_policy_error' requires bucket name"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OUTPUT_FILE="$TEST_FILE_FOLDER/response.txt" ./tests/rest_scripts/get_bucket_policy.sh); then
log 2 "error attempting to get bucket policy response: $result"
return 1
fi
if [ "$result" != "404" ]; then
log 2 "GetBucketOwnershipControls returned unexpected response code: $result, reply: $(cat "$TEST_FILE_FOLDER/response.txt")"
return 1
fi
log 5 "response: $(cat "$TEST_FILE_FOLDER/response.txt")"
if ! bucket_name=$(xmllint --xpath '//*[local-name()="BucketName"]/text()' "$TEST_FILE_FOLDER/response.txt" 2>&1); then
log 2 "error getting bucket name: $bucket_name"
return 1
fi
if [ "$bucket_name" != "$1" ]; then
log 2 "rule mismatch (expected '$1', actual '$bucket_name')"
return 1
fi
return 0
}
get_and_compare_policy_with_file() {
if [ $# -ne 4 ]; then
log 2 "'get_and_compare_policies' requires bucket, username, password, filename"
return 1
fi
if ! get_bucket_policy_with_user "$1" "$2" "$3"; then
log 2 "error getting bucket policy"
return 1
fi
# shellcheck disable=SC2154
echo -n "$bucket_policy" > "$4-copy"
log 5 "ORIG: $(cat "$4")"
log 5 "COPY: $(cat "$4-copy")"
if ! compare_files "$4" "$4-copy"; then
log 2 "policies not equal"
return 1
fi
return 0
}
put_and_check_policy_rest() {
if [ $# -ne 6 ]; then
log 2 "'put_policy_rest' requires bucket name, policy file, effect, principal, action, resource"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" POLICY_FILE="$2" OUTPUT_FILE="$TEST_FILE_FOLDER/result.txt" ./tests/rest_scripts/put_bucket_policy.sh); then
log 2 "error putting policy: $result"
return 1
fi
log 5 "response code: $result"
if [[ ( "$result" != "204" ) && ( "$result" != "200" ) ]]; then
log 2 "unexpected response code, expected '200' or '204', actual '$result' (reply: $(cat "$TEST_FILE_FOLDER/result.txt"))"
return 1
fi
if ! result=$(COMMAND_LOG="$COMMAND_LOG" BUCKET_NAME="$1" OUTPUT_FILE="$TEST_FILE_FOLDER/policy.txt" ./tests/rest_scripts/get_bucket_policy.sh); then
log 2 "error attempting to get bucket policy response: $result"
return 1
fi
if [ "$result" != "200" ]; then
log 2 "unexpected response code, expected '200', actual '$result' (reply: $(cat "$TEST_FILE_FOLDER/policy.txt"))"
return 1
fi
log 5 "policy: $(cat "$TEST_FILE_FOLDER/policy.txt")"
if [ "$DIRECT" == "true" ]; then
principal="arn:aws:iam::$DIRECT_AWS_USER_ID:user/$4"
else
principal="$4"
fi
if ! check_policy "$(cat "$TEST_FILE_FOLDER/policy.txt")" "$3" "$principal" "$5" "$6"; then
log 2 "policies not equal"
return 1
fi
return 0
}
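A usage sketch (illustrative policy file and principal), assuming setup_policy_with_single_statement wrote the policy file beforehand:
run put_and_check_policy_rest "$BUCKET_ONE_NAME" "$TEST_FILE_FOLDER/policy_file.txt" "Allow" "$username" "s3:GetObject" "arn:aws:s3:::$BUCKET_ONE_NAME/*"
assert_success "error putting or verifying bucket policy via REST"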

View File

@@ -107,10 +107,10 @@ x-amz-date:$current_time
host;x-amz-content-sha256;x-amz-date
$3"
echo "canonical: $canonical_request"
log 5 "canonical: $canonical_request"
echo "TEST CREQ"
cat test.creq
log 5 "TEST CREQ"
log 5 "$(cat test.creq)"
}
generate_sts_string() {
@@ -148,6 +148,6 @@ $1
$ymd/us-west-2/s3/aws4_request
$creq_hash"
echo "TEST STS"
cat test.sts
log 5 "TEST STS"
log 5 "$(cat test.sts)"
}

View File

@@ -176,7 +176,7 @@ parse_object_tags_rest() {
check_tags_empty() {
if [[ $# -ne 1 ]]; then
echo "check tags empty requires command type"
log 2 "check tags empty requires command type"
return 1
fi
if [[ $1 == 'aws' ]]; then
@@ -185,12 +185,12 @@ check_tags_empty() {
fi
tag_set=$(echo "$tags" | jq '.TagSet')
if [[ $tag_set != "[]" ]]; then
echo "error: tags not empty: $tags"
log 2 "error: tags not empty: $tags"
return 1
fi
else
if [[ $tags != "" ]] && [[ $tags != *"No tags found"* ]]; then
echo "Error: tags not empty: $tags"
log 2 "Error: tags not empty: $tags"
return 1
fi
fi
@@ -199,11 +199,11 @@ check_tags_empty() {
check_object_tags_empty() {
if [[ $# -ne 3 ]]; then
echo "bucket tags empty check requires command type, bucket, and key"
log 2 "bucket tags empty check requires command type, bucket, and key"
return 2
fi
if ! get_object_tagging "$1" "$2" "$3"; then
echo "failed to get tags"
log 2 "failed to get tags"
return 2
fi
check_tags_empty "$1" || local check_result=$?
@@ -213,11 +213,11 @@ check_object_tags_empty() {
check_bucket_tags_empty() {
if [[ $# -ne 2 ]]; then
echo "bucket tags empty check requires command type, bucket"
log 2 "bucket tags empty check requires command type, bucket"
return 2
fi
if ! get_bucket_tagging "$1" "$2"; then
echo "failed to get tags"
log 2 "failed to get tags"
return 2
fi
check_tags_empty "$1" || local check_result=$?
@@ -227,23 +227,23 @@ check_bucket_tags_empty() {
get_and_verify_object_tags() {
if [[ $# -ne 5 ]]; then
echo "get and verify object tags missing command type, bucket, key, tag key, tag value"
log 2 "get and verify object tags missing command type, bucket, key, tag key, tag value"
return 1
fi
get_object_tagging "$1" "$2" "$3" || get_result=$?
if [[ $get_result -ne 0 ]]; then
echo "failed to get tags"
log 2 "failed to get tags"
return 1
fi
if [[ $1 == 'aws' ]]; then
tag_set_key=$(echo "$tags" | jq '.TagSet[0].Key')
tag_set_value=$(echo "$tags" | jq '.TagSet[0].Value')
if [[ $tag_set_key != '"'$4'"' ]]; then
echo "Key mismatch ($tag_set_key, \"$4\")"
log 2 "Key mismatch ($tag_set_key, \"$4\")"
return 1
fi
if [[ $tag_set_value != '"'$5'"' ]]; then
echo "Value mismatch ($tag_set_value, \"$5\")"
log 2 "Value mismatch ($tag_set_value, \"$5\")"
return 1
fi
else

View File

@@ -53,6 +53,30 @@ setup_user_direct() {
return 0
}
setup_user_versitygw_or_direct() {
if [ $# -ne 4 ]; then
# NOTE: bucket name is required for direct
log 2 "'setup_user_versitygw_or_direct' requires username, password, role, bucket name"
return 1
fi
if [ "$DIRECT" != "true" ]; then
if ! setup_user "$1" "$2" "$3"; then
log 2 "error setting up versitygw user"
return 1
fi
echo "$1"
echo "$2"
else
if ! setup_user_direct "$1" "$3" "$4"; then
log 2 "error setting up direct user"
return 1
fi
echo "$key_id"
echo "$secret_key"
fi
return 0
}
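A usage sketch, assuming the helper's only stdout is the two credential lines, which callers can capture with mapfile:
mapfile -t creds <<< "$(setup_user_versitygw_or_direct "$username" "$password" "user" "$BUCKET_ONE_NAME")"
# creds[0] holds the effective access key/username, creds[1] the secret key/password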
create_user_versitygw() {
log 6 "create_user_versitygw"
if [[ $# -ne 3 ]]; then
@@ -69,7 +93,7 @@ create_user_versitygw() {
create_user_if_nonexistent() {
log 6 "create_user_if_nonexistent"
if [[ $# -ne 3 ]]; then
echo "create user command requires user ID, key, and role"
log 2 "create user command requires user ID, key, and role"
return 1
fi
if user_exists "$1"; then
@@ -117,7 +141,7 @@ put_user_policy_userplus() {
]
}
EOF
if ! error=$(send_command aws iam put-user-policy --user-name "$1" --policy-name "UserPolicy" --policy-document "file://$TEST_FILE_FOLDER/user_policy_file" 2>&1); then
if ! error=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam put-user-policy --user-name "$1" --policy-name "UserPolicy" --policy-document "file://$TEST_FILE_FOLDER/user_policy_file" 2>&1); then
log 2 "error putting user policy: $error"
return 1
fi
@@ -154,7 +178,7 @@ create_user_direct() {
log 2 "create user direct command requires desired username, role, bucket name"
return 1
fi
if ! error=$(send_command aws iam create-user --user-name "$1" 2>&1); then
if ! error=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam create-user --user-name "$1" 2>&1); then
log 2 "error creating new user: $error"
return 1
fi
@@ -162,7 +186,7 @@ create_user_direct() {
log 2 "error attaching user policy"
return 1
fi
if ! keys=$(send_command aws iam create-access-key --user-name "$1" 2>&1); then
if ! keys=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam create-access-key --user-name "$1" 2>&1); then
log 2 "error creating keys for new user: $keys"
return 1
fi
@@ -193,7 +217,7 @@ create_user_with_user() {
list_users_direct() {
log 6 "list_users_direct"
# AWS_ENDPOINT_URL of s3.amazonaws.com doesn't work here
if ! users=$(send_command aws --profile="$AWS_PROFILE" iam list-users 2>&1); then
if ! users=$(send_command aws --profile="$AWS_PROFILE" --endpoint-url=https://iam.amazonaws.com iam list-users 2>&1); then
log 2 "error listing users via direct s3 call: $users"
return 1
fi
@@ -230,7 +254,7 @@ list_users_versitygw() {
log 6 "list_users_versitygw"
users=$(send_command "$VERSITY_EXE" admin --allow-insecure --access "$AWS_ACCESS_KEY_ID" --secret "$AWS_SECRET_ACCESS_KEY" --endpoint-url "$AWS_ENDPOINT_URL" list-users) || local list_result=$?
if [[ $list_result -ne 0 ]]; then
echo "error listing users: $users"
log 2 "error listing users: $users"
return 1
fi
parsed_users=()
@@ -266,17 +290,17 @@ delete_user_direct() {
log 2 "delete user direct command requires username"
return 1
fi
if ! policies=$(send_command aws iam list-user-policies --user-name "$1" --query 'PolicyNames' --output text 2>&1); then
if ! policies=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam list-user-policies --user-name "$1" --query 'PolicyNames' --output text 2>&1); then
log 2 "error getting user policies: $error"
return 1
fi
for policy_name in $policies; do
if ! user_policy_delete_error=$(send_command aws iam delete-user-policy --user-name "$1" --policy-name "$policy_name" 2>&1); then
if ! user_policy_delete_error=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam delete-user-policy --user-name "$1" --policy-name "$policy_name" 2>&1); then
log 2 "error deleting user policy: $user_policy_delete_error"
return 1
fi
done
if ! keys=$(send_command aws iam list-access-keys --user-name "$1" 2>&1); then
if ! keys=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam list-access-keys --user-name "$1" 2>&1); then
log 2 "error getting keys: $keys"
return 1
fi
@@ -285,12 +309,12 @@ delete_user_direct() {
return 1
fi
if [[ $key != "null" ]]; then
if ! error=$(send_command aws iam delete-access-key --user-name "$1" --access-key-id "$key" 2>&1); then
if ! error=$(send_command aws --endpoint-url=https://iam.amazonaws.com iam delete-access-key --user-name "$1" --access-key-id "$key" 2>&1); then
log 2 "error deleting access key: $error"
return 1
fi
fi
if ! error=$(send_command aws --profile="$AWS_PROFILE" iam delete-user --user-name "$1" 2>&1); then
if ! error=$(send_command aws --endpoint-url=https://iam.amazonaws.com --profile="$AWS_PROFILE" iam delete-user --user-name "$1" 2>&1); then
log 2 "error deleting user: $error"
return 1
fi
@@ -335,7 +359,7 @@ delete_user() {
change_bucket_owner_direct() {
log 6 "change_bucket_owner_direct"
if [[ $# -ne 4 ]]; then
echo "change bucket owner command requires ID, key, bucket name, and new owner"
log 2 "change bucket owner command requires ID, key, bucket name, and new owner"
return 1
fi
# TODO add
@@ -356,7 +380,7 @@ reset_bucket_owner() {
change_bucket_owner() {
log 6 "change_bucket_owner"
if [[ $# -ne 4 ]]; then
echo "change bucket owner command requires ID, key, bucket name, and new owner"
log 2 "change bucket owner command requires ID, key, bucket name, and new owner"
return 1
fi
if [[ $DIRECT == "true" ]]; then
@@ -369,7 +393,7 @@ change_bucket_owner() {
log 5 "changing owner for bucket $3, new owner: $4"
error=$(send_command "$VERSITY_EXE" admin --allow-insecure --access "$1" --secret "$2" --endpoint-url "$AWS_ENDPOINT_URL" change-bucket-owner --bucket "$3" --owner "$4" 2>&1) || local change_result=$?
if [[ $change_result -ne 0 ]]; then
echo "error changing bucket owner: $error"
log 2 "error changing bucket owner: $error"
return 1
fi
return 0

View File

@@ -49,23 +49,35 @@ delete_old_versions() {
log 5 "version keys: ${version_keys[*]}"
log 5 "version IDs: ${version_ids[*]}"
for idx in "${!version_keys[@]}"; do
log 5 "idx: $idx"
log 5 "version ID: ${version_ids[$idx]}"
# shellcheck disable=SC2154
if [ "$lock_config_exists" == "true" ]; then
if ! delete_object_version_bypass_retention "$1" "${version_keys[$idx]}" "${version_ids[$idx]}"; then
log 2 "error deleting object version"
return 1
fi
else
if ! delete_object_version "$1" "${version_keys[$idx]}" "${version_ids[$idx]}"; then
log 2 "error deleting object version"
return 1
fi
if ! delete_object_version_with_or_without_retention "$1"; then
log 2 "error deleting version with or without retention"
return 1
fi
done
}
delete_object_version_with_or_without_retention() {
if [ $# -ne 1 ]; then
log 2 "'delete_object_version_with_or_without_retention' requires bucket name"
return 1
fi
log 5 "idx: $idx"
log 5 "version ID: ${version_ids[$idx]}"
# shellcheck disable=SC2154
if [ "$lock_config_exists" == "true" ]; then
if ! delete_object_version_bypass_retention "$1" "${version_keys[$idx]}" "${version_ids[$idx]}"; then
log 2 "error deleting object version"
return 1
fi
else
if ! delete_object_version "$1" "${version_keys[$idx]}" "${version_ids[$idx]}"; then
log 2 "error deleting object version"
return 1
fi
fi
return 0
}
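A usage sketch mirroring delete_old_versions above; note the helper reads idx, version_keys, version_ids, and lock_config_exists from the caller's scope:
for idx in "${!version_keys[@]}"; do
  delete_object_version_with_or_without_retention "$BUCKET_ONE_NAME" || fail "error deleting version for key ${version_keys[$idx]}"
done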
parse_version_data() {
if [ $# -ne 2 ]; then
log 2 "'parse_version_data' requires raw data, element name"

View File

@@ -14,7 +14,7 @@
# specific language governing permissions and limitations
# under the License.
source ./tests/util_file.sh
source ./tests/util/util_file.sh
start_versity_process() {
if [[ $# -ne 1 ]]; then