
tests: fix and enable s3 snapshot test (#3720)

* test: fix s3 snapshot test

* ci: configure s3 regression test

* tests: only run s3 snapshot test if bucket not empty
Authored by Andy Dunstall on 2024-09-17 15:35:53 +01:00; committed by GitHub
parent 8a34b3e730
commit a64fc74ce1
3 changed files with 24 additions and 5 deletions
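
The third commit-message bullet corresponds to the tightened skip condition in the test diff further down: the CI action below always exports DRAGONFLY_S3_BUCKET, and it expands to an empty string when the corresponding secret is unavailable, so checking only that the variable exists is not enough. A minimal sketch of the same gating, using a hypothetical helper name (not part of the repository):

import os

def s3_bucket_configured() -> bool:
    # Mirrors the skipif condition in the test diff below:
    # the bucket variable must be present *and* non-empty.
    return os.environ.get("DRAGONFLY_S3_BUCKET", "") != ""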


@@ -19,6 +19,15 @@ inputs:
   filter:
     required: false
     type: string
+  aws-access-key-id:
+    required: true
+    type: string
+  aws-secret-access-key:
+    required: true
+    type: string
+  s3-bucket:
+    required: true
+    type: string
 runs:
   using: "composite"
@@ -56,6 +65,12 @@ runs:
         if [[ $code -ne 0 ]]; then
           exit 1
         fi
+      env:
+        # Add environment variables to enable the S3 snapshot test.
+        DRAGONFLY_S3_BUCKET: ${{ inputs.s3-bucket }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
+        AWS_REGION: us-east-1
     - name: Send notification on failure
       if: failure() && github.ref == 'refs/heads/main'
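
The variables exported here are the standard AWS credential variables, which the S3 clients involved (Dragonfly's snapshot storage and the test suite's cleanup helpers) should pick up straight from the process environment, so no extra credential configuration is needed in the test itself. A small illustrative pre-flight check, assuming boto3 is installed (check_s3_access is a hypothetical helper, not part of the repository):

import os
import boto3

def check_s3_access() -> None:
    # Assumes boto3 reads AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and
    # AWS_REGION from the environment, exactly as exported by the step above.
    bucket = os.environ["DRAGONFLY_S3_BUCKET"]
    boto3.client("s3").head_bucket(Bucket=bucket)  # raises if the bucket is unreachable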


@@ -45,6 +45,9 @@ jobs:
           gspace-secret: ${{ secrets.GSPACES_BOT_DF_BUILD }}
           build-folder-name: build
           filter: ${{ matrix.build-type == 'Release' && 'not dbg_only' || 'not opt_only' }}
+          aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY }}
+          aws-secret-access-key: ${{ secrets.AWS_S3_ACCESS_SECRET }}
+          s3-bucket: ${{ secrets.S3_REGTEST_BUCKET }}
       - name: Upload logs on failure
         if: failure()


@@ -309,14 +309,15 @@ async def test_info_persistence_field(async_client):
 # If DRAGONFLY_S3_BUCKET is configured, AWS credentials must also be
 # configured.
 @pytest.mark.skipif(
-    "DRAGONFLY_S3_BUCKET" not in os.environ, reason="AWS S3 snapshots bucket is not configured"
+    "DRAGONFLY_S3_BUCKET" not in os.environ or os.environ["DRAGONFLY_S3_BUCKET"] == "",
+    reason="AWS S3 snapshots bucket is not configured",
 )
 @dfly_args({**BASIC_ARGS, "dir": "s3://{DRAGONFLY_S3_BUCKET}{DRAGONFLY_TMP}", "dbfilename": ""})
-async def test_s3_snapshot(self, async_client):
+async def test_s3_snapshot(async_client, tmp_dir):
     seeder = StaticSeeder(key_target=10_000)
     await seeder.run(async_client)
-    start_capture = await StaticSeeder.capture()
+    start_capture = await StaticSeeder.capture(async_client)

     try:
         # save + flush + load
@@ -325,7 +326,7 @@ async def test_s3_snapshot(self, async_client):
         await async_client.execute_command(
             "DFLY LOAD "
             + os.environ["DRAGONFLY_S3_BUCKET"]
-            + str(self.tmp_dir)
+            + str(tmp_dir)
             + "/snapshot-summary.dfs"
         )
@@ -349,7 +350,7 @@ async def test_s3_snapshot(self, async_client):
         delete_objects(
             os.environ["DRAGONFLY_S3_BUCKET"],
-            str(self.tmp_dir)[1:],
+            str(tmp_dir)[1:],
         )
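
delete_objects is a test-suite helper that is not part of this diff, so its implementation is assumed here. A minimal sketch of such a cleanup routine, assuming boto3, which removes every object under the snapshot prefix (the test strips the leading "/" from tmp_dir to form the S3 key prefix):

import boto3

def delete_objects(bucket: str, prefix: str) -> None:
    # Sketch only: the real helper lives in the test utilities.
    # Deletes all objects whose keys start with the given prefix.
    boto3.resource("s3").Bucket(bucket).objects.filter(Prefix=prefix).delete()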