Mirror of https://github.com/dragonflydb/dragonfly.git, synced 2024-12-14 11:58:02 +00:00
fix (pytest): generate unique random dbfilename for tests (#3317)
Signed-off-by: adi_holden <adi@dragonflydb.io>
commit f20318d88a (parent bf1b6cef6e)
3 changed files with 13 additions and 14 deletions
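Summary of the change: the replication tests previously built a random dump-file name inline with random.choices, and the snapshot tests used f"test-consistency{rand(0, 5000)}"; this commit moves the name generation into a shared tmp_file_name() helper in the tests' utility module and makes every test derive its dbfilename from it, so tests are far less likely to reuse the same dump file name.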
@@ -1210,8 +1210,9 @@ async def test_take_over_counters(df_factory, master_threads, replica_threads):
 async def test_take_over_seeder(
     request, df_factory, df_seeder_factory, master_threads, replica_threads
 ):
-    tmp_file_name = "".join(random.choices(string.ascii_letters, k=10))
-    master = df_factory.create(proactor_threads=master_threads, dbfilename=f"dump_{tmp_file_name}")
+    master = df_factory.create(
+        proactor_threads=master_threads, dbfilename=f"dump_{tmp_file_name()}"
+    )
     replica = df_factory.create(proactor_threads=replica_threads)
     df_factory.start_all([master, replica])

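One detail worth noting across these hunks: in the old code tmp_file_name was a local string variable, so interpolating it directly was correct; after this change it is a function imported from the test utilities, so the parentheses in f"dump_{tmp_file_name()}" are required. A minimal illustration with a hypothetical stub (not code from the commit):

def tmp_file_name():
    return "abcdefghij"  # stand-in for the real random name

print(f"dump_{tmp_file_name}")    # dump_<function tmp_file_name at 0x...> -- interpolates the function object
print(f"dump_{tmp_file_name()}")  # dump_abcdefghij -- interpolates the returned name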
@@ -1884,9 +1885,8 @@ async def test_client_pause_with_replica(df_factory, df_seeder_factory):


 async def test_replicaof_reject_on_load(df_factory, df_seeder_factory):
-    tmp_file_name = "".join(random.choices(string.ascii_letters, k=10))
     master = df_factory.create()
-    replica = df_factory.create(dbfilename=f"dump_{tmp_file_name}")
+    replica = df_factory.create(dbfilename=f"dump_{tmp_file_name()}")
     df_factory.start_all([master, replica])

     seeder = SeederV2(key_target=40000)
@@ -2052,10 +2052,8 @@ async def test_journal_doesnt_yield_issue_2500(df_factory, df_seeder_factory):

 @pytest.mark.asyncio
 async def test_saving_replica(df_factory):
-    tmp_file_name = "".join(random.choices(string.ascii_letters, k=10))
-
     master = df_factory.create(proactor_threads=1)
-    replica = df_factory.create(proactor_threads=1, dbfilename=f"dump_{tmp_file_name}")
+    replica = df_factory.create(proactor_threads=1, dbfilename=f"dump_{tmp_file_name()}")
     df_factory.start_all([master, replica])

     c_master = master.client()
@@ -2084,10 +2082,8 @@ async def test_saving_replica(df_factory):

 @pytest.mark.asyncio
 async def test_start_replicating_while_save(df_factory):
-    tmp_file_name = "".join(random.choices(string.ascii_letters, k=10))
-
     master = df_factory.create(proactor_threads=4)
-    replica = df_factory.create(proactor_threads=4, dbfilename=f"dump_{tmp_file_name}")
+    replica = df_factory.create(proactor_threads=4, dbfilename=f"dump_{tmp_file_name()}")
     df_factory.start_all([master, replica])

     c_master = master.client()
@@ -12,7 +12,7 @@ from .instance import RedisServer
 from random import randint as rand

 from . import dfly_args
-from .utility import wait_available_async, chunked, is_saving
+from .utility import wait_available_async, is_saving, tmp_file_name

 from .seeder import StaticSeeder
@@ -43,7 +43,7 @@ async def test_consistency(df_factory, format: str, seeder_opts: dict):
     """
     Test consistency over a large variety of data with different sizes
     """
-    dbfilename = f"test-consistency{rand(0, 5000)}"
+    dbfilename = f"dump_{tmp_file_name()}"
     instance = df_factory.create(dbfilename=dbfilename)
     instance.start()
     async_client = instance.client()
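For a rough sense of why the snapshot tests switch from rand(0, 5000) to a 10-letter random name, here is a back-of-the-envelope comparison of how many distinct dump-file names each scheme can produce (illustrative arithmetic only, not part of the commit):

old_space = 5001      # f"test-consistency{rand(0, 5000)}": randint is inclusive, so 5001 possible names
new_space = 52 ** 10  # 10 random ASCII letters (upper + lower case): about 1.4e17 possible names
print(old_space, new_space)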
@@ -376,8 +376,7 @@ class TestDflySnapshotOnShutdown:
 @pytest.mark.parametrize("format", FILE_FORMATS)
 @dfly_args({**BASIC_ARGS, "dbfilename": "info-while-snapshot"})
 async def test_infomemory_while_snapshoting(df_factory, format: str):
-    dbfilename = f"test-consistency{rand(0, 5000)}"
-    instance = df_factory.create(dbfilename=dbfilename)
+    instance = df_factory.create(dbfilename=f"dump_{tmp_file_name()}")
     instance.start()
     async_client = instance.client()
     await async_client.execute_command("DEBUG POPULATE 10000 key 4048 RAND")
@@ -15,6 +15,10 @@ import os
 from enum import Enum


+def tmp_file_name():
+    return "".join(random.choices(string.ascii_letters, k=10))
+
+
 def chunked(n, iterable):
     """Transform iterable into iterator of chunks of size n"""
     it = iter(iterable)
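For reference, a self-contained sketch of the new helper and the way call sites use it after this change; the random and string imports are assumed to already be present in the utility module (they are not visible in this hunk):

import random
import string


def tmp_file_name():
    # 10 random ASCII letters, e.g. "XqGhRmTzLk"
    return "".join(random.choices(string.ascii_letters, k=10))


# Call sites build the snapshot file name like this:
dbfilename = f"dump_{tmp_file_name()}"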