"""
Pytest fixtures provided to all tests without explicit imports.
"""

import logging
import os
import random
import sys
from pathlib import Path
from tempfile import TemporaryDirectory
from time import sleep

import pymemcache
import pytest
import pytest_asyncio
import redis
from redis import asyncio as aioredis

from . import DflyInstance, DflyInstanceFactory, DflyParams, PortPicker, dfly_args
from .utility import DflySeederFactory

logging.getLogger('asyncio').setLevel(logging.WARNING)
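
# DATABASE_INDEX selects a non-default logical database (db 1 rather than db 0);
# in this file it is referenced only by the async_pool fixture below.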
DATABASE_INDEX = 1


@pytest.fixture(scope="session")
def tmp_dir():
    """
    Pytest fixture to provide the test temporary directory for the session
    where the Dragonfly executable will be run and where all test data
    should be stored. The directory will be cleaned up at the end of a session
    """
    tmp = TemporaryDirectory()
    yield Path(tmp.name)
    tmp.cleanup()


@pytest.fixture(scope="session")
def test_env(tmp_dir: Path):
    """
    Provide the environment the Dragonfly executable is running in as a
    python dictionary
    """
    env = os.environ.copy()
    env["DRAGONFLY_TMP"] = str(tmp_dir)
    return env


@pytest.fixture(scope="session", params=[{}])
def df_seeder_factory(request) -> DflySeederFactory:
    seed = request.config.getoption("--rand-seed")
    if seed is None:
        seed = random.randrange(sys.maxsize)

    random.seed(int(seed))
    print(f"--- Random seed: {seed}, check: {random.randrange(100)} ---")

    return DflySeederFactory(request.config.getoption("--log-seeder"))
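
# When --rand-seed is not supplied, df_seeder_factory generates a seed and prints
# it, so a failing run can be reproduced by passing that value back via --rand-seed.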


@pytest.fixture(scope="session", params=[{}])
def df_factory(request, tmp_dir, test_env) -> DflyInstanceFactory:
    """
    Create an instance factory with supplied params.
    """
    scripts_dir = os.path.dirname(os.path.abspath(__file__))
    path = os.environ.get("DRAGONFLY_PATH", os.path.join(
        scripts_dir, '../../build-dbg/dragonfly'))

    args = request.param if request.param else {}
    existing = request.config.getoption("--existing-port")
    existing_mc = request.config.getoption("--existing-mc-port")
    params = DflyParams(
        path=path,
        cwd=tmp_dir,
        gdb=request.config.getoption("--gdb"),
        args=request.config.getoption("--df"),
        existing_port=int(existing) if existing else None,
        existing_mc_port=int(existing_mc) if existing_mc else None,
        env=test_env
    )

    factory = DflyInstanceFactory(params, args)
    yield factory
    factory.stop_all()
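
# The Dragonfly binary is taken from the DRAGONFLY_PATH environment variable when
# set; otherwise ../../build-dbg/dragonfly relative to this file is used.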


@pytest.fixture(scope="function")
def df_local_factory(df_factory: DflyInstanceFactory):
    factory = DflyInstanceFactory(df_factory.params, df_factory.args)
    yield factory
    factory.stop_all()
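
# df_local_factory shares the session factory's params and args but is function
# scoped, so the instances it creates are stopped at the end of each test.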


@pytest.fixture(scope="session")
def df_server(df_factory: DflyInstanceFactory) -> DflyInstance:
    """
    Start the default Dragonfly server that will be used for the default pools
    and clients.
    """
    instance = df_factory.create()
    instance.start()

    yield instance

    # At session teardown, verify that no test left a client connected: connect
    # under the name "mgr", wait briefly, and list the remaining clients.
    clients_left = None
    try:
        client = redis.Redis(port=instance.port)
        client.client_setname("mgr")
        sleep(0.1)
        clients_left = [x for x in client.client_list() if x["name"] != "mgr"]
    except Exception as e:
        print(e, file=sys.stderr)

    instance.stop()

    # TODO: Investigate spurious open connection with cluster client
    if not instance['cluster_mode']:
        assert clients_left == []
    else:
        print("Cluster clients left: ", len(clients_left))


@pytest.fixture(scope="class")
def connection(df_server: DflyInstance):
    return redis.Connection(port=df_server.port)


# @pytest.fixture(scope="class")
# def sync_pool(df_server: DflyInstance):
#     pool = redis.ConnectionPool(decode_responses=True, port=df_server.port)
#     yield pool
#     pool.disconnect()


# @pytest.fixture(scope="class")
# def client(sync_pool):
#     """
#     Return a client to the default instance with all entries flushed.
#     """
#     client = redis.Redis(connection_pool=sync_pool)
#     client.flushall()
#     return client


@pytest.fixture(scope="function")
def cluster_client(df_server):
    """
    Return a cluster client to the default instance with all entries flushed.
    """
    client = redis.RedisCluster(decode_responses=True, host="localhost",
                                port=df_server.port)
    client.client_setname("default-cluster-fixture")
    client.flushall()

    yield client
    client.disconnect_connection_pools()


@pytest_asyncio.fixture(scope="function")
async def async_pool(df_server: DflyInstance):
    pool = aioredis.ConnectionPool(host="localhost", port=df_server.port,
                                   db=DATABASE_INDEX, decode_responses=True,
                                   max_connections=32)
    yield pool
    await pool.disconnect(inuse_connections=True)


@pytest_asyncio.fixture(scope="function")
async def async_client(async_pool):
    """
    Return an async client to the default instance with all entries flushed.
    """
    client = aioredis.Redis(connection_pool=async_pool)
    await client.client_setname("default-async-fixture")
    await client.flushall()
    yield client
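
# An async test using these fixtures could look like the following (illustrative
# sketch; assumes pytest-asyncio collects the coroutine, e.g. via asyncio_mode=auto
# or an explicit @pytest.mark.asyncio marker):
#
#   async def test_ping(async_client: aioredis.Redis):
#       assert await async_client.ping()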


def pytest_addoption(parser):
    """
    Custom pytest options:
    --gdb - start all instances inside gdb
    --df arg - pass arg to all instances, can be used multiple times
    --log-seeder file - log commands of the last seeder run to file
    --existing-port - provide a port to an existing process instead of starting a new instance
    --existing-mc-port - provide a port to an existing memcached process instead of starting a new instance
    --rand-seed - set the global random seed
    """
    parser.addoption(
        '--gdb', action='store_true', default=False, help='Run instances in gdb'
    )
    parser.addoption(
        '--df', action='append', default=[], help='Add arguments to dragonfly'
    )
    parser.addoption(
        '--log-seeder', action='store', default=None, help='Store last generator commands in file'
    )
    parser.addoption(
        '--rand-seed', action='store', default=None, help='Set seed for global random. Makes seeder predictable'
    )
    parser.addoption(
        '--existing-port', action='store', default=None, help='Provide a port to the existing process for the test'
    )
    parser.addoption(
        '--existing-mc-port', action='store', default=None, help='Provide a port to the existing memcached process for the test'
    )
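
# Example invocation (paths and flag values are illustrative):
#   pytest dragonfly -v --df <dragonfly-flag> --rand-seed 42 --log-seeder /tmp/seeder.log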


@pytest.fixture(scope="session")
def port_picker():
    yield PortPicker()


@pytest.fixture(scope="class")
def memcached_connection(df_server: DflyInstance):
    return pymemcache.Client(f"localhost:{df_server.mc_port}")