2022-08-26 10:54:38 +00:00
|
|
|
"""
|
|
|
|
Pytest fixtures to be provided for all tests without import
|
|
|
|
"""
|
|
|
|
|
2023-05-18 12:22:21 +00:00
|
|
|
import logging
|
2022-08-26 10:54:38 +00:00
|
|
|
import os
|
2022-10-31 14:39:20 +00:00
|
|
|
import sys
|
2023-03-24 15:22:14 +00:00
|
|
|
from time import sleep
|
2023-09-28 07:11:11 +00:00
|
|
|
from typing import Dict, List, Union
|
2023-05-13 07:44:25 +00:00
|
|
|
from redis import asyncio as aioredis
|
2022-08-26 10:54:38 +00:00
|
|
|
import pytest
|
2022-10-31 14:39:20 +00:00
|
|
|
import pytest_asyncio
|
2022-08-26 10:54:38 +00:00
|
|
|
import redis
|
2023-05-18 12:22:21 +00:00
|
|
|
import pymemcache
|
2023-04-05 13:22:47 +00:00
|
|
|
import random
|
2023-07-06 11:04:45 +00:00
|
|
|
import subprocess
|
2023-07-11 05:28:18 +00:00
|
|
|
from copy import deepcopy
|
2022-10-26 11:12:49 +00:00
|
|
|
|
2022-10-31 14:39:20 +00:00
|
|
|
from pathlib import Path
|
|
|
|
from tempfile import TemporaryDirectory
|
2022-08-26 10:54:38 +00:00
|
|
|
|
2023-09-28 07:11:11 +00:00
|
|
|
from .instance import DflyInstance, DflyParams, DflyInstanceFactory
|
|
|
|
from . import PortPicker, dfly_args
|
2023-10-23 16:35:39 +00:00
|
|
|
from .utility import DflySeederFactory, gen_ca_cert, gen_certificate
|
2022-08-26 10:54:38 +00:00
|
|
|
|
2023-07-17 10:13:12 +00:00
|
|
|
# Quiet asyncio's own logger so test output is not flooded with its
# INFO/DEBUG messages.
logging.getLogger("asyncio").setLevel(logging.WARNING)

# Logical Redis database used by the async client fixtures (see async_pool);
# non-zero, presumably to catch accidental reliance on the default db 0.
DATABASE_INDEX = 1
|
2022-08-26 10:54:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def tmp_dir():
    """
    Provide the session-wide temporary directory.

    The Dragonfly executable runs from this directory and all test data is
    stored under it; the directory is removed when the session ends.
    """
    with TemporaryDirectory() as tmp_name:
        yield Path(tmp_name)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def test_env(tmp_dir: Path):
    """
    Provide the environment the Dragonfly executable runs in, as a plain
    dictionary (a copy of the current process environment plus DRAGONFLY_TMP).
    """
    environment = dict(os.environ)
    environment["DRAGONFLY_TMP"] = str(tmp_dir)
    return environment
|
|
|
|
|
2023-03-17 06:30:17 +00:00
|
|
|
|
2023-01-09 20:31:15 +00:00
|
|
|
@pytest.fixture(scope="session", params=[{}])
def df_seeder_factory(request) -> DflySeederFactory:
    """
    Session factory for seeders; also seeds the global RNG exactly once.

    The seed is taken from --rand-seed when given, otherwise a random one is
    chosen. It is printed so a failing run can be reproduced.
    """
    if (seed := request.config.getoption("--rand-seed")) is None:
        seed = random.randrange(sys.maxsize)

    random.seed(int(seed))
    # Echo the seed plus a sample draw so the RNG state is visible in logs.
    print(f"--- Random seed: {seed}, check: {random.randrange(100)} ---")

    return DflySeederFactory(request.config.getoption("--log-seeder"))
|
2022-11-28 14:28:14 +00:00
|
|
|
|
2023-03-17 06:30:17 +00:00
|
|
|
|
2023-09-28 07:11:11 +00:00
|
|
|
def parse_args(args: List[str]) -> Dict[str, Union[str, None]]:
    """
    Parse command-line style ``name=value`` tokens into a dictionary.

    Each element of *args* is split on its first ``=``: ``"a=b=c"`` maps to
    ``{"a": "b=c"}`` and ``"a="`` maps to ``{"a": ""}``. Tokens without ``=``
    map to ``None``. Later duplicates overwrite earlier ones.
    """
    args_dict: Dict[str, Union[str, None]] = {}
    for arg in args:
        # str.partition splits on the first "=" and tells us (via the
        # separator) whether one was present at all.
        name, sep, value = arg.partition("=")
        args_dict[name] = value if sep else None
    return args_dict
|
|
|
|
|
|
|
|
|
2022-10-31 14:39:20 +00:00
|
|
|
@pytest.fixture(scope="session", params=[{}])
def df_factory(request, tmp_dir, test_env) -> DflyInstanceFactory:
    """
    Create an instance factory with supplied params.

    The Dragonfly binary path defaults to the debug build next to this
    package but can be overridden with the DRAGONFLY_PATH environment
    variable. All instances started by the factory are stopped on teardown.
    """
    scripts_dir = os.path.dirname(os.path.abspath(__file__))
    path = os.environ.get("DRAGONFLY_PATH", os.path.join(scripts_dir, "../../build-dbg/dragonfly"))

    def optional_port(option_name):
        # --existing-* options arrive as strings; convert only when supplied.
        raw = request.config.getoption(option_name)
        return int(raw) if raw else None

    params = DflyParams(
        path=path,
        cwd=tmp_dir,
        gdb=request.config.getoption("--gdb"),
        buffered_out=request.config.getoption("--buffered-output"),
        args=parse_args(request.config.getoption("--df")),
        existing_port=optional_port("--existing-port"),
        existing_admin_port=optional_port("--existing-admin-port"),
        existing_mc_port=optional_port("--existing-mc-port"),
        env=test_env,
    )

    factory = DflyInstanceFactory(params, request.param if request.param else {})
    yield factory
    factory.stop_all()
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="function")
def df_local_factory(df_factory: DflyInstanceFactory):
    """
    Function-scoped clone of the session factory, so each test can spawn
    instances that are torn down right after it finishes.
    """
    local_factory = DflyInstanceFactory(df_factory.params, df_factory.args)
    yield local_factory
    local_factory.stop_all()
|
2022-08-26 10:54:38 +00:00
|
|
|
|
|
|
|
|
2022-10-31 14:39:20 +00:00
|
|
|
@pytest.fixture(scope="session")
def df_server(df_factory: DflyInstanceFactory) -> DflyInstance:
    """
    Start the default Dragonfly server that will be used for the default pools
    and clients.

    On teardown the server's client list is sampled (to detect connections
    that tests left open) before the instance is stopped.
    """
    instance = df_factory.create()
    instance.start()

    yield instance

    # Sample which client connections tests left open. Stays None if the
    # inspection itself fails (e.g. the server is already unreachable).
    clients_left = None
    try:
        client = redis.Redis(port=instance.port)
        client.client_setname("mgr")
        # Give recently closed clients a moment to disappear from the
        # server-side client list before sampling it.
        sleep(0.1)
        clients_left = [x for x in client.client_list() if x["name"] != "mgr"]
    except Exception as e:
        print(e, file=sys.stderr)

    instance.stop()

    # TODO: Investigate spurious open connection with cluster client
    # if not instance['cluster_mode']:
    # TODO: Investigate adding fine grain control over the pool by
    # by adding a cache ontop of the clients connection pool and then evict
    # properly with client.connection_pool.disconnect() avoiding non synced
    # side effects
    # assert clients_left == []
    # else:
    # print("Cluster clients left: ", len(clients_left))

    if instance["cluster_mode"]:
        # clients_left is None when the inspection above failed; guard so
        # len(None) cannot raise during teardown.
        print("Cluster clients left: ", len(clients_left or []))
|
2022-08-26 10:54:38 +00:00
|
|
|
|
2022-10-26 11:12:49 +00:00
|
|
|
|
2022-10-31 14:39:20 +00:00
|
|
|
@pytest.fixture(scope="class")
def connection(df_server: DflyInstance):
    """Return a raw (un-pooled) redis-py connection to the default server."""
    server_port = df_server.port
    return redis.Connection(port=server_port)
|
2022-08-26 10:54:38 +00:00
|
|
|
|
2022-10-26 11:12:49 +00:00
|
|
|
|
2023-03-24 15:22:14 +00:00
|
|
|
# @pytest.fixture(scope="class")
|
|
|
|
# def sync_pool(df_server: DflyInstance):
|
|
|
|
# pool = redis.ConnectionPool(decode_responses=True, port=df_server.port)
|
|
|
|
# yield pool
|
|
|
|
# pool.disconnect()
|
2022-08-26 10:54:38 +00:00
|
|
|
|
2022-10-26 11:12:49 +00:00
|
|
|
|
2023-03-24 15:22:14 +00:00
|
|
|
# @pytest.fixture(scope="class")
|
|
|
|
# def client(sync_pool):
|
|
|
|
# """
|
|
|
|
# Return a client to the default instance with all entries flushed.
|
|
|
|
# """
|
|
|
|
# client = redis.Redis(connection_pool=sync_pool)
|
|
|
|
# client.flushall()
|
|
|
|
# return client
|
2022-10-26 11:12:49 +00:00
|
|
|
|
|
|
|
|
2023-03-01 06:43:40 +00:00
|
|
|
@pytest.fixture(scope="function")
def cluster_client(df_server):
    """
    Return a cluster client to the default instance with all entries flushed.
    """
    cluster = redis.RedisCluster(decode_responses=True, host="localhost", port=df_server.port)
    cluster.client_setname("default-cluster-fixture")
    cluster.flushall()

    yield cluster
    # Drop every node's connection pool so no sockets leak across tests.
    cluster.disconnect_connection_pools()
|
|
|
|
|
|
|
|
|
2022-10-31 14:39:20 +00:00
|
|
|
@pytest_asyncio.fixture(scope="function")
async def async_pool(df_server: DflyInstance):
    """Async connection pool to the default server, using DATABASE_INDEX."""
    pool_kwargs = dict(
        host="localhost",
        port=df_server.port,
        db=DATABASE_INDEX,
        decode_responses=True,
        max_connections=32,
    )
    pool = aioredis.ConnectionPool(**pool_kwargs)
    yield pool
    # Force-close connections still checked out by the test as well.
    await pool.disconnect(inuse_connections=True)
|
2022-10-26 11:12:49 +00:00
|
|
|
|
2023-07-17 10:13:12 +00:00
|
|
|
|
2022-10-31 14:39:20 +00:00
|
|
|
@pytest_asyncio.fixture(scope="function")
async def async_client(async_pool):
    """
    Return an async client to the default instance with all entries flushed.
    """
    redis_client = aioredis.Redis(connection_pool=async_pool)
    await redis_client.client_setname("default-async-fixture")
    await redis_client.flushall()
    yield redis_client
|
2023-01-09 20:31:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
def pytest_addoption(parser):
    """Register the Dragonfly-specific command line options with pytest."""
    parser.addoption("--gdb", action="store_true", default=False, help="Run instances in gdb")
    parser.addoption("--df", action="append", default=[], help="Add arguments to dragonfly")
    parser.addoption(
        "--buffered-output",
        action="store_true",
        default=False,
        help="Makes instance output buffered, grouping it together",
    )

    # The remaining options are all plain string-valued flags that default
    # to None, so they are registered table-driven.
    store_options = (
        ("--log-seeder", "Store last generator commands in file"),
        ("--rand-seed", "Set seed for global random. Makes seeder predictable"),
        ("--existing-port", "Provide a port to the existing process for the test"),
        ("--existing-admin-port", "Provide an admin port to the existing process for the test"),
        ("--existing-mc-port", "Provide a port to the existing memcached process for the test"),
    )
    for option_name, help_text in store_options:
        parser.addoption(option_name, action="store", default=None, help=help_text)
|
|
|
|
|
2023-04-12 16:14:29 +00:00
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def port_picker():
    """Session-wide allocator of free ports for ad-hoc test servers."""
    picker = PortPicker()
    yield picker
|
2023-05-18 12:22:21 +00:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="class")
def memcached_connection(df_server: DflyInstance):
    """Return a memcached client bound to the default server's mc port."""
    address = f"localhost:{df_server.mc_port}"
    return pymemcache.Client(address)
|
2023-07-06 11:04:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def with_tls_ca_cert_args(tmp_dir):
    """
    Generate a CA key/certificate pair under tmp_dir and return their paths
    as a dict with keys "ca_key" and "ca_cert".
    """
    ca_paths = {
        "ca_key": os.path.join(tmp_dir, "ca-key.pem"),
        "ca_cert": os.path.join(tmp_dir, "ca-cert.pem"),
    }
    gen_ca_cert(ca_paths["ca_key"], ca_paths["ca_cert"])
    return ca_paths
|
2023-07-06 11:04:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def with_tls_server_args(tmp_dir, with_tls_ca_cert_args):
    """
    Generate a server key/certificate signed by the session CA and return the
    Dragonfly TLS flags pointing at them.
    """
    server_key = os.path.join(tmp_dir, "df-key.pem")
    server_req = os.path.join(tmp_dir, "df-req.pem")
    server_cert = os.path.join(tmp_dir, "df-cert.pem")

    gen_certificate(
        with_tls_ca_cert_args["ca_key"],
        with_tls_ca_cert_args["ca_cert"],
        server_req,
        server_key,
        server_cert,
    )

    # "tls" is a value-less flag, hence None.
    return {"tls": None, "tls_key_file": server_key, "tls_cert_file": server_cert}
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def with_ca_tls_server_args(with_tls_server_args, with_tls_ca_cert_args):
    """Server TLS args extended with the CA certificate for client auth."""
    # All values are flat strings (or None), so a spread copy is equivalent
    # to a deep copy here and leaves the input fixture untouched.
    return {**with_tls_server_args, "tls_ca_cert_file": with_tls_ca_cert_args["ca_cert"]}
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def with_tls_client_args(tmp_dir, with_tls_ca_cert_args):
    """
    Generate a client key/certificate signed by the session CA and return
    redis-py SSL keyword arguments pointing at them.
    """
    client_key = os.path.join(tmp_dir, "client-key.pem")
    client_req = os.path.join(tmp_dir, "client-req.pem")
    client_cert = os.path.join(tmp_dir, "client-cert.pem")

    gen_certificate(
        with_tls_ca_cert_args["ca_key"],
        with_tls_ca_cert_args["ca_cert"],
        client_req,
        client_key,
        client_cert,
    )

    return {"ssl": True, "ssl_keyfile": client_key, "ssl_certfile": client_cert}
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def with_ca_tls_client_args(with_tls_client_args, with_tls_ca_cert_args):
    """Client TLS args extended with the CA certificate for server auth."""
    # All values are flat strings (or bools), so a spread copy is equivalent
    # to a deep copy here and leaves the input fixture untouched.
    return {**with_tls_client_args, "ssl_ca_certs": with_tls_ca_cert_args["ca_cert"]}
|