Mirror of https://github.com/element-hq/synapse.git
Fix up healthcheck generation for workers docker image (#12405)
This wasn't quite generating the right thing: the list of /health endpoints was built while generating the nginx upstream blocks rather than once per configured worker, so it could miss worker processes. The list is now collected in the per-worker configuration loop, with the main process always included.
commit 3cdf5a1386 (parent: 961ee75a9b)
4 changed files with 11 additions and 11 deletions
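
As a minimal sketch of the change in strategy (worker names and ports below are made up for illustration; the real script assigns ports while configuring each worker):

# Sketch: before the fix, /health URLs were collected while building the
# nginx upstreams, so a worker without an upstream entry got no check.
# After the fix, every worker port contributes a URL.
from typing import Dict, List

MAIN_PROCESS_URL = "http://localhost:8080/health"

def old_healthcheck_urls(nginx_upstreams: Dict[str, List[int]]) -> List[str]:
    urls = [MAIN_PROCESS_URL]
    for _worker_type, ports in nginx_upstreams.items():
        for port in ports:
            urls.append("http://localhost:%d/health" % (port,))
    return urls

def new_healthcheck_urls(worker_ports: List[int]) -> List[str]:
    urls = [MAIN_PROCESS_URL]
    for port in worker_ports:
        urls.append("http://localhost:%d/health" % (port,))
    return urls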
changelog.d/12405.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Fix up healthcheck generation for workers docker image.
docker/Dockerfile-workers
@@ -14,9 +14,6 @@ COPY ./docker/conf-workers/* /conf/
 # Expose nginx listener port
 EXPOSE 8080/tcp
 
-# Volume for user-editable config files, logs etc.
-VOLUME ["/data"]
-
 # A script to read environment variables and create the necessary
 # files to run the desired worker configuration. Will start supervisord.
 COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
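
The image's healthcheck ultimately consumes the URL list that configure_workers_and_start.py builds. The template it renders is not part of this diff, so the following is only a hypothetical sketch of such a renderer:

# Hypothetical sketch: render the collected URLs into a shell script that
# probes every endpoint; the real image generates its script from a
# template under /conf, which this diff does not show.
from typing import List

def render_healthcheck_script(healthcheck_urls: List[str]) -> str:
    lines = ["#!/bin/sh", "# Fail the healthcheck if any endpoint is down."]
    for url in healthcheck_urls:
        lines.append('curl -fSs "%s" > /dev/null || exit 1' % (url,))
    return "\n".join(lines) + "\n"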
docker/configure_workers_and_start.py
@@ -308,7 +308,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     Args:
         environ: _Environ[str]
-        config_path: Where to output the generated Synapse main worker config file.
+        config_path: The location of the generated Synapse main worker config file.
         data_dir: The location of the synapse data directory. Where log and
             user-facing config files live.
     """
 
@@ -321,7 +321,8 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # and adding a replication listener.
 
     # First read the original config file and extract the listeners block. Then we'll add
-    # another listener for replication. Later we'll write out the result.
+    # another listener for replication. Later we'll write out the result to the shared
+    # config file.
     listeners = [
         {
             "port": 9093,
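
The listeners block is truncated by the hunk; going by Synapse's usual listener schema, the replication listener being added plausibly looks like the dict below, where every field other than "port" is an assumption:

# Assumed shape of the replication listener appended to the extracted
# listeners block; only "port": 9093 is confirmed by this diff.
replication_listener = {
    "port": 9093,
    "bind_address": "127.0.0.1",
    "type": "http",
    "resources": [{"names": ["replication"]}],
}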
@@ -387,6 +388,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # worker_type + instance #
     worker_type_counter: Dict[str, int] = {}
 
+    # A list of internal endpoints to healthcheck, starting with the main process
+    # which exists even if no workers do.
+    healthcheck_urls = ["http://localhost:8080/health"]
+
     # For each worker type specified by the user, create config values
     for worker_type in worker_types:
         worker_type = worker_type.strip()
@@ -411,6 +416,8 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
         # Update the shared config with any worker-type specific options
         shared_config.update(worker_config["shared_extra_conf"])
 
+        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
+
         # Check if more than one instance of this worker type has been specified
         worker_type_total_count = worker_types.count(worker_type)
         if worker_type_total_count > 1:
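
Taken together with the initialisation in the previous hunk, every configured worker now contributes one /health URL. A small illustrative run (the sequential port assignment and the 18009 base are assumptions, not shown in this diff):

# Illustrative only: three workers get consecutive ports, and each one
# adds a /health URL alongside the always-present main process.
healthcheck_urls = ["http://localhost:8080/health"]
worker_port = 18009  # assumed starting port
for worker_type in ["federation_reader", "federation_reader", "pusher"]:
    healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
    worker_port += 1

print(healthcheck_urls)
# ['http://localhost:8080/health', 'http://localhost:18009/health',
#  'http://localhost:18010/health', 'http://localhost:18011/health']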
@@ -476,15 +483,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # Determine the load-balancing upstreams to configure
     nginx_upstream_config = ""
 
-    # At the same time, prepare a list of internal endpoints to healthcheck
-    # starting with the main process which exists even if no workers do.
-    healthcheck_urls = ["http://localhost:8080/health"]
-
     for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
         body = ""
         for port in upstream_worker_ports:
             body += "    server localhost:%d;\n" % (port,)
-            healthcheck_urls.append("http://localhost:%d/health" % (port,))
 
         # Add to the list of configured upstreams
         nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
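
After the fix this loop is purely about nginx load-balancing; healthchecking no longer piggybacks on it. A sketch of what the loop produces, assuming a template shape for NGINX_UPSTREAM_CONFIG_BLOCK (the real constant is defined elsewhere in the script and is not part of this diff):

from typing import Dict, List

# Assumed template shape, for illustration only.
NGINX_UPSTREAM_CONFIG_BLOCK = """\
upstream {upstream_worker_type} {{
{body}}}
"""

def build_nginx_upstreams(nginx_upstreams: Dict[str, List[int]]) -> str:
    nginx_upstream_config = ""
    for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
        body = ""
        for port in upstream_worker_ports:
            body += "    server localhost:%d;\n" % (port,)
        nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
            upstream_worker_type=upstream_worker_type, body=body
        )
    return nginx_upstream_config

print(build_nginx_upstreams({"federation_reader": [18009, 18010]}))
# upstream federation_reader {
#     server localhost:18009;
#     server localhost:18010;
# }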
scripts-dev/complement.sh
@@ -52,7 +52,7 @@ if [[ -n "$WORKERS" ]]; then
   COMPLEMENT_DOCKERFILE=SynapseWorkers.Dockerfile
 
   # And provide some more configuration to complement.
-  export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=25
+  export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=60
 else
   export COMPLEMENT_BASE_IMAGE=complement-synapse
   COMPLEMENT_DOCKERFILE=Dockerfile
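
A note on the timeout bump: presumably the spawn timeout rises from 25 to 60 seconds because the container only reports healthy once every worker's /health endpoint responds, which can take noticeably longer than waiting on the main process alone.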