Various clean-ups to the logging context code (#8935)
commit 1619802228 (parent 895e04319b)
11 changed files with 20 additions and 39 deletions
@@ -1 +1 @@
-Improve structured logging tests.
+Various clean-ups to the structured logging and logging context code.

changelog.d/8935.misc (new file, 1 line)
@@ -0,0 +1 @@
+Various clean-ups to the structured logging and logging context code.
@@ -206,7 +206,7 @@ def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) -> None:
     # filter options, but care must when using e.g. MemoryHandler to buffer
     # writes.
 
-    log_context_filter = LoggingContextFilter(request="")
+    log_context_filter = LoggingContextFilter()
     log_metadata_filter = MetadataFilter({"server_name": config.server_name})
     old_factory = logging.getLogRecordFactory()
 
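For illustration, a minimal sketch (not part of the patch) of wiring the filter onto a stdlib handler after this change; it assumes Synapse is importable and uses made-up logger and request names. Callers no longer need to pass request="", since that is now the filter's own default:

    import logging

    from synapse.logging.context import LoggingContext, LoggingContextFilter

    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(request)s - %(message)s"))
    # No need to pass request="" any more; that is the filter's default.
    handler.addFilter(LoggingContextFilter())

    logger = logging.getLogger("demo")
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    with LoggingContext("demo", request="GET-42"):
        # The filter copies the active context's request onto the record,
        # so this is emitted as "GET-42 - handled request".
        logger.info("handled request")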
@@ -128,8 +128,7 @@ class SynapseRequest(Request):
 
         # create a LogContext for this request
         request_id = self.get_request_id()
-        logcontext = self.logcontext = LoggingContext(request_id)
-        logcontext.request = request_id
+        self.logcontext = LoggingContext(request_id, request=request_id)
 
         # override the Server header which is set by twisted
         self.setHeader("Server", self.site.server_version_string)
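The same pattern recurs throughout the patch: rather than constructing a LoggingContext and then assigning its request attribute in a second step, the request string is handed straight to the constructor. A small before/after sketch with an illustrative value:

    from synapse.logging.context import LoggingContext

    request_id = "GET-42"  # illustrative value

    # Before #8935: construct, then set the attribute separately.
    logcontext = LoggingContext(request_id)
    logcontext.request = request_id

    # After #8935: the request is a constructor argument.
    logcontext = LoggingContext(request_id, request=request_id)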
@@ -203,10 +203,6 @@ class _Sentinel:
     def copy_to(self, record):
         pass
 
-    def copy_to_twisted_log_entry(self, record):
-        record["request"] = None
-        record["scope"] = None
-
     def start(self, rusage: "Optional[resource._RUsage]"):
         pass
 
@@ -372,13 +368,6 @@ class LoggingContext:
         # we also track the current scope:
         record.scope = self.scope
 
-    def copy_to_twisted_log_entry(self, record) -> None:
-        """
-        Copy logging fields from this context to a Twisted log record.
-        """
-        record["request"] = self.request
-        record["scope"] = self.scope
-
     def start(self, rusage: "Optional[resource._RUsage]") -> None:
         """
         Record that this logcontext is currently running.
@@ -542,13 +531,10 @@
 class LoggingContextFilter(logging.Filter):
     """Logging filter that adds values from the current logging context to each
     record.
-    Args:
-        **defaults: Default values to avoid formatters complaining about
-            missing fields
     """
 
-    def __init__(self, **defaults) -> None:
-        self.defaults = defaults
+    def __init__(self, request: str = ""):
+        self._default_request = request
 
     def filter(self, record) -> Literal[True]:
         """Add each fields from the logging contexts to the record.
@@ -556,14 +542,14 @@ class LoggingContextFilter(logging.Filter):
             True to include the record in the log output.
         """
         context = current_context()
-        for key, value in self.defaults.items():
-            setattr(record, key, value)
+        record.request = self._default_request
 
         # context should never be None, but if it somehow ends up being, then
         # we end up in a death spiral of infinite loops, so let's check, for
         # robustness' sake.
         if context is not None:
-            context.copy_to(record)
+            # Logging is interested in the request.
+            record.request = context.request
 
         return True
 
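To make the new behaviour concrete, here is a hypothetical demonstration (not from the patch, assuming Synapse is importable): the filter now puts only the request on the record, taking it from the active context, and no longer copies the scope across:

    import logging

    from synapse.logging.context import LoggingContext, LoggingContextFilter

    record = logging.LogRecord("demo", logging.INFO, __file__, 1, "hi", None, None)
    filt = LoggingContextFilter()

    with LoggingContext("ctx", request="GET-7"):
        filt.filter(record)

    print(record.request)            # "GET-7", taken from the active context
    print(hasattr(record, "scope"))  # False: scope is no longer copied over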
@@ -199,8 +199,7 @@ def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwargs):
         _background_process_start_count.labels(desc).inc()
         _background_process_in_flight_count.labels(desc).inc()
 
-        with BackgroundProcessLoggingContext(desc) as context:
-            context.request = "%s-%i" % (desc, count)
+        with BackgroundProcessLoggingContext(desc, "%s-%i" % (desc, count)) as context:
             try:
                 ctx = noop_context_manager()
                 if bg_start_span:
@@ -244,8 +243,8 @@ class BackgroundProcessLoggingContext(LoggingContext):
 
     __slots__ = ["_proc"]
 
-    def __init__(self, name: str):
-        super().__init__(name)
+    def __init__(self, name: str, request: Optional[str] = None):
+        super().__init__(name, request=request)
 
         self._proc = _BackgroundProcess(name, self)
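A rough usage sketch of the new constructor (illustrative name and request string, assuming Synapse is importable; in practice this context is normally created for you by run_as_background_process):

    from synapse.metrics.background_process_metrics import (
        BackgroundProcessLoggingContext,
    )

    # The request string is now a constructor argument rather than being
    # assigned to .request after construction.
    with BackgroundProcessLoggingContext("purge_history", "purge_history-17") as ctx:
        assert ctx.request == "purge_history-17"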
@@ -172,8 +172,7 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
         # a logcontext which we use for processing incoming commands. We declare it as a
         # background process so that the CPU stats get reported to prometheus.
         ctx_name = "replication-conn-%s" % self.conn_id
-        self._logging_context = BackgroundProcessLoggingContext(ctx_name)
-        self._logging_context.request = ctx_name
+        self._logging_context = BackgroundProcessLoggingContext(ctx_name, ctx_name)
 
     def connectionMade(self):
         logger.info("[%s] Connection established", self.id())
@@ -126,7 +126,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
             room_version,
         )
 
-        with LoggingContext(request="send_rejected"):
+        with LoggingContext("send_rejected"):
             d = run_in_background(self.handler.on_receive_pdu, OTHER_SERVER, ev)
         self.get_success(d)
 
@@ -178,7 +178,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
             room_version,
         )
 
-        with LoggingContext(request="send_rejected"):
+        with LoggingContext("send_rejected"):
             d = run_in_background(self.handler.on_receive_pdu, OTHER_SERVER, ev)
         self.get_success(d)
 
@@ -198,7 +198,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         # the auth code requires that a signature exists, but doesn't check that
         # signature... go figure.
         join_event.signatures[other_server] = {"x": "y"}
-        with LoggingContext(request="send_join"):
+        with LoggingContext("send_join"):
             d = run_in_background(
                 self.handler.on_send_join_request, other_server, join_event
             )
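In these tests the string is now the context's positional name rather than a request= keyword. A trivial sketch of the updated idiom (illustrative name, assuming Synapse is importable):

    from synapse.logging.context import LoggingContext, current_context

    # The first positional argument is the context's name; no request= keyword
    # is needed just to get a usable, non-sentinel context in a test.
    with LoggingContext("send_join") as ctx:
        assert current_context() is ctx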
@@ -117,11 +117,10 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         """
         handler = logging.StreamHandler(self.output)
         handler.setFormatter(JsonFormatter())
-        handler.addFilter(LoggingContextFilter(request=""))
+        handler.addFilter(LoggingContextFilter())
         logger = self.get_logger(handler)
 
-        with LoggingContext() as context_one:
-            context_one.request = "test"
+        with LoggingContext(request="test"):
             logger.info("Hello there, %s!", "wally")
 
         log = self.get_log_line()
@@ -132,9 +131,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
             "level",
             "namespace",
             "request",
-            "scope",
         ]
         self.assertCountEqual(log.keys(), expected_log_keys)
         self.assertEqual(log["log"], "Hello there, wally!")
         self.assertEqual(log["request"], "test")
-        self.assertIsNone(log["scope"])
@@ -134,7 +134,7 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
             }
         )
 
-        with LoggingContext(request="lying_event"):
+        with LoggingContext():
             failure = self.get_failure(
                 self.handler.on_receive_pdu(
                     "test.serv", lying_event, sent_to_us_directly=True
@@ -48,7 +48,7 @@ def setup_logging():
     handler = ToTwistedHandler()
     formatter = logging.Formatter(log_format)
     handler.setFormatter(formatter)
-    handler.addFilter(LoggingContextFilter(request=""))
+    handler.addFilter(LoggingContextFilter())
     root_logger.addHandler(handler)
 
     log_level = os.environ.get("SYNAPSE_TEST_LOG_LEVEL", "ERROR")