Add missing type hints in tests (#14879)
* Fix-up type hints in tests.logging.
* Add missing type hints to test_transactions.
Commit: fc35e0673f (parent: 345576bc34)
8 changed files with 75 additions and 42 deletions

changelog.d/14879.misc (new file, 1 addition)

@@ -0,0 +1 @@
+Add missing type hints.

mypy.ini (6 changed lines: 3 additions, 3 deletions)

@@ -40,10 +40,7 @@ exclude = (?x)
 |tests/http/federation/test_matrix_federation_agent.py
 |tests/http/federation/test_srv_resolver.py
 |tests/http/test_proxyagent.py
-|tests/logging/__init__.py
-|tests/logging/test_terse_json.py
 |tests/module_api/test_api.py
-|tests/rest/client/test_transactions.py
 |tests/rest/media/v1/test_media_storage.py
 |tests/server.py
 |tests/test_state.py
@@ -92,6 +89,9 @@ disallow_untyped_defs = True
 [mypy-tests.handlers.*]
 disallow_untyped_defs = True

+[mypy-tests.logging.*]
+disallow_untyped_defs = True
+
 [mypy-tests.metrics.*]
 disallow_untyped_defs = True

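
The new [mypy-tests.logging.*] section is what drives most of this commit: with disallow_untyped_defs = True, mypy requires every function in that package to carry full annotations. A rough illustrative sketch (not part of the commit; hypothetical function names) of what the checker rejects and accepts under that setting:

import logging


def get_logger_untyped(name):  # mypy: error: Function is missing a type annotation
    return logging.getLogger(name)


def get_logger_typed(name: str) -> logging.Logger:  # accepted
    return logging.getLogger(name)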

synapse/rest/client/transactions.py

@@ -19,6 +19,7 @@ from typing import TYPE_CHECKING, Awaitable, Callable, Dict, Tuple

 from typing_extensions import ParamSpec

+from twisted.internet.defer import Deferred
 from twisted.python.failure import Failure
 from twisted.web.server import Request

@@ -90,7 +91,7 @@ class HttpTransactionCache:
         fn: Callable[P, Awaitable[Tuple[int, JsonDict]]],
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> Awaitable[Tuple[int, JsonDict]]:
+    ) -> "Deferred[Tuple[int, JsonDict]]":
         """Fetches the response for this transaction, or executes the given function
         to produce a response for this transaction.

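
For context, a minimal sketch (illustrative names, not Synapse code) of the ParamSpec pattern visible in this hunk: the cache forwards *args/**kwargs to the wrapped callable without erasing its signature, so callers of fetch_or_execute are checked against the real parameters of fn. The separate change from Awaitable to Deferred in the return annotation simply spells out the concrete type the method actually hands back.

import asyncio
from typing import Awaitable, Callable, Dict, Tuple

from typing_extensions import ParamSpec

P = ParamSpec("P")


class TinyTransactionCache:
    """Demo cache: remembers the first result produced for each key."""

    def __init__(self) -> None:
        self._results: Dict[str, Tuple[int, dict]] = {}

    async def fetch_or_execute(
        self,
        key: str,
        fn: Callable[P, Awaitable[Tuple[int, dict]]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> Tuple[int, dict]:
        # ParamSpec ties *args/**kwargs to fn's own signature for the checker.
        if key not in self._results:
            self._results[key] = await fn(*args, **kwargs)
        return self._results[key]


async def handler(body: str) -> Tuple[int, dict]:
    return 200, {"echo": body}


async def main() -> None:
    cache = TinyTransactionCache()
    print(await cache.fetch_or_execute("txn1", handler, "hello"))
    print(await cache.fetch_or_execute("txn1", handler, "ignored"))  # served from cache


asyncio.run(main())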

tests/logging/__init__.py

@@ -13,9 +13,11 @@
 # limitations under the License.
 import logging

+from tests.unittest import TestCase
+

-class LoggerCleanupMixin:
-    def get_logger(self, handler):
+class LoggerCleanupMixin(TestCase):
+    def get_logger(self, handler: logging.Handler) -> logging.Logger:
         """
         Attach a handler to a logger and add clean-ups to remove revert this.
         """
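
One plausible reason for the new TestCase base (an inference, not stated in the commit): when mypy checks the mixin in isolation, unittest attributes such as addCleanup resolve only if the mixin declares a TestCase base. An illustrative sketch of the same pattern with hypothetical names:

import logging
import unittest


class HandlerCleanupMixin(unittest.TestCase):
    """Hypothetical mixin: attach a handler and undo it after the test."""

    def attach_handler(self, handler: logging.Handler) -> logging.Logger:
        logger = logging.getLogger("example")
        logger.addHandler(handler)
        # addCleanup is visible to mypy because of the declared TestCase base.
        self.addCleanup(logger.removeHandler, handler)
        return logger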

tests/logging/test_opentracing.py

@@ -153,7 +153,7 @@ class LogContextScopeManagerTestCase(TestCase):

         scopes = []

-        async def task(i: int):
+        async def task(i: int) -> None:
             scope = start_active_span(
                 f"task{i}",
                 tracer=self._tracer,
@@ -165,7 +165,7 @@ class LogContextScopeManagerTestCase(TestCase):
             self.assertEqual(self._tracer.active_span, scope.span)
             scope.close()

-        async def root():
+        async def root() -> None:
             with start_active_span("root span", tracer=self._tracer) as root_scope:
                 self.assertEqual(self._tracer.active_span, root_scope.span)
                 scopes.append(root_scope)

tests/logging/test_remote_handler.py

@@ -11,7 +11,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from twisted.test.proto_helpers import AccumulatingProtocol
+from typing import Tuple
+
+from twisted.internet.protocol import Protocol
+from twisted.test.proto_helpers import AccumulatingProtocol, MemoryReactorClock

 from synapse.logging import RemoteHandler

@@ -20,7 +23,9 @@ from tests.server import FakeTransport, get_clock
 from tests.unittest import TestCase


-def connect_logging_client(reactor, client_id):
+def connect_logging_client(
+    reactor: MemoryReactorClock, client_id: int
+) -> Tuple[Protocol, AccumulatingProtocol]:
     # This is essentially tests.server.connect_client, but disabling autoflush on
     # the client transport. This is necessary to avoid an infinite loop due to
     # sending of data via the logging transport causing additional logs to be
@@ -35,10 +40,10 @@ def connect_logging_client(reactor, client_id):


 class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.reactor, _ = get_clock()

-    def test_log_output(self):
+    def test_log_output(self) -> None:
         """
         The remote handler delivers logs over TCP.
         """
@@ -51,6 +56,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):
         client, server = connect_logging_client(self.reactor, 0)

         # Trigger data being sent
+        assert isinstance(client.transport, FakeTransport)
         client.transport.flush()

         # One log message, with a single trailing newline
@@ -61,7 +67,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):
         # Ensure the data passed through properly.
         self.assertEqual(logs[0], "Hello there, wally!")

-    def test_log_backpressure_debug(self):
+    def test_log_backpressure_debug(self) -> None:
         """
         When backpressure is hit, DEBUG logs will be shed.
         """
@@ -83,6 +89,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):

         # Allow the reconnection
         client, server = connect_logging_client(self.reactor, 0)
+        assert isinstance(client.transport, FakeTransport)
         client.transport.flush()

         # Only the 7 infos made it through, the debugs were elided
@@ -90,7 +97,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):
         self.assertEqual(len(logs), 7)
         self.assertNotIn(b"debug", server.data)

-    def test_log_backpressure_info(self):
+    def test_log_backpressure_info(self) -> None:
         """
         When backpressure is hit, DEBUG and INFO logs will be shed.
         """
@@ -116,6 +123,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):

         # Allow the reconnection
         client, server = connect_logging_client(self.reactor, 0)
+        assert isinstance(client.transport, FakeTransport)
         client.transport.flush()

         # The 10 warnings made it through, the debugs and infos were elided
@@ -124,7 +132,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):
         self.assertNotIn(b"debug", server.data)
         self.assertNotIn(b"info", server.data)

-    def test_log_backpressure_cut_middle(self):
+    def test_log_backpressure_cut_middle(self) -> None:
         """
         When backpressure is hit, and no more DEBUG and INFOs cannot be culled,
         it will cut the middle messages out.
@@ -140,6 +148,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):

         # Allow the reconnection
         client, server = connect_logging_client(self.reactor, 0)
+        assert isinstance(client.transport, FakeTransport)
         client.transport.flush()

         # The first five and last five warnings made it through, the debugs and
@@ -151,7 +160,7 @@ class RemoteHandlerTestCase(LoggerCleanupMixin, TestCase):
             logs,
         )

-    def test_cancel_connection(self):
+    def test_cancel_connection(self) -> None:
         """
         Gracefully handle the connection being cancelled.
         """
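
The repeated assert isinstance(client.transport, FakeTransport) lines serve the type checker as much as the test: transport is declared with a broad interface type, and the assert both guards at runtime and narrows the static type so .flush() resolves. A standalone sketch of the idiom, using illustrative classes rather than the Twisted ones:

from typing import Optional, Union


class StreamTransport:
    def write(self, data: bytes) -> None:
        print("wrote", data)


class BufferedTransport:
    def __init__(self) -> None:
        self.buffer = b""

    def flush(self) -> None:
        print("flushed", self.buffer)


def flush_buffered(transport: Optional[Union[StreamTransport, BufferedTransport]]) -> None:
    # Without the assert, mypy rejects .flush(): the declared type may be a
    # StreamTransport or None. The assert narrows it to BufferedTransport.
    assert isinstance(transport, BufferedTransport)
    transport.flush()


flush_buffered(BufferedTransport())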

tests/logging/test_terse_json.py

@@ -14,24 +14,28 @@
 import json
 import logging
 from io import BytesIO, StringIO
+from typing import cast
 from unittest.mock import Mock, patch

+from twisted.web.http import HTTPChannel
 from twisted.web.server import Request

 from synapse.http.site import SynapseRequest
 from synapse.logging._terse_json import JsonFormatter, TerseJsonFormatter
 from synapse.logging.context import LoggingContext, LoggingContextFilter
+from synapse.types import JsonDict

 from tests.logging import LoggerCleanupMixin
-from tests.server import FakeChannel
+from tests.server import FakeChannel, get_clock
 from tests.unittest import TestCase


 class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.output = StringIO()
+        self.reactor, _ = get_clock()

-    def get_log_line(self):
+    def get_log_line(self) -> JsonDict:
         # One log message, with a single trailing newline.
         data = self.output.getvalue()
         logs = data.splitlines()
@@ -39,7 +43,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         self.assertEqual(data.count("\n"), 1)
         return json.loads(logs[0])

-    def test_terse_json_output(self):
+    def test_terse_json_output(self) -> None:
         """
         The Terse JSON formatter converts log messages to JSON.
         """
@@ -61,7 +65,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         self.assertCountEqual(log.keys(), expected_log_keys)
         self.assertEqual(log["log"], "Hello there, wally!")

-    def test_extra_data(self):
+    def test_extra_data(self) -> None:
         """
         Additional information can be included in the structured logging.
         """
@@ -93,7 +97,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         self.assertEqual(log["int"], 3)
         self.assertIs(log["bool"], True)

-    def test_json_output(self):
+    def test_json_output(self) -> None:
         """
         The Terse JSON formatter converts log messages to JSON.
         """
@@ -114,7 +118,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         self.assertCountEqual(log.keys(), expected_log_keys)
         self.assertEqual(log["log"], "Hello there, wally!")

-    def test_with_context(self):
+    def test_with_context(self) -> None:
         """
         The logging context should be added to the JSON response.
         """
@@ -139,7 +143,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         self.assertEqual(log["log"], "Hello there, wally!")
         self.assertEqual(log["request"], "name")

-    def test_with_request_context(self):
+    def test_with_request_context(self) -> None:
         """
         Information from the logging context request should be added to the JSON response.
         """
@@ -154,11 +158,13 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         site.server_version_string = "Server v1"
         site.reactor = Mock()
         site.experimental_cors_msc3886 = False
-        request = SynapseRequest(FakeChannel(site, None), site)
+        request = SynapseRequest(
+            cast(HTTPChannel, FakeChannel(site, self.reactor)), site
+        )
         # Call requestReceived to finish instantiating the object.
         request.content = BytesIO()
-        # Partially skip some of the internal processing of SynapseRequest.
-        request._started_processing = Mock()
+        # Partially skip some internal processing of SynapseRequest.
+        request._started_processing = Mock()  # type: ignore[assignment]
         request.request_metrics = Mock(spec=["name"])
         with patch.object(Request, "render"):
             request.requestReceived(b"POST", b"/_matrix/client/versions", b"1.1")
@@ -200,7 +206,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
         self.assertEqual(log["protocol"], "1.1")
         self.assertEqual(log["user_agent"], "")

-    def test_with_exception(self):
+    def test_with_exception(self) -> None:
         """
         The logging exception type & value should be added to the JSON response.
         """
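
The cast(HTTPChannel, FakeChannel(...)) call changes only the static type; at runtime SynapseRequest still talks to the fake. A short sketch of the same trick with stand-in classes (these mirror, not reproduce, the Synapse and Twisted types):

from typing import cast


class HTTPChannel:
    def write(self, data: bytes) -> None:
        print("real channel:", data)


class FakeChannel:
    """Test double with the same method shape as HTTPChannel."""

    def write(self, data: bytes) -> None:
        print("fake channel:", data)


def send(channel: HTTPChannel, data: bytes) -> None:
    channel.write(data)


# cast() satisfies the checker; the object passed in is still the FakeChannel.
send(cast(HTTPChannel, FakeChannel()), b"hello")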

tests/rest/client/test_transactions.py

@@ -13,18 +13,22 @@
 # limitations under the License.

 from http import HTTPStatus
+from typing import Any, Generator, Tuple, cast
 from unittest.mock import Mock, call

-from twisted.internet import defer, reactor
+from twisted.internet import defer, reactor as _reactor

 from synapse.logging.context import SENTINEL_CONTEXT, LoggingContext, current_context
 from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache
+from synapse.types import ISynapseReactor, JsonDict
 from synapse.util import Clock

 from tests import unittest
 from tests.test_utils import make_awaitable
 from tests.utils import MockClock

+reactor = cast(ISynapseReactor, _reactor)
+

 class HttpTransactionCacheTestCase(unittest.TestCase):
     def setUp(self) -> None:
@@ -34,11 +38,13 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
         self.hs.get_auth = Mock()
         self.cache = HttpTransactionCache(self.hs)

-        self.mock_http_response = (HTTPStatus.OK, "GOOD JOB!")
+        self.mock_http_response = (HTTPStatus.OK, {"result": "GOOD JOB!"})
         self.mock_key = "foo"

     @defer.inlineCallbacks
-    def test_executes_given_function(self):
+    def test_executes_given_function(
+        self,
+    ) -> Generator["defer.Deferred[Any]", object, None]:
         cb = Mock(return_value=make_awaitable(self.mock_http_response))
         res = yield self.cache.fetch_or_execute(
             self.mock_key, cb, "some_arg", keyword="arg"
@@ -47,7 +53,9 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
         self.assertEqual(res, self.mock_http_response)

     @defer.inlineCallbacks
-    def test_deduplicates_based_on_key(self):
+    def test_deduplicates_based_on_key(
+        self,
+    ) -> Generator["defer.Deferred[Any]", object, None]:
         cb = Mock(return_value=make_awaitable(self.mock_http_response))
         for i in range(3):  # invoke multiple times
             res = yield self.cache.fetch_or_execute(
@@ -58,18 +66,20 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
         cb.assert_called_once_with("some_arg", keyword="arg", changing_args=0)

     @defer.inlineCallbacks
-    def test_logcontexts_with_async_result(self):
+    def test_logcontexts_with_async_result(
+        self,
+    ) -> Generator["defer.Deferred[Any]", object, None]:
         @defer.inlineCallbacks
-        def cb():
+        def cb() -> Generator["defer.Deferred[object]", object, Tuple[int, JsonDict]]:
             yield Clock(reactor).sleep(0)
-            return "yay"
+            return 1, {}

         @defer.inlineCallbacks
-        def test():
+        def test() -> Generator["defer.Deferred[Any]", object, None]:
             with LoggingContext("c") as c1:
                 res = yield self.cache.fetch_or_execute(self.mock_key, cb)
                 self.assertIs(current_context(), c1)
-                self.assertEqual(res, "yay")
+                self.assertEqual(res, (1, {}))

         # run the test twice in parallel
         d = defer.gatherResults([test(), test()])
@@ -78,13 +88,15 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
         self.assertIs(current_context(), SENTINEL_CONTEXT)

     @defer.inlineCallbacks
-    def test_does_not_cache_exceptions(self):
+    def test_does_not_cache_exceptions(
+        self,
+    ) -> Generator["defer.Deferred[Any]", object, None]:
         """Checks that, if the callback throws an exception, it is called again
         for the next request.
         """
         called = [False]

-        def cb():
+        def cb() -> "defer.Deferred[Tuple[int, JsonDict]]":
             if called[0]:
                 # return a valid result the second time
                 return defer.succeed(self.mock_http_response)
@@ -104,13 +116,15 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
         self.assertIs(current_context(), test_context)

     @defer.inlineCallbacks
-    def test_does_not_cache_failures(self):
+    def test_does_not_cache_failures(
+        self,
+    ) -> Generator["defer.Deferred[Any]", object, None]:
         """Checks that, if the callback returns a failure, it is called again
         for the next request.
         """
         called = [False]

-        def cb():
+        def cb() -> "defer.Deferred[Tuple[int, JsonDict]]":
             if called[0]:
                 # return a valid result the second time
                 return defer.succeed(self.mock_http_response)
@@ -130,7 +144,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
         self.assertIs(current_context(), test_context)

     @defer.inlineCallbacks
-    def test_cleans_up(self):
+    def test_cleans_up(self) -> Generator["defer.Deferred[Any]", object, None]:
         cb = Mock(return_value=make_awaitable(self.mock_http_response))
         yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # should NOT have cleaned up yet
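
The Generator[...] return types above follow how @defer.inlineCallbacks is typed in Twisted: the decorated generator yields Deferreds, is sent back their results, and its return value becomes the result of the Deferred the decorated function returns. A minimal runnable sketch of that annotation pattern, assuming only that Twisted is installed (the function name is illustrative):

from typing import Any, Generator

from twisted.internet import defer


@defer.inlineCallbacks
def do_work() -> Generator["defer.Deferred[Any]", object, str]:
    # Each yield hands a Deferred to inlineCallbacks, which sends the result back in.
    first = yield defer.succeed("step one")
    second = yield defer.succeed("step two")
    return f"{first}, then {second}"


d = do_work()  # a Deferred that fires with the generator's return value
d.addCallback(print)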