Merged (23 commits). This repository was archived by the owner on Apr 26, 2024 and is now read-only. The diff below shows the changes from all commits.

1 change: 1 addition & 0 deletions changelog.d/10888.misc
@@ -0,0 +1 @@
Improve type hinting in `synapse.util`.
24 changes: 24 additions & 0 deletions mypy.ini
@@ -102,9 +102,27 @@ disallow_untyped_defs = True
[mypy-synapse.util.batching_queue]
disallow_untyped_defs = True

[mypy-synapse.util.caches.cached_call]
disallow_untyped_defs = True

[mypy-synapse.util.caches.dictionary_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.lrucache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.response_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.stream_change_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.ttl_cache]
disallow_untyped_defs = True

[mypy-synapse.util.daemonize]
disallow_untyped_defs = True

[mypy-synapse.util.file_consumer]
disallow_untyped_defs = True

@@ -141,6 +159,9 @@ disallow_untyped_defs = True
[mypy-synapse.util.msisdn]
disallow_untyped_defs = True

[mypy-synapse.util.patch_inline_callbacks]
disallow_untyped_defs = True

[mypy-synapse.util.ratelimitutils]
disallow_untyped_defs = True

@@ -162,6 +183,9 @@ disallow_untyped_defs = True
[mypy-synapse.util.wheel_timer]
disallow_untyped_defs = True

[mypy-synapse.util.versionstring]
disallow_untyped_defs = True

[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True

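For context, `disallow_untyped_defs = True` makes mypy report an error for any function in the listed modules that lacks annotations. A minimal illustration of the effect (hypothetical module, not part of the diff):

def untyped(x):  # error: Function is missing a type annotation
    return x

def typed(x: int) -> int:  # accepted: fully annotated
    return x
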
2 changes: 1 addition & 1 deletion synapse/util/caches/cached_call.py
@@ -85,7 +85,7 @@ async def get(self) -> TV:
# result in the deferred, since `awaiting` a deferred destroys its result.
# (Also, if it's a Failure, GCing the deferred would log a critical error
# about unhandled Failures)
def got_result(r):
def got_result(r: Union[TV, Failure]) -> None:
self._result = r

self._deferred.addBoth(got_result)
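The comment above explains the new `got_result` signature: awaiting a Deferred consumes its result, so the value (or Failure) is stashed by a callback instead. A standalone sketch of that pattern, using plain Twisted and hypothetical names:

from typing import List, Union

from twisted.internet import defer
from twisted.python.failure import Failure

stash: List[Union[str, Failure]] = []

def got_result(r: Union[str, Failure]) -> None:
    # Keep a reference to the result without consuming the Deferred.
    stash.append(r)

d: "defer.Deferred[str]" = defer.Deferred()
d.addBoth(got_result)
d.callback("hello")
assert stash == ["hello"]
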
11 changes: 6 additions & 5 deletions synapse/util/caches/deferred_cache.py
@@ -31,6 +31,7 @@

from twisted.internet import defer
from twisted.python import failure
from twisted.python.failure import Failure

from synapse.util.async_helpers import ObservableDeferred
from synapse.util.caches.lrucache import LruCache
@@ -112,7 +113,7 @@ def metrics_cb() -> None:
self.thread: Optional[threading.Thread] = None

@property
def max_entries(self):
def max_entries(self) -> int:
return self.cache.max_size

def check_thread(self) -> None:
@@ -258,7 +259,7 @@ def compare_and_pop() -> bool:

return False

def cb(result) -> None:
def cb(result: VT) -> None:
if compare_and_pop():
self.cache.set(key, result, entry.callbacks)
else:
@@ -270,7 +271,7 @@ def cb(result) -> None:
# not have been. Either way, let's double-check now.
entry.invalidate()

def eb(_fail) -> None:
def eb(_fail: Failure) -> None:
compare_and_pop()
entry.invalidate()

@@ -284,11 +285,11 @@ def eb(_fail) -> None:

def prefill(
self, key: KT, value: VT, callback: Optional[Callable[[], None]] = None
):
) -> None:
callbacks = [callback] if callback else []
self.cache.set(key, value, callbacks=callbacks)

def invalidate(self, key):
def invalidate(self, key) -> None:
"""Delete a key, or tree of entries

If the cache is backed by a regular dict, then "key" must be of
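The `cb`/`eb` pair above is the standard Twisted callback/errback split: the success path writes the result into the backing cache, while the failure path only invalidates the pending entry. A minimal sketch of that split, with hypothetical names:

from twisted.internet import defer
from twisted.python.failure import Failure

def cb(result: str) -> None:
    print("success: cache the value", result)

def eb(_fail: Failure) -> None:
    print("failure: invalidate the pending entry")

d: "defer.Deferred[str]" = defer.Deferred()
d.addCallbacks(cb, eb)  # eb would run instead if d.errback(...) fired
d.callback("value")
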
57 changes: 26 additions & 31 deletions synapse/util/caches/lrucache.py
@@ -52,7 +52,7 @@
try:
from pympler.asizeof import Asizer

def _get_size_of(val: Any, *, recurse=True) -> int:
def _get_size_of(val: Any, *, recurse: bool = True) -> int:
"""Get an estimate of the size in bytes of the object.

Args:
@@ -71,7 +71,7 @@ def _get_size_of(val: Any, *, recurse=True) -> int:

except ImportError:

def _get_size_of(val: Any, *, recurse=True) -> int:
def _get_size_of(val: Any, *, recurse: bool = True) -> int:
return 0


@@ -85,15 +85,6 @@ def _get_size_of(val: Any, *, recurse=True) -> int:
# a general type var, distinct from either KT or VT
T = TypeVar("T")


def enumerate_leaves(node, depth):
if depth == 0:
yield node
else:
for n in node.values():
yield from enumerate_leaves(n, depth - 1)


P = TypeVar("P")


@@ -102,7 +93,7 @@

__slots__ = ["last_access_ts_secs"]

def update_last_access(self, clock: Clock):
def update_last_access(self, clock: Clock) -> None:
self.last_access_ts_secs = int(clock.time())


@@ -115,7 +106,7 @@ def update_last_access(self, clock: Clock):


@wrap_as_background_process("LruCache._expire_old_entries")
async def _expire_old_entries(clock: Clock, expiry_seconds: int):
async def _expire_old_entries(clock: Clock, expiry_seconds: int) -> None:
"""Walks the global cache list to find cache entries that haven't been
accessed in the given number of seconds.
"""
@@ -163,7 +154,7 @@ async def _expire_old_entries(clock: Clock, expiry_seconds: int):
logger.info("Dropped %d items from caches", i)


def setup_expire_lru_cache_entries(hs: "HomeServer"):
def setup_expire_lru_cache_entries(hs: "HomeServer") -> None:
"""Start a background job that expires all cache entries if they have not
been accessed for the given number of seconds.
"""
@@ -183,7 +174,7 @@ def setup_expire_lru_cache_entries(hs: "HomeServer"):
)


class _Node:
class _Node(Generic[KT, VT]):
__slots__ = [
"_list_node",
"_global_list_node",
@@ -197,8 +188,8 @@ class _Node:
def __init__(
self,
root: "ListNode[_Node]",
key,
value,
key: KT,
value: VT,
cache: "weakref.ReferenceType[LruCache]",
clock: Clock,
callbacks: Collection[Callable[[], None]] = (),
@@ -409,7 +400,7 @@ def evict() -> None:

def synchronized(f: FT) -> FT:
@wraps(f)
def inner(*args, **kwargs):
def inner(*args: Any, **kwargs: Any) -> Any:
with lock:
return f(*args, **kwargs)

@@ -418,17 +409,19 @@ def inner(*args, **kwargs):
cached_cache_len = [0]
if size_callback is not None:

def cache_len():
def cache_len() -> int:
return cached_cache_len[0]

else:

def cache_len():
def cache_len() -> int:
return len(cache)

self.len = synchronized(cache_len)

def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):
def add_node(
key: KT, value: VT, callbacks: Collection[Callable[[], None]] = ()
) -> None:
node = _Node(
list_root,
key,
Expand All @@ -446,7 +439,7 @@ def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):
if caches.TRACK_MEMORY_USAGE and metrics:
metrics.inc_memory_usage(node.memory)

def move_node_to_front(node: _Node):
def move_node_to_front(node: _Node) -> None:
node.move_to_front(real_clock, list_root)

def delete_node(node: _Node) -> int:
@@ -488,7 +481,7 @@ def cache_get(
default: Optional[T] = None,
callbacks: Collection[Callable[[], None]] = (),
update_metrics: bool = True,
):
) -> Union[None, T, VT]:
node = cache.get(key, None)
if node is not None:
move_node_to_front(node)
@@ -502,7 +495,9 @@ def cache_get(
return default

@synchronized
def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()):
def cache_set(
key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()
) -> None:
node = cache.get(key, None)
if node is not None:
# We sometimes store large objects, e.g. dicts, which cause
@@ -547,7 +542,7 @@ def cache_pop(key: KT, default: T) -> Union[T, VT]:
...

@synchronized
def cache_pop(key: KT, default: Optional[T] = None):
def cache_pop(key: KT, default: Optional[T] = None) -> Union[None, T, VT]:
node = cache.get(key, None)
if node:
delete_node(node)
@@ -612,25 +607,25 @@ def cache_contains(key: KT) -> bool:
self.contains = cache_contains
self.clear = cache_clear

def __getitem__(self, key):
def __getitem__(self, key: KT) -> VT:
result = self.get(key, self.sentinel)
if result is self.sentinel:
raise KeyError()
else:
return result
return cast(VT, result)

def __setitem__(self, key, value):
def __setitem__(self, key: KT, value: VT) -> None:
self.set(key, value)

def __delitem__(self, key, value):
def __delitem__(self, key: KT, value: VT) -> None:
result = self.pop(key, self.sentinel)
if result is self.sentinel:
raise KeyError()

def __len__(self):
def __len__(self) -> int:
return self.len()

def __contains__(self, key):
def __contains__(self, key: KT) -> bool:
return self.contains(key)

def set_cache_factor(self, factor: float) -> bool:
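With `_Node` now `Generic[KT, VT]` and the accessor closures annotated, call sites can parameterize the cache and have mypy check key and value types. A usage sketch (assuming a Synapse checkout on the path and the `max_size` constructor argument used elsewhere in this file):

from synapse.util.caches.lrucache import LruCache

cache: LruCache[str, int] = LruCache(max_size=10)
cache["three"] = 3                     # __setitem__ is now typed as (KT, VT)
value = cache.get("three", default=0)  # typed as Union[None, T, VT]
assert value == 3
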
6 changes: 3 additions & 3 deletions synapse/util/caches/response_cache.py
@@ -104,8 +104,8 @@ def get(self, key: KV) -> Optional[defer.Deferred]:
return None

def _set(
self, context: ResponseCacheContext[KV], deferred: defer.Deferred
) -> defer.Deferred:
self, context: ResponseCacheContext[KV], deferred: "defer.Deferred[RV]"
) -> "defer.Deferred[RV]":
"""Set the entry for the given key to the given deferred.

*deferred* should run its callbacks in the sentinel logcontext (ie,
@@ -126,7 +126,7 @@ def _set(
key = context.cache_key
self.pending_result_cache[key] = result

def on_complete(r):
def on_complete(r: RV) -> RV:
# if this cache has a non-zero timeout, and the callback has not cleared
# the should_cache bit, we leave it in the cache for now and schedule
# its removal later.
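Note that the new `_set` annotations above are written as the string "defer.Deferred[RV]" rather than the bare generic; presumably because subscripting `defer.Deferred` at runtime raised a TypeError on older Twisted releases, the generic form is quoted so that only mypy evaluates it. A minimal sketch of the same trick:

from twisted.internet import defer

def passthrough(d: "defer.Deferred[str]") -> "defer.Deferred[str]":
    # Quoted annotations are never evaluated at runtime, so this imports
    # and runs even where defer.Deferred[...] is not subscriptable.
    return d
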
6 changes: 3 additions & 3 deletions synapse/util/caches/stream_change_cache.py
@@ -40,10 +40,10 @@ def __init__(
self,
name: str,
current_stream_pos: int,
max_size=10000,
max_size: int = 10000,
prefilled_cache: Optional[Mapping[EntityType, int]] = None,
):
self._original_max_size = max_size
) -> None:
self._original_max_size: int = max_size
self._max_size = math.floor(max_size)
self._entity_to_key: Dict[EntityType, int] = {}

12 changes: 6 additions & 6 deletions synapse/util/caches/ttlcache.py
@@ -159,12 +159,12 @@ def expire(self) -> None:
del self._expiry_list[0]


@attr.s(frozen=True, slots=True)
class _CacheEntry:
Comment from the PR author:

CI is unhappy here.

2021-09-27T15:54:47.4244222Z [ERROR]
2021-09-27T15:54:47.4244683Z Traceback (most recent call last):
2021-09-27T15:54:47.4251489Z   File "/home/runner/work/synapse/synapse/.tox/py/lib/python3.6/site-packages/twisted/trial/runner.py", line 693, in loadByName
2021-09-27T15:54:47.4252855Z     return self.suiteFactory([self.findByName(name, recurse=recurse)])
2021-09-27T15:54:47.4254696Z   File "/home/runner/work/synapse/synapse/.tox/py/lib/python3.6/site-packages/twisted/trial/runner.py", line 453, in findByName
2021-09-27T15:54:47.4255759Z     obj = reflect.namedModule(searchName)
2021-09-27T15:54:47.4257137Z   File "/home/runner/work/synapse/synapse/.tox/py/lib/python3.6/site-packages/twisted/python/reflect.py", line 157, in namedModule
2021-09-27T15:54:47.4258112Z     topLevel = __import__(name)
2021-09-27T15:54:47.4258979Z   File "/home/runner/work/synapse/synapse/tests/rest/client/test_third_party_rules.py", line 27, in <module>
2021-09-27T15:54:47.4259789Z     from tests import unittest
2021-09-27T15:54:47.4260567Z   File "/home/runner/work/synapse/synapse/tests/unittest.py", line 38, in <module>
2021-09-27T15:54:47.4261634Z     from synapse.federation.transport import server as federation_server
2021-09-27T15:54:47.4262824Z   File "/home/runner/work/synapse/synapse/synapse/federation/transport/server/__init__.py", line 21, in <module>
2021-09-27T15:54:47.4264029Z     from synapse.federation.transport.server._base import (
2021-09-27T15:54:47.4265283Z   File "/home/runner/work/synapse/synapse/synapse/federation/transport/server/_base.py", line 31, in <module>
2021-09-27T15:54:47.4266216Z     from synapse.server import HomeServer
2021-09-27T15:54:47.4267084Z   File "/home/runner/work/synapse/synapse/synapse/server.py", line 54, in <module>
2021-09-27T15:54:47.4268186Z     from synapse.federation.federation_client import FederationClient
2021-09-27T15:54:47.4275541Z   File "/home/runner/work/synapse/synapse/synapse/federation/federation_client.py", line 58, in <module>
2021-09-27T15:54:47.4277013Z     from synapse.federation.transport.client import SendJoinResponse
2021-09-27T15:54:47.4278483Z   File "/home/runner/work/synapse/synapse/synapse/federation/transport/client.py", line 33, in <module>
2021-09-27T15:54:47.4280832Z     from synapse.http.matrixfederationclient import ByteParser
2021-09-27T15:54:47.4282277Z   File "/home/runner/work/synapse/synapse/synapse/http/matrixfederationclient.py", line 67, in <module>
2021-09-27T15:54:47.4283684Z     from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
2021-09-27T15:54:47.4285068Z   File "/home/runner/work/synapse/synapse/synapse/http/federation/matrix_federation_agent.py", line 41, in <module>
2021-09-27T15:54:47.4286572Z     from synapse.http.federation.well_known_resolver import WellKnownResolver
2021-09-27T15:54:47.4287856Z   File "/home/runner/work/synapse/synapse/synapse/http/federation/well_known_resolver.py", line 32, in <module>
2021-09-27T15:54:47.4288946Z     from synapse.util.caches.ttlcache import TTLCache
2021-09-27T15:54:47.4290319Z   File "/home/runner/work/synapse/synapse/synapse/util/caches/ttlcache.py", line 163, in <module>
2021-09-27T15:54:47.4291158Z     class _CacheEntry(Generic[KT, VT]):
2021-09-27T15:54:47.4309820Z   File "/home/runner/work/synapse/synapse/.tox/py/lib/python3.6/site-packages/attr/_make.py", line 1559, in wrap
2021-09-27T15:54:47.4310783Z     return builder.build_class()
2021-09-27T15:54:47.4313286Z   File "/home/runner/work/synapse/synapse/.tox/py/lib/python3.6/site-packages/attr/_make.py", line 730, in build_class
2021-09-27T15:54:47.4314208Z     return self._create_slots_class()
2021-09-27T15:54:47.4315773Z   File "/home/runner/work/synapse/synapse/.tox/py/lib/python3.6/site-packages/attr/_make.py", line 847, in _create_slots_class
2021-09-27T15:54:47.4316784Z     cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
2021-09-27T15:54:47.4317678Z   File "/opt/hostedtoolcache/Python/3.6.15/x64/lib/python3.6/typing.py", line 948, in __new__
2021-09-27T15:54:47.4318610Z     raise TypeError("Cannot inherit from plain Generic")
2021-09-27T15:54:47.4319430Z builtins.TypeError: Cannot inherit from plain Generic
2021-09-27T15:54:47.4319918Z 
2021-09-27T15:54:47.4320688Z tests.rest.client.test_third_party_rules

Looks like python-attrs/attrs#313
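
A minimal reproduction of the failure mode, assuming Python 3.6 and an attrs release predating the fix for python-attrs/attrs#313 (on current Python and attrs this class builds fine):

import attr
from typing import Generic, TypeVar

KT = TypeVar("KT")
VT = TypeVar("VT")

@attr.s(frozen=True, slots=True)
class Entry(Generic[KT, VT]):
    # slots=True makes attrs rebuild the class via type(); on Python 3.6
    # the rebuilt bases collapse to plain Generic, raising
    # TypeError: Cannot inherit from plain Generic
    key = attr.ib()
    value = attr.ib()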

@attr.s(frozen=True, slots=True, auto_attribs=True)
class _CacheEntry: # Should be Generic[KT, VT]. See python-attrs/attrs#313
"""TTLCache entry"""

# expiry_time is the first attribute, so that entries are sorted by expiry.
expiry_time = attr.ib(type=float)
ttl = attr.ib(type=float)
key = attr.ib()
value = attr.ib()
expiry_time: float
ttl: float
key: Any # should be KT
value: Any # should be VT
8 changes: 6 additions & 2 deletions synapse/util/daemonize.py
@@ -19,6 +19,8 @@
import os
import signal
import sys
from types import FrameType, TracebackType
from typing import NoReturn, Type


def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") -> None:
@@ -97,7 +99,7 @@ def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") -
# (we don't normally expect reactor.run to raise any exceptions, but this will
# also catch any other uncaught exceptions before we get that far.)

def excepthook(type_, value, traceback):
def excepthook(
type_: Type[BaseException], value: BaseException, traceback: TracebackType
) -> None:
logger.critical("Unhanded exception", exc_info=(type_, value, traceback))

sys.excepthook = excepthook
@@ -119,7 +123,7 @@ def excepthook(type_, value, traceback):
sys.exit(1)

# write a log line on SIGTERM.
def sigterm(signum, frame):
def sigterm(signum: signal.Signals, frame: FrameType) -> NoReturn:
logger.warning("Caught signal %s. Stopping daemon." % signum)
sys.exit(0)

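A standalone sketch of the typed SIGTERM handler (hypothetical names, standard library only). Note that the interpreter passes an int for `signum` and the frame may be None, so the wider `int`/`Optional[FrameType]` annotations from typeshed also type-check:

import signal
import sys
from types import FrameType
from typing import Optional

def sigterm(signum: int, frame: Optional[FrameType]) -> None:
    print("Caught signal %s. Stopping." % signum)
    sys.exit(0)

signal.signal(signal.SIGTERM, sigterm)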