
Commit f8d0f72
Author: David Robertson

More types for synapse.util, part 1 (#10888)

The following modules now pass `disallow_untyped_defs`:

* synapse.util.caches.cached_call
* synapse.util.caches.lrucache
* synapse.util.caches.response_cache
* synapse.util.caches.stream_change_cache
* synapse.util.caches.ttlcache
* synapse.util.daemonize
* synapse.util.patch_inline_callbacks (also passes `no-untyped-defs`)
* synapse.util.versionstring

Additional typing in synapse.util.metrics. Didn't get this to pass `no-untyped-defs`; I think I'll need to watch #10847.

1 parent 6744273  commit f8d0f72

File tree

12 files changed (+134, -73 lines)

changelog.d/10888.misc
Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+Improve type hinting in `synapse.util`.

mypy.ini
Lines changed: 24 additions & 0 deletions

@@ -102,9 +102,27 @@ disallow_untyped_defs = True
 [mypy-synapse.util.batching_queue]
 disallow_untyped_defs = True

+[mypy-synapse.util.caches.cached_call]
+disallow_untyped_defs = True
+
 [mypy-synapse.util.caches.dictionary_cache]
 disallow_untyped_defs = True

+[mypy-synapse.util.caches.lrucache]
+disallow_untyped_defs = True
+
+[mypy-synapse.util.caches.response_cache]
+disallow_untyped_defs = True
+
+[mypy-synapse.util.caches.stream_change_cache]
+disallow_untyped_defs = True
+
+[mypy-synapse.util.caches.ttl_cache]
+disallow_untyped_defs = True
+
+[mypy-synapse.util.daemonize]
+disallow_untyped_defs = True
+
 [mypy-synapse.util.file_consumer]
 disallow_untyped_defs = True

@@ -141,6 +159,9 @@ disallow_untyped_defs = True
 [mypy-synapse.util.msisdn]
 disallow_untyped_defs = True

+[mypy-synapse.util.patch_inline_callbacks]
+disallow_untyped_defs = True
+
 [mypy-synapse.util.ratelimitutils]
 disallow_untyped_defs = True

@@ -162,6 +183,9 @@ disallow_untyped_defs = True
 [mypy-synapse.util.wheel_timer]
 disallow_untyped_defs = True

+[mypy-synapse.util.versionstring]
+disallow_untyped_defs = True
+
 [mypy-tests.handlers.test_user_directory]
 disallow_untyped_defs = True
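
Note: `disallow_untyped_defs = True` makes mypy reject any function in the listed modules whose signature is not fully annotated. A minimal illustration (toy functions, not from the Synapse tree; checked with `mypy --config-file mypy.ini`):

def scale(x, factor=2):  # error: Function is missing a type annotation
    return x * factor


def scale_typed(x: int, factor: int = 2) -> int:  # fully annotated: accepted
    return x * factor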

synapse/util/caches/cached_call.py
Lines changed: 1 addition & 1 deletion

@@ -85,7 +85,7 @@ async def get(self) -> TV:
             # result in the deferred, since `awaiting` a deferred destroys its result.
             # (Also, if it's a Failure, GCing the deferred would log a critical error
             # about unhandled Failures)
-            def got_result(r):
+            def got_result(r: Union[TV, Failure]) -> None:
                 self._result = r

             self._deferred.addBoth(got_result)
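
Note: the `Union[TV, Failure]` annotation reflects how Twisted dispatches `addBoth` callbacks: the same callable receives the plain result on success and a `Failure` on error. A standalone sketch of that behaviour (toy types, not Synapse code):

from typing import Union

from twisted.internet import defer
from twisted.python.failure import Failure


def got_result(r: Union[int, Failure]) -> None:
    # On success r is the value; on error it is a Failure wrapping the exception.
    print("got:", r)


ok: "defer.Deferred[int]" = defer.Deferred()
ok.addBoth(got_result)
ok.callback(42)  # got: 42

failed: "defer.Deferred[int]" = defer.Deferred()
failed.addBoth(got_result)
failed.errback(RuntimeError("boom"))  # got: a Failure wrapping the RuntimeError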

synapse/util/caches/deferred_cache.py
Lines changed: 6 additions & 5 deletions

@@ -31,6 +31,7 @@

 from twisted.internet import defer
 from twisted.python import failure
+from twisted.python.failure import Failure

 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.lrucache import LruCache

@@ -112,7 +113,7 @@ def metrics_cb() -> None:
         self.thread: Optional[threading.Thread] = None

     @property
-    def max_entries(self):
+    def max_entries(self) -> int:
         return self.cache.max_size

     def check_thread(self) -> None:

@@ -258,7 +259,7 @@ def compare_and_pop() -> bool:

             return False

-        def cb(result) -> None:
+        def cb(result: VT) -> None:
             if compare_and_pop():
                 self.cache.set(key, result, entry.callbacks)
             else:

@@ -270,7 +271,7 @@ def cb(result) -> None:
                 # not have been. Either way, let's double-check now.
                 entry.invalidate()

-        def eb(_fail) -> None:
+        def eb(_fail: Failure) -> None:
             compare_and_pop()
             entry.invalidate()

@@ -284,11 +285,11 @@ def eb(_fail) -> None:

     def prefill(
         self, key: KT, value: VT, callback: Optional[Callable[[], None]] = None
-    ):
+    ) -> None:
         callbacks = [callback] if callback else []
         self.cache.set(key, value, callbacks=callbacks)

-    def invalidate(self, key):
+    def invalidate(self, key) -> None:
         """Delete a key, or tree of entries

         If the cache is backed by a regular dict, then "key" must be of
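
Note: the new `cb(result: VT)` / `eb(_fail: Failure)` annotations match Twisted's `addCallbacks` contract, where the callback receives the success value and the errback receives a `Failure`. A minimal sketch under that assumption (not Synapse code):

from twisted.internet import defer
from twisted.python.failure import Failure


def cb(result: int) -> None:
    print("cache this value:", result)


def eb(_fail: Failure) -> None:
    print("not caching:", _fail.getErrorMessage())


d: "defer.Deferred[int]" = defer.Deferred()
d.addCallbacks(cb, eb)
d.callback(7)  # routed to cb; d.errback(...) would be routed to eb instead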

synapse/util/caches/lrucache.py
Lines changed: 26 additions & 31 deletions

@@ -52,7 +52,7 @@
 try:
     from pympler.asizeof import Asizer

-    def _get_size_of(val: Any, *, recurse=True) -> int:
+    def _get_size_of(val: Any, *, recurse: bool = True) -> int:
         """Get an estimate of the size in bytes of the object.

         Args:

@@ -71,7 +71,7 @@ def _get_size_of(val: Any, *, recurse=True) -> int:

 except ImportError:

-    def _get_size_of(val: Any, *, recurse=True) -> int:
+    def _get_size_of(val: Any, *, recurse: bool = True) -> int:
         return 0


@@ -85,15 +85,6 @@ def _get_size_of(val: Any, *, recurse=True) -> int:
 # a general type var, distinct from either KT or VT
 T = TypeVar("T")

-
-def enumerate_leaves(node, depth):
-    if depth == 0:
-        yield node
-    else:
-        for n in node.values():
-            yield from enumerate_leaves(n, depth - 1)
-
-
 P = TypeVar("P")


@@ -102,7 +93,7 @@ class _TimedListNode(ListNode[P]):

     __slots__ = ["last_access_ts_secs"]

-    def update_last_access(self, clock: Clock):
+    def update_last_access(self, clock: Clock) -> None:
         self.last_access_ts_secs = int(clock.time())


@@ -115,7 +106,7 @@ def update_last_access(self, clock: Clock):


 @wrap_as_background_process("LruCache._expire_old_entries")
-async def _expire_old_entries(clock: Clock, expiry_seconds: int):
+async def _expire_old_entries(clock: Clock, expiry_seconds: int) -> None:
     """Walks the global cache list to find cache entries that haven't been
     accessed in the given number of seconds.
     """

@@ -163,7 +154,7 @@ async def _expire_old_entries(clock: Clock, expiry_seconds: int):
     logger.info("Dropped %d items from caches", i)


-def setup_expire_lru_cache_entries(hs: "HomeServer"):
+def setup_expire_lru_cache_entries(hs: "HomeServer") -> None:
     """Start a background job that expires all cache entries if they have not
     been accessed for the given number of seconds.
     """

@@ -183,7 +174,7 @@ def setup_expire_lru_cache_entries(hs: "HomeServer"):
     )


-class _Node:
+class _Node(Generic[KT, VT]):
     __slots__ = [
         "_list_node",
         "_global_list_node",

@@ -197,8 +188,8 @@ class _Node:
     def __init__(
         self,
         root: "ListNode[_Node]",
-        key,
-        value,
+        key: KT,
+        value: VT,
         cache: "weakref.ReferenceType[LruCache]",
         clock: Clock,
         callbacks: Collection[Callable[[], None]] = (),

@@ -409,7 +400,7 @@ def evict() -> None:

         def synchronized(f: FT) -> FT:
             @wraps(f)
-            def inner(*args, **kwargs):
+            def inner(*args: Any, **kwargs: Any) -> Any:
                 with lock:
                     return f(*args, **kwargs)


@@ -418,17 +409,19 @@ def inner(*args, **kwargs):
         cached_cache_len = [0]
         if size_callback is not None:

-            def cache_len():
+            def cache_len() -> int:
                 return cached_cache_len[0]

         else:

-            def cache_len():
+            def cache_len() -> int:
                 return len(cache)

         self.len = synchronized(cache_len)

-        def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):
+        def add_node(
+            key: KT, value: VT, callbacks: Collection[Callable[[], None]] = ()
+        ) -> None:
             node = _Node(
                 list_root,
                 key,

@@ -446,7 +439,7 @@ def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):
             if caches.TRACK_MEMORY_USAGE and metrics:
                 metrics.inc_memory_usage(node.memory)

-        def move_node_to_front(node: _Node):
+        def move_node_to_front(node: _Node) -> None:
             node.move_to_front(real_clock, list_root)

         def delete_node(node: _Node) -> int:

@@ -488,7 +481,7 @@ def cache_get(
             default: Optional[T] = None,
             callbacks: Collection[Callable[[], None]] = (),
             update_metrics: bool = True,
-        ):
+        ) -> Union[None, T, VT]:
             node = cache.get(key, None)
             if node is not None:
                 move_node_to_front(node)

@@ -502,7 +495,9 @@ def cache_get(
                 return default

         @synchronized
-        def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()):
+        def cache_set(
+            key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()
+        ) -> None:
             node = cache.get(key, None)
             if node is not None:
                 # We sometimes store large objects, e.g. dicts, which cause

@@ -547,7 +542,7 @@ def cache_pop(key: KT, default: T) -> Union[T, VT]:
             ...

         @synchronized
-        def cache_pop(key: KT, default: Optional[T] = None):
+        def cache_pop(key: KT, default: Optional[T] = None) -> Union[None, T, VT]:
             node = cache.get(key, None)
             if node:
                 delete_node(node)

@@ -612,25 +607,25 @@ def cache_contains(key: KT) -> bool:
         self.contains = cache_contains
         self.clear = cache_clear

-    def __getitem__(self, key):
+    def __getitem__(self, key: KT) -> VT:
         result = self.get(key, self.sentinel)
         if result is self.sentinel:
             raise KeyError()
         else:
-            return result
+            return cast(VT, result)

-    def __setitem__(self, key, value):
+    def __setitem__(self, key: KT, value: VT) -> None:
         self.set(key, value)

-    def __delitem__(self, key, value):
+    def __delitem__(self, key: KT, value: VT) -> None:
         result = self.pop(key, self.sentinel)
         if result is self.sentinel:
             raise KeyError()

-    def __len__(self):
+    def __len__(self) -> int:
         return self.len()

-    def __contains__(self, key):
+    def __contains__(self, key: KT) -> bool:
         return self.contains(key)

     def set_cache_factor(self, factor: float) -> bool:
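
Note: making `_Node` generic over `KT`/`VT` (plus the `cast(VT, ...)` in `__getitem__`) is the usual pattern for letting mypy track key and value types through a container. A self-contained sketch of that pattern with hypothetical names (`Node`, `TinyLru`), not the Synapse classes:

from collections import OrderedDict
from typing import Generic, Optional, TypeVar

KT = TypeVar("KT")
VT = TypeVar("VT")


class Node(Generic[KT, VT]):
    def __init__(self, key: KT, value: VT) -> None:
        self.key = key
        self.value = value


class TinyLru(Generic[KT, VT]):
    def __init__(self, max_size: int) -> None:
        self._max_size = max_size
        self._nodes: "OrderedDict[KT, Node[KT, VT]]" = OrderedDict()

    def set(self, key: KT, value: VT) -> None:
        self._nodes[key] = Node(key, value)
        self._nodes.move_to_end(key)
        if len(self._nodes) > self._max_size:
            self._nodes.popitem(last=False)  # evict the oldest entry

    def get(self, key: KT) -> Optional[VT]:
        node = self._nodes.get(key)
        return node.value if node is not None else None


cache: TinyLru[str, int] = TinyLru(max_size=2)
cache.set("a", 1)
cache.set("b", 2)
cache.set("c", 3)  # "a" is evicted
assert cache.get("a") is None
assert cache.get("c") == 3  # mypy infers Optional[int] here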

synapse/util/caches/response_cache.py
Lines changed: 3 additions & 3 deletions

@@ -104,8 +104,8 @@ def get(self, key: KV) -> Optional[defer.Deferred]:
         return None

     def _set(
-        self, context: ResponseCacheContext[KV], deferred: defer.Deferred
-    ) -> defer.Deferred:
+        self, context: ResponseCacheContext[KV], deferred: "defer.Deferred[RV]"
+    ) -> "defer.Deferred[RV]":
         """Set the entry for the given key to the given deferred.

         *deferred* should run its callbacks in the sentinel logcontext (ie,

@@ -126,7 +126,7 @@ def _set(
         key = context.cache_key
         self.pending_result_cache[key] = result

-        def on_complete(r):
+        def on_complete(r: RV) -> RV:
             # if this cache has a non-zero timeout, and the callback has not cleared
             # the should_cache bit, we leave it in the cache for now and schedule
             # its removal later.
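
Note: the annotation is written as the string "defer.Deferred[RV]" because subscripting `Deferred` at runtime is not supported on older Twisted releases; inside quotes it is only evaluated by the type checker. A small sketch mirroring the `_set`/`on_complete` shape (hypothetical helper, not Synapse code):

from typing import TypeVar

from twisted.internet import defer

RV = TypeVar("RV")


def log_and_passthrough(d: "defer.Deferred[RV]") -> "defer.Deferred[RV]":
    # The callback returns its argument unchanged, so the result type is preserved.
    def on_complete(r: RV) -> RV:
        print("completed with:", r)
        return r

    d.addCallback(on_complete)
    return d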

synapse/util/caches/stream_change_cache.py
Lines changed: 3 additions & 3 deletions

@@ -40,10 +40,10 @@ def __init__(
         self,
         name: str,
         current_stream_pos: int,
-        max_size=10000,
+        max_size: int = 10000,
         prefilled_cache: Optional[Mapping[EntityType, int]] = None,
-    ):
-        self._original_max_size = max_size
+    ) -> None:
+        self._original_max_size: int = max_size
         self._max_size = math.floor(max_size)
         self._entity_to_key: Dict[EntityType, int] = {}

synapse/util/caches/ttlcache.py
Lines changed: 6 additions & 6 deletions

@@ -159,12 +159,12 @@ def expire(self) -> None:
             del self._expiry_list[0]


-@attr.s(frozen=True, slots=True)
-class _CacheEntry:
+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class _CacheEntry:  # Should be Generic[KT, VT]. See python-attrs/attrs#313
     """TTLCache entry"""

     # expiry_time is the first attribute, so that entries are sorted by expiry.
-    expiry_time = attr.ib(type=float)
-    ttl = attr.ib(type=float)
-    key = attr.ib()
-    value = attr.ib()
+    expiry_time: float
+    ttl: float
+    key: Any  # should be KT
+    value: Any  # should be VT
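
Note: with `auto_attribs=True`, plain class-level annotations replace the explicit `attr.ib(type=...)` declarations; field order is unchanged, so entries still sort by `expiry_time` first. A standalone sketch of the same pattern (hypothetical `CacheEntry`, not the Synapse class):

from typing import Any

import attr


@attr.s(frozen=True, slots=True, auto_attribs=True)
class CacheEntry:
    # expiry_time comes first so the attrs-generated ordering sorts by expiry.
    expiry_time: float
    ttl: float
    key: Any
    value: Any


early = CacheEntry(expiry_time=10.0, ttl=5.0, key="a", value=1)
late = CacheEntry(expiry_time=20.0, ttl=5.0, key="b", value=2)
assert sorted([late, early])[0] is early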

synapse/util/daemonize.py
Lines changed: 6 additions & 2 deletions

@@ -19,6 +19,8 @@
 import os
 import signal
 import sys
+from types import FrameType, TracebackType
+from typing import NoReturn, Type


 def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") -> None:

@@ -97,7 +99,9 @@ def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") -
     # (we don't normally expect reactor.run to raise any exceptions, but this will
     # also catch any other uncaught exceptions before we get that far.)

-    def excepthook(type_, value, traceback):
+    def excepthook(
+        type_: Type[BaseException], value: BaseException, traceback: TracebackType
+    ) -> None:
         logger.critical("Unhanded exception", exc_info=(type_, value, traceback))

     sys.excepthook = excepthook

@@ -119,7 +123,7 @@ def excepthook(type_, value, traceback):
         sys.exit(1)

     # write a log line on SIGTERM.
-    def sigterm(signum, frame):
+    def sigterm(signum: signal.Signals, frame: FrameType) -> NoReturn:
         logger.warning("Caught signal %s. Stopping daemon." % signum)
         sys.exit(0)
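
Note: the handler signatures above follow what CPython passes in: `sys.excepthook` receives the exception class, instance, and traceback, and a signal handler receives the signal number and current stack frame. A standalone sketch (simplified; typeshed prefers `int` and `Optional[...]` in these positions, so this differs slightly from the diff):

import logging
import signal
import sys
from types import FrameType, TracebackType
from typing import NoReturn, Optional, Type

logger = logging.getLogger(__name__)


def excepthook(
    type_: Type[BaseException], value: BaseException, traceback: Optional[TracebackType]
) -> None:
    # Log uncaught exceptions instead of writing them to stderr.
    logger.critical("Unhandled exception", exc_info=(type_, value, traceback))


def sigterm(signum: int, frame: Optional[FrameType]) -> NoReturn:
    # Write a log line, then exit cleanly on SIGTERM.
    logger.warning("Caught signal %s. Stopping.", signum)
    sys.exit(0)


sys.excepthook = excepthook
signal.signal(signal.SIGTERM, sigterm)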
