Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Update type annotations for compatibility with prometheus_client 0.14 #12389

Merged
merged 3 commits into from
Apr 6, 2022
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
Update type annotations for compatibility with prometheus_client 0.14
Principally, `prometheus_client.REGISTRY.register` now requires its argument to
extend `prometheus_client.Collector`.

Additionally, `Gauge.set` is now annotated so that passing `Optional[int]`
causes an error.
  • Loading branch information
richvdh committed Apr 6, 2022
commit fa5157527f61c57e4c851ab35286823fa143a2a3
1 change: 1 addition & 0 deletions changelog.d/12389.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Update type annotations for compatibility with prometheus_client 0.14.
29 changes: 22 additions & 7 deletions synapse/metrics/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -17,6 +18,7 @@
import os
import platform
import threading
from abc import ABC, abstractmethod
from typing import (
Callable,
Dict,
Expand Down Expand Up @@ -53,11 +55,24 @@
)
from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager

# prometheus_client.Collector is new as of prometheus 0.14. We redefine it here
# for compatibility with earlier versions.
try:
    from prometheus_client.registry import Collector
except ImportError:
    # Older prometheus_client: define a minimal stand-in ABC with the same
    # interface (a single abstract `collect` method yielding Metrics).

    class _Collector(ABC):
        """Fallback for `prometheus_client.registry.Collector` (pre-0.14)."""

        @abstractmethod
        def collect(self) -> Iterable[Metric]:
            pass

    # Rebind under the public name so the rest of the module can subclass
    # `Collector` regardless of the installed prometheus_client version.
    Collector = _Collector  # type: ignore

logger = logging.getLogger(__name__)

METRICS_PREFIX = "/_synapse/metrics"

all_gauges: "Dict[str, Union[LaterGauge, InFlightGauge]]" = {}
all_gauges: Dict[str, Collector] = {}

HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")

Expand All @@ -78,11 +93,10 @@ def collect() -> Iterable[Metric]:


@attr.s(slots=True, hash=True, auto_attribs=True)
class LaterGauge:

class LaterGauge(Collector):
name: str
desc: str
labels: Optional[Iterable[str]] = attr.ib(hash=False)
labels: Optional[Sequence[str]] = attr.ib(hash=False)
# callback: should either return a value (if there are no labels for this metric),
# or dict mapping from a label tuple to a value
caller: Callable[
Expand Down Expand Up @@ -125,7 +139,7 @@ def _register(self) -> None:
MetricsEntry = TypeVar("MetricsEntry")


class InFlightGauge(Generic[MetricsEntry]):
class InFlightGauge(Generic[MetricsEntry], Collector):
"""Tracks number of things (e.g. requests, Measure blocks, etc) in flight
at any given time.

Expand Down Expand Up @@ -246,7 +260,7 @@ def _register_with_collector(self) -> None:
all_gauges[self.name] = self


class GaugeBucketCollector:
class GaugeBucketCollector(Collector):
"""Like a Histogram, but the buckets are Gauges which are updated atomically.

The data is updated by calling `update_data` with an iterable of measurements.
Expand Down Expand Up @@ -340,7 +354,7 @@ def _values_to_metric(self, values: Iterable[float]) -> GaugeHistogramMetricFami
#


class CPUMetrics:
class CPUMetrics(Collector):
def __init__(self) -> None:
ticks_per_sec = 100
try:
Expand Down Expand Up @@ -470,6 +484,7 @@ def register_threadpool(name: str, threadpool: ThreadPool) -> None:


__all__ = [
"Collector",
"MetricsResource",
"generate_latest",
"start_http_server",
Expand Down
6 changes: 4 additions & 2 deletions synapse/metrics/_gc.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@

from twisted.internet import task

from synapse.metrics import Collector

"""Prometheus metrics for garbage collection"""


Expand Down Expand Up @@ -71,7 +73,7 @@
)


class GCCounts:
class GCCounts(Collector):
def collect(self) -> Iterable[Metric]:
cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"])
for n, m in enumerate(gc.get_count()):
Expand Down Expand Up @@ -135,7 +137,7 @@ def _maybe_gc() -> None:
#


class PyPyGCStats:
class PyPyGCStats(Collector):
def collect(self) -> Iterable[Metric]:

# @stats is a pretty-printer object with __str__() returning a nice table,
Expand Down
4 changes: 3 additions & 1 deletion synapse/metrics/_reactor_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@

from twisted.internet import reactor

from synapse.metrics import Collector

#
# Twisted reactor metrics
#
Expand Down Expand Up @@ -54,7 +56,7 @@ def __getattr__(self, item: str) -> Any:
return getattr(self._poller, item)


class ReactorLastSeenMetric:
class ReactorLastSeenMetric(Collector):
def __init__(self, epoll_wrapper: EpollWrapper):
self._epoll_wrapper = epoll_wrapper

Expand Down
3 changes: 2 additions & 1 deletion synapse/metrics/background_process_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
noop_context_manager,
start_active_span,
)
from synapse.metrics import Collector

if TYPE_CHECKING:
import resource
Expand Down Expand Up @@ -127,7 +128,7 @@
_bg_metrics_lock = threading.Lock()


class _Collector:
class _Collector(Collector):
"""A custom metrics collector for the background process metrics.

Ensures that all of the metrics are up-to-date with any in-flight processes
Expand Down
18 changes: 14 additions & 4 deletions synapse/metrics/jemalloc.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@
import logging
import os
import re
from typing import Iterable, Optional
from typing import Iterable, Literal, Optional, overload

from prometheus_client import Metric
from prometheus_client import REGISTRY, Metric

from synapse.metrics import REGISTRY, GaugeMetricFamily
from synapse.metrics import Collector, GaugeMetricFamily

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -59,6 +59,16 @@ def _setup_jemalloc_stats() -> None:

jemalloc = ctypes.CDLL(jemalloc_path)

@overload
def _mallctl(
name: str, read: Literal[True] = True, write: Optional[int] = None
) -> int:
...

@overload
def _mallctl(name: str, read: Literal[False], write: Optional[int] = None) -> None:
...

def _mallctl(
name: str, read: bool = True, write: Optional[int] = None
) -> Optional[int]:
Expand Down Expand Up @@ -134,7 +144,7 @@ def _jemalloc_refresh_stats() -> None:
except Exception as e:
logger.warning("Failed to reload jemalloc stats: %s", e)

class JemallocCollector:
class JemallocCollector(Collector):
"""Metrics for internal jemalloc stats."""

def collect(self) -> Iterable[Metric]:
Expand Down
4 changes: 1 addition & 3 deletions synapse/storage/databases/main/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,9 +200,7 @@ async def _persist_events_and_state_updates(
if stream < 0:
# backfilled events have negative stream orderings, so we don't
# want to set the event_persisted_position to that.
synapse.metrics.event_persisted_position.set(
events_and_contexts[-1][0].internal_metadata.stream_ordering
)
synapse.metrics.event_persisted_position.set(stream)

for event, context in events_and_contexts:
if context.app_service:
Expand Down