From ae01a7edd32c8a4650700294c50a37385fa07984 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Wed, 6 Apr 2022 13:59:04 +0100
Subject: [PATCH] Update type annotations for compatibility with
 prometheus_client 0.14 (#12389)

Principally, `prometheus_client.REGISTRY.register` now requires its argument
to extend `prometheus_client.Collector`.

Additionally, `Gauge.set` is now annotated so that passing `Optional[int]`
causes an error.
---
 changelog.d/12389.misc                        |  1 +
 synapse/metrics/__init__.py                   | 16 +++++-----
 synapse/metrics/_gc.py                        |  6 ++--
 synapse/metrics/_reactor_metrics.py           |  4 ++-
 synapse/metrics/_types.py                     | 31 +++++++++++++++++++
 synapse/metrics/background_process_metrics.py |  3 +-
 synapse/metrics/jemalloc.py                   | 20 +++++++++---
 synapse/storage/databases/main/events.py      |  4 +--
 8 files changed, 67 insertions(+), 18 deletions(-)
 create mode 100644 changelog.d/12389.misc
 create mode 100644 synapse/metrics/_types.py

diff --git a/changelog.d/12389.misc b/changelog.d/12389.misc
new file mode 100644
index 000000000000..00e3a5d75811
--- /dev/null
+++ b/changelog.d/12389.misc
@@ -0,0 +1 @@
+Update type annotations for compatibility with prometheus_client 0.14.
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index d321946aa23d..fffd83546ccc 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -1,4 +1,5 @@
 # Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2022 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -52,12 +53,13 @@
     start_http_server,
 )
 from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
+from synapse.metrics._types import Collector
 
 logger = logging.getLogger(__name__)
 
 METRICS_PREFIX = "/_synapse/metrics"
 
-all_gauges: "Dict[str, Union[LaterGauge, InFlightGauge]]" = {}
+all_gauges: Dict[str, Collector] = {}
 
 HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")
 
@@ -78,11 +80,10 @@ def collect() -> Iterable[Metric]:
 
 
 @attr.s(slots=True, hash=True, auto_attribs=True)
-class LaterGauge:
-
+class LaterGauge(Collector):
     name: str
     desc: str
-    labels: Optional[Iterable[str]] = attr.ib(hash=False)
+    labels: Optional[Sequence[str]] = attr.ib(hash=False)
     # callback: should either return a value (if there are no labels for this metric),
     # or dict mapping from a label tuple to a value
     caller: Callable[
@@ -125,7 +126,7 @@ def _register(self) -> None:
 MetricsEntry = TypeVar("MetricsEntry")
 
 
-class InFlightGauge(Generic[MetricsEntry]):
+class InFlightGauge(Generic[MetricsEntry], Collector):
     """Tracks number of things (e.g. requests, Measure blocks, etc) in flight
     at any given time.
 
@@ -246,7 +247,7 @@ def _register_with_collector(self) -> None:
         all_gauges[self.name] = self
 
 
-class GaugeBucketCollector:
+class GaugeBucketCollector(Collector):
     """Like a Histogram, but the buckets are Gauges which are updated atomically.
 
     The data is updated by calling `update_data` with an iterable of measurements.
@@ -340,7 +341,7 @@ def _values_to_metric(self, values: Iterable[float]) -> GaugeHistogramMetricFami
 #
 
 
-class CPUMetrics:
+class CPUMetrics(Collector):
     def __init__(self) -> None:
         ticks_per_sec = 100
         try:
@@ -470,6 +471,7 @@ def register_threadpool(name: str, threadpool: ThreadPool) -> None:
 
 
 __all__ = [
+    "Collector",
     "MetricsResource",
     "generate_latest",
     "start_http_server",
diff --git a/synapse/metrics/_gc.py b/synapse/metrics/_gc.py
index 2bc909efa0d3..b7d47ce3e744 100644
--- a/synapse/metrics/_gc.py
+++ b/synapse/metrics/_gc.py
@@ -30,6 +30,8 @@
 
 from twisted.internet import task
 
+from synapse.metrics._types import Collector
+
 """Prometheus metrics for garbage collection"""
 
 
@@ -71,7 +73,7 @@
 )
 
 
-class GCCounts:
+class GCCounts(Collector):
     def collect(self) -> Iterable[Metric]:
         cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"])
         for n, m in enumerate(gc.get_count()):
@@ -135,7 +137,7 @@ def _maybe_gc() -> None:
 #
 
 
-class PyPyGCStats:
+class PyPyGCStats(Collector):
     def collect(self) -> Iterable[Metric]:
 
         # @stats is a pretty-printer object with __str__() returning a nice table,
diff --git a/synapse/metrics/_reactor_metrics.py b/synapse/metrics/_reactor_metrics.py
index f38f7983131f..a2c6e6842d0c 100644
--- a/synapse/metrics/_reactor_metrics.py
+++ b/synapse/metrics/_reactor_metrics.py
@@ -21,6 +21,8 @@
 
 from twisted.internet import reactor
 
+from synapse.metrics._types import Collector
+
 #
 # Twisted reactor metrics
 #
@@ -54,7 +56,7 @@ def __getattr__(self, item: str) -> Any:
         return getattr(self._poller, item)
 
 
-class ReactorLastSeenMetric:
+class ReactorLastSeenMetric(Collector):
     def __init__(self, epoll_wrapper: EpollWrapper):
         self._epoll_wrapper = epoll_wrapper
 
diff --git a/synapse/metrics/_types.py b/synapse/metrics/_types.py
new file mode 100644
index 000000000000..dc5aa49397e1
--- /dev/null
+++ b/synapse/metrics/_types.py
@@ -0,0 +1,31 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from abc import ABC, abstractmethod
+from typing import Iterable
+
+from prometheus_client import Metric
+
+try:
+    from prometheus_client.registry import Collector
+except ImportError:
+    # prometheus_client.Collector is new as of prometheus 0.14. We redefine it here
+    # for compatibility with earlier versions.
+    class _Collector(ABC):
+        @abstractmethod
+        def collect(self) -> Iterable[Metric]:
+            pass
+
+    Collector = _Collector  # type: ignore
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index 53c508af9170..f61396bb79a9 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -46,6 +46,7 @@
     noop_context_manager,
     start_active_span,
 )
+from synapse.metrics._types import Collector
 
 if TYPE_CHECKING:
     import resource
@@ -127,7 +128,7 @@
 _bg_metrics_lock = threading.Lock()
 
 
-class _Collector:
+class _Collector(Collector):
     """A custom metrics collector for the background process metrics.
 
     Ensures that all of the metrics are up-to-date with any in-flight processes
diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py
index 98ed9c0829e8..6bc329f04a9c 100644
--- a/synapse/metrics/jemalloc.py
+++ b/synapse/metrics/jemalloc.py
@@ -16,11 +16,13 @@
 import logging
 import os
 import re
-from typing import Iterable, Optional
+from typing import Iterable, Optional, overload
 
-from prometheus_client import Metric
+from prometheus_client import REGISTRY, Metric
+from typing_extensions import Literal
 
-from synapse.metrics import REGISTRY, GaugeMetricFamily
+from synapse.metrics import GaugeMetricFamily
+from synapse.metrics._types import Collector
 
 logger = logging.getLogger(__name__)
 
@@ -59,6 +61,16 @@ def _setup_jemalloc_stats() -> None:
 
     jemalloc = ctypes.CDLL(jemalloc_path)
 
+    @overload
+    def _mallctl(
+        name: str, read: Literal[True] = True, write: Optional[int] = None
+    ) -> int:
+        ...
+
+    @overload
+    def _mallctl(name: str, read: Literal[False], write: Optional[int] = None) -> None:
+        ...
+
     def _mallctl(
         name: str, read: bool = True, write: Optional[int] = None
     ) -> Optional[int]:
@@ -134,7 +146,7 @@ def _jemalloc_refresh_stats() -> None:
         except Exception as e:
             logger.warning("Failed to reload jemalloc stats: %s", e)
 
-    class JemallocCollector:
+    class JemallocCollector(Collector):
         """Metrics for internal jemalloc stats."""
 
         def collect(self) -> Iterable[Metric]:
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index d253243125fb..57489c30f142 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -200,9 +200,7 @@ async def _persist_events_and_state_updates(
             if stream < 0:
                 # backfilled events have negative stream orderings, so we don't
                 # want to set the event_persisted_position to that.
-                synapse.metrics.event_persisted_position.set(
-                    events_and_contexts[-1][0].internal_metadata.stream_ordering
-                )
+                synapse.metrics.event_persisted_position.set(stream)
 
             for event, context in events_and_contexts:
                 if context.app_service:
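
As a point of reference, the pattern this patch enforces can be sketched as follows. The sketch is not part of the patch: the QueueDepthCollector class, the myapp_queue_depth metric name and the work_queue it reports on are hypothetical, chosen only to show a custom collector subclassing Collector (via synapse/metrics/_types.py) so that prometheus_client.REGISTRY.register accepts it under the stricter 0.14 annotations.

from typing import Iterable, List

from prometheus_client import REGISTRY, Metric
from prometheus_client.core import GaugeMetricFamily

from synapse.metrics import Collector  # re-exported shim from synapse/metrics/_types.py


class QueueDepthCollector(Collector):
    """Hypothetical collector reporting the depth of an in-memory queue on each scrape."""

    def __init__(self, queue: List[object]) -> None:
        self._queue = queue

    def collect(self) -> Iterable[Metric]:
        # A fresh gauge sample is built every time Prometheus scrapes the registry.
        yield GaugeMetricFamily(
            "myapp_queue_depth",
            "Current number of queued items",
            value=len(self._queue),
        )


work_queue: List[object] = []
REGISTRY.register(QueueDepthCollector(work_queue))

On prometheus_client 0.14 and later, Collector resolves to prometheus_client.registry.Collector, so the register() call above type-checks; on older versions the fallback ABC in _types.py imposes the same collect() contract, which is why the collector classes touched by this patch only need to gain a base class.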