Skip to content

Commit

Permalink
Merge 56469cc into 9854c50
Browse files Browse the repository at this point in the history
  • Loading branch information
alexppg committed Mar 8, 2021
2 parents 9854c50 + 56469cc commit 12b4ba5
Show file tree
Hide file tree
Showing 4 changed files with 97 additions and 26 deletions.
32 changes: 22 additions & 10 deletions pyms/flask/services/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,23 @@
from flask import Blueprint, Flask, Response, request
from prometheus_client import REGISTRY, CollectorRegistry, Counter, Histogram, generate_latest, multiprocess

from pyms.flask.services.driver import DriverService
from pyms.flask.services.driver import DriverService, get_service_name
from pyms.config.conf import get_conf

# Based on https://github.com/sbarratt/flask-prometheus
# and https://github.com/korfuri/python-logging-prometheus/

FLASK_REQUEST_LATENCY = Histogram(
"http_server_requests_seconds", "Flask Request Latency", ["service", "method", "uri", "status"]
)
METRICS_CONFIG = get_conf(service=get_service_name(
service="metrics"), empty_init=True)

FLASK_REQUEST_COUNT = Counter(
"http_server_requests_count", "Flask Request Count", ["service", "method", "uri", "status"]
"http_server_requests_count", "Flask Request Count", [
"service", "method", "uri", "status"]
)

FLASK_REQUEST_LATENCY = Histogram(
"http_server_requests_seconds", "Flask Request Latency", [
"service", "method", "uri", "status"]
)

LOGGER_TOTAL_MESSAGES = Counter(
Expand All @@ -36,8 +43,10 @@ def after_request(self, response: Response) -> Response:
else:
path = request.path
request_latency = time.time() - request.start_time
FLASK_REQUEST_LATENCY.labels(self.app_name, request.method, path, response.status_code).observe(request_latency)
FLASK_REQUEST_COUNT.labels(self.app_name, request.method, path, response.status_code).inc()
FLASK_REQUEST_COUNT.labels(
self.app_name, request.method, path, response.status_code).inc()
FLASK_REQUEST_LATENCY.labels(
self.app_name, request.method, path, response.status_code).observe(request_latency)

return response

Expand All @@ -54,11 +63,14 @@ def __init__(self, *args, **kwargs):
self.serve_metrics()

def init_action(self, microservice_instance):
microservice_instance.application.register_blueprint(microservice_instance.metrics.metrics_blueprint)
microservice_instance.application.register_blueprint(
microservice_instance.metrics.metrics_blueprint)
self.add_logger_handler(
microservice_instance.application.logger, microservice_instance.application.config["APP_NAME"]
microservice_instance.application.logger, microservice_instance.application.config[
"APP_NAME"]
)
self.monitor(microservice_instance.application.config["APP_NAME"], microservice_instance.application)
self.monitor(
microservice_instance.application.config["APP_NAME"], microservice_instance.application)

def init_registry(self) -> None:
try:
Expand Down
28 changes: 27 additions & 1 deletion pyms/flask/services/requests.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,21 @@
from urllib3.util.retry import Retry

from pyms.constants import LOGGER_NAME
from pyms.flask.services.driver import DriverService
from pyms.flask.services.driver import DriverService, get_service_name
from pyms.flask.services.tracer import inject_span_in_headers
from pyms.config.conf import get_conf

try:
from prometheus_client import Counter, Histogram

REQUESTS_COUNT = Counter(
"http_client_requests_count", "Python requests count", ["service", "method", "uri", "status"]
)
REQUESTS_LATENCY = Histogram(
"http_client_requests_seconds", "Python requests latency", ["service", "method", "uri", "status"]
)
except ModuleNotFoundError: # pragma: no cover
pass

logger = logging.getLogger(LOGGER_NAME)

Expand Down Expand Up @@ -55,6 +68,9 @@ class Service(DriverService):
}
tracer = None

def init_action(self, microservice_instance):
    """Capture the Flask application's configured name for later use as the metrics 'service' label."""
    flask_app = microservice_instance.application
    # Attribute is deliberately created outside __init__; silence pylint's W0201 about it.
    self.app_name = flask_app.config["APP_NAME"]  # pylint: disable=W0201

def requests(self, session: requests.Session) -> requests.Session:
"""
A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a
Expand All @@ -75,6 +91,10 @@ def requests(self, session: requests.Session) -> requests.Session:
adapter = HTTPAdapter(max_retries=max_retries)
session_r.mount("http://", adapter)
session_r.mount("https://", adapter)

metrics_enabled = get_conf(service=get_service_name(service="metrics"), empty_init=True)
if metrics_enabled:
session_r.hooks["response"] = [self.observe_requests]
return session_r

@staticmethod
Expand Down Expand Up @@ -355,3 +375,9 @@ def delete(self, url: str, path_params: dict = None, headers: dict = None, **kwa
logger.debug("Response {}".format(response))

return response

def observe_requests(self, response, *args, **kwargs):
    """requests response hook: record one count sample and one latency sample for the outgoing HTTP call."""
    # Same label set for both metrics: service name, HTTP method, target URL, status code.
    label_values = (self.app_name, response.request.method, response.url, response.status_code)
    REQUESTS_COUNT.labels(*label_values).inc()
    elapsed_seconds = float(response.elapsed.total_seconds())
    REQUESTS_LATENCY.labels(*label_values).observe(elapsed_seconds)
2 changes: 2 additions & 0 deletions tests/config-tests-metrics.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
pyms:
services:
metrics: true
requests:
data: /
tracer:
client: "jaeger"
component_name: "Python Microservice with Jaeger"
Expand Down
61 changes: 46 additions & 15 deletions tests/test_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from pathlib import Path
from tempfile import TemporaryDirectory

import requests_mock
from opentracing import global_tracer
from prometheus_client import generate_latest, values

Expand Down Expand Up @@ -36,23 +37,46 @@ def setUp(self):
ms.reload_conf()
self.app = ms.create_app()
self.client = self.app.test_client()
self.request = ms.requests

def test_metrics_latency(self):
def test_metrics_requests_latency(self):
    """Server-side latency histogram exposes a bucket sample per endpoint/status hit."""
    self.client.get("/")
    self.client.get("/metrics")
    expected_root_sample = b'http_server_requests_seconds_bucket{le="0.005",method="GET",service="Python Microservice with Jaeger",status="404",uri="/"}'
    expected_metrics_sample = b'http_server_requests_seconds_bucket{le="0.005",method="GET",service="Python Microservice with Jaeger",status="200",uri="/metrics"}'
    assert expected_root_sample in generate_latest()
    assert expected_metrics_sample in generate_latest()

def test_metrics_count(self):
def test_metrics_requests_count(self):
    """Server-side request counter exposes a total per endpoint/status hit."""
    self.client.get("/")
    self.client.get("/metrics")
    expected_root_sample = b'http_server_requests_count_total{method="GET",service="Python Microservice with Jaeger",status="404",uri="/"}'
    expected_metrics_sample = b'http_server_requests_count_total{method="GET",service="Python Microservice with Jaeger",status="200",uri="/metrics"}'
    assert expected_root_sample in generate_latest()
    assert expected_metrics_sample in generate_latest()

# NOTE(review): indentation was lost in this paste, so the extent of the
# `with self.app.app_context():` block below is ambiguous — presumably it
# wraps the mocked outbound request; confirm against the original file.
# Verifies that an outgoing (client-side) request made through ms.requests
# is observed in the http_client_requests_seconds histogram.
@requests_mock.Mocker()
def test_metrics_responses_latency(self, mock_request):
url = "http://www.my-site.com/users"
full_url = url
with self.app.app_context():
# Register the mocked endpoint, then issue the instrumented request.
mock_request.get(full_url)
self.request.get(url)
# Scrape the metrics endpoint so the exposition is generated server-side too.
self.client.get("/metrics")
# Expected bucket sample: the full target URL is used as the 'uri' label.
generated_latency_url = b'http_client_requests_seconds_bucket{le="0.005",method="GET",service="Python Microservice with Jaeger",status="200",uri="http://www.my-site.com/users"}'
assert generated_latency_url in generate_latest()

# NOTE(review): indentation was lost in this paste, so the extent of the
# `with self.app.app_context():` block below is ambiguous — presumably it
# wraps the mocked outbound request; confirm against the original file.
# Verifies that an outgoing (client-side) request made through ms.requests
# increments the http_client_requests_count counter.
@requests_mock.Mocker()
def test_metrics_responses_count(self, mock_request):
url = "http://www.my-site.com/users"
full_url = url
with self.app.app_context():
# Register the mocked endpoint, then issue the instrumented request.
mock_request.get(full_url)
self.request.get(url)
# Scrape the metrics endpoint so the exposition is generated server-side too.
self.client.get("/metrics")
# Expected counter sample: the full target URL is used as the 'uri' label.
generated_count_url = b'http_client_requests_count_total{method="GET",service="Python Microservice with Jaeger",status="200",uri="http://www.my-site.com/users"}'
assert generated_count_url in generate_latest()

def test_metrics_logger(self):
self.client.get("/")
self.client.get("/metrics")
Expand Down Expand Up @@ -94,6 +118,7 @@ def setUp(self):
for path in Path(self.temp_dir.name).iterdir():
if self._testMethodName not in path.name:
path.unlink()
self.request = ms.requests

@classmethod
def tearDownClass(cls):
Expand All @@ -110,21 +135,27 @@ def test_metrics_stored_in_directory(self):
assert f"counter_{self._testMethodName}.db" in metrics
assert f"histogram_{self._testMethodName}.db" in metrics

def test_metrics_latency(self):
self.client.get("/")
@requests_mock.Mocker()
def test_metrics_responses_latency(self, mock_request):
url = "http://www.my-site.com/users"
full_url = url
with self.app.app_context():
mock_request.get(full_url)
self.request.get(url)
self.client.get("/metrics")
generated_latency_root = b'http_server_requests_seconds_bucket{le="0.005",method="GET",service="Python Microservice with Jaeger",status="404",uri="/"}'
generated_latency_metrics = b'http_server_requests_seconds_bucket{le="0.005",method="GET",service="Python Microservice with Jaeger",status="200",uri="/metrics"}'
assert generated_latency_root in generate_latest(self.app.ms.metrics.registry)
assert generated_latency_metrics in generate_latest(self.app.ms.metrics.registry)

def test_metrics_count(self):
self.client.get("/")
generated_latency_url = b'http_client_requests_seconds_bucket{le="0.005",method="GET",service="Python Microservice with Jaeger",status="200",uri="http://www.my-site.com/users"}'
assert generated_latency_url in generate_latest()

@requests_mock.Mocker()
def test_metrics_responses_count(self, mock_request):
url = "http://www.my-site.com/users"
full_url = url
with self.app.app_context():
mock_request.get(full_url)
self.request.get(url)
self.client.get("/metrics")
generated_count_root = b'http_server_requests_count_total{method="GET",service="Python Microservice with Jaeger",status="404",uri="/"}'
generated_count_metrics = b'http_server_requests_count_total{method="GET",service="Python Microservice with Jaeger",status="200",uri="/metrics"}'
assert generated_count_root in generate_latest(self.app.ms.metrics.registry)
assert generated_count_metrics in generate_latest(self.app.ms.metrics.registry)
generated_count_url = b'http_client_requests_count_total{method="GET",service="Python Microservice with Jaeger",status="200",uri="http://www.my-site.com/users"}'
assert generated_count_url in generate_latest()

def test_metrics_logger(self):
self.client.get("/")
Expand Down

0 comments on commit 12b4ba5

Please sign in to comment.