-
Notifications
You must be signed in to change notification settings - Fork 1.4k
/
base_check.py
187 lines (152 loc) · 6.37 KB
/
base_check.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from copy import deepcopy
import requests
from six import PY2
from ...errors import CheckException
from ...utils.tracing import traced_class
from .. import AgentCheck
from .mixins import OpenMetricsScraperMixin
# Names of the instance configuration options handled by the OpenMetrics
# scraper. Also consumed at the bottom of this module to generate the
# `StandardFields.__doc__` listing for documentation purposes.
STANDARD_FIELDS = [
    'prometheus_url',
    'namespace',
    'metrics',
    'prometheus_metrics_prefix',
    'health_service_check',
    'include_labels',
    'label_to_hostname',
    'label_joins',
    'labels_mapper',
    'type_overrides',
    'send_histograms_buckets',
    'send_distribution_buckets',
    'send_monotonic_counter',
    'send_monotonic_with_gauge',
    'send_distribution_counts_as_monotonic',
    'send_distribution_sums_as_monotonic',
    'exclude_labels',
    'bearer_token_auth',
    'bearer_token_path',
    'ignore_metrics',
]
class OpenMetricsBaseCheck(OpenMetricsScraperMixin, AgentCheck):
    """
    OpenMetricsBaseCheck is a class that helps scrape endpoints that emit Prometheus metrics only
    with YAML configurations.

    Minimal example configuration:

        instances:
        - prometheus_url: http://example.com/endpoint
          namespace: "foobar"
          metrics:
          - bar
          - foo

    Agent 6 signature:

        OpenMetricsBaseCheck(name, init_config, instances, default_instances=None, default_namespace=None)
    """

    # NOTE(review): presumably enforced by the AgentCheck base class as a cap on
    # submitted metrics per run — confirm against the base class.
    DEFAULT_METRIC_LIMIT = 2000

    # Remap legacy instance option names onto the names understood by the
    # shared HTTP request wrapper (ssl_* -> tls_*, prometheus_timeout -> timeout).
    HTTP_CONFIG_REMAPPER = {
        'ssl_verify': {'name': 'tls_verify'},
        'ssl_cert': {'name': 'tls_cert'},
        'ssl_private_key': {'name': 'tls_private_key'},
        'ssl_ca_cert': {'name': 'tls_ca_cert'},
        'prometheus_timeout': {'name': 'timeout'},
        'request_size': {'name': 'request_size', 'default': 10},
    }

    # Allow tracing for openmetrics integrations
    def __init_subclass__(cls, **kwargs):
        # Wrap every concrete subclass with traced_class so its execution can
        # be traced without each integration opting in individually.
        super().__init_subclass__(**kwargs)
        return traced_class(cls)

    def __init__(self, *args, **kwargs):
        """
        The base class for any Prometheus-based integration.

        Accepts both keyword arguments and two legacy positional layouts:

        - Agent 6: ``(name, init_config, instances)`` with optional extra
          positional ``default_instances`` (args[4]) and ``default_namespace``
          (args[5]), which are stripped from ``args`` before delegating to the
          base class.
        - Agent 5: ``(name, init_config, agentConfig, instances)`` — instances
          taken from ``args[3]`` when exactly four positional args remain.

        Scraper configurations are pre-generated for every instance. When an
        instance lists ``possible_prometheus_urls``, each URL is probed in
        order and the first one that responds successfully is kept as
        ``prometheus_url``; if none respond, a CheckException is raised.
        """
        args = list(args)
        default_instances = kwargs.pop('default_instances', None) or {}
        default_namespace = kwargs.pop('default_namespace', None)

        # Legacy callers passed default_instances/default_namespace
        # positionally after the instances argument; pull them out of args so
        # the base class only sees the arguments it expects.
        legacy_kwargs_in_args = args[4:]
        del args[4:]

        if len(legacy_kwargs_in_args) > 0:
            default_instances = legacy_kwargs_in_args[0] or {}
        if len(legacy_kwargs_in_args) > 1:
            default_namespace = legacy_kwargs_in_args[1]

        super(OpenMetricsBaseCheck, self).__init__(*args, **kwargs)
        # Cache of scraper configurations, keyed by prometheus endpoint URL
        # (populated lazily by get_scraper_config).
        self.config_map = {}
        self._http_handlers = {}
        self.default_instances = default_instances
        self.default_namespace = default_namespace

        # pre-generate the scraper configurations
        if 'instances' in kwargs:
            instances = kwargs['instances']
        elif len(args) == 4:
            # instances from agent 5 signature
            instances = args[3]
        elif isinstance(args[2], (tuple, list)):
            # instances from agent 6 signature
            instances = args[2]
        else:
            instances = None

        if instances is not None:
            for instance in instances:
                possible_urls = instance.get('possible_prometheus_urls')
                if possible_urls is not None:
                    for url in possible_urls:
                        try:
                            # Probe the candidate URL with a copy of the
                            # instance so the original instance dict is only
                            # mutated once a URL is confirmed reachable.
                            new_instance = deepcopy(instance)
                            new_instance.update({'prometheus_url': url})
                            scraper_config = self.get_scraper_config(new_instance)
                            response = self.send_request(url, scraper_config)
                            response.raise_for_status()
                            instance['prometheus_url'] = url
                            self.get_scraper_config(instance)
                            break
                        except (IOError, requests.HTTPError, requests.exceptions.SSLError) as e:
                            self.log.info("Couldn't connect to %s: %s, trying next possible URL.", url, str(e))
                    else:
                        # for/else: reached only when no candidate URL
                        # responded successfully.
                        raise CheckException(
                            "The agent could not connect to any of the following URLs: %s." % possible_urls
                        )
                else:
                    self.get_scraper_config(instance)

    def check(self, instance):
        """
        Run one collection cycle for this instance by delegating to the
        scraper mixin's ``process`` with the instance's scraper configuration.

        Raises CheckException when the configuration declares no metrics.
        """
        # Get the configuration for this specific instance
        scraper_config = self.get_scraper_config(instance)

        # We should be specifying metrics for checks that are vanilla OpenMetricsBaseCheck-based
        if not scraper_config['metrics_mapper']:
            raise CheckException(
                "You have to collect at least one metric from the endpoint: {}".format(scraper_config['prometheus_url'])
            )

        self.process(scraper_config)

    def get_scraper_config(self, instance):
        """
        Validates the instance configuration and creates a scraper configuration for a new instance.
        If the endpoint already has a corresponding configuration, return the cached configuration.

        Raises CheckException when the instance has no ``prometheus_url``.
        """
        endpoint = instance.get('prometheus_url')
        if endpoint is None:
            raise CheckException("Unable to find prometheus URL in config file.")

        # If we've already created the corresponding scraper configuration, return it
        if endpoint in self.config_map:
            return self.config_map[endpoint]

        # Otherwise, we create the scraper configuration
        config = self.create_scraper_configuration(instance)

        # Add this configuration to the config_map
        self.config_map[endpoint] = config

        return config

    def _finalize_tags_to_submit(self, _tags, metric_name, val, metric, custom_tags=None, hostname=None):
        """
        Format the finalized tags.

        This is generally a noop, but it can be used to change the tags before
        sending metrics (hook point for subclasses).
        """
        return _tags

    def _filter_metric(self, metric, scraper_config):
        """
        Used to filter metrics at the beginning of the processing; by default
        no metric is filtered (subclasses may override to return True to skip
        a metric).
        """
        return False
# For documentation generation
# TODO: use an enum and remove STANDARD_FIELDS when mkdocstrings supports it
class StandardFields(object):
    pass


if not PY2:
    # Render each standard field as a markdown bullet and attach the result as
    # the stub's docstring. Skipped on Python 2, where a class __doc__ is not
    # writable after class creation.
    StandardFields.__doc__ = '\n'.join(['- `{}`'.format(name) for name in STANDARD_FIELDS])