Merge pull request Yelp#71 from just1900/alt
fix compound query key in metric aggregation
jertel committed Apr 22, 2021
2 parents 7c2ddbe + 3d0a285 commit 7b99e21
Showing 2 changed files with 48 additions and 11 deletions.
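
The gist of the change: when a rule uses a compound_query_key, every unique key combination should be able to trigger its own alert, so the joined key values are written into the match payload under the rule's query_key. A minimal sketch of that flattening step (not part of the commit; the field names and values mirror the new test fixture below):

    # Sketch of the compound-key flattening done in check_matches_recursive.
    # 'qk'/'sub_qk' and their values are illustrative, borrowed from the test.
    match_data = {'qk': 'qk_val', 'sub_qk': 'sub_qk_val1'}
    compound_query_key = ['qk', 'sub_qk']   # rules['compound_query_key']
    query_key = 'qk,sub_qk'                 # rules['query_key']

    compound_value = [match_data[key] for key in compound_query_key]
    match_data[query_key] = ",".join(str(value) for value in compound_value)
    assert match_data[query_key] == 'qk_val,sub_qk_val1'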
24 changes: 13 additions & 11 deletions elastalert/ruletypes.py
@@ -1122,18 +1122,20 @@ def check_matches_recursive(self, timestamp, query_key, aggregation_data, compound_keys, match_data):
                                              result,
                                              compound_keys[1:],
                                              match_data)
 
         else:
-            metric_val = aggregation_data[self.metric_key]['value']
-            if self.crossed_thresholds(metric_val):
-                match_data[self.rules['timestamp_field']] = timestamp
-                match_data[self.metric_key] = metric_val
-
-                # add compound key to payload to allow alerts to trigger for every unique occurence
-                compound_value = [match_data[key] for key in self.rules['compound_query_key']]
-                match_data[self.rules['query_key']] = ",".join([str(value) for value in compound_value])
-
-                self.add_match(match_data)
+            if 'interval_aggs' in aggregation_data:
+                metric_val_arr = [term[self.metric_key]['value'] for term in aggregation_data['interval_aggs']['buckets']]
+            else:
+                metric_val_arr = [aggregation_data[self.metric_key]['value']]
+            for metric_val in metric_val_arr:
+                if self.crossed_thresholds(metric_val):
+                    match_data[self.rules['timestamp_field']] = timestamp
+                    match_data[self.metric_key] = metric_val
+
+                    # add compound key to payload to allow alerts to trigger for every unique occurence
+                    compound_value = [match_data[key] for key in self.rules['compound_query_key']]
+                    match_data[self.rules['query_key']] = ",".join([str(value) for value in compound_value])
+                    self.add_match(match_data)
 
     def crossed_thresholds(self, metric_value):
         if metric_value is None:
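
For context on the second half of the change: when bucket_interval is set, the leaf of the aggregation response carries one metric value per interval segment under interval_aggs, and the new loop evaluates each segment's value against the thresholds instead of a single overall value. A rough sketch of the shape being walked, mirroring the test fixture below (values are illustrative):

    # Leaf aggregation shape when bucket_interval is set: one avg per segment.
    aggregation_data = {
        "key": "sub_qk_val1",
        "interval_aggs": {"buckets": [
            {"key": "1617156690000", "metric_cpu_pct_avg": {"value": 0.91}},
            {"key": "1617156750000", "metric_cpu_pct_avg": {"value": 0.78}},
        ]},
    }
    metric_key = 'metric_cpu_pct_avg'

    # Each per-segment value is checked against the thresholds independently.
    metric_val_arr = [term[metric_key]['value']
                      for term in aggregation_data['interval_aggs']['buckets']]
    assert metric_val_arr == [0.91, 0.78]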
35 changes: 35 additions & 0 deletions tests/rules_test.py
@@ -1184,6 +1184,41 @@ def test_metric_aggregation_complex_query_key():
     assert rule.matches[1]['sub_qk'] == 'sub_qk_val2'
 
+
+def test_metric_aggregation_complex_query_key_bucket_interval():
+    rules = {'buffer_time': datetime.timedelta(minutes=5),
+             'timestamp_field': '@timestamp',
+             'metric_agg_type': 'avg',
+             'metric_agg_key': 'cpu_pct',
+             'bucket_interval': {'minutes': 1},
+             'bucket_interval_timedelta': datetime.timedelta(minutes=1),
+             'compound_query_key': ['qk', 'sub_qk'],
+             'query_key': 'qk,sub_qk',
+             'max_threshold': 0.8}
+
+    # Quoted from https://elastalert.readthedocs.io/en/latest/ruletypes.html#metric-aggregation
+    # bucket_interval: If present this will divide the metric calculation window into bucket_interval sized segments.
+    # The metric value will be calculated and evaluated against the threshold(s) for each segment.
+    interval_aggs = {"interval_aggs": {"buckets": [
+        {"metric_cpu_pct_avg": {"value": 0.91}, "key": "1617156690000"},
+        {"metric_cpu_pct_avg": {"value": 0.89}, "key": "1617156750000"},
+        {"metric_cpu_pct_avg": {"value": 0.78}, "key": "1617156810000"},
+        {"metric_cpu_pct_avg": {"value": 0.85}, "key": "1617156870000"},
+        {"metric_cpu_pct_avg": {"value": 0.86}, "key": "1617156930000"},
+    ]}, "key": "sub_qk_val1"}
+
+    query = {"bucket_aggs": {"buckets": [
+        interval_aggs
+    ]}, "key": "qk_val"}
+
+    rule = MetricAggregationRule(rules)
+    rule.check_matches(datetime.datetime.now(), 'qk_val', query)
+    assert len(rule.matches) == 4
+    assert rule.matches[0]['qk'] == 'qk_val'
+    assert rule.matches[1]['qk'] == 'qk_val'
+    assert rule.matches[0]['sub_qk'] == 'sub_qk_val1'
+    assert rule.matches[1]['sub_qk'] == 'sub_qk_val1'
+
+
 def test_percentage_match():
     rules = {'match_bucket_filter': {'term': 'term_val'},
              'buffer_time': datetime.timedelta(minutes=5),
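
Why the new test expects exactly four matches: of the five per-bucket averages in the fixture, every value except 0.78 exceeds max_threshold (0.8), and each crossing produces its own match. A quick sanity check of that count (not part of the commit):

    # 0.91, 0.89, 0.85 and 0.86 cross the 0.8 max_threshold; 0.78 does not.
    max_threshold = 0.8
    bucket_values = [0.91, 0.89, 0.78, 0.85, 0.86]
    assert len([v for v in bucket_values if v > max_threshold]) == 4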
