Skip to content

Commit

Permalink
introduce _wildcards_re
Browse files — browse the repository at this point in the history
  • Loading branch information
ahmed-mez committed Feb 18, 2020
1 parent 0787283 commit e9b2e8b
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
from __future__ import division

from fnmatch import fnmatchcase, translate
from fnmatch import translate
from math import isinf, isnan
from os.path import isfile
from re import compile
Expand Down Expand Up @@ -98,8 +98,16 @@ def create_scraper_configuration(self, instance=None):

config['metrics_mapper'] = metrics_mapper

# `_metrics_wildcards` holds the potential wildcards to match for metrics
config['_metrics_wildcards'] = None
# `_wildcards_re` is a Pattern object used to match metric wildcards
config['_wildcards_re'] = None

wildcards = set()
for metric in config['metrics_mapper']:
if "*" in metric:
wildcards.add(translate(metric))

if wildcards:
config['_wildcards_re'] = compile('|'.join(wildcards))

# `prometheus_metrics_prefix` allows to specify a prefix that all
# prometheus metrics should have. This can be used when the prometheus
Expand Down Expand Up @@ -148,18 +156,20 @@ def create_scraper_configuration(self, instance=None):
# skipped without a 'Unable to handle metric' debug line in the logs
config['ignore_metrics'] = instance.get('ignore_metrics', default_instance.get('ignore_metrics', []))
config['_ignored_metrics'] = set()
config['_ignored_patterns'] = set()

# `_ignored_re` is a Pattern object used to match ignored metric patterns
config['_ignored_re'] = None
ignored_patterns = set()

# Separate ignored metric names and ignored patterns in different sets for faster lookup later
for metric in config['ignore_metrics']:
if '*' in metric:
config['_ignored_patterns'].add(translate(metric))
ignored_patterns.add(translate(metric))
else:
config['_ignored_metrics'].add(metric)

if config['_ignored_patterns']:
config['_ignored_re'] = compile('|'.join(config['_ignored_patterns']))
if ignored_patterns:
config['_ignored_re'] = compile('|'.join(ignored_patterns))

# If you want to send the buckets as tagged values when dealing with histograms,
# set send_histograms_buckets to True, set to False otherwise.
Expand Down Expand Up @@ -529,7 +539,6 @@ def process_metric(self, metric, scraper_config, metric_transformers=None):

if scraper_config['_ignored_re'] and scraper_config['_ignored_re'].search(metric.name):
# Metric must be ignored
# Cache the ignored metric name to avoid calling fnmatchcase in the next check run
scraper_config['_ignored_metrics'].add(metric.name)
self._send_telemetry_counter(
self.TELEMETRY_COUNTER_METRICS_IGNORE_COUNT, len(metric.samples), scraper_config
Expand Down Expand Up @@ -560,15 +569,10 @@ def process_metric(self, metric, scraper_config, metric_transformers=None):

return

# build the wildcard list if first pass
if scraper_config['_metrics_wildcards'] is None:
scraper_config['_metrics_wildcards'] = [x for x in scraper_config['metrics_mapper'] if '*' in x]

# try matching wildcard
for wildcard in scraper_config['_metrics_wildcards']:
if fnmatchcase(metric.name, wildcard):
self.submit_openmetric(metric.name, metric, scraper_config)
return
# try matching wildcards
if scraper_config['_wildcards_re'] and scraper_config['_wildcards_re'].search(metric.name):
self.submit_openmetric(metric.name, metric, scraper_config)
return

self.log.debug(
'Skipping metric `%s` as it is not defined in the metrics mapper, '
Expand Down
47 changes: 47 additions & 0 deletions datadog_checks_base/tests/test_openmetrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -1527,6 +1527,53 @@ def test_ignore_metrics_multiple_wildcards(
aggregator.assert_all_metrics_covered()


def test_match_metric_wildcard(aggregator, mocked_prometheus_check, ref_gauge):
    """Check that a metric matched by the configured wildcards is collected."""
    scraper_config = mocked_prometheus_check.get_scraper_config(copy.deepcopy(PROMETHEUS_CHECK_INSTANCE))
    # A dry run would skip submission entirely, so disable it before processing.
    scraper_config['_dry_run'] = False

    mocked_prometheus_check.process_metric(ref_gauge, scraper_config)

    aggregator.assert_metric('prometheus.process.vm.bytes', count=1)


def test_match_metrics_multiple_wildcards(
    aggregator, mocked_prometheus_check, mocked_prometheus_scraper_config, text_data
):
    """
    Test that matched metric patterns are properly collected.

    Exercises every form a wildcard can take in the `metrics` instance config:
    a wildcard key inside a mapper dict, a bare wildcard string, and a
    wildcard with a leading `*` — alongside an exact-name rename entry.
    """
    # NOTE(review): `mocked_prometheus_scraper_config` is never referenced in this
    # test body — confirm the fixture is not requested for a side effect before removing it.
    check = mocked_prometheus_check
    instance = copy.deepcopy(PROMETHEUS_CHECK_INSTANCE)
    instance['_dry_run'] = False
    # Mapper dict mixing a wildcard key ('' means keep the raw name) with an
    # exact rename, plus two plain wildcard strings (one with a leading '*').
    instance['metrics'] = [
        {'go_memstats_mcache_*': '', 'go_memstats_heap_released_bytes_total': 'go_memstats.heap.released.bytes_total'},
        '*_lookups_total*',
        'go_memstats_alloc*',
    ]

    config = check.create_scraper_configuration(instance)

    # Fake the HTTP scrape: serve the Prometheus text fixture line by line.
    mock_response = mock.MagicMock(
        status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type}
    )
    # `__name__` is set on the patched callable — presumably because the check
    # logs or introspects `requests.get.__name__`; MagicMock does not provide it by default.
    with mock.patch('requests.get', return_value=mock_response, __name__="get"):
        check.process(config)

    # Two metrics matched by 'go_memstats_mcache_*' (submitted under their raw names):
    aggregator.assert_metric('prometheus.go_memstats_mcache_inuse_bytes', count=1)
    aggregator.assert_metric('prometheus.go_memstats_mcache_sys_bytes', count=1)
    # Exact-name rename entry:
    aggregator.assert_metric('prometheus.go_memstats.heap.released.bytes_total', count=1)
    # Matched by 'go_memstats_alloc*':
    aggregator.assert_metric('prometheus.go_memstats_alloc_bytes', count=1)
    aggregator.assert_metric('prometheus.go_memstats_alloc_bytes_total', count=1)
    # Matched by '*_lookups_total*':
    aggregator.assert_metric('prometheus.go_memstats_lookups_total', count=1)
    aggregator.assert_all_metrics_covered()


def test_label_joins(aggregator, mocked_prometheus_check, mocked_prometheus_scraper_config, mock_get):
""" Tests label join on text format """
check = mocked_prometheus_check
Expand Down

0 comments on commit e9b2e8b

Please sign in to comment.