diff --git a/.travis.yml b/.travis.yml index 292b97c00d6..2bce64ede78 100644 --- a/.travis.yml +++ b/.travis.yml @@ -63,6 +63,8 @@ jobs: env: CHECK=nextcloud PYTHON3=true - stage: test env: CHECK=reboot_required + - stage: test + env: CHECK=riak_repl PYTHON3=true - stage: test env: CHECK=sortdb - stage: test diff --git a/riak_repl/README.md b/riak_repl/README.md index e318f9a64e5..b255ada8779 100644 --- a/riak_repl/README.md +++ b/riak_repl/README.md @@ -8,7 +8,12 @@ This check monitors Riak replication [riak-repl][1]. ### Installation -The riak-repl check is not included in the [Datadog Agent][2] package, so you will need to install it yourself. +To install the Riak replication check on your host: + +1. Install the [developer toolkit][8] on any machine. +2. Run `ddev release build riak_repl` to build the package. +3. [Download the Datadog Agent][2]. +4. Upload the build artifact to any host with an Agent and run `datadog-agent integration install -w path/to/riak_repl/dist/<ARTIFACT_NAME>.whl`. ### Configuration @@ -45,3 +50,4 @@ Need help? Contact [Datadog support][6]. 
[5]: https://docs.datadoghq.com/agent/faq/agent-commands/#agent-status-and-information [6]: https://docs.datadoghq.com/help/ [7]: https://github.com/DataDog/integrations-extras/blob/master/riak_repl/metadata.csv +[8]: https://docs.datadoghq.com/developers/integrations/new_check_howto/#developer-toolkit diff --git a/riak_repl/datadog_checks/riak_repl/riak_repl.py b/riak_repl/datadog_checks/riak_repl/riak_repl.py index 9e1cf28f33f..b8b5fede03e 100644 --- a/riak_repl/datadog_checks/riak_repl/riak_repl.py +++ b/riak_repl/datadog_checks/riak_repl/riak_repl.py @@ -2,8 +2,10 @@ import requests import unicodedata -from datadog_checks.checks import AgentCheck -from datadog_checks.errors import CheckException +from six import iteritems + +from datadog_checks.base import AgentCheck +from datadog_checks.base.errors import CheckException class RiakReplCheck(AgentCheck): @@ -42,22 +44,19 @@ class RiakReplCheck(AgentCheck): ] FULLSYNC_COORDINATOR = [ - "queued", - "in_progress", - "waiting_for_retry", - "starting", - "successful_exits", - "error_exits", - "retry_exits", - "soft_retry_exits", - "busy_nodes", - "fullsyncs_completed", - "last_fullsync_duration" + "queued", + "in_progress", + "waiting_for_retry", + "starting", + "successful_exits", + "error_exits", + "retry_exits", + "soft_retry_exits", + "busy_nodes", + "fullsyncs_completed", + "last_fullsync_duration" ] - def __init__(self, name, init_config, agentConfig, instances=None): - AgentCheck.__init__(self, name, init_config, agentConfig, instances) - def check(self, instance): url = instance.get('url', '') default_timeout = instance.get('default_timeout', 5) @@ -69,7 +68,7 @@ def check(self, instance): try: r = requests.get(url, timeout=timeout) - except requests.exceptions.Timeout as e: + except requests.exceptions.Timeout: raise CheckException('URL: {0} timed out after {1} \ seconds.'.format(url, timeout)) except requests.exceptions.ConnectionError as e: @@ -81,16 +80,16 @@ def check(self, instance): try: stats = 
json.loads(r.text) - except ValueError as e: + except ValueError: raise CheckException('{0} returned an unserializable \ payload'.format(url)) - for key, val in stats.iteritems(): + for key, val in iteritems(stats): if key in self.REPL_STATS: self.safe_submit_metric("riak_repl." + key, val, tags=tags) if stats['realtime_enabled'] is not None: - for key, val in stats['realtime_queue_stats'].iteritems(): + for key, val in iteritems(stats['realtime_queue_stats']): if key in self.REALTIME_QUEUE_STATS: self.safe_submit_metric("riak_repl.realtime_queue_stats." + key, val, tags=tags) @@ -99,7 +98,7 @@ def check(self, instance): cluster = c.replace("-", "_") if c not in stats['fullsync_coordinator']: continue - for key, val in stats['fullsync_coordinator'][c].iteritems(): + for key, val in iteritems(stats['fullsync_coordinator'][c]): if key in self.FULLSYNC_COORDINATOR: self.safe_submit_metric("riak_repl.fullsync_coordinator." + cluster + "." + key, @@ -113,7 +112,6 @@ def safe_submit_metric(self, name, value, tags=None): except ValueError: self.log.debug("metric name {0} cannot be converted to a \ float: {1}".format(name, value)) - pass try: self.gauge(name, unicodedata.numeric(value), tags=tags) @@ -122,4 +120,3 @@ def safe_submit_metric(self, name, value, tags=None): self.log.debug("metric name {0} cannot be converted to a \ float even using unicode tools:\ {1}".format(name, value)) - pass diff --git a/riak_repl/setup.py b/riak_repl/setup.py index ddef2588afe..9cdc9fc75b3 100644 --- a/riak_repl/setup.py +++ b/riak_repl/setup.py @@ -15,7 +15,7 @@ long_description = f.read() -CHECKS_BASE_REQ = 'datadog-checks-base' +CHECKS_BASE_REQ = 'datadog-checks-base>=4.2.0' setup( diff --git a/riak_repl/tox.ini b/riak_repl/tox.ini index cfd6d7b4f71..462357ecaa3 100644 --- a/riak_repl/tox.ini +++ b/riak_repl/tox.ini @@ -1,23 +1,18 @@ [tox] minversion = 2.0 -basepython = py27 -envlist = unit, integration, flake8 +basepython = py37 +envlist = + py{27,37}-{unit,integration}, flake8 
[testenv] -platform = linux2|darwin +platform = linux|darwin|win32 deps = datadog-checks-base[deps] -rrequirements-dev.txt - -[testenv:unit] -commands = - pip install --require-hashes -r requirements.txt - pytest -v -m"not integration" - -[testenv:integration] commands = - pip install --require-hashes -r requirements.txt - pytest -v -m"integration" + pip install -r requirements.in + unit: pytest -v -m "not integration" + integration: pytest -v -m"integration" [testenv:flake8] skip_install = true