Support py3
therve committed Mar 13, 2019
1 parent 7a0a1c0 commit 1c31ecb
Showing 5 changed files with 37 additions and 37 deletions.
2 changes: 2 additions & 0 deletions .travis.yml
@@ -63,6 +63,8 @@ jobs:
env: CHECK=nextcloud PYTHON3=true
- stage: test
env: CHECK=reboot_required
- stage: test
env: CHECK=riak_repl PYTHON3=true
- stage: test
env: CHECK=sortdb
- stage: test
8 changes: 7 additions & 1 deletion riak_repl/README.md
@@ -8,7 +8,12 @@ This check monitors Riak replication [riak-repl][1].

### Installation

The riak-repl check is not included in the [Datadog Agent][2] package, so you will need to install it yourself.
To install the Riak replication check on your host:

1. Install the [developer toolkit][8] on any machine.
2. Run `ddev release build riak_repl` to build the package.
3. [Download the Datadog Agent][2].
4. Upload the build artifact to any host with an Agent and run `datadog-agent integration install -w path/to/riak_repl/dist/<ARTIFACT_NAME>.whl`.

### Configuration

@@ -45,3 +50,4 @@ Need help? Contact [Datadog support][6].
[5]: https://docs.datadoghq.com/agent/faq/agent-commands/#agent-status-and-information
[6]: https://docs.datadoghq.com/help/
[7]: https://github.com/DataDog/integrations-extras/blob/master/riak_repl/metadata.csv
[8]: https://docs.datadoghq.com/developers/integrations/new_check_howto/#developer-toolkit
43 changes: 20 additions & 23 deletions riak_repl/datadog_checks/riak_repl/riak_repl.py
@@ -2,8 +2,10 @@
import requests
import unicodedata

from datadog_checks.checks import AgentCheck
from datadog_checks.errors import CheckException
from six import iteritems

from datadog_checks.base import AgentCheck
from datadog_checks.base.errors import CheckException


class RiakReplCheck(AgentCheck):
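These import changes carry most of the Python 3 support: the check classes now come from the `datadog_checks.base` namespace, and `six.iteritems` replaces the Python-2-only `dict.iteritems()` calls further down. A minimal standalone sketch of that iteration shim (the stat names are invented for the example):

```python
from six import iteritems

# Illustrative payload; these keys are made up for the example.
stats = {"realtime_enabled": "cluster-b", "rt_sink_errors": 0}

# Python 2 dicts have iteritems(); Python 3 dicts do not, so calling
# stats.iteritems() raises AttributeError there. six.iteritems() dispatches
# to iteritems() on Python 2 and items() on Python 3.
for key, val in iteritems(stats):
    print(key, val)
```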
Expand Down Expand Up @@ -42,22 +44,19 @@ class RiakReplCheck(AgentCheck):
]

FULLSYNC_COORDINATOR = [
"queued",
"in_progress",
"waiting_for_retry",
"starting",
"successful_exits",
"error_exits",
"retry_exits",
"soft_retry_exits",
"busy_nodes",
"fullsyncs_completed",
"last_fullsync_duration"
"queued",
"in_progress",
"waiting_for_retry",
"starting",
"successful_exits",
"error_exits",
"retry_exits",
"soft_retry_exits",
"busy_nodes",
"fullsyncs_completed",
"last_fullsync_duration"
]

def __init__(self, name, init_config, agentConfig, instances=None):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)

def check(self, instance):
url = instance.get('url', '')
default_timeout = instance.get('default_timeout', 5)
@@ -69,7 +68,7 @@ def check(self, instance):

try:
r = requests.get(url, timeout=timeout)
except requests.exceptions.Timeout as e:
except requests.exceptions.Timeout:
raise CheckException('URL: {0} timed out after {1} \
seconds.'.format(url, timeout))
except requests.exceptions.ConnectionError as e:
@@ -81,16 +80,16 @@

try:
stats = json.loads(r.text)
except ValueError as e:
except ValueError:
raise CheckException('{0} returned an unserializable \
payload'.format(url))

for key, val in stats.iteritems():
for key, val in iteritems(stats):
if key in self.REPL_STATS:
self.safe_submit_metric("riak_repl." + key, val, tags=tags)

if stats['realtime_enabled'] is not None:
for key, val in stats['realtime_queue_stats'].iteritems():
for key, val in iteritems(stats['realtime_queue_stats']):
if key in self.REALTIME_QUEUE_STATS:
self.safe_submit_metric("riak_repl.realtime_queue_stats."
+ key, val, tags=tags)
@@ -99,7 +98,7 @@
cluster = c.replace("-", "_")
if c not in stats['fullsync_coordinator']:
continue
for key, val in stats['fullsync_coordinator'][c].iteritems():
for key, val in iteritems(stats['fullsync_coordinator'][c]):
if key in self.FULLSYNC_COORDINATOR:
self.safe_submit_metric("riak_repl.fullsync_coordinator."
+ cluster + "." + key,
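For context on the metric loops in this part of the check: the stats endpoint returns a JSON document with flat replication counters at the top level, a nested `realtime_queue_stats` object, and a `fullsync_coordinator` object keyed by remote cluster name, with dashes in that name rewritten to underscores before it is used in the metric name. A sketch of that mapping with an invented payload (the `cluster-b` name, the field names, and the values are illustrative only):

```python
# Illustrative payload shape; only the nesting mirrors what the loops above expect.
stats = {
    "realtime_enabled": "cluster-b",
    "realtime_queue_stats": {"bytes": 1024, "consumers": 1},
    "fullsync_coordinator": {
        "cluster-b": {"queued": 0, "in_progress": 1, "fullsyncs_completed": 3},
    },
}

# Metric names follow the same scheme as the check: a fixed prefix per section,
# and dashes in cluster names rewritten to underscores.
for cluster, coord in stats["fullsync_coordinator"].items():
    prefix = "riak_repl.fullsync_coordinator." + cluster.replace("-", "_") + "."
    for key, val in coord.items():
        print(prefix + key, val)  # e.g. riak_repl.fullsync_coordinator.cluster_b.queued 0
```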
@@ -113,7 +112,6 @@ def safe_submit_metric(self, name, value, tags=None):
except ValueError:
self.log.debug("metric name {0} cannot be converted to a \
float: {1}".format(name, value))
pass

try:
self.gauge(name, unicodedata.numeric(value), tags=tags)
@@ -122,4 +120,3 @@
self.log.debug("metric name {0} cannot be converted to a \
float even using unicode tools:\
{1}".format(name, value))
pass
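The last two hunks tidy `safe_submit_metric`: the trailing `pass` statements after the `log.debug` calls are dropped as redundant. The coercion logic itself tries a plain float conversion first and falls back to `unicodedata.numeric` for single numeric Unicode characters; only the `except` branches are visible in the diff, so the first `try` body below is an assumption, not the file's exact code. A hedged standalone sketch:

```python
import unicodedata


def coerce_stat_value(value):
    """Coerce a stat value to a float, mirroring the fallback pattern above.

    Assumption: the hidden first try block uses float(); only the except
    branches appear in the hunks above.
    """
    try:
        return float(value)
    except (TypeError, ValueError):
        pass

    try:
        # unicodedata.numeric() handles single numeric characters such as u"\u00bd";
        # it raises TypeError for longer strings and ValueError for characters
        # with no numeric value.
        return unicodedata.numeric(value)
    except (TypeError, ValueError):
        return None  # the check logs a debug message and skips the metric


print(coerce_stat_value("42"))       # 42.0
print(coerce_stat_value(u"\u00bd"))  # 0.5
print(coerce_stat_value("enabled"))  # None
```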
2 changes: 1 addition & 1 deletion riak_repl/setup.py
Expand Up @@ -15,7 +15,7 @@
long_description = f.read()


CHECKS_BASE_REQ = 'datadog-checks-base'
CHECKS_BASE_REQ = 'datadog-checks-base>=4.2.0'


setup(
19 changes: 7 additions & 12 deletions riak_repl/tox.ini
@@ -1,23 +1,18 @@
[tox]
minversion = 2.0
basepython = py27
envlist = unit, integration, flake8
basepython = py37
envlist =
py{27,37}-{unit,integration}, flake8

[testenv]
platform = linux2|darwin
platform = linux|darwin|win32
deps =
datadog-checks-base[deps]
-rrequirements-dev.txt

[testenv:unit]
commands =
pip install --require-hashes -r requirements.txt
pytest -v -m"not integration"

[testenv:integration]
commands =
pip install --require-hashes -r requirements.txt
pytest -v -m"integration"
pip install -r requirements.in
unit: pytest -v -m "not integration"
integration: pytest -v -m"integration"

[testenv:flake8]
skip_install = true
