
Implement counter submission from the dog cli

Also add a rake lint target.
commit 241902d3ff9148cb40566eec7b2ec5708cf8eaae 1 parent ffc8fbd
@alq666 authored
249 .pylintrc
@@ -0,0 +1,249 @@
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Profiled execution.
+profile=no
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS,.git
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+
+[MESSAGES CONTROL]
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time.
+#enable=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once).
+disable=C0301, C0103, C0111, C0321
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html
+output-format=colorized
+
+# Include message's id in output
+include-ids=yes
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=yes
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Add a comment according to your evaluation note. This is used by the global
+# evaluation report (RP0004).
+comment=no
+
+
+[BASIC]
+
+# Required attributes for module, separated by a comma
+required-attributes=
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=input
+
+# Regular expression which should only match correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression which should only match correct module level names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression which should only match correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression which should only match correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct method names
+method-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct instance attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct list comprehension /
+# generator expression variable names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Regular expression which should only match functions or classes name which do
+# not require a docstring
+no-docstring-rgx=__.*__
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=132
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set).
+ignored-classes=SQLObject
+
+# When zope mode is activated, add a predefined set of Zope acquired attributes
+# to generated-members.
+zope=no
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E0201 when accessed. Python regular
+# expressions are accepted.
+generated-members=REQUEST,acl_users,aq_parent
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the beginning of the name of dummy variables
+# (i.e. not used).
+dummy-variables-rgx=_|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+
+[CLASSES]
+
+# List of interface methods to ignore, separated by a comma. This is used for
+# instance to not check methods defines in Zope's Interface base class.
+ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=10
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of branch for function / method body
+max-branchs=12
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
2  README → README.md
@@ -6,7 +6,7 @@ A Python client for the DataDog API.
- Library Documentation: http://pydoc.datadoghq.com
- HTTP API Documetation: http://api.datadoghq.com/
- DataDogHQ: http://datadoghq.com
-
+- Code: https://github.com/DataDog/dogapi ([![Build Status](https://travis-ci.org/DataDog/dogapi.png?branch=fabric)](https://travis-ci.org/DataDog/dogapi))
Change Log
----------
6 Rakefile.rb
@@ -29,6 +29,12 @@
sh "python setup.py egg_info -b '_#{build_number}' bdist_egg"
end
+desc "Run the code through pylint"
+task :lint do
+ sh "find src/dogapi -name '*.py' -type f -exec pylint --rcfile=.pylintrc --reports=n --output-format=colorized {} ';'"
+ sh "find src/dogshell -name '*.py' -type f -exec pylint --rcfile=.pylintrc --reports=n --output-format=colorized {} ';'"
+end
+
namespace :test do
desc "Run integration tests."
2  setup.py
@@ -11,7 +11,7 @@
setup(
name = "dogapi",
- version = "1.4.0",
+ version = "1.4.1",
packages = find_packages("src"),
package_dir = {'':'src'},
author = "Datadog, Inc.",
9 src/dogapi/common.py
@@ -33,4 +33,13 @@ def get_ec2_instance_id():
except:
return socket.gethostname()
+memoized_hostname = None
+def find_localhost():
+ try:
+ global memoized_hostname
+ if memoized_hostname is None:
+ memoized_hostname = socket.getfqdn()
+ return memoized_hostname
+ except Exception, e:
+ logging.exception("Cannot determine local hostname")
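
A minimal usage sketch of the new helper (mirroring the test_find_localhost case added in the tests below): the fully qualified hostname is resolved once via socket.getfqdn() and memoized in the module-level memoized_hostname, so repeated calls avoid re-resolving.

    from dogapi.common import find_localhost

    # First call resolves the FQDN and caches it in memoized_hostname;
    # subsequent calls return the cached value without re-resolving.
    first = find_localhost()
    second = find_localhost()
    assert first == second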
2  src/dogshell/__init__.py
@@ -49,5 +49,5 @@ def main():
args.func(args)
-if __name__=='__main__':
+if __name__ == '__main__':
main()
3  src/dogshell/common.py
@@ -1,5 +1,5 @@
from __future__ import print_function
-from dogapi.common import is_p3k
+from dogapi.common import is_p3k, find_localhost
get_input = input
@@ -51,7 +51,6 @@ def dog(self):
return self._dog
-
class DogshellConfig(IterableUserDict):
def load(self, config_file, apikey, appkey):
16 src/dogshell/metric.py
@@ -1,7 +1,5 @@
-import time, datetime
-import socket
-
-from dogshell.common import report_errors, report_warnings, CommandLineClient
+from dogshell.common import report_errors, report_warnings, CommandLineClient, find_localhost
+from dogapi.constants import MetricType
class MetricClient(CommandLineClient):
@@ -16,8 +14,10 @@ def setup_parser(self, subparsers):
post_parser.add_argument('--device', help='scopes your metric to a specific device', default=None)
post_parser.add_argument('--tags', help='comma-separated list of tags', default=None)
post_parser.add_argument('--localhostname', help='same as --host=`hostname` (overrides --host)', action='store_true')
+ post_parser.add_argument('--counter', help='submit value as a uint64 counter instead of gauge', action='store_true')
parser.set_defaults(func=self._post)
+
def _post(self, args):
self.dog.timeout = args.timeout
if args.localhostname:
@@ -29,7 +29,13 @@ def _post(self, args):
args.tags.split(',') if t]))
else:
tags = None
+
+ # are we sending a counter?
+ if args.counter:
+ metric_type = MetricType.Counter
+ else:
+ metric_type = MetricType.Gauge
res = self.dog.metric(args.name, args.value, host=host,
- device=args.device, tags=tags)
+ device=args.device, tags=tags, metric_type=metric_type)
report_warnings(res)
report_errors(res)
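
For illustration only, a hedged end-to-end sketch of the new flag, modeled on the dogshell test helper further down; the config path and metric name here are placeholders, not part of this commit.

    import subprocess

    # Submit the value 1 as a counter (instead of the default gauge) through
    # the dog CLI; the config path and metric name below are illustrative.
    cmd = ["dog", "--config", "/path/to/dogrc",
           "metric", "post", "--counter", "example.counter.metric", "1"]
    subprocess.check_call(cmd)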
526 tests/integration/test_dogshell.py
@@ -2,278 +2,286 @@
import os
import random
import re
+import socket
import subprocess
import time
import tempfile
import unittest
from contextlib import contextmanager
try:
- from configparser import ConfigParser
+ from configparser import ConfigParser
except ImportError:
- from ConfigParser import ConfigParser
+ from ConfigParser import ConfigParser
from hashlib import md5
from dogapi.common import is_p3k
+from dogshell.common import find_localhost
def get_temp_file():
- """Return a (fn, fp) pair"""
- if is_p3k():
- fn = "/tmp/{0}-{1}".format(time.time(), random.random())
- return (fn, open(fn, 'w+'))
- else:
- tf = tempfile.NamedTemporaryFile()
- return (tf.name, tf)
+ """Return a (fn, fp) pair"""
+ if is_p3k():
+ fn = "/tmp/{0}-{1}".format(time.time(), random.random())
+ return (fn, open(fn, 'w+'))
+ else:
+ tf = tempfile.NamedTemporaryFile()
+ return (tf.name, tf)
class TestDogshell(unittest.TestCase):
- # Test init
- def setUp(self):
- # Generate a config file for the dog shell
- self.config_fn, self.config_file = get_temp_file()
- config = ConfigParser()
- config.add_section('Connection')
- config.set('Connection', 'apikey', os.environ['DATADOG_API_KEY'])
- config.set('Connection', 'appkey', os.environ['DATADOG_APP_KEY'])
- config.write(self.config_file)
- self.config_file.flush()
-
- # Tests
- def test_config_args(self):
- out, err, return_code = self.dogshell(["--help"], use_cl_args=True)
-
- def test_comment(self):
- # Post a new comment
- cmd = ["comment", "post"]
- comment_msg = "yo dudes"
- post_data = {}
- out, err, return_code = self.dogshell(cmd, stdin=comment_msg)
- post_data = self.parse_response(out)
- assert 'id' in post_data, post_data
- assert 'url' in post_data, post_data
- assert 'message' in post_data, post_data
- assert comment_msg in post_data['message']
-
- # Read that comment from its id
- cmd = ["comment", "show", post_data['id']]
- out, err, return_code = self.dogshell(cmd)
- show_data = self.parse_response(out)
- assert comment_msg in show_data['message']
-
- # Update the comment
- cmd = ["comment", "update", post_data['id']]
- new_comment = "nothing much"
- out, err, return_code = self.dogshell(cmd, stdin=new_comment)
- update_data = self.parse_response(out)
- self.assertEquals(update_data['id'], post_data['id'])
- assert new_comment in update_data['message']
-
- # Read the updated comment
- cmd = ["comment", "show", post_data['id']]
- out, err, return_code = self.dogshell(cmd)
- show_data2 = self.parse_response(out)
- assert new_comment in show_data2['message']
-
- # Delete the comment
- cmd = ["comment", "delete", post_data['id']]
- out, err, return_code = self.dogshell(cmd)
- self.assertEquals(out, '')
-
- # Shouldn't get anything
- cmd = ["comment", "show", post_data['id']]
- out, err, return_code = self.dogshell(cmd, check_return_code=False)
- self.assertEquals(out, '')
- self.assertEquals(return_code, 1)
-
- def test_event(self):
- # Post an event
- title =" Testing events from dogshell"
- body = "%%%\n*Cool!*\n%%%\n"
- tags = "tag:a,tag:b"
- cmd = ["event", "post", title, "--tags", tags]
- event_id = None
-
- def match_permalink(out):
- match = re.match(r'.*/event/jump_to\?event_id=([0-9]*)', out, re.DOTALL)
- if match:
- return match.group(1)
- else:
- return None
-
- out, err, return_code = self.dogshell(cmd, stdin=body)
- event_id = match_permalink(out)
- assert event_id, out
-
- # Add a bit of latency for the event to appear
- time.sleep(2)
-
- # Retrieve the event
- cmd = ["event", "show", event_id]
- out, err, return_code = self.dogshell(cmd)
- event_id2 = match_permalink(out)
- self.assertEquals(event_id, event_id2)
-
- # Get a stream of events
- cmd = ["event", "stream", "30m", "--tags", tags]
- out, err, return_code = self.dogshell(cmd)
- event_ids = (match_permalink(l) for l in out.split("\n"))
- event_ids = set([e for e in event_ids if e])
- assert event_id in event_ids
-
- def test_metrics(self):
- # Submit a unique metric from a unique host
- unique = self.get_unique()
- metric = "test_metric_%s" % unique
- host = "test_host_%s" % unique
- self.dogshell(["metric", "post", "--host", host, metric, "1"])
- time.sleep(1)
-
- # Query for the metric, commented out because caching prevents us
- # from verifying new metrics
- # out, err, return_code = self.dogshell(["search", "query",
- # "metrics:" + metric])
- # assert metric in out, (metric, out)
-
- # Query for the host
- out, err, return_code = self.dogshell(["search", "query",
- "hosts:" + host])
- assert host in out, (host, out)
-
- # Query for the host and metric
- out, err, return_code = self.dogshell(["search", "query", unique])
- assert host in out, (host, out)
- # Caching prevents us from verifying new metrics
- # assert metric in out, (metric, out)
-
- # Give the host some tags
- tags0 = ["t0", "t1"]
- self.dogshell(["tag", "add", host] + tags0)
-
- # Verify that that host got those tags
- out, err, return_code = self.dogshell(["tag", "show", host])
- for t in tags0:
- assert t in out, (t, out)
-
- # Replace the tags with a different set
- tags1 = ["t2", "t3"]
- self.dogshell(["tag", "replace", host] + tags1)
- out, err, return_code = self.dogshell(["tag", "show", host])
- for t in tags1:
- assert t in out, (t, out)
- for t in tags0:
- assert t not in out, (t, out)
-
- # Remove all the tags
- self.dogshell(["tag", "detach", host])
- out, err, return_code = self.dogshell(["tag", "show", host])
- self.assertEquals(out, "")
-
- def test_dashes(self):
- # Create a dash and write it to a file
- name, temp0 = get_temp_file()
- self.dogshell(["dashboard", "new_file", name])
- dash = json.load(temp0)
-
- assert 'id' in dash, dash
- assert 'title' in dash, dash
-
- # Update the file and push it to the server
- unique = self.get_unique()
- dash['title'] = 'dash title %s' % unique
- name, temp1 = get_temp_file()
- json.dump(dash, temp1)
- temp1.flush()
- self.dogshell(["dashboard", "push", temp1.name])
-
- # Query the server to verify the change
- out, _, _ = self.dogshell(["dashboard", "show", str(dash['id'])])
-
- out = json.loads(out)
- assert "dash" in out, out
- assert "id" in out["dash"], out
- self.assertEquals(out["dash"]["id"], dash["id"])
- assert "title" in out["dash"]
- self.assertEquals(out["dash"]["title"], dash["title"])
-
- new_title = "new_title"
- new_desc = "new_desc"
- new_dash = [{
- "title": "blerg",
- "definition": {
- "requests": [
- {"q": "avg:system.load.15{web,env:prod}"}
- ]
- }
- }]
-
- # Update a dash directly on the server
- self.dogshell(["dashboard", "update", str(dash["id"]), new_title, new_desc], stdin=json.dumps(new_dash))
-
- # Query the server to verify the change
- out, _, _ = self.dogshell(["dashboard", "show", str(dash["id"])])
- out = json.loads(out)
- assert "dash" in out, out
- assert "id" in out["dash"], out
- self.assertEquals(out["dash"]["id"], dash["id"])
- assert "title" in out["dash"], out
- self.assertEquals(out["dash"]["title"], new_title)
- assert "description" in out["dash"], out
- self.assertEquals(out["dash"]["description"], new_desc)
- assert "graphs" in out["dash"], out
- self.assertEquals(out["dash"]["graphs"], new_dash)
-
- # Pull the updated dash to disk
- fd, updated_file = tempfile.mkstemp()
- try:
- self.dogshell(["dashboard", "pull", str(dash["id"]), updated_file])
- updated_dash = {}
- with open(updated_file) as f:
- updated_dash = json.load(f)
- assert "dash" in out
- self.assertEquals(out["dash"], updated_dash)
- finally:
- os.unlink(updated_file)
-
- # Delete the dash
- self.dogshell(["dashboard", "delete", str(dash["id"])])
-
- # Verify that it's not on the server anymore
- out, err, return_code = self.dogshell(["dashboard", "show", str(dash['id'])], check_return_code=False)
- self.assertNotEquals(return_code, 0)
-
- # Test helpers
-
- def dogshell(self, args, stdin=None, check_return_code=True, use_cl_args=False):
- """ Helper function to call the dog shell command
- """
- cmd = ["dog", "--config", self.config_file.name] + args
- if use_cl_args:
- cmd = ["dog",
- "--api-key={0}".format(os.environ["DATADOG_API_KEY"]),
- "--application-key={0}".format(os.environ["DATADOG_APP_KEY"])] + args
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
- if stdin:
- out, err = proc.communicate(stdin.encode("utf-8"))
- else:
- out, err = proc.communicate()
- proc.wait()
- return_code = proc.returncode
- if check_return_code:
- self.assertEquals(return_code, 0, err)
- self.assertEquals(err, b'')
- return out.decode('utf-8'), err.decode('utf-8'), return_code
-
- def get_unique(self):
- return md5(str(time.time() + random.random()).encode('utf-8')).hexdigest()
-
- def parse_response(self, out):
- data = {}
- for line in out.split('\n'):
- parts = re.split('\s+', str(line).strip())
- key = parts[0]
- # Could potentially have errors with other whitespace
- val = " ".join(parts[1:])
- if key:
- data[key] = val
- return data
+ # Test init
+ def setUp(self):
+ # Generate a config file for the dog shell
+ self.config_fn, self.config_file = get_temp_file()
+ config = ConfigParser()
+ config.add_section('Connection')
+ config.set('Connection', 'apikey', os.environ['DATADOG_API_KEY'])
+ config.set('Connection', 'appkey', os.environ['DATADOG_APP_KEY'])
+ config.write(self.config_file)
+ self.config_file.flush()
+
+ # Tests
+ def test_config_args(self):
+ out, err, return_code = self.dogshell(["--help"], use_cl_args=True)
+
+ def test_find_localhost(self):
+ # Once run
+ assert socket.getfqdn() == find_localhost()
+ # Once memoized
+ assert socket.getfqdn() == find_localhost()
+
+ def test_comment(self):
+ # Post a new comment
+ cmd = ["comment", "post"]
+ comment_msg = "yo dudes"
+ post_data = {}
+ out, err, return_code = self.dogshell(cmd, stdin=comment_msg)
+ post_data = self.parse_response(out)
+ assert 'id' in post_data, post_data
+ assert 'url' in post_data, post_data
+ assert 'message' in post_data, post_data
+ assert comment_msg in post_data['message']
+
+ # Read that comment from its id
+ cmd = ["comment", "show", post_data['id']]
+ out, err, return_code = self.dogshell(cmd)
+ show_data = self.parse_response(out)
+ assert comment_msg in show_data['message']
+
+ # Update the comment
+ cmd = ["comment", "update", post_data['id']]
+ new_comment = "nothing much"
+ out, err, return_code = self.dogshell(cmd, stdin=new_comment)
+ update_data = self.parse_response(out)
+ self.assertEquals(update_data['id'], post_data['id'])
+ assert new_comment in update_data['message']
+
+ # Read the updated comment
+ cmd = ["comment", "show", post_data['id']]
+ out, err, return_code = self.dogshell(cmd)
+ show_data2 = self.parse_response(out)
+ assert new_comment in show_data2['message']
+
+ # Delete the comment
+ cmd = ["comment", "delete", post_data['id']]
+ out, err, return_code = self.dogshell(cmd)
+ self.assertEquals(out, '')
+
+ # Shouldn't get anything
+ cmd = ["comment", "show", post_data['id']]
+ out, err, return_code = self.dogshell(cmd, check_return_code=False)
+ self.assertEquals(out, '')
+ self.assertEquals(return_code, 1)
+
+ def test_event(self):
+ # Post an event
+ title =" Testing events from dogshell"
+ body = "%%%\n*Cool!*\n%%%\n"
+ tags = "tag:a,tag:b"
+ cmd = ["event", "post", title, "--tags", tags]
+ event_id = None
+
+ def match_permalink(out):
+ match = re.match(r'.*/event/jump_to\?event_id=([0-9]*)', out, re.DOTALL)
+ if match:
+ return match.group(1)
+ else:
+ return None
+
+ out, err, return_code = self.dogshell(cmd, stdin=body)
+ event_id = match_permalink(out)
+ assert event_id, out
+
+ # Add a bit of latency for the event to appear
+ time.sleep(2)
+
+ # Retrieve the event
+ cmd = ["event", "show", event_id]
+ out, err, return_code = self.dogshell(cmd)
+ event_id2 = match_permalink(out)
+ self.assertEquals(event_id, event_id2)
+
+ # Get a stream of events
+ cmd = ["event", "stream", "30m", "--tags", tags]
+ out, err, return_code = self.dogshell(cmd)
+ event_ids = (match_permalink(l) for l in out.split("\n"))
+ event_ids = set([e for e in event_ids if e])
+ assert event_id in event_ids
+
+ def test_metrics(self):
+ # Submit a unique metric from a unique host
+ unique = self.get_unique()
+ metric = "test.dogapi.test_metric_%s" % unique
+ host = "test.dogapi.test_host_%s" % unique
+ self.dogshell(["metric", "post", "--host", host, metric, "1"])
+ time.sleep(15)
+
+ # Query for the metric, commented out because caching prevents us
+ # from verifying new metrics
+ # out, err, return_code = self.dogshell(["search", "query",
+ # "metrics:" + metric])
+ # assert metric in out, (metric, out)
+
+ # Query for the host
+ out, err, return_code = self.dogshell(["search", "query",
+ "hosts:" + host])
+ assert host in out, (host, out)
+
+ # Query for the host and metric
+ out, err, return_code = self.dogshell(["search", "query", unique])
+ assert host in out, (host, out)
+ # Caching prevents us from verifying new metrics
+ # assert metric in out, (metric, out)
+
+ # Give the host some tags
+ tags0 = ["t0", "t1"]
+ self.dogshell(["tag", "add", host] + tags0)
+
+ # Verify that that host got those tags
+ out, err, return_code = self.dogshell(["tag", "show", host])
+ for t in tags0:
+ assert t in out, (t, out)
+
+ # Replace the tags with a different set
+ tags1 = ["t2", "t3"]
+ self.dogshell(["tag", "replace", host] + tags1)
+ out, err, return_code = self.dogshell(["tag", "show", host])
+ for t in tags1:
+ assert t in out, (t, out)
+ for t in tags0:
+ assert t not in out, (t, out)
+
+ # Remove all the tags
+ self.dogshell(["tag", "detach", host])
+ out, err, return_code = self.dogshell(["tag", "show", host])
+ self.assertEquals(out, "")
+
+ def test_dashes(self):
+ # Create a dash and write it to a file
+ name, temp0 = get_temp_file()
+ self.dogshell(["dashboard", "new_file", name])
+ dash = json.load(temp0)
+
+ assert 'id' in dash, dash
+ assert 'title' in dash, dash
+
+ # Update the file and push it to the server
+ unique = self.get_unique()
+ dash['title'] = 'dash title %s' % unique
+ name, temp1 = get_temp_file()
+ json.dump(dash, temp1)
+ temp1.flush()
+ self.dogshell(["dashboard", "push", temp1.name])
+
+ # Query the server to verify the change
+ out, _, _ = self.dogshell(["dashboard", "show", str(dash['id'])])
+
+ out = json.loads(out)
+ assert "dash" in out, out
+ assert "id" in out["dash"], out
+ self.assertEquals(out["dash"]["id"], dash["id"])
+ assert "title" in out["dash"]
+ self.assertEquals(out["dash"]["title"], dash["title"])
+
+ new_title = "new_title"
+ new_desc = "new_desc"
+ new_dash = [{
+ "title": "blerg",
+ "definition": {
+ "requests": [
+ {"q": "avg:system.load.15{web,env:prod}"}
+ ]
+ }
+ }]
+
+ # Update a dash directly on the server
+ self.dogshell(["dashboard", "update", str(dash["id"]), new_title, new_desc], stdin=json.dumps(new_dash))
+
+ # Query the server to verify the change
+ out, _, _ = self.dogshell(["dashboard", "show", str(dash["id"])])
+ out = json.loads(out)
+ assert "dash" in out, out
+ assert "id" in out["dash"], out
+ self.assertEquals(out["dash"]["id"], dash["id"])
+ assert "title" in out["dash"], out
+ self.assertEquals(out["dash"]["title"], new_title)
+ assert "description" in out["dash"], out
+ self.assertEquals(out["dash"]["description"], new_desc)
+ assert "graphs" in out["dash"], out
+ self.assertEquals(out["dash"]["graphs"], new_dash)
+
+ # Pull the updated dash to disk
+ fd, updated_file = tempfile.mkstemp()
+ try:
+ self.dogshell(["dashboard", "pull", str(dash["id"]), updated_file])
+ updated_dash = {}
+ with open(updated_file) as f:
+ updated_dash = json.load(f)
+ assert "dash" in out
+ self.assertEquals(out["dash"], updated_dash)
+ finally:
+ os.unlink(updated_file)
+
+ # Delete the dash
+ self.dogshell(["dashboard", "delete", str(dash["id"])])
+
+ # Verify that it's not on the server anymore
+ out, err, return_code = self.dogshell(["dashboard", "show", str(dash['id'])], check_return_code=False)
+ self.assertNotEquals(return_code, 0)
+
+ # Test helpers
+
+ def dogshell(self, args, stdin=None, check_return_code=True, use_cl_args=False):
+ """ Helper function to call the dog shell command
+ """
+ cmd = ["dog", "--config", self.config_file.name] + args
+ if use_cl_args:
+ cmd = ["dog",
+ "--api-key={0}".format(os.environ["DATADOG_API_KEY"]),
+ "--application-key={0}".format(os.environ["DATADOG_APP_KEY"])] + args
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
+ if stdin:
+ out, err = proc.communicate(stdin.encode("utf-8"))
+ else:
+ out, err = proc.communicate()
+ proc.wait()
+ return_code = proc.returncode
+ if check_return_code:
+ self.assertEquals(return_code, 0, err)
+ self.assertEquals(err, b'')
+ return out.decode('utf-8'), err.decode('utf-8'), return_code
+
+ def get_unique(self):
+ return md5(str(time.time() + random.random()).encode('utf-8')).hexdigest()
+
+ def parse_response(self, out):
+ data = {}
+ for line in out.split('\n'):
+ parts = re.split('\s+', str(line).strip())
+ key = parts[0]
+ # Could potentially have errors with other whitespace
+ val = " ".join(parts[1:])
+ if key:
+ data[key] = val
+ return data
if __name__ == '__main__':
- unittest.main()
+ unittest.main()