Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 54 additions & 0 deletions datadog/api/service_level_objectives.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from datadog.util.format import force_to_epoch_seconds
from datadog.api.resources import (
GetableAPIResource,
CreateableAPIResource,
Expand Down Expand Up @@ -137,3 +138,56 @@ def delete_many(cls, ids, **params):
body={"ids": ids},
suppress_response_errors_on_codes=[200],
)

@classmethod
def can_delete(cls, ids, **params):
    """
    Check whether the given SLOs can be safely deleted.

    Used to verify that no other resources still reference these SLOs.

    :param ids: a list of SLO IDs to check
    :type ids: list(str)

    :returns: Dictionary representing the API's JSON response
        "data.ok" represents a list of SLO ids that have no known references.
        "errors" contains a dictionary of SLO ID to known reference(s).
    """
    query = dict(params, ids=ids)
    return super(ServiceLevelObjective, cls)._trigger_class_action(
        "GET",
        "can_delete",
        params=query,
        body=None,
        suppress_response_errors_on_codes=[200],
    )

@classmethod
def history(cls, id, from_ts, to_ts, **params):
    """
    Get the SLO's history over the given time range.

    :param id: SLO ID to query
    :type id: str

    :param from_ts: `from` timestamp in epoch seconds to query
    :type from_ts: int|datetime.datetime

    :param to_ts: `to` timestamp in epoch seconds to query, must be > `from_ts`
    :type to_ts: int|datetime.datetime

    :returns: Dictionary representing the API's JSON response with the
        SLO history data for the requested window.
    """
    query = dict(params)
    query["id"] = id
    # Accept either epoch seconds or datetime objects for the bounds.
    query["from_ts"] = force_to_epoch_seconds(from_ts)
    query["to_ts"] = force_to_epoch_seconds(to_ts)
    return super(ServiceLevelObjective, cls)._trigger_class_action(
        "GET",
        "history",
        id=id,
        params=query,
        body=None,
        suppress_response_errors_on_codes=[200],
    )
2 changes: 2 additions & 0 deletions datadog/dogshell/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from datadog.dogshell.screenboard import ScreenboardClient
from datadog.dogshell.search import SearchClient
from datadog.dogshell.service_check import ServiceCheckClient
from datadog.dogshell.service_level_objective import ServiceLevelObjectiveClient
from datadog.dogshell.tag import TagClient
from datadog.dogshell.timeboard import TimeboardClient
from datadog.dogshell.dashboard import DashboardClient
Expand Down Expand Up @@ -65,6 +66,7 @@ def main():
HostClient.setup_parser(subparsers)
DowntimeClient.setup_parser(subparsers)
ServiceCheckClient.setup_parser(subparsers)
ServiceLevelObjectiveClient.setup_parser(subparsers)

args = parser.parse_args()
config.load(args.config, args.api_key, args.app_key)
Expand Down
66 changes: 58 additions & 8 deletions datadog/dogshell/service_level_objective.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,20 @@
import json

# 3p
from datadog.util.cli import set_of_ints, comma_set, comma_list_or_empty
from datadog.util.cli import (
set_of_ints,
comma_set,
comma_list_or_empty,
parse_date_as_epoch_timestamp,
)
from datadog.util.format import pretty_json

# datadog
from datadog import api
from datadog.dogshell.common import report_errors, report_warnings


class MonitorClient(object):
class ServiceLevelObjectiveClient(object):
@classmethod
def setup_parser(cls, subparsers):
parser = subparsers.add_parser(
Expand All @@ -25,7 +30,6 @@ def setup_parser(cls, subparsers):
create_parser = verb_parsers.add_parser("create", help="Create a SLO")
create_parser.add_argument(
"--type",
required=True,
Comment thread
platinummonkey marked this conversation as resolved.
help="type of the SLO, e.g.",
choices=["metric", "monitor"],
)
Expand All @@ -42,7 +46,6 @@ def setup_parser(cls, subparsers):
create_parser.add_argument(
"--thresholds",
help="comma separated list of <timeframe>:<target>[:<warning>[:<target_display>[:<warning_display>]]",
required=True,
)
create_parser.add_argument(
"--numerator",
Expand Down Expand Up @@ -85,18 +88,16 @@ def setup_parser(cls, subparsers):
)
update_parser.add_argument(
"--type",
required=True,
help="type of the SLO (must specify it's original type)",
choices=["metric", "monitor"],
)
update_parser.add_argument("--name", help="name of the SLO", default=None)
update_parser.add_argument(
"--description", help="description of the SLO", default=None
)
create_parser.add_argument(
update_parser.add_argument(
Comment thread
platinummonkey marked this conversation as resolved.
"--thresholds",
help="comma separated list of <timeframe>:<target>[:<warning>[:<target_display>[:<warning_display>]]",
required=True,
)
update_parser.add_argument(
"--tags",
Expand Down Expand Up @@ -178,11 +179,32 @@ def setup_parser(cls, subparsers):
delete_timeframe_parser.add_argument(
"timeframes",
help="CSV of timeframes to delete, e.g. 7d,30d,90d",
required=True,
type=comma_set,
)
delete_timeframe_parser.set_defaults(func=cls._delete_timeframe)

can_delete_parser = verb_parsers.add_parser(
"can_delete", help="Check if can delete SLOs"
)
can_delete_parser.add_argument(
"slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set
)
can_delete_parser.set_defaults(func=cls._can_delete)

history_parser = verb_parsers.add_parser("history", help="Get the SLO history")
history_parser.add_argument("slo_id", help="SLO to query the history")
history_parser.add_argument(
"from_ts",
type=parse_date_as_epoch_timestamp,
help="`from` date or timestamp",
)
history_parser.add_argument(
"to_ts",
type=parse_date_as_epoch_timestamp,
help="`to` date or timestamp",
)
history_parser.set_defaults(func=cls._history)

@classmethod
def _create(cls, args):
api._timeout = args.timeout
Expand Down Expand Up @@ -402,6 +424,34 @@ def _delete_timeframe(cls, args):
else:
print(json.dumps(res))

@classmethod
def _can_delete(cls, args):
    """Dogshell handler: check whether the SLO IDs in ``args.slo_ids`` can be deleted."""
    api._timeout = args.timeout
    # Bug fix: the original compared the *builtin* ``format`` function to
    # "pretty", which is always False, so pretty-printing never happened.
    # Read the requested output format from the parsed args instead.
    format = args.format

    res = api.ServiceLevelObjective.can_delete(args.slo_ids)
    if res is not None:
        report_warnings(res)
        report_errors(res)

    if format == "pretty":
        print(pretty_json(res))
    else:
        print(json.dumps(res))

@classmethod
def _history(cls, args):
    """Dogshell handler: fetch and print the history for the SLO in ``args.slo_id``."""
    api._timeout = args.timeout
    # Bug fix: the original compared the *builtin* ``format`` function to
    # "pretty"; read the requested output format from the parsed args.
    format = args.format

    # Bug fix: history() requires from_ts/to_ts; the parser collects them as
    # positional args, but the original call dropped them (TypeError at runtime).
    res = api.ServiceLevelObjective.history(args.slo_id, args.from_ts, args.to_ts)
    if res is not None:
        report_warnings(res)
        report_errors(res)

    if format == "pretty":
        print(pretty_json(res))
    else:
        print(json.dumps(res))

@classmethod
def _escape(cls, s):
return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t")
97 changes: 97 additions & 0 deletions datadog/util/cli.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
from datetime import datetime, timedelta
from argparse import ArgumentTypeError
import json
import re
from datadog.util.format import force_to_epoch_seconds
import time


def comma_list(list_str, item_func=None):
Expand Down Expand Up @@ -50,3 +54,96 @@ def int_or_str(item):

def set_of_ints(int_csv):
    """Parse a CSV of integers (via list_of_ints) into a deduplicated set."""
    values = list_of_ints(int_csv)
    return set(values)


class DateParsingError(Exception):
    """Raised when parse_date has exhausted every supported parsing of a string."""


# Matches relative-date phrases like "2 hours ago" or "1 day ahead":
# (count, unit, direction).
_date_fieldre = re.compile(r"(\d+)\s?(\w+) (ago|ahead)")


def _midnight():
Comment thread
platinummonkey marked this conversation as resolved.
""" Truncate a date to midnight. Default to UTC midnight today."""
return datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)


def parse_date_as_epoch_timestamp(date_str):
    """Parse *date_str* exactly like parse_date, but return epoch seconds."""
    return parse_date(date_str, to_epoch_ts=True)


def _parse_date_noop_formatter(d):
""" NOOP - only here for pylint """
return d


def parse_date(date_str, to_epoch_ts=False):
    """
    Parse many date representations into a datetime (or epoch seconds).

    Accepts datetime / time.struct_time instances (passed through), the
    relative keywords "today", "yesterday", "tomorrow", "now", phrases such
    as "2 hours ago" / "1 day ahead", a range of absolute formats
    ("%Y-%m-%d %H:%M:%S[.%f]", ISO-like variants, "%Y-%m", "%Y"), and epoch
    timestamps in seconds or milliseconds.

    :param date_str: the value to parse
    :param to_epoch_ts: when True, return epoch seconds instead of a datetime

    :raises DateParsingError: if no supported parsing matches
    """
    # Choose the output conversion once; the identity lambda avoids touching
    # the formatter helper when no epoch conversion is requested.
    formatter = force_to_epoch_seconds if to_epoch_ts else (lambda d: d)

    if isinstance(date_str, datetime):
        return formatter(date_str)
    elif isinstance(date_str, time.struct_time):
        return formatter(datetime.fromtimestamp(time.mktime(date_str)))

    # Parse relative dates.
    if date_str == "today":
        return formatter(_midnight())
    elif date_str == "yesterday":
        return formatter(_midnight() - timedelta(days=1))
    elif date_str == "tomorrow":
        return formatter(_midnight() + timedelta(days=1))
    elif date_str.endswith(("ago", "ahead")):
        m = _date_fieldre.match(date_str)
        if m:
            fields = m.groups()
        else:
            fields = date_str.split(" ")[1:]
        num = int(fields[0])
        short_unit = fields[1]
        time_direction = {"ago": -1, "ahead": 1}[fields[2]]
        assert short_unit, short_unit
        units = ["weeks", "days", "hours", "minutes", "seconds"]
        # translate 'h' -> 'hours'
        short_units = dict([(u[:1], u) for u in units])
        unit = short_units.get(short_unit, short_unit)
        # translate 'hour' -> 'hours'
        if unit[-1] != "s":
            unit += "s"  # tolerate 1 hour
        assert unit in units, "'%s' not in %s" % (unit, units)
        return formatter(datetime.utcnow() + time_direction * timedelta(**{unit: num}))
    elif date_str == "now":
        return formatter(datetime.utcnow())

    # Bug fix: removed leftover debug print() calls from the two epoch
    # helpers below — library code must not write to stdout.
    def _from_epoch_timestamp(seconds):
        # Epoch in seconds, interpreted as UTC.
        return datetime.utcfromtimestamp(float(seconds))

    def _from_epoch_ms_timestamp(millis):
        # Epoch in milliseconds, interpreted as UTC.
        return datetime.utcfromtimestamp(float(millis) / 1000.0)

    # Or parse date formats (most specific to least specific)
    parse_funcs = [
        lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f"),
        lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S"),
        lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%f"),
        lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S"),
        lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M"),
        lambda d: datetime.strptime(d, "%Y-%m-%d-%H"),
        lambda d: datetime.strptime(d, "%Y-%m-%d"),
        lambda d: datetime.strptime(d, "%Y-%m"),
        lambda d: datetime.strptime(d, "%Y"),
        _from_epoch_timestamp,  # an epoch in seconds
        _from_epoch_ms_timestamp,  # an epoch in milliseconds
    ]

    for parse_func in parse_funcs:
        try:
            return formatter(parse_func(date_str))
        except Exception:
            pass
    raise DateParsingError(u"Could not parse {0} as date".format(date_str))
7 changes: 7 additions & 0 deletions datadog/util/compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,13 @@ def is_higher_py35():
return _is_py_version_higher_than(3, 5)


def is_pypy():
    """
    Tell whether the running interpreter is PyPy (regardless of 2 or 3).
    """
    builtin_modules = sys.builtin_module_names
    return "__pypy__" in builtin_modules


get_input = input

# Python 3.x
Expand Down
8 changes: 8 additions & 0 deletions datadog/util/format.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
# stdlib
import calendar
import datetime
import json


Expand All @@ -12,3 +14,9 @@ def construct_url(host, api_version, path):

def construct_path(api_version, path):
    """Join an API version and a resource path into "version/path" form."""
    segments = (api_version.strip("/"), path.strip("/"))
    return "/".join(segments)


def force_to_epoch_seconds(epoch_sec_or_dt):
    """Return epoch seconds for a datetime input; any other value passes through."""
    value = epoch_sec_or_dt
    if not isinstance(value, datetime.datetime):
        return value
    # timegm interprets the time tuple as UTC, avoiding local-timezone skew.
    return calendar.timegm(value.timetuple())
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def get_readme_md_contents():
name="datadog",
version="0.30.0",
install_requires=install_reqs,
tests_require=["pytest", "mock"],
tests_require=["pytest", "mock", "freezegun"],
packages=["datadog", "datadog.api", "datadog.dogstatsd", "datadog.threadstats", "datadog.util", "datadog.dogshell"],
author="Datadog, Inc.",
long_description=get_readme_md_contents(),
Expand Down
Loading