diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index d2b8bc6bc..d2dc51b3c 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -1,3 +1,4 @@ +from datadog.util.format import force_to_epoch_seconds from datadog.api.resources import ( GetableAPIResource, CreateableAPIResource, @@ -137,3 +138,56 @@ def delete_many(cls, ids, **params): body={"ids": ids}, suppress_response_errors_on_codes=[200], ) + + @classmethod + def can_delete(cls, ids, **params): + """ + Check if the following SLOs can be safely deleted. + + This is used to check if SLO has any references to it. + + :param ids: a list of SLO IDs to check + :type ids: list(str) + + :returns: Dictionary representing the API's JSON response + "data.ok" represents a list of SLO ids that have no known references. + "errors" contains a dictionary of SLO ID to known reference(s). + """ + params["ids"] = ids + return super(ServiceLevelObjective, cls)._trigger_class_action( + "GET", + "can_delete", + params=params, + body=None, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def history(cls, id, from_ts, to_ts, **params): + """ + Get the SLO's history from the given time range. + + :param id: SLO ID to query + :type id: str + + :param from_ts: `from` timestamp in epoch seconds to query + :type from_ts: int|datetime.datetime + + :param to_ts: `to` timestamp in epoch seconds to query, must be > `from_ts` + :type to_ts: int|datetime.datetime + + :returns: Dictionary representing the API's JSON response + containing the SLO's history data over the + queried time range. 
+ """ + params["id"] = id + params["from_ts"] = force_to_epoch_seconds(from_ts) + params["to_ts"] = force_to_epoch_seconds(to_ts) + return super(ServiceLevelObjective, cls)._trigger_class_action( + "GET", + "history", + id=id, + params=params, + body=None, + suppress_response_errors_on_codes=[200], + ) diff --git a/datadog/dogshell/__init__.py b/datadog/dogshell/__init__.py index 67e939889..107d3f7f3 100644 --- a/datadog/dogshell/__init__.py +++ b/datadog/dogshell/__init__.py @@ -17,6 +17,7 @@ from datadog.dogshell.screenboard import ScreenboardClient from datadog.dogshell.search import SearchClient from datadog.dogshell.service_check import ServiceCheckClient +from datadog.dogshell.service_level_objective import ServiceLevelObjectiveClient from datadog.dogshell.tag import TagClient from datadog.dogshell.timeboard import TimeboardClient from datadog.dogshell.dashboard import DashboardClient @@ -65,6 +66,7 @@ def main(): HostClient.setup_parser(subparsers) DowntimeClient.setup_parser(subparsers) ServiceCheckClient.setup_parser(subparsers) + ServiceLevelObjectiveClient.setup_parser(subparsers) args = parser.parse_args() config.load(args.config, args.api_key, args.app_key) diff --git a/datadog/dogshell/service_level_objective.py b/datadog/dogshell/service_level_objective.py index bc8e71066..bdc366d95 100644 --- a/datadog/dogshell/service_level_objective.py +++ b/datadog/dogshell/service_level_objective.py @@ -3,7 +3,12 @@ import json # 3p -from datadog.util.cli import set_of_ints, comma_set, comma_list_or_empty +from datadog.util.cli import ( + set_of_ints, + comma_set, + comma_list_or_empty, + parse_date_as_epoch_timestamp, +) from datadog.util.format import pretty_json # datadog @@ -11,7 +16,7 @@ from datadog.dogshell.common import report_errors, report_warnings -class MonitorClient(object): +class ServiceLevelObjectiveClient(object): @classmethod def setup_parser(cls, subparsers): parser = subparsers.add_parser( @@ -25,7 +30,6 @@ def setup_parser(cls, subparsers): 
create_parser = verb_parsers.add_parser("create", help="Create a SLO") create_parser.add_argument( "--type", - required=True, help="type of the SLO, e.g.", choices=["metric", "monitor"], ) @@ -42,7 +46,6 @@ def setup_parser(cls, subparsers): create_parser.add_argument( "--thresholds", help="comma separated list of :[:[:[:]]", - required=True, ) create_parser.add_argument( "--numerator", @@ -85,7 +88,6 @@ def setup_parser(cls, subparsers): ) update_parser.add_argument( "--type", - required=True, help="type of the SLO (must specify it's original type)", choices=["metric", "monitor"], ) @@ -93,10 +95,9 @@ def setup_parser(cls, subparsers): update_parser.add_argument( "--description", help="description of the SLO", default=None ) - create_parser.add_argument( + update_parser.add_argument( "--thresholds", help="comma separated list of :[:[:[:]]", - required=True, ) update_parser.add_argument( "--tags", @@ -178,11 +179,32 @@ def setup_parser(cls, subparsers): delete_timeframe_parser.add_argument( "timeframes", help="CSV of timeframes to delete, e.g. 
7d,30d,90d", - required=True, type=comma_set, ) delete_timeframe_parser.set_defaults(func=cls._delete_timeframe) + can_delete_parser = verb_parsers.add_parser( + "can_delete", help="Check if can delete SLOs" + ) + can_delete_parser.add_argument( + "slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set + ) + can_delete_parser.set_defaults(func=cls._can_delete) + + history_parser = verb_parsers.add_parser("history", help="Get the SLO history") + history_parser.add_argument("slo_id", help="SLO to query the history") + history_parser.add_argument( + "from_ts", + type=parse_date_as_epoch_timestamp, + help="`from` date or timestamp", + ) + history_parser.add_argument( + "to_ts", + type=parse_date_as_epoch_timestamp, + help="`to` date or timestamp", + ) + history_parser.set_defaults(func=cls._history) + @classmethod def _create(cls, args): api._timeout = args.timeout @@ -402,6 +424,34 @@ def _delete_timeframe(cls, args): else: print(json.dumps(res)) + @classmethod + def _can_delete(cls, args): + api._timeout = args.timeout + + res = api.ServiceLevelObjective.can_delete(args.slo_ids) + if res is not None: + report_warnings(res) + report_errors(res) + + if args.format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _history(cls, args): + api._timeout = args.timeout + + res = api.ServiceLevelObjective.history(args.slo_id, args.from_ts, args.to_ts) + if res is not None: + report_warnings(res) + report_errors(res) + + if args.format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + @classmethod def _escape(cls, s): return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") diff --git a/datadog/util/cli.py b/datadog/util/cli.py index b9e8ffa00..fb3f7eb8e 100644 --- a/datadog/util/cli.py +++ b/datadog/util/cli.py @@ -1,5 +1,9 @@ +from datetime import datetime, timedelta from argparse import ArgumentTypeError import json +import re +from datadog.util.format import force_to_epoch_seconds +import time def 
comma_list(list_str, item_func=None): @@ -50,3 +54,93 @@ def int_or_str(item): def set_of_ints(int_csv): return set(list_of_ints(int_csv)) + + +class DateParsingError(Exception): + """Thrown if parse_date exhausts all possible parsings of a string""" + + +_date_fieldre = re.compile(r"(\d+)\s?(\w+) (ago|ahead)") + + +def _midnight(): + """ Truncate a date to midnight. Default to UTC midnight today.""" + return datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) + + +def parse_date_as_epoch_timestamp(date_str): + return parse_date(date_str, to_epoch_ts=True) + + +def _parse_date_noop_formatter(d): + """ NOOP - only here for pylint """ + return d + + +def parse_date(date_str, to_epoch_ts=False): + formatter = _parse_date_noop_formatter + if to_epoch_ts: + formatter = force_to_epoch_seconds + + if isinstance(date_str, datetime): + return formatter(date_str) + elif isinstance(date_str, time.struct_time): + return formatter(datetime.fromtimestamp(time.mktime(date_str))) + + # Parse relative dates. 
+ if date_str == "today": + return formatter(_midnight()) + elif date_str == "yesterday": + return formatter(_midnight() - timedelta(days=1)) + elif date_str == "tomorrow": + return formatter(_midnight() + timedelta(days=1)) + elif date_str.endswith(("ago", "ahead")): + m = _date_fieldre.match(date_str) + if m: + fields = m.groups() + else: + fields = date_str.split(" ")[1:] + num = int(fields[0]) + short_unit = fields[1] + time_direction = {"ago": -1, "ahead": 1}[fields[2]] + assert short_unit, short_unit + units = ["weeks", "days", "hours", "minutes", "seconds"] + # translate 'h' -> 'hours' + short_units = dict([(u[:1], u) for u in units]) + unit = short_units.get(short_unit, short_unit) + # translate 'hour' -> 'hours' + if unit[-1] != "s": + unit += "s" # tolerate 1 hour + assert unit in units, "'%s' not in %s" % (unit, units) + return formatter(datetime.utcnow() + time_direction * timedelta(**{unit: num})) + elif date_str == "now": + return formatter(datetime.utcnow()) + + def _from_epoch_timestamp(seconds): + return datetime.utcfromtimestamp(float(seconds)) + + def _from_epoch_ms_timestamp(millis): + in_sec = float(millis) / 1000.0 + return _from_epoch_timestamp(in_sec) + + # Or parse date formats (most specific to least specific) + parse_funcs = [ + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f"), + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S"), + lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%f"), + lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S"), + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M"), + lambda d: datetime.strptime(d, "%Y-%m-%d-%H"), + lambda d: datetime.strptime(d, "%Y-%m-%d"), + lambda d: datetime.strptime(d, "%Y-%m"), + lambda d: datetime.strptime(d, "%Y"), + _from_epoch_timestamp, # an epoch in seconds + _from_epoch_ms_timestamp, # an epoch in 
milliseconds + ] + + for parse_func in parse_funcs: + try: + return formatter(parse_func(date_str)) + except Exception: + pass + raise DateParsingError(u"Could not parse {0} as date".format(date_str)) diff --git a/datadog/util/compat.py b/datadog/util/compat.py index e15890c42..890b51eff 100644 --- a/datadog/util/compat.py +++ b/datadog/util/compat.py @@ -34,6 +34,13 @@ def is_higher_py35(): return _is_py_version_higher_than(3, 5) +def is_pypy(): + """ + Assert that PyPy is being used (regardless of 2 or 3) + """ + return '__pypy__' in sys.builtin_module_names + + get_input = input # Python 3.x diff --git a/datadog/util/format.py b/datadog/util/format.py index 4b4d83948..022a343ea 100644 --- a/datadog/util/format.py +++ b/datadog/util/format.py @@ -1,4 +1,6 @@ # stdlib +import calendar +import datetime import json @@ -12,3 +14,9 @@ def construct_url(host, api_version, path): def construct_path(api_version, path): return "{}/{}".format(api_version.strip("/"), path.strip("/")) + + +def force_to_epoch_seconds(epoch_sec_or_dt): + if isinstance(epoch_sec_or_dt, datetime.datetime): + return calendar.timegm(epoch_sec_or_dt.timetuple()) + return epoch_sec_or_dt diff --git a/setup.py b/setup.py index d04067545..ab7be70fc 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ def get_readme_md_contents(): name="datadog", version="0.30.0", install_requires=install_reqs, - tests_require=["pytest", "mock"], + tests_require=["pytest", "mock", "freezegun"], packages=["datadog", "datadog.api", "datadog.dogstatsd", "datadog.threadstats", "datadog.util", "datadog.dogshell"], author="Datadog, Inc.", long_description=get_readme_md_contents(), diff --git a/tests/unit/util/test_cli.py b/tests/unit/util/test_cli.py new file mode 100644 index 000000000..d5717dbcf --- /dev/null +++ b/tests/unit/util/test_cli.py @@ -0,0 +1,211 @@ +from argparse import ArgumentTypeError +from freezegun import freeze_time +import datetime +import unittest + +from datadog.util.cli import ( + comma_list, + 
comma_set, + comma_list_or_empty, + list_of_ints, + list_of_ints_and_strs, + set_of_ints, + DateParsingError, + _midnight, + parse_date_as_epoch_timestamp, + parse_date, +) +from datadog.util.compat import is_pypy +from datadog.util.format import force_to_epoch_seconds + + +class TestCLI(unittest.TestCase): + def test_comma_list(self): + invalid_cases = [None, ""] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + comma_list(invalid_case) + + valid_cases = ( + (["foo"], "foo", None), + (["foo", "bar"], "foo,bar", None), + ([1], "1", int), + ([1, 2], "1,2", int), + ) + for expected, list_str, item_func in valid_cases: + actual = comma_list(list_str, item_func) + self.assertListEqual(expected, actual) + + def test_comma_set(self): + invalid_cases = [None, ""] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + comma_set(invalid_case) + + valid_cases = ( + ({"foo"}, "foo", None), + ({"foo", "bar"}, "foo,bar", None), + ({1}, "1", int), + ({1}, "1,1,1", int), + ({1, 2}, "1,2,1", int), + ) + for expected, list_str, item_func in valid_cases: + actual = comma_set(list_str, item_func) + self.assertSetEqual(expected, actual) + + def test_comma_list_or_empty(self): + valid_cases = ( + ([], None, None), + ([], "", None), + (["foo"], "foo", None), + (["foo", "bar"], "foo,bar", None), + ) + for expected, list_str, item_func in valid_cases: + actual = comma_list_or_empty(list_str) + self.assertListEqual(expected, actual) + + def test_list_of_ints(self): + invalid_cases = [None, "", "foo", '["foo"]'] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + list_of_ints(invalid_case) + + valid_cases = (([1], "1"), ([1, 2], "1,2"), ([1], "[1]"), ([1, 2], "[1,2]")) + for expected, list_str in valid_cases: + actual = list_of_ints(list_str) + self.assertListEqual(expected, actual) + + def test_list_of_ints_and_strs(self): + invalid_cases = [None, ""] + for invalid_case in invalid_cases: + with 
self.assertRaises(ArgumentTypeError): + list_of_ints_and_strs(invalid_case) + + valid_cases = ( + (["foo"], "foo"), + (["foo", "bar"], "foo,bar"), + ([1], "1"), + ([1, 2], "1,2"), + (["foo", 2], "foo,2"), + ) + for expected, list_str in valid_cases: + actual = list_of_ints_and_strs(list_str) + self.assertListEqual(expected, actual) + + def test_set_of_ints(self): + invalid_cases = [None, "", "foo", '["foo"]'] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + set_of_ints(invalid_case) + + valid_cases = ( + ({1}, "1"), + ({1, 2}, "1,2"), + ({1}, "[1]"), + ({1}, "[1,1,1]"), + ({1, 2}, "[1,2,1]"), + ) + for expected, list_str in valid_cases: + actual = set_of_ints(list_str) + self.assertSetEqual(expected, actual) + + @freeze_time("2019-10-23 04:44:32", tz_offset=0) + def test_midnight(self): + d = _midnight() + self.assertEqual(2019, d.year) + self.assertEqual(10, d.month) + self.assertEqual(23, d.day) + self.assertEqual(0, d.hour) + self.assertEqual(0, d.minute) + self.assertEqual(0, d.second) + self.assertEqual(0, d.microsecond) + + @freeze_time("2019-10-23 04:44:32", tz_offset=0) + def test_parse_date(self): + test_date = datetime.datetime(2019, 10, 23, 4, 44, 32, 0) + cases = [ + (test_date, test_date), # already an instance, return + ("today", datetime.datetime(2019, 10, 23, 0, 0, 0)), + ("yesterday", datetime.datetime(2019, 10, 22, 0, 0, 0)), + ("tomorrow", datetime.datetime(2019, 10, 24, 0, 0, 0)), + ("2 days ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2d ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2 days ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("2d ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("now", datetime.datetime(2019, 10, 23, 4, 44, 32)), + ("2019-10-23 04:44:32.000000", test_date), + ("2019-10-23T04:44:32.000000", test_date), + ("2019-10-23 04:44:32", test_date), + ("2019-10-23T04:44:32", test_date), + ("2019-10-23 04:44", datetime.datetime(2019, 10, 23, 4, 44, 0, 0)), + 
("2019-10-23-04", datetime.datetime(2019, 10, 23, 4, 0, 0, 0)), + ("2019-10-23", datetime.datetime(2019, 10, 23, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("2019", datetime.datetime(2019, 1, 1, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("1571805872", test_date), # seconds + ] + if not is_pypy(): + cases.append( + ("1571805872000", test_date) + ) # millis, pypy does not work (known) + + for i, (date_str, expected) in enumerate(cases): + actual = parse_date(date_str) + self.assertEqual( + expected, + actual, + "case {}: failed, date_str={} expected={} actual={}".format( + i, date_str, expected, actual + ), + ) + + # test invalid case + with self.assertRaises(DateParsingError): + parse_date("foo") + + @freeze_time("2019-10-23 04:44:32", tz_offset=0) + def test_parse_date_as_epoch_timestamp(self): + # this applies the same rules but always returns epoch seconds + test_date = datetime.datetime(2019, 10, 23, 4, 44, 32, 0) + cases = [ + (test_date, test_date), # already an instance, return + ("today", datetime.datetime(2019, 10, 23, 0, 0, 0)), + ("yesterday", datetime.datetime(2019, 10, 22, 0, 0, 0)), + ("tomorrow", datetime.datetime(2019, 10, 24, 0, 0, 0)), + ("2 days ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2d ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2 days ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("2d ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("now", datetime.datetime(2019, 10, 23, 4, 44, 32)), + ("2019-10-23 04:44:32.000000", test_date), + ("2019-10-23T04:44:32.000000", test_date), + ("2019-10-23 04:44:32", test_date), + ("2019-10-23T04:44:32", test_date), + ("2019-10-23 04:44", datetime.datetime(2019, 10, 23, 4, 44, 0, 0)), + ("2019-10-23-04", datetime.datetime(2019, 10, 23, 4, 0, 0, 0)), + ("2019-10-23", datetime.datetime(2019, 10, 23, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("2019", datetime.datetime(2019, 1, 1, 
0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("1571805872", test_date), # seconds + ] + if not is_pypy(): + cases.append( + ("1571805872000", test_date) + ) # millis, pypy does not work (known) + + for i, (date_str, expected) in enumerate(cases): + actual_timestamp = parse_date_as_epoch_timestamp(date_str) + expected_timestamp = force_to_epoch_seconds(expected) + self.assertEqual( + expected_timestamp, + actual_timestamp, + "case {}: failed, date_str={} expected={} actual={}".format( + i, date_str, expected_timestamp, actual_timestamp + ), + ) + + # test invalid case + with self.assertRaises(DateParsingError): + parse_date_as_epoch_timestamp("foo") diff --git a/tox.ini b/tox.ini index 5612d31c3..cf75f2a2a 100644 --- a/tox.ini +++ b/tox.ini @@ -11,6 +11,7 @@ passenv = DD_TEST_CLIENT* usedevelop = true deps = !integration: mock + !integration: freezegun pytest commands = !integration: pytest -v tests/unit {posargs}