diff --git a/backdrop/core/query.py b/backdrop/core/query.py
new file mode 100644
index 00000000..79d88270
--- /dev/null
+++ b/backdrop/core/query.py
@@ -0,0 +1,93 @@
+from collections import namedtuple
+
+from .timeutils import now
+
+
+"""
+This is the internal Query object
+ - Create list of attributes to build the query from
+ - We use delta internally, but the end user will use 'duration'
+"""
+_Query = namedtuple(
+    '_Query',
+    ['start_at', 'end_at', 'delta', 'period',
+     'filter_by', 'group_by', 'sort_by', 'limit', 'collect'])
+
+
+class Query(_Query):
+    @classmethod
+    def create(cls,
+               start_at=None, end_at=None, duration=None, delta=None,
+               period=None, filter_by=None, group_by=None,
+               sort_by=None, limit=None, collect=None):
+        delta = None
+        if duration is not None:
+            date = start_at or end_at or now()
+            delta = duration if start_at else -duration
+            start_at, end_at = cls.__calculate_start_and_end(period, date,
+                                                             delta)
+        return Query(start_at, end_at, delta, period,
+                     filter_by or [], group_by, sort_by, limit, collect or [])
+
+    @staticmethod
+    def __calculate_start_and_end(period, date, delta):
+        duration = period.delta * delta
+        start_of_period = period.start(date)
+
+        start_at, end_at = sorted(
+            [start_of_period, start_of_period + duration])
+
+        return start_at, end_at
+
+    @property
+    def collect_fields(self):
+        """Return a unique list of collect field names
+        >>> query = Query.create(collect=[('foo', 'sum'), ('foo', 'set')])
+        >>> query.collect_fields
+        ['foo']
+        """
+        return list(set([field for field, _ in self.collect]))
+
+    @property
+    def group_keys(self):
+        """Return a list of fields that are being grouped on
+
+        This is kinda coupled to how we group with Mongo but these keys
+        are in the returned results and are used in the nested merge to
+        create the hierarchical response.
+
+        >>> from ..core.timeseries import WEEK
+        >>> Query.create(group_by="foo").group_keys
+        ['foo']
+        >>> Query.create(period=WEEK).group_keys
+        ['_week_start_at']
+        >>> Query.create(group_by="foo", period=WEEK).group_keys
+        ['foo', '_week_start_at']
+        """
+        keys = []
+        if self.group_by:
+            keys.append(self.group_by)
+        if self.period:
+            keys.append(self.period.start_at_key)
+        return keys
+
+    @property
+    def is_grouped(self):
+        """
+        >>> Query.create(group_by="foo").is_grouped
+        True
+        >>> Query.create(period="week").is_grouped
+        True
+        >>> Query.create().is_grouped
+        False
+        """
+        return bool(self.group_by) or bool(self.period)
+
+    def get_shifted_query(self, shift):
+        """Return a new Query where the date is shifted by n periods"""
+        args = self._asdict()
+
+        args['start_at'] = args['start_at'] + (self.period.delta * shift)
+        args['end_at'] = args['end_at'] + (self.period.delta * shift)
+
+        return Query.create(**args)
diff --git a/backdrop/read/api.py b/backdrop/read/api.py
index 92923561..61d12f42 100644
--- a/backdrop/read/api.py
+++ b/backdrop/read/api.py
@@ -5,8 +5,8 @@
 from flask import Flask, jsonify, request
 from flask_featureflags import FeatureFlag
 
-from backdrop.read.query import Query
+from .query import parse_query_from_request
 from .validation import validate_request_args
 from ..core import log_handler, cache_control
 from ..core.data_set import NewDataSet
@@ -183,7 +183,7 @@
 def fetch(data_set_config):
     data_set = NewDataSet(storage, data_set_config)
 
     try:
-        query = Query.parse(request.args)
+        query = parse_query_from_request(request)
         data = data_set.execute_query(query)
     except InvalidOperationError:
diff --git a/backdrop/read/query.py b/backdrop/read/query.py
index 94447d88..3ab087e4 100644
--- a/backdrop/read/query.py
+++ b/backdrop/read/query.py
@@ -1,7 +1,14 @@
-from collections import namedtuple
-
 from backdrop.core.timeseries import parse_period
-from backdrop.core.timeutils import now, parse_time_as_utc
+from backdrop.core.timeutils import parse_time_as_utc
+from backdrop.core.query import Query
+
+
+__all__ = ['parse_query_from_request']
+
+
+def parse_query_from_request(request):
+    """Parses a Query object from a flask request"""
+    return Query.create(**parse_request_args(request.args))
 
 
 def if_present(func, value):
@@ -52,97 +59,3 @@
             args['collect'].append((collect_arg, 'default'))
 
     return args
-
-"""
-This is the internal Query object
- - Create list of attributes to build the query from
- - We use delta internally, but the end user will use 'duration'
-"""
-_Query = namedtuple(
-    '_Query',
-    ['start_at', 'end_at', 'delta', 'period',
-     'filter_by', 'group_by', 'sort_by', 'limit', 'collect'])
-
-
-class Query(_Query):
-    @classmethod
-    def create(cls,
-               start_at=None, end_at=None, duration=None, delta=None,
-               period=None, filter_by=None, group_by=None,
-               sort_by=None, limit=None, collect=None):
-        delta = None
-        if duration is not None:
-            date = start_at or end_at or now()
-            delta = duration if start_at else -duration
-            start_at, end_at = cls.__calculate_start_and_end(period, date,
-                                                             delta)
-        return Query(start_at, end_at, delta, period,
-                     filter_by or [], group_by, sort_by, limit, collect or [])
-
-    @classmethod
-    def parse(cls, request_args):
-        args = parse_request_args(request_args)
-        return Query.create(**args)
-
-    @staticmethod
-    def __calculate_start_and_end(period, date, delta):
-        duration = period.delta * delta
-        start_of_period = period.start(date)
-
-        start_at, end_at = sorted(
-            [start_of_period, start_of_period + duration])
-
-        return start_at, end_at
-
-    @property
-    def collect_fields(self):
-        """Return a unique list of collect field names
-        >>> query = Query.create(collect=[('foo', 'sum'), ('foo', 'set')])
-        >>> query.collect_fields
-        ['foo']
-        """
-        return list(set([field for field, _ in self.collect]))
-
-    @property
-    def group_keys(self):
-        """Return a list of fields that are being grouped on
-
-        This is kinda coupled to how we group with Mongo but these keys
-        are in the returned results and are used in the nested merge to
-        create the hierarchical response.
-
-        >>> from ..core.timeseries import WEEK
-        >>> Query.create(group_by="foo").group_keys
-        ['foo']
-        >>> Query.create(period=WEEK).group_keys
-        ['_week_start_at']
-        >>> Query.create(group_by="foo", period=WEEK).group_keys
-        ['foo', '_week_start_at']
-        """
-        keys = []
-        if self.group_by:
-            keys.append(self.group_by)
-        if self.period:
-            keys.append(self.period.start_at_key)
-        return keys
-
-    @property
-    def is_grouped(self):
-        """
-        >>> Query.create(group_by="foo").is_grouped
-        True
-        >>> Query.create(period="week").is_grouped
-        True
-        >>> Query.create().is_grouped
-        False
-        """
-        return bool(self.group_by) or bool(self.period)
-
-    def get_shifted_query(self, shift):
-        """Return a new Query where the date is shifted by n periods"""
-        args = self._asdict()
-
-        args['start_at'] = args['start_at'] + (self.period.delta * shift)
-        args['end_at'] = args['end_at'] + (self.period.delta * shift)
-
-        return Query.create(**args)
diff --git a/tests/core/integration/test_data_set_integration.py b/tests/core/integration/test_data_set_integration.py
index 89428120..9c7cb1d6 100644
--- a/tests/core/integration/test_data_set_integration.py
+++ b/tests/core/integration/test_data_set_integration.py
@@ -7,7 +7,7 @@
 from backdrop.core.data_set import DataSetConfig, NewDataSet
 from backdrop.core.storage.mongo import MongoStorageEngine
 from backdrop.core.timeseries import WEEK
-from backdrop.read.query import Query
+from backdrop.core.query import Query
 from tests.support.test_helpers import d_tz
 
 HOSTS = ['localhost']
diff --git a/tests/core/storage/test_storage.py b/tests/core/storage/test_storage.py
index d3fb7060..0552dc76 100644
--- a/tests/core/storage/test_storage.py
+++ b/tests/core/storage/test_storage.py
@@ -5,7 +5,7 @@
 from nose.tools import assert_raises
 from freezegun import freeze_time
 
-from backdrop.read.query import Query
+from backdrop.core.query import Query
 from backdrop.core.errors import DataSetCreationError
 from backdrop.core.records import add_period_keys
 from backdrop.core.timeseries import DAY
diff --git a/tests/core/test_data_set.py b/tests/core/test_data_set.py
index 41dbc7a2..e2419ee7 100644
--- a/tests/core/test_data_set.py
+++ b/tests/core/test_data_set.py
@@ -5,7 +5,7 @@
 from backdrop.core import data_set
 from backdrop.core.data_set import DataSetConfig
 
-from backdrop.read.query import Query
+from backdrop.core.query import Query
 from backdrop.core.timeseries import WEEK, MONTH
 from backdrop.core.errors import ValidationError
 from tests.support.test_helpers import d, d_tz, match
diff --git a/tests/read/test_query.py b/tests/core/test_query.py
similarity index 96%
rename from tests/read/test_query.py
rename to tests/core/test_query.py
index cc7695d1..10d9eac1 100644
--- a/tests/read/test_query.py
+++ b/tests/core/test_query.py
@@ -1,11 +1,12 @@
 from datetime import datetime
 
 from freezegun import freeze_time
-from hamcrest import *
+from hamcrest import assert_that, is_
 import pytz
 
 from unittest import TestCase
 
 from backdrop.core.timeseries import Day
-from backdrop.read.query import Query
+from backdrop.core.query import Query
+
 from tests.support.test_helpers import d_tz
diff --git a/tests/read/test_parse_request_args.py b/tests/read/test_parse_request_args.py
index 77a5df15..a77cbea8 100644
--- a/tests/read/test_parse_request_args.py
+++ b/tests/read/test_parse_request_args.py
@@ -1,7 +1,7 @@
 from datetime import datetime
 import unittest
 
-from hamcrest import *
+from hamcrest import assert_that, is_, has_item
 import pytz
 
 from werkzeug.datastructures import MultiDict
diff --git a/tests/read/test_read_api_query_endpoint.py b/tests/read/test_read_api_query_endpoint.py
index 2a8ab15b..ab0d6e0e 100644
--- a/tests/read/test_read_api_query_endpoint.py
+++ b/tests/read/test_read_api_query_endpoint.py
@@ -1,11 +1,11 @@
 import unittest
 import urllib
 import datetime
-from hamcrest import *
+from hamcrest import assert_that, is_
 from mock import patch
 import pytz
 
 from backdrop.read import api
-from backdrop.read.query import Query
+from backdrop.core.query import Query
 from tests.support.data_set import fake_data_set_exists
 from tests.support.test_helpers import has_status
diff --git a/tests/read/test_read_api_service_data_endpoint.py b/tests/read/test_read_api_service_data_endpoint.py
index 4fbe9941..b5f9683c 100644
--- a/tests/read/test_read_api_service_data_endpoint.py
+++ b/tests/read/test_read_api_service_data_endpoint.py
@@ -6,7 +6,7 @@
 import pytz
 
 from backdrop.core.timeseries import WEEK
 from backdrop.read import api
-from backdrop.read.query import Query
+from backdrop.core.query import Query
 from tests.support.data_set import fake_data_set_exists, fake_no_data_sets_exist
 from tests.support.test_helpers import has_status, has_header, d_tz