Skip to content
This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit

Permalink
Add validation for when period is in request args
Browse files Browse the repository at this point in the history
It doesn't make sense to have a period query without both start and end, or a period query with a limit but no grouping.
  • Loading branch information
nick-gravgaard committed Jan 28, 2014
1 parent dca7bbe commit 2ae217d
Show file tree
Hide file tree
Showing 11 changed files with 43 additions and 53 deletions.
15 changes: 11 additions & 4 deletions backdrop/read/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,17 @@ def validate(self, request_args, context):

class PeriodQueryValidator(Validator):
    """Validates the query-string arguments of a period (time-series) query.

    Adds an error to the request context when:
      * a period query lacks either 'start_at' or 'end_at' (unless a
        'delta' argument is supplied instead), or
      * a period query carries a 'limit' without a 'group_by'.
    """
    def validate(self, request_args, context):
        if 'period' not in request_args:
            # Not a period query -- nothing to validate here.
            return
        if 'delta' not in request_args:
            # Without a delta, the period must be bounded explicitly.
            if 'start_at' not in request_args or \
                    'end_at' not in request_args:
                self.add_error("both 'start_at' and 'end_at' are required "
                               "for a period query")
        if 'group_by' not in request_args and 'limit' in request_args:
            # A limit only makes sense for a grouped period query, where it
            # is applied to the list of groups rather than the time series
            # inside them.
            # NOTE(review): the message below says "grouped" although this
            # branch fires when group_by is ABSENT -- the wording is
            # misleading, but it is kept byte-identical since clients may
            # match on it; confirm before rewording.
            self.add_error("A grouped period query cannot be limited")


class PositiveIntegerValidator(Validator):
Expand Down
2 changes: 1 addition & 1 deletion features/end_to_end.feature
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,6 @@ Feature: end-to-end platform test
Given I have the data in "grouped_timestamps.json"
and I have a bucket named "flavour_events"
when I post the data to "/flavour_events"
and I go to "/flavour_events?period=week&group_by=flavour"
and I go to "/flavour_events?period=week&group_by=flavour&start_at=2013-03-18T00:00:00Z&end_at=2013-04-08T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "4" result(s)
3 changes: 3 additions & 0 deletions features/fixtures/collectables.json
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
[
{
"_timestamp": "2013-08-05T10:10:10+00:00",
"_week_start_at": "2013-08-05T00:00:00+00:00",
"pickup": "mushroom"
},
{
"_timestamp": "2013-08-05T10:10:10+00:00",
"_week_start_at": "2013-08-05T00:00:00+00:00",
"pickup": "ring"
},
{
"_timestamp": "2013-08-12T10:10:10+00:00",
"_week_start_at": "2013-08-12T00:00:00+00:00",
"pickup": "1-up"
}
Expand Down
4 changes: 2 additions & 2 deletions features/read_api/collect.feature
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ Feature: collect fields into grouped responses

Scenario: should be able to collect on a key for period grouped queries
Given "licensing_2.json" is in "foo" bucket
when I go to "/foo?collect=authority&period=week&group_by=licence_name"
when I go to "/foo?collect=authority&period=week&group_by=licence_name&start_at=2012-12-03T00:00:00Z&end_at=2012-12-17T00:00:00Z"
then I should get back a status of "200"
and the "2nd" result should have "authority" with item ""Westminster""
and the "2nd" result should have "authority" with item ""Camden""
Expand All @@ -40,7 +40,7 @@ Feature: collect fields into grouped responses

Scenario: should be able to perform maths on sub groups
Given "evl_volumetrics.json" is in "foo" bucket
when I go to "/foo?period=month&group_by=channel&collect=volume:sum"
when I go to "/foo?period=month&group_by=channel&collect=volume:sum&start_at=2012-04-01T00:00:00Z&end_at=2012-05-01T00:00:00Z"
then I should get back a status of "200"
and the "1st" result should have "volume:sum" with json "1862526.0"
and the "1st" result should have a sub group with "volume:sum" with json "1862526.0"
Expand Down
5 changes: 3 additions & 2 deletions features/read_api/collect_and_period.feature
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ Feature: combining period and group queries
Given "collectables.json" is in "collect_me" bucket

Scenario: combining a period and collect query
when I go to "/collect_me?period=week&collect=pickup"
when I go to "/collect_me?period=week&collect=pickup&start_at=2013-08-05T00:00:00Z&end_at=2013-08-12T00:00:00Z"
then I should get back a status of "200"
and the "1st" result should be "{"_count": 2.0, "pickup:set": ["mushroom", "ring"], "_end_at": "2013-08-12T00:00:00+00:00", "pickup": ["mushroom", "ring"], "_start_at": "2013-08-05T00:00:00+00:00"}"
and the JSON should have "1" result(s)
and the "1st" result should be "{"_count": 2.0, "pickup:set": ["mushroom", "ring"], "_end_at": "2013-08-12T00:00:00+00:00", "pickup": ["mushroom", "ring"], "_start_at": "2013-08-05T00:00:00+00:00"}"
10 changes: 5 additions & 5 deletions features/read_api/group.feature
Original file line number Diff line number Diff line change
Expand Up @@ -28,15 +28,15 @@ Feature: grouping queries for read api

Scenario: grouping data by time period - week
Given "stored_timestamps.json" is in "weekly" bucket
when I go to "/weekly?period=week"
when I go to "/weekly?period=week&start_at=2013-03-11T00:00:00Z&end_at=2013-03-25T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "2" results
and the "1st" result should be "{"_count": 3.0, "_start_at": "2013-03-11T00:00:00+00:00", "_end_at" : "2013-03-18T00:00:00+00:00"}"
and the "2nd" result should be "{"_count": 2.0, "_start_at": "2013-03-18T00:00:00+00:00", "_end_at" : "2013-03-25T00:00:00+00:00"}"

Scenario: grouping data by time period (week) and filtering
Given "stored_timestamps_for_filtering.json" is in "weekly" bucket
when I go to "/weekly?period=week&filter_by=name:alpha"
when I go to "/weekly?period=week&filter_by=name:alpha&start_at=2013-03-11T00:00:00Z&end_at=2013-03-25T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "2" results
and the "1st" result should be "{"_count": 2.0, "_start_at": "2013-03-11T00:00:00+00:00", "_end_at" : "2013-03-18T00:00:00+00:00"}"
Expand All @@ -45,15 +45,15 @@ Feature: grouping queries for read api

Scenario: grouping data by time period (week) and a name
Given "stored_timestamps_for_filtering.json" is in "weekly" bucket
when I go to "/weekly?period=week&group_by=name"
when I go to "/weekly?period=week&group_by=name&start_at=2013-03-11T00:00:00Z&end_at=2013-03-25T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "2" results
and the "1st" result should have "values" with item "{"_start_at": "2013-03-11T00:00:00+00:00", "_end_at": "2013-03-18T00:00:00+00:00", "_count": 2.0}"


Scenario: grouping data by time period (week) and a name and filtered by a key and value
Given "stored_timestamps_for_filtering.json" is in "weekly" bucket
when I go to "/weekly?period=week&group_by=name&filter_by=name:alpha"
when I go to "/weekly?period=week&group_by=name&filter_by=name:alpha&start_at=2013-03-11T00:00:00Z&end_at=2013-03-25T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "1" results
and the "1st" result should have "values" with item "{"_start_at": "2013-03-11T00:00:00+00:00", "_end_at": "2013-03-18T00:00:00+00:00", "_count": 2.0}"
Expand All @@ -62,7 +62,7 @@ Feature: grouping queries for read api

Scenario: grouping data by time period (week) and a name that doesn't exist
Given "stored_timestamps_for_filtering.json" is in "weekly" bucket
when I go to "/weekly?period=week&group_by=wibble"
when I go to "/weekly?period=week&group_by=wibble&start_at=2013-03-11T00:00:00Z&end_at=2013-03-25T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "0" results

Expand Down
8 changes: 0 additions & 8 deletions features/read_api/group_by_day.feature
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,6 @@
Feature: querying for data grouped by day
This feature is for querying for data grouped by day

Scenario: grouping data by day
Given I have the data in "daily_timestamps.json"
And I have a bucket named "day"
When I post the data to "/day"
Then I should get back a status of "200"
When I go to "/day?period=day"
Then the JSON should have "12" results

Scenario: grouping data by day between two timestamps
Given I have the data in "daily_timestamps.json"
And I have a bucket named "day"
Expand Down
8 changes: 0 additions & 8 deletions features/read_api/group_by_hour.feature
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,6 @@
Feature: querying for data grouped by hour
This feature is for querying for data grouped by hour

Scenario: grouping data by hour
Given I have the data in "hourly_timestamps.json"
And I have a bucket named "hour"
When I post the data to "/hour"
Then I should get back a status of "200"
When I go to "/hour?period=hour"
Then the JSON should have "27" results

Scenario: grouping data by hour between two days
Given I have the data in "hourly_timestamps.json"
And I have a bucket named "hour"
Expand Down
9 changes: 0 additions & 9 deletions features/read_api/monthly_grouping.feature
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,6 @@
Feature: querying for data grouped by month
This feature is for querying for data grouped by month

Scenario: grouping data by month
Given I have the data in "monthly_timestamps.json"
And I have a bucket named "month"
When I post the data to "/month"
Then I should get back a status of "200"
When I go to "/month?period=month"
Then the JSON should have "12" results

Scenario: grouping data by month between two allowed timestamps
Given I have the data in "monthly_timestamps.json"
And I have a bucket named "month"
Expand All @@ -18,7 +10,6 @@ Feature: querying for data grouped by month
When I go to "/month?period=month&start_at=2013-05-01T00:00:00Z&end_at=2013-07-01T00:00:00Z"
Then the JSON should have "2" results


Scenario: grouping data by month between two disallowed timestamps
Given I have the data in "monthly_timestamps.json"
And I have a bucket named "month_with_wrong_timestamp"
Expand Down
16 changes: 4 additions & 12 deletions features/read_api/sort_and_limit.feature
Original file line number Diff line number Diff line change
Expand Up @@ -33,21 +33,13 @@ Feature: sorting and limiting

Scenario: Sort periodic grouped query on a key
Given "licensing_2.json" is in "foo" bucket
when I go to "/foo?group_by=authority&period=week&sort_by=_count:descending"
when I go to "/foo?group_by=authority&period=week&sort_by=_count:descending&start_at=2012-12-03T00:00:00Z&end_at=2012-12-17T00:00:00Z"
then I should get back a status of "200"
and the JSON should have "2" results
and the "1st" result should have "authority" equaling "Westminster"

Scenario: Sort periodic grouped query on a key and limit
Scenario: Limiting a periodic query is not allowed if start and end are defined
Given "licensing_2.json" is in "foo" bucket
when I go to "/foo?group_by=authority&period=week&sort_by=_count:ascending&limit=1"
then I should get back a status of "200"
and the JSON should have "1" results
and the "1st" result should have "authority" equaling "Camden"
when I go to "/foo?period=week&limit=1&start_at=2012-12-03T00:00:00Z&end_at=2012-12-17T00:00:00Z"
then I should get back a status of "400"

Scenario: Limit periodic query
Given "licensing_2.json" is in "foo" bucket
when I go to "/foo?period=week&limit=1"
then I should get back a status of "200"
and the JSON should have "1" result
and the "1st" result should have "_start_at" equaling "2012-12-03T00:00:00+00:00"
16 changes: 14 additions & 2 deletions features/steps/read_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,19 @@ def step(context, query):
headers={"If-None-Match": etag})


def get_error_message(response_data):
    """Extract the 'message' field from a JSON error response body.

    Returns an empty string when the body is not valid JSON, is not a JSON
    object, or has no 'message' key, so the result is always safe to embed
    in assertion output.
    """
    try:
        return json.loads(response_data).get('message', "")
    except (ValueError, TypeError, AttributeError):
        # ValueError: body is not JSON; TypeError: body is not str/bytes;
        # AttributeError: parsed JSON is not an object (no .get).
        # Previously a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt.
        return ""


@then('I should get back a status of "{expected_status}"')
def step(context, expected_status):
    # Pass the API's error message as the assertion description so an
    # unexpected status code is immediately diagnosable from test output.
    assert_that(context.response.status_code, is_(int(expected_status)),
                get_error_message(context.response.data))


@then('I should get a "{header}" header of "{value}"')
Expand All @@ -102,7 +112,9 @@ def step(context, expected_message):

@then('the JSON should have "(?P<n>\d+)" results?')
def step(context, n):
    response_data = json.loads(context.response.data)
    # If the response is an error payload there is no 'data' key; surface
    # the server's message instead of a bare KeyError.
    assert_that('data' in response_data, response_data.get('message', None))
    the_data = response_data['data']
    assert_that(the_data, has_length(int(n)))


Expand Down

0 comments on commit 2ae217d

Please sign in to comment.