diff --git a/backdrop/admin/app.py b/backdrop/admin/app.py
index 50b6485e..b222d8e2 100644
--- a/backdrop/admin/app.py
+++ b/backdrop/admin/app.py
@@ -8,8 +8,6 @@
 from ..core import cache_control, log_handler, database
 from ..core.bucket import Bucket
 from ..core.errors import ParseError, ValidationError
-from ..core.log_handler \
-    import create_request_logger, create_response_logger
 from ..core.repository \
     import BucketConfigRepository, UserConfigRepository
 from ..core.flaskutils import BucketConverter
@@ -35,7 +33,10 @@
     app.config['DATABASE_NAME']
 )
 
-bucket_repository = BucketConfigRepository(db)
+bucket_repository = BucketConfigRepository(
+    app.config['STAGECRAFT_URL'],
+    app.config['STAGECRAFT_DATA_SET_QUERY_TOKEN'])
+
 user_repository = UserConfigRepository(db)
diff --git a/backdrop/admin/config/development.py b/backdrop/admin/config/development.py
index 8f0cb8f2..3c5d982c 100644
--- a/backdrop/admin/config/development.py
+++ b/backdrop/admin/config/development.py
@@ -11,3 +11,6 @@
     from development_environment import *
 except ImportError:
     from development_environment_sample import *
+
+STAGECRAFT_URL = 'http://localhost:8080'
+STAGECRAFT_DATA_SET_QUERY_TOKEN = 'stagecraft-data-set-query-token-fake'
diff --git a/backdrop/admin/config/test.py b/backdrop/admin/config/test.py
index 97eb4df7..9ccd2aa4 100644
--- a/backdrop/admin/config/test.py
+++ b/backdrop/admin/config/test.py
@@ -9,3 +9,5 @@
 MONGO_PORT = 27017
 
 from test_environment import *
+
+from development import STAGECRAFT_URL, STAGECRAFT_DATA_SET_QUERY_TOKEN
diff --git a/backdrop/core/repository.py b/backdrop/core/repository.py
index b2d3f136..e2dd9bde 100644
--- a/backdrop/core/repository.py
+++ b/backdrop/core/repository.py
@@ -1,3 +1,8 @@
+
+import json
+
+import requests
+
 from backdrop.core.bucket import BucketConfig
 from backdrop.core.user import UserConfig
@@ -42,27 +47,66 @@ def _create_model(self, doc):
 
 
 class BucketConfigRepository(object):
-    def __init__(self, db):
-        self._db = db
-        self._repository = _Repository(db, BucketConfig, "buckets", "name")
-
-    def save(self, bucket_config, create_bucket=True):
-        self._repository.save(bucket_config)
-
-        if bucket_config.realtime and create_bucket:
-            self._db.create_capped_collection(bucket_config.name,
-                                              bucket_config.capped_size)
+    def __init__(self, stagecraft_url, stagecraft_token):
+        self._stagecraft_url = stagecraft_url
+        self._stagecraft_token = stagecraft_token
 
     def get_all(self):
-        return self._repository.get_all()
+        data_set_url = '{url}/data-sets/'.format(url=self._stagecraft_url)
+
+        data_sets = _decode_json(_get_url(data_set_url))
+        return [_make_bucket_config(data_set) for data_set in data_sets]
 
     def retrieve(self, name):
-        return self._repository.retrieve(name)
+        if len(name) == 0:
+            raise ValueError('Name must not be empty')
+        data_set_url = ('{url}/data-sets/{data_set_name}'.format(
+            url=self._stagecraft_url,
+            data_set_name=name))
+
+        data_set = _decode_json(_get_url(data_set_url))
+        return _make_bucket_config(data_set)
 
     def get_bucket_for_query(self, data_group, data_type):
-        return self._repository.find_first_instance_of(
-            {"data_group": data_group,
-             "data_type": data_type})
+        empty_vars = []
+        if len(data_group) == 0:
+            empty_vars += ['Data Group']
+        if len(data_type) == 0:
+            empty_vars += ['Data Type']
+        if len(empty_vars) > 0:
+            raise ValueError(' and '.join(empty_vars) + ' must not be empty')
+        data_set_url = ('{url}/data-sets?data-group={data_group_name}'
+                        '&data-type={data_type_name}'.format(
+                            url=self._stagecraft_url,
+                            data_group_name=data_group,
+                            data_type_name=data_type))
+
+        data_sets = _decode_json(_get_url(data_set_url))
+        if len(data_sets) > 0:
+            return _make_bucket_config(data_sets[0])
+        return None
+
+
+def _make_bucket_config(stagecraft_dict):
+    if stagecraft_dict is None:
+        return None
+    return BucketConfig(**stagecraft_dict)
+
+
+def _decode_json(string):
+    return json.loads(string) if string is not None else None
+
+
+def _get_url(url):
+    response = requests.get(url)
+    try:
+        response.raise_for_status()
+    except requests.HTTPError as e:
+        if e.response.status_code == 404:
+            return None
+        raise e
+
+    return response.content
 
 
 class UserConfigRepository(object):
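Note: bucket definitions are no longer read from Mongo; the repository above fetches them over HTTP from Stagecraft and deserialises the JSON into BucketConfig objects. A minimal usage sketch (the URL and token are the development placeholders from the configs in this change; as the code shows, _get_url does not yet attach the token to the request):

    from backdrop.core.repository import BucketConfigRepository

    repository = BucketConfigRepository(
        'http://localhost:8080', 'stagecraft-data-set-query-token-fake')

    # GET http://localhost:8080/data-sets/govuk_visitors
    config = repository.retrieve(name='govuk_visitors')

    # GET http://localhost:8080/data-sets?data-group=govuk&data-type=realtime
    config = repository.get_bucket_for_query('govuk', 'realtime')

    # A 404 from Stagecraft comes back as None rather than an exception
    missing = repository.retrieve(name='no_such_data_set')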
diff --git a/backdrop/read/api.py b/backdrop/read/api.py
index a30762fd..f59ad94f 100644
--- a/backdrop/read/api.py
+++ b/backdrop/read/api.py
@@ -3,10 +3,8 @@
 from os import getenv
 
 from bson import ObjectId
-from flask import Flask, jsonify, request, redirect
+from flask import Flask, jsonify, request
 from flask_featureflags import FeatureFlag
-from backdrop.core.log_handler \
-    import create_request_logger, create_response_logger
 from backdrop.read.query import Query
 from .validation import validate_request_args
@@ -32,7 +30,9 @@
     app.config['DATABASE_NAME']
 )
 
-bucket_repository = BucketConfigRepository(db)
+bucket_repository = BucketConfigRepository(
+    app.config['STAGECRAFT_URL'],
+    app.config['STAGECRAFT_DATA_SET_QUERY_TOKEN'])
 
 log_handler.set_up_logging(app, GOVUK_ENV)
diff --git a/backdrop/read/config/development.py b/backdrop/read/config/development.py
index 18bf8567..b9d32bbf 100644
--- a/backdrop/read/config/development.py
+++ b/backdrop/read/config/development.py
@@ -3,44 +3,47 @@
 MONGO_PORT = 27017
 LOG_LEVEL = "DEBUG"
 RAW_QUERIES_ALLOWED = {
-    "government_annotations": True,
-    "govuk_realtime": True,
-    "licence_finder_monitoring": True,
-    "licensing": False,
-    "licensing_journey": True,
-    "licensing_monitoring": True,
-    "licensing_realtime": True,
-    "lpa_volumes": True,
-    "lpa_monitoring": True,
-    # electronic vehicle licensing
-    "electronic_vehicle_licensing_monitoring": True,
-    "evl_customer_satisfaction": True,
-    "evl_volumetrics": True,
-    "sorn_monitoring": True,
-    "sorn_realtime": True,
-    "tax_disc_monitoring": True,
-    "tax_disc_realtime": True,
-    # fco
-    "deposit_foreign_marriage_journey": True,
-    "deposit_foreign_marriage_monitoring": True,
-    "deposit_foreign_marriage_realtime": True,
-    "pay_foreign_marriage_certificates_journey": True,
-    "pay_foreign_marriage_certificates_monitoring": True,
-    "pay_foreign_marriage_certificates_realtime": True,
-    "pay_legalisation_drop_off_journey": True,
-    "pay_legalisation_drop_off_monitoring": True,
-    "pay_legalisation_drop_off_realtime": True,
-    "pay_legalisation_post_journey": True,
-    "pay_legalisation_post_monitoring": True,
-    "pay_legalisation_post_realtime": True,
-    "pay_register_birth_abroad_journey": True,
-    "pay_register_birth_abroad_monitoring": True,
-    "pay_register_birth_abroad_realtime": True,
-    "pay_register_death_abroad_journey": True,
-    "pay_register_death_abroad_monitoring": True,
-    "pay_register_death_abroad_realtime": True,
-    # HMRC preview
-    "hmrc_preview": True,
-    # LPA / Lasting Power of Attorney
-    "lpa_journey": True,
+    "government_annotations": True,
+    "govuk_realtime": True,
+    "licence_finder_monitoring": True,
+    "licensing": False,
+    "licensing_journey": True,
+    "licensing_monitoring": True,
+    "licensing_realtime": True,
+    "lpa_volumes": True,
+    "lpa_monitoring": True,
+    # electronic vehicle licensing
+    "electronic_vehicle_licensing_monitoring": True,
+    "evl_customer_satisfaction": True,
+    "evl_volumetrics": True,
+    "sorn_monitoring": True,
+    "sorn_realtime": True,
+    "tax_disc_monitoring": True,
+    "tax_disc_realtime": True,
+    # fco
+    "deposit_foreign_marriage_journey": True,
+    "deposit_foreign_marriage_monitoring": True,
+    "deposit_foreign_marriage_realtime": True,
+    "pay_foreign_marriage_certificates_journey": True,
+    "pay_foreign_marriage_certificates_monitoring": True,
+    "pay_foreign_marriage_certificates_realtime": True,
+    "pay_legalisation_drop_off_journey": True,
+    "pay_legalisation_drop_off_monitoring": True,
+    "pay_legalisation_drop_off_realtime": True,
+    "pay_legalisation_post_journey": True,
+    "pay_legalisation_post_monitoring": True,
+    "pay_legalisation_post_realtime": True,
+    "pay_register_birth_abroad_journey": True,
+    "pay_register_birth_abroad_monitoring": True,
+    "pay_register_birth_abroad_realtime": True,
+    "pay_register_death_abroad_journey": True,
+    "pay_register_death_abroad_monitoring": True,
+    "pay_register_death_abroad_realtime": True,
+    # HMRC preview
+    "hmrc_preview": True,
+    # LPA / Lasting Power of Attorney
+    "lpa_journey": True,
 }
+
+STAGECRAFT_URL = 'http://localhost:8080'
+STAGECRAFT_DATA_SET_QUERY_TOKEN = 'stagecraft-data-set-query-token-fake'
diff --git a/backdrop/read/config/test.py b/backdrop/read/config/test.py
index 229890ce..174df7c6 100644
--- a/backdrop/read/config/test.py
+++ b/backdrop/read/config/test.py
@@ -10,3 +10,5 @@
     "rawr": True,
     "month": True,
 }
+
+from development import STAGECRAFT_URL, STAGECRAFT_DATA_SET_QUERY_TOKEN
diff --git a/backdrop/write/api.py b/backdrop/write/api.py
index e36c4ef9..30bd5e64 100644
--- a/backdrop/write/api.py
+++ b/backdrop/write/api.py
@@ -31,7 +31,10 @@
     app.config['DATABASE_NAME']
 )
 
-bucket_repository = BucketConfigRepository(db)
+bucket_repository = BucketConfigRepository(
+    app.config['STAGECRAFT_URL'],
+    app.config['STAGECRAFT_DATA_SET_QUERY_TOKEN'])
+
 user_repository = UserConfigRepository(db)
 
 log_handler.set_up_logging(app, GOVUK_ENV)
diff --git a/backdrop/write/config/development.py b/backdrop/write/config/development.py
index 9f157bc0..7391a6fb 100644
--- a/backdrop/write/config/development.py
+++ b/backdrop/write/config/development.py
@@ -12,3 +12,6 @@
     from development_environment import *
 except ImportError:
     from development_environment_sample import *
+
+STAGECRAFT_URL = 'http://localhost:8080'
+STAGECRAFT_DATA_SET_QUERY_TOKEN = 'stagecraft-data-set-query-token-fake'
diff --git a/backdrop/write/config/test.py b/backdrop/write/config/test.py
index 9cb5076d..e1421592 100644
--- a/backdrop/write/config/test.py
+++ b/backdrop/write/config/test.py
@@ -10,5 +10,6 @@
     "evl_volumetrics": ["_timestamp", "service", "transaction"],
 }
 
-from development import CREATE_COLLECTION_ENDPOINT_TOKEN
+from development import (CREATE_COLLECTION_ENDPOINT_TOKEN, STAGECRAFT_URL,
+                         STAGECRAFT_DATA_SET_QUERY_TOKEN)
 from test_environment import *
diff --git a/features/admin/csv_upload.feature b/features/admin/csv_upload.feature
index aa592c52..dc95ff14 100644
--- a/features/admin/csv_upload.feature
+++ b/features/admin/csv_upload.feature
@@ -2,8 +2,9 @@ Feature: CSV Upload
 
     Scenario: Upload CSV data
-        Given I have a bucket named "my_bucket"
-        And bucket setting upload_format is "csv"
+        Given I have a bucket named "my_bucket" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "my_bucket"
         And a file named "data.csv"
@@ -28,8 +29,9 @@ Feature: CSV Upload
             city,città
             coffee,caffè
             """
-        And I have a bucket named "my_bucket"
-        And bucket setting upload_format is "csv"
+        And I have a bucket named "my_bucket" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "my_bucket"
         When I go to "/my_bucket/upload"
@@ -59,8 +61,9 @@ Feature: CSV Upload
             2013-01-01,2013-01-07,abc,287
             2013-01-01,2013-01-07,def,425
             """
-        And I have a bucket named "bucket_with_auto_id"
-        And bucket setting upload_format is "csv"
+        And I have a bucket named "bucket_with_auto_id" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "bucket_with_auto_id"
         When I go to "/bucket_with_auto_id/upload"
diff --git a/features/admin/csv_upload_validation.feature b/features/admin/csv_upload_validation.feature
index 0d461875..e73bfbfe 100644
--- a/features/admin/csv_upload_validation.feature
+++ b/features/admin/csv_upload_validation.feature
@@ -8,8 +8,9 @@ Feature: csv upload validation
             Pawel,27,Polish,male
             Max,35,Italian,male
             """
-        And I have a bucket named "foo"
-        And bucket setting upload_format is "csv"
+        And I have a bucket named "foo" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "foo"
         When I go to "/foo/upload"
@@ -25,8 +26,9 @@ Feature: csv upload validation
             Pawel,27,Polish,male
             Max,35,Italian
             """
-        And I have a bucket named "foo"
-        And bucket setting upload_format is "csv"
+        And I have a bucket named "foo" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "foo"
         When I go to "/foo/upload"
@@ -37,8 +39,9 @@ Feature: csv upload validation
 
     Scenario: file too large
         Given a file named "data.csv" of size "1000000" bytes
-        And I have a bucket named "foo"
-        And bucket setting upload_format is "csv"
+        And I have a bucket named "foo" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "foo"
         When I go to "/foo/upload"
@@ -49,8 +52,9 @@ Feature: csv upload validation
 
     Scenario: non UTF8 characters
         Given a file named "data.csv" with fixture "bad-characters.csv"
-        And I have a bucket named "foo"
-        And bucket setting upload_format is "csv"
+        And I have a bucket named "foo" with settings
+            | key | value |
+            | upload_format | "csv" |
         And I am logged in
         And I can upload to "foo"
         When I go to "/foo/upload"
diff --git a/features/admin/excel_upload.feature b/features/admin/excel_upload.feature
index d769fe6f..aacf9b6e 100644
--- a/features/admin/excel_upload.feature
+++ b/features/admin/excel_upload.feature
@@ -3,8 +3,9 @@ Feature: excel upload
 
     Scenario: Upload XLSX file
         Given a file named "data.xlsx" with fixture "data.xlsx"
-        And I have a bucket named "my_xlsx_bucket"
-        And bucket setting upload_format is "excel"
+        And I have a bucket named "my_xlsx_bucket" with settings
+            | key | value |
+            | upload_format | "excel" |
         And I am logged in
         And I can upload to "my_xlsx_bucket"
         When I go to "/my_xlsx_bucket/upload"
@@ -18,8 +19,9 @@ Feature: excel upload
 
     Scenario: using _timestamp for an auto id
         Given a file named "LPA_MI_EXAMPLE.xls" with fixture "LPA_MI_EXAMPLE.xls"
-        And I have a bucket named "bucket_with_timestamp_auto_id"
-        And bucket setting upload_format is "excel"
+        And I have a bucket named "bucket_with_timestamp_auto_id" with settings
+            | key | value |
+            | upload_format | "excel" |
         And I am logged in
         And I can upload to "bucket_with_timestamp_auto_id"
         When I go to "/bucket_with_timestamp_auto_id/upload"
diff --git a/features/contrib/evl_upload.feature b/features/contrib/evl_upload.feature
index 22fb264c..b3581f1a 100644
--- a/features/contrib/evl_upload.feature
+++ b/features/contrib/evl_upload.feature
@@ -3,9 +3,10 @@ Feature: EVL Upload
 
     Scenario: Upload call center volumes
         Given a file named "CEG Data.xlsx" with fixture "contrib/CEG Transaction Tracker.xlsx"
-        and I have a bucket named "evl_ceg_data"
-        and bucket setting upload_format is "excel"
-        and bucket setting upload_filters is ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.ceg_volumes"]
+        and I have a bucket named "evl_ceg_data" with settings
+            | key | value |
+            | upload_format | "excel" |
+            | upload_filters | ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.ceg_volumes"] |
         and I am logged in
         and I can upload to "evl_ceg_data"
         when I go to "/evl_ceg_data/upload"
@@ -19,9 +20,10 @@ Feature: EVL Upload
 
     Scenario: Upload services volumetrics
        Given a file named "EVL Volumetrics.xlsx" with fixture "contrib/EVL Services Volumetrics Sample.xls"
-        and I have a bucket named "evl_services_volumetrics"
-        and bucket setting upload_format is "excel"
-        and bucket setting upload_filters is ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.service_volumetrics"]
+        and I have a bucket named "evl_services_volumetrics" with settings
+            | key | value |
+            | upload_format | "excel" |
+            | upload_filters | ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.service_volumetrics"] |
         and I am logged in
         and I can upload to "evl_services_volumetrics"
         when I go to "/evl_services_volumetrics/upload"
@@ -35,9 +37,10 @@ Feature: EVL Upload
 
     Scenario: Upload service failures
         Given a file named "EVL Volumetrics.xlsx" with fixture "contrib/EVL Services Volumetrics Sample.xls"
-        and I have a bucket named "evl_services_failures"
-        and bucket setting upload_format is "excel"
-        and bucket setting upload_filters is ["backdrop.contrib.evl_upload_filters.service_failures"]
+        and I have a bucket named "evl_services_failures" with settings
+            | key | value |
+            | upload_format | "excel" |
+            | upload_filters | ["backdrop.contrib.evl_upload_filters.service_failures"] |
         and I am logged in
         and I can upload to "evl_services_failures"
         when I go to "/evl_services_failures/upload"
@@ -53,9 +56,10 @@ Feature: EVL Upload
 
     Scenario: Upload channel volumetrics
         Given a file named "EVL Volumetrics.xlsx" with fixture "contrib/EVL Channel Volumetrics Sample.xls"
-        and I have a bucket named "evl_channel_volumetrics"
-        and bucket setting upload_format is "excel"
-        and bucket setting upload_filters is ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.channel_volumetrics"]
+        and I have a bucket named "evl_channel_volumetrics" with settings
+            | key | value |
+            | upload_format | "excel" |
+            | upload_filters | ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.channel_volumetrics"] |
         and I am logged in
         and I can upload to "evl_channel_volumetrics"
         when I go to "/evl_channel_volumetrics/upload"
@@ -70,9 +74,10 @@ Feature: EVL Upload
 
     Scenario: Upload customer satisfaction
         Given a file named "EVL Satisfaction.xlsx" with fixture "contrib/EVL Customer Satisfaction.xlsx"
-        and I have a bucket named "evl_customer_satisfaction"
-        and bucket setting upload_format is "excel"
-        and bucket setting upload_filters is ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.customer_satisfaction"]
+        and I have a bucket named "evl_customer_satisfaction" with settings
+            | key | value |
+            | upload_format | "excel" |
+            | upload_filters | ["backdrop.core.upload.filters.first_sheet_filter","backdrop.contrib.evl_upload_filters.customer_satisfaction"] |
         and I am logged in
         and I can upload to "evl_customer_satisfaction"
         when I go to "/evl_customer_satisfaction/upload"
@@ -87,9 +92,10 @@ Feature: EVL Upload
 
     Scenario: Upload evl volumetrics
         Given a file named "evl-volumetrics.xls" with fixture "contrib/evl-volumetrics.xls"
-        and I have a bucket named "evl_volumetrics"
-        and bucket setting upload_format is "excel"
-        and bucket setting upload_filters is ["backdrop.contrib.evl_upload_filters.volumetrics"]
+        and I have a bucket named "evl_volumetrics" with settings
+            | key | value |
+            | upload_format | "excel" |
+            | upload_filters | ["backdrop.contrib.evl_upload_filters.volumetrics"] |
         and I am logged in
         and I can upload to "evl_volumetrics"
         when I go to "/evl_volumetrics/upload"
diff --git a/features/end_to_end.feature b/features/end_to_end.feature
index 6974537d..2c77784f 100644
--- a/features/end_to_end.feature
+++ b/features/end_to_end.feature
@@ -3,17 +3,19 @@ Feature: end-to-end platform test
 
     Scenario: write data to platform
         Given I have the data in "dinosaurs.json"
-        and I have a bucket named "reptiles"
+        and I have a bucket named "reptiles" with settings
+            | key | value |
+            | raw_queries_allowed | true |
         and I use the bearer token for the bucket
-        and bucket setting raw_queries_allowed is true
         when I post the data to "/reptiles"
         then I should get back a status of "200"
 
     Scenario: write and retrieve data from platform
         Given I have the data in "dinosaurs.json"
-        and I have a bucket named "reptiles"
+        and I have a bucket named "reptiles" with settings
+            | key | value |
+            | raw_queries_allowed | true |
         and I use the bearer token for the bucket
-        and bucket setting raw_queries_allowed is true
         when I post the data to "/reptiles"
         and I go to "/reptiles?filter_by=size:big"
         then I should get back a status of "200"
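Note: the write half of the end-to-end scenario above boils down to an authenticated POST. A rough sketch of what it drives (the host and port are illustrative assumptions, but the '%s-bearer-token' naming matches the mocked data-set responses defined in features/steps/read_api.py later in this change):

    import requests

    with open('features/fixtures/dinosaurs.json') as f:
        response = requests.post(
            'http://localhost:5000/reptiles',  # assumed local write API address
            data=f.read(),
            headers={'Authorization': 'Bearer reptiles-bearer-token',
                     'Content-Type': 'application/json'})
    assert response.status_code == 200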
diff --git a/features/environment.py b/features/environment.py
index 205c9d3f..de1c8a92 100644
--- a/features/environment.py
+++ b/features/environment.py
@@ -4,6 +4,8 @@
 from backdrop.core.log_handler import get_log_file_handler
 from features.support.splinter_client import SplinterClient
+from features.support.stagecraft import StagecraftService
+
 sys.path.append(
     os.path.join(os.path.dirname(__file__), '..')
 )
@@ -46,6 +48,15 @@ def after_scenario(context, scenario):
             handler()
         except Exception as e:
             log.exception(e)
+    if server_running(context):
+        context.mock_stagecraft_server.stop()
+        context.mock_stagecraft_server = None
+
+
+def server_running(context):
+    return 'mock_stagecraft_server' in context and \
+        context.mock_stagecraft_server and \
+        context.mock_stagecraft_server.running()
 
 
 def after_feature(context, _):
diff --git a/features/read_api/cache_control.feature b/features/read_api/cache_control.feature
index 7c707b75..e42b8845 100644
--- a/features/read_api/cache_control.feature
+++ b/features/read_api/cache_control.feature
@@ -5,9 +5,9 @@ Feature: the read api should provide cache control headers
         then the "Cache-Control" header should be "no-cache"
 
     Scenario: query returns an etag
-        Given "licensing.json" is in "foo" bucket
-        and I have a bucket named "foo"
-        and bucket setting raw_queries_allowed is true
+        Given "licensing.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo"
         then the "ETag" header should be ""7c7cec78f75fa9f30428778f2b6da9b42bd104d0""
diff --git a/features/read_api/combination_query.feature b/features/read_api/combination_query.feature
index 65d449cb..53d77f82 100644
--- a/features/read_api/combination_query.feature
+++ b/features/read_api/combination_query.feature
@@ -3,8 +3,9 @@ Feature: more complex combination of parameters that are used by clients
 
     Background:
-        Given "licensing_preview.json" is in "licensing" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "licensing_preview.json" is in "licensing" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
 
     Scenario: for an authority get weekly data for the top 3 licences
               between two points in time
diff --git a/features/read_api/filter.feature b/features/read_api/filter.feature
index c7afa659..3e7731c8 100644
--- a/features/read_api/filter.feature
+++ b/features/read_api/filter.feature
@@ -12,8 +12,9 @@ Feature: filtering queries for read api
         then I should get back a status of "400"
 
     Scenario: querying for data between two points
-        Given "licensing.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "licensing.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo?start_at=2012-12-12T01:01:02%2B00:00&end_at=2012-12-14T00:00:00%2B00:00"
         then I should get back a status of "200"
         and the JSON should have "1" results
@@ -21,8 +22,9 @@ Feature: filtering queries for read api
 
 
     Scenario: filtering by a key and value
-        Given "licensing.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "licensing.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo?filter_by=authority:Camden"
         then I should get back a status of "200"
         and the JSON should have "2" results
@@ -39,16 +41,18 @@ Feature: filtering queries for read api
 
     Scenario: querying for data between two points and filtered by a key and value
-        Given "licensing.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "licensing.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo?start_at=2012-12-13T00:00:02%2B00:00&end_at=2012-12-19T00:00:00%2B00:00&filter_by=type:success"
         then I should get back a status of "200"
         and the JSON should have "1" results
         and the "1st" result should be "{"_timestamp": "2012-12-13T01:01:01+00:00", "licence_name": "Temporary events notice", "interaction": "success", "authority": "Westminster", "type": "success", "_id": "1236"}"
 
     Scenario: querying for boolean kind of data
-        Given "dinosaurs.json" is in "lizards" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "dinosaurs.json" is in "lizards" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/lizards?filter_by=eats_people:true"
         then I should get back a status of "200"
         and the JSON should have "3" results
diff --git a/features/read_api/group.feature b/features/read_api/group.feature
index e770609b..71d3ce0b 100644
--- a/features/read_api/group.feature
+++ b/features/read_api/group.feature
@@ -17,8 +17,6 @@ Feature: grouping queries for read api
         then I should get back a status of "200"
         and the JSON should have "2" results
         and the "2nd" result should be "{"authority": "Westminster", "_count": 3}"
-
-        Given "licensing_2.json" is in "foo" bucket
         when I go to "/foo?group_by=licence_name&filter_by=authority:Westminster"
         then I should get back a status of "200"
         and the JSON should have "2" results
diff --git a/features/read_api/querying_from_service_data_endpoint.feature b/features/read_api/querying_from_service_data_endpoint.feature
index c126fff8..66613571 100644
--- a/features/read_api/querying_from_service_data_endpoint.feature
+++ b/features/read_api/querying_from_service_data_endpoint.feature
@@ -2,10 +2,11 @@ Feature: Querying data from service-data endpoint
 
     Scenario: querying data
-        Given "dinosaurs.json" is in "rawr" bucket
-        and bucket setting data_group is "dinosaurs"
-        and bucket setting data_type is "taxonomy"
-        and bucket setting raw_queries_allowed is true
+        Given "dinosaurs.json" is in "rawr" bucket with settings
+            | key | value |
+            | data_group | "dinosaurs" |
+            | data_type | "taxonomy" |
+            | raw_queries_allowed | true |
         when I go to "/data/dinosaurs/taxonomy?filter_by=eats_people:true"
         then I should get back a status of "200"
         and the JSON should have "3" results
diff --git a/features/read_api/read_api.feature b/features/read_api/read_api.feature
index 7a6970cc..b339e460 100644
--- a/features/read_api/read_api.feature
+++ b/features/read_api/read_api.feature
@@ -2,19 +2,22 @@ Feature: the performance platform read api
 
     Scenario: getting all the data in a bucket
-        Given "licensing.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "licensing.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo"
         then I should get back a status of "200"
         and the JSON should have "6" results
 
     Scenario: my data does not have timestamps
-        Given "dinosaurs.json" is in "rawr" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "dinosaurs.json" is in "rawr" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/rawr"
         then I should get back a status of "200"
         and the JSON should have "4" results
 
     Scenario: querying a bucket that does not exist
-        When I go to "/foobar"
-        then I should get back a status of "404"
+        Given Stagecraft is running
+        when I go to "/foobar"
+        then I should get back a status of "404"
diff --git a/features/read_api/sort_and_limit.feature b/features/read_api/sort_and_limit.feature
index 252fdea0..634124e4 100644
--- a/features/read_api/sort_and_limit.feature
+++ b/features/read_api/sort_and_limit.feature
@@ -2,24 +2,27 @@ Feature: sorting and limiting
 
     Scenario: Sort the data on a key that has a numeric value in ascending order
-        Given "sort_and_limit.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "sort_and_limit.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo?sort_by=value:ascending"
         then I should get back a status of "200"
         and the "1st" result should have "value" equaling the integer "3"
         and the "last" result should have "value" equaling the integer "8"
 
     Scenario: Sort the data on a key that has a numeric value in descending order
-        Given "sort_and_limit.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "sort_and_limit.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo?sort_by=value:descending"
         then I should get back a status of "200"
         and the "1st" result should have "value" equaling the integer "8"
         and the "last" result should have "value" equaling the integer "3"
 
     Scenario: Limit the data to first 3 elements
-        Given "sort_and_limit.json" is in "foo" bucket
-        and bucket setting raw_queries_allowed is true
+        Given "sort_and_limit.json" is in "foo" bucket with settings
+            | key | value |
+            | raw_queries_allowed | true |
         when I go to "/foo?limit=3"
         then I should get back a status of "200"
         and the JSON should have "3" results
"/foo?limit=3" then I should get back a status of "200" and the JSON should have "3" results diff --git a/features/read_api/status.feature b/features/read_api/status.feature index 28ccf0ff..a513b833 100644 --- a/features/read_api/status.feature +++ b/features/read_api/status.feature @@ -2,19 +2,25 @@ Feature: the read/status api Scenario: checking an in-date bucket - Given I have a record updated "10 seconds" ago in the "recent" bucket - and bucket setting max_age_expected is 60 + Given I have a bucket named "recent" with settings + | key | value | + | max_age_expected | 60 | + and I have a record updated "10 seconds" ago in the "recent" bucket when I go to "/_status/buckets" then I should get back a status of "200" Scenario: checking an out-of-date bucket - Given I have a record updated "10 seconds" ago in the "recent" bucket - and bucket setting max_age_expected is 1 + Given I have a bucket named "recent" with settings + | key | value | + | max_age_expected | 1 | + and I have a record updated "10 seconds" ago in the "recent" bucket when I go to "/_status/buckets" then I should get back a status of "500" Scenario: checking a bucket with no max age expected - Given I have a record updated "10 seconds" ago in the "recent" bucket - and bucket setting max_age_expected is None + Given I have a bucket named "recent" with settings + | key | value | + | max_age_expected | None | + and I have a record updated "10 seconds" ago in the "recent" bucket when I go to "/_status/buckets" then I should get back a status of "200" diff --git a/features/steps/read_api.py b/features/steps/read_api.py index db83da69..b3aecb21 100644 --- a/features/steps/read_api.py +++ b/features/steps/read_api.py @@ -7,6 +7,7 @@ import datetime import re import pytz +from features.support.stagecraft import StagecraftService FIXTURE_PATH = os.path.join(os.path.dirname(__file__), '..', 'fixtures') @@ -16,7 +17,37 @@ def step(context): context.client.set_config_parameter('PREVENT_RAW_QUERIES', True) -def ensure_bucket_exists(context, bucket_name): +def ensure_bucket_exists(context, bucket_name, settings={}): + # these should mostly match the default BucketConfig.__new__() kwargs + response = { + 'name': bucket_name, + 'data_group': bucket_name, + 'data_type': bucket_name, + 'raw_queries_allowed': False, + 'bearer_token': '%s-bearer-token' % bucket_name, + 'upload_format': 'csv', + 'upload_filters': ['backdrop.core.upload.filters.first_sheet_filter'], + 'auto_ids': None, + 'queryable': True, + 'realtime': False, + 'capped_size': 5040, + 'max_age_expected': 2678400, + } + + response.update(settings) + + url_response_dict = { + ('GET', u'data-sets/{}'.format(bucket_name)): response, + ('GET', u'data-sets/'): [response], + ('GET', u'data-sets?data-group={}&data-type={}'.format( + response['data_group'], response['data_type'])): [response], + } + + if 'mock_stagecraft_server' in context and context.mock_stagecraft_server: + context.mock_stagecraft_server.stop() + context.mock_stagecraft_server = StagecraftService(8080, url_response_dict) + context.mock_stagecraft_server.start() + context.bucket = bucket_name bucket_data = { '_id': bucket_name, @@ -40,9 +71,31 @@ def step(context, fixture_name, bucket_name): context.client.storage()[bucket_name].save(obj) +def get_bucket_settings_from_context_table(table): + def to_py(string_in): + if string_in == "None": + return None + else: + return json.loads(string_in) + return {row['key']: to_py(row['value']) for row in table} + + +@given('"{fixture_name}" is in "{bucket_name}" bucket with 
diff --git a/features/support/stagecraft.py b/features/support/stagecraft.py
new file mode 100644
index 00000000..201a5441
--- /dev/null
+++ b/features/support/stagecraft.py
@@ -0,0 +1,60 @@
+from flask import Flask, Response, abort, json, request
+from multiprocessing import Process
+import requests
+
+from features.support.support import wait_until
+
+
+class StagecraftService(object):
+    def __init__(self, port, url_response_dict):
+        self.__port = port
+        self.__url_response_dict = url_response_dict
+        self.__app = Flask('fake_stagecraft')
+        self.__proc = None
+
+        @self.__app.route('/', defaults={'path': ''})
+        @self.__app.route('/<path:path>')
+        def catch_all(path):
+            if path == "_is_fake_server_up":
+                return Response('Yes', 200)
+
+            path_and_query = path
+            if len(request.query_string) > 0:
+                path_and_query += '?{}'.format(request.query_string)
+
+            key = (request.method, path_and_query)
+
+            resp_item = self.__url_response_dict.get(key, None)
+            if resp_item is None:
+                abort(404)
+            return Response(json.dumps(resp_item), mimetype='application/json')
+
+    def start(self):
+        if self.stopped():
+            self.__proc = Process(target=self._run)
+            self.__proc.start()
+        wait_until(self.running)
+
+    def running(self):
+        if self.__proc is None:
+            return False
+        try:
+            url = 'http://127.0.0.1:{}/_is_fake_server_up'.format(self.__port)
+            return requests.get(url).status_code == 200
+        except:
+            return False
+
+    def stopped(self):
+        return not self.running()
+
+    def stop(self):
+        if self.running():
+            self.__proc.terminate()
+            self.__proc.join()
+            self.__proc = None
+        wait_until(self.stopped)
+
+    def _run(self):
+        # reloading is disabled to stop the Flask webserver starting up twice
+        # when used in conjunction with multiprocessing
+        self.__app.run(port=self.__port, use_reloader=False)
diff --git a/features/support/support.py b/features/support/support.py
index e0190248..2877a872 100644
--- a/features/support/support.py
+++ b/features/support/support.py
@@ -6,11 +6,10 @@
 
 def wait_until(condition, timeout=15, interval=0.1):
     deadline = time.time() + timeout
-    while time.time() < deadline:
-        if condition():
-            return
+    while not condition():
+        if time.time() >= deadline:
+            raise RuntimeError('timeout')
         time.sleep(interval)
-    raise RuntimeError("timeout")
 
 
 class BaseClient(object):
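Note: the reworked wait_until is behaviour-preserving apart from one detail — the condition is now evaluated before the deadline check, so it is tried at least once even with a zero timeout. Typical call sites, as used by the StagecraftService above (server is illustrative):

    wait_until(server.running)                      # defaults: 15s timeout, 0.1s poll
    wait_until(server.stopped, timeout=5, interval=0.5)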
diff --git a/features/write_api/write_api.feature b/features/write_api/write_api.feature
index 3d4307af..f66970ef 100644
--- a/features/write_api/write_api.feature
+++ b/features/write_api/write_api.feature
@@ -43,10 +43,11 @@ Feature: the performance platform write api
 
     Scenario: posting to a bucket with data group and data type
         Given I have the data in "timestamps.json"
-        and I have a bucket named "data_with_times"
+        and I have a bucket named "data_with_times" with settings
+            | key | value |
+            | data_group | "transaction" |
+            | data_type | "timings" |
         and I use the bearer token for the bucket
-        and bucket setting data_group is "transaction"
-        and bucket setting data_type is "timings"
         when I post to the specific path "/data/transaction/timings"
         then I should get back a status of "200"
         and the stored data should contain "3" "_week_start_at" on "2013-03-11"
diff --git a/requirements.txt b/requirements.txt
index f1fae255..ae3949af 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,6 +8,7 @@ pymongo==2.6.3
 python-dateutil==2.1
 pytz==2013b
 rauth==0.5.5
+requests==1.2.3
 statsd==2.0.3
 xlrd==0.9.2
 logstash_formatter==0.5.7
diff --git a/requirements_for_tests.txt b/requirements_for_tests.txt
index 7b8aa67a..d0980f7b 100644
--- a/requirements_for_tests.txt
+++ b/requirements_for_tests.txt
@@ -7,7 +7,6 @@ mock==1.0.1
 nose==1.3.0
 pep8==1.4.5
 PyHamcrest==1.7.1
-requests==1.2.3
 selenium==2.37.2
 splinter==0.5.4
 freezegun==0.1.11
diff --git a/tests/admin/test_file_upload_integration.py b/tests/admin/test_file_upload_integration.py
index 87d834f6..416d0b72 100644
--- a/tests/admin/test_file_upload_integration.py
+++ b/tests/admin/test_file_upload_integration.py
@@ -3,10 +3,10 @@ import datetime
 
 from hamcrest import assert_that, has_entry, is_, has_entries
 from pymongo import MongoClient
 
-from tests.support.bucket import stub_bucket_retrieve_by_name, setup_bucket, stub_user_retrieve_by_email
+from tests.support.bucket import fake_bucket_exists, stub_user_retrieve_by_email
 from tests.support.test_helpers import has_status
 from tests.admin.support.clamscan import stub_clamscan
 from tests.admin.support.oauth_test_case import OauthTestCase
 
 class TestFileUploadIntegration(OauthTestCase):
@@ -19,7 +19,7 @@ def _sign_in(self, email):
             name="test",
             email=email)
 
-    @stub_bucket_retrieve_by_name("test", upload_format="csv")
+    @fake_bucket_exists("test", upload_format="csv")
     @stub_user_retrieve_by_email("test@example.com", buckets=["test"])
     @stub_clamscan(is_virus=False)
     def test_accepts_content_type_for_csv(self):
@@ -33,7 +33,7 @@
 
         assert_that(response, has_status(200))
 
-    @stub_bucket_retrieve_by_name("test", upload_format="csv")
+    @fake_bucket_exists("test", upload_format="csv")
     @stub_user_retrieve_by_email("test@example.com", buckets=["test"])
     @stub_clamscan(is_virus=True)
     def test_rejects_content_type_for_exe(self):
@@ -48,7 +48,7 @@
 
         assert_that(response, has_status(400))
 
-    @stub_bucket_retrieve_by_name("test_upload_integration", upload_format="csv")
+    @fake_bucket_exists("test_upload_integration", upload_format="csv")
     @stub_user_retrieve_by_email("test@example.com", buckets=["test_upload_integration"])
     @stub_clamscan(is_virus=False)
     def test_data_hits_the_database_when_uploading_csv(self):
@@ -69,7 +69,7 @@
         assert_that(record, has_entry('_id', 'hello'))
         assert_that(record, has_entry('value', 'some_value'))
 
-    @stub_bucket_retrieve_by_name("integration_test_excel_bucket", upload_format="excel")
+    @fake_bucket_exists("integration_test_excel_bucket", upload_format="excel")
     @stub_user_retrieve_by_email("test@example.com", buckets=["integration_test_excel_bucket"])
     @stub_clamscan(is_virus=False)
     def test_data_hits_the_database_when_uploading_xlsx(self):
@@ -91,7 +91,7 @@
         assert_that(record, has_entry('age', 27))
         assert_that(record, has_entry('nationality', 'Polish'))
 
-    @setup_bucket("evl_ceg_data", data_group="group", data_type="type", upload_format="excel", upload_filters=["backdrop.core.upload.filters.first_sheet_filter", "backdrop.contrib.evl_upload_filters.ceg_volumes"])
+    @fake_bucket_exists("evl_ceg_data", data_group="group", data_type="type", upload_format="excel", upload_filters=["backdrop.core.upload.filters.first_sheet_filter", "backdrop.contrib.evl_upload_filters.ceg_volumes"])
     @stub_user_retrieve_by_email("test@example.com", buckets=["evl_ceg_data"])
     @stub_clamscan(is_virus=False)
     def test_upload_applies_filters(self):
@@ -118,7 +118,7 @@
             "_timestamp": datetime.datetime(2007, 7, 1, 0, 0),
         }))
 
-    @setup_bucket("bucket_with_timestamp_auto_id", data_group="group", data_type="type", upload_format="excel", auto_ids=["_timestamp", "key"])
+    @fake_bucket_exists("bucket_with_timestamp_auto_id", data_group="group", data_type="type", upload_format="excel", auto_ids=["_timestamp", "key"])
     @stub_user_retrieve_by_email("test@example.com", buckets=["bucket_with_timestamp_auto_id"])
     @stub_clamscan(is_virus=False)
     def test_upload_auto_generate_ids(self):
upload_format="csv") + @fake_bucket_exists("test", upload_format="csv") @stub_user_retrieve_by_email("bob@example.com", buckets=["test"]) def test_upload_page_is_available_to_user_with_permission(self): self.given_user_is_signed_in_as(email="bob@example.com") diff --git a/tests/core/integration/test_repository.py b/tests/core/integration/test_repository.py index cba33fc9..d85053d3 100644 --- a/tests/core/integration/test_repository.py +++ b/tests/core/integration/test_repository.py @@ -1,4 +1,10 @@ import unittest +import mock + +from contextlib import contextmanager + +from os.path import dirname, join as pjoin + from hamcrest import assert_that, is_, has_entries from backdrop.core.bucket import BucketConfig from backdrop.core.database import Database @@ -9,6 +15,15 @@ PORT = 27017 DB_NAME = 'performance_platform_test' BUCKET = 'buckets' +STAGECRAFT_URL = 'fake_url_should_not_be_called' +STAGECRAFT_DATA_SET_QUERY_TOKEN = 'fake_token_should_not_be_used' + + +@contextmanager +def fixture(name): + filename = pjoin(dirname(__file__), '..', '..', 'fixtures', name) + with open(filename, 'r') as f: + yield f.read() class TestBucketRepositoryIntegration(unittest.TestCase): @@ -17,47 +32,27 @@ def setUp(self): self.db = Database(HOST, PORT, DB_NAME) self.db._mongo.drop_database(DB_NAME) self.mongo_collection = self.db.get_collection(BUCKET) - self.repository = BucketConfigRepository(self.db) - - def test_saving_a_config_with_default_values(self): - config = BucketConfig("some_bucket", data_group="group", data_type="type") - - self.repository.save(config) - - results = list(self.mongo_collection._collection.find()) - - assert_that(len(results), is_(1)) - assert_that(results[0], has_entries({ - "name": "some_bucket", - "raw_queries_allowed": False, - "bearer_token": None, - "upload_format": "csv" - })) - - def test_saving_a_realtime_config_creates_a_capped_collection(self): - config = BucketConfig("realtime_bucket", data_group="group", data_type="type", realtime=True) - - self.repository.save(config) - - assert_that(self.db.mongo_database["realtime_bucket"].options(), is_({"capped": True, "size": 5040})) + self.repository = BucketConfigRepository( + STAGECRAFT_URL, STAGECRAFT_DATA_SET_QUERY_TOKEN) def test_retrieves_config_by_name(self): - self.repository.save(BucketConfig("not_my_bucket", data_group="group", data_type="type")) - self.repository.save(BucketConfig("my_bucket", data_group="group", data_type="type")) - self.repository.save(BucketConfig("someones_bucket", data_group="group", data_type="type")) + with fixture('stagecraft_get_single_data_set.json') as content: + with mock.patch('backdrop.core.repository._get_url') as mocked: + mocked.return_value = content + config = self.repository.retrieve(name="govuk_visitors") + mocked.assert_called_once_with( + 'fake_url_should_not_be_called/data-sets/govuk_visitors') - config = self.repository.retrieve(name="my_bucket") - - assert_that(config.name, is_("my_bucket")) + assert_that(config.name, is_('govuk_visitors')) def test_retrieves_config_for_service_and_data_type(self): - self.repository.save(BucketConfig("b1", data_group="my_service", data_type="my_type")) - self.repository.save(BucketConfig("b2", data_group="my_service", data_type="not_my_type")) - self.repository.save(BucketConfig("b3", data_group="not_my_service", data_type="my_type")) - - config = self.repository.get_bucket_for_query(data_group="my_service", data_type="my_type") + with fixture('stagecraft_query_data_group_type.json') as content: + with 
diff --git a/tests/core/integration/test_repository.py b/tests/core/integration/test_repository.py
index cba33fc9..d85053d3 100644
--- a/tests/core/integration/test_repository.py
+++ b/tests/core/integration/test_repository.py
@@ -1,4 +1,10 @@
 import unittest
+import mock
+
+from contextlib import contextmanager
+
+from os.path import dirname, join as pjoin
+
 from hamcrest import assert_that, is_, has_entries
 from backdrop.core.bucket import BucketConfig
 from backdrop.core.database import Database
@@ -9,6 +15,15 @@
 PORT = 27017
 DB_NAME = 'performance_platform_test'
 BUCKET = 'buckets'
+STAGECRAFT_URL = 'fake_url_should_not_be_called'
+STAGECRAFT_DATA_SET_QUERY_TOKEN = 'fake_token_should_not_be_used'
+
+
+@contextmanager
+def fixture(name):
+    filename = pjoin(dirname(__file__), '..', '..', 'fixtures', name)
+    with open(filename, 'r') as f:
+        yield f.read()
 
 
 class TestBucketRepositoryIntegration(unittest.TestCase):
@@ -17,47 +32,27 @@
     def setUp(self):
         self.db = Database(HOST, PORT, DB_NAME)
         self.db._mongo.drop_database(DB_NAME)
         self.mongo_collection = self.db.get_collection(BUCKET)
-        self.repository = BucketConfigRepository(self.db)
-
-    def test_saving_a_config_with_default_values(self):
-        config = BucketConfig("some_bucket", data_group="group", data_type="type")
-
-        self.repository.save(config)
-
-        results = list(self.mongo_collection._collection.find())
-
-        assert_that(len(results), is_(1))
-        assert_that(results[0], has_entries({
-            "name": "some_bucket",
-            "raw_queries_allowed": False,
-            "bearer_token": None,
-            "upload_format": "csv"
-        }))
-
-    def test_saving_a_realtime_config_creates_a_capped_collection(self):
-        config = BucketConfig("realtime_bucket", data_group="group", data_type="type", realtime=True)
-
-        self.repository.save(config)
-
-        assert_that(self.db.mongo_database["realtime_bucket"].options(), is_({"capped": True, "size": 5040}))
+        self.repository = BucketConfigRepository(
+            STAGECRAFT_URL, STAGECRAFT_DATA_SET_QUERY_TOKEN)
 
     def test_retrieves_config_by_name(self):
-        self.repository.save(BucketConfig("not_my_bucket", data_group="group", data_type="type"))
-        self.repository.save(BucketConfig("my_bucket", data_group="group", data_type="type"))
-        self.repository.save(BucketConfig("someones_bucket", data_group="group", data_type="type"))
+        with fixture('stagecraft_get_single_data_set.json') as content:
+            with mock.patch('backdrop.core.repository._get_url') as mocked:
+                mocked.return_value = content
+                config = self.repository.retrieve(name="govuk_visitors")
+                mocked.assert_called_once_with(
+                    'fake_url_should_not_be_called/data-sets/govuk_visitors')
 
-        config = self.repository.retrieve(name="my_bucket")
-
-        assert_that(config.name, is_("my_bucket"))
+        assert_that(config.name, is_('govuk_visitors'))
 
     def test_retrieves_config_for_service_and_data_type(self):
-        self.repository.save(BucketConfig("b1", data_group="my_service", data_type="my_type"))
-        self.repository.save(BucketConfig("b2", data_group="my_service", data_type="not_my_type"))
-        self.repository.save(BucketConfig("b3", data_group="not_my_service", data_type="my_type"))
-
-        config = self.repository.get_bucket_for_query(data_group="my_service", data_type="my_type")
+        with fixture('stagecraft_query_data_group_type.json') as content:
+            with mock.patch('backdrop.core.repository._get_url') as mocked:
+                mocked.return_value = content
+                config = self.repository.get_bucket_for_query(
+                    data_group="govuk", data_type="realtime")
 
-        assert_that(config.name, is_("b1"))
+        assert_that(config.name, is_("govuk_realtime"))
 
 
 class TestUserRepositoryIntegration(object):
BucketConfig("capped_bucket", - data_group="data_group", data_type="type", - realtime=True, capped_size=7665) - - self.bucket_repo.save(capped_bucket) - - self.db.create_capped_collection.assert_called_with("capped_bucket", 7665) - - def test_saving_a_realtime_bucket_does_not_create_a_collection_if_creation_flag_is_off(self): - capped_bucket = BucketConfig("capped_bucket", - data_group="data_group", data_type="type", - realtime=True, capped_size=7665) - - self.bucket_repo.save(capped_bucket, create_bucket=False) - - assert not self.db.create_capped_collection.called - def test_retrieving_non_existent_bucket_returns_none(self): self.mongo_collection.find_one.return_value = None - bucket = self.bucket_repo.retrieve(name="bucket_name") + + with mock.patch('backdrop.core.repository._get_url') as mocked: + mocked.return_value = None + bucket = self.bucket_repo.retrieve(name="non_existent") assert_that(bucket, is_(None)) diff --git a/tests/fixtures/stagecraft_get_single_data_set.json b/tests/fixtures/stagecraft_get_single_data_set.json new file mode 100644 index 00000000..9077ae3b --- /dev/null +++ b/tests/fixtures/stagecraft_get_single_data_set.json @@ -0,0 +1,14 @@ +{ + "name": "govuk_visitors", + "data_group": "govuk", + "data_type": "visitors", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 +} \ No newline at end of file diff --git a/tests/fixtures/stagecraft_list_data_sets.json b/tests/fixtures/stagecraft_list_data_sets.json new file mode 100644 index 00000000..870f2e3a --- /dev/null +++ b/tests/fixtures/stagecraft_list_data_sets.json @@ -0,0 +1,72 @@ +[ + { + "name": "govuk_visitors", + "data_group": "govuk", + "data_type": "visitors", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 + }, + { + "name": "govuk_realtime", + "data_group": "govuk", + "data_type": "realtime", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 + }, + { + "name": "govuk_blarg", + "data_group": "govuk", + "data_type": "blarg", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 + }, + { + "name": "govuk_meh", + "data_group": "govuk", + "data_type": "meh", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 + }, + { + "name": "govuk_abc", + "data_group": "govuk", + "data_type": "abc", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 + } +] \ No newline at end of file diff --git a/tests/fixtures/stagecraft_query_data_group_type.json b/tests/fixtures/stagecraft_query_data_group_type.json new file mode 100644 index 00000000..173c394f --- /dev/null +++ b/tests/fixtures/stagecraft_query_data_group_type.json @@ -0,0 +1,16 @@ +[ + { + "name": "govuk_realtime", + 
"data_group": "govuk", + "data_type": "realtime", + "raw_queries_allowed": true, + "bearer_token": "", + "upload_format": "", + "upload_filters": "", + "auto_ids": "", + "queryable": true, + "realtime": false, + "capped_size": null, + "max_age_expected": 86400 + } +] diff --git a/tests/read/test_raw_event_access.py b/tests/read/test_raw_event_access.py index 14ef288f..ff706c21 100644 --- a/tests/read/test_raw_event_access.py +++ b/tests/read/test_raw_event_access.py @@ -2,7 +2,7 @@ from hamcrest import assert_that from backdrop.read import api from tests.support.test_helpers import is_bad_request, is_ok -from tests.support.bucket import stub_bucket_retrieve_by_name +from tests.support.bucket import fake_bucket_exists class TestRawEventAccess(unittest.TestCase): @@ -13,17 +13,17 @@ def setUp(self): def tearDown(self): api.app.config['RAW_QUERIES_ALLOWED']['foo'] = True - @stub_bucket_retrieve_by_name("foo") + @fake_bucket_exists("foo") def test_that_querying_for_raw_events_is_disabled(self): response = self.app.get("/foo?filter_by=foo:bar") assert_that(response, is_bad_request()) - @stub_bucket_retrieve_by_name("bar") + @fake_bucket_exists("bar") def test_that_queries_with_group_by_are_allowed(self): response = self.app.get("/bar?filter_by=foo:bar&group_by=pie") assert_that(response, is_ok()) - @stub_bucket_retrieve_by_name("pub") + @fake_bucket_exists("pub") def test_that_querying_for_less_than_7_days_periods_is_disabled(self): response = self.app.get( "/pub?" @@ -34,14 +34,14 @@ def test_that_querying_for_less_than_7_days_periods_is_disabled(self): assert_that(response, is_bad_request()) - @stub_bucket_retrieve_by_name("foo") + @fake_bucket_exists("foo") def test_that_querying_for_more_than_7_days_is_valid(self): response = self.app.get("/foo?group_by=pie" "&start_at=2013-04-01T00:00:00Z" "&end_at=2013-04-08T00:00:00Z") assert_that(response, is_ok()) - @stub_bucket_retrieve_by_name("foo") + @fake_bucket_exists("foo") def test_that_non_midnight_values_are_disallowed_for_start_at(self): response = self.app.get("/foo?group_by=pie" "&start_at=2012-01-01T00:01:00Z" @@ -61,7 +61,7 @@ def test_that_non_midnight_values_are_disallowed_for_start_at(self): assert_that(response, is_bad_request()) - @stub_bucket_retrieve_by_name("foo") + @fake_bucket_exists("foo") def test_that_non_midnight_values_are_disallowed_for_end_at(self): response = self.app.get("/foo?group_by=pie" "&end_at=2012-01-20T00:01:00Z" @@ -81,7 +81,7 @@ def test_that_non_midnight_values_are_disallowed_for_end_at(self): assert_that(response, is_bad_request()) - @stub_bucket_retrieve_by_name("foo") + @fake_bucket_exists("foo") def test_on_invalid_dates(self): response = self.app.get("/foo?group_by=pie" "&start_at=foo" diff --git a/tests/read/test_read_api_query_endpoint.py b/tests/read/test_read_api_query_endpoint.py index e53507ac..ce6c1007 100644 --- a/tests/read/test_read_api_query_endpoint.py +++ b/tests/read/test_read_api_query_endpoint.py @@ -7,7 +7,7 @@ from backdrop.core.timeseries import WEEK from backdrop.read import api from backdrop.read.query import Query -from tests.support.bucket import stub_bucket_retrieve_by_name +from tests.support.bucket import fake_bucket_exists from tests.support.test_helpers import has_status from warnings import warn @@ -26,7 +26,7 @@ class QueryingApiTestCase(unittest.TestCase): def setUp(self): self.app = api.app.test_client() - @stub_bucket_retrieve_by_name("foo", raw_queries_allowed=True) + @fake_bucket_exists("foo", raw_queries_allowed=True) @patch('backdrop.core.bucket.Bucket.query') def 
diff --git a/tests/read/test_raw_event_access.py b/tests/read/test_raw_event_access.py
index 14ef288f..ff706c21 100644
--- a/tests/read/test_raw_event_access.py
+++ b/tests/read/test_raw_event_access.py
@@ -2,7 +2,7 @@
 from hamcrest import assert_that
 from backdrop.read import api
 from tests.support.test_helpers import is_bad_request, is_ok
-from tests.support.bucket import stub_bucket_retrieve_by_name
+from tests.support.bucket import fake_bucket_exists
 
 
 class TestRawEventAccess(unittest.TestCase):
@@ -13,17 +13,17 @@ def setUp(self):
         self.app = api.app.test_client()
 
     def tearDown(self):
         api.app.config['RAW_QUERIES_ALLOWED']['foo'] = True
 
-    @stub_bucket_retrieve_by_name("foo")
+    @fake_bucket_exists("foo")
     def test_that_querying_for_raw_events_is_disabled(self):
         response = self.app.get("/foo?filter_by=foo:bar")
         assert_that(response, is_bad_request())
 
-    @stub_bucket_retrieve_by_name("bar")
+    @fake_bucket_exists("bar")
     def test_that_queries_with_group_by_are_allowed(self):
         response = self.app.get("/bar?filter_by=foo:bar&group_by=pie")
         assert_that(response, is_ok())
 
-    @stub_bucket_retrieve_by_name("pub")
+    @fake_bucket_exists("pub")
     def test_that_querying_for_less_than_7_days_periods_is_disabled(self):
         response = self.app.get(
             "/pub?"
@@ -34,14 +34,14 @@
 
         assert_that(response, is_bad_request())
 
-    @stub_bucket_retrieve_by_name("foo")
+    @fake_bucket_exists("foo")
     def test_that_querying_for_more_than_7_days_is_valid(self):
         response = self.app.get("/foo?group_by=pie"
                                 "&start_at=2013-04-01T00:00:00Z"
                                 "&end_at=2013-04-08T00:00:00Z")
         assert_that(response, is_ok())
 
-    @stub_bucket_retrieve_by_name("foo")
+    @fake_bucket_exists("foo")
     def test_that_non_midnight_values_are_disallowed_for_start_at(self):
         response = self.app.get("/foo?group_by=pie"
                                 "&start_at=2012-01-01T00:01:00Z"
@@ -61,7 +61,7 @@
 
         assert_that(response, is_bad_request())
 
-    @stub_bucket_retrieve_by_name("foo")
+    @fake_bucket_exists("foo")
     def test_that_non_midnight_values_are_disallowed_for_end_at(self):
         response = self.app.get("/foo?group_by=pie"
                                 "&end_at=2012-01-20T00:01:00Z"
@@ -81,7 +81,7 @@
 
         assert_that(response, is_bad_request())
 
-    @stub_bucket_retrieve_by_name("foo")
+    @fake_bucket_exists("foo")
     def test_on_invalid_dates(self):
         response = self.app.get("/foo?group_by=pie"
                                 "&start_at=foo"
diff --git a/tests/read/test_read_api_query_endpoint.py b/tests/read/test_read_api_query_endpoint.py
index e53507ac..ce6c1007 100644
--- a/tests/read/test_read_api_query_endpoint.py
+++ b/tests/read/test_read_api_query_endpoint.py
@@ -7,7 +7,7 @@
 from backdrop.core.timeseries import WEEK
 from backdrop.read import api
 from backdrop.read.query import Query
-from tests.support.bucket import stub_bucket_retrieve_by_name
+from tests.support.bucket import fake_bucket_exists
 from tests.support.test_helpers import has_status
 from warnings import warn
 
@@ -26,7 +26,7 @@ class QueryingApiTestCase(unittest.TestCase):
     def setUp(self):
         self.app = api.app.test_client()
 
-    @stub_bucket_retrieve_by_name("foo", raw_queries_allowed=True)
+    @fake_bucket_exists("foo", raw_queries_allowed=True)
     @patch('backdrop.core.bucket.Bucket.query')
     def test_filter_by_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -34,7 +34,7 @@
         self.app.get('/foo?filter_by=zombies:yes')
         mock_query.assert_called_with(
             Query.create(filter_by=[[u'zombies', u'yes']]))
 
-    @stub_bucket_retrieve_by_name("foo")
+    @fake_bucket_exists("foo")
     @patch('backdrop.core.bucket.Bucket.query')
     def test_group_by_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -42,7 +42,7 @@
         self.app.get('/foo?group_by=zombies')
         mock_query.assert_called_with(
             Query.create(group_by=u'zombies'))
 
-    @stub_bucket_retrieve_by_name("foo", raw_queries_allowed=True)
+    @fake_bucket_exists("foo", raw_queries_allowed=True)
     @patch('backdrop.core.bucket.Bucket.query')
     def test_query_with_start_and_end_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -57,7 +57,7 @@
         mock_query.assert_called_with(
             Query.create(start_at=expected_start_at, end_at=expected_end_at))
 
-    @stub_bucket_retrieve_by_name("foo", raw_queries_allowed=True)
+    @fake_bucket_exists("foo", raw_queries_allowed=True)
     @patch('backdrop.core.bucket.Bucket.query')
     def test_sort_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -73,7 +73,7 @@
         mock_query.assert_called_with(
             Query.create(sort_by=["value", "descending"]))
 
-    @stub_bucket_retrieve_by_name("bucket", queryable=False)
+    @fake_bucket_exists("bucket", queryable=False)
     def test_returns_404_when_bucket_is_not_queryable(self):
         response = self.app.get('/bucket')
         assert_that(response, has_status(404))
@@ -83,24 +83,24 @@
 class PreflightChecksApiTestCase(unittest.TestCase):
     def setUp(self):
         self.app = api.app.test_client()
 
-    @stub_bucket_retrieve_by_name("bucket")
+    @fake_bucket_exists("bucket")
    def test_cors_preflight_requests_have_empty_body(self):
         response = self.app.open('/bucket', method='OPTIONS')
         assert_that(response.status_code, is_(200))
         assert_that(response.data, is_(""))
 
-    @stub_bucket_retrieve_by_name("bucket")
+    @fake_bucket_exists("bucket")
     def test_cors_preflight_are_allowed_from_all_origins(self):
         response = self.app.open('/bucket', method='OPTIONS')
         assert_that(response.headers['Access-Control-Allow-Origin'], is_('*'))
 
-    @stub_bucket_retrieve_by_name("bucket")
+    @fake_bucket_exists("bucket")
     def test_cors_preflight_result_cache(self):
         response = self.app.open('/bucket', method='OPTIONS')
         assert_that(response.headers['Access-Control-Max-Age'], is_('86400'))
 
-    @stub_bucket_retrieve_by_name("bucket")
+    @fake_bucket_exists("bucket")
     def test_cors_requests_can_cache_control(self):
         response = self.app.open('/bucket', method='OPTIONS')
         assert_that(response.headers['Access-Control-Allow-Headers'],
@@ -20,7 +20,7 @@ class QueryingApiTestCase(unittest.TestCase):
     def setUp(self):
         self.app = api.app.test_client()
 
-    @setup_bucket("foo", data_group="some-group", data_type="some-type")
+    @fake_bucket_exists("foo", data_group="some-group", data_type="some-type")
     @patch('backdrop.core.bucket.Bucket.query')
     def test_period_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -34,7 +34,7 @@ def test_period_query_is_executed(self, mock_query):
             start_at=d_tz(2012, 11, 5),
             end_at=d_tz(2012, 12, 3)))
 
-    @setup_bucket("foo", data_group="some-group", data_type="some-type", raw_queries_allowed=True)
+    @fake_bucket_exists("foo", data_group="some-group", data_type="some-type", raw_queries_allowed=True)
     @patch('backdrop.core.bucket.Bucket.query')
     def test_filter_by_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -42,7 +42,7 @@ def test_filter_by_query_is_executed(self, mock_query):
         mock_query.assert_called_with(
             Query.create(filter_by=[[u'zombies', u'yes']]))
 
-    @setup_bucket("foo", data_group="some-group", data_type="some-type")
+    @fake_bucket_exists("foo", data_group="some-group", data_type="some-type")
     @patch('backdrop.core.bucket.Bucket.query')
     def test_group_by_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -50,7 +50,7 @@ def test_group_by_query_is_executed(self, mock_query):
         mock_query.assert_called_with(
             Query.create(group_by=u'zombies'))
 
-    @setup_bucket("foo", data_group="some-group", data_type="some-type", raw_queries_allowed=True)
+    @fake_bucket_exists("foo", data_group="some-group", data_type="some-type", raw_queries_allowed=True)
     @patch('backdrop.core.bucket.Bucket.query')
     def test_query_with_start_and_end_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -66,7 +66,7 @@ def test_query_with_start_and_end_is_executed(self, mock_query):
         mock_query.assert_called_with(
             Query.create(start_at=expected_start_at, end_at=expected_end_at))
 
-    @setup_bucket("foo", data_group="some-group", data_type="some-type")
+    @fake_bucket_exists("foo", data_group="some-group", data_type="some-type")
     @patch('backdrop.core.bucket.Bucket.query')
     def test_group_by_with_period_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -82,7 +82,7 @@ def test_group_by_with_period_is_executed(self, mock_query):
             start_at=d_tz(2012, 11, 5),
             end_at=d_tz(2012, 12, 3)))
 
-    @setup_bucket("foo", data_group="some-group", data_type="some-type", raw_queries_allowed=True)
+    @fake_bucket_exists("foo", data_group="some-group", data_type="some-type", raw_queries_allowed=True)
     @patch('backdrop.core.bucket.Bucket.query')
     def test_sort_query_is_executed(self, mock_query):
         mock_query.return_value = NoneData()
@@ -98,11 +98,12 @@ def test_sort_query_is_executed(self, mock_query):
         mock_query.assert_called_with(
             Query.create(sort_by=["value", "descending"]))
 
-    @setup_bucket("bucket", data_group="some-group", data_type="some-type", queryable=False)
+    @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type", queryable=False)
     def test_returns_404_when_bucket_is_not_queryable(self):
         response = self.app.get('/data/some-group/some-type')
         assert_that(response, has_status(404))
 
+    @fake_no_buckets_exist()
     def test_returns_404_when_bucket_does_not_exist(self):
         response = self.app.get('/data/no-group/no-type')
         assert_that(response, has_status(404))
@@ -113,34 +114,34 @@ def setUp(self):
         self.app = api.app.test_client()
         api.db._mongo.drop_database(api.app.config['DATABASE_NAME'])
 
data_group="some-group", data_type="some-type") + @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type") def test_cors_preflight_requests_have_empty_body(self): response = self.app.open('/data/some-group/some-type', method='OPTIONS') assert_that(response.status_code, is_(200)) assert_that(response.data, is_("")) - @setup_bucket("bucket", data_group="some-group", data_type="some-type") + @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type") def test_cors_preflight_are_allowed_from_all_origins(self): response = self.app.open('/data/some-group/some-type', method='OPTIONS') assert_that(response, has_header('Access-Control-Allow-Origin', '*')) - @setup_bucket("bucket", data_group="some-group", data_type="some-type") + @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type") def test_cors_preflight_result_cache(self): response = self.app.open('/data/some-group/some-type', method='OPTIONS') assert_that(response, has_header('Access-Control-Max-Age', '86400')) - @setup_bucket("bucket", data_group="some-group", data_type="some-type") + @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type") def test_cors_requests_can_cache_control(self): response = self.app.open('/data/some-group/some-type', method='OPTIONS') assert_that(response, has_header('Access-Control-Allow-Headers', 'cache-control')) - @setup_bucket("bucket", data_group="some-group", data_type="some-type", raw_queries_allowed=True) + @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type", raw_queries_allowed=True) def test_max_age_is_30_min_for_non_realtime_buckets(self): response = self.app.get('/data/some-group/some-type') assert_that(response, has_header('Cache-Control', 'max-age=1800, must-revalidate')) - @setup_bucket("bucket", data_group="some-group", data_type="some-type", realtime=True, raw_queries_allowed=True) + @fake_bucket_exists("bucket", data_group="some-group", data_type="some-type", realtime=True, raw_queries_allowed=True) def test_max_age_is_2_min_for_realtime_buckets(self): response = self.app.get('/data/some-group/some-type') diff --git a/tests/support/bucket.py b/tests/support/bucket.py index 9b01156d..3979a2ac 100644 --- a/tests/support/bucket.py +++ b/tests/support/bucket.py @@ -1,46 +1,75 @@ from functools import wraps from mock import patch +from contextlib import contextmanager + from backdrop.core.bucket import BucketConfig from backdrop.core.user import UserConfig -from backdrop.write.api import bucket_repository -def stub_bucket_retrieve_by_name(name, data_group="group", data_type="type", - *bucket_args, **bucket_kwargs): +@contextmanager +def pretend_this_bucket_exists(bucket_config): + # NOTE: To fake this really accurately, one could *actually* create the + # collections with the config provided (ie capped size). As it stands, + # a collection will be automatically created by Mongo when written to. This + # will not use the capped_size specified in bucket_config. 
+    try:
+        namespace = 'backdrop.core.repository.BucketConfigRepository'
+        with patch(namespace + '.retrieve') as retrieve:
+            with patch(namespace + '.get_bucket_for_query') as query:
+                with patch(namespace + '.get_all') as get_all:
+                    def retrieve_side_effect(name):
+                        if name == bucket_config.name:
+                            return bucket_config
+
+                    def query_side_effect(data_group, data_type):
+                        if (data_group == bucket_config.data_group
+                                and data_type == bucket_config.data_type):
+                            return bucket_config
+
+                    retrieve.side_effect = retrieve_side_effect
+                    query.side_effect = query_side_effect
+                    get_all.side_effect = NotImplementedError(
+                        "Need to patch get_all")
+                    yield
+    finally:
+        pass  # NOTE: delete the collection in Mongo here
+
+
+def fake_bucket_exists(name, data_group="group", data_type="type",
+                       *bucket_args, **bucket_kwargs):
     setup_bucket_name = name
 
     def decorator(func):
         @wraps(func)
-        def wrapped_stub_bucket_retrieve_by_name(*args, **kwargs):
-            with patch('backdrop.core.repository.BucketConfigRepository.retrieve') as retrieve:
-                def side_effect(name):
-                    if name == setup_bucket_name:
-                        return BucketConfig(
-                            setup_bucket_name, data_group, data_type,
-                            *bucket_args, **bucket_kwargs
-                        )
-                retrieve.side_effect = side_effect
+        def wrapped_fake_bucket_exists(*args, **kwargs):
+            with pretend_this_bucket_exists(
+                    BucketConfig(
+                        setup_bucket_name,
+                        data_group,
+                        data_type,
+                        *bucket_args,
+                        **bucket_kwargs)):
                 func(*args, **kwargs)
-        return wrapped_stub_bucket_retrieve_by_name
+        return wrapped_fake_bucket_exists
     return decorator
 
 
-def setup_bucket(name, *bucket_args, **bucket_kwargs):
-    setup_bucket_name = name
-
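+# NOTE: fakes an empty Stagecraft: every repository lookup is patched to
+# return no configuration, so a read for any bucket should 404.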
@stub_bucket_retrieve_by_name("foo") + @fake_bucket_exists("foo") def test_needs_an_authorization_header_even_if_no_token_is_configured(self): response = self.app.post( '/foo', @@ -28,7 +28,7 @@ def test_needs_an_authorization_header_even_if_no_token_is_configured(self): assert_that( response, is_unauthorized()) assert_that( response, is_error_response()) - @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token") + @fake_bucket_exists("foo", bearer_token="foo-bearer-token") def test_needs_an_authorization_header(self): response = self.app.post( '/foo', @@ -38,7 +38,7 @@ def test_needs_an_authorization_header(self): assert_that( response, is_unauthorized()) assert_that( response, is_error_response()) - @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token") + @fake_bucket_exists("foo", bearer_token="foo-bearer-token") def test_authorization_header_must_be_correct_format(self): response = self.app.post( '/foo', @@ -49,7 +49,7 @@ def test_authorization_header_must_be_correct_format(self): assert_that( response, is_unauthorized()) assert_that( response, is_error_response()) - @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token") + @fake_bucket_exists("foo", bearer_token="foo-bearer-token") def test_authorization_header_must_match_server_side_value(self): response = self.app.post( '/foo', @@ -60,7 +60,7 @@ def test_authorization_header_must_match_server_side_value(self): assert_that( response, is_unauthorized()) assert_that( response, is_error_response()) - @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token") + @fake_bucket_exists("foo", bearer_token="foo-bearer-token") def test_request_must_be_json(self): response = self.app.post( '/foo', @@ -71,7 +71,7 @@ def test_request_must_be_json(self): assert_that( response, is_bad_request()) assert_that( response, is_error_response()) - @stub_bucket_retrieve_by_name("foo_bucket", bearer_token="foo_bucket-bearer-token") + @fake_bucket_exists("foo_bucket", bearer_token="foo_bucket-bearer-token") @patch("backdrop.core.bucket.Bucket.store") def test_empty_list_gets_accepted(self, store): self.app.post( @@ -85,7 +85,7 @@ def test_empty_list_gets_accepted(self, store): [] ) - @stub_bucket_retrieve_by_name("foo_bucket", bearer_token="foo_bucket-bearer-token") + @fake_bucket_exists("foo_bucket", bearer_token="foo_bucket-bearer-token") @patch("backdrop.core.bucket.Bucket.store") def test_data_gets_stored(self, store): self.app.post( @@ -99,7 +99,7 @@ def test_data_gets_stored(self, store): [Record({"foo": "bar"})] ) - @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token") + @fake_bucket_exists("foo", bearer_token="foo-bearer-token") @patch("backdrop.core.bucket.Bucket.store") def test__timestamps_get_stored_as_utc_datetimes(self, store): expected_event_with_time = { @@ -117,7 +117,7 @@ def test__timestamps_get_stored_as_utc_datetimes(self, store): [Record(expected_event_with_time)] ) - @stub_bucket_retrieve_by_name("foo_bucket", bearer_token="foo_bucket-bearer-token") + @fake_bucket_exists("foo_bucket", bearer_token="foo_bucket-bearer-token") def test_data_with_empty_keys_400s(self): response = self.app.post( '/foo_bucket', @@ -129,7 +129,7 @@ def test_data_with_empty_keys_400s(self): assert_that( response, is_bad_request()) assert_that( response, is_error_response()) - @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token") + @fake_bucket_exists("foo", bearer_token="foo-bearer-token") @patch("backdrop.core.bucket.Bucket.store") def test__id_gets_stored(self, store): 
         response = self.app.post(
@@ -144,7 +144,7 @@ def test__id_gets_stored(self, store):
             [Record({"_id": "foo"})]
         )
 
-    @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token")
+    @fake_bucket_exists("foo", bearer_token="foo-bearer-token")
     def test_invalid__id_returns_400(self):
         response = self.app.post(
             '/foo',
@@ -158,7 +158,7 @@ def test_invalid__id_returns_400(self):
 
     @patch("backdrop.write.api.statsd")
     @patch("backdrop.core.bucket.Bucket.parse_and_store")
-    @stub_bucket_retrieve_by_name("foo", bearer_token="foo-bearer-token")
+    @fake_bucket_exists("foo", bearer_token="foo-bearer-token")
     def test_exception_handling(self, parse_and_store, statsd):
         parse_and_store.side_effect = RuntimeError("BOOM")