Skip to content
This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit

Permalink
Create a data set if it doesn't exist when writing
Browse files Browse the repository at this point in the history
This decouples creation of a data set in stagecraft from creation in
backdrop, which has been the cause of a lot of the bugs around creating
data sets.

Once this is done, stagecraft can be updated to remove its dependency on
backdrop. Once that is done, the create and delete endpoints here can be
removed.

One point to note is that collections will not be removed when the data
set is deleted in stagecraft. The data set will not be available
(queryable or writable); however, if a data set with the same group and
type is created, the previous data will still be there, although it can
be emptied.
  • Loading branch information
robyoung committed Jul 31, 2014
1 parent 44ef780 commit e55b5d5
Show file tree
Hide file tree
Showing 5 changed files with 31 additions and 7 deletions.
4 changes: 4 additions & 0 deletions backdrop/core/data_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,10 @@ def get_seconds_out_of_date(self):
now - last_updated - max_age_delta
).total_seconds())

def create_if_not_exists(self):
if not self.storage.data_set_exists(self.name):
self.storage.create_data_set(self.name, self.config['capped_size'])

def empty(self):
return self.storage.empty_data_set(self.name)

Expand Down
2 changes: 2 additions & 0 deletions backdrop/write/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,7 @@ def _validate_auth(data_set_config):

def _append_to_data_set(data_set_config, data, ok_message=None):
data_set = DataSet(storage, data_set_config)
data_set.create_if_not_exists()
data_set.store(data)

if ok_message:
Expand All @@ -270,6 +271,7 @@ def _append_to_data_set(data_set_config, data, ok_message=None):

def _empty_data_set(data_set_config):
data_set = DataSet(storage, data_set_config)
data_set.create_if_not_exists()
data_set.empty()
return jsonify(
status='ok',
Expand Down
14 changes: 8 additions & 6 deletions features/write_api/empty_data_set.feature
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,10 @@ Feature: empty_data_set
Scenario: emptying a data-set by PUTing an empty JSON list
Given I have the data in "dinosaur.json"
and I have a data_set named "some_data_set" with settings
| key | value |
| data_group | "group" |
| data_type | "type" |
| key | value |
| data_group | "group" |
| data_type | "type" |
| capped_size | 0 |
and I use the bearer token for the data_set
when I POST to the specific path "/data/group/type"
given I have JSON data '[]'
Expand All @@ -20,9 +21,10 @@ Feature: empty_data_set
@empty_data_set
Scenario: PUT is only implemented for an empty JSON list
Given I have a data_set named "some_data_set" with settings
| key | value |
| data_group | "group" |
| data_type | "type" |
| key | value |
| data_group | "group" |
| data_type | "type" |
| capped_size | 0 |
and I use the bearer token for the data_set
given I have JSON data '[{"a": 1}]'
when I PUT to the specific path "/data/group/type"
Expand Down
17 changes: 16 additions & 1 deletion tests/core/test_data_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ def setup_config(self, additional_config={}):
'name': 'test_data_set',
'data_group': 'group',
'data_type': 'type',
'capped_size': 0,
}
self.data_set_config = dict(base_config.items() + additional_config.items())
self.data_set = data_set.DataSet(
Expand Down Expand Up @@ -110,7 +111,7 @@ def test_record_gets_validated(self):

def test_each_record_gets_validated_further_when_schema_given(self):
self.setup_config({'schema': self.schema})
#does store only take lists?
# does store only take lists?
with assert_raises(SchemaValidationError) as e:
self.data_set.store([{"_timestamp": "2014-06-12T00:00:00+0000"}, {'foo': 'bar'}])

Expand Down Expand Up @@ -330,3 +331,17 @@ def test_sorted_week_and_group_query_with_limit(self):
assert_that(data, contains(
has_entries({'some_group': 'val2'})
))


class TestDataSet_create(BaseDataSetTest):

def test_data_set_is_created_if_it_does_not_exist(self):
self.mock_storage.data_set_exists.return_value = False
self.data_set.create_if_not_exists()
self.mock_storage.create_data_set.assert_called_with(
'test_data_set', 0)

def test_data_set_is_not_created_if_it_does_exist(self):
self.mock_storage.data_set_exists.return_value = True
self.data_set.create_if_not_exists()
assert_that(self.mock_storage.create_data_set.called, is_(False))
1 change: 1 addition & 0 deletions tests/support/performanceplatform_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ def wrapped_fake_data_set_exists(*args, **kwargs):
'name': setup_data_set_name,
'data_group': data_group,
'data_type': data_type,
'capped_size': 0,
}
config = dict(base_config.items() + data_set_kwargs.items())
with pretend_this_data_set_exists(config):
Expand Down

0 comments on commit e55b5d5

Please sign in to comment.