
Add delete to data sets
Added delete functionality for single entries in data sets
ambrozic committed Jul 11, 2017
1 parent ab68ef3 commit 2646e4e
Showing 5 changed files with 97 additions and 23 deletions.
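
The diff adds a `delete(record_id)` method to `DataSet`, a `delete_record` method to the Mongo storage engine, and a `DELETE /data/<data_group>/<data_type>/<data_set_id>` route to the write API. Below is a minimal sketch of how a client might call the new route once deployed, using the `requests` library; the host, data group/type, record id and token are illustrative assumptions, not values from this commit.

```python
import requests

# Illustrative values only: the host, data group/type, record id and token
# below are assumptions, not taken from this commit.
url = "https://backdrop.example.gov.uk/data/gcloud/sales/some-record-id"
headers = {"Authorization": "Bearer some-bearer-token"}

response = requests.delete(url, headers=headers)

# The route returns {"status": "ok"} on success, or a 400 with a message
# (e.g. "No record found with id ...") when no record matches the id.
print(response.status_code, response.json())
```
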
5 changes: 5 additions & 0 deletions backdrop/core/data_set.py
@@ -77,6 +77,11 @@ def patch(self, record_id, record):
        else:
            return 'No record found with id {}'.format(record_id)

    def delete(self, record_id):
        if self.storage.find_record(self.name, record_id) is not None:
            return self.storage.delete_record(self.name, record_id)
        return 'No record found with id {}'.format(record_id)

    def empty(self):
        return self.storage.empty_data_set(self.name)

10 changes: 6 additions & 4 deletions backdrop/core/storage/mongo.py
@@ -1,16 +1,15 @@
import os
import logging
import datetime
import itertools
import logging
import os

import pymongo
from pymongo.errors import AutoReconnect, CollectionInvalid
from bson import Code
from pymongo.errors import AutoReconnect, CollectionInvalid

from .. import timeutils
from ..errors import DataSetCreationError


logger = logging.getLogger(__name__)

__all__ = ['MongoStorageEngine']
@@ -152,6 +151,9 @@ def update_record(self, data_set_id, record_id, record):
        self._collection(data_set_id).update(
            {"_id": record_id}, {"$set": record})

    def delete_record(self, data_set_id, record_id):
        self._collection(data_set_id).remove({"_id": record_id})

    def execute_query(self, data_set_id, query):
        return map(convert_datetimes_to_utc,
                   self._execute_query(data_set_id, query))
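
`delete_record` uses pymongo's legacy `remove()` call, matching the `update()` call above it. For comparison, here is a minimal standalone sketch of the same operation against pymongo 3+'s CRUD API (`delete_one`); it assumes pymongo >= 3.0 and a locally running mongod, and the URI, database and collection names are illustrative only.

```python
import pymongo

# Assumptions: pymongo >= 3.0 is installed and a mongod is reachable at this
# URI; the database and collection names are illustrative only.
client = pymongo.MongoClient("mongodb://localhost:27017")
collection = client["backdrop"]["foo_bar"]

# delete_one removes at most one document matching the filter, analogous to
# the remove({"_id": record_id}) call in delete_record above.
result = collection.delete_one({"_id": "111"})
print(result.deleted_count)  # 1 if a matching record existed, otherwise 0
```
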
44 changes: 34 additions & 10 deletions backdrop/write/api.py
@@ -1,27 +1,23 @@
import datetime
import pytz

from os import getenv
from celery import Celery

import pytz
from celery import Celery
from dateutil.parser import parse as datetime_parse
from flask import abort, Flask, g, jsonify, request
from flask_featureflags import FeatureFlag
from performanceplatform import client

from backdrop import statsd
from backdrop.core.data_set import DataSet
from backdrop.core.flaskutils import DataSetConverter
from backdrop.write.decompressing_request import DecompressingRequest

from ..core.errors import ParseError, ValidationError
from .validation import auth_header_is_valid, extract_bearer_token
from ..core import log_handler, cache_control
from ..core.errors import ParseError, ValidationError
from ..core.flaskutils import generate_request_id

from ..core.storage.mongo import MongoStorageEngine

from .validation import auth_header_is_valid, extract_bearer_token

from performanceplatform import client

GOVUK_ENV = getenv("GOVUK_ENV", "development")

app = Flask("backdrop.write.api")
@@ -140,6 +136,29 @@ def write_by_group(data_group, data_type):
    return jsonify(status='ok')


@app.route('/data/<data_group>/<data_type>/<data_set_id>', methods=['DELETE'])
@cache_control.nocache
@statsd.timer('write.route.data.delete.data_set')
def delete_by_group_type_and_id(data_group, data_type, data_set_id):
    """
    Delete by group, type and id
    e.g. DELETE https://BACKDROP/data/gcloud/sales/MjAxNi0wOC0xNSAwMDowMD
    """

    data_set_config = admin_api.get_data_set(data_group, data_type)
    _validate_config(data_set_config)
    # _validate_auth(data_set_config)

    try:
        errors = _delete_data_set(data_set_config, data_set_id)
        if errors:
            return (jsonify(messages=errors), 400)
        return jsonify(status='ok')

    except (ParseError, ValidationError) as e:
        abort(400, repr(e))


@app.route('/data/<data_group>/<data_type>/<data_set_id>', methods=['PATCH'])
@cache_control.nocache
@statsd.timer('write.route.data.patch.data_set')
@@ -319,6 +338,11 @@ def _patch_data_set(data_set_config, data_set_id, data):
    return data_set.patch(data_set_id, data)


def _delete_data_set(data_set_config, data_set_id):
    data_set = DataSet(storage, data_set_config)
    return data_set.delete(data_set_id)


def _empty_data_set(data_set_config):
    audit_delete(data_set_config['name'])
    data_set = DataSet(storage, data_set_config)
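
As a rough sketch, the new route could also be exercised in-process with Flask's test client; this assumes `backdrop.write.api` can be imported with its configuration (storage engine, admin API client, auth) already in place, which the snippet does not set up, and the group, type and record id are illustrative.

```python
# Assumes backdrop.write.api imports cleanly with its configuration in place;
# this snippet does not set up storage, the admin API client or auth.
from backdrop.write.api import app

with app.test_client() as client:
    # Illustrative group/type/id, mirroring the docstring example above.
    response = client.delete('/data/gcloud/sales/some-record-id')
    print(response.status_code, response.data)
```
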
25 changes: 22 additions & 3 deletions tests/core/storage/test_storage.py
@@ -1,15 +1,14 @@
import datetime

from freezegun import freeze_time
from hamcrest import assert_that, is_, less_than, contains, has_entries, \
    instance_of, has_entry, contains_inanyorder
from nose.tools import assert_raises
from freezegun import freeze_time

from backdrop.core.query import Query
from backdrop.core.errors import DataSetCreationError
from backdrop.core.query import Query
from backdrop.core.records import add_period_keys
from backdrop.core.timeseries import DAY

from tests.support.test_helpers import d_tz


@@ -118,6 +117,26 @@ def test_empty_a_data_set(self):

        assert_that(len(self.engine.execute_query('foo_bar', Query.create())), is_(0))

    def test_delete_record(self):
        data = [{'_id': '111', 'foo': 'bar'}, {'_id': '222', 'bar': 'foo'}]
        self._save_all('foo_bar', *data)
        assert_that(len(self.engine.execute_query('foo_bar', Query.create())), is_(2))

        self.engine.delete_record('foo_bar', '111')
        assert_that(
            self.engine.execute_query('foo_bar', Query.create()),
            contains(has_entries({'_id': '222', 'bar': 'foo'}))
        )

        self.engine.delete_record('foo_bar', '333')
        assert_that(
            self.engine.execute_query('foo_bar', Query.create()),
            contains(has_entries({'_id': '222', 'bar': 'foo'}))
        )

        self.engine.delete_record('foo_bar', '222')
        assert_that(self.engine.execute_query('foo_bar', Query.create()), is_([]))

    def test_datetimes_are_returned_as_utc(self):
        self._save_all('foo_bar',
                       {'_timestamp': datetime.datetime(2012, 8, 8)})
36 changes: 30 additions & 6 deletions tests/core/test_data_set.py
@@ -1,15 +1,12 @@
from freezegun import freeze_time
from hamcrest import assert_that, has_item, has_entries, \
    has_length, contains, has_entry, contains_string, \
    is_
from nose.tools import assert_raises
    has_length, contains, has_entry, is_
from mock import Mock, patch
from freezegun import freeze_time
from nose.tools import assert_raises

from backdrop.core import data_set
from backdrop.core.query import Query
from backdrop.core.timeseries import WEEK, MONTH
from backdrop.core.errors import ValidationError
from jsonschema import ValidationError as SchemaValidationError
from tests.support.test_helpers import d, d_tz, match


@@ -246,6 +243,33 @@ def test_record_not_found(self):
        assert_that(result, is_('No record found with id uuid'))


class TestDataSet_delete(BaseDataSetTest):
    schema = {
        "$schema": "http://json-schema.org/schema#",
        "title": "Timestamps",
        "type": "object",
        "properties": {
            "_timestamp": {
                "description": "An ISO8601 formatted date time",
                "type": "string",
                "format": "date-time"
            }
        },
        "required": ["_timestamp"]
    }

    def test_deleting_a_simple_record(self):
        self.data_set.delete('uuid')
        self.mock_storage.delete_record.assert_called_with(
            'test_data_set', 'uuid'
        )

    def test_record_not_found(self):
        self.mock_storage.find_record.return_value = None
        result = self.data_set.delete('uuid')
        assert_that(result, is_('No record found with id uuid'))


class TestDataSet_execute_query(BaseDataSetTest):

    def test_period_query_fails_when_weeks_do_not_start_on_monday(self):
