Skip to content
This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit

Permalink
Lint and pep8 fixes.
Browse files Browse the repository at this point in the history
  • Loading branch information
blairboy362 committed Aug 28, 2018
1 parent a68135b commit 60f82ff
Show file tree
Hide file tree
Showing 4 changed files with 58 additions and 51 deletions.
28 changes: 16 additions & 12 deletions backdrop/core/storage/sql_query_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@


def create_data_set_exists_query(mogrify, data_set_id):
return mogrify("""
return mogrify(
"""
SELECT 1 FROM mongo
WHERE collection=%(collection)s
LIMIT 1
Expand All @@ -38,7 +39,8 @@ def create_delete_data_set_query(mogrify, data_set_id):


def create_get_last_updated_query(mogrify, data_set_id):
return mogrify("""
return mogrify(
"""
SELECT record FROM mongo
WHERE collection = %(collection)s
ORDER BY updated_at DESC
Expand All @@ -55,7 +57,8 @@ def create_find_record_query(mogrify, data_set_id, record_id):
"SELECT record FROM mongo WHERE id='some-collection:some-record'"
"""

return mogrify("""
return mogrify(
"""
SELECT record FROM mongo
WHERE id=%(id)s
""",
Expand All @@ -64,7 +67,8 @@ def create_find_record_query(mogrify, data_set_id, record_id):


def create_update_record_query(mogrify, data_set_id, record, record_id, ts, updated_at):
return mogrify("""
return mogrify(
"""
INSERT INTO mongo (id, collection, timestamp, updated_at, record)
VALUES
(
Expand All @@ -79,13 +83,13 @@ def create_update_record_query(mogrify, data_set_id, record, record_id, ts, upda
updated_at=%(updated_at)s,
record=%(record)s
""",
{
'id': _create_id(data_set_id, record_id),
'collection': data_set_id,
'timestamp': ts,
'updated_at': updated_at,
'record': json.dumps(record, default=_json_serialize_datetimes)
}
{
'id': _create_id(data_set_id, record_id),
'collection': data_set_id,
'timestamp': ts,
'updated_at': updated_at,
'record': json.dumps(record, default=_json_serialize_datetimes)
}
)


Expand Down Expand Up @@ -341,4 +345,4 @@ def _json_serialize_datetimes(obj):

if isinstance(obj, (datetime, date)):
return obj.isoformat()
raise TypeError("Type %s not serializable" % type(obj))
raise TypeError("Type %s not serializable" % type(obj))
4 changes: 2 additions & 2 deletions tests/core/integration/test_mongo_data_set_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,11 @@
from backdrop.core.data_set import DataSet
from backdrop.core.storage.mongo import MongoStorageEngine

from .test_data_set_integration import BaseDataSetIntegrationTest

DATABASE_URL = 'mongodb://localhost:27017/backdrop_test'
DATA_SET = 'data_set_integration_test'

from .test_data_set_integration import BaseDataSetIntegrationTest

class TestMongoDataSetIntegration(BaseDataSetIntegrationTest, unittest.TestCase):

Expand All @@ -32,4 +33,3 @@ def tearDown(self):

def _save(self, obj):
self.mongo_collection.save(obj)

74 changes: 37 additions & 37 deletions tests/core/storage/test_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def test_create_and_delete(self):
assert_that(self.engine.data_set_exists('foo_bar'), is_(False))

def test_simple_saving_and_finding(self):
self._save_all('foo_bar', {'_id':'test_id_1','foo': 'bar'})
self._save_all('foo_bar', {'_id': 'test_id_1', 'foo': 'bar'})

x = self.engine.execute_query('foo_bar', Query.create())
assert_that(x,
Expand All @@ -74,9 +74,9 @@ def test_saving_a_record_adds_an_updated_at(self):
def test_get_last_updated(self):
self.engine.create_data_set('foo_bar', 0)
with freeze_time('2012-12-12'):
self.engine.save_record('foo_bar', {'_id':'test_id_2','foo': 'first'})
self.engine.save_record('foo_bar', {'_id': 'test_id_2', 'foo': 'first'})
with freeze_time('2012-11-12'):
self.engine.save_record('foo_bar', {'_id':'test_id_3','foo': 'second'})
self.engine.save_record('foo_bar', {'_id': 'test_id_3', 'foo': 'second'})

assert_that(self.engine.get_last_updated('foo_bar'),
is_(d_tz(2012, 12, 12)))
Expand All @@ -98,15 +98,15 @@ def test_capped_data_set_is_capped(self):
self.engine.create_data_set('foo_bar', 1)

for i in range(100):
self.engine.save_record('foo_bar', {'_id':'test_id_4','foo': i})
self.engine.save_record('foo_bar', {'_id': 'test_id_4', 'foo': i})

assert_that(
len(self.engine.execute_query('foo_bar', Query.create())),
less_than(70))

def test_empty_a_data_set(self):
self._save_all('foo_bar',
{'_id':'test_id_5','foo': 'bar'}, {'_id':'test_id_6','bar': 'foo'})
{'_id': 'test_id_5', 'foo': 'bar'}, {'_id': 'test_id_6', 'bar': 'foo'})

assert_that(len(self.engine.execute_query('foo_bar', Query.create())), is_(2))

Expand Down Expand Up @@ -137,7 +137,7 @@ def test_delete_record(self):

def test_datetimes_are_returned_as_utc(self):
self._save_all('foo_bar',
{'_id':'test_id_7','_timestamp': datetime.datetime(2012, 8, 8)})
{'_id': 'test_id_7', '_timestamp': datetime.datetime(2012, 8, 8)})

results = self.engine.execute_query('foo_bar', Query.create())

Expand All @@ -146,7 +146,7 @@ def test_datetimes_are_returned_as_utc(self):
has_entries({'_timestamp': d_tz(2012, 8, 8)})))

def test_query_with_filter(self):
self._save_all('foo_bar', {'_id':'test_id_8','foo': 'bar'}, {'_id':'test_id_9','foo': 'foo'})
self._save_all('foo_bar', {'_id': 'test_id_8', 'foo': 'bar'}, {'_id': 'test_id_9', 'foo': 'foo'})

results = self.engine.execute_query('foo_bar', Query.create(
filter_by=[('foo', 'bar')]))
Expand All @@ -157,9 +157,9 @@ def test_query_with_filter(self):

def test_basic_query_with_time_limits(self):
self._save_all('foo_bar',
{'_id':'test_id_10','_timestamp': d_tz(2012, 12, 12)},
{'_id':'test_id_11','_timestamp': d_tz(2012, 12, 14)},
{'_id':'test_id_12','_timestamp': d_tz(2012, 12, 11)})
{'_id': 'test_id_10', '_timestamp': d_tz(2012, 12, 12)},
{'_id': 'test_id_11', '_timestamp': d_tz(2012, 12, 14)},
{'_id': 'test_id_12', '_timestamp': d_tz(2012, 12, 11)})

# start at
results = self.engine.execute_query('foo_bar', Query.create(
Expand Down Expand Up @@ -188,8 +188,8 @@ def test_basic_query_with_time_limits(self):

def test_basic_query_with_sort_ascending(self):
self._save_all('foo_bar',
{'_id':'test_id_13','foo': 'mug'},
{'_id':'test_id_14','foo': 'book'})
{'_id': 'test_id_13', 'foo': 'mug'},
{'_id': 'test_id_14', 'foo': 'book'})

results = self.engine.execute_query('foo_bar', Query.create(
sort_by=('foo', 'ascending')))
Expand All @@ -201,8 +201,8 @@ def test_basic_query_with_sort_ascending(self):

def test_basic_query_with_sort_descending(self):
self._save_all('foo_bar',
{'_id':'test_id_15','foo': 'mug'},
{'_id':'test_id_16','foo': 'book'})
{'_id': 'test_id_15', 'foo': 'mug'},
{'_id': 'test_id_16', 'foo': 'book'})

results = self.engine.execute_query('foo_bar', Query.create(
sort_by=('foo', 'descending')))
Expand All @@ -213,7 +213,7 @@ def test_basic_query_with_sort_descending(self):
has_entry('foo', 'book')))

def test_basic_query_with_limit(self):
self._save_all('foo_bar', {'_id':'test_id_17','foo': 'bar'}, {'_id':'test_id_18','foo': 'foo'})
self._save_all('foo_bar', {'_id': 'test_id_17', 'foo': 'bar'}, {'_id': 'test_id_18', 'foo': 'foo'})

results = self.engine.execute_query('foo_bar', Query.create(limit=1))

Expand All @@ -222,8 +222,8 @@ def test_basic_query_with_limit(self):
# !GROUPED!
def test_query_grouped_by_field(self):
self._save_all('foo_bar',
{'_id':'test_id_19','foo': 'foo'}, {'_id':'test_id_20','foo': 'foo'},
{'_id':'test_id_21','foo': 'bar'})
{'_id': 'test_id_19', 'foo': 'foo'}, {'_id': 'test_id_20', 'foo': 'foo'},
{'_id': 'test_id_21', 'foo': 'bar'})

results = self.engine.execute_query('foo_bar', Query.create(
group_by=['foo']))
Expand All @@ -236,9 +236,9 @@ def test_query_grouped_by_field(self):
def test_query_grouped_by_period(self):
self._save_all_with_periods(
'foo_bar',
{'_id':'test_id_22','_timestamp': d_tz(2012, 12, 12, 12)},
{'_id':'test_id_23','_timestamp': d_tz(2012, 12, 12, 15)},
{'_id':'test_id_24','_timestamp': d_tz(2012, 12, 13, 12)})
{'_id': 'test_id_22', '_timestamp': d_tz(2012, 12, 12, 12)},
{'_id': 'test_id_23', '_timestamp': d_tz(2012, 12, 12, 15)},
{'_id': 'test_id_24', '_timestamp': d_tz(2012, 12, 13, 12)})

results = self.engine.execute_query('foo_bar', Query.create(
period=DAY))
Expand All @@ -253,10 +253,10 @@ def test_query_grouped_by_period(self):
def test_group_by_field_and_period(self):
self._save_all_with_periods(
'foo_bar',
{'_id':'test_id_25','_timestamp': d_tz(2012, 12, 12), 'foo': 'foo'},
{'_id':'test_id_26','_timestamp': d_tz(2012, 12, 13), 'foo': 'foo'},
{'_id':'test_id_27','_timestamp': d_tz(2012, 12, 12), 'foo': 'bar'},
{'_id':'test_id_28','_timestamp': d_tz(2012, 12, 12), 'foo': 'bar'})
{'_id': 'test_id_25', '_timestamp': d_tz(2012, 12, 12), 'foo': 'foo'},
{'_id': 'test_id_26', '_timestamp': d_tz(2012, 12, 13), 'foo': 'foo'},
{'_id': 'test_id_27', '_timestamp': d_tz(2012, 12, 12), 'foo': 'bar'},
{'_id': 'test_id_28', '_timestamp': d_tz(2012, 12, 12), 'foo': 'bar'})

results = self.engine.execute_query('foo_bar', Query.create(
group_by=['foo'], period=DAY))
Expand All @@ -270,9 +270,9 @@ def test_group_by_field_and_period(self):
def test_group_query_with_collect_fields(self):
self._save_all(
'foo_bar',
{'_id':'test_id_29','foo': 'foo', 'c': 1},
{'_id':'test_id_30','foo': 'foo', 'c': 3},
{'_id':'test_id_31','foo': 'bar', 'c': 2}
{'_id': 'test_id_29', 'foo': 'foo', 'c': 1},
{'_id': 'test_id_30', 'foo': 'foo', 'c': 3},
{'_id': 'test_id_31', 'foo': 'bar', 'c': 2}
)

results = self.engine.execute_query('foo_bar', Query.create(
Expand All @@ -285,10 +285,10 @@ def test_group_query_with_collect_fields(self):

def test_group_and_collect_with_false_values(self):
self._save_all('foo_bar',
{'_id':'test_id_32','foo': 'one', 'bar': False},
{'_id':'test_id_33','foo': 'two', 'bar': True},
{'_id':'test_id_34','foo': 'two', 'bar': True},
{'_id':'test_id_35','foo': 'one', 'bar': False})
{'_id': 'test_id_32', 'foo': 'one', 'bar': False},
{'_id': 'test_id_33', 'foo': 'two', 'bar': True},
{'_id': 'test_id_34', 'foo': 'two', 'bar': True},
{'_id': 'test_id_35', 'foo': 'one', 'bar': False})

results = self.engine.execute_query('foo_bar', Query.create(
group_by=['foo'], collect=[('bar', 'sum')]))
Expand All @@ -300,9 +300,9 @@ def test_group_and_collect_with_false_values(self):

def test_group_query_ignores_records_without_grouping_key(self):
self._save_all('foo_bar',
{'_id':'test_id_36','foo': 'one'},
{'_id':'test_id_37','foo': 'two'},
{'_id':'test_id_38','bar': 'one'})
{'_id': 'test_id_36', 'foo': 'one'},
{'_id': 'test_id_37', 'foo': 'two'},
{'_id': 'test_id_38', 'bar': 'one'})

results = self.engine.execute_query('foo_bar', Query.create(
group_by=['foo']))
Expand All @@ -314,9 +314,9 @@ def test_group_query_ignores_records_without_grouping_key(self):

def test_basic_query_with_inclusive_time_limits(self):
self._save_all('foo_bar',
{'_id':'test_id_39','_timestamp': d_tz(2014, 12, 01)},
{'_id':'test_id_40','_timestamp': d_tz(2014, 12, 02)},
{'_id':'test_id_41','_timestamp': d_tz(2014, 12, 03)})
{'_id': 'test_id_39', '_timestamp': d_tz(2014, 12, 01)},
{'_id': 'test_id_40', '_timestamp': d_tz(2014, 12, 02)},
{'_id': 'test_id_41', '_timestamp': d_tz(2014, 12, 03)})

# start at
results = self.engine.execute_query('foo_bar', Query.create(
Expand Down
3 changes: 3 additions & 0 deletions tests/support/test_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@ def d(year, month, day, hour=0, minute=0, second=0):
def fixture_path(name):
    """Return the absolute path of *name* inside the features/fixtures directory."""
    here = os.path.dirname(__file__)
    fixtures_dir = os.path.join(here, '..', '..', 'features', 'fixtures')
    return os.path.abspath(os.path.join(fixtures_dir, name))


def mock_mogrify(template, values):
"""
>>> mock_mogrify("select %(thing)s from %(other_thing)s ...", {'thing': 'value', 'other_thing': 'other_value'})
Expand All @@ -123,6 +124,7 @@ def mock_mogrify(template, values):
"".join([_mogrify_token(token, values) for token in tokens])
).strip()


def _mogrify_token(token, values):
match = re.match("%\(([^\(]+)\)s", token)
if match:
Expand All @@ -131,6 +133,7 @@ def _mogrify_token(token, values):
else:
return token


@contextmanager
def json_fixture(name, parse_dates=False):
if not parse_dates:
Expand Down

0 comments on commit 60f82ff

Please sign in to comment.