This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit

Fixes for pep8 and final test failures
jcbashdown authored and vagrant committed Aug 18, 2014
1 parent e010ff2 commit 8f983f9
Showing 4 changed files with 36 additions and 31 deletions.
backdrop/admin/app.py: 2 changes (1 addition, 1 deletion)
@@ -45,7 +45,7 @@
def protected(f):
@wraps(f)
def verify_user_logged_in(*args, **kwargs):
if not "user" in session:
if "user" not in session:
return redirect(
url_for('oauth_sign_in'))
return f(*args, **kwargs)
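The app.py change is the standard pep8 (flake8 E713) fix: a membership test reads as `"user" not in session` rather than `not "user" in session`. A minimal sketch of the two spellings, using a plain dict as a stand-in for the Flask session:

    session = {}

    # Old spelling, flagged by pep8 checkers as E713:
    if not "user" in session:
        print("no user key")

    # Spelling used by this commit; identical behaviour:
    if "user" not in session:
        print("no user key")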
backdrop/core/data_set.py: 16 changes (9 additions, 7 deletions)
@@ -86,20 +86,22 @@ def store(self, records):
for record in records:
# doesn't change data
errors += validate_record_schema(record, self.config['schema'])
print errors
records_with_timestamp_parse_and_errors = map(parse_timestamp, records)
records = map(lambda item: item[0], records_with_timestamp_parse_and_errors)
parse_timestamp_errors = map(lambda item: item[1], records_with_timestamp_parse_and_errors)
errors += filter(lambda item: (item is not None), parse_timestamp_errors)
print errors
records = map(lambda item: item[0],
records_with_timestamp_parse_and_errors)
parse_timestamp_errors = map(lambda item: item[1],
records_with_timestamp_parse_and_errors)
errors += filter(lambda item: (item is not None),
parse_timestamp_errors)
# validate
# order was important? should be after auto ids and timestamp?
# doesn't change data
validate_record_errors = map(validate_record, records)
errors += filter(lambda item: item is not None, validate_record_errors)
print errors
# add auto-id keys
records, auto_id_errors = add_auto_ids(records, self.config.get('auto_ids', None))
records, auto_id_errors = add_auto_ids(
records,
self.config.get('auto_ids', None))
errors += auto_id_errors
print errors

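For context on the rewrapped calls in store() above: parse_timestamp returns a (record, error) pair per record, and store() splits those pairs back into a list of records and a list of error messages. A self-contained sketch of that pattern with made-up records (backdrop itself runs on Python 2, where map() and filter() return plain lists; the list comprehensions below are the equivalent that also runs on Python 3):

    # Stand-in for map(parse_timestamp, records): each entry is (record, error).
    records_with_timestamp_parse_and_errors = [
        ({'_timestamp': '2014-08-18T00:00:00'}, None),
        ({'_timestamp': 'invalid'},
         '_timestamp is not a valid timestamp, it must be ISO8601'),
    ]

    errors = []
    records = [record for record, _ in records_with_timestamp_parse_and_errors]
    errors += [error for _, error in records_with_timestamp_parse_and_errors
               if error is not None]

    print(records)  # both records, the invalid one left untouched
    print(errors)   # ['_timestamp is not a valid timestamp, it must be ISO8601']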
backdrop/core/records.py: 18 changes (9 additions, 9 deletions)
@@ -46,7 +46,7 @@ def _generate_auto_id(record, auto_id_keys):
>>> _generate_auto_id({'foo':'foo'}, ['bar'])
Traceback (most recent call last):
...
ValidationError: The following required fields are missing: bar
ValidationError: The following required id fields are missing: bar
"""
missing_keys = set(auto_id_keys) - set(record.keys())
if len(missing_keys) > 0:
@@ -59,17 +59,17 @@ def _generate_auto_id(record, auto_id_keys):


def parse_timestamp(record):
# now defunct?
"""Parses a timestamp in a record
>>> parse_timestamp({'_timestamp': '2012-12-12T00:00:00'})
{'_timestamp': datetime.datetime(2012, 12, 12, 0, 0, tzinfo=<UTC>)}
({'_timestamp': datetime.datetime(2012, 12, 12, 0, 0, tzinfo=<UTC>)}, None)
>>> parse_timestamp({})
{}
>>> parse_timestamp({'_timestamp': 'invalid'})
Traceback (most recent call last):
...
ParseError: _timestamp is not a valid timestamp, it must be ISO8601
({}, None)
>>> record, error = parse_timestamp({'_timestamp': 'invalid'})
>>> record
{'_timestamp': 'invalid'}
>>> error
'_timestamp is not a valid timestamp, it must be ISO8601'
"""
error = None
if '_timestamp' in record:
@@ -87,7 +87,7 @@ def add_period_keys(record):
Add a field for each of the periods in timeseries.PERIODS
>>> record = add_period_keys(
... parse_timestamp({'_timestamp': '2012-12-12T12:12:00'}))
... parse_timestamp({'_timestamp': '2012-12-12T12:12:00'})[0])
>>> record['_hour_start_at']
datetime.datetime(2012, 12, 12, 12, 0, tzinfo=<UTC>)
>>> record['_day_start_at']
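The doctest changes above reflect parse_timestamp's new contract: instead of raising ParseError on a bad _timestamp, it returns a (record, error) tuple, with error set to a message string when parsing fails and None otherwise. An illustrative stand-in showing only the shape of that return value (the real function also converts a valid _timestamp string into a timezone-aware datetime):

    def parse_timestamp(record):
        # Stand-in validity check, for illustration only.
        error = None
        if '_timestamp' in record and 'T' not in record['_timestamp']:
            error = '_timestamp is not a valid timestamp, it must be ISO8601'
        return record, error

    record, error = parse_timestamp({'_timestamp': 'invalid'})
    print(record)  # {'_timestamp': 'invalid'}
    print(error)   # _timestamp is not a valid timestamp, it must be ISO8601

    record, error = parse_timestamp({})
    print(error)   # None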
backdrop/write/api.py: 31 changes (17 additions, 14 deletions)
Expand Up @@ -118,13 +118,14 @@ def write_by_group(data_group, data_type):
_validate_config(data_set_config)
_validate_auth(data_set_config)

data = listify_json(get_json_from_request(request))
errors, ok_message = _append_to_data_set(data_set_config, data)
try:
data = listify_json(get_json_from_request(request))
except ValidationError as e:
return abort(400, repr(e))
errors = _append_to_data_set(data_set_config, data)

if errors:
abort(400, json.dumps(errors))
elif ok_message:
return jsonify(status='ok', message=ok_message)
else:
return jsonify(status='ok')

@@ -166,19 +166,21 @@ def post_to_data_set(data_set_name):
_validate_config(data_set_config)
_validate_auth(data_set_config)

data = listify_json(get_json_from_request(request))
errors, ok_message = _append_to_data_set(
try:
data = listify_json(get_json_from_request(request))
except ValidationError as e:
return abort(400, repr(e))
errors = _append_to_data_set(
data_set_config,
data,
ok_message="Deprecation Warning: accessing by data-set name is "
"deprecated, Please use the /data-group/data-type form")
data)

if errors:
abort(400, json.dumps(errors))
elif ok_message:
return jsonify(status='ok', message=ok_message)
else:
return jsonify(status='ok')
ok_message = ("Deprecation Warning: accessing by data-set name is "
"deprecated, Please use the /data-group/data-type form")
return jsonify(status='ok',
message=ok_message)


@app.route('/data-sets/<data_set_name>', methods=['POST'])
@@ -264,10 +267,10 @@ def _validate_auth(data_set_config):
token, data_set_config['name']))


def _append_to_data_set(data_set_config, data, ok_message=None):
def _append_to_data_set(data_set_config, data):
data_set = DataSet(storage, data_set_config)
data_set.create_if_not_exists()
return data_set.store(data), ok_message
return data_set.store(data)


def _empty_data_set(data_set_config):
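To summarise the api.py changes: a bad request body now surfaces where the JSON is read (a ValidationError from listify_json becomes a 400 with repr(e)), _append_to_data_set returns only the error list from data_set.store(), and the deprecation message is always attached to the by-name endpoint's success response. A self-contained sketch of that control flow with stand-in helpers (these are not backdrop's own listify_json or request handling, just an illustration of the branching):

    import json


    class ValidationError(Exception):
        """Stand-in for backdrop's ValidationError."""


    def listify_json(data):
        # Stand-in: reject a missing body, wrap a single object in a list.
        if data is None:
            raise ValidationError("Expected JSON request body")
        return data if isinstance(data, list) else [data]


    def write_records(data, append, deprecation_message=None):
        # Mirrors the handler flow after this commit.
        try:
            records = listify_json(data)
        except ValidationError as e:
            return 400, repr(e)
        errors = append(records)
        if errors:
            return 400, json.dumps(errors)
        body = {'status': 'ok'}
        if deprecation_message:
            body['message'] = deprecation_message
        return 200, json.dumps(body)


    print(write_records(None, lambda records: []))
    print(write_records([{'value': 1}], lambda records: ['bad record']))
    print(write_records([{'value': 1}], lambda records: [],
                        deprecation_message='Deprecation Warning: ...'))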