now mongo errors are caught, omg it is so difficult

commit ae846570fa34cce708f42dff74e9863b274fa35f 1 parent 897edd0
@h4ck3rm1k3 authored
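The change wraps billy's Mongo writes in explicit exception handlers so failures are logged and the run exits with a distinct status code. A minimal standalone sketch of that pattern (assuming pymongo 2.x, where save(doc, safe=True) raises OperationFailure on a failed write; save_with_logging is a hypothetical helper name, and db/_log stand in for billy's module-level globals):

    import sys
    import logging

    import pymongo.errors

    _log = logging.getLogger('billy')

    def save_with_logging(collection, doc):
        # Log the document before the acknowledged write, mirroring the
        # _log.debug(...) calls added in the hunks below.
        _log.debug(doc)
        try:
            collection.save(doc, safe=True)
        except KeyError as e:
            _log.debug("Caught exception1 :")
            _log.debug(e)
            sys.exit(123)
        except pymongo.errors.OperationFailure as e:
            _log.debug("Caught exception3 :")
            _log.debug(e)
            sys.exit(123)
        except Exception as e:
            _log.debug("Caught exception :")
            _log.debug(e)
            sys.exit(123)

Used this way, the db.billy_runs.save(scrape_data, safe=True) call in main() would become save_with_logging(db.billy_runs, scrape_data); the actual patch inlines the same handlers around the existing call sites instead.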
billy/bin/update.py (21 changes)
@@ -1,4 +1,8 @@
#!/usr/bin/env python
+import bson.binary
+from bson.binary import ALL_UUID_SUBTYPES
+from bson.binary import OLD_UUID_SUBTYPE
+
import os
import sys
import json
@@ -15,6 +19,9 @@
# code snippet, to be included in 'sitecustomize.py'
import sys
+
+from pymongo.errors import OperationFailure
+
def info(type, value, tb):
print "except hook"
if hasattr(sys, 'ps1') or not sys.stderr.isatty():
@@ -166,6 +173,7 @@ def _do_imports(abbrev, args):
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
dist['boundary_id'] = dist['boundary_id'] % dist
dist['num_seats'] = int(dist['num_seats'])
+ _log.debug(dist)
db.districts.save(dist, safe=True)
else:
_log.warning("%s not found, continuing without "
@@ -208,6 +216,7 @@ def _do_reports(abbrev, args):
if 'committees' in args.types:
report['committees'] = committee_report(abbrev)
+ _log.debug(report)
db.reports.save(report, safe=True)
@@ -436,9 +445,20 @@ def main(old_scrape_compat=False):
_log.debug(scrape_data)
db.billy_runs.save(scrape_data, safe=True)
+ except KeyError as e:
+ _log.debug("Caught exception1 :")
+ _log.debug(e)
+ exit (123)
+
+ except pymongo.errors.OperationFailure as e :
+ _log.debug("Caught exception3 :")
+ _log.debug(e)
+ exit (123)
+
except Exception as e:
_log.debug("Caught exception :")
_log.debug(e)
+ exit (123)
# exc_type, exc_obj, exc_tb = sys.exc_info()
# fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
# print (exc_type, fname, exc_tb.tb_lineno)
@@ -464,6 +484,7 @@ def main(old_scrape_compat=False):
scrape_data['imported'] = import_report
# We're tying the run-logging into the import stage - since import
# already writes to the DB, we might as well throw this in too.
+ _log.debug(scrape_data)
db.billy_runs.save(scrape_data, safe=True)
# reports
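The first hunk above also touches the sitecustomize-style except hook (info()), but the diff only shows the first lines of its body. For reference, the usual form of that recipe prints the traceback and drops into the debugger when running non-interactively; the tail of billy's version is not shown here, so treat the else branch below as an assumption:

    import sys

    def info(type, value, tb):
        # (billy's patched version also prints an "except hook" marker first)
        if hasattr(sys, 'ps1') or not sys.stderr.isatty():
            # Interactive session or no attached terminal: use the default hook.
            sys.__excepthook__(type, value, tb)
        else:
            import traceback
            import pdb
            # Print the traceback, then start post-mortem debugging.
            traceback.print_exception(type, value, tb)
            pdb.post_mortem(tb)

    sys.excepthook = info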
billy/importers/utils.py (21 changes)
@@ -3,14 +3,16 @@
import time
import json
import datetime
-
+import traceback
from bson.son import SON
import pymongo.errors
import name_tools
-
+import logging
+_log = logging.getLogger('billy')
from billy import db
from billy.conf import settings
-
+import sys
+import traceback
if settings.ENABLE_OYSTER:
oyster_import_exception = None
try:
@@ -279,13 +281,24 @@ def prepare_obj(obj):
def next_big_id(abbr, letter, collection):
+ _log.debug("next_big_id")
+
query = SON([('_id', abbr)])
update = SON([('$inc', SON([('seq', 1)]))])
- seq = db.command(SON([('findandmodify', collection),
+
+ seq = -1
+ try :
+ seq = db.command(SON([('findandmodify', collection),
('query', query),
('update', update),
('new', True),
('upsert', True)]))['value']['seq']
+ except Exception as e :
+ traceback.print_exc(file=sys.stderr)
+ traceback.print_exc()
+ _log.error("ERROR")
+ _log.debug(e)
+
return "%s%s%08d" % (abbr.upper(), letter, seq)
billy/scrape/__init__.py (24 changes)
@@ -143,6 +143,8 @@ def all_sessions(self):
_log.debug("all sessions")
sessions = []
+ traceback.print_exc()
+
if 'terms' not in self.metadata: # we expect a metadata and terms
return sessions
@@ -268,8 +270,12 @@ def check_sessions(metadata, sessions):
metadata_session_details = list(metadata.get('_ignored_scraped_sessions',
[]))
+
+ _log.debug("all_sessions_in_terms:%s" %all_sessions_in_terms)
+
for k, v in metadata['session_details'].iteritems():
try:
+ _log.debug("check session details from meta data:%s" % k)
all_sessions_in_terms.remove(k)
except ValueError:
raise ScrapeError('session %s exists in session_details but not '
@@ -280,14 +286,26 @@ def check_sessions(metadata, sessions):
if not sessions:
raise ScrapeError('no sessions from session_list()')
+ _log.debug("all_sessions_in_terms:%s" % all_sessions_in_terms)
+
+
if all_sessions_in_terms:
raise ScrapeError('no session_details for session(s): %r' %
all_sessions_in_terms)
unaccounted_sessions = []
+
+ _log.debug("metadata_session_details:%s" % metadata_session_details)
+
+
for s in sessions:
if s not in metadata_session_details:
+ _log.debug("unaccounted for")
+ _log.debug(s)
unaccounted_sessions.append(s)
- if unaccounted_sessions:
- raise ScrapeError('session(s) unaccounted for: %r' %
- unaccounted_sessions)
+
+ for s in unaccounted_sessions:
+ if len(s) >0:
+ _log.debug("unaccounted for")
+ _log.debug(s)
+ raise ScrapeError('session(s) unaccounted for: %r' % unaccounted_sessions)
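The net effect of the last hunk is a behavioural change in check_sessions(): instead of raising as soon as unaccounted_sessions is non-empty, it logs every unaccounted session and only raises once it hits a non-empty session name. A self-contained sketch of that tail (check_unaccounted is a hypothetical wrapper name; in billy this logic sits at the end of check_sessions()):

    import logging

    _log = logging.getLogger('billy')

    class ScrapeError(Exception):
        pass

    def check_unaccounted(sessions, metadata_session_details):
        _log.debug("metadata_session_details:%s" % metadata_session_details)

        unaccounted_sessions = []
        for s in sessions:
            if s not in metadata_session_details:
                _log.debug("unaccounted for")
                _log.debug(s)
                unaccounted_sessions.append(s)

        for s in unaccounted_sessions:
            # Only a non-empty session name triggers the error.
            if len(s) > 0:
                _log.debug("unaccounted for")
                _log.debug(s)
                raise ScrapeError('session(s) unaccounted for: %r' %
                                  unaccounted_sessions)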