Browse files

fixing up some minor naming things

  • Loading branch information...
1 parent 2a83cf4 commit 3878325cb70b7da8380aaa3a06769b782fba8d55 Paul Tagliamonte committed with paultag Feb 21, 2012
Showing with 26 additions and 26 deletions.
  1. +8 −8 billy/bin/update.py
  2. +4 −4 billy/site/browse/templates/billy/run_detail.html
  3. +14 −14 billy/site/browse/views.py
View
16 billy/bin/update.py
@@ -168,18 +168,18 @@ def _do_imports(abbrev, args):
report = {}
if args.legislators:
- report['legislator_report'] = \
+ report['legislators'] = \
import_legislators(abbrev, settings.BILLY_DATA_DIR)
if args.bills:
- report['bill_report'] = import_bills(abbrev, settings.BILLY_DATA_DIR)
+ report['bills'] = import_bills(abbrev, settings.BILLY_DATA_DIR)
if args.committees:
- report['committee_report'] = \
+ report['committees'] = \
import_committees(abbrev, settings.BILLY_DATA_DIR)
if args.events:
- report['event_report'] = \
+ report['events'] = \
import_events(abbrev, settings.BILLY_DATA_DIR)
return report
@@ -377,9 +377,9 @@ def main(old_scrape_compat=False):
lex = e
exec_end = dt.datetime.utcnow()
- exec_record['start'] = exec_start
- exec_record['end'] = exec_end
- scrape_data['scrape'] = exec_record
+ exec_record['started'] = exec_start
+ exec_record['ended'] = exec_end
+ scrape_data['scraped'] = exec_record
for record in run_record:
if "exception" in record:
@@ -400,7 +400,7 @@ def main(old_scrape_compat=False):
# imports
if args.do_import:
import_report = _do_imports(abbrev, args)
- scrape_data['import'] = import_report
+ scrape_data['imported'] = import_report
# We're tying the run-logging into the import stage - since import
# already writes to the DB, we might as well throw this in too.
db.billy_runs.save( scrape_data, safe=True )
View
8 billy/site/browse/templates/billy/run_detail.html
@@ -202,15 +202,15 @@
{% endif %}
</p>
<p>
-<h1>run report for {{ runlog.scrape.state }}</h1>
+<h1>run report for {{ runlog.scraped.state }}</h1>
-The last scrape took <b>{{ runlog.scrape.t_delta }}</b><br /><br />
+The last scrape took <b>{{ runlog.scraped.t_delta }}</b><br /><br />
-<code>{% for key in runlog.scrape.args %}{{ key }} {% endfor %}</code><br />
+<code>{% for key in runlog.scraped.args %}{{ key }} {% endfor %}</code><br />
<br />
</p>
<p>
-{% for key in runlog.scrape.run_record %}
+{% for key in runlog.scraped.run_record %}
<!-- <h4>{{ key.chamber }} {{ key.type }} ({{ key.time }})</h4>
Scrape took <b>{{ key.t_delta }}</b> -->
{% if key.exception %}
View
28 billy/site/browse/views.py
@@ -67,12 +67,12 @@ def overview(request, abbr):
try:
runlog = db.billy_runs.find({
- "scrape.state" : abbr
- }).sort( "scrape.start", direction=pymongo.DESCENDING )[0]
+ "scraped.state" : abbr
+ }).sort( "scraped.started", direction=pymongo.DESCENDING )[0]
# This hack brought to you by Django's inability to do subtraction
# in the template :)
- runlog['scrape']['time_delta'] = ( runlog['scrape']['end'] - \
- runlog['scrape']['start'] )
+ runlog['scraped']['time_delta'] = ( runlog['scraped']['ended'] - \
+ runlog['scraped']['started'] )
context['runlog'] = runlog
if "failure" in runlog:
context['warning_title'] = "This build is currently broken!"
@@ -102,7 +102,7 @@ def _do_pie( runs ):
excs = {}
for run in runs:
if "failure" in run:
- for r in run['scrape']['run_record']:
+ for r in run['scraped']['run_record']:
if "exception" in r:
ex = r['exception']
try:
@@ -121,7 +121,7 @@ def _do_stacked( runs ):
ret[field] = []
for run in runs:
- guy = run['scrape']['run_record']
+ guy = run['scraped']['run_record']
for field in fields:
try:
g = None
@@ -146,21 +146,21 @@ def _do_digest( runs ):
data = { "runs" : [], "avgs" : [], "stat" : [] }
for run in runs:
timeDelta = (
- run['scrape']['end'] - run['scrape']['start']
+ run['scraped']['ended'] - run['scraped']['started']
).total_seconds()
oldAverage = rolling_average( oldAverage, timeDelta, oldAverageCount )
oldAverageCount += 1
stat = "Failure" if "failure" in run else ""
- s = time.mktime(run['scrape']['start'].timetuple())
+ s = time.mktime(run['scraped']['started'].timetuple())
data['runs'].append([ s, timeDelta, stat ])
data['avgs'].append([ s, oldAverage, '' ])
data['stat'].append( stat )
return data
history_count = 50
- default_spec = { "scrape.state" : abbr }
+ default_spec = { "scraped.state" : abbr }
data = {
"lines" : {},
"pies" : {},
@@ -218,14 +218,14 @@ def _do_digest( runs ):
def run_detail(request, abbr):
runlog = db.billy_runs.find({
- "scrape.state" : abbr
- }).sort( "scrape.start", direction=pymongo.DESCENDING )[0]
+ "scraped.state" : abbr
+ }).sort( "scraped.started", direction=pymongo.DESCENDING )[0]
# pre-process goodies for the template
- runlog['scrape']['t_delta'] = (
- runlog['scrape']['end'] - runlog['scrape']['start']
+ runlog['scraped']['t_delta'] = (
+ runlog['scraped']['ended'] - runlog['scraped']['started']
)
- for entry in runlog['scrape']['run_record']:
+ for entry in runlog['scraped']['run_record']:
entry['t_delta'] = (
entry['end_time'] - entry['start_time']
)

0 comments on commit 3878325

Please sign in to comment.