Skip to content

Commit

Permalink
Fixed major bug that stopped status check_every from working. Added random backoff for general check.
Browse files Browse the repository at this point in the history
  • Loading branch information
jthingelstad committed Mar 12, 2013
1 parent 56ac3c7 commit 1f60ad9
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 12 deletions.
23 changes: 12 additions & 11 deletions bumble-bee/bumble-bee.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def record_statistics(self, site):
print "Pulling statistics info from %s." % data_url
(status, data, duration) = self.pull_json(site['pagename'], data_url)

ret_value = True
if status:
# Record the new data into the DB
if self.args.verbose >= 2:
Expand Down Expand Up @@ -115,20 +116,20 @@ def record_statistics(self, site):
self.apiary_db.commit()

self.stats['statistics'] += 1
return True
else:
self.record_error(site['pagename'], 'Statistics returned unexpected JSON.')
message = "[[%s]] Statistics returned unexpected JSON." % site['pagename']
self.botlog(bot='Bumble Bee', type='warn', message=message)
return False
ret_value = False

else:
if self.args.verbose >= 3:
print "Did not receive valid data from %s" % (data_url)
return False
ret_value = False

# Update the status table that we did our work!
self.update_status(site, 'statistics')
return ret_value

def record_smwinfo(self, site):
# Go out and get the statistic information
Expand All @@ -137,6 +138,7 @@ def record_smwinfo(self, site):
print "Pulling SMW info from %s." % data_url
(status, data, duration) = self.pull_json(site['pagename'], data_url)

ret_value = True
if status:
# Record the new data into the DB
if self.args.verbose >= 2:
Expand Down Expand Up @@ -187,17 +189,16 @@ def record_smwinfo(self, site):
self.apiary_db.commit()

self.stats['smwinfo'] += 1
return True
else:
self.record_error(site['pagename'], 'SMWInfo returned unexpected JSON.')
message = "[[%s]] SMWInfo returned unexpected JSON." % site['pagename']
self.botlog(bot='Bumble Bee', type='warn', message=message)
return False
ret_value = False

else:
if self.args.verbose >= 3:
print "Did not receive valid data from %s" % (data_url)
return False
ret_value = False

# Update the status table that we did our work!
# TODO: Commenting out. There is a bug that if this updates at the same time as the previous one
Expand Down Expand Up @@ -408,11 +409,11 @@ def main(self):
if site['In error'] and status:
site['In error'] = False
self.clear_error(site['pagename'])
if site['Collect skin data']:
status = self.record_skins(site)
if site['In error'] and status:
site['In error'] = False
self.clear_error(site['pagename'])
#if site['Collect skin data']:
#status = self.record_skins(site)
#if site['In error'] and status:
#site['In error'] = False
#self.clear_error(site['pagename'])

duration = time.time() - start_time
if self.args.segment is not None:
Expand Down
3 changes: 2 additions & 1 deletion lib/apiary.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import MySQLdb as mdb
import simplejson
import urllib2
import random
from urllib2 import Request, urlopen, URLError, HTTPError
from simplemediawiki import MediaWiki

Expand Down Expand Up @@ -268,7 +269,7 @@ def get_status(self, site):
print "Skipping stats..."
self.stats['skippedstatistics'] += 1

if general_delta > (24 * 60): # General checks are always bound to 24 hours
if general_delta > ((24 + random.randint(0,24)) * 60): # General checks are always bound to 24 hours, plus a random offset to keep checks evenly distributed
check_general = True
else:
if self.args.verbose >= 2:
Expand Down

0 comments on commit 1f60ad9

Please sign in to comment.