Removed output from all provider batch functions as we are now using logging

commit 8d616b3764d9cfb543af23cee8b26b16dbd473f4 (1 parent: cf6af70), committed by David King
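
Every hunk below applies the same refactor: each provider's @batch entry point loses its positional output stream, takes its metadata as keyword arguments instead of a positional dict, and reports progress and errors through a module-level logger. A minimal sketch of the resulting shape, pieced together from the ical.py hunk below (the provider base class and import_feed live elsewhere in molly; this is an illustration of the pattern, not the verbatim committed file):

    import logging
    import random

    from molly.conf.settings import batch

    logger = logging.getLogger(__name__)

    class ICalFeedsProvider(BaseFeedsProvider):
        verbose_name = 'iCal'

        # Run hourly on a random minute so the providers don't all fire at once.
        @batch('%d * * * *' % random.randint(0, 59))
        def import_data(self, **metadata):
            """Pulls iCal feeds"""
            from molly.apps.feeds.models import Feed
            for feed in Feed.objects.filter(provider=self.class_path):
                logger.info("Importing %s", feed.title)
                try:
                    self.import_feed(feed)
                except Exception:
                    # exc_info=True captures the traceback that the old
                    # traceback.print_exc(file=output) used to write.
                    logger.warn("Error importing feed %r" % feed.title,
                                exc_info=True, extra={'url': feed.rss_url})
            return metadata

One knock-on effect shows up at the call sites: with **metadata in the signature, the hand-run __main__ blocks and the tests can no longer pass a positional dict, so import_data({}, sys.stdout) becomes a bare import_data().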
molly/apps/feeds/providers/ical.py (8 changes)
@@ -27,23 +27,19 @@ class ICalFeedsProvider(BaseFeedsProvider):
verbose_name = 'iCal'
@batch('%d * * * *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
"""
Pulls iCal feeds
"""
from molly.apps.feeds.models import Feed
for feed in Feed.objects.filter(provider=self.class_path):
- output.write("Importing %s\n" % feed.title)
+ logger.info("Importing %s", feed.title)
try:
self.import_feed(feed)
except Exception, e:
- output.write("Error importing %s\n" % feed.title)
- traceback.print_exc(file=output)
- output.write('\n')
logger.warn("Error importing feed %r" % feed.title,
exc_info=True, extra={'url': feed.rss_url})
-
return metadata
def import_feed(self, feed):
molly/apps/feeds/providers/rss.py (8 changes)
@@ -29,23 +29,19 @@ class RSSFeedsProvider(BaseFeedsProvider):
verbose_name = 'RSS'
@batch('%d * * * *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
"""
Pulls RSS feeds
"""
from molly.apps.feeds.models import Feed
for feed in Feed.objects.filter(provider=self.class_path):
- output.write("Importing %s\n" % feed.title)
+ logger.info("Importing %s", feed.title)
try:
self.import_feed(feed)
except Exception, e:
- output.write("Error importing %s\n" % feed.title)
- traceback.print_exc(file=output)
- output.write('\n')
logger.warn("Error importing feed %r" % feed.title,
exc_info=True, extra={'url': feed.rss_url})
-
return metadata
def import_feed(self, feed):
molly/apps/feeds/providers/talks_cam.py (7 changes)
@@ -21,20 +21,17 @@ class TalksCamFeedsProvider(BaseFeedsProvider):
verbose_name = 'TalksCam'
@batch('%d * * * *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
"""
Pulls TalksCam feeds
"""
from molly.apps.feeds.models import Feed
for feed in Feed.objects.filter(provider=self.class_path):
- output.write("Importing %s\n" % feed.title)
+ logger.info("Importing %s", feed.title)
try:
self.import_feed(feed)
except Exception, e:
- output.write("Error importing %s\n" % feed.title)
- traceback.print_exc(file=output)
- output.write('\n')
logger.warn("Error importing feed %r" % feed.title,
exc_info=True, extra={'url': feed.rss_url})
molly/apps/places/providers/acislive.py (14 changes)
@@ -255,11 +255,7 @@ def __init__(self, urls=None):
self.urls = urls
@batch('%d 10 * * sat' % random.randint(0, 59))
- def import_data(self, metadata, output):
-
- self._output = output
-
-
+ def import_data(self, **metadata):
# Searching can flag up the same results again and again, so store
# which ones we've found
found_routes = set()
@@ -269,7 +265,7 @@ def import_data(self, metadata, output):
# Try and find all bus routes in the system
for term in list(ascii_lowercase) + map(str, range(0,9)):
found_routes = self._scrape_search(
- url, self.SEARCH_PAGE % (url, term), found_routes, output)
+ url, self.SEARCH_PAGE % (url, term), found_routes)
# Now try and find buses that don't exist on that system any more
for route in Route.objects.filter(external_ref__startswith=url):
@@ -277,7 +273,7 @@ def import_data(self, metadata, output):
logger.info('Removed route not found on system: %s', route)
route.delete()
- def _scrape_search(self, url, search_page, found_routes, output):
+ def _scrape_search(self, url, search_page, found_routes):
results = etree.parse(urlopen(search_page), parser = etree.HTMLParser())
for tr in results.find('.//table').findall('tr')[1:]:
reset_queries()
@@ -306,11 +302,11 @@ def _scrape_search(self, url, search_page, found_routes, output):
route.operator = operator
route.service_name = destination
route.save()
- self._scrape(route, link, output)
+ self._scrape(route, link)
return found_routes
- def _scrape(self, route, url, output):
+ def _scrape(self, route, url):
url += '&showall=1'
service = etree.parse(urlopen(url), parser = etree.HTMLParser())
route.stops.clear()
molly/apps/places/providers/atcocif.py (8 changes)
@@ -38,14 +38,14 @@ def __init__(self, url):
self._entity_type = NaptanMapsProvider(None)._get_entity_types()['BCT'][0]
@batch('%d 10 * * wed' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
deleted_routes = set(Route.objects.filter(external_ref__startswith=self._url).values_list('external_ref'))
archive = ZipFile(StringIO(urlopen(self._url).read()))
for file in archive.namelist():
- output.write(file)
+ logger.info('Importing %s', file)
routes = self._import_cif(archive.open(file))
- output.write(': %d routes in file\n' % len(routes))
+ logger.info('%d routes in file', len(routes))
self._import_routes(routes)
deleted_routes -= set(self._url + route['id'] for route in routes)
archive.close()
@@ -276,4 +276,4 @@ def _get_source(self):
if __name__ == '__main__':
- AtcoCifTimetableProvider('http://store.datagm.org.uk/sets/TfGM/GMPTE_CIF.zip').import_data({}, sys.stdout)
+ AtcoCifTimetableProvider('http://store.datagm.org.uk/sets/TfGM/GMPTE_CIF.zip').import_data()
molly/apps/places/providers/bbc_tpeg.py (2 changes)
@@ -51,7 +51,7 @@ def __init__(self, url=_TPEG_URL):
self._tpeg_url = url
@batch('%d-59/3 * * * *' % random.randint(0, 2))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
source, entity_type = self._get_source(), self._get_entity_type()
parser = etree.XMLParser(load_dtd=True)
molly/apps/places/providers/cif.py (2 changes)
@@ -189,7 +189,7 @@ def import_from_string(self, cif):
self._save_stops(self._save_journey(self._save_route()))
@batch('%d 15 * * mon' % random.randint(0, 59))
- def import_from_file(self, metadata, output):
+ def import_from_file(self, **metadata):
with open(self._filename) as file:
for line in file:
self._handle_line(line)
molly/apps/places/providers/naptan.py (4 changes)
@@ -674,7 +674,7 @@ def __init__(self, method=None, areas=None, username=None, password=None):
self._areas = areas
@batch('%d 10 * * mon' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
username, password = self._username, self._password
self._source = self._get_source()
@@ -775,4 +775,4 @@ def _get_source(self):
else:
if __name__ == '__main__':
p = NaptanMapsProvider(method='ftp', username=SECRETS.journeyweb[0], password=SECRETS.journeyweb[1], areas=('340',))
- p.import_data(None, None)
+ p.import_data()
molly/apps/places/providers/osm.py (17 changes)
@@ -6,6 +6,7 @@
import random
import os
import yaml
+import logging
from xml.sax import saxutils, handler, make_parser
@@ -25,18 +26,20 @@
from molly.geolocation import reverse_geocode
from molly.conf.settings import batch
+
+logger = logging.getLogger(__name__)
+
def node_id(id):
return "N%d" % int(id)
def way_id(id):
return "W%d" % int(id)
class OSMHandler(handler.ContentHandler):
- def __init__(self, source, entity_types, find_types, output, lat_north=None,
+ def __init__(self, source, entity_types, find_types, lat_north=None,
lat_south=None, lon_west=None, lon_east=None, identities={}):
self.source = source
self.entity_types = entity_types
self.find_types = find_types
- self.output = output
self._lat_north = lat_north
self._lat_south = lat_south
self._lon_west = lon_west
@@ -213,7 +216,7 @@ def endDocument(self):
entity.delete()
self.delete_count += 1
- self.output.write("""\
+ logger.info("""\
Complete
Created: %6d
Modified: %6d
@@ -302,7 +305,7 @@ def to_tuple(tag):
self.identities = {}
@batch('%d 9 * * mon' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
"Imports places data from OpenStreetMap"
old_etag = metadata.get('etag', '')
@@ -310,17 +313,15 @@ def import_data(self, metadata, output):
request = AnyMethodRequest(self._url, method='HEAD')
response = urllib2.urlopen(request)
new_etag = response.headers['ETag'][1:-1]
- self.output = output
if not settings.DEBUG and new_etag == old_etag:
- output.write('OSM data not updated. Not updating.\n')
+ logger.info('OSM data not updated. Not updating.')
return
parser = make_parser(['xml.sax.xmlreader.IncrementalParser'])
parser.setContentHandler(OSMHandler(self._get_source(),
self._get_entity_types(),
lambda tags, type_list=None: self._find_types(tags, self._osm_tags if type_list is None else type_list),
- output,
self._lat_north,
self._lat_south,
self._lon_west,
@@ -442,7 +443,7 @@ def disambiguate_titles(self, source):
else:
title = inferred_name
except:
- self.output.write("Couldn't geocode for %s\n" % inferred_name)
+ logger.info("Couldn't geocode for %s", inferred_name)
title = inferred_name
try:
name = entity.names.get(language_code=lang_code)
molly/apps/places/providers/postcodes.py (2 changes)
@@ -36,7 +36,7 @@ def _download_codepoint_open(self):
archive_file.close()
@batch('%d 12 1 1 *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
entity_type, source = self._get_entity_type(), self._get_source()
molly/apps/podcasts/providers/opml.py (8 changes)
@@ -71,7 +71,7 @@ def parse_outline(self, outline):
self.update_podcast(podcast)
@batch('%d * * * *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
self._category = None
@@ -89,8 +89,6 @@ def import_data(self, metadata, output):
self.parse_outline(outline)
rss_urls.append(outline.attrib['xmlUrl'])
except Exception, e:
- output.write("Update of podcast %r failed." % outline.attrib['xmlUrl'])
- traceback.print_exc(file=output)
if not failure_logged:
logger.exception("Update of podcast %r failed.", outline.attrib['xmlUrl'])
failure_logged = True
@@ -103,8 +101,6 @@ def import_data(self, metadata, output):
self.parse_outline(outline)
rss_urls.append(outline.attrib['xmlUrl'])
except Exception, e:
- output.write("Update of podcast %r failed." % outline.attrib['xmlUrl'])
- traceback.print_exc(file=output)
if not failure_logged:
logger.exception("Update of podcast %r failed.", outline.attrib['xmlUrl'])
failure_logged = True
@@ -114,4 +110,4 @@ def import_data(self, metadata, output):
if not podcast.rss_url in rss_urls:
podcast.delete()
- return metadata
+ return metadata
molly/apps/podcasts/providers/pp.py (2 changes)
@@ -19,7 +19,7 @@ def __init__(self, url):
self.url = url
@batch('%d * * * *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
atom = self.atom
xml = etree.parse(urllib.urlopen(self.url))
molly/apps/podcasts/providers/rss.py (2 changes)
@@ -37,7 +37,7 @@ def atom(self):
return Namespace('http://www.w3.org/2005/Atom')
@batch('%d * * * *' % random.randint(0, 59))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
for slug, url in self.podcasts:
podcast, url = Podcast.objects.get_or_create(
provider=self.class_path,
molly/apps/podcasts/tests.py (4 changes)
@@ -13,7 +13,7 @@ def setUp(self):
opml = OPMLPodcastsProvider(url = 'http://www.bbc.co.uk/radio/opml/bbc_podcast_opml_v2.xml',
rss_re = r'http://downloads.bbc.co.uk/podcasts/(.+)/rss.xml')
opml.class_path = 'molly.providers.apps.podcasts.opml.OPMLPodcastsProvider'
- opml.import_data({}, sys.stdout)
+ opml.import_data()
def testPodcasts(self):
podcasts = Podcast.objects.all()
@@ -23,4 +23,4 @@ def testPodcasts(self):
r = c.get('/podcasts/%s/' % podcast.category.slug)
r = c.get('/podcasts/%s/%s/' % (podcast.category.slug, podcast.slug))
- self.assertTrue(r.context['podcast'].podcastitem_set.count() > 0)
+ self.assertTrue(r.context['podcast'].podcastitem_set.count() > 0)
molly/apps/weather/providers/bbc.py (5 changes)
@@ -104,7 +104,7 @@ def _find_choice_match(choices, verbose):
)
@batch('%d-%d/15 * * * *' % (lambda x:(x, x+45))(random.randint(0, 14)))
- def import_data(self, metadata, output):
+ def import_data(self, **metadata):
"""
Pulls weather data from the BBC
"""
@@ -113,9 +113,6 @@ def import_data(self, metadata, output):
observations = self.get_observations_data()
forecasts = self.get_forecast_data()
except Exception as e:
- output.write("Error importing weather data from BBC\n")
- traceback.print_exc(file=output)
- output.write('\n')
logger.exception("Error importing weather data from BBC")
return metadata
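
With the output stream gone, running a provider by hand still works, but nothing reaches the console unless a logging handler is configured. A minimal sketch of a hand-run invocation (the basicConfig wiring is illustrative and not part of this commit; the credentials are placeholders, where the real __main__ block reads them from SECRETS):

    import logging
    logging.basicConfig(level=logging.INFO)  # route provider log output to the console

    from molly.apps.places.providers.naptan import NaptanMapsProvider

    p = NaptanMapsProvider(method='ftp', username='user', password='pass',
                           areas=('340',))
    p.import_data()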
