removed scrapy.log.started attribute, and avoid checking if log has already been started (since it should be called once anyway)
pablohoffman committed Oct 9, 2012
1 parent 1f89eb5 commit 1a905d6
Showing 4 changed files with 14 additions and 20 deletions.
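
The heart of the change is swapping a module-level run-once flag (scrapy.log.started) for per-instance state on ScrapyCommand. Below is a minimal, self-contained sketch of that lazy run-once pattern, using a hypothetical Service class rather than Scrapy's actual Crawler machinery:

    class Service(object):
        """Hypothetical example; not part of Scrapy."""

        def __init__(self):
            self.configured = False   # per-instance guard, not a module global
            self._resource = None

        @property
        def resource(self):
            # Configure exactly once, on first access; later accesses
            # return the already-configured object.
            if not self.configured:
                self._resource = object()  # stand-in for real setup work
                self.configured = True
            return self._resource

    s = Service()
    assert s.resource is s.resource  # setup ran once; the result is reused

Keeping the guard on the instance means two commands in one process no longer share hidden state through the log module.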
1 change: 1 addition & 0 deletions docs/news.rst
@@ -43,6 +43,7 @@ Scrapy changes:
 - DBM is now the default storage backend for HTTP cache middleware
 - number of log messages (per level) are now tracked through Scrapy stats (stat name: ``log_count/LEVEL``)
 - number of received responses are now tracked through Scrapy stats (stat name: ``response_received_count``)
+- removed ``scrapy.log.started`` attribute

Scrapyd changes:

4 changes: 0 additions & 4 deletions docs/topics/logging.rst
@@ -53,10 +53,6 @@ scrapy.log module
 .. module:: scrapy.log
    :synopsis: Logging facility
 
-.. attribute:: started
-
-   A boolean which is ``True`` if logging has been started or ``False`` otherwise.
-
 .. function:: start(logfile=None, loglevel=None, logstdout=None)
 
    Start the logging facility. This must be called before actually logging any
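
For context, after this change the documented entry point is just ``start()``, with no externally visible state. A typical standalone use of the 0.16-era API, based only on the signatures shown in this diff (``log.msg``'s ``level`` keyword comes from the ``msg()`` definition in scrapy/log.py below):

    from scrapy import log

    # Call once per process; per the commit message there is no longer
    # a guard against starting twice.
    log.start(logfile='scrapy.log', loglevel='DEBUG', logstdout=False)
    log.msg("logging started", level=log.INFO)
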
6 changes: 4 additions & 2 deletions scrapy/command.py
@@ -20,16 +20,18 @@ class ScrapyCommand(object):
 
     def __init__(self):
         self.settings = None # set in scrapy.cmdline
+        self.configured = False
 
     def set_crawler(self, crawler):
         assert not hasattr(self, '_crawler'), "crawler already set"
         self._crawler = crawler
 
     @property
     def crawler(self):
-        if not log.started:
+        if not self.configured:
             log.start_from_crawler(self._crawler)
-        self._crawler.configure()
+            self._crawler.configure()
+            self.configured = True
         return self._crawler
 
     def syntax(self):
23 changes: 9 additions & 14 deletions scrapy/log.py
@@ -29,8 +29,6 @@
     SILENT: "SILENT",
 }
 
-started = False
-
 class ScrapyFileLogObserver(log.FileLogObserver):
 
     def __init__(self, f, level=INFO, encoding='utf-8', crawler=None):
@@ -113,15 +111,14 @@ def _get_log_level(level_name_or_id):
         raise ValueError("Unknown log level: %r" % level_name_or_id)
 
 def start(logfile=None, loglevel='INFO', logstdout=True, logencoding='utf-8', crawler=None):
-    if log.defaultObserver: # check twisted log not already started
-        loglevel = _get_log_level(loglevel)
-        file = open(logfile, 'a') if logfile else sys.stderr
-        sflo = ScrapyFileLogObserver(file, loglevel, logencoding, crawler)
-        _oldshowwarning = warnings.showwarning
-        log.startLoggingWithObserver(sflo.emit, setStdout=logstdout)
-        # restore warnings, wrongly silenced by Twisted
-        warnings.showwarning = _oldshowwarning
-        return sflo
+    loglevel = _get_log_level(loglevel)
+    file = open(logfile, 'a') if logfile else sys.stderr
+    sflo = ScrapyFileLogObserver(file, loglevel, logencoding, crawler)
+    _oldshowwarning = warnings.showwarning
+    log.startLoggingWithObserver(sflo.emit, setStdout=logstdout)
+    # restore warnings, wrongly silenced by Twisted
+    warnings.showwarning = _oldshowwarning
+    return sflo
 
 def msg(message=None, _level=INFO, **kw):
     kw['logLevel'] = kw.pop('level', _level)
@@ -137,11 +134,9 @@ def err(_stuff=None, _why=None, **kw):
     log.err(_stuff, _why, **kw)
 
 def start_from_crawler(crawler):
-    global started
     settings = crawler.settings
-    if started or not settings.getbool('LOG_ENABLED'):
+    if not settings.getbool('LOG_ENABLED'):
         return
-    started = True
 
     start(settings['LOG_FILE'], settings['LOG_LEVEL'], settings['LOG_STDOUT'],
         settings['LOG_ENCODING'], crawler)
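
One behavioral note on the scrapy/log.py change: the old ``start()`` was a silent no-op once Twisted logging was running (the ``log.defaultObserver`` check, per its own comment), while the new version always installs its observer, so the "called once anyway" contract from the commit message is load-bearing. A small illustration of why, using only Twisted's public observer API rather than Scrapy code:

    from twisted.python import log as txlog

    events = []
    txlog.addObserver(events.append)
    txlog.addObserver(events.append)  # in effect, what a second start() would now do

    txlog.msg("hello")
    assert len(events) == 2  # one event, delivered to both observers
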
