Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

removed another instance of scrapy.conf.settings singleton, this time…

… from scrapy.utils.trackref. From now on, trackrefs functionality will always be enabled, as it imposes a very minimal performance overhead
  • Loading branch information...
commit 8f2dda12cc593e318c6ad924f7872491874cc612 1 parent cd82301
@pablohoffman pablohoffman authored
View
1  docs/news.rst
@@ -34,6 +34,7 @@ Scrapy changes:
- downloader handlers (:setting:`DOWNLOAD_HANDLERS` setting) now receive settings as the first argument of the constructor
- replaced memory usage accounting with (more portable) `resource`_ module, removed ``scrapy.utils.memory`` module
- removed signal: ``scrapy.mail.mail_sent``
+- removed ``TRACK_REFS`` setting, now :ref:`trackrefs <topics-leaks-trackrefs>` is always enabled
Scrapyd changes:
View
11 docs/topics/leaks.rst
@@ -56,14 +56,9 @@ Debugging memory leaks with ``trackref``
memory leaks. It basically tracks the references to all live Requests,
Responses, Item and Selector objects.
-To activate the ``trackref`` module, enable the :setting:`TRACK_REFS` setting.
-It only imposes a minor performance impact, so it should be OK to use it, even
-in production environments.
-
-Once you have ``trackref`` enabled, you can enter the telnet console and inspect
-how many objects (of the classes mentioned above) are currently alive using the
-``prefs()`` function which is an alias to the
-:func:`~scrapy.utils.trackref.print_live_refs` function::
+You can enter the telnet console and inspect how many objects (of the classes
+mentioned above) are currently alive using the ``prefs()`` function which is an
+alias to the :func:`~scrapy.utils.trackref.print_live_refs` function::
telnet localhost 6023
View
3  scrapy/contrib/debug.py
@@ -19,7 +19,6 @@ class StackTraceDump(object):
def __init__(self, crawler=None):
self.crawler = crawler
- self.dumprefs = crawler.settings.getbool('TRACK_REFS')
try:
signal.signal(signal.SIGUSR2, self.dump_stacktrace)
signal.signal(signal.SIGQUIT, self.dump_stacktrace)
@@ -34,7 +33,7 @@ def from_crawler(cls, crawler):
def dump_stacktrace(self, signum, frame):
stackdumps = self._thread_stacks()
enginestatus = format_engine_status(self.crawler.engine)
- liverefs = format_live_refs() if self.dumprefs else ""
+ liverefs = format_live_refs()
msg = "Dumping stack trace and engine status" \
"\n{0}\n{1}\n{2}".format(enginestatus, liverefs, stackdumps)
log.msg(msg)
View
14 scrapy/contrib/memdebug.py
@@ -12,20 +12,19 @@
class MemoryDebugger(object):
- def __init__(self, stats, trackrefs=False):
+ def __init__(self, stats):
try:
import libxml2
self.libxml2 = libxml2
except ImportError:
self.libxml2 = None
self.stats = stats
- self.trackrefs = trackrefs
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
- o = cls(crawler.stats, crawler.settings.getbool('TRACK_REFS'))
+ o = cls(crawler.stats)
crawler.signals.connect(o.engine_started, signals.engine_started)
crawler.signals.connect(o.engine_stopped, signals.engine_stopped)
return o
@@ -40,8 +39,7 @@ def engine_stopped(self):
self.stats.set_value('memdebug/libxml2_leaked_bytes', self.libxml2.debugMemory(1))
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
- if self.trackrefs:
- for cls, wdict in live_refs.iteritems():
- if not wdict:
- continue
- self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
+ for cls, wdict in live_refs.iteritems():
+ if not wdict:
+ continue
+ self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
View
10 scrapy/utils/trackref.py
@@ -2,8 +2,7 @@
references to live object instances.
If you want live objects for a particular class to be tracked, you only have to
-subclass form object_ref (instead of object). Also, remember to turn on
-tracking by enabling the TRACK_REFS setting.
+subclass from object_ref (instead of object).
About performance: This library has a minimal performance impact when enabled,
and no performance penalty at all when disabled (as object_ref becomes just an
@@ -16,8 +15,6 @@
from operator import itemgetter
from types import NoneType
-from scrapy.conf import settings
-
live_refs = defaultdict(weakref.WeakKeyDictionary)
class object_ref(object):
@@ -31,12 +28,7 @@ def __new__(cls, *args, **kwargs):
live_refs[cls][obj] = time()
return obj
-if not settings.getbool('TRACK_REFS'):
- object_ref = object
-
def format_live_refs(ignore=NoneType):
- if object_ref is object:
- return "The trackref module is disabled. Use TRACK_REFS setting to enable it."
s = "Live References" + os.linesep + os.linesep
now = time()
for cls, wdict in live_refs.iteritems():
Please sign in to comment.
Something went wrong with that request. Please try again.