
Commit be7821a
Optimization - avoid temporary list objects, unnecessary function call
mlyundin committed Sep 8, 2015
1 parent 6490cb5 commit be7821a
Showing 7 changed files with 11 additions and 11 deletions.
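
The common thread across all seven files: a list comprehension passed straight to another callable builds a full temporary list that is iterated once and discarded, while a generator expression yields items lazily in constant memory. A minimal sketch of the difference (generic illustration, not code from this commit):

    import sys

    squares_list = [x * x for x in range(1000)]  # materializes all 1000 results up front
    squares_gen = (x * x for x in range(1000))   # lazy: produces results on demand

    print(sys.getsizeof(squares_list))  # several kilobytes
    print(sys.getsizeof(squares_gen))   # ~100 bytes, regardless of input size

    # When the consumer iterates only once, the generator form gives the
    # same result without ever building the temporary list:
    total = sum(x * x for x in range(1000))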
2 changes: 1 addition & 1 deletion scrapy/commands/check.py
@@ -62,7 +62,7 @@ def run(self, args, opts):
             self.settings['SPIDER_CONTRACTS_BASE'],
             self.settings['SPIDER_CONTRACTS'],
         )
-        conman = ContractsManager([load_object(c) for c in contracts])
+        conman = ContractsManager(load_object(c) for c in contracts)
         runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
         result = TextTestResult(runner.stream, runner.descriptions, runner.verbosity)
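
One general caveat with this kind of rewrite (ordinary Python behavior, noted here as context): a generator expression can be consumed only once, so it is a safe substitute for a list only when the callee iterates its argument a single time — which ContractsManager presumably does when registering the loaded contract classes.

    gen = (x * x for x in range(3))
    print(list(gen))  # [0, 1, 4]
    print(list(gen))  # [] -- the generator is already exhausted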
4 changes: 2 additions & 2 deletions scrapy/commands/genspider.py
@@ -88,8 +88,8 @@ def _genspider(self, module, name, domain, template_name, template_file):
             'module': module,
             'name': name,
             'domain': domain,
-            'classname': '%sSpider' % ''.join([s.capitalize() \
-                for s in module.split('_')])
+            'classname': '%sSpider' % ''.join(s.capitalize() \
+                for s in module.split('_'))
         }
         spiders_module = import_module(self.settings['NEWSPIDER_MODULE'])
         spiders_dir = abspath(dirname(spiders_module.__file__))
2 changes: 1 addition & 1 deletion scrapy/crawler.py
@@ -173,7 +173,7 @@ def stop(self):
         Returns a deferred that is fired when they all have ended.
         """
-        return defer.DeferredList([c.stop() for c in list(self.crawlers)])
+        return defer.DeferredList([c.stop() for c in self.crawlers])

     @defer.inlineCallbacks
     def join(self):
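
Here only the inner list(self.crawlers) copy is dropped; the outer list comprehension stays because DeferredList needs all the deferreds. Skipping the copy assumes self.crawlers is not mutated while the comprehension runs (crawlers are presumably removed later, when their stop deferreds fire), since mutating a set mid-iteration is an error:

    s = {1, 2, 3}
    try:
        for x in s:
            s.discard(x)  # shrinking the set while iterating over it
    except RuntimeError as err:
        print(err)  # Set changed size during iteration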
6 changes: 3 additions & 3 deletions scrapy/loader/__init__.py
@@ -69,7 +69,7 @@ def get_value(self, value, *processors, **kw):
         regex = kw.get('re', None)
         if regex:
             value = arg_to_iter(value)
-            value = flatten([extract_regex(regex, x) for x in value])
+            value = flatten(extract_regex(regex, x) for x in value)

         for proc in processors:
             if value is None:
@@ -149,7 +149,7 @@ def _get_values(self, xpaths, **kw):
     def _get_xpathvalues(self, xpaths, **kw):
         self._check_selector_method()
         xpaths = arg_to_iter(xpaths)
-        return flatten([self.selector.xpath(xpath).extract() for xpath in xpaths])
+        return flatten(self.selector.xpath(xpath).extract() for xpath in xpaths)

     def add_css(self, field_name, css, *processors, **kw):
         values = self._get_cssvalues(css, **kw)
@@ -166,7 +166,7 @@ def get_css(self, css, *processors, **kw):
     def _get_cssvalues(self, csss, **kw):
         self._check_selector_method()
         csss = arg_to_iter(csss)
-        return flatten([self.selector.css(css).extract() for css in csss])
+        return flatten(self.selector.css(css).extract() for css in csss)


 XPathItemLoader = create_deprecated_class('XPathItemLoader', ItemLoader)
2 changes: 1 addition & 1 deletion scrapy/shell.py
@@ -146,7 +146,7 @@ def get_help(self):
                  "update local objects")
         b.append("  view(response)    View response in a browser")

-        return "\n".join(["[s] %s" % l for l in b])
+        return "\n".join("[s] %s" % l for l in b)

     def _is_relevant(self, value):
         return isinstance(value, self.relevant_classes)
2 changes: 1 addition & 1 deletion scrapy/utils/defer.py
@@ -61,7 +61,7 @@ def parallel(iterable, count, callable, *args, **named):
     """
     coop = task.Cooperator()
     work = (callable(elem, *args, **named) for elem in iterable)
-    return defer.DeferredList([coop.coiterate(work) for i in range(count)])
+    return defer.DeferredList([coop.coiterate(work) for _ in range(count)])

 def process_chain(callbacks, input, *a, **kw):
     """Return a Deferred built by chaining the given callbacks"""
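
The change here is purely cosmetic (_ is the conventional name for an unused loop variable), but the surrounding pattern deserves a note: all count cooperative tasks pull from the same work generator, so at most count jobs run concurrently. A minimal usage sketch with a hypothetical show() job, assuming Twisted is installed:

    from twisted.internet import defer, reactor, task

    def show(item):
        print("processing", item)
        return defer.succeed(item)

    work = (show(i) for i in range(5))  # one shared generator of jobs
    coop = task.Cooperator()
    # Two cooperative tasks drain the same generator, i.e. two "workers":
    d = defer.DeferredList([coop.coiterate(work) for _ in range(2)])
    d.addCallback(lambda _: reactor.stop())
    reactor.run()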
4 changes: 2 additions & 2 deletions scrapy/utils/request.py
@@ -44,8 +44,8 @@ def request_fingerprint(request, include_headers=None):
     """
     if include_headers:
-        include_headers = tuple([to_bytes(h.lower())
-                                 for h in sorted(include_headers)])
+        include_headers = tuple(to_bytes(h.lower())
+                                for h in sorted(include_headers))
     cache = _fingerprint_cache.setdefault(request, {})
     if include_headers not in cache:
         fp = hashlib.sha1()
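
Here tuple() consumes the generator directly instead of copying a list. The tuple itself matters for the lines that follow: the normalized header names serve as a key into the per-request cache dict, and dict keys must be hashable. A standalone sketch of the normalization, using .encode() in place of Scrapy's to_bytes helper:

    include_headers = ['User-Agent', 'Accept']
    key = tuple(h.lower().encode() for h in sorted(include_headers))
    print(key)  # (b'accept', b'user-agent')

    cache = {}
    cache[key] = 'fingerprint would go here'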
