-
Notifications
You must be signed in to change notification settings - Fork 10.4k
/
crawl.py
37 lines (29 loc) · 1.05 KB
/
crawl.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
from scrapy.commands import BaseRunSpiderCommand
from scrapy.exceptions import UsageError
class Command(BaseRunSpiderCommand):
    """The ``scrapy crawl`` command: run exactly one spider by name."""

    # Must be invoked from inside a Scrapy project directory.
    requires_project = True

    def syntax(self):
        """Return the usage suffix shown after the command name."""
        return "[options] <spider>"

    def short_desc(self):
        """Return the one-line description shown in command listings."""
        return "Run a spider"

    def run(self, args, opts):
        """Schedule the named spider and start the crawler process.

        Sets ``self.exitcode`` to 1 when scheduling fails up front or
        when the crawl finishes with errors.
        """
        # Exactly one positional argument (the spider name) is accepted.
        if len(args) != 1:
            if args:
                raise UsageError(
                    "running 'scrapy crawl' with more than one spider is not supported"
                )
            raise UsageError()

        spider_name = args[0]
        deferred = self.crawler_process.crawl(spider_name, **opts.spargs)

        # If the deferred already carries a Failure (e.g. unknown spider),
        # report failure without ever starting the reactor.
        outcome = getattr(deferred, "result", None)
        if outcome is not None and issubclass(outcome.type, Exception):
            self.exitcode = 1
        else:
            self.crawler_process.start()
            # After the reactor stops, surface bootstrap or runtime errors
            # via the process exit code.
            failed = self.crawler_process.bootstrap_failed or (
                hasattr(self.crawler_process, "has_exception")
                and self.crawler_process.has_exception
            )
            if failed:
                self.exitcode = 1