Output of the new DB entries
This commit is contained in:
parent bad48e1627
commit cfbbb9ee3d
2399 changed files with 843193 additions and 43 deletions
33 venv/lib/python3.9/site-packages/scrapy/commands/crawl.py Normal file
@@ -0,0 +1,33 @@
from scrapy.commands import BaseRunSpiderCommand
from scrapy.exceptions import UsageError


class Command(BaseRunSpiderCommand):

    requires_project = True

    def syntax(self):
        return "[options] <spider>"

    def short_desc(self):
        return "Run a spider"

    def run(self, args, opts):
        if len(args) < 1:
            raise UsageError()
        elif len(args) > 1:
            raise UsageError("running 'scrapy crawl' with more than one spider is no longer supported")
        spname = args[0]

        crawl_defer = self.crawler_process.crawl(spname, **opts.spargs)

        if getattr(crawl_defer, 'result', None) is not None and issubclass(crawl_defer.result.type, Exception):
            self.exitcode = 1
        else:
            self.crawler_process.start()

            if (
                self.crawler_process.bootstrap_failed
                or hasattr(self.crawler_process, 'has_exception') and self.crawler_process.has_exception
            ):
                self.exitcode = 1
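For context, the file added in this hunk is Scrapy's built-in `crawl` command, pulled into the repository because the virtualenv (`venv/`) was committed. The sketch below is not part of the commit; it only illustrates, under assumptions, what `Command.run()` ultimately drives: keyword arguments passed to `crawler_process.crawl()` (filled from `-a` options into `opts.spargs`) end up as spider attributes. The spider class, its name `quotes`, and the start URL are hypothetical placeholders.

```python
# Illustrative sketch, assuming a hypothetical "quotes" spider; mirrors what
# Command.run() does via self.crawler_process.crawl(spname, **opts.spargs).
import scrapy
from scrapy.crawler import CrawlerProcess


class QuotesSpider(scrapy.Spider):
    name = "quotes"                                  # hypothetical spider name
    start_urls = ["https://quotes.toscrape.com"]     # placeholder start URL

    def parse(self, response):
        # Yield one item per quote block found on the page
        for quote in response.css("div.quote"):
            yield {"text": quote.css("span.text::text").get()}


if __name__ == "__main__":
    process = CrawlerProcess(settings={"LOG_LEVEL": "INFO"})
    # Rough equivalent of `scrapy crawl quotes -a category=humor`:
    # keyword arguments become spider attributes, like opts.spargs above.
    process.crawl(QuotesSpider, category="humor")
    process.start()  # blocks until the crawl finishes
```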