1.在spiders同级创建任意目录,如:commands
2.在其中创建 crawlall.py 文件 (此处文件名就是自定义的命令)
3.编写代码
from scrapy.commands import ScrapyCommand
from scrapy.utils.project import get_project_settings


class Command(ScrapyCommand):
    """Custom ``scrapy crawlall`` command that runs every spider in the project."""

    # Only available when executed inside a Scrapy project.
    requires_project = True

    def syntax(self):
        """Return the usage syntax shown by ``scrapy crawlall -h``."""
        return '[options]'

    def short_desc(self):
        """Return the one-line description shown in the command list."""
        return 'Runs all of the spiders'

    def run(self, args, opts):
        """Schedule every registered spider, then start the crawler engine.

        Parameters:
            args: positional command-line arguments (unused here).
            opts: parsed option namespace; ``opts.__dict__`` forwards the
                options as keyword arguments to each spider's crawl.
        """
        spider_list = self.crawler_process.spiders.list()
        for name in spider_list:
            self.crawler_process.crawl(name, **opts.__dict__)
        # Blocks until all scheduled crawls have finished.
        self.crawler_process.start()
4.settings.py配置:COMMANDS_MODULE = '项目名称.commands'
5.命令行执行:scrapy crawlall
转载请注明原文地址:https://tech.qufami.com/read-6621.html