
Error when running scrapy crawl douban_spider

  File "d:\jsuk\python37\lib\runpy.py", line 85, in _run_code

    exec(code, run_globals)

  File "D:\JsuK\Python37\Scripts\scrapy.exe\__main__.py", line 9, in <module>

  File "d:\jsuk\python37\lib\site-packages\scrapy\cmdline.py", line 150, in exec

ute

    _run_print_help(parser, _run_command, cmd, args, opts)

  File "d:\jsuk\python37\lib\site-packages\scrapy\cmdline.py", line 90, in _run_

print_help

    func(*a, **kw)

  File "d:\jsuk\python37\lib\site-packages\scrapy\cmdline.py", line 157, in _run

_command

    cmd.run(args, opts)

  File "d:\jsuk\python37\lib\site-packages\scrapy\commands\crawl.py", line 57, i

n run

    self.crawler_process.crawl(spname, **opts.spargs)

  File "d:\jsuk\python37\lib\site-packages\scrapy\crawler.py", line 170, in craw

l

    crawler = self.create_crawler(crawler_or_spidercls)

  File "d:\jsuk\python37\lib\site-packages\scrapy\crawler.py", line 198, in crea

te_crawler

    return self._create_crawler(crawler_or_spidercls)

  File "d:\jsuk\python37\lib\site-packages\scrapy\crawler.py", line 203, in _cre

ate_crawler

    return Crawler(spidercls, self.settings)

  File "d:\jsuk\python37\lib\site-packages\scrapy\crawler.py", line 55, in __ini

t__

    self.extensions = ExtensionManager.from_crawler(self)

  File "d:\jsuk\python37\lib\site-packages\scrapy\middleware.py", line 58, in fr

om_crawler

    return cls.from_settings(crawler.settings, crawler)

  File "d:\jsuk\python37\lib\site-packages\scrapy\middleware.py", line 34, in fr

om_settings

    mwcls = load_object(clspath)

  File "d:\jsuk\python37\lib\site-packages\scrapy\utils\misc.py", line 44, in lo

ad_object

    mod = import_module(module)

  File "d:\jsuk\python37\lib\importlib\__init__.py", line 127, in import_module

    return _bootstrap._gcd_import(name[level:], package, level)

  File "<frozen importlib._bootstrap>", line 1006, in _gcd_import

  File "<frozen importlib._bootstrap>", line 983, in _find_and_load

  File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked

  File "<frozen importlib._bootstrap>", line 677, in _load_unlocked

  File "<frozen importlib._bootstrap_external>", line 728, in exec_module

  File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed

  File "d:\jsuk\python37\lib\site-packages\scrapy\extensions\telnet.py", line 12

, in <module>

    from twisted.conch import manhole, telnet

  File "d:\jsuk\python37\lib\site-packages\twisted\conch\manhole.py", line 154

    def write(self, data, async=False):

                              ^

SyntaxError: invalid syntax

Could someone please advise?


6 answers

Which async is it referring to?
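
The async the traceback points at is the parameter in def write(self, data, async=False) at line 154 of twisted\conch\manhole.py. A minimal check, using only the standard library, that async is a reserved keyword on Python 3.7 and therefore can no longer be used as a parameter name:

    import keyword

    # Prints True on Python 3.7+ and False on 3.6 and earlier, which is why the
    # same Twisted release still imports cleanly on older interpreters.
    print(keyword.iskeyword("async"))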


I ran into the same problem. How do you solve it?

#1

Lysokin

In Python 3.7, async became a keyword, so just rename that variable, e.g. to async_ (see the sketch below these replies).
2018-08-09
#2

慕码人0083296 replying to Lysokin

Thanks a lot, your answer solved my problem perfectly.
2018-08-19
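
A minimal sketch of the rename Lysokin describes. The real method is defined inside a class in Twisted, so only the signature change is shown here, plus a stand-alone stand-in to demonstrate that the renamed parameter parses:

    # In d:\jsuk\python37\lib\site-packages\twisted\conch\manhole.py, line 154,
    # the definition the traceback flags is:
    #
    #     def write(self, data, async=False):
    #
    # Renaming the parameter (here to async_) lets the file parse on 3.7+ again;
    # every other use of async inside manhole.py must be renamed to match.
    # Stand-alone stand-in with the renamed signature:
    def write(data, async_=False):
        return (data, async_)

    print(write("hello"))               # ('hello', False)
    print(write("hello", async_=True))  # ('hello', True)

As one of the later answers notes, any non-keyword identifier (such as shark) works equally well, as long as every use in the file is renamed consistently.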

I'm getting the same error... not sure where it's going wrong.


Just rename async to any other identifier, e.g. shark, and it works.

