[Bug 1810516] Re: scrapy interrupt from python3-twisted
Javier Ruano
1810516 at bugs.launchpad.net
Fri Jan 4 13:28:21 UTC 2019
With python3.7 it produces the same output:
Unhandled Error
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/commands/crawl.py", line 58, in run
    self.crawler_process.start()
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/crawler.py", line 293, in start
    reactor.run(installSignalHandlers=False) # blocking call
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1267, in run
    self.mainLoop()
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1276, in mainLoop
    self.runUntilCurrent()
--- <exception caught here> ---
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 902, in runUntilCurrent
    call.func(*call.args, **call.kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/utils/reactor.py", line 41, in __call__
    return self._func(*self._a, **self._kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 135, in _next_request
    self.crawl(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 210, in crawl
    self.schedule(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 216, in schedule
    if not self.slot.scheduler.enqueue_request(request):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 57, in enqueue_request
    dqok = self._dqpush(request)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 86, in _dqpush
    self.dqs.push(reqd, -request.priority)
  File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
    self.queues[priority] = self.qfactory(priority)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 114, in _newdq
    return self.dqclass(join(self.dqdir, 'p%s' % priority))
  File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
    self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
2019-01-04 14:25:17 [twisted] CRITICAL: Unhandled Error
[the identical traceback is logged again roughly every five seconds, through 2019-01-04 14:26:14]
cannot import name 'etree' from 'lxml' (/usr/lib/python3/dist-packages/lxml/__init__.py)
2019-01-04 14:26:14 [scrapy.extensions.logstats] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min)
2019-01-04 14:26:14 [twisted] CRITICAL: Unhandled Error
struct.error: unpack requires a buffer of 4 bytes
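The struct.error at the end of each traceback is raised while queuelib opens one of the scheduler's on-disk request queue files (the 'p%s' file created in _newdq) and reads its fixed-size length field, which comes back shorter than expected after the earlier interrupt. Below is a minimal sketch of that failure mode, assuming a 4-byte big-endian length field; the format string and the read_queue_size helper are illustrative, not taken from queuelib or the report.

import struct

# Assumed for illustration: the queue file stores its element count as a
# 4-byte big-endian unsigned int.
SIZE_FORMAT = ">L"
SIZE_BYTES = struct.calcsize(SIZE_FORMAT)  # 4

def read_queue_size(tail: bytes) -> int:
    # Mirrors the failing line from the traceback:
    #   self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
    size, = struct.unpack(SIZE_FORMAT, tail)
    return size

print(read_queue_size(struct.pack(SIZE_FORMAT, 7)))  # intact length field: prints 7

try:
    read_queue_size(b"")  # truncated/empty queue file left by an interrupted crawl
except struct.error as exc:
    print(exc)  # unpack requires a buffer of 4 bytes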
** Also affects: python-scrapy (Ubuntu)
   Importance: Undecided
       Status: New

** Also affects: python3.6 (Ubuntu)
   Importance: Undecided
       Status: New

** Also affects: python3.7 (Ubuntu)
   Importance: Undecided
       Status: New
--
You received this bug notification because you are a member of Ubuntu
Foundations Bugs, which is subscribed to twisted in Ubuntu.
https://bugs.launchpad.net/bugs/1810516
Title:
  scrapy interrupt from python3-twisted

Status in python-scrapy package in Ubuntu:
  New
Status in python3.6 package in Ubuntu:
  New
Status in python3.7 package in Ubuntu:
  New
Status in twisted package in Ubuntu:
  New
Bug description:
python3 3.6.7-1~18.04
python3-twisted 17.9.0-2
python3-twisted-bin 17.9.0-2
scrapy 1.5
2019-01-04 13:30:28 [twisted] CRITICAL: Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
Unhandled Error (same traceback as above)
2019-01-04 13:30:33 [twisted] CRITICAL: Unhandled Error (same traceback as above)
2019-01-04 13:30:38 [twisted] CRITICAL: Unhandled Error (same traceback as above)
2019-01-04 13:30:43 [twisted] CRITICAL: Unhandled Error (same traceback as above)
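Since the failure happens while reopening the persisted p<priority> queue files (see _newdq in the trace), a likely workaround until the underlying bug is fixed is to clear out queue files that are too short to hold the 4-byte size field, or to remove the stale job directory entirely. A hypothetical cleanup sketch; the "requests.queue" subdirectory name is an assumption about Scrapy's default JOBDIR layout, and deleting the files discards the persisted request queue:

    import os

    def remove_short_queue_files(jobdir):
        # Hypothetical helper: delete priority-queue files (p0, p1, ...) that are
        # too short to hold the 4-byte size field, so the disk queues can be
        # recreated cleanly on the next crawl.
        qdir = os.path.join(jobdir, "requests.queue")  # assumed default Scrapy layout
        if not os.path.isdir(qdir):
            return
        for name in os.listdir(qdir):
            path = os.path.join(qdir, name)
            if name.startswith("p") and os.path.isfile(path) and os.path.getsize(path) < 4:
                os.remove(path)

    remove_short_queue_files("crawls/myjob")  # hypothetical JOBDIR path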
ProblemType: Bug
DistroRelease: Ubuntu 18.04
Package: python3-twisted 17.9.0-2
ProcVersionSignature: Ubuntu 4.15.0-43.46-generic 4.15.18
Uname: Linux 4.15.0-43-generic x86_64
NonfreeKernelModules: nvidia_modeset nvidia
ApportVersion: 2.20.9-0ubuntu7.5
Architecture: amd64
Date: Fri Jan 4 13:34:08 2019
InstallationDate: Installed on 2016-10-27 (798 days ago)
InstallationMedia: Ubuntu 16.04 LTS "Xenial Xerus" - Release amd64 (20160420.1)
PackageArchitecture: all
SourcePackage: twisted
UpgradeStatus: Upgraded to bionic on 2018-11-29 (35 days ago)
To manage notifications about this bug go to:
https://bugs.launchpad.net/ubuntu/+source/python-scrapy/+bug/1810516/+subscriptions