python3 3.6.7-1~18.04
python3-twisted 17.9.0-2
python3-twisted-bin 17.9.0-2
scrapy 1.5
2019-01-04 13:30:28 [twisted] CRITICAL: Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
2019-01-04 13:30:33 [twisted] CRITICAL: Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
2019-01-04 13:30:38 [twisted] CRITICAL: Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
2019-01-04 13:30:43 [twisted] CRITICAL: Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/scrapy/commands/crawl.py", line 58, in run
self.crawler_process.start()
File "/usr/lib/python3/dist-packages/scrapy/crawler.py", line 291, in start
reactor.run(installSignalHandlers=False) # blocking call
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1243, in run
self.mainLoop()
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 1252, in mainLoop
self.runUntilCurrent()
--- <exception caught here> ---
File "/usr/lib/python3/dist-packages/twisted/internet/base.py", line 878, in runUntilCurrent
call.func(*call.args, **call.kw)
File "/usr/lib/python3/dist-packages/scrapy/utils/reactor.py", line 41, in __call__
return self._func(*self._a, **self._kw)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 135, in _next_request
self.crawl(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 210, in crawl
self.schedule(request, spider)
File "/usr/lib/python3/dist-packages/scrapy/core/engine.py", line 216, in schedule
if not self.slot.scheduler.enqueue_request(request):
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 57, in enqueue_request
dqok = self._dqpush(request)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 86, in _dqpush
self.dqs.push(reqd, -request.priority)
File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
self.queues[priority] = self.qfactory(priority)
File "/usr/lib/python3/dist-packages/scrapy/core/scheduler.py", line 114, in _newdq
return self.dqclass(join(self.dqdir, 'p%s' % priority))
File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
ProblemType: Bug
DistroRelease: Ubuntu 18.04
Package: python3-twisted 17.9.0-2
ProcVersionSignature: Ubuntu 4.15.0-43.46-generic 4.15.18
Uname: Linux 4.15.0-43-generic x86_64
NonfreeKernelModules: nvidia_modeset nvidia
ApportVersion: 2.20.9-0ubuntu7.5
Architecture: amd64
Date: Fri Jan 4 13:34:08 2019
InstallationDate: Installed on 2016-10-27 (798 days ago)
InstallationMedia: Ubuntu 16.04 LTS "Xenial Xerus" - Release amd64 (20160420.1)
PackageArchitecture: all
SourcePackage: twisted
UpgradeStatus: Upgraded to bionic on 2018-11-29 (35 days ago)
With python3.7 it produces the same output:
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/commands/crawl.py", line 58, in run
    self.crawler_process.start()
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/crawler.py", line 293, in start
    reactor.run(installSignalHandlers=False)  # blocking call
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1267, in run
    self.mainLoop()
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1276, in mainLoop
    self.runUntilCurrent()
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 902, in runUntilCurrent
    call.func(*call.args, **call.kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/utils/reactor.py", line 41, in __call__
    return self._func(*self._a, **self._kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 135, in _next_request
    self.crawl(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 210, in crawl
    self.schedule(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 216, in schedule
    if not self.slot.scheduler.enqueue_request(request):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 57, in enqueue_request
    dqok = self._dqpush(request)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 86, in _dqpush
    self.dqs.push(reqd, -request.priority)
  File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
    self.queues[priority] = self.qfactory(priority)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 114, in _newdq
    return self.dqclass(join(self.dqdir, 'p%s' % priority))
  File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
    self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
Unhandled Error
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/commands/crawl.py", line 58, in run
    self.crawler_process.start()
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/crawler.py", line 293, in start
    reactor.run(installSignalHandlers=False)  # blocking call
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1267, in run
    self.mainLoop()
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1276, in mainLoop
    self.runUntilCurrent()
--- <exception caught here> ---
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 902, in runUntilCurrent
    call.func(*call.args, **call.kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/utils/reactor.py", line 41, in __call__
    return self._func(*self._a, **self._kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 135, in _next_request
    self.crawl(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 210, in crawl
    self.schedule(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 216, in schedule
    if not self.slot.scheduler.enqueue_request(request):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 57, in enqueue_request
    dqok = self._dqpush(request)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 86, in _dqpush
    self.dqs.push(reqd, -request.priority)
  File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
    self.queues[priority] = self.qfactory(priority)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 114, in _newdq
    return self.dqclass(join(self.dqdir, 'p%s' % priority))
  File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
    self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
2019-01-04 14:25:17 [twisted] CRITICAL: Unhandled Error
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/commands/crawl.py", line 58, in run
    self.crawler_process.start()
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/crawler.py", line 293, in start
    reactor.run(installSignalHandlers=False)  # blocking call
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1267, in run
    self.mainLoop()
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1276, in mainLoop
    self.runUntilCurrent()
--- <exception caught here> ---
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 902, in runUntilCurrent
    call.func(*call.args, **call.kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/utils/reactor.py", line 41, in __call__
    return self._func(*self._a, **self._kw)