Skip to content

Instantly share code, notes, and snippets.

@rothnic
Last active February 9, 2017 17:47
Show Gist options
  • Save rothnic/2bea681d8413b2728123f90f5331e9d6 to your computer and use it in GitHub Desktop.
Stack trace produced when interrupting (Ctrl-C) a slow `dask.bag.read_text` computation on S3 files, using dask's multiprocessing scheduler.
→ conda list
# packages in environment at /Users/nroth/anaconda/envs/dask:
#
appnope 0.1.0 py36_0 defaults
bokeh 0.12.4 py36_0 defaults
boto3 1.4.3 py36_0 defaults
botocore 1.4.90 py36_0 defaults
chest 0.2.3 py36_0 defaults
cloudpickle 0.2.2 py36_0 defaults
cycler 0.10.0 py36_0 defaults
dask 0.13.0 py36_0 defaults
decorator 4.0.11 py36_0 defaults
docutils 0.13.1 py36_0 defaults
entrypoints 0.2.2 py36_0 defaults
freetype 2.5.5 2 defaults
heapdict 1.0.0 py36_1 defaults
icu 54.1 0 defaults
ipykernel 4.5.2 py36_0 defaults
ipython 5.1.0 py36_1 defaults
ipython_genutils 0.1.0 py36_0 defaults
ipywidgets 5.2.2 py36_1 defaults
jinja2 2.9.4 py36_0 defaults
jmespath 0.9.0 py36_0 defaults
jsonschema 2.5.1 py36_0 defaults
jupyter 1.0.0 py36_3 defaults
jupyter_client 4.4.0 py36_0 defaults
jupyter_console 5.0.0 py36_0 defaults
jupyter_core 4.2.1 py36_0 defaults
libpng 1.6.27 0 defaults
locket 0.2.0 py36_1 defaults
markupsafe 0.23 py36_2 defaults
matplotlib 2.0.0 np111py36_0 defaults
mistune 0.7.3 py36_1 defaults
mkl 2017.0.1 0 defaults
nbconvert 4.2.0 py36_0 defaults
nbformat 4.2.0 py36_0 defaults
notebook 4.3.1 py36_0 defaults
numpy 1.11.3 py36_0 defaults
openssl 1.0.2k 0 defaults
pandas 0.19.2 np111py36_1 defaults
partd 0.3.7 py36_0 defaults
path.py 10.0 py36_0 defaults
pexpect 4.2.1 py36_0 defaults
pickleshare 0.7.4 py36_0 defaults
pip 9.0.1 py36_1 defaults
prompt_toolkit 1.0.9 py36_0 defaults
ptyprocess 0.5.1 py36_0 defaults
pygments 2.1.3 py36_0 defaults
pyparsing 2.1.4 py36_0 defaults
pyqt 5.6.0 py36_2 defaults
python 3.6.0 0 defaults
python-dateutil 2.6.0 py36_0 defaults
pytz 2016.10 py36_0 defaults
pyyaml 3.12 py36_0 defaults
pyzmq 16.0.2 py36_0 defaults
qt 5.6.2 0 defaults
qtconsole 4.2.1 py36_1 defaults
readline 6.2 2 defaults
requests 2.12.4 py36_0 defaults
s3fs 0.0.8 py36_0 defaults
s3transfer 0.1.10 py36_0 defaults
setuptools 27.2.0 py36_0 defaults
simplegeneric 0.8.1 py36_1 defaults
sip 4.18 py36_0 defaults
six 1.10.0 py36_0 defaults
sqlite 3.13.0 0 defaults
terminado 0.6 py36_0 defaults
tk 8.5.18 0 defaults
toolz 0.8.2 py36_0 defaults
tornado 4.4.2 py36_0 defaults
traitlets 4.3.1 py36_0 defaults
wcwidth 0.1.7 py36_0 defaults
wheel 0.29.0 py36_0 defaults
widgetsnbextension 1.2.6 py36_0 defaults
xz 5.2.2 1 defaults
yaml 0.1.6 0 defaults
zlib 1.2.8 3 defaults
Process ForkPoolWorker-21:
Process ForkPoolWorker-25:
Process ForkPoolWorker-23:
Process ForkPoolWorker-26:
Process ForkPoolWorker-24:
Process ForkPoolWorker-22:
Process ForkPoolWorker-20:
Process ForkPoolWorker-19:
Traceback (most recent call last):
Traceback (most recent call last):
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
Traceback (most recent call last):
---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
<ipython-input-45-b99fb61d2096> in <module>()
----> 1 b.take(5)
/Users/nroth/anaconda/envs/dask/lib/python3.6/site-packages/dask/bag/core.py in take(self, k, npartitions, compute)
975
976 if compute:
--> 977 return tuple(b.compute())
978 else:
979 return b
/Users/nroth/anaconda/envs/dask/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
77 Extra keywords to forward to the scheduler ``get`` function.
78 """
---> 79 return compute(self, **kwargs)[0]
80
81 @classmethod
/Users/nroth/anaconda/envs/dask/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
177 dsk = merge(var.dask for var in variables)
178 keys = [var._keys() for var in variables]
--> 179 results = get(dsk, keys, **kwargs)
180
181 results_iter = iter(results)
/Users/nroth/anaconda/envs/dask/lib/python3.6/site-packages/dask/multiprocessing.py in get(dsk, keys, num_workers, func_loads, func_dumps, optimize_graph, **kwargs)
84 result = get_async(pool.apply_async, len(pool._pool), dsk3, keys,
85 get_id=_process_get_id,
---> 86 dumps=dumps, loads=loads, **kwargs)
87 finally:
88 if cleanup:
/Users/nroth/anaconda/envs/dask/lib/python3.6/site-packages/dask/async.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, dumps, loads, **kwargs)
482 # Main loop, wait on tasks to finish, insert new ones
483 while state['waiting'] or state['ready'] or state['running']:
--> 484 key, res_info = queue.get()
485 res, tb, worker_id = loads(res_info)
486 if isinstance(res, BaseException):
/Users/nroth/anaconda/envs/dask/lib/python3.6/queue.py in get(self, block, timeout)
162 elif timeout is None:
163 while not self._qsize():
--> 164 self.not_empty.wait()
165 elif timeout < 0:
166 raise ValueError("'timeout' must be a non-negative number")
/Users/nroth/anaconda/envs/dask/lib/python3.6/threading.py in wait(self, timeout)
293 try: # restore state no matter what (e.g., KeyboardInterrupt)
294 if timeout is None:
--> 295 waiter.acquire()
296 gotit = True
297 else:
KeyboardInterrupt:
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
self.run()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/pool.py", line 108, in worker
task = get()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 343, in get
res = self._reader.recv_bytes()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/queues.py", line 342, in get
with self._rlock:
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
KeyboardInterrupt
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/connection.py", line 216, in recv_bytes
buf = self._recv_bytes(maxlength)
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/connection.py", line 407, in _recv_bytes
buf = self._recv(4)
KeyboardInterrupt
File "/Users/nroth/anaconda/envs/dask/lib/python3.6/multiprocessing/connection.py", line 379, in _recv
chunk = read(handle, remaining)
KeyboardInterrupt
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment