Skip to content

Instantly share code, notes, and snippets.

@josephhardinee
Created August 14, 2018 19:19
Show Gist options
  • Save josephhardinee/23594e44d943a6f97fc689842d501952 to your computer and use it in GitHub Desktop.
Save josephhardinee/23594e44d943a6f97fc689842d501952 to your computer and use it in GitHub Desktop.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import dask_jobqueue\n",
"from dask.distributed import Client\n",
"import numpy as np\n",
"import time"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Case 1\n",
"Adaptive starts workers, then immediately terminates them. "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"cluster = dask_jobqueue.PBSCluster()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<distributed.deploy.adaptive.Adaptive at 0x2aed720856a0>"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"cluster.adapt(minimum=2, maximum=4)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"time.sleep(30) # I actually just wait until I see them start running. "
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"client = Client(cluster)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table style=\"border: 2px solid white;\">\n",
"<tr>\n",
"<td style=\"vertical-align: top; border: 0px solid white\">\n",
"<h3>Client</h3>\n",
"<ul>\n",
" <li><b>Scheduler: </b>tcp://10.23.216.82:58462\n",
" <li><b>Dashboard: </b><a href='http://10.23.216.82:8787/status' target='_blank'>http://10.23.216.82:8787/status</a>\n",
"</ul>\n",
"</td>\n",
"<td style=\"vertical-align: top; border: 0px solid white\">\n",
"<h3>Cluster</h3>\n",
"<ul>\n",
" <li><b>Workers: </b>12</li>\n",
" <li><b>Cores: </b>60</li>\n",
" <li><b>Memory: </b>540.00 GB</li>\n",
"</ul>\n",
"</td>\n",
"</tr>\n",
"</table>"
],
"text/plain": [
"<Client: scheduler='tcp://10.23.216.82:58462' processes=0 cores=0>"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"client"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"or-condo-pbs01: \n",
" Req'd Req'd Elap\n",
"Job ID Username Queue Jobname SessID NDS TSK Memory Time S Time\n",
"----------------------- ----------- -------- ---------------- ------ ----- ------ --------- --------- - ---------\n",
"398436.or-condo-pbs01 josephhardi high_mem dask-worker 108413 -- -- -- 00:08:00 C -- \n",
"398437.or-condo-pbs01 josephhardi high_mem dask-worker 190514 -- -- -- 00:08:00 C -- \n",
"398438.or-condo-pbs01 josephhardi high_mem dask-worker 107711 -- -- -- 00:08:00 C -- \n",
"398439.or-condo-pbs01 josephhardi high_mem dask-worker 178733 -- -- -- 00:08:00 C -- \n",
"398440.or-condo-pbs01 josephhardi high_mem dask-worker 188305 -- -- -- 00:08:00 C -- \n",
"398442.or-condo-pbs01 josephhardi high_mem dask-worker 109520 -- -- -- 00:08:00 R 00:00:07\n",
"398443.or-condo-pbs01 josephhardi high_mem dask-worker 191612 -- -- -- 00:08:00 R 00:00:07\n"
]
}
],
"source": [
"!qstat -u josephhardinee"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now if I wait another 10-15 seconds or so, all of the worker jobs show as completed (state C)."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"or-condo-pbs01: \n",
" Req'd Req'd Elap\n",
"Job ID Username Queue Jobname SessID NDS TSK Memory Time S Time\n",
"----------------------- ----------- -------- ---------------- ------ ----- ------ --------- --------- - ---------\n",
"398436.or-condo-pbs01 josephhardi high_mem dask-worker 108413 -- -- -- 00:08:00 C -- \n",
"398437.or-condo-pbs01 josephhardi high_mem dask-worker 190514 -- -- -- 00:08:00 C -- \n",
"398438.or-condo-pbs01 josephhardi high_mem dask-worker 107711 -- -- -- 00:08:00 C -- \n",
"398439.or-condo-pbs01 josephhardi high_mem dask-worker 178733 -- -- -- 00:08:00 C -- \n",
"398440.or-condo-pbs01 josephhardi high_mem dask-worker 188305 -- -- -- 00:08:00 C -- \n",
"398442.or-condo-pbs01 josephhardi high_mem dask-worker 109520 -- -- -- 00:08:00 C -- \n",
"398443.or-condo-pbs01 josephhardi high_mem dask-worker 191612 -- -- -- 00:08:00 C -- \n"
]
}
],
"source": [
"!qstat -u josephhardinee"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Case 2: Not using adaptive\n",
"As a sanity check, if I just use scale instead (I usually restart the kernel for this portion): "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import dask_jobqueue\n",
"from dask.distributed import Client\n",
"import numpy as np\n",
"import time"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"cluster = dask_jobqueue.PBSCluster()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"cluster.scale(2)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"time.sleep(30) # I actually just wait until I see them start running. "
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table style=\"border: 2px solid white;\">\n",
"<tr>\n",
"<td style=\"vertical-align: top; border: 0px solid white\">\n",
"<h3>Client</h3>\n",
"<ul>\n",
" <li><b>Scheduler: </b>tcp://10.23.216.82:57077\n",
" <li><b>Dashboard: </b><a href='http://10.23.216.82:8787/status' target='_blank'>http://10.23.216.82:8787/status</a>\n",
"</ul>\n",
"</td>\n",
"<td style=\"vertical-align: top; border: 0px solid white\">\n",
"<h3>Cluster</h3>\n",
"<ul>\n",
" <li><b>Workers: </b>12</li>\n",
" <li><b>Cores: </b>60</li>\n",
" <li><b>Memory: </b>540.00 GB</li>\n",
"</ul>\n",
"</td>\n",
"</tr>\n",
"</table>"
],
"text/plain": [
"<Client: scheduler='tcp://10.23.216.82:57077' processes=12 cores=60>"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"client = Client(cluster)\n",
"client"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"or-condo-pbs01: \n",
" Req'd Req'd Elap\n",
"Job ID Username Queue Jobname SessID NDS TSK Memory Time S Time\n",
"----------------------- ----------- -------- ---------------- ------ ----- ------ --------- --------- - ---------\n",
"398544.or-condo-pbs01 josephhardi high_mem dask-worker 111160 -- -- -- 00:08:00 R 00:00:06\n",
"398545.or-condo-pbs01 josephhardi high_mem dask-worker 193261 -- -- -- 00:08:00 R 00:00:06\n"
]
}
],
"source": [
"!qstat -u josephhardinee"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"time.sleep(60) # Sleeping a little longer to show they stay around"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"or-condo-pbs01: \n",
" Req'd Req'd Elap\n",
"Job ID Username Queue Jobname SessID NDS TSK Memory Time S Time\n",
"----------------------- ----------- -------- ---------------- ------ ----- ------ --------- --------- - ---------\n",
"398544.or-condo-pbs01 josephhardi high_mem dask-worker 111160 -- -- -- 00:08:00 R 00:01:20\n",
"398545.or-condo-pbs01 josephhardi high_mem dask-worker 193261 -- -- -- 00:08:00 R 00:01:20\n"
]
}
],
"source": [
"!qstat -u josephhardinee"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we see that the workers do stick around and keep running, so it is not a configuration issue in the base system. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Worker Log\n",
"The error log for one of the adaptive workers is below:"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"distributed.nanny - INFO - Start Nanny at: 'tcp://10.23.217.88:38230'\n",
"distributed.nanny - INFO - Start Nanny at: 'tcp://10.23.217.88:59591'\n",
"distributed.nanny - INFO - Start Nanny at: 'tcp://10.23.217.88:48146'\n",
"distributed.nanny - INFO - Start Nanny at: 'tcp://10.23.217.88:41269'\n",
"distributed.nanny - INFO - Start Nanny at: 'tcp://10.23.217.88:57155'\n",
"distributed.nanny - INFO - Start Nanny at: 'tcp://10.23.217.88:35403'\n",
"distributed.worker - INFO - Start worker at: tcp://10.23.217.88:51270\n",
"distributed.worker - INFO - Listening to: tcp://10.23.217.88:51270\n",
"distributed.worker - INFO - nanny at: 10.23.217.88:57155\n",
"distributed.worker - INFO - bokeh at: 10.23.217.88:8789\n",
"distributed.worker - INFO - Waiting to connect to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Start worker at: tcp://10.23.217.88:46034\n",
"distributed.worker - INFO - Listening to: tcp://10.23.217.88:46034\n",
"distributed.worker - INFO - nanny at: 10.23.217.88:41269\n",
"distributed.worker - INFO - bokeh at: 10.23.217.88:56117\n",
"distributed.worker - INFO - Waiting to connect to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - Threads: 5\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Memory: 45.00 GB\n",
"distributed.worker - INFO - Local Directory: /localscratch/tmp.josephhardinee.398443.or-condo-pbs01/worker-77ngj203\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Threads: 5\n",
"distributed.worker - INFO - Memory: 45.00 GB\n",
"distributed.worker - INFO - Local Directory: /localscratch/tmp.josephhardinee.398443.or-condo-pbs01/worker-_wokbm59\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Start worker at: tcp://10.23.217.88:35790\n",
"distributed.worker - INFO - Listening to: tcp://10.23.217.88:35790\n",
"distributed.worker - INFO - nanny at: 10.23.217.88:48146\n",
"distributed.worker - INFO - bokeh at: 10.23.217.88:57072\n",
"distributed.worker - INFO - Waiting to connect to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - Registered to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.core - INFO - Starting established connection\n",
"distributed.worker - INFO - Threads: 5\n",
"distributed.worker - INFO - Memory: 45.00 GB\n",
"distributed.worker - INFO - Local Directory: /localscratch/tmp.josephhardinee.398443.or-condo-pbs01/worker-7oeky8ex\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Registered to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.core - INFO - Starting established connection\n",
"distributed.worker - INFO - Registered to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.core - INFO - Starting established connection\n",
"distributed.worker - INFO - Start worker at: tcp://10.23.217.88:39182\n",
"distributed.worker - INFO - Listening to: tcp://10.23.217.88:39182\n",
"distributed.worker - INFO - nanny at: 10.23.217.88:35403\n",
"distributed.worker - INFO - bokeh at: 10.23.217.88:46993\n",
"distributed.worker - INFO - Waiting to connect to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Threads: 5\n",
"distributed.worker - INFO - Memory: 45.00 GB\n",
"distributed.worker - INFO - Local Directory: /localscratch/tmp.josephhardinee.398443.or-condo-pbs01/worker-3an669iw\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Registered to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.core - INFO - Starting established connection\n",
"distributed.worker - INFO - Start worker at: tcp://10.23.217.88:54551\n",
"distributed.worker - INFO - Listening to: tcp://10.23.217.88:54551\n",
"distributed.worker - INFO - nanny at: 10.23.217.88:59591\n",
"distributed.worker - INFO - bokeh at: 10.23.217.88:48696\n",
"distributed.worker - INFO - Waiting to connect to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Threads: 5\n",
"distributed.worker - INFO - Memory: 45.00 GB\n",
"distributed.worker - INFO - Local Directory: /localscratch/tmp.josephhardinee.398443.or-condo-pbs01/worker-od913vc8\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Registered to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.core - INFO - Starting established connection\n",
"distributed.worker - INFO - Start worker at: tcp://10.23.217.88:42768\n",
"distributed.worker - INFO - Listening to: tcp://10.23.217.88:42768\n",
"distributed.worker - INFO - nanny at: 10.23.217.88:38230\n",
"distributed.worker - INFO - bokeh at: 10.23.217.88:36628\n",
"distributed.worker - INFO - Waiting to connect to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Threads: 5\n",
"distributed.worker - INFO - Memory: 45.00 GB\n",
"distributed.worker - INFO - Local Directory: /localscratch/tmp.josephhardinee.398443.or-condo-pbs01/worker-y_6h0k3q\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.worker - INFO - Registered to: tcp://10.23.216.82:58462\n",
"distributed.worker - INFO - -------------------------------------------------\n",
"distributed.core - INFO - Starting established connection\n",
"distributed.worker - INFO - Stopping worker at tcp://10.23.217.88:51270\n",
"distributed.worker - INFO - Stopping worker at tcp://10.23.217.88:54551\n",
"distributed.nanny - INFO - Closing Nanny at 'tcp://10.23.217.88:57155'\n",
"distributed.nanny - INFO - Closing Nanny at 'tcp://10.23.217.88:59591'\n",
"distributed.dask_worker - INFO - End worker\n",
"distributed.process - WARNING - reaping stray process <ForkServerProcess(ForkServerProcess-1, started daemon)>\n",
"distributed.process - WARNING - reaping stray process <ForkServerProcess(ForkServerProcess-6, started daemon)>\n",
"distributed.process - WARNING - reaping stray process <ForkServerProcess(ForkServerProcess-3, started daemon)>\n",
"distributed.process - WARNING - reaping stray process <ForkServerProcess(ForkServerProcess-4, started daemon)>\n"
]
}
],
"source": [
"!cat dask-worker.e398443"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Environment:"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"# packages in environment at /home/josephhardinee/.conda/envs/py3:\n",
"#\n",
"arm-pyart 1.8.0 <pip>\n",
"backports 1.0 py36hfa02d7e_1 anaconda\n",
"backports.weakref 1.0rc1 py36_0 anaconda\n",
"bkcharts 0.2 py36_0 \n",
"bleach 1.5.0 py36_0 \n",
"blosc 1.14.0 1 conda-forge\n",
"bokeh 0.12.16 py36_0 conda-forge\n",
"boto3 1.4.4 py36_0 \n",
"botocore 1.5.67 py36_0 \n",
"bottleneck 1.2.1 py36_1 conda-forge\n",
"bzip2 1.0.6 1 conda-forge\n",
"cachey 0.1.1 <pip>\n",
"cairo 1.14.8 0 \n",
"cffi 1.10.0 py36_0 \n",
"cftime 1.0.0 py36_0 conda-forge\n",
"click 6.7 py36_0 \n",
"cloudpickle 0.2.2 py36_0 \n",
"cuda80 1.0 0 soumith\n",
"cudatoolkit 8.0 3 anaconda\n",
"cudnn 6.0.21 cuda8.0_0 anaconda\n",
"curl 7.49.0 1 \n",
"cycler 0.10.0 py36_0 \n",
"cython 0.25.2 py36_0 \n",
"cytoolz 0.9.0.1 py36_0 conda-forge\n",
"dask 0.18.1 py_0 conda-forge\n",
"dask-core 0.18.1 py_0 conda-forge\n",
"dask-jobqueue 0.3.0 py_0 conda-forge\n",
"dbus 1.10.20 0 \n",
"decorator 4.0.11 py36_0 \n",
"distributed 1.22.0 py36_0 conda-forge\n",
"docrep 0.2.3 py_1 conda-forge\n",
"docutils 0.13.1 py36_0 \n",
"earthsim 1.0.1 py_0 conda-forge\n",
"entrypoints 0.2.3 py36_0 \n",
"expat 2.1.0 0 \n",
"fastparquet 0.0.6 py36_0 \n",
"fontconfig 2.12.1 3 \n",
"freetype 2.5.5 2 \n",
"git 2.11.1 0 \n",
"glib 2.50.2 1 \n",
"graphviz 0.8 <pip>\n",
"graphviz 2.38.0 5 \n",
"gst-plugins-base 1.8.0 0 \n",
"gstreamer 1.8.0 0 \n",
"h5netcdf 0.6.1 py_0 conda-forge\n",
"h5py 2.7.1 py36_3 conda-forge\n",
"harfbuzz 0.9.39 2 \n",
"hdf4 4.2.12 1 \n",
"hdf5 1.10.1 2 conda-forge\n",
"heapdict 1.0.0 py36_1 \n",
"html5lib 0.9999999 py36_0 anaconda\n",
"icu 54.1 0 \n",
"ipykernel 4.6.1 py36_0 \n",
"ipyparallel 6.0.2 <pip>\n",
"ipyparallel 6.0.2 py36_0 \n",
"ipython 6.1.0 py36_0 \n",
"ipython_genutils 0.2.0 py36_0 \n",
"ipywidgets 7.2.1 py36_1 conda-forge\n",
"jbig 2.1 0 \n",
"jedi 0.10.2 py36_2 \n",
"jinja2 2.9.6 py36_0 \n",
"jmespath 0.9.0 py36_0 \n",
"joblib 0.12.2 py_0 conda-forge\n",
"jpeg 9b 0 \n",
"jsonschema 2.6.0 py36_0 \n",
"jupyter_client 5.1.0 py36_0 \n",
"jupyter_core 4.3.0 py36_0 \n",
"jupyterlab 0.32.1 py36_0 conda-forge\n",
"jupyterlab_launcher 0.10.5 py36_0 conda-forge\n",
"libevent 2.0.22 0 conda-forge\n",
"libffi 3.2.1 1 \n",
"libgcc 5.2.0 0 \n",
"libgfortran 3.0.0 1 \n",
"libiconv 1.14 0 \n",
"libnetcdf 4.6.1 2 conda-forge\n",
"libpng 1.6.27 0 \n",
"libprotobuf 3.4.0 0 anaconda\n",
"libsodium 1.0.10 0 \n",
"libtiff 4.0.6 3 \n",
"libtool 2.4.2 0 \n",
"libxcb 1.12 1 \n",
"libxml2 2.9.4 0 \n",
"llvmlite 0.20.0 py36_0 \n",
"locket 0.2.0 py36_1 \n",
"lzo 2.10 0 conda-forge\n",
"markdown 2.6.9 py36_0 anaconda\n",
"markupsafe 0.23 py36_2 \n",
"matplotlib 2.0.2 np113py36_0 \n",
"mistune 0.7.4 py36_0 \n",
"mkl 2017.0.3 0 \n",
"msgpack-python 0.4.8 py36_0 \n",
"nbconvert 5.2.1 py36_0 \n",
"nbformat 4.3.0 py36_0 \n",
"nccl 1.3.4 cuda8.0_1 \n",
"ncurses 5.9 10 conda-forge\n",
"netcdf4 1.4.0 py36_0 conda-forge\n",
"netifaces 0.10.6 <pip>\n",
"nodejs 6.10.3 0 \n",
"notebook 5.0.0 py36_0 \n",
"numba 0.35.0 np113py36_0 \n",
"numexpr 2.6.2 np113py36_0 \n",
"numpy 1.13.1 py36_0 \n",
"olefile 0.44 py36_0 \n",
"openssl 1.0.2l 0 \n",
"packaging 17.1 py_0 conda-forge\n",
"pandas 0.20.2 np113py36_0 \n",
"pandas-datareader 0.4.0 py36_0 \n",
"pandocfilters 1.4.1 py36_0 \n",
"pango 1.40.3 1 \n",
"partd 0.3.8 py36_0 \n",
"path.py 10.3.1 py36_0 \n",
"patsy 0.4.1 py36_0 \n",
"pcre 8.39 1 \n",
"pexpect 4.2.1 py36_0 \n",
"pickleshare 0.7.4 py36_0 \n",
"pillow 4.2.1 py36_0 \n",
"pip 9.0.1 py36_1 \n",
"pip 18.0 <pip>\n",
"pixman 0.34.0 0 \n",
"ply 3.10 py36_0 \n",
"prompt_toolkit 1.0.14 py36_0 \n",
"protobuf 3.4.0 py36_0 anaconda\n",
"psutil 5.2.2 py36_0 \n",
"ptyprocess 0.5.1 py36_0 \n",
"py 1.4.34 py36_0 \n",
"pycparser 2.18 py36_0 \n",
"PyDisdrometer 0.1.15.1 <pip>\n",
"pygments 2.2.0 py36_0 \n",
"pyparsing 2.1.4 py36_0 \n",
"pyqt 5.6.0 py36_2 \n",
"pytables 3.4.4 py36_8 conda-forge\n",
"pytest 3.1.2 py36_0 \n",
"python 3.6.1 2 \n",
"python-dateutil 2.6.0 py36_0 \n",
"pytmatrix 0.3.1 <pip>\n",
"pytmatrix 0.3.1 py36_0 conda-forge\n",
"pytorch 0.2.0 py36h53baedd_4cu80 [cuda80] soumith\n",
"pytz 2017.2 py36_0 \n",
"pyyaml 3.12 py36_0 \n",
"pyzmq 16.0.2 py36_0 \n",
"qt 5.6.2 4 \n",
"readline 6.2 2 \n",
"requests 2.14.2 py36_0 \n",
"requests-file 1.4.1 py36_0 \n",
"requests-ftp 0.3.1 py36_0 \n",
"s3fs 0.1.1 py36_0 \n",
"s3transfer 0.1.10 py36_0 \n",
"scikit-learn 0.19.0 np113py36_0 \n",
"scipy 0.19.1 np113py36_0 \n",
"seaborn 0.8 py36_0 \n",
"setuptools 27.2.0 py36_0 \n",
"simplegeneric 0.8.1 py36_1 \n",
"sip 4.18 py36_0 \n",
"six 1.10.0 py36_0 \n",
"snakeviz 0.4.1 py36_0 \n",
"sortedcontainers 1.5.7 py36_0 \n",
"sqlite 3.13.0 0 \n",
"statsmodels 0.8.0 np113py36_0 \n",
"tblib 1.3.2 py36_0 \n",
"tensorflow-gpu 1.3.0 0 anaconda\n",
"tensorflow-gpu-base 1.3.0 py36cuda8.0cudnn6.0_0 anaconda\n",
"tensorflow-tensorboard 0.1.5 py36_0 anaconda\n",
"terminado 0.6 py36_0 \n",
"testpath 0.3.1 py36_0 \n",
"thriftpy 0.3.9 py36_0 conda-forge\n",
"tk 8.5.18 0 \n",
"tmux 2.3 0 conda-forge\n",
"toolz 0.8.2 py36_0 \n",
"torchvision 0.1.9 py36h7584368_1 soumith\n",
"tornado 4.5.1 py36_0 \n",
"traitlets 4.3.2 py36_0 \n",
"ujson 1.35 py36_0 \n",
"umap-learn 0.3.0 py36_0 conda-forge\n",
"versioneer 0.18 <pip>\n",
"wcwidth 0.1.7 py36_0 \n",
"werkzeug 0.12.2 py36hc703753_0 anaconda\n",
"wheel 0.29.0 py36_0 \n",
"widgetsnbextension 3.2.1 py36_0 conda-forge\n",
"xarray 0.10.7 py36_0 conda-forge\n",
"xz 5.2.2 1 \n",
"yaml 0.1.6 0 \n",
"zeromq 4.1.5 0 \n",
"zict 0.1.3 py_0 conda-forge\n",
"zlib 1.2.11 0 conda-forge\n"
]
}
],
"source": [
"!conda list"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Dask configuration"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"jobqueue:\n",
" pbs:\n",
" name: dask-worker\n",
" cores: 32\n",
" memory: 270GB\n",
" processes: 6\n",
" interface: ib0\n",
" local-directory: $localscratch\n",
" queue: high_mem\n",
" project: arm\n",
" walltime: 00:08:00\n",
" job-extra: ['-W group_list=cades-arm']\n"
]
}
],
"source": [
"!cat /home/josephhardinee/.config/dask/stratus.yml"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.1"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment