@eddiebergman
Last active February 13, 2023 08:57
from dask_jobqueue import SLURMCluster
import time
from pathlib import Path
from concurrent.futures import wait, ALL_COMPLETED


def f(x: int) -> int:
    # Toy task: sleep for 20s, then return the doubled input
    time.sleep(20)
    print(f"Done {x}")
    return x * 2


if __name__ == "__main__":
    here = Path(__file__).absolute().parent
    logs = here / "logs-test-dask-slurm"
    logs.mkdir(exist_ok=True)

    # Each SLURM job runs one worker process with 2 cores and 1GB of memory
    cluster = SLURMCluster(
        memory="1GB",
        processes=1,
        cores=2,
        local_directory=here,
        log_directory=logs,
        queue="gki_cpu-cascadelake",
        job_extra_directives=["--time 0-00:01:00"],
    )
    print(cluster.job_script())

    # Launch 10 workers
    cluster.scale(jobs=10)

    client = cluster.get_client()
    # concurrent.futures-compatible executor backed by the Dask client
    executor = client.get_executor()

    futures = [executor.submit(f, i) for i in range(30)]
    finished, unfinished = wait(futures, return_when=ALL_COMPLETED)

    results = [future.result() for future in finished]
    print(results)
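
A note on the interface used here: client.get_executor() provides a concurrent.futures-compatible executor, but the same workload can also be expressed with Dask's native futures API, which hands results back in submission order. A minimal sketch, assuming the same cluster, client, and f as above:

futures = client.map(f, range(30))
results = client.gather(futures)  # results returned in submission order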
@eddiebergman (Author) commented:

Here's the output:

print(cluster.job_script())

#!/usr/bin/env bash

#SBATCH -J dask-worker
#SBATCH -e /work/dlclarge2/bergmane-test-dask/logs-test-dask-slurm/dask-worker-%J.err
#SBATCH -o /work/dlclarge2/bergmane-test-dask/logs-test-dask-slurm/dask-worker-%J.out
#SBATCH -p gki_cpu-cascadelake
#SBATCH -n 1
#SBATCH --cpus-per-task=2
#SBATCH --mem=954M
#SBATCH -t 00:30:00
#SBATCH --time 0-00:01:00

/work/dlclarge2/bergmane-test-dask/.eddie-venv/bin/python -m distributed.cli.dask_worker tcp://10.5.166.215:41675 --nthreads 2 --memory-limit 0.93GiB --name dummy-name --nanny --death-timeout 60 --local-directory /work/dlclarge2/bergmane-test-dask
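
Note that the generated script contains two time limits: "#SBATCH -t 00:30:00" appears to come from dask-jobqueue's default walltime, while "#SBATCH --time 0-00:01:00" is appended by job_extra_directives. If that duplication is unwanted, the walltime argument sets the limit directly; a minimal sketch, assuming the rest of the constructor stays the same:

cluster = SLURMCluster(
    memory="1GB",
    processes=1,
    cores=2,
    local_directory=here,
    log_directory=logs,
    queue="gki_cpu-cascadelake",
    walltime="00:01:00",  # replaces the default -t 00:30:00 line
)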

print(results)

[2, 24, 42, 38, 46, 30, 56, 52, 26, 48, 8, 10, 40, 18, 44, 4, 36, 12, 6, 20, 28, 54, 58, 22, 0, 50, 32, 34, 16, 14]
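
The results come back unordered because wait() returns the finished futures as a set, so iterating over finished discards submission order. To recover input order, the result collection can iterate over the original futures list instead; a minimal sketch, assuming the same futures as in the script:

wait(futures, return_when=ALL_COMPLETED)
results = [future.result() for future in futures]  # preserves submission order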
