@brevans
Last active October 19, 2018 19:22
installing cryoSPARC2 on Farnam

This script downloads the cryoSPARC2 master and worker packages, installs them under ~/project/software/cryosparc2, and registers Farnam's Slurm scheduler as a cluster submission target. Fill in license_id (and check the other variables at the top of the script) before running it.
#!/bin/bash
## cryosparc install
# where to install cryosparc2 and its sample database
install_path=$HOME/project/software/cryosparc2
# the license ID you got from Structura
license_id=
# your email
my_email=$(cat ~/.forward)
# partition to submit your cryosparc jobs to
# (this gets baked into cluster_script.sh below, so it likely cannot be changed at runtime
# without editing that file and reconnecting the cluster config)
partition=gpu
# don't edit below here
###########################
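# optional guard: the download URLs below are only valid with a real license ID, so stop
# right away if license_id was left blank rather than unpacking a broken download later
if [[ -z "$license_id" ]]; then
    echo "ERROR: set license_id near the top of this script before running it" >&2
    exit 1
fi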
my_name="$USER"
db_path=${install_path}/cryosparc_database
# a temp password
cryosparc_passwd=Password123
module load CUDA/9.0.176
worker_path=${install_path}/cryosparc2_worker
ssd_path=/tmp/${USER}/cryosparc_cache
mkdir -p $install_path
cd $install_path
curl -sL https://get.cryosparc.com/download/master-latest/$license_id > cryosparc2_master.tar.gz
curl -sL https://get.cryosparc.com/download/worker-latest/$license_id > cryosparc2_worker.tar.gz
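# optional check: with a bad or missing license ID these URLs may return an error response
# instead of a tarball, so verify both files are valid gzip archives before extracting them
for f in cryosparc2_master.tar.gz cryosparc2_worker.tar.gz; do
    gzip -t "$f" || { echo "ERROR: $f did not download correctly" >&2; exit 1; }
done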
tar -xf cryosparc2_master.tar.gz
tar -xf cryosparc2_worker.tar.gz
cd ${install_path}/cryosparc2_master
./install.sh --license $license_id --hostname $(hostname) --dbpath $db_path --yes
source ~/.bashrc
cd ${install_path}/cryosparc2_worker
./install.sh --license $license_id --cudapath $CUDA_HOME --yes
source ~/.bashrc
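# the install scripts update ~/.bashrc (which is why it is sourced above); cryosparcm should
# now be on the PATH, and a failed master install is easier to catch here than at 'cryosparcm start'
command -v cryosparcm >/dev/null 2>&1 || {
    echo "ERROR: cryosparcm not found on PATH after the master install" >&2
    exit 1
}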
# Farnam cluster setup
mkdir -p ${install_path}/site_configs && cd ${install_path}/site_configs
cat << EOF > cluster_info.json
{
    "name" : "farnam",
    "worker_bin_path" : "${install_path}/cryosparc2_worker/bin/cryosparcw",
    "cache_path" : "/tmp/{{ cryosparc_username }}/cryosparc_cache",
    "send_cmd_tpl" : "{{ command }}",
    "qsub_cmd_tpl" : "sbatch {{ script_path_abs }}",
    "qstat_cmd_tpl" : "squeue -j {{ cluster_job_id }}",
    "qdel_cmd_tpl" : "scancel {{ cluster_job_id }}",
    "qinfo_cmd_tpl" : "sinfo"
}
EOF
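# the {{ ... }} fields in cluster_info.json are cryoSPARC template variables that get filled
# in for each job at submission time; cache_path is the node-local scratch directory used for
# cryoSPARC's SSD particle cache and should line up with the mkdir in cluster_script.sh below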
cat << EOF > cluster_script.sh
#!/usr/bin/env bash
#SBATCH --job-name cryosparc_{{ project_uid }}_{{ job_uid }}
#SBATCH -c {{ num_cpu }}
#SBATCH --gres=gpu:{{ num_gpu }}
#SBATCH -p ${partition}
#SBATCH --mem={{ (ram_gb*1024)|int }}
#SBATCH -o {{ job_dir_abs }}/slurm-%j.out
#SBATCH -e {{ job_dir_abs }}/slurm-%j.err
module load CUDA/9.0.176
mkdir -p /tmp/${USER}/cryosparc_cache
{{ run_cmd }}
EOF
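# 'cryosparcm cluster connect' below reads cluster_info.json and cluster_script.sh from the
# current working directory, which is why both files were written into site_configs first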
cryosparcm start
cryosparcm createuser --email $my_email --password $cryosparc_passwd --name $my_name
cryosparcm cluster connect
cryosparcm stop
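# after the install, typical usage is: start the master, tunnel the web interface (cryoSPARC's
# default base port is 39000 unless it was changed at install time) to your workstation, log in
# with the email and temporary password created above, and change the password from the web UI
#
# on the cluster node where cryoSPARC was installed:
#   cryosparcm start
# on your workstation, replacing <node> with that host and <netid> with your login:
#   ssh -N -L 39000:<node>:39000 <netid>@farnam.hpc.yale.edu
# then browse to http://localhost:39000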