Skip to content

Instantly share code, notes, and snippets.

@d3v-null
Last active June 17, 2022 04:05
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save d3v-null/9ce233114ebb16dc18072303845257e5 to your computer and use it in GitHub Desktop.
#!/bin/bash -l
#SBATCH -J "*:~.✩"
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --account=mwaeor
#SBATCH --export=NONE
#SBATCH --partition=gpuq
#SBATCH --cpus-per-task=32
#SBATCH --mem=350G
#SBATCH --tmp=500G
#SBATCH --time=1:00:00
#SBATCH --gres=gpu:1
#SBATCH --array=0-4
# ################################################### #
# Dev Null's MWA Calibration and Imaging SLURM script #
# ################################################### #
# Feel free to use / share this with anyone who might benefit from it. No guarantees it will work for you.
# Pipeline stages:
# - preprocessing: Birli
#   - AOflagger flagging
#   - geometric, digital, pfb corrections
#   - spectral / temporal averaging
#   - write to MS
# - DI Calibration: Hyperdrive
# - Imaging: WSClean
# - calibration convergence statistics for various uv-distance cutoff windows
# ######################### #
# !!! SBATCH Parameters !!! #
# ######################### #
# - adjust the --array parameter above to match the length of $obsids
# - if using slow imaging, you'll need about 16 hours
# - use the following parameters if you want to skip the slurm queue to perform
#   a quick smoke test ("#/SBATCH" keeps them inert; remove the '/' to enable)
#/SBATCH --cpus-per-task=1
#/SBATCH --time=00:15:00
#/SBATCH --partition=workq
#/SBATCH --array=0
# needed for wsclean (presumably to stop OpenBLAS spawning its own threads — verify)
export OPENBLAS_NUM_THREADS=1
# uncomment one of these to debug Birli, Marlu or Hyperdrive
# export RUST_LOG=birli=debug
# export RUST_LOG=birli,marlu=debug # etc.
# imaging / preprocessing parameters:
# mem_lmt is passed to both birli (--max-memory) and wsclean (-abs-mem);
# time/freq resolutions are the averaging targets for preprocessing
export mem_lmt="300"
export time_res_s="2"
export freq_res_khz="20"
# Pawsey-hosted MWA software stack
module use /pawsey/mwa/software/python3/modulefiles
module load hyperdrive/chj
module load wsclean
module load singularity
module load cotter
module load casa
module load python
module load astropy
module load wcslib
# an associative array of imaging parameter sets, keyed by a descriptive name;
# each value is a string of extra wsclean args (image size, pixel scale, clean iterations)
declare -A images
images['quick_41deg_4096px']='-size 4096 4096 -scale 0.01 -niter 1000'
# images['slow_20deg_8192px']='-size 8192 8192 -scale 0.0025 -niter 10000'
# Image a measurement set with wsclean.
#   $1 - visibility type label (e.g. "cal", "uncal"), embedded in the image name
#   $2 - key into the $images associative array selecting size/scale/niter args
#   $3 - path to the measurement set to image
# Reads globals: images (assoc array of wsclean arg strings), mem_lmt (GB).
wsclean_img() {
  local vis_type=$1
  local img_key=$2
  local ms_path=$3
  # ${images[...]} is deliberately unquoted: each value holds several
  # whitespace-separated wsclean arguments that must word-split.
  wsclean -name "img_${vis_type}_${img_key}" \
    -mwa-path /astro/mwaeor/jline/software \
    -multiscale \
    -mgain 0.85 \
    ${images[$img_key]} \
    -circular-beam \
    -pol iquv -link-polarizations i \
    -channels-out 4 -join-channels \
    -no-mf-weighting -weight briggs 0 \
    -grid-with-beam -use-idg -idg-mode hybrid \
    -auto-threshold 1 -auto-mask 4 \
    -abs-mem ${mem_lmt} \
    -save-source-list \
    "$ms_path"
}
# fail fast on any error, and trace every command into the slurm log
set -e
set -x
# an array of obsids; each slurm array task processes a different obsid
# (keep the #SBATCH --array range in sync with this list's length)
export obsids=(1220611936 1220612056 1220612176 1220612296 1220612416)
export OBSID="${obsids[$SLURM_ARRAY_TASK_ID]}"
echo "obsid ${OBSID} date $(date -Iseconds)"
# which source list to use
# export srclist=srclist_pumav3_EoR0aegean_EoR1pietro+ForA_wsclean_phase1+2
# export srclist=srclist_pumav3_EoR0LoBESv2_fixedEoR1pietro+ForA_phase1+2
export srclist=srclist_pumav3_EoR0aegean_EoR1pietro+ForA_phase1+2_TGSSgalactic
# per-obsid trimmed source list generated by srclist-by-beam further below
export obs_srclist=${srclist}_${OBSID}_peel1000.yaml
# various paths used by the script, and their equivalent path in nvmetmp
# (/nvmetmp is node-local scratch sized by #SBATCH --tmp; heavy MS I/O is done
# there and results are copied back to /astro afterwards)
export METAFITS="${OBSID}.metafits"
export VIS_MODEL_ASTRO="hyp_model_${srclist}.ms"
export VIS_MODEL_TMP="/nvmetmp/hyp_model_${srclist}.ms"
export VIS_UNCAL_ASTRO="birli_${OBSID}_${time_res_s}s_${freq_res_khz}kHz.ms"
export VIS_UNCAL_TMP="/nvmetmp/${VIS_UNCAL_ASTRO}"
export VIS_CAL_ASTRO="hyp_cal_${OBSID}_${time_res_s}s_${freq_res_khz}kHz.ms"
export VIS_CAL_TMP="/nvmetmp/${VIS_CAL_ASTRO}"
export SOLNS="hyp_solns.fits"
# all inputs and outputs live in the per-obsid working directory
cd "/astro/mwaeor/dev/pretty/${OBSID}" || exit
# write the metafits headers in human-readable format
fitshdr ${METAFITS} | tee ${METAFITS}.txt
# # quick smoke test (disabled): check the input files exist and birli is working
# singularity exec \
# --bind /nvmetmp:/nvmetmp \
# /astro/mwaeor/dev/singularity/birli/birli_latest.sif \
# /opt/cargo/bin/birli --dry-run \
# -m ${METAFITS} \
# ${OBSID}_2*.fits
# generate our per-obsid source list: take the brightest 1000 sources within
# 20 degrees of the pointing centre (as ranked by the beam model)
if [[ ! -f "${obs_srclist}" ]]; then
  hyperdrive srclist-by-beam \
    -i rts \
    -m "${METAFITS}" \
    -n 1000 \
    --source-dist-cutoff 20 \
    "/astro/mwaeor/dev/calibration/${srclist}.txt" "${obs_srclist}"
fi
# generate model visibilities from the source list (for debugging: lets you
# compare observed data directly against the calibration model)
if [[ ! -d "${VIS_MODEL_ASTRO}" ]]; then
  # simulate into node-local scratch first, then move to /astro
  hyperdrive simulate-vis \
    --metafits "${METAFITS}" \
    --source-list "${obs_srclist}" \
    --output-model-file "${VIS_MODEL_TMP}"
  # patch MWA-specific metadata into the measurement set
  fixmwams "${VIS_MODEL_TMP}" "${METAFITS}"
  mv "${VIS_MODEL_TMP}" "${VIS_MODEL_ASTRO}"
else
  echo "model ${VIS_MODEL_ASTRO} exists"
fi
# generate un-calibrated visibilities with Birli via singularity:
# flag, apply geometric/digital/pfb corrections, average, write MS
if [[ ! -d "${VIS_UNCAL_ASTRO}" ]]; then
  singularity exec \
    --bind /nvmetmp:/nvmetmp \
    /astro/mwaeor/dev/singularity/birli/birli_latest.sif \
    /opt/cargo/bin/birli \
    -m "${METAFITS}" \
    -M "${VIS_UNCAL_TMP}" \
    --avg-time-res "${time_res_s}" --avg-freq-res "${freq_res_khz}" \
    --max-memory "${mem_lmt}" \
    ${OBSID}_2*.fits
  fixmwams "${VIS_UNCAL_TMP}" "${METAFITS}"
  cp -r "${VIS_UNCAL_TMP}" "${VIS_UNCAL_ASTRO}"
else
  # (fixed: this message previously reported ${VIS_MODEL_ASTRO} by mistake)
  echo "vis uncal ${VIS_UNCAL_ASTRO} exists"
fi
# ################### #
# !!! Flag output !!! #
# ################### #
# Birli writes flags to ms, but if you want a separate flags file, use the -f option, and pass in either
# - FlagFile_ch%%%.mwaf for mwax
# - or FlagFile_%%.mwaf for legacy
# see <https://github.com/MWATelescope/Birli/blob/main/README.md#output>
# ensure the uncalibrated vis are on node-local scratch for the steps below
if [[ ! -d "${VIS_UNCAL_TMP}" ]]; then
  time cp -r "${VIS_UNCAL_ASTRO}" "${VIS_UNCAL_TMP}"
fi
# calibrate various uv-length windows, plot solutions
# (this whole analysis is gated behind 'if false' — flip to 'true' to enable)
# NOTE(review): the Python embedded in the casa/python3 command strings below
# appears to have lost its indentation (loop/with bodies start at column 0),
# likely a paste artifact — it would not parse if enabled. Restore the
# indentation before flipping this gate.
if false; then
# generate baseline length cutoff windows from uv-length percentiles using casa
casa -c "import numpy as np
tb.open('${VIS_UNCAL_TMP}')
uvlengths = (tb.getcol('UVW')**2).sum(axis=0)**0.5
percentiles = np.percentile(uvlengths, range(0,100,10))
with open('uv_windows.txt', 'w') as f:
for row in np.stack((percentiles[:-5],percentiles[5:])).T.tolist():
f.write(\"%f %f\n\" % (row[0], row[1]))
"
# calibrate and plot each uv-window: the awk program substitutes each window's
# min/max uv length (columns 1 and 2 of uv_windows.txt) into the hyperdrive
# commands built below, numbering the solution files by row (NR)
export dical_cmd="hyperdrive di-cal -d '${OBSID}.metafits' '${VIS_UNCAL_TMP}' -s '${obs_srclist}'"
export splot_cmd="hyperdrive solutions-plot -m '${OBSID}.metafits'"
awk '{
solns=sprintf("hyp_sols_p%s.fits", NR)
system(sprintf(
"'"$dical_cmd"' -o %s --uvw-min %sl --uvw-max %sl; '"$splot_cmd"' %s",
solns, $1, $2, solns
))}' <uv_windows.txt
# count the percentage of NaNs in each window's calibration solutions
# (NaN solutions indicate tiles/channels that failed to converge;
# NOTE(review): np.product is deprecated in newer numpy — prefer np.prod)
python3 -c "from astropy.io import fits; import numpy as np; import glob;
for [[left, right], soln_path] in zip(map(lambda l: l.strip().split(' '), open('uv_windows.txt').readlines()), sorted(glob.glob('hyp_sols_p*.fits'))):
solns=fits.open(soln_path)[1].data
convergence=np.count_nonzero(np.isnan(solns))/np.product(solns.shape)
print(f'{left} {right}, {convergence}')
" | tee convergence.txt
fi
# image uncalibrated visibilities (disabled; useful for before/after comparison)
# wsclean_img "uncal" "quick_41deg_4096px" "${VIS_UNCAL_TMP}"
# generate calibrated visibilities without a uv-distance cutoff
if [[ ! -d "${VIS_CAL_ASTRO}" ]]; then
  # direction-independent calibration against the per-obsid source list
  hyperdrive di-calibrate \
    --data "${METAFITS}" "${VIS_UNCAL_TMP}" \
    --source-list "${obs_srclist}" \
    --outputs "${SOLNS}"
  # plot the solutions for visual inspection
  hyperdrive solutions-plot \
    -m "${METAFITS}" \
    "${SOLNS}"
  # apply solutions to produce the calibrated measurement set
  hyperdrive solutions-apply \
    --data "${METAFITS}" "${VIS_UNCAL_TMP}" \
    --solutions "${SOLNS}" \
    --outputs "${VIS_CAL_TMP}"
  # uncalibrated scratch copy is no longer needed; free node-local space
  rm -rf "${VIS_UNCAL_TMP}"
  fixmwams "${VIS_CAL_TMP}" "${METAFITS}"
  cp -r "${VIS_CAL_TMP}" "${VIS_CAL_ASTRO}"
fi
# ensure the calibrated vis are on node-local scratch for imaging
if [[ ! -d "${VIS_CAL_TMP}" ]]; then
  cp -r "${VIS_CAL_ASTRO}" "${VIS_CAL_TMP}"
fi
# image calibrated visibilities with each configured imaging parameter set
for img in "${!images[@]}"; do
  echo "obsid ${OBSID} img ${img} date $(date -Iseconds)"
  wsclean_img "cal" "$img" "${VIS_CAL_TMP}"
done
rm -rf "${VIS_CAL_TMP}"
echo "done $(date -Iseconds)"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment