@satra
Created May 19, 2018 20:06
Apply ANTs JointFusion using Mindboggle atlases

The neurodocker call below writes a Dockerfile for an image containing FreeSurfer, ANTs, Mindboggle, and the OASIS-TRT-20 atlas data; the Python script that follows runs a Nipype workflow that warps the atlases into subject space, fuses them with antsJointFusion, and tabulates per-label measures.
docker run -it --rm kaczmarj/neurodocker:master generate docker \
--base neurodebian:stretch \
--pkg-manager apt \
--install graphviz tree git-annex-standalone vim \
emacs-nox nano less ncdu tig sed build-essential \
libsm-dev libx11-dev libxt-dev libxext-dev libglu1-mesa \
--freesurfer version=6.0.0-min \
--ants version=b43df4bfc8 method=source cmake_opts='-DBUILD_SHARED_LIBS=ON' make_opts='-j 4' \
--run 'ln -s /usr/lib/x86_64-linux-gnu /usr/lib64' \
--miniconda \
conda_install="python=3.6 pip jupyter cmake mesalib vtk pandas \
matplotlib colormath nipype" \
pip_install="datalad[full] duecredit" \
env_name="simple2" \
activate=true \
--workdir /opt \
--run 'mkdir -p /opt/data && cd /opt/data && \
curl -sSL https://osf.io/download/rh9km/?revision=2 -o templates.zip && \
unzip templates.zip && \
rm -f /opt/data/templates.zip && \
curl -sSL https://osf.io/download/d2cmy/ -o OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_OASIS-30_v2.nii.gz && \
curl -sSL https://osf.io/download/qz3kx/ -o OASIS-TRT_brains_to_OASIS_Atropos_template.tar.gz && \
tar zxf OASIS-TRT_brains_to_OASIS_Atropos_template.tar.gz && \
rm OASIS-TRT_brains_to_OASIS_Atropos_template.tar.gz && \
curl -sSL https://osf.io/download/dcf94/ -o OASIS-TRT_labels_to_OASIS_Atropos_template.tar.gz && \
tar zxf OASIS-TRT_labels_to_OASIS_Atropos_template.tar.gz && \
rm OASIS-TRT_labels_to_OASIS_Atropos_template.tar.gz' \
--run-bash 'source /opt/miniconda-latest/etc/profile.d/conda.sh && \
conda activate simple2 && \
git clone https://github.com/nipy/mindboggle.git && \
cd /opt/mindboggle && \
git checkout edf95a3 && \
python setup.py install && \
sed -i "s/7.0/8.1/g" vtk_cpp_tools/CMakeLists.txt && \
mkdir /opt/vtk_cpp_tools && \
cd /opt/vtk_cpp_tools && \
cmake /opt/mindboggle/vtk_cpp_tools && \
make' \
--env vtk_cpp_tools=/opt/vtk_cpp_tools > Dockerfile
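
The command above only writes the Dockerfile; building the image and starting a container with the subject data mounted are separate steps. A minimal sketch, assuming Docker is installed on the host; the image tag mindboggle-jf and the host path /path/to/subject_data are placeholders, not part of the gist:

# Build the image from the generated Dockerfile (tag name is arbitrary).
docker build -t mindboggle-jf .

# Start a container with the subject's data mounted at /data, which is where
# the labeling script below looks for T1.nii.gz and the antsCorticalThickness
# outputs (the host path is a placeholder).
docker run -it --rm -v /path/to/subject_data:/data mindboggle-jf bash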
# Joint-fusion labeling workflow: warp the OASIS-TRT-20 atlas T1s and label
# images into subject space, fuse them with antsJointFusion, and extract
# per-label geometry and thickness measures.
from glob import glob
import os

from nipype import Workflow, MapNode, Node
from nipype.interfaces.ants import ApplyTransforms, AntsJointFusion, LabelGeometry
from nipype.utils.misc import human_order_sorted
# Subject-space inputs produced by antsCorticalThickness/antsBrainExtraction.
# The template-to-subject transforms are reversed so the warp field is listed
# before the affine, the order antsApplyTransforms expects.
T = glob('/data/out/ants_subjects/arno/antsTemplateToSubject*')[::-1]
ref = '/data/T1.nii.gz'
mask = '/data/out/ants_subjects/arno/antsBrainExtractionMask.nii.gz'

# Atlas T1s and corresponding label images downloaded into the image at build time.
T1s = human_order_sorted(glob('/opt/data/OASIS-TRT_brains_to_OASIS_Atropos_template/*.nii.gz'))
labels = human_order_sorted(glob('/opt/data/OASIS-TRT_labels_to_OASIS_Atropos_template/*.nii.gz'))

thickness = '/data/out/ants_subjects/arno/antsCorticalThickness.nii.gz'

# Number of atlases to use for joint fusion.
N = 20
wf = Workflow('labelflow')

# Warp the atlas T1-weighted images into subject space (default linear interpolation).
transformer = MapNode(ApplyTransforms(), iterfield=['input_image'], name="transformer")
transformer.inputs.reference_image = ref
transformer.inputs.transforms = T
transformer.inputs.input_image = T1s[:N]
transformer.inputs.dimension = 3
transformer.inputs.invert_transform_flags = [False, False]

# Warp the atlas label images with nearest-neighbor interpolation so label
# values are preserved.
transformer_nn = MapNode(ApplyTransforms(), iterfield=['input_image'], name="transformer_nn")
transformer_nn.inputs.reference_image = ref
transformer_nn.inputs.transforms = T
transformer_nn.inputs.dimension = 3
transformer_nn.inputs.invert_transform_flags = [False, False]
transformer_nn.inputs.input_image = labels[:N]
transformer_nn.inputs.interpolation = 'NearestNeighbor'
# Fuse the warped atlases with antsJointFusion, restricted to the brain mask.
labeler = Node(AntsJointFusion(), name='labeler')
labeler.inputs.dimension = 3
labeler.inputs.target_image = [ref]
labeler.inputs.out_label_fusion = 'label.nii.gz'
labeler.inputs.mask_image = mask
labeler.inputs.num_threads = 8
wf.connect(transformer, 'output_image', labeler, 'atlas_image')
wf.connect(transformer_nn, 'output_image', labeler, 'atlas_segmentation_image')
# Summarize each fused label with geometry measures and the cortical thickness image.
tocsv = Node(LabelGeometry(), name='get_measures')
tocsv.inputs.intensity_image = thickness
wf.connect(labeler, 'out_label_fusion', tocsv, 'label_image')

# Run the workflow in the current directory with resource monitoring enabled.
wf.base_dir = os.getcwd()
wf.config['monitoring'] = {'enabled': True}
wf.run('MultiProc')
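
One way to launch the workflow above, assuming it has been saved as labelflow.py inside the container (the file name is a placeholder); the simple2 environment and the miniconda path come from the Dockerfile generated earlier:

# Inside the container: make sure the conda environment from the Dockerfile is
# active (it may already be active if activate=true took effect), then run the script.
source /opt/miniconda-latest/etc/profile.d/conda.sh
conda activate simple2
python labelflow.py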