"""
----------------------------------
jg_MUSC_connectome_tractography.py
----------------------------------
Modified version of MUSC connectome (probabilistic) tractography pipeline
(original code is at bottom of file, plus links to website)
Modifications:
- Using own ROIs, and flirt, bedpostx, etc. are already done, so just picking up the pipeline from the probtrackx part
- Re-written ('improved') various non-python bits in python
- Added '--verbose=2' flag to the probtrackx call so it outputs raw streamlines (particle files)
- (Creates a voxelwise 'connectome atlas' ??)
Notes:
- To do:
- tidy up this script
- make sure the libraries, nipype bits, etc. work on the cbu server
- show others how to do that (incl. git; gists??)
- **ADD VERBOSE=2 (add as an argument, or do we have to modify the muscip code??)**
- **look into connectome atlas generation**
- **add chris filo neuroutils particle reader to tidy up streamlines into vtk** (see the hedged format sketch after the main loop, below)
- **look into ways to flexibly re-parcellate these datasets** (see the hedged sketch just after this docstring)
- **look into ways of seeding from g/wm surface**
- (note: the nipype tractography workflow separates 1-tensor vs. 2-tensor models. don't think MUSC does that?)
- (note: musc also has a deterministic tractography option with dtk. perhaps use this?)
- (Q: how much easier / quicker is this than a conventional nipype workflow?)
- ipython notebook??
"""
"""
--------------
Import modules
--------------
"""
import os
from os.path import join,isdir,isfile
import glob
import numpy as np
import nibabel as nib
from nipype.interfaces import fsl
import muscip.connectome as mcon
import bedpostx_particle_reader as bpx
import subprocess as sp
from IPython import embed
from datetime import datetime
from subprocess import call, STDOUT
devnull = open(os.devnull, 'wb')
"""
----------------------------------------
Define filenames, variables, steps to do
----------------------------------------
"""
subs = ['CBU110593_MR10033_CC410222']
dd = '/imaging/camcan/sandbox/sd03/DTI' # data directory (read from)
gd = '/imaging/camcan/sandbox/jg03/data/connectome_tractography/MUSC_workflow/test' # group directory (output to)
fd = '/imaging/local/software/fsl/latest/x86_64/fsl/data' # FSL directory for template images
FSL_ROI_template = join(fd,'atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr0-1mm.nii.gz')
FSL_FA_template = join(fd,'standard/FMRIB58_FA_1mm.nii.gz')
n_samples = 2000
do__delete_dirs = True
do__flirting = True
do__tractography = True
do__read_particles = False
do__delete_raw_particles = True
do__connectome_construction = True
"""
------
Do it!
------
"""
print '\n\njg_MUSC_connectome_workflow - awesomeness on a stick. '
# output steps to be run
print "\n\nWhat we're going to do:"
for d in dir():
    if 'do__' in d: print '\n%s : %s ' % (d, eval(d))
# remove and remake group output dir
if isdir(gd) and do__delete_dirs: call('rm -r ' + gd, shell=True)
if not isdir(gd): os.mkdir(gd)
# loop and go
for s_it, s in enumerate(subs):
    print '\n\nSubject #%d : %s' % (s_it + 1, s)
    """
    Define files etc. for this subject
    """
    sd = join(gd, s)  # subject directory
    if not isdir(sd): os.mkdir(sd)
    # extant files
    nodif_brain_mask = join(dd, s + '.bedpostX', 'nodif_brain_mask.nii.gz')
    base_name = join(dd, s + '.bedpostX', 'merged')
    FA_native = join(dd, 'FA', s + '_nldtiFA.nii.gz')
    # files to be created
    ROIs_native = join(sd, 'HarvOx_ROIs_native.nii.gz')
    FA_MNI = join(sd, 'FA_MNI.nii.gz')
    FA_native2MNI_xfm = join(sd, 'FA_native2MNI_xfm')
    FA_native2MNI_xfm_inv = join(sd, 'FA_MNI2native_xfm')
    merged_file = join(sd, 'allROIs_merged.nii.gz')
"""
Make native space seeds
"""
if do__flirting:
flirt_cmd = 'flirt -in %s -ref %s -out %s -omat %s -usesqform ' %(FA_native, FSL_FA_template, FA_MNI,FA_native2MNI_xfm)
print '\n\ngetting native-->MNI mapping with flirt: \n\n' + flirt_cmd
call([flirt_cmd], stdout=devnull, stderr=STDOUT, shell=True)
#fsl.FLIRT(in_file = FA_native, reference = FSL_FA_template, dof=12, out_file=FA_MNI, out_matrix_file=FA_native2MNI_xfm).run() # (nipype alternative)
convxfm_cmd = 'convert_xfm -omat %s -inverse %s ' % (FA_native2MNI_xfm_inv, FA_native2MNI_xfm)
print '\n\ninverting mapping: \n\n' + convxfm_cmd
call(convxfm_cmd, stdout=devnull, stderr=STDOUT, shell=True)
#fsl.ConvertXFM(in_file=FA_native2MNI_xfm, invert_xfm = True, out_file = FA_native2MNI_xfm_inv).run() # (nipype alternative)
# Apply inv transform to atlas
applyxfm_cmd = 'flirt -in %s -ref %s -applyxfm -init %s -out %s -interp nearestneighbour' %(FSL_ROI_template,FA_native,FA_native2MNI_xfm_inv,ROIs_native)
print '\n\napplying inv transform to ROI atlas: \n\n' + applyxfm_cmd #; os.system(appxfm_cmd)
call(applyxfm_cmd, stdout=devnull, stderr=STDOUT, shell=True)
#fsl.ApplyXfm(in_file=FSL_ROI_template, in_matrix_file=FA_native2MNI_xfm_inv,interp='nearestneighbour',reference=FA_native,out_file=ROIs_native).run()
"""
Loop through all ROIs and run tractography
"""
ROI_vals = np.unique(nib.load(ROIs_native).get_data())
merge_list = ''
for r_it, r in enumerate(ROI_vals):
print '\n\nTractographizing ROI %s of %s ' %(str(r_it+1), str(len(ROI_vals)+1))
# Define filenames and command strings
rn = 'ROI_' + str(r) # ROI name
rfn = join(sd, rn+'.nii.gz') # ROI filename
td = join(sd, rn) # tractography directory
os.mkdir(td) # #if isdir(td): os.system('rm -r ' + td)
tfn_fdt = rn + '_fdt_paths.nii.gz' # probtrack filename
tfn_trk = rn + '_trackvis_particles.trk' # trackvis filename
thresh_args = ' %s -thr %s -uthr %s %s' % (ROIs_native, str(r),str(r),rfn)
thresh_cmd = 'fslmaths ' + thresh_args
track_args1 = ' --mode=seedmask -x %s -l -c 0.2 -S 2000 --steplength=0.5 -P %s --forcedir --opd ' % (rfn, str(n_samples))
track_args2 = ' --pd -s %s -m %s --out=%s --dir=%s --verbose=2' %(base_name, FA_native, tfn_fdt,td)
track_cmd = 'probtrackx ' + track_args1+track_args2
merge_list = merge_list + ' ' + join(td,tfn_fdt)
# Run tractography
if do__tractography:
print '\n\nThresholding ROI: \n\n %s' %thresh_cmd
call(thresh_cmd, stdout=devnull, stderr=STDOUT, shell=True)
print '\n\nRunning tractography: \n\n %s \n\nstart time: %s' % (track_cmd , str(datetime.now()))
call(track_cmd, stdout=devnull, stderr=STDOUT,shell=True)
print '\nfinish time: %s ' % str(datetime.now())
merge_list = merge_list + ' ' + join(td,tfn_fdt)
# Read particles and write to .trk
if do__read_particles:
try:
print '\n\nReading particle files: \n\nstart time: %s' % str(datetime.now())
particle_files_list = glob.glob(join(td,'particle*'))
p2t = bpx.Particle2Trackvis(particle_files = particle_files_list,reference_file = nodif_brain_mask, out_file = join(sd,tfn_trk))
print '\nfinish time: %s ' % str(datetime.now())
if delete_particles:
print '\n\n deleting particle files'
os.remove(join(td, 'particle*'))
except: 'no particles found'
"""
Merge tractography outputs and make connectomes
"""
print '\n\nFinished all tractography!'
if do__connectome_construction:
print '\n Now merging track images and generating connectomes...'
# Merge tractography outputs
merge_args = ' -t ' + merged_file + ' ' + merge_list
merge_cmd = 'fslmerge ' + merge_args
print '\n\nMerging streamline distributions into a 4D stack. \n\nstart time: %s' % str(datetime.now())
call(merge_cmd, stdout = devnull, stderr = STDOUT, shell=True)
print '\nfinish time: %s ' % str(datetime.now())
# Generate connectomes
os.chdir(sd)
connectome = mcon.TNProbtrackxConnectome(fdt_image=merged_file , roi_image = ROIs_native)
print '\n\nGenerating connectome. \n\nstart time: %s' % str(datetime.now())
connectome.generate_network()
print '\nfinish time: %s ' % str(datetime.now())
# Set subject ID and write connectome to file
connectome.subject_id = s #
connectome.write('CTOME')
print '\n\nFinished subject %s ' %s
print '\n\nFinished all subjectsi :)'
devnull.close()
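# ---------------------------------------------------------------------------
# Hedged sketch of the raw probtrackx particle format read above (and by the
# chris filo particle reader mentioned in the to-do list). With --verbose=2,
# probtrackx is assumed to write one plain-text 'particle*' file per
# streamline to the output directory, with one 'x y z' coordinate per line.
# This loader is defined for reference only and never called.
# ---------------------------------------------------------------------------
def read_particle_file(particle_fname):
    import numpy as np
    # -> (n_points, 3) array of streamline coordinates
    return np.loadtxt(particle_fname)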
"""
Code Graveyard:
---------------
# writing ROIs with nibabel wasn't quite working...
#nib.Nifti1Image(nib.load(ROIs_native).get_data()==r,nib.load(ROIs_native).get_affine()).to_file(thisROI_fname)
#dat = (nib.load(ROIs_native).get_data()==r)*1
#dat.dtype = 'uint8'
#aff = nib.load(ROIs_native).get_affine()
#nib.Nifti1Image(dat,aff).to_filename(thisROI_fname) # dtype='uint8')
#nib.Nifti1Image((nib.load(ROIs_native).get_data()==r)*1,nib.load(ROIs_native).get_affine()).to_filename(thisROI_fname)
#with open(os.devnull, 'wb') as devnull:
# subprocess.check_call(['/etc/init.d/apache2', 'restart'], stdout=devnull, stderr=subprocess.STDOUT)
#with open(os.devnull, 'wb') as devnull: call([flirt_cmd], stdout = devnull, stderr = sp.STDOUT, shell=True)
# Apply affine transform to images and .trks
ut_dir = '/home/jg03/
# (use nipype interface for this??)
tt_cmd = 'track_transform %s %s -src %s -ref %s -reg %s' %(trk_file, trk_file, nodif_brain_mask, T1_img, b0tot1_reg)
print 'running track transform'
print '...time: (ADD TIME HERE)'
os.system(tt_cmd)
print '...finished track_transform. time: (ADD TIME HERE)'
#track_transform DIFFUSION/fibers.trk DIFFUSION/fibers.trk -src NIFTI/b0.nii.gz -ref NIFTI/t1.nii.gz -reg NIFTI/transformations/b0-to-t1.mat
Email from Simon Davis:
webpages:
http://academicdepartments.musc.edu/neurosciences/research/laboratories/bonilha/documents/overview.html
http://academicdepartments.musc.edu/neurosciences/research/laboratories/bonilha/documents/documentation.markdown
the bedposts are in /imaging/camcan/sandbox/sd03/DTI/${SUBJ}.bedpostx/
i put the MUSC toolbox in my /imaging/camcan/sandbox/sd03/scripts folder.
"""
"""
MUSC online tutorial:
---------------------
overview
Table of Contents
1 DTI Processing
1.1 Deterministic (CMTK Analogue)
1.1.1 Pre-processing
1.1.2 Registration
1.1.3 Segmentation / Parcellation
1.1.4 Streamline Generation (Fiber Tracking)
1.1.5 Connectome Creation
1.2 Probabilistic
1.2.1 Pre-processing
1.2.2 Registration
1.2.3 Segmentation / Parcellation
1.2.4 Streamline Generation (Fiber Tracking)
1.2.5 Connectome Creation
1 DTI Processing
1.1 Deterministic (CMTK Analogue)
1.1.1 Pre-processing
Extract first b0 volume - this will be used later in registration.
fslroi NIFTI/dwi-orig.nii.gz NIFTI/b0.nii.gz 0 1
Remove skull from first b0 volume.
bet NIFTI/b0.nii.gz NIFTI/b0-brain.nii.gz -f 0.2
Remove skull from T1 volume.
bet NIFTI/t1.nii.gz NIFTI/t1-brain.nii.gz
Binarize brain-extracted b0 image.
fslmaths NIFTI/b0-brain.nii.gz -bin NIFTI/b0-brain-mask.nii.gz
Apply motion correction to diffusion volumes, registering each volume of the dwi to the first b0 volume.
eddy_correct NIFTI/dwi-orig.nii.gz NIFTI/dwi.nii.gz 0
1.1.2 Registration
Register b0 to T1 using a 12-parameter linear transformation. This may also be done as a 6-parameter, rigid transform, but we feel the 12-parameter is more appropriate.
flirt -in NIFTI/b0-brain.nii.gz -ref NIFTI/t1-brain.nii.gz -o NIFTI/b0-to-t1.nii.gz -omat NIFTI/transformations/b0-to-t1.mat -usesqform
1.1.3 Segmentation / Parcellation
Use freesurfer to segment and parcellate the T1 image. This process may take up to a full day depending upon computing resources.
recon-all -sd `pwd` -s FREESURFER -i NIFTI/t1.nii.gz
Use muscip to map the segmentation to 83 regions of interest, overlaid on our native T1 image space.
To accomplish this, first load ipython
ipython
Then, after ipython has initialized, proceed with the mapping using muscip.
# use muscip to map processed freesurfer directory to expected roi and wm images
import muscip.atlas.freesurfer as mfree
fs = mfree.load('FREESURFER')
roi_image = fs.roi_img()
white_matter_mask_image = fs.wm_mask()
# use nibabel to save images in memory to disk
import nibabel as nib
nib.save(roi_image, 'NIFTI/roi.nii.gz')
nib.save(white_matter_mask_image, 'NIFTI/wm.nii.gz')
1.1.4 Streamline Generation (Fiber Tracking)
Reconstruct tensors using Diffusion Toolkit.
dti_recon NIFTI/dwi.nii.gz DIFFUSION/dti -gm bvecs-dtk -b 1000 -axial -p 3 -sn 1 -ot nii.gz
Generate streamlines using Diffusion Toolkit. We typically use the FACT (Fiber Assignment by Continuous Tracking) algorithm. We constrain our tracking voxels by applying the reconstructed FA image with a threshold typically between 0.1 and 0.2 depending upon application, the lower thresholds being more lenient. We feel we can afford to be lenient in this step because we will later be filtering streamlines by our overlaid ROIs.
dti_tracker DIFFUSION/dti DIFFUSION/fibers.trk -at 45 -m DIFFUSION/dti_fa.nii.gz 0.15 2.0 -it nii.gz -rseed 24
spline_filter DIFFUSION/fibers.trk 0.5 DIFFUSION/fibers.trk
Register streamlines to native T1 space using Diffusion Toolkit.
track_transform DIFFUSION/fibers.trk DIFFUSION/fibers.trk -src NIFTI/b0.nii.gz -ref NIFTI/t1.nii.gz -reg NIFTI/transformations/b0-to-t1.mat
1.1.5 Connectome Creation
Use muscip to select streamlines that meet our criteria, most importantly that the endpoints of kept streamlines reside inside the volumes of distinct regions of interest. This will again be done inside ipython
ipython
wait for ipython to initialize ...
import muscip.connectome as mcon
# load connectome and extract network
connectome = mcon.TNDtkConnectome(fibers='DIFFUSION/fibers.trk', min_fiber_length=20.0, max_fiber_length=300.0, roi_image='NIFTI/roi.nii.gz')
connectome.generate_network()
# write connectome to file
connectome.write('CTOME')
While still in the same ipython session, use muscip to generate hagmann densities.
# assuming we are still in ipython and have same connectome object in memory...
# set our white matter mask image
connectome.wm_image = 'NIFTI/wm.nii.gz'
connectome.populate_hagmann_density()
# write connectome to file
connectome.write('CTOME')
While still in the same ipython session, use muscip to add scalars such as FA to our connectome.
# assuming we are still in ipython and have same connectome object in memory...
# add a scalar image
connectome.add_scalar('DIFFUSION/dti_fa.nii.gz', 'fa', aff='NIFTI/transformations/b0-to-t1.mat')
# set subject ID and write connectome to file
connectome.subject_id = 'SUBID' # replace w/ actual subject ID (string)
connectome.write('CTOME')
1.2 Probabilistic
1.2.1 Pre-processing
Extract first b0 volume - this will be used later in registration.
fslroi NIFTI/dwi-orig.nii.gz NIFTI/b0.nii.gz 0 1
Remove skull from first b0 volume.
bet NIFTI/b0.nii.gz NIFTI/b0-brain.nii.gz -f 0.2
Remove skull from T1 volume.
bet NIFTI/t1.nii.gz NIFTI/t1-brain.nii.gz
Binarize brain-extracted b0 image.
fslmaths NIFTI/b0-brain.nii.gz -bin NIFTI/b0-brain-mask.nii.gz
Apply motion correction to diffusion volumes, registering each volume of the dwi to the first b0 volume.
eddy_correct NIFTI/dwi-orig.nii.gz NIFTI/dwi.nii.gz 0
1.2.2 Registration
Register b0 to T1 using a 12-parameter linear transformation. This may also be done as a 6-parameter, rigid transform, but we feel the 12-parameter is more appropriate.
flirt -in NIFTI/b0-brain.nii.gz -ref NIFTI/t1-brain.nii.gz -o NIFTI/b0-to-t1.nii.gz -omat NIFTI/transformations/b0-to-t1.mat -usesqform
1.2.3 Segmentation / Parcellation
Use freesurfer to segment and parcellate the T1 image. This process may take up to a full day depending upon computing resources.
recon-all -sd `pwd` -s FREESURFER -i NIFTI/t1.nii.gz
Use muscip to map the segmentation to 83 regions of interest, overlaid on our native T1 image space. To accomplish this, first load ipython
ipython
Then, after ipython has initialized, proceed with the mapping using muscip.
# use muscip to map processed freesurfer directory to expected roi and wm images
import muscip.atlas.freesurfer as mfree
fs = mfree.load('FREESURFER')
roi_image = fs.roi_img()
white_matter_mask_image = fs.wm_mask()
# use nibabel to save images in memory to disk
import nibabel as nib
nib.save(roi_image, 'NIFTI/roi.nii.gz')
nib.save(white_matter_mask_image, 'NIFTI/wm.nii.gz')
Create individual mask images for each label in ROIs.
# create a directory to contain seed masks
mkdir DIFFUSION/seed_masks
# get min and max values for labels.. we will assume
# we are dealing with integers
min_i=`fslstats NIFTI/roi.nii.gz -l 0.1 -R | cut -d ' ' -f1`
max_i=`fslstats NIFTI/roi.nii.gz -l 0.1 -R | cut -d ' ' -f2`
# create masks for each region (removing any .0's from the values if
# needed, akin to casting as int)
for i in `seq ${min_i/.0*/} ${max_i/.0*/}`; do
fslmaths NIFTI/roi.nii.gz -thr $i -uthr $i -bin "DIFFUSION/seed_masks/"${i}.nii.gz
done
1.2.4 Streamline Generation (Fiber Tracking)
Model crossing fibers using bedpostx.
# stage input files
cp NIFTI/dwi.nii.gz DIFFUSION/data.nii.gz
cp NIFTI/b0-brain-mask.nii.gz DIFFUSION/nodif_brain_mask.nii.gz
# run bedpostx (this will take a long time to run)
bedpostx DIFFUSION
Generate streamlines using probtrackx. For this we will use mostly default parameters: 2000 seeds per voxel, 5000 steps per streamline.
# for each seed mask, use probtrackx to create
# a fiber distribution
for seed_mask in DIFFUSION/seed_masks/*.nii.gz; do
mask_file=`basename "$seed_mask"`
mask_name=${mask_file%.nii*}
echo -e "Starting tracking for region: $mask_name..."
probtrackx --mode=seedmask -x "$seed_mask" -l -c 0.2 -S 2000 \
--steplength=0.5 -P 5000 --forcedir --opd --pd -s \
DIFFUSION.bedpostX/merged -m DIFFUSION/nodif_brain_mask.nii.gz \
--xfm=NIFTI/transformations/b0-to-t1.mat \
--dir=DIFFUSION/fdt_paths --out=${mask_name}.nii.gz
done
Merge individual streamline distributions into a single 4-dimensional stack.
cd DIFFUSION/fdt_paths
fslmerge -t fdt.nii.gz `ls . | sort -h`
1.2.5 Connectome Creation
Use muscip to generate connectomes. With the fiber distributions in the same space as our image containing regions of interest, for each fiber distribution corresponding to a given seed region, extract the fiber density inside all other regions of interest. This will be done in an ipython session
ipython
wait for ipython to initialize ...
import muscip.connectome as mcon
# load connectome and extract network
connectome = mcon.TNProbtrackxConnectome(fdt_image='DIFFUSION/fdt.nii.gz', roi_image='NIFTI/roi.nii.gz')
connectome.generate_network()
# set subject ID and write connectome to file
connectome.subject_id = 'SUBID' # replace w/ actual subject ID (string)
connectome.write('CTOME')
Date: 2012-11-20 01:08:25 EST
Author: Travis Nesland
"""