@mgxd
Created January 12, 2021 15:11
infant_epi_mask
import os
import nibabel as nb
import numpy as np
from nipype.interfaces.base import isdefined
from nipype.interfaces import fsl, ants, afni
import nipype.pipeline.engine as pe
from niworkflows.interfaces.registration import (
    EstimateReferenceImage,
    _get_vols_to_discard,
)
from niworkflows.interfaces.images import ValidateImage


# patch to apply signal drift correction to median image
class _EstimateReferenceImage(EstimateReferenceImage):
"""
Generate a reference 3D map from BOLD and SBRef EPI images for BOLD datasets.
Given a 4D BOLD file or one or more 3/4D SBRefs, estimate a reference
image for subsequent motion estimation and coregistration steps.
For the case of BOLD datasets, it estimates a number of T1w saturated volumes
(non-steady state at the beginning of the scan) and calculates the median
across them.
Otherwise (SBRefs or detected zero non-steady state frames), a median of
of a subset of motion corrected volumes is used.
If the input reference (BOLD or SBRef) is 3D already, it just returns a
copy of the image with the NIfTI header extensions removed.
LIMITATION: If one wants to extract the reference from several SBRefs
with several echoes each, the first echo should be selected elsewhere
and run this interface in ``multiecho = False`` mode.
"""
    def _run_interface(self, runtime):
        is_sbref = isdefined(self.inputs.sbref_file)
        ref_input = self.inputs.sbref_file if is_sbref else self.inputs.in_file

        if self.inputs.multiecho:
            if len(ref_input) < 2:
                input_name = "sbref_file" if is_sbref else "in_file"
                raise ValueError("Argument 'multiecho' is True but "
                                 f"'{input_name}' has only one element.")
            else:
                # Select only the first echo (see LIMITATION above for SBRefs)
                ref_input = ref_input[:1]
        elif not is_sbref and len(ref_input) > 1:
            raise ValueError("Input 'in_file' cannot point to more than one file "
                             "for single-echo BOLD datasets.")

        # Build the nibabel spatial image we will work with
        ref_im = []
        for im_i in ref_input:
            nib_i = nb.squeeze_image(nb.load(im_i))

            if nib_i.dataobj.ndim == 3:
                ref_im.append(nib_i)
            elif nib_i.dataobj.ndim == 4:
                ref_im += nb.four_to_three(nib_i)

        ref_im = nb.squeeze_image(nb.concat_images(ref_im))

        # Volumes to discard only makes sense with BOLD inputs.
        if not is_sbref:
            n_volumes_to_discard = _get_vols_to_discard(ref_im)
            out_ref_fname = os.path.join(runtime.cwd, "ref_bold.nii.gz")
        else:
            n_volumes_to_discard = 0
            out_ref_fname = os.path.join(runtime.cwd, "ref_sbref.nii.gz")

        # Set interface outputs
        self._results["n_volumes_to_discard"] = n_volumes_to_discard
        self._results["ref_image"] = out_ref_fname

        # Slicing may induce inconsistencies with shape-dependent values in extensions.
        # For now, remove all. If this turns out to be a mistake, we can select
        # extensions that don't break pipeline stages.
        ref_im.header.extensions.clear()

        # If reference is only 1 volume, return it directly
        if ref_im.dataobj.ndim == 3:
            ref_im.to_filename(out_ref_fname)
            return runtime

        if n_volumes_to_discard == 0:
            if ref_im.shape[-1] > 40:
                ref_im = nb.Nifti1Image(
                    ref_im.dataobj[:, :, :, 20:40], ref_im.affine, ref_im.header
                )

            ref_name = os.path.join(runtime.cwd, "slice.nii.gz")
            ref_im.to_filename(ref_name)

            if self.inputs.mc_method == "AFNI":
                res = afni.Volreg(
                    in_file=ref_name,
                    args="-Fourier -twopass",
                    zpad=4,
                    outputtype="NIFTI_GZ",
                ).run()
            elif self.inputs.mc_method == "FSL":
                res = fsl.MCFLIRT(
                    in_file=ref_name, ref_vol=0, interpolation="sinc"
                ).run()
            mc_slice_nii = nb.load(res.outputs.out_file)

            median_image_data = np.median(mc_slice_nii.get_fdata(), axis=3)
        else:
            median_image_data = np.median(
                ref_im.dataobj[..., :n_volumes_to_discard], axis=3
            )

        # # correct for median signal intensity
        # signal_drift = median_image_data[0] / median_image_data
        # median_image_data /= signal_drift

        nb.Nifti1Image(median_image_data, ref_im.affine, ref_im.header).to_filename(
            out_ref_fname
        )

        return runtime


# micro workflow to generate reference
wf = pe.Workflow(name='gen_ref_wf', base_dir='.')
val_img = pe.MapNode(ValidateImage(), name='val_img', iterfield=['in_file'])
gen_ref = pe.Node(_EstimateReferenceImage(multiecho=False), name='gen_ref')
wf.connect(val_img, 'out_file', gen_ref, 'in_file')
wf.config['execution']['remove_unnecessary_outputs'] = False
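# Usage sketch (not part of the original gist; the BOLD path below is a
# hypothetical placeholder). Point the validator at one or more runs and
# execute the workflow to produce ``ref_bold.nii.gz`` in the gen_ref node's
# working directory:
#
#     val_img.inputs.in_file = ["sub-01_task-rest_bold.nii.gz"]
#     wf.run()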


def epi_mask(in_file, out_file=None):
    """Use grayscale morphological operations to obtain a quick mask of EPI data."""
    from pathlib import Path
    import nibabel as nb
    import numpy as np
    from scipy import ndimage
    from skimage.morphology import ball

    if out_file is None:
        out_file = Path("mask.nii.gz").absolute()

    img = nb.load(in_file)
    data = img.get_fdata(dtype="float32")
    # First open to blur out the skull around the brain
    opened = ndimage.grey_opening(data, structure=ball(3))
    # Second, close large vessels and the ventricles
    closed = ndimage.grey_closing(opened, structure=ball(2))

    # Window filter on percentile 30
    closed -= np.percentile(closed, 30)
    # Window filter on percentile 90 of data
    maxnorm = np.percentile(closed[closed > 0], 90)
    closed = np.clip(closed, a_min=0.0, a_max=maxnorm)

    # Calculate the index of the center of mass
    cm = tuple(np.round(ndimage.measurements.center_of_mass(closed)).astype(int))
    # Erode the picture of the brain by a lot
    eroded = ndimage.grey_erosion(closed, structure=ball(5))
    # Calculate the residual
    wshed = opened - eroded
    wshed -= wshed.min()
    wshed = np.round(1e3 * wshed / wshed.max()).astype(np.uint16)
    markers = np.zeros_like(wshed, dtype=int)
    markers[cm] = 2
    markers[0, 0, -1] = -1
    # Run watershed
    labels = ndimage.watershed_ift(wshed, markers)

    hdr = img.header.copy()
    hdr.set_data_dtype("uint8")
    nb.Nifti1Image(
        ndimage.binary_dilation(labels == 2, ball(2)).astype("uint8"), img.affine, hdr
    ).to_filename(out_file)
    return out_file
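

# Example (not part of the original gist; paths are hypothetical). After the
# workflow above has produced a reference volume, a quick brain mask can be
# obtained with:
#
#     mask_file = epi_mask("ref_bold.nii.gz", out_file="ref_bold_mask.nii.gz")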