- T1 (multiple "flavors")
- T2
- T2 star
- T1/T2 infant
- mean BOLD
- FA
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>>
>>> # ANTs-flavored
>>> seg = antspynet.brain_extraction(t1, modality="t1", verbose=True)
>>> ants.plot(t1, overlay=seg, overlay_alpha=0.5)
>>>
>>> # FreeSurfer-flavored
>>> seg = antspynet.brain_extraction(t1, modality="t1nobrainer", verbose=True)
>>> ants.plot(t1, overlay=seg, overlay_alpha=0.5)
>>>
>>> # Combined
>>> seg = antspynet.brain_extraction(t1, modality="t1combined", verbose=True)
>>> ants.plot(t1, overlay=seg, overlay_alpha=0.5)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
>
> # ANTs-flavored
> seg <- brainExtraction( t1, modality = "t1", verbose = TRUE )
> plot( t1, seg, alpha = 0.5 )
>
> # FreeSurfer-flavored
> seg <- brainExtraction( t1, modality = "t1nobrainer", verbose = TRUE )
> plot( t1, seg, alpha = 0.5 )
>
> # Combined
> seg <- brainExtraction( t1, modality = "t1combined", verbose = TRUE )
> plot( t1, seg, alpha = 0.5 )
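The other modalities listed above follow the same call pattern via the corresponding `modality` string. Since the default T1 brain extraction returns a probability image, a binary brain mask can be derived by thresholding. A minimal Python sketch, assuming `t1` and `seg` hold the image and the probability output from the `modality="t1"` call above:

>>> # Sketch: threshold the probability image (from modality="t1") to a binary mask
>>> # and apply it to the T1.
>>> mask = ants.threshold_image(seg, 0.5, 1.0, 1, 0)
>>> brain = t1 * mask
>>> ants.image_write(brain, "t1_brain.nii.gz")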
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> seg = antspynet.deep_atropos(t1, verbose=True)
>>> ants.plot(t1, overlay=seg['segmentation_image'], overlay_alpha=0.75)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> seg <- deepAtropos( t1, verbose = TRUE )
> plot( t1, seg$segmentationImage, alpha = 0.75 )
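The six-tissue segmentation can be summarized as per-label volumes. A minimal Python sketch using the variables from the Python example above; the label-to-tissue mapping (e.g., 1 = CSF, 2 = gray matter, 3 = white matter, 4 = deep gray matter, 5 = brain stem, 6 = cerebellum) is an assumption to verify against the Deep Atropos documentation:

>>> # Sketch: per-label volumes in mm^3 from the Deep Atropos segmentation.
>>> # Label meanings are assumed (see lead-in); verify before reporting results.
>>> import numpy as np
>>> seg_array = seg['segmentation_image'].numpy()
>>> voxel_volume = np.prod(ants.get_spacing(t1))
>>> for label in range(1, 7):
...     print(label, np.sum(seg_array == label) * voxel_volume)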
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> kk = antspynet.cortical_thickness(t1, verbose=True)
>>> ants.plot(t1, overlay=kk['thickness_image'], overlay_alpha=0.75)
>>>
>>> # Also see antspynet.longitudinal_cortical_thickness(...)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> kk <- corticalThickness( t1, verbose = TRUE )
> plot( t1, kk$thicknessImage, alpha = 0.75 )
>
> # Also see longitudinalCorticalThickness(...)
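A simple scalar summary of the result is the mean thickness over non-zero voxels of the thickness image. A minimal Python sketch using the `kk` dictionary from the Python example above:

>>> # Sketch: mean cortical thickness (mm) over voxels with non-zero thickness.
>>> thick = kk['thickness_image'].numpy()
>>> print(thick[thick > 0].mean())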
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> dkt = antspynet.desikan_killiany_tourville_labeling(t1, do_lobar_parcellation=True, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> dkt <- desikanKillianyTourvilleLabeling( t1, doLobarParcellation = TRUE, verbose = TRUE )
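Regional volumes can then be tabulated from the label image. A minimal Python sketch, assuming the returned dictionary exposes the cortical labeling under a `'segmentation_image'` key (check the actual return value of the function):

>>> # Sketch: tabulate per-region geometry (volume, surface area, etc.) for the DKT labels.
>>> # The dictionary key below is an assumption; inspect dkt.keys() first.
>>> geom = ants.label_geometry_measures(dkt['segmentation_image'])
>>> print(geom.head())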
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> df = antspynet.deep_flash(t1, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> df <- deepFlash( t1, verbose = TRUE )
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> hipp = antspynet.hippmapp3r_segmentation(t1, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> hipp <- hippMapp3rSegmentation( t1, verbose = TRUE )
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> age = antspynet.brain_age(t1, number_of_simulations=3, sd_affine=0.01, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> age <- brainAge( t1, numberOfSimulations = 3, sdAffine = 0.01, verbose = TRUE )
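The returned object contains the age prediction averaged over the simulated affine perturbations. A minimal Python sketch, assuming the returned dictionary contains a `'predicted_age'` entry (verify against the function's documentation):

>>> # Sketch: inspect the prediction (dictionary key assumed; verify against the return value).
>>> print(age['predicted_age'])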
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('mprage_hippmapp3r'))
>>> seg = antspynet.claustrum_segmentation(t1, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'mprage_hippmapp3r' ) )
> seg <- claustrumSegmentation( t1, verbose = TRUE )
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data('kirby'))
>>> seg = antspynet.hypothalamus_segmentation(t1, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( 'kirby' ) )
> seg <- hypothalamusSegmentation( t1, verbose = TRUE )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="t1.nii.gz", origin="https://figshare.com/ndownloader/files/40251796", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>> flair_file = tf.keras.utils.get_file(fname="flair.nii.gz", origin="https://figshare.com/ndownloader/files/40251793", force_download=True)
>>> flair = ants.image_read(flair_file)
>>>
>>> wmh = antspynet.sysu_media_wmh_segmentation(flair, t1, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1.nii.gz", origin = "https://figshare.com/ndownloader/files/40251796", force_download = TRUE )
> t1 <- antsImageRead( t1File )
> flairFile <- tensorflow::tf$keras$utils$get_file( fname = "flair.nii.gz", origin = "https://figshare.com/ndownloader/files/40251793", force_download = TRUE )
> flair <- antsImageRead( flairFile )
>
> wmh <- sysuMediaWmhSegmentation( flair, t1, verbose = TRUE )
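A common downstream measure is total WMH lesion load. A minimal Python sketch using the variables from the Python example above, assuming `wmh` is a probability image (thresholded here at 0.5):

>>> # Sketch: WMH lesion load in mm^3 from the probability output.
>>> import numpy as np
>>> wmh_mask = ants.threshold_image(wmh, 0.5, 1.0, 1, 0)
>>> lesion_load = wmh_mask.numpy().sum() * np.prod(ants.get_spacing(flair))
>>> print(lesion_load)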
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="t1.nii.gz", origin="https://figshare.com/ndownloader/files/40251796", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>> flair_file = tf.keras.utils.get_file(fname="flair.nii.gz", origin="https://figshare.com/ndownloader/files/40251793", force_download=True)
>>> flair = ants.image_read(flair_file)
>>>
>>> wmh = antspynet.hypermapp3r_segmentation(t1, flair, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1.nii.gz", origin = "https://figshare.com/ndownloader/files/40251796", force_download = TRUE )
> t1 <- antsImageRead( t1File )
> flairFile <- tensorflow::tf$keras$utils$get_file( fname = "flair.nii.gz", origin = "https://figshare.com/ndownloader/files/40251793", force_download = TRUE )
> flair <- antsImageRead( flairFile )
>
> wmh <- hyperMapp3rSegmentation( t1, flair, verbose = TRUE )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="t1.nii.gz", origin="https://figshare.com/ndownloader/files/40251796", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>> flair_file = tf.keras.utils.get_file(fname="flair.nii.gz", origin="https://figshare.com/ndownloader/files/40251793", force_download=True)
>>> flair = ants.image_read(flair_file)
>>>
>>> wmh = antspynet.shiva_wmh_segmentation(flair, t1, which_model="all", verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1.nii.gz", origin = "https://figshare.com/ndownloader/files/40251796", force_download = TRUE )
> t1 <- antsImageRead( t1File )
> flairFile <- tensorflow::tf$keras$utils$get_file( fname = "flair.nii.gz", origin = "https://figshare.com/ndownloader/files/40251793", force_download = TRUE )
> flair <- antsImageRead( flairFile )
>
> wmh <- shivaWmhSegmentation( flair, t1, whichModel = "all", verbose = TRUE )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="t1.nii.gz", origin="https://figshare.com/ndownloader/files/40251796", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>> t1 = ants.resample_image(t1, (240, 240, 64), use_voxels=True)
>>> flair_file = tf.keras.utils.get_file(fname="flair.nii.gz", origin="https://figshare.com/ndownloader/files/40251793", force_download=True)
>>> flair = ants.image_read(flair_file)
>>> flair = ants.resample_image(flair, (240, 240, 64), use_voxels=True)
>>>
>>> wmh = antspynet.wmh_segmentation(flair, t1, use_combined_model=True, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1.nii.gz", origin = "https://figshare.com/ndownloader/files/40251796", force_download = TRUE )
> t1 <- antsImageRead( t1File )
> t1 <- resampleImage( t1, c( 240, 240, 64 ), useVoxels = TRUE )
> flairFile <- tensorflow::tf$keras$utils$get_file( fname = "flair.nii.gz", origin = "https://figshare.com/ndownloader/files/40251793", force_download = TRUE )
> flair <- antsImageRead( flairFile )
> flair <- resampleImage( flair, c( 240, 240, 64 ), useVoxels = TRUE )
>
> wmh <- wmhSegmentation( flair, t1, useCombinedModel = TRUE, verbose = TRUE )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="pvs_t1.nii.gz", origin="https://figshare.com/ndownloader/files/48675367", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>> flair_file = tf.keras.utils.get_file(fname="pvs_flair.nii.gz", origin="https://figshare.com/ndownloader/files/48675352", force_download=True)
>>> flair = ants.image_read(flair_file)
>>>
>>> pvs = antspynet.shiva_pvs_segmentation(t1, flair, which_model="all", verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "pvs_t1.nii.gz", origin = "https://figshare.com/ndownloader/files/48675367", force_download = TRUE )
> t1 <- antsImageRead( t1File )
> flairFile <- tensorflow::tf$keras$utils$get_file( fname = "pvs_flair.nii.gz", origin = "https://figshare.com/ndownloader/files/48675352", force_download = TRUE )
> flair <- antsImageRead( flairFile )
>
> pvs <- shivaPvsSegmentation( t1, flair, whichModel = "all", verbose = TRUE )
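Beyond a voxel-wise map, individual perivascular spaces can be counted by labeling connected components of the thresholded output. A minimal Python sketch using the `pvs` variable from the Python example above, assuming it is a probability image:

>>> # Sketch: count discrete PVS clusters (connected components of at least 5 voxels).
>>> pvs_mask = ants.threshold_image(pvs, 0.5, 1.0, 1, 0)
>>> pvs_clusters = ants.label_clusters(pvs_mask, min_cluster_size=5)
>>> print(int(pvs_clusters.max()))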
>>> import ants
>>> import antspynet
>>>
>>> t1 = ants.image_read(antspynet.get_antsxnet_data("mprage_hippmapp3r"))
>>>
>>> # Computing the thickness image takes considerably longer than the other outputs,
>>> # so consider running initially with compute_thickness_image=False to get a sense
>>> # of the application.
>>>
>>> cereb = antspynet.cerebellum_morphology(t1, compute_thickness_image=True, verbose=True)
>>>
>>> # Possible refinement using the cerebellum estimate as a mask (commented out since it's not needed for this image).
>>> # mask = ants.threshold_image(cereb['cerebellum_probability_image'], 0.5, 1, 1, 0)
>>> # cereb = antspynet.cerebellum_morphology(t1, cerebellum_mask=mask, verbose=True)
>>>
>>> ants.image_write(cereb['cerebellum_probability_image'], "cerebellum_probability_mask.nii.gz")
>>> ants.image_write(cereb['thickness_image'], "kk.nii.gz")
>>> ants.image_write(cereb['parcellation_segmentation_image'], "parcellation.nii.gz")
> library( ANTsR )
> library( ANTsRNet )
>
> t1 <- antsImageRead( getANTsXNetData( "mprage_hippmapp3r" ) )
>
> # Computing the thickness image takes considerably longer than the other outputs,
> # so consider running initially with computeThicknessImage = FALSE to get a sense
> # of the application.
> cereb <- cerebellumMorphology( t1, computeThicknessImage = TRUE, verbose = TRUE )
>
> # Possible refinement using the cerebellum estimate as a mask (commented out since it's not needed for this image).
> # mask <- thresholdImage( cereb$cerebellumProbabilityImage, 0.5, 1, 1, 0 )
> # cereb <- cerebellumMorphology( t1, cerebellumMask = mask, verbose = TRUE )
>
> # Write output to disk
> antsImageWrite( cereb$cerebellumProbabilityImage, "cerebellumProbabilityMask.nii.gz" )
> antsImageWrite( cereb$tissueSegmentationImage, "tissue.nii.gz" )
> antsImageWrite( cereb$thicknessImage, "kk.nii.gz" )
> antsImageWrite( cereb$parcellationSegmentationImage, "parcellation.nii.gz" )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> flair_file = tf.keras.utils.get_file(fname="flair.nii.gz", origin="https://figshare.com/ndownloader/files/42385077", force_download=True)
>>> flair = ants.image_read(flair_file)
>>> t1_file = tf.keras.utils.get_file(fname="t1.nii.gz", origin="https://figshare.com/ndownloader/files/42385071", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>> t1_contrast_file = tf.keras.utils.get_file(fname="t1_contrast.nii.gz", origin="https://figshare.com/ndownloader/files/42385068", force_download=True)
>>> t1_contrast = ants.image_read(t1_contrast_file)
>>> t2_file = tf.keras.utils.get_file(fname="t2.nii.gz", origin="https://figshare.com/ndownloader/files/42385074", force_download=True)
>>> t2 = ants.image_read(t2_file)
>>>
>>> bt = antspynet.brain_tumor_segmentation(flair, t1, t1_contrast, t2, patch_stride_length=32, verbose=True)
>>> # ants.image_write(bt['segmentation_image'], "brain_tumor_segmentation.nii.gz")
> library( ANTsR )
> library( ANTsRNet )
>
> flairFile <- tensorflow::tf$keras$utils$get_file( fname = "flair.nii.gz", origin = "https://figshare.com/ndownloader/files/42385077", force_download = TRUE )
> flair <- antsImageRead( flairFile )
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1.nii.gz", origin = "https://figshare.com/ndownloader/files/42385071", force_download = TRUE )
> t1 <- antsImageRead( t1File )
> t1ContrastFile <- tensorflow::tf$keras$utils$get_file( fname = "t1_contrast.nii.gz", origin = "https://figshare.com/ndownloader/files/42385068", force_download = TRUE )
> t1Contrast <- antsImageRead( t1ContrastFile )
> t2File <- tensorflow::tf$keras$utils$get_file( fname = "t2.nii.gz", origin = "https://figshare.com/ndownloader/files/42385074", force_download = TRUE )
> t2 <- antsImageRead( t2File )
>
> bt <- brainTumorSegmentation( flair, t1, t1Contrast, t2, patchStrideLength = 32, verbose = TRUE )
> #antsImageWrite( bt$segmentationImage, "brainTumorSegmentation.nii.gz" )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> mra_file = tf.keras.utils.get_file(fname="mra.nii.gz", origin="https://figshare.com/ndownloader/files/46406755", force_download=True)
>>> mra = ants.image_read(mra_file)
>>> vessels = antspynet.mra_brain_vessel_segmentation(mra, verbose=True)
> library( ANTsR )
> library( ANTsRNet )
>
> mraFile <- tensorflow::tf$keras$utils$get_file( fname = "mra.nii.gz", origin = "https://figshare.com/ndownloader/files/46406755", force_download = TRUE )
> mra <- antsImageRead( mraFile )
> vessels <- mraBrainVesselSegmentation( mra, verbose = TRUE )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="t1w_with_lesion.nii.gz", origin="https://figshare.com/ndownloader/files/44053868", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>>
>>> probability_mask = antspynet.lesion_segmentation(t1, do_preprocessing=True, verbose=True)
>>> ants.image_write(probability_mask, "lesion_probability_mask.nii.gz")
> library( ANTsR )
> library( ANTsRNet )
> library( tensorflow )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1w_with_lesion.nii.gz", origin = "https://figshare.com/ndownloader/files/44053868", force_download = TRUE )
> t1 <- antsImageRead( t1File )
>
> probabilityMask <- lesionSegmentation( t1, doPreprocessing = TRUE, verbose = TRUE )
> antsImageWrite( probabilityMask, "lesion_probability_mask.nii.gz" )
>>> import ants
>>> import antspynet
>>> import tensorflow as tf
>>>
>>> t1_file = tf.keras.utils.get_file(fname="t1w_with_lesion.nii.gz", origin="https://figshare.com/ndownloader/files/44053868", force_download=True)
>>> t1 = ants.image_read(t1_file)
>>>
>>> probability_mask = antspynet.lesion_segmentation(t1, do_preprocessing=True, verbose=True)
>>> lesion_mask = ants.threshold_image(probability_mask, 0.5, 1.1, 1, 0)
>>> t1_inpainted = antspynet.whole_head_inpainting(t1, roi_mask=lesion_mask, modality="t1", mode="axial", verbose=True)
>>> ants.image_write(t1_inpainted, "t1_repaired.nii.gz")
> library( ANTsR )
> library( ANTsRNet )
> library( tensorflow )
>
> t1File <- tensorflow::tf$keras$utils$get_file( fname = "t1w_with_lesion.nii.gz", origin = "https://figshare.com/ndownloader/files/44053868", force_download = TRUE )
> t1 <- antsImageRead( t1File )
>
> probabilityMask <- lesionSegmentation( t1, doPreprocessing = TRUE, verbose = TRUE )
> lesionMask <- thresholdImage( probabilityMask, 0.5, 1.1, 1, 0 )
> t1Inpainted <- wholeHeadInpainting( t1, roiMask = lesionMask, modality = "t1", mode = "axial", verbose = TRUE )
> antsImageWrite( t1Inpainted, "t1_repaired.nii.gz" )
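To visually inspect the repair, the inpainted image can be plotted with the lesion mask overlaid, mirroring the plotting calls used earlier. A minimal Python sketch using the variables from the Python example above:

>>> # Sketch: visualize the repaired image with the lesion mask overlaid.
>>> ants.plot(t1_inpainted, overlay=lesion_mask, overlay_alpha=0.5)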