{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import SimpleITK as sitk\n",
"import numpy as np\n",
"\n",
"#utility method that either downloads data from the MIDAS repository or\n",
"#if already downloaded returns the file name for reading from disk (cached data)\n",
"%run update_path_to_download_script\n",
"from downloaddata import fetch_data as fdata\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Load 3D volumes."
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Fetching nac-hncma-atlas2013-Slicer4Version/Data/A1_grayT1.nrrd\n",
"Fetching vm_head_mri.mha\n",
"Fetching head_mr_oriented.mha\n"
]
}
],
"source": [
"data = [sitk.ReadImage(fdata(\"nac-hncma-atlas2013-Slicer4Version/Data/A1_grayT1.nrrd\")),\n",
" sitk.ReadImage(fdata(\"vm_head_mri.mha\")),\n",
" sitk.ReadImage(fdata(\"head_mr_oriented.mha\"))]"
]
},
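{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch: print each volume's grid parameters to see that the sizes, spacings and origins differ, which is why a common reference domain is constructed next."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Hedged sketch: inspect the grid parameters of the loaded volumes using standard SimpleITK\n",
"# Image accessors. Nothing here modifies the data.\n",
"for i, img in enumerate(data):\n",
"    print('image {0}: size={1}, spacing={2}, origin={3}'.format(i, img.GetSize(), img.GetSpacing(), img.GetOrigin()))"
]
},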
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"dimension = data[0].GetDimension()\n",
"\n",
"# Physical image size corresponds to the largest physical size in the training set, or any other arbitrary size.\n",
"reference_physical_size = np.zeros(dimension)\n",
"for img in data:\n",
" reference_physical_size[:] = [(sz-1)*spc if sz*spc>mx else mx for sz,spc,mx in zip(img.GetSize(), img.GetSpacing(), reference_physical_size)]\n",
"\n",
"# Create the reference image with a zero origin, identity direction cosine matrix and dimension \n",
"reference_origin = np.zeros(dimension)\n",
"reference_direction = np.identity(dimension).flatten()\n",
"reference_size = [128]*dimension # Arbitrary sizes, smallest size that yields desired results. \n",
"reference_spacing = [ phys_sz/(sz-1) for sz,phys_sz in zip(reference_size, reference_physical_size) ]\n",
"\n",
"reference_image = sitk.Image(reference_size, data[0].GetPixelIDValue())\n",
"reference_image.SetOrigin(reference_origin)\n",
"reference_image.SetSpacing(reference_spacing)\n",
"reference_image.SetDirection(reference_direction)\n",
"\n",
"# Always use the TransformContinuousIndexToPhysicalPoint to compute an indexed point's physical coordinates as \n",
"# this takes into account size, spacing and direction cosines. For the vast majority of images the direction \n",
"# cosines are the identity matrix, but when this isn't the case simply multiplying the central index by the \n",
"# spacing will not yield the correct coordinates resulting in a long debugging session. \n",
"reference_center = np.array(reference_image.TransformContinuousIndexToPhysicalPoint(np.array(reference_image.GetSize())/2.0))"
]
},
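{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch, using an assumed toy image rather than one of the volumes above, of why TransformContinuousIndexToPhysicalPoint is preferred over multiplying the central index by the spacing: with a non-identity direction cosine matrix the two computations disagree."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Hedged illustration on an assumed toy image (not one of the datasets above): the physical center\n",
"# computed with TransformContinuousIndexToPhysicalPoint accounts for origin, spacing and direction\n",
"# cosines, while the naive index*spacing + origin computation ignores the direction matrix.\n",
"toy = sitk.Image([10, 10, 10], sitk.sitkFloat32)\n",
"toy.SetSpacing([1.5, 1.5, 2.0])\n",
"toy.SetOrigin([10.0, -5.0, 0.0])\n",
"toy.SetDirection([0.0, -1.0, 0.0,\n",
"                  1.0,  0.0, 0.0,\n",
"                  0.0,  0.0, 1.0]) # 90 degree rotation about the z axis\n",
"central_index = np.array(toy.GetSize())/2.0\n",
"print('center via TransformContinuousIndexToPhysicalPoint:', toy.TransformContinuousIndexToPhysicalPoint(central_index))\n",
"print('center via naive index*spacing + origin:           ', central_index*np.array(toy.GetSpacing()) + np.array(toy.GetOrigin()))"
]
},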
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"for img in data:\n",
" # Transform which maps from the reference_image to the current img with the translation mapping the image\n",
" # origins to each other.\n",
" transform = sitk.AffineTransform(dimension)\n",
" transform.SetMatrix(img.GetDirection())\n",
" transform.SetTranslation(np.array(img.GetOrigin()) - reference_origin)\n",
" # Modify the transformation to align the centers of the original and reference image instead of their origins.\n",
" centering_transform = sitk.TranslationTransform(dimension)\n",
" img_center = np.array(img.TransformContinuousIndexToPhysicalPoint(np.array(img.GetSize())/2.0))\n",
" centering_transform.SetOffset(np.array(transform.GetInverse().TransformPoint(img_center) - reference_center))\n",
" centered_transform = sitk.Transform(transform)\n",
" centered_transform.AddTransform(centering_transform)\n",
" # Using the linear interpolator as these are intensity images, if there is a need to resample a ground truth \n",
" # segmentation then the segmentation image should be resampled using the NearestNeighbor interpolator so that \n",
" # no new labels are introduced.\n",
" sitk.Show(sitk.Resample(img, reference_image, centered_transform, sitk.sitkLinear, 0.0))"
]
},
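{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of the segmentation case mentioned in the comment above, assuming a hypothetical label image named segmentation that occupies the same grid as img: resample it with the sitkNearestNeighbor interpolator so interpolation does not introduce new label values."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Hedged sketch, assuming a hypothetical label image that shares the grid of the last img from the\n",
"# loop above and reusing the centered_transform computed for that image. A real ground truth\n",
"# segmentation would be read from disk instead of created as an empty placeholder here.\n",
"segmentation = sitk.Image(img.GetSize(), sitk.sitkUInt8) # placeholder label image, all zeros\n",
"segmentation.CopyInformation(img)                        # copy origin, spacing and direction from img\n",
"# sitkNearestNeighbor copies existing label values instead of averaging them, so no new labels appear.\n",
"resampled_segmentation = sitk.Resample(segmentation, reference_image, centered_transform,\n",
"                                       sitk.sitkNearestNeighbor, 0)"
]
},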
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}