Convolution tools for image resolution matching and unsharp masking.
import numpy as np
from astropy.io import fits
from astropy.convolution import convolve, convolve_fft, Gaussian2DKernel
from pathlib import Path

def get_Gaussian_kernel(from_beam, to_beam, pixelsize):
    """Generate a circular Gaussian kernel that degrades a beam of FWHM
    `from_beam` to FWHM `to_beam` (both in arcsec; `pixelsize` in arcsec/pixel).
    """
    # FWHM of the convolving Gaussian, converted to a standard deviation in pixels.
    stddev = np.sqrt(to_beam**2 - from_beam**2)
    stddev /= np.sqrt(8. * np.log(2.))
    stddev /= pixelsize
    # Kernel sizes must be odd; make the kernel about 6 stddev across.
    x_size = 2 * int(3. * stddev) + 1
    return Gaussian2DKernel(x_stddev=stddev, x_size=x_size,
                            y_size=x_size, mode='linear_interp')
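
# Illustrative check (all numbers here are hypothetical): smoothing data taken
# with a 0.05" beam to a circular 0.10" beam on a 0.01"/pixel grid needs a
# kernel of FWHM sqrt(0.10**2 - 0.05**2) ~ 0.087", i.e. a stddev of ~3.7 pixels.
# kernel = get_Gaussian_kernel(from_beam=0.05, to_beam=0.1, pixelsize=0.01)
# print(kernel.array.shape)   # odd-sized square array, roughly 6 stddev across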

def circularize_beam(inputfile, project=None, from_beam=0.0,
                     to_beam=0.1, writesmooth=False):
    """Bring a FITS image to a new "to_beam" resolution by convolution
    with a circular Gaussian kernel. Works for both 2D images and cubes.
    """
    # Check if project exists. Create work directory if not.
    if project is None:
        project = Path(inputfile).parent.absolute()
    print("Creating project directory:", project)
    Path(project).mkdir(parents=True, exist_ok=True)

    # Read in the data.
    hdul = fits.open(inputfile)
    data = np.squeeze(hdul[0].data)
    header = hdul[0].header
    print("Input data has", data.ndim, "dimensions of", data.shape)
    cdelt = np.abs(header['CDELT1'] * 3600.)

    # Create empty array for storing convolved data (numpy axes are [y, x]).
    nz = 1
    smooth = np.zeros((header['NAXIS2'], header['NAXIS1']))
    if data.ndim > 2:
        nz = header['NAXIS3']
        smooth = np.zeros((nz, header['NAXIS2'], header['NAXIS1']))

    # Create circular Gaussian kernel.
    print("Beam size (from header) prior to convolution:",
          '{:.4f} x {:.4f}'.format(header['BMAJ'] * 3600.,
                                   header['BMIN'] * 3600.))
    beam = get_Gaussian_kernel(from_beam, to_beam, cdelt)

    if data.ndim > 2:
        for i in np.arange(nz):
            im = data[i, :, :]
            print("Convolving channel:", i)
            # im = convolve_fft(im, beam)
            im = convolve(im, beam)
            smooth[i, :, :] += im
    else:
        im = data
        smooth += convolve_fft(im, beam)
        # smooth += convolve(im, beam, boundary='extend')

    # Fix units to new beam size.
    # smooth *= header['BMIN']*header['BMAJ'] / (to_beam/3600.)**2.

    # Fix header beam size.
    header['BMIN'] = to_beam / 3600.
    header['BMAJ'] = to_beam / 3600.
    header['BPA'] = 0.0

    if writesmooth:
        hdu = fits.PrimaryHDU()
        hdu.data = smooth.astype('float32')
        hdu.header = header
        hdu.writeto(Path(project) / (Path(inputfile).stem + '_smooth.fits'),
                    output_verify='fix', overwrite=True)

    print("Returning smoothed data with beam:",
          '{:.4f} x {:.4f}'.format(header['BMAJ'] * 3600.,
                                   header['BMIN'] * 3600.))
    return smooth
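
# Minimal usage sketch (the file name is hypothetical): convolve a cube observed
# with a ~0.03" beam down to a circular 0.1" beam and write '<stem>_smooth.fits'
# next to the input.
# smoothed = circularize_beam('my_cube.fits', from_beam=0.03, to_beam=0.1,
#                             writesmooth=True)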

def unsharp_masking(inputfile, project=None,
                    times_beam=3.0, unsharp_factor=2.0):
    """Generate an unsharp-masked version of a FITS image. It subtracts
    the large scales (a smoothed version of the image) to emphasize
    fine structure, as follows:

    unsharp_img = img + unsharp_factor * (img - smoothed_img)
    """
    # Check if project exists. Create work directory if not.
    if project is None:
        project = Path(inputfile).parent.absolute()
    print("Creating project directory:", project)
    Path(project).mkdir(parents=True, exist_ok=True)

    # Read in the data.
    hdul = fits.open(inputfile)
    img = np.squeeze(hdul[0].data)
    img0 = np.copy(img)
    header = hdul[0].header
    print("Input data has", img.ndim, "dimensions of", img.shape)

    # Set smoothing scale (in arcsec) from the mean beam size.
    cdelt = np.abs(header['CDELT1'] * 3600.)
    scale = times_beam * 0.5 * (header['BMIN'] + header['BMAJ']) * 3600.
    gauss = get_Gaussian_kernel(0.0, scale, cdelt)

    # Do convolution (numpy axes are [y, x]).
    smooth = np.zeros((header['NAXIS2'], header['NAXIS1']))
    smooth += convolve_fft(img, gauss)

    # Unsharp masking.
    print('Subtracting scales similar to {:.3f} arcsec'.format(scale))
    unsharp = img0 + unsharp_factor * (img0 - smooth)

    # Export unsharp-masked data, restoring two degenerate leading axes.
    uhdu = fits.PrimaryHDU()
    udata = np.zeros((1, 1, header['NAXIS2'], header['NAXIS1']))
    udata[0, 0, :, :] += unsharp
    uhdu.data = udata
    uhdu.header = header
    uhdu.writeto(Path(project) / (Path(inputfile).stem + '_unsharp.fits'),
                 output_verify='fix', overwrite=True)
    return unsharp
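
# Minimal usage sketch (file name and tuning values are hypothetical): boost
# structure finer than ~3 beams and write '<stem>_unsharp.fits' to the project
# directory.
# sharpened = unsharp_masking('my_image.fits', times_beam=3.0, unsharp_factor=2.0)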