@serazing
Created February 21, 2019 00:15
Functions based on pyresample to filter along-track data using a climatology of the internal deformation radius.
import xarray as xr
import numpy as np
import numba
import pandas as pd
import pyresample


def interpolate_rossby_radius(Rd, lon, lat, interp_type='gaussian',
                              radius_of_influence=500e3, fill_value=None,
                              reduce_data=True, nprocs=1, neighbours=8):
    """
    Interpolate a gridded Rossby radius climatology onto along-track positions
    using pyresample.

    Parameters
    ----------
    Rd : xr.DataArray
        Gridded Rossby radius climatology with 'lon' and 'lat' coordinates
    lon, lat : xr.DataArray
        Along-track longitudes and latitudes
    interp_type : {'gaussian', 'nearest'}, optional
        Resampling method
    radius_of_influence : float, optional
        Search radius (in metres) passed to pyresample

    Returns
    -------
    Rd_shiptrack : xr.DataArray
        Rossby radius interpolated at each along-track position
    """
    lat_min, lat_max = np.min(lat), np.max(lat)
    lon_min, lon_max = np.min(lon), np.max(lon)
    #Rd_subset = Rd.where((Rd['nav_lon'] >= lon_min) & (Rd['nav_lon'] <= lon_max) &
    #                     (Rd['nav_lat'] >= lat_min) & (Rd['nav_lat'] <= lat_max)
    #                     )
    #print(Rd_subset.mean())
    # Mask invalid (e.g. land) values before resampling
    Rd_masked = np.ma.masked_array(Rd.data, mask=np.isnan(Rd))
    lon_masked = np.ma.masked_array(Rd['lon'].data, mask=np.isnan(Rd))
    lat_masked = np.ma.masked_array(Rd['lat'].data, mask=np.isnan(Rd))
    # Wrap longitudes to a consistent range and build the source grid and
    # target swath geometries
    lon_grid, lat_grid = pyresample.utils.check_and_wrap(Rd['lon'].data,
                                                         Rd['lat'].data)
    lon_ship, lat_ship = pyresample.utils.check_and_wrap(np.asarray(lon),
                                                         np.asarray(lat))
    grid_def = pyresample.geometry.GridDefinition(lons=lon_grid, lats=lat_grid)
    swath_def = pyresample.geometry.SwathDefinition(lons=lon_ship, lats=lat_ship)
    if interp_type == 'nearest':
        Rd_shiptrack = pyresample.kd_tree.resample_nearest(
            grid_def, Rd_masked, swath_def,
            radius_of_influence=radius_of_influence,
            fill_value=fill_value,
            reduce_data=reduce_data,
            nprocs=nprocs)
    elif interp_type == 'gaussian':
        Rd_shiptrack = pyresample.kd_tree.resample_gauss(
            grid_def, Rd_masked, swath_def,
            radius_of_influence=radius_of_influence,
            sigmas=500e3,
            fill_value=fill_value,
            reduce_data=reduce_data,
            nprocs=nprocs, neighbours=neighbours)
    else:
        raise ValueError("interp_type must be 'nearest' or 'gaussian'")
    Rd_shiptrack = xr.DataArray(Rd_shiptrack, coords=lon.coords, dims=lat.dims)
    return Rd_shiptrack
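

# Illustrative usage sketch (not from the original gist): with `rossby_clim` a
# 2-D DataArray of the deformation radius (in metres) carrying 'lon'/'lat'
# coordinates, and `track_lon`/`track_lat` the along-track positions, the
# climatology could be projected onto the track as
#
#     Rd_track = interpolate_rossby_radius(rossby_clim, track_lon, track_lat,
#                                          interp_type='gaussian')
#
# A runnable, self-contained example is given at the end of this file.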


@numba.jit
def _mesoscale_filter(data, mask, time, lon, lat, rossby, win_dt=1., max_break=0.5):
    """
    Private low-level function written using numba to speed up the computation

    Parameters
    ----------
    data : 1darray
        Input observational data to filter (NaNs replaced by zeros)
    mask : 1darray
        Ones where the input data are valid, zeros elsewhere
    time : 1darray
        Time record, in nanoseconds
    lon : 1darray
        Longitudinal coordinates
    lat : 1darray
        Latitudinal coordinates
    rossby : 1darray
        Rossby radius of deformation at each observation point
    win_dt : float, optional
        Half-width of the filtering time window, in days
    max_break : float, optional
        Maximum time gap allowed around the centre point, in hours

    Returns
    -------
    data_meso : 1darray
        Data filtered to retain only mesoscale variations
    """
    def distance(lon1, lat1, lon2, lat2):
        # Flat-Earth approximation of the distance (in metres) between two points
        EARTH_RADIUS = 6371 * 1e3
        dlon = lon2 - lon1
        dlat = lat2 - lat1
        dy = np.pi / 180. * EARTH_RADIUS * dlat
        dx = (np.cos(np.pi / 180. * lat2) * np.pi / 180. * EARTH_RADIUS * dlon)
        distance = (dx ** 2 + dy ** 2) ** 0.5
        return distance

    nobs = len(data)  # Total number of observations
    data_meso = np.zeros(nobs)  # Initialize output
    # Convert the maximum break (hours) and window half-width (days) to nanoseconds
    max_break *= (3600 * 1e9)
    win_dt *= (3600 * 24 * 1e9)
    # Loop on every data point
    for ic in range(1, nobs - 1):
        # Reference point at the centre of the window
        t_ref, lon_ref, lat_ref = time[ic], lon[ic], lat[ic]
        rossby_ref = rossby[ic]  # TODO: USE A CHANGING ROSSBY RADIUS
        # Next data point: r for "right"
        jr = ic + 1
        dist_r = distance(lon_ref, lat_ref, lon[jr], lat[jr])
        dt_r = abs(time[jr] - t_ref)
        # Previous data point: l for "left"
        jl = ic - 1
        dist_l = distance(lon_ref, lat_ref, lon[jl], lat[jl])
        dt_l = abs(time[jl] - t_ref)
        # Initialize convolution sums, weight sums and window sizes
        conv_r, conv_l, wsum_r, wsum_l = 0., 0., 0., 0.
        nwin_r, nwin_l = 0, 0
        # Right side of the window
        while (dt_r < win_dt) and (jr < nobs - 1):
            w_r = np.exp(-dist_r ** 2 / (2. * rossby_ref ** 2))  # Gaussian weight
            conv_r += w_r * data[jr]
            wsum_r += w_r * mask[jr]
            nwin_r += 1
            # Go to next observation
            jr += 1
            dist_r = distance(lon_ref, lat_ref, lon[jr], lat[jr])
            dt_r = abs(time[jr] - t_ref)
        # Left side of the window
        while (dt_l < win_dt) and (jl > 1):
            w_l = np.exp(-dist_l ** 2 / (2. * rossby_ref ** 2))  # Gaussian weight
            conv_l += w_l * data[jl]
            wsum_l += w_l * mask[jl]
            nwin_l += 1
            # Go to previous observation
            jl -= 1
            dist_l = distance(lon_ref, lat_ref, lon[jl], lat[jl])
            dt_l = abs(time[jl] - t_ref)
        # Exclude invalid data: require at least five points on each side,
        # no large time gap around the centre point, and a non-zero weight sum
        norm_factor = (wsum_l + mask[ic] + wsum_r)
        if ((nwin_r >= 5) and (nwin_l >= 5) and
                (abs(time[ic] - time[ic + 1]) < max_break) and
                (abs(time[ic] - time[ic - 1]) < max_break) and
                (norm_factor != 0.)):
            data_meso[ic] = (conv_r + data[ic] + conv_l) / norm_factor
        else:
            data_meso[ic] = np.nan
    return data_meso


def mesoscale_filter(data, data_rossby, win_dt=1, max_break=0.5):
    """
    Perform a Gaussian filter based on the local deformation radius to
    remove scales smaller than mesoscale motions

    Parameters
    ----------
    data : xr.DataArray
        Input observational data to filter, with 'time', 'lon' and 'lat'
        coordinates
    data_rossby : xr.DataArray
        Rossby radius of deformation at each observation point
    win_dt : float, optional
        Half-width of the filtering time window, in days
    max_break : float, optional
        Maximum time gap allowed between consecutive observations, in hours

    Returns
    -------
    data_meso : xr.DataArray
        Data filtered to retain only mesoscale variations
    """
    #mean_rossby = get_mean_rossby_radius(ds, data_rossby)
    #print(mean_rossby)
    # Flag missing values and replace NaNs by zeros before calling numba
    mask = 1. - np.isnan(data)
    data_filled = np.nan_to_num(data)
    res = _mesoscale_filter(data_filled, mask.data,
                            pd.to_numeric(data['time'].data),
                            data['lon'].data, data['lat'].data,
                            data_rossby.data,
                            win_dt=win_dt, max_break=max_break)
    data_meso = xr.DataArray(res,
                             name=data.name + '_ME',
                             dims=data.dims,
                             coords=data.coords)
    return data_meso
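

# -----------------------------------------------------------------------------
# Self-contained example (illustrative only, not part of the original gist).
# It builds a synthetic Rossby-radius climatology and a synthetic along-track
# sea-level record, projects the climatology onto the track and applies the
# mesoscale filter. All names below (`rossby_clim`, `track_lon`, `sla`, ...)
# are hypothetical.
if __name__ == '__main__':
    # Synthetic 1-degree climatology of the deformation radius (in metres),
    # larger at low latitudes and smaller at high latitudes
    lat2d, lon2d = np.meshgrid(np.arange(-60., 61., 1.),
                               np.arange(100., 181., 1.), indexing='ij')
    rossby_clim = xr.DataArray(3e4 + 2e5 * np.cos(np.deg2rad(lat2d)) ** 2,
                               dims=('y', 'x'),
                               coords={'lat': (('y', 'x'), lat2d),
                                       'lon': (('y', 'x'), lon2d)})

    # Synthetic along-track record sampled every 10 minutes for 5 days
    ntime = 720
    time = pd.date_range('2019-02-01', periods=ntime, freq='10min')
    track_lon = np.linspace(150., 160., ntime)
    track_lat = np.linspace(-40., -30., ntime)
    lon_da = xr.DataArray(track_lon, dims='time', coords={'time': time})
    lat_da = xr.DataArray(track_lat, dims='time', coords={'time': time})

    # Large-scale signal plus small-scale noise
    rng = np.random.default_rng(0)
    sla = xr.DataArray(np.sin(np.linspace(0., 6. * np.pi, ntime))
                       + 0.3 * rng.standard_normal(ntime),
                       name='SLA', dims='time',
                       coords={'time': time,
                               'lon': ('time', track_lon),
                               'lat': ('time', track_lat)})

    # Project the climatology onto the track, then filter out sub-mesoscale noise
    Rd_track = interpolate_rossby_radius(rossby_clim, lon_da, lat_da,
                                         interp_type='gaussian')
    sla_meso = mesoscale_filter(sla, Rd_track, win_dt=1, max_break=0.5)
    print(sla_meso)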