@emileten
Created January 12, 2022 01:50
import numpy as np
import xarray as xr
from xclim import sdba


def _datafactory(
    x, start_time="1950-01-01", variable_name="fakevariable", lon=1.0, lat=1.0
):
    """Populate xr.Dataset with synthetic data for testing."""
    start_time = str(start_time)
    if x.ndim != 1:
        raise ValueError("'x' must be 1-dimensional")
    time = xr.cftime_range(
        start=start_time, freq="D", periods=len(x), calendar="standard"
    )
    out = xr.Dataset(
        {variable_name: (["time", "lon", "lat"], x[:, np.newaxis, np.newaxis])},
        coords={
            "index": time,
            "time": time,
            "lon": (["lon"], [lon]),
            "lat": (["lat"], [lat]),
        },
    )
    # need to set variable units to pass xclim 0.29 check on units
    out[variable_name].attrs["units"] = "K"
    return out
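

# Added sketch (not in the original gist): a quick pytest-style sanity check of
# what _datafactory returns, asserting only the dims, length, and units that the
# construction above is written to produce.
def test_datafactory_structure():
    demo = _datafactory(np.linspace(1.0, 2.0, 10))
    assert demo["fakevariable"].dims == ("time", "lon", "lat")
    assert demo["fakevariable"].sizes["time"] == 10
    assert demo["fakevariable"].attrs["units"] == "K"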


def test_train_qdm_extrapolation_historical():
    """Check that extrapolation in QDM training behaves as expected."""
    # Setup input data.
    # ref = _datafactory(np.array([2.9, 2.95, 3.0, 3.1]))
    # hist = _datafactory(np.array([1.5, 1.9, 2.0, 2.5]))
    ref_vec = np.arange(3.5, 4.5001, 0.001)
    hist_vec = np.arange(1.5, 2.5001, 0.001)
    ref = _datafactory(ref_vec)
    hist = _datafactory(hist_vec)

    # Train on the synthetic data, then adjust `hist` itself; this call is
    # where the various problems show up.
    adjusted = sdba.adjustment.QuantileDeltaMapping.train(
        ref=ref["fakevariable"],
        hist=hist["fakevariable"],
        kind="*",
        nquantiles=[0.000000001, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.99999999999999],
        group=sdba.Grouper("time"),
    ).adjust(hist["fakevariable"], interp="nearest", extrapolation="constant")
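
    # Added sketch (assumption, not in the original gist): if the call above
    # completes, a minimal expectation is that the output keeps hist's time
    # axis and that constant extrapolation leaves no NaNs behind.
    assert adjusted.sizes["time"] == hist.sizes["time"]
    assert np.isfinite(adjusted).all()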
    #
    #
    # qdm_exp = sdba.adjustment.QuantileDeltaMapping.train(
    #     ref=ref['fakevariable'],
    #     hist=hist['fakevariable'],
    #     kind="*",
    #     nquantiles=[0., 0.5, 1.]
    # )
    # qdm_exp.adjust(hist['fakevariable'], interp='nearest', extrapolation='nan')
    # qdm = sdba.adjustment.QuantileDeltaMapping.train(
    #     ref=ref['fakevariable'],
    #     hist=hist['fakevariable'],
    #     kind="*",
    #     nquantiles=[0.5]
    # )
    # output_key = "memory://test_train_qdm/test_output.zarr"
    # hist_key = "memory://test_train_qdm/hist.zarr"
    # ref_key = "memory://test_train_qdm/ref.zarr"
    #
    # # Load up a fake repo with our input data in the place of big data and cloud
    # # storage.
    # repository.write(hist_key, hist)
    # repository.write(ref_key, ref)
    #
    # train_qdm(
    #     historical=hist_key,
    #     reference=ref_key,
    #     out=output_key,
    #     variable="fakevariable",
    #     kind="multiplicative",
    # )
    #
    # assert QuantileDeltaMapping.from_dataset(repository.read(output_key))