Skip to content

Instantly share code, notes, and snippets.

@matthew-mizielinski
Last active July 21, 2016 13:43
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save matthew-mizielinski/b3f7824905453530120be81edf0e8a61 to your computer and use it in GitHub Desktop.
A Python script using the iris library (http://scitools.org.uk/iris/) to compute a time-series of the area-weighted mean value of a supplied netCDF data set, writing the result to a new file.
#Crown Copyright, 2016
import iris, sys
def dict_diff(dict_a, dict_b):
    """Return a dictionary showing differences between two dictionaries.

    Every key that is missing from one of the dictionaries, or whose values
    differ between them, maps to a ``(value_in_a, value_in_b)`` tuple, with
    ``None`` standing in for a missing entry.  Keys present in both
    dictionaries with equal values are omitted.

    Note: a key genuinely mapped to ``None`` is indistinguishable in the
    output from a missing key.
    """
    result = {}
    # Union of the two key sets.  set(a) | set(b) works on both Python 2
    # and 3, whereas dict_a.keys() + dict_b.keys() raises TypeError on
    # Python 3 because dict views do not support concatenation.
    for key in set(dict_a) | set(dict_b):
        # The original two clauses were symmetric; a single condition
        # suffices: report the key when it is absent from either dict or
        # the two values disagree.
        if key not in dict_a or key not in dict_b or dict_a[key] != dict_b[key]:
            result[key] = (dict_a.get(key), dict_b.get(key))
    return result
def synchronise_attributes(cubelist):
    """Make attribute metadata consistent across *cubelist* in place.

    Any attribute key whose value differs between cubes (or which is missing
    from some cubes) would prevent iris from concatenating the cubes into a
    single object, so every such attribute is overwritten with the
    placeholder string "overwritten" on every cube.

    Returns *cubelist* (the same object, modified in place) so the function
    behaves consistently on every path; the original returned it only for
    single-element lists.
    """
    # Zero or one cube: nothing to reconcile.  Checking <= 1 also avoids
    # the IndexError the original raised on an empty list.
    if len(cubelist) <= 1:
        return cubelist
    # Collect every attribute key on which any later cube disagrees with
    # the first.  (Two cubes can only disagree with each other on a key if
    # at least one of them disagrees with the reference cube.)
    reference = cubelist[0]
    attributes_to_overwrite = set()
    for cube in cubelist[1:]:
        attributes_to_overwrite.update(
            dict_diff(reference.attributes, cube.attributes))
    # Stamp a common placeholder so the metadata no longer conflicts.
    for attribute in attributes_to_overwrite:
        for cube in cubelist:
            cube.attributes[attribute] = "overwritten"
    return cubelist
def main():
    """Compute an Arctic area-weighted mean time-series from netCDF files.

    Reads every ``*.nc`` file in the source directory given on the command
    line, synchronises the cube metadata, concatenates the cubes into one,
    extracts data north of 67N, computes the area-weighted spatial mean and
    writes the resulting time-series to a netCDF file in the destination
    directory.
    """
    # Deal with arguments in a simple way; more complicated arguments
    # can/should be dealt with using the argparse module.
    if len(sys.argv) != 3:
        print("usage: ", sys.argv[0], "[source directory] [destination directory]")
        print("data from within the Arctic circle is read, spatially averaged and written to the destination file")
        # Exit non-zero so shell scripts can detect the usage error
        # (the original exited 0 here).
        sys.exit(1)
    data_dir = sys.argv[1]
    output_dir = sys.argv[2]

    # Load all files from the source directory.
    var_cubelist = iris.load(data_dir + "/*.nc")
    # To construct a single time series the metadata in the different
    # variables (iris cubes) needs to be synchronised.  If the metadata is
    # already consistent then nothing is done.
    synchronise_attributes(var_cubelist)
    # With consistent metadata a single iris cube should be obtained.
    var_cube = var_cubelist.concatenate_cube()

    # Extract all data north of 67 N using a Constraint object.
    latitude_constraint = iris.Constraint(latitude=lambda lat: lat > 67.)
    var_arctic = var_cube.extract(latitude_constraint)

    # Calculate weights for area averaging, then the area-weighted mean
    # over the whole Arctic region.
    aw = iris.analysis.cartography.area_weights(var_arctic)
    var_arctic_ts = var_arctic.collapsed(
        ['latitude', 'longitude'], iris.analysis.MEAN, weights=aw)

    # Construct the output netCDF file name and write the result to disk.
    output_file = output_dir + "/arctic" + data_dir.replace("/", "_") + ".nc"
    iris.save(var_arctic_ts, output_file, unlimited_dimensions=['time'])


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment