@jgomezdans
Created June 14, 2010 15:22
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A script to create daily average values from NCEP reanalysis NetCDF data.
Saves the data as GeoTIFFs, one band per day.
:version: 1
:author: Jose Gomez-Dans <j.gomez.dans@geog.ucl.ac.uk>
"""
import numpy as np
# Function writeHdrFile
# Writes an ENVI .hdr file to be associated with a data file
#
# Arguments:
# filename: Name of .hdr file to be written
# samples: Number of pixels per line (samples)
# lines: Number of lines
# bands: Number of bands
# datatype: Numeric code for the relevant ENVI data type (e.g. 4 = float32)
# interleave: ENVI interleave keyword (defaults to "bil")
def writeHdrFile(filename, samples, lines, bands, datatype, interleave="bil"):
    try:
        hdrfile = open(filename, "w")
    except:
        print "Could not open header file " + str(filename) + " for writing"
        raise
    # end try
    hdrfile.write("ENVI\n")
    hdrfile.write("description = { Created by bil_handler.py }\n")
    hdrfile.write("samples = " + str(samples) + "\n")
    hdrfile.write("lines = " + str(lines) + "\n")
    hdrfile.write("bands = " + str(bands) + "\n")
    hdrfile.write("header offset = 0\n")
    hdrfile.write("file type = ENVI Standard\n")
    hdrfile.write("data type = " + str(datatype) + "\n")
    hdrfile.write("interleave = " + interleave + "\n")
    hdrfile.write("byte order = 0\n")
    hdrfile.flush()
    hdrfile.close()
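# Example of the intended pairing (hypothetical file names): a 192 x 94,
# 365-band float32 BIL file "tmp_2009_a.img" would get its metadata from
#   writeHdrFile ( "tmp_2009_a.hdr", 192, 94, 365, 4 )
# where 4 is the ENVI data type code for 32-bit floating point.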
def save_file ( var_name, year, data, days ):
"""
Save the file. initially, GeoTIFF format, one band per day for each param.
Note that the geotransform is a bit dodgy. This is because the original
data are in T62 gaussian grid format, and we are using WGS84 for the
output. The difference is less than 0.1 degrees, though.
"""
    from osgeo import gdal
    from osgeo import osr
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG ( 4326 )
    driver = gdal.GetDriverByName ( "GTiff" )
    fname = "/data/geospatial_08/ucfajlg/NCEP/daily/%s_%s.tif" % ( var_name, \
            year )
    dst_ds = driver.Create ( fname, 192, 94, days, gdal.GDT_Float32 )
    # This is weird, T62 Gaussian grid data...
    dst_ds.SetGeoTransform ( (-0.9375, 1.875, 0.0, \
            89.494064331054688, 0.0, -1.9041290283203125) )
    dst_ds.SetProjection ( spatial_ref.ExportToWkt() )
    for b in xrange(days):
        dst_ds.GetRasterBand(b+1).WriteArray ( data[b, :, :] )
    dst_ds = None
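# Quick sanity check of the GeoTIFF output (hypothetical file name), e.g. from
# an interactive session:
#   from osgeo import gdal
#   g = gdal.Open ( "/data/geospatial_08/ucfajlg/NCEP/daily/air_2009.tif" )
#   print g.RasterXSize, g.RasterYSize, g.RasterCount   # expect 192 94 <days>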
def save_envi_file ( var_name, year, data, days ):
    fname = "/data/geospatial_08/ucfajlg/NCEP/daily/%s_%s_a.img" % ( var_name, \
            year )
    ### TODO: This doesn't work. Need to rearrange the data array for
    ### ENVI-friendly saving!
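    # A possible fix (untested sketch): "bil" interleave stores the axes in
    # (lines, bands, samples) order, whereas `data` arrives here as
    # (bands, lines, samples), so something along the lines of
    #   data.transpose ( 1, 0, 2 ).astype ( np.float32 ).tofile ( fname )
    # should match the header written below.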
    data.tofile ( fname )
    ( bands, lines, samples ) = data.shape
    print bands
    print samples
    print lines
    datatype = 'f'  # NB: unused; the header below hardwires ENVI type 4 (float32)
    writeHdrFile ( fname.replace(".img", ".hdr"), samples, lines, bands, 4 )
def daily_avg ( fname, proc="mean" ):
"""
Calculates the daily avierage from the product, or its integral if its
precipitation rate (kg/m2s). Does the scaling as well, but doesn't check
for NaNs and that sort of stuff. Leap years are considered too.
"""
    from scipy.io import netcdf_file
    import os
    f = netcdf_file ( fname, 'r' )
    var_name = os.path.basename ( fname ).split(".")[0]
    year = os.path.basename ( fname ).split(".")[-2]
    data = f.variables[ var_name ][:]*f.variables[ var_name ].scale_factor + \
            f.variables[ var_name ].add_offset
    try:
        # Group the sub-daily time steps by day: (days, steps_per_day, rows, cols)
        data = np.reshape ( data, (365, -1) + data.shape[1:] )
        days = 365
    except ValueError:
        # Leap year
        data = np.reshape ( data, (366, -1) + data.shape[1:] )
        days = 366
    if var_name != "prate":
        daily_mean = np.mean ( data, axis=1 ).squeeze()
    else:
        # Precipitation rate needs to be converted to kg/m2 and integrated:
        # each 6-hourly value covers 21600 seconds
        daily_mean = 21600.*np.sum ( data, axis=1 ).squeeze()
    save_file ( var_name, year, daily_mean, days )
    save_envi_file ( var_name, year, daily_mean, days )
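# Example call (hypothetical file name, following the usual
# <var>.<level>.<year>.nc naming of the NCEP files):
#   daily_avg ( "prate.sfc.gauss.2009.nc" )
# would write prate_2009.tif with 365 bands of daily totals in kg/m2.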
if __name__ == "__main__":
    import glob
    import sys
    files = glob.glob ("*.nc")
    files.sort()
    for fich in files:
        print fich
        sys.stdout.flush()
        # Skip convective precipitation rate ("cprat") files
        if fich.find("cprat") < 0:
            daily_avg ( fich )
        # NB: this break means only the first file is processed
        break