#!/usr/bin/env python
"""
Extracts MODIS-derived products (1 km) from the HDF files, rescales them, screens the values
against an acceptable QA set and dumps them to a binary file. The code is generic enough that
supporting a new MODIS product should just mean adding another method. Currently works on LAI,
NDVI/EVI and LST. An ENVI .hdr file is also written out...nice.
*** NOTE...I don't vouch for the reliability of the default QA, please supply your own on
the cmd line.
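
Example usage (illustrative only -- the script name, directory and QA file are
whatever you saved them as):

    python extract_modis_layers.py -n -d /path/to/hdf/dir -q my_qa_values.txt
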
That's all folks.
"""
__author__ = "Martin De Kauwe"
__version__ = "1.0 (19.05.2010)"
__email__ = "mdekauwe@gmail.com"
import warnings
warnings.simplefilter('ignore', DeprecationWarning)
import re, sys, os, optparse
import fnmatch
import numpy as np
from pyhdf.SD import SD, SDC
def clParsar():
    desc = """Script extracts the MODIS binary layers from the HDFs and dumps an ENVI (*hopefully*) .hdr file"""
clp = optparse.OptionParser("Usage: %prog [options] filename", description = desc)
clp.add_option("-d", "--dir", default="/users/global/malngu/grid-area/MODIS/ndvi", help="Path to the HDF directory")
clp.add_option("-l", action="store_true", dest="EXTRACT_LAI", default=False, help = "Extract MODIS LAI/LAI STDEV V005")
clp.add_option("-n", action="store_true", dest="EXTRACT_NDVI", default=False, help = "Extract MODIS NDVI/EVI/red/NIR V005")
clp.add_option("-t", action="store_true", dest="EXTRACT_LST", default=False, help = "Extract MODIS LST V005")
clp.add_option("-v", action="store_true", dest="VERBOSE", default=False, help = "Verbosity?")
clp.add_option("-a", "--view_zenith_angle", default="60.0", help="Set the LST view zenith angle")
    clp.add_option("-q", "--qafile", default="", help="Extract MODIS product that meets the QA file definitions (QA ints on separate lines)")
return clp.parse_args()
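# A QA file (passed with -q) is expected to contain one acceptable QA integer
# per line, read with np.fromfile(..., sep='\n'), e.g. (illustrative values):
#
#     0
#     2
#     24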
class ExtractModisData():
def __init__(self, qafile, EXTRACT_LAI, EXTRACT_NDVI, EXTRACT_LST):
self.EXTRACT_LAI = EXTRACT_LAI
self.EXTRACT_NDVI = EXTRACT_NDVI
self.EXTRACT_LST = EXTRACT_LST
self.qafile = qafile
if self.qafile:
self.good_QA = np.fromfile(self.qafile, sep='\n').astype(np.int16)
else:
# Default QA
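            # NB: these defaults are illustrative -- see the NOTE in the module
            # docstring; pass your own acceptable values with -q.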
if self.EXTRACT_LAI:
#self.good_QA = np.array([0, 2, 24, 26, 32, 34, 56, 58]).astype(np.int16)
self.good_QA = np.array([0]).astype(np.int16)
elif self.EXTRACT_NDVI:
self.good_QA = np.array([2048, 2049, 2052, 2053, 2112, 2113, 2116, 2117, 2560, 2561, 2564, 2565, 2624, 2625, 2628, 2629]).astype(np.int16)
elif self.EXTRACT_LST:
self.good_QA = np.arange(256).astype(np.int16)
def outputBinary(self, data, outfile):
f = open(outfile, 'wb')
data.tofile(f)
f.close()
# Generate ENVI hdr
dt = data.dtype
datatype = self.what_is_ENVI_type(dt.char)
numrows, numcols = data.shape
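        # NB: lat and lon here (and filename in the process_* methods) are
        # module-level globals set in the __main__ loop below.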
self.write_ENVI_HDR(outfile + ".HDR", numcols, numrows, 1, datatype, lat, lon, 'bsq', '-1.0')
def process_LAI_files(self, modis_sd, lon, lat):
sds_name = {'fpar':'Fpar_1km', 'lai':'Lai_1km', 'qc':
'FparLai_QC', 'fparstdev':'FparStdDev_1km',
'laistdev':'LaiStdDev_1km'}
# read the sds data
d = dict(zip([name for name in sds_name.iterkeys()], [modis_sd.select(sds).get() for sds in sds_name.itervalues()]))
a = dict(zip([name for name in sds_name.iterkeys()], [modis_sd.select(sds).attributes() for sds in sds_name.itervalues()]))
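        # d: the SDS data arrays keyed by the short names above; a: the matching
        # attribute dictionaries (valid_range, scale_factor, ...)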
# Only save the information that is valid and which passes the QA, apply scaling factors
for key, value in sds_name.items():
            if key != 'qc':
                undef = a[key]['valid_range'][0] - 1
                # mask pixels whose QA value is not in the acceptable set
                # (np.in1d handles a list of acceptable values)
                d[key][~np.in1d(d['qc'], self.good_QA).reshape(d['qc'].shape)] = undef
                d[key] = np.where(np.logical_or(d[key] < a[key]['valid_range'][0], d[key] > a[key]['valid_range'][1]), \
                                  -1.0, np.float32(d[key]) * np.float32(a[key]['scale_factor']))
outfile = "FPAR.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['fpar'], outfile)
outfile = "FPARSTDEV.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['fparstdev'], outfile)
outfile = "LAI.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['lai'], outfile)
outfile = "LAISTDEV.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['laistdev'], outfile)
def process_NDVI_files(self, modis_sd, lon, lat):
sds_name = {'ndvi':'1 km 16 days NDVI','qc': '1 km 16 days VI Quality',
'evi': '1 km 16 days EVI', 'red': '1 km 16 days red reflectance',
'nir': '1 km 16 days NIR reflectance'}
# read the sds data
d = dict(zip([name for name in sds_name.iterkeys()], [modis_sd.select(sds).get() for sds in sds_name.itervalues()]))
a = dict(zip([name for name in sds_name.iterkeys()], [modis_sd.select(sds).attributes() for sds in sds_name.itervalues()]))
# Only save the information that is valid and which passes the QA, apply scaling factors
for key, value in sds_name.items():
            if key != 'qc':
                undef = a[key]['valid_range'][0] - 1
                # mask pixels whose QA value is not in the acceptable set
                d[key][~np.in1d(d['qc'], self.good_QA).reshape(d['qc'].shape)] = undef
                d[key] = np.where(np.logical_or(d[key] < a[key]['valid_range'][0], d[key] > a[key]['valid_range'][1]), \
                                  -5.0, np.float32(d[key]) / np.float32(a[key]['scale_factor']))
outfile = "NDVI.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['ndvi'], outfile)
outfile = "EVI.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['evi'], outfile)
outfile = "RED.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['red'], outfile)
outfile = "NIR.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['nir'], outfile)
def process_LST_files(self, modis_sd, lon, lat, view_zenith_angle):
sds_name = {'lst':'LST_Day_1km', 'qc': 'QC_Day', 'view':'Day_view_time',
'angle':'Day_view_angl', 'lstnight': 'LST_Night_1km',
'qcnight': 'QC_Night', 'viewnight':'Night_view_time',
'anglenight':'Night_view_angl'}
# read the sds data
d = dict(zip([name for name in sds_name.iterkeys()], [modis_sd.select(sds).get() for sds in sds_name.itervalues()]))
a = dict(zip([name for name in sds_name.iterkeys()], [modis_sd.select(sds).attributes() for sds in sds_name.itervalues()]))
# Only save the information that is valid and which passes the QA, apply scaling factors
for key, value in sds_name.items():
            if key != 'qc' and key != 'qcnight':
                undef = a[key]['valid_range'][0] - 1
                # screen the day layers with the day QC and the night layers with
                # the night QC; mask anything not in the acceptable set
                qc = d['qcnight'] if key.endswith('night') else d['qc']
                d[key][~np.in1d(qc, self.good_QA).reshape(qc.shape)] = undef
                if key == 'angle' or key == 'anglenight':
                    d[key] = np.where(np.logical_or(d[key] < a[key]['valid_range'][0], d[key] > a[key]['valid_range'][1]), \
                                      -1.0, np.float32(d[key]) * np.float32(a[key]['scale_factor']) + np.float32(a[key]['add_offset']))
                else:
                    d[key] = np.where(np.logical_or(d[key] < a[key]['valid_range'][0], d[key] > a[key]['valid_range'][1]), \
                                      -1.0, np.float32(d[key]) * np.float32(a[key]['scale_factor']))
# might need to revisit this...but only want 6am to 18:00
        d['lst'][(d['view'] < 5.99) | (d['view'] > 18.01)] = -1.0
d['lst'][(d['angle'] >= view_zenith_angle)] = -1.0
# let pass all night time view times
d['lstnight'][(d['anglenight'] >= view_zenith_angle)] = -1.0
outfile = "LST.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['lst'], outfile)
outfile = "LSTNIGHT.%s.BIN" % ("".join(filename.split(".")[:-1]))
self.outputBinary(d['lstnight'], outfile)
def write_ENVI_HDR(self, filename, samples, lines, bands, datatype, lat, lon, interleave, undef):
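        # nominal MODIS "1 km" pixel size on the sinusoidal grid, in metres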
pixel_size = 926.625433055556
        # ENVI byte order: 0 = little endian (e.g. Intel), 1 = big endian.
        # Derive it from the machine writing the file rather than hardcoding it.
        byte_order = 0 if sys.byteorder == 'little' else 1
f = open(filename, "w")
f.write('ENVI\n')
f.write('description = { Created by ' + sys.argv[0] + '}\n')
f.write('samples = ' + str(samples) + '\n')
f.write('lines = ' + str(lines) + '\n')
f.write('bands = ' + str(bands) + '\n')
f.write('header offset = 0\n')
f.write('file type = ENVI Standard\n')
f.write('data type = ' + str(datatype) + '\n')
f.write('interleave = ' + interleave + '\n')
f.write('sensor type = MODIS\n')
f.write('byte order = ' + str(byte_order) + '\n')
f.write('map info = {Sinusoidal, 1.0000, 1.0000, ' + lon + ', ' + lat + ', ' +
str(pixel_size) + ', ' + str(pixel_size) + ', units = Meters}\n')
f.write('projection info = {16, 6371007.2, 0.0, 0.0, 0.0, Sinusoidal, units = Meters}\n')
f.write('data ignore value = ' + undef + '\n')
f.flush()
f.close()
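    # For reference, the header written above ends up looking roughly like
    # (values illustrative):
    #     ENVI
    #     samples = 1200
    #     lines = 1200
    #     data type = 4
    #     map info = {Sinusoidal, 1.0000, 1.0000, <ULX>, <ULY>, 926.625433055556, 926.625433055556, units = Meters}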
def what_is_ENVI_type(self, format_string):
# Constant defining the mapping of format strings onto data types
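        # numpy dtype.char -> ENVI data type code:
        # 1 = 8-bit byte, 2 = int16, 3 = int32, 4 = float32, 5 = float64,
        # 12 = uint16, 13 = uint32, 14 = int64, 15 = uint64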
ENVI_TYPE_MAP = {"b":1,"B":1,"h":2,"H":12,"i":3,"I":13,"f":4,"d":5,"l":14,"L":15}
if format_string not in ENVI_TYPE_MAP:
print "Failed to generate a hdr type for ENVI"
sys.exit(1)
return ENVI_TYPE_MAP[format_string]
if __name__ == "__main__":
""" sweep the cmd line and extract anything of interest """
(options, args) = clParsar()
path = options.dir
qafile = options.qafile
    view_zenith_angle = float(options.view_zenith_angle)
    if not (options.EXTRACT_LAI or options.EXTRACT_NDVI or options.EXTRACT_LST):
        print 'Remember you need to set a flag for the data to extract, i.e. -n'
        sys.exit(2)
HDF = ExtractModisData(qafile, options.EXTRACT_LAI, options.EXTRACT_NDVI, options.EXTRACT_LST)
dirList = fnmatch.filter(os.listdir(os.path.abspath(path)), '*.hdf')
for filename in dirList:
fname = path + '/' + filename
if options.VERBOSE:
print fname
modis_sd = SD(fname, SDC.READ)
# query global attributes to get geolocation info...
s = str(modis_sd.attributes())
        lon, lat = re.findall(r"UpperLeftPointMtrs=\((-?\d+\.\d*|\d*\.\d+),(-?\d+\.\d*|\d*\.\d+)", s)[0]
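        # NB: despite the names, lon/lat are the sinusoidal map x/y (metres) of
        # the tile's upper-left corner, used as the ENVI map tie point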
if options.EXTRACT_LAI:
HDF.process_LAI_files(modis_sd, lon, lat)
elif options.EXTRACT_NDVI:
HDF.process_NDVI_files(modis_sd, lon, lat)
elif options.EXTRACT_LST:
HDF.process_LST_files(modis_sd, lon, lat, view_zenith_angle)
        else:
            print 'We have a problem!'
            sys.exit(1)