Created
December 7, 2012 07:23
-
-
Save cynici/4231463 to your computer and use it in GitHub Desktop.
NPP: List GDAS file for VIIRS TrueColorGen algorithm in descending order of preference
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#! /usr/bin/env python | |
# -*- coding: utf-8 -*- | |
import sys, os, logging, re | |
from optparse import OptionParser | |
from datetime import datetime, timedelta | |
help_text = """%prog npp_dat_file | |
Given a NPP raw data filename e.g. NPP.20120827.114454.dat, | |
output GDAS filenames of best matching candidates in descending order. | |
The DRL VIIRS TrueColorGen algorithm requires appropriate GDAS file | |
from ftp://is.sci.gsfc.nasa.gov/ancillary/temporal/global/gdas/ | |
Global Data Assimilation System (GDAS) grib1 files are produced | |
every 6 hours at 00, 0600, 1200 and 1800 UTC daily. The time, | |
date and hour of the GDAS files in grib1 format can be found | |
in the filename. For example, gdas1.PGrbF00.120612.18z | |
corresponds to June 12, 2012, 1800 UTC. When choosing the GDAS | |
ancillary file (grib1 format), choose one which is closer | |
in time (+-3 hours) rather than the date. For example | |
if you have an SDR granule at 1700 UTC, the GDAS file for 1800 hours | |
on the same day would be the best match. However, if that is | |
not available, it would be preferable to use the GDAS file corresponding | |
to 1800 UTC for the day before rather than the GDAS file at 1200 UTC | |
for the same day. It is recommended to use a gdas file for ncep_met | |
that is within +-7 days of the granule time.""" | |
# Matches a non-digit-delimited yyyymmdd.HHMMSS timestamp inside a filename,
# e.g. the ".20120827.114454." portion of NPP.20120827.114454.dat.
# FIX: use a raw string -- the original pattern relied on '\D'/'\d' escapes
# surviving in a plain string literal, which raises a DeprecationWarning
# (and is slated to become a SyntaxError) in modern Python.
datetime_re = re.compile(r'\D(?P<yyyy>\d{4})(?P<mm>\d{2})(?P<dd>\d{2})\D(?P<HH>\d{2})(?P<MM>\d{2})(?P<SS>\d{2})\D')
def make_gdas_path(fnformat, dateval, hour_str, dir=''):
    """Return the path of the GDAS file for *dateval* at synoptic hour *hour_str*.

    ``fnformat`` is an strftime pattern that, after date expansion, still
    contains a ``%(hour)s`` placeholder (default ``gdas1.PGrbF00.%y%m%d.%%(hour)sz``).
    ``dir`` (name kept for keyword-caller compatibility) is prepended when
    non-empty.
    """
    dated_name = dateval.strftime(fnformat)
    return os.path.join(dir, dated_name % {'hour': hour_str})
def main(argv=None):
    """Command-line entry point.

    Given one NPP .dat filename, print the first candidate GDAS file that
    exists on disk and return 0.  When none of the candidates exist, print
    the whole candidate list (best match first) and still return 0.  Bad
    arguments terminate via parser.error() (SystemExit).
    """
    if argv is None:
        argv = sys.argv
    # User-facing verbosity names -> logging levels.
    debuglevelD = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
    }
    defvals = {
        'gdas_dir': '$HOME/drl/ancillary/gdas',
        'fnformat': 'gdas1.PGrbF00.%y%m%d.%%(hour)sz',
    }
    parser = OptionParser(help_text)
    parser.add_option("--gdas-dir", dest="gdas_dir", type="string",
        help="Directory for GDAS files (%s)"%defvals['gdas_dir'], metavar='DIR')
    parser.add_option("--fnformat", dest="fnformat", type="string",
        help="GDAS filename (%s)"%defvals['fnformat'], metavar='STRFTIME_FORMAT')
    parser.add_option("-l", "--loglevel", dest="loglevel", type="string",
        help="Verbosity %s"%debuglevelD.keys(), metavar='LOGLEVEL')
    parser.set_defaults(**defvals)
    # BUG FIX: parse the argv we were handed rather than always reading
    # sys.argv -- otherwise the argv parameter was accepted but ignored,
    # making main() impossible to drive programmatically.
    (options, args) = parser.parse_args(argv[1:])
    if options.loglevel:
        if options.loglevel not in debuglevelD: raise AssertionError("Verbosity level must be one of: %s"%debuglevelD.keys())
        dbglvl = debuglevelD[options.loglevel]
    else:
        dbglvl = logging.WARNING
    logger = logging.getLogger()
    logger.setLevel(dbglvl)
    ch = logging.StreamHandler()
    ch.setFormatter( logging.Formatter('%(asctime)s %(lineno)d %(name)s %(funcName)s - %(levelname)s - %(message)s') )
    ch.setLevel(dbglvl)
    logger.addHandler(ch)
    if len(args) != 1:
        parser.error("Requires exactly one NPP dat filename.")
    m = datetime_re.search(os.path.basename(args[0]))
    if m:
        overpass_dt = datetime.strptime("%(yyyy)s-%(mm)s-%(dd)s %(HH)s:%(MM)s:%(SS)s"%(m.groupdict()), "%Y-%m-%d %H:%M:%S")
    else:
        parser.error("Failed to extract datetime from given NPP filename: %s" % args[0])
    overpass_secofday = (overpass_dt.hour * 3600) + (overpass_dt.minute * 60) + overpass_dt.second
    # Choose the synoptic hour nearest the overpass time; 24 stands in for
    # 00Z of the following day so a late-evening overpass can round forward.
    gdas_hours = [0, 6, 12, 18, 24]
    absdiff = [abs(overpass_secofday - (hr * 3600)) for hr in gdas_hours]
    best_hour_str = "%02d" % gdas_hours[absdiff.index(min(absdiff))]
    if best_hour_str == "24":
        best_hour_str = "00"
        overpass_dt = overpass_dt + timedelta(1)
        logger.debug("Overpass near midnight, so matching prefers the following day %s" % overpass_dt)
    gdas_dir = os.path.expandvars(options.gdas_dir)
    gdas_list = []
    gdas_file = make_gdas_path(options.fnformat, overpass_dt, best_hour_str, dir=gdas_dir)
    if os.path.exists(gdas_file):
        print(gdas_file)
        return 0
    gdas_list.append(gdas_file)
    # Same synoptic hour, widening day by day (+-1 .. +-7); the earlier day
    # is tried before the later one, per the preference in help_text.
    for delta in range(1, 8):
        for dt in [overpass_dt - timedelta(delta), overpass_dt + timedelta(delta)]:
            gdas_file = make_gdas_path(options.fnformat, dt, best_hour_str, dir=gdas_dir)
            if os.path.exists(gdas_file):
                print(gdas_file)
                return 0
            gdas_list.append(gdas_file)
    # Nothing on disk: emit the full candidate list, best match first.
    print("\n".join(gdas_list))
    return 0
if __name__ == "__main__": | |
sys.exit(main()) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment