Last active
January 1, 2016 04:29
-
-
Save flibbertigibbet/8092443 to your computer and use it in GitHub Desktop.
Downloads GFloat National Elevation Dataset data from USGS, converts to GTiff, then merges to a single tif. Expects a file 'ned_links.html' in the same directory that has the HTML contents of the email from USGS containing the download links for the GFloat zip files. Depends on BeautifulSoup and gdal; expects to be on a unix-y system.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
from bs4 import BeautifulSoup | |
import requests, os, subprocess | |
from time import sleep | |
# All downloads and outputs land in the current working directory.
download_dir = os.getcwd()

# Parse the saved USGS email and collect every download link.
# Pin the parser explicitly so behavior doesn't depend on which optional
# parser (lxml, html5lib, ...) happens to be installed.
with open('ned_links.html', 'rb') as html_file:
    soup = BeautifulSoup(html_file, 'html.parser')

# BUG FIX: the original opened this file in 'wb' and wrote str, which
# raises TypeError on Python 3 — use text mode and a context manager.
with open('just_ned_links.txt', 'w') as of:
    for link in soup.find_all('a'):
        href = link.get('href')
        # Skip anchors without an href; link.get() returns None for those
        # and None + '\n' would crash the loop.
        if href:
            of.write(href + '\n')
# Download each tile listed in just_ned_links.txt into download_dir.
# BUG FIX: the original opened the file in 'rb'; iterating a binary handle
# yields bytes, and bytes.strip('\n') with a str argument raises TypeError
# on Python 3. Text mode makes each line a str.
with open('just_ned_links.txt', 'r') as inf:
    for ln in inf:
        url = ln.strip('\n')
        # The zip's filename rides in the URL's FNAME= query parameter;
        # slice from just past 'FNAME=' up to the next '&'.
        got_fname = url.find('FNAME=')
        fname = os.path.join(download_dir,
                             url[got_fname + 6:url.find('&', got_fname)])
        r = requests.get(url)
        if not r.ok:
            print('failed downloading ' + fname + ' from ' + url)
            print(str(r.status_code) + ": " + r.reason + '\n\n')
            break  # bail
        print('got ' + fname + '...')
        # Context manager guarantees the zip is flushed/closed even if a
        # later iteration raises.
        with open(fname, 'wb') as of:
            of.write(r.content)
# Working directories: one for unzipped .flt data, one for converted tifs.
extract_dir = os.path.join(download_dir, 'extracted')
tif_dir = os.path.join(download_dir, 'tifs')
# Create each directory only when it is not already present.
for needed_dir in (extract_dir, tif_dir):
    if not os.path.isdir(needed_dir):
        os.mkdir(needed_dir)
# Unzip every downloaded tile found under download_dir, then convert its
# .flt raster into a compressed GeoTIFF placed in tif_dir.
for dirpath, _subdirs, filenames in os.walk(download_dir):
    for zip_name in filenames:
        if not zip_name.endswith('.zip'):
            continue
        print('converting ' + zip_name + '...')
        tile = zip_name[:-4]  # tile name = filename minus '.zip'
        subprocess.check_output(
            ['unzip', os.path.join(dirpath, zip_name), '-d', extract_dir])
        # convert GFloat downloads to GTiff, equivalent to:
        # gdal_translate -co COMPRESS=LZW -co PREDICTOR=3 -co TFW=YES
        #   -of GTiff floatn39w075_1.flt 'test.tif'
        # NOTE(review): assumes each zip extracts into a folder named after
        # the tile, containing 'float<tile>_1.flt' — confirm for your data.
        flt_path = os.path.join(extract_dir, tile, 'float' + tile + '_1.flt')
        tif_path = os.path.join(tif_dir, tile + '.tif')
        subprocess.check_output(
            ['gdal_translate', '-co', 'COMPRESS=LZW', '-co', 'PREDICTOR=3',
             '-co', 'TFW=YES', '-of', 'GTiff', flt_path, tif_path])
# Merge all per-tile tifs into a single raster, equivalent to:
# gdal_merge.py -o merged_elevation.tif *.tif
print("finished converting. merging tifs...")
sleep(3)  # brief pause before merging, as in the original script
merged_tif = os.path.join(tif_dir, 'merged_elevation.tif')
# BUG FIX: the original passed the literal string '<tif_dir>/*.tif' as one
# argv element; without a shell the glob is never expanded, so gdal_merge.py
# received a nonexistent path. Expand the pattern in Python instead.
tifs = sorted(os.path.join(tif_dir, name)
              for name in os.listdir(tif_dir)
              if name.endswith('.tif') and name != 'merged_elevation.tif')
subprocess.check_output(['gdal_merge.py', '-o', merged_tif] + tifs)
print('Merged tif is at: ' + merged_tif)
# remove directory of extracted zip files
# BUG FIX: 'suprocess.call' was a typo that raised NameError at runtime,
# so the cleanup never ran.
subprocess.call(['rm', '-r', extract_dir])
print('done!')
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment