Skip to content

Instantly share code, notes, and snippets.

@mlt
Last active December 1, 2017 01:58
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save mlt/2a89fe48dac8b6d7d28145fc9351aa98 to your computer and use it in GitHub Desktop.
Save mlt/2a89fe48dac8b6d7d28145fc9351aa98 to your computer and use it in GitHub Desktop.
import os, sys, glob, arcpy, string, zipfile, time, urllib, json
import shutil, urllib2
from contextlib import closing
def printMessage(messageString):
    """Emit a status string to the ArcGIS geoprocessing message stream."""
    arcpy.AddMessage(messageString)
def duration_human(seconds):
    """Render an elapsed time in seconds as a human-readable string.

    e.g. 3661 -> "1 hour 1 minute 1 second".  Once the duration reaches a
    whole year, only years are reported and the smaller units are dropped.

    Args:
        seconds: duration in seconds (int or float; rounded to the nearest
            whole second).

    Returns:
        Space-separated units with correct pluralization; "0 seconds" for a
        zero-length (or sub-second) duration instead of an empty string.
    """
    # int() behaves identically here on Python 2 and 3; the original long()
    # is Python-2-only and breaks forward compatibility.
    seconds = int(round(seconds))
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    # 365.242199 = mean tropical year; this divmod yields floats, truncated below
    years, days = divmod(days, 365.242199)
    minutes = int(minutes)
    hours = int(hours)
    days = int(days)
    years = int(years)
    duration = []
    if years > 0:
        duration.append('%d year' % years + 's' * (years != 1))
    else:
        if days > 0:
            duration.append('%d day' % days + 's' * (days != 1))
        if hours > 0:
            duration.append('%d hour' % hours + 's' * (hours != 1))
        if minutes > 0:
            duration.append('%d minute' % minutes + 's' * (minutes != 1))
        if seconds > 0:
            duration.append('%d second' % seconds + 's' * (seconds != 1))
    if not duration:
        # edge case: the original returned "" for a zero-length duration
        return '0 seconds'
    return ' '.join(duration)
def mosaicRaster(src):
    # Mosaic the per-tile 1 m DEMs found under <src>/geodatabase into a
    # single raster dataset (dem01) in <src>/elevation_data.gdb, then
    # derive a hillshade (dem01hs).  Each step is guarded by an Exists
    # check so a partially-completed run can be resumed.
    arcpy.CheckOutExtension("Spatial")
    arcpy.env.pyramid = "PYRAMIDS -1 BILINEAR DEFAULT 75 SKIP_FIRST"
    gdbName = "elevation_data.gdb"
    outGDB = os.path.join(src, gdbName)
    if not arcpy.Exists(outGDB):
        arcpy.CreateFileGDB_management(src, gdbName, "10.0")
    printMessage("\t\tCreating Raster Dataset " + outGDB + "/" + "dem01")
    tileWorkspace = os.path.join(src, "geodatabase")
    mosaicTarget = os.path.join(outGDB, "dem01")
    printMessage("\t\tCreating 1 meter mosaic for " + src)
    arcpy.env.workspace = tileWorkspace
    tileGDBs = arcpy.ListWorkspaces("*.*", "FileGDB")
    # snap to the first tile's 1 m DEM so cells stay aligned across the mosaic
    arcpy.env.snapRaster = tileGDBs[0] + "/dem_1m_m"
    if not arcpy.Exists(outGDB + "/dem01"):
        arcpy.CreateRasterDataset_management(outGDB, "dem01", "1", "32_BIT_FLOAT", "#", "1", "#", "NONE", "128 128", "LZ77", "10000 5600000")
        arcpy.WorkspaceToRasterDataset_management(tileWorkspace, mosaicTarget, "INCLUDE_SUBDIRECTORIES", "LAST", "FIRST", "#", "#", "NONE", "0.0", "NONE", "NONE")
        arcpy.BuildPyramids_management(mosaicTarget)
        arcpy.CalculateStatistics_management(mosaicTarget, "1", "1")
    hillshade = os.path.join(outGDB, "dem01hs")
    if not arcpy.Exists(hillshade):
        arcpy.HillShade_3d(mosaicTarget, hillshade, "315", "45", "NO_SHADOWS", "1")
        arcpy.BuildPyramids_management(hillshade)
    # NOTE(review): a disabled section here once aggregated the 1 m DEM to a
    # 3 m raster (dem03) plus hillshade (dem03hs), snapping to an external
    # statewideRaster.  merge_tiles() still gates this whole function on the
    # existence of dem03hs, which is never produced while that section stays
    # disabled -- confirm whether it should be re-enabled.
    ## if not arcpy.Exists(outGDB+"/dem03"):
    ##     printMessage("\t\tAggregating to 3-meter raster for " + src)
    ##     if arcpy.Exists(statewideRaster):
    ##         arcpy.env.snapRaster = statewideRaster
    ##     aggRaster = arcpy.sa.Aggregate(targetRaster,"3","MEAN","EXPAND","DATA")
    ##     aggRaster.save(os.path.join(outGDB,"dem03"))
    ##     arcpy.BuildPyramids_management(aggRaster)
    ##     arcpy.CalculateStatistics_management(aggRaster,"1","1")
    ##
    ## hsRaster = os.path.join(outGDB,"dem03hs")
    ## if not arcpy.Exists(hsRaster):
    ##     printMessage("\t\tCreating Hillshade of 3 meter raster")
    ##     arcpy.HillShade_3d(aggRaster,hsRaster,"315","45","NO_SHADOWS","1")
    ##     arcpy.BuildPyramids_management(hsRaster)
    ##     arcpy.CalculateStatistics_management(hsRaster,"1","1")
def makeTileIndex(src):
    # Build this delivery's tile index by selecting, from a master tile
    # feature class, the records whose DNR_QQQ_ID matches a tile
    # geodatabase found under <src>/geodatabase.
    projGDB = os.path.join(src, "elevation_data.gdb")
    if not arcpy.Exists(projGDB):
        arcpy.CreateFileGDB_management(src, "elevation_data.gdb", "9.3")
    tileFolder = os.path.join(src, "geodatabase")
    clauses = []
    for entry in os.listdir(tileFolder):
        stem, ext = os.path.splitext(entry)
        if string.lower(ext) == ".gdb":
            clauses.append("\"DNR_QQQ_ID\" = '" + stem + "'")
    theQuery = " or ".join(clauses)
    printMessage("\t\tCreating Tile Index for " + src)
    if arcpy.Exists("tile"):
        arcpy.Delete_management("tile")
    # NOTE(review): tileIndex is not defined anywhere in this script, so this
    # call raises NameError if the function is ever invoked (its only caller
    # in merge_tiles is commented out).  Confirm where the master tile index
    # feature class is supposed to come from.
    arcpy.MakeFeatureLayer_management(tileIndex, "tile", theQuery)
    arcpy.CopyFeatures_management("tile", os.path.join(src, "elevation_data.gdb", "tile_index"))
def mergeBreaklines(src):
    # Merge the per-tile hydro breakline feature classes into
    # <src>/elevation_data.gdb/hydro_breaklines.  src is the project base
    # folder (e.g. d:/dem_work/faribault) containing a "geodatabase"
    # folder with one file geodatabase per tile.
    printMessage("\t\tMerging Hydro Breaklines for project area")
    dest = os.path.join(src, "elevation_data.gdb", "hydro_breaklines")
    sources = []
    for tileGDB in os.listdir(os.path.join(src, "geodatabase")):
        candidate = os.path.join(src, "geodatabase", tileGDB, "terrain_data", "brkln_hydro_py")
        if arcpy.Exists(candidate):
            sources.append(candidate)
    if sources:
        # Merge accepts a semicolon-delimited list of inputs
        arcpy.Merge_management(";".join(sources), dest)
        arcpy.AddMessage("finished mosaicing breaklines...")
    else:
        arcpy.AddMessage("No Breaklines found to merge")
def mergeContours(src):
    """Merge the per-tile 2 ft contour feature classes into one dataset.

    src is the project base folder (e.g. d:/dem_work/faribault); each tile
    geodatabase under <src>/geodatabase is expected to hold a
    "contour_2f_3m" feature class.  The merged output is written to
    <src>/elevation_data.gdb/contours.
    """
    dest = os.path.join(src, "elevation_data.gdb", "contours")
    dirLst = os.listdir(os.path.join(src, "geodatabase"))
    fcLst = []
    for f in dirLst:
        theFC = os.path.join(src, "geodatabase", f, "contour_2f_3m")
        if arcpy.Exists(theFC):
            fcLst.append(theFC)
    if len(fcLst) > 0:
        # Merge accepts a semicolon-delimited input list; only build it when
        # there is actually something to merge
        arcpy.Merge_management(";".join(fcLst), dest)
        arcpy.AddMessage("finished mosaicing contours...")
    else:
        # BUG FIX: user-facing message previously read "No Countours"
        arcpy.AddMessage("No Contours found to mosaic!")
def mergeBuildings(src):
    # Merge the per-tile building footprint feature classes into
    # <src>/elevation_data.gdb/buildings.  src is the project base folder
    # containing a "geodatabase" folder with one geodatabase per tile.
    # Force output to UTM zone 15N and widen the XY/Z domains before merging.
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(26915)
    arcpy.env.XYDomain = "0 0 10000000 10000000"
    # NOTE(review): ZDomain uses a comma separator while XYDomain uses
    # spaces -- confirm this is the format arcpy expects.
    arcpy.env.ZDomain = "0, 5000"
    dest = os.path.join(src, "elevation_data.gdb", "buildings")
    sources = []
    for tileGDB in os.listdir(os.path.join(src, "geodatabase")):
        candidate = os.path.join(src, "geodatabase", tileGDB, "building_loc_py")
        if arcpy.Exists(candidate):
            sources.append(candidate)
    if sources:
        arcpy.Merge_management(";".join(sources), dest)
        arcpy.AddMessage("finished mosaicing buildings...")
    else:
        arcpy.AddMessage("No buildings found to mosaic!")
def merge_tiles(src):
    """Post-download QA/QC: merge all tiled data under src.

    Mosaics the tiled DEMs and merges breaklines, contours and buildings
    into <src>/elevation_data.gdb.  The "geodatabase" and "las" folders are
    expected to exist inside src.  Each step is guarded by an Exists check
    so a re-run only does the missing work.
    """
    printMessage("Initial QA/QC Processing for " + src + " starting at " + time.ctime())
    # create the tile index for the files found in the folders...
    #if not arcpy.Exists(src + "/elevation_data.gdb/tile_index"):
    #    printMessage("\tExtracting Tile Index at " + time.ctime())
    #    makeTileIndex(src)
    # mosaic all of the individual DEMs
    # NOTE(review): dem03hs is only produced by code that is commented out
    # in mosaicRaster, so this guard never trips and the DEM step re-runs
    # on every invocation -- confirm intent.
    if not arcpy.Exists(src + "/elevation_data.gdb/dem03hs"):
        printMessage("\tMosaicing and processing DEM at " + time.ctime())
        st = time.time()
        mosaicRaster(src)
        # BUG FIX: original literal was "\t\processing" -- "\p" is not an
        # escape sequence, so a stray backslash-p was printed instead of a
        # second tab.
        printMessage("\t\tprocessing rasters took " + duration_human(time.time() - st))
    # mosaic all of the individual edge-of-water breaklines...
    if not arcpy.Exists(src + "/elevation_data.gdb/hydro_breaklines"):
        printMessage("\tMosaicing breaklines at " + time.ctime())
        st = time.time()
        try:
            mergeBreaklines(src)
        except Exception as e:
            # best-effort step: keep going, but report instead of silently
            # swallowing the failure (original was a bare "except: pass")
            arcpy.AddWarning("\t\tmerging breaklines failed: " + str(e))
        printMessage("\t\tmerging breaklines took " + duration_human(time.time() - st))
    # mosaic all of the individual contours
    if not arcpy.Exists(src + "/elevation_data.gdb/contours"):
        printMessage("\tMerging 2' Contours starting at " + time.ctime())
        st = time.time()
        try:
            mergeContours(src)
        except Exception as e:
            arcpy.AddWarning("\t\tmerging contours failed: " + str(e))
        printMessage("\t\tmerging contours took " + duration_human(time.time() - st))
    # mosaic all of the individual buildings
    if not arcpy.Exists(src + "/elevation_data.gdb/buildings"):
        printMessage("\tMerging Buildings starting at " + time.ctime())
        st = time.time()
        try:
            mergeBuildings(src)
        except Exception as e:
            arcpy.AddWarning("\t\tmerging buildings failed: " + str(e))
        printMessage("\t\tmerging buildings took " + duration_human(time.time() - st))
##### Main program block...
# Queries the MnDNR tile-index map service with the user's polygon, downloads
# the matching Geodatabase / LAZ tiles from the MnGEO ftp site, unzips them,
# and optionally merges the tiled data into one elevation_data.gdb.

# get the parent path of the script
srcPath = os.path.split(os.path.split(sys.argv[0])[0])[0]
# The location of the lidar data on the LMIC/MnGEO ftp site....
dataPath = "ftp://ftp.lmic.state.mn.us/pub/data/elevation/lidar/q250k"
# get the input parameters from the user...
dest = arcpy.GetParameterAsText(0)  # where to store the output
if not dest:
    dest = "d:\\temp\\testing"
inFC = arcpy.GetParameterAsText(1)  # what to use as input polygon feature class
if not inFC:
    inFC = "d:\\temp\\test_polygon_sandhillriver.shp"
GDB = arcpy.GetParameter(2)    # download Geodatabase tiles
LAS = arcpy.GetParameter(3)    # download LAS tiles
merge = arcpy.GetParameter(4)  # merge tiled data together
d = arcpy.Describe(inFC)
srs = d.spatialReference
# the Tile scheme service URL
tileServiceURL = "http://arcgis.dnr.state.mn.us/arcgis/rest/services/LiDARViewer/TileIndex2/MapServer/0/query?"
# the Query parameter Dictionary (geometry is filled in per polygon below)
paramDict = {
    'where': "",
    'text': "",
    'objectIds': "",
    'time': "",
    'geometryType': "esriGeometryPolygon",
    'inSR': srs.factoryCode,
    'spatialRel': "esriSpatialRelIntersects",
    'relationParam': "",
    'outFields': "dnr_qqq_id, las_tile_name",
    'returnGeometry': "false",  # we could return geography and make the tile index.....
    'maxAllowableOffset': "",
    'geometryPrecision': "",
    'outSR': "26915",
    'returnIdsOnly': "false",
    'returnCountOnly': "false",
    'orderByFields': "",
    'groupByFieldsForStatistics': "",
    'outStatistics': "",
    'returnZ': "false",
    'returnM': "false",
    'gdbVersion': "",
    'f': "pjson"}
# collect the tile names intersecting each polygon in the input feature class
gdbList = []
lasList = []
rows = arcpy.SearchCursor(inFC)
for row in rows:
    paramDict['geometry'] = row.shape.JSON
    # encode the query request parameters and POST them to the tile service
    params = urllib.urlencode(paramDict)
    result = json.load(urllib.urlopen(tileServiceURL, params))
    for rec in result.get("features"):
        gdbName = rec.get("attributes").get("dnr_qqq_id")
        lasName = rec.get("attributes").get("las_tile_name")
        # "<>" is Python-2-only; "!=" is equivalent and works on both versions
        if gdbName != "":
            gdbList.append(gdbName)
        if lasName != "":
            lasList.append(lasName)
del rows
arcpy.AddMessage("")
arcpy.AddMessage("++++++++++++++++++++++++++++++++++++++++++++++++++")
arcpy.AddMessage("Using Selected Polygon From " + inFC + ".")
arcpy.AddMessage("All data will be copied to " + dest + ".")
if GDB and not os.path.exists(os.path.join(dest, "geodatabase")):
    os.makedirs(os.path.join(dest, "geodatabase"))
if LAS and not os.path.exists(os.path.join(dest, "las")):
    os.mkdir(os.path.join(dest, "las"))
gdbCount = len(gdbList)
lasCount = len(lasList)
max_tile_count = 2000
if gdbCount > max_tile_count:
    # BUG FIX: the message previously hard-coded "the limit is 200" while
    # the actual limit variable is 2000 -- report the real limit
    arcpy.AddWarning("Number of tiles = " + str(gdbCount) + " Too many tiles selected - the limit is " + str(max_tile_count))
    sys.exit()
arcpy.AddMessage("Your selected polygon has identified " + str(gdbCount) + " source data tiles.")
if GDB and LAS:
    msg = "Copying " + str(gdbCount) + " Geodatabase and LAS tiles to " + dest
elif GDB:
    msg = "Copying " + str(gdbCount) + " Geodatabase tiles to " + dest
elif LAS:
    msg = "Copying " + str(gdbCount) + " LAS tiles to " + dest
else:
    msg = "Not copying LAS or Geodatabases - looking to merge existing data if available..."
arcpy.AddMessage(msg)
gdbSuccessCount = 0
if GDB:
    for tileGDBID in gdbList:
        # tile IDs look like "<q250k sheet>-<tile>"; the ftp tree is keyed by sheet
        q250k = "q" + string.split(tileGDBID, "-")[0]
        srcGDB = dataPath + "/" + q250k + "/geodatabase/" + tileGDBID + ".gdb.zip"
        destGDB = dest + "\\geodatabase\\" + tileGDBID + ".gdb"
        destGDBzip = destGDB + ".zip"
        if not os.path.exists(destGDB):
            arcpy.AddMessage("Copying Geodatabase " + tileGDBID)
            if not os.path.exists(destGDBzip):
                try:
                    with closing(urllib2.urlopen(srcGDB)) as r:
                        with open(destGDBzip, 'wb') as f:
                            shutil.copyfileobj(r, f)
                    gdbSuccessCount += 1
                except Exception:
                    arcpy.AddMessage("\t WARNING: Geodatabase for tile " + tileGDBID + " not found - moving on to next one...")
            if os.path.exists(destGDBzip):
                # close the archive deterministically (the original leaked the
                # handle via "del zf"), then remove the zip to save space
                with closing(zipfile.ZipFile(destGDBzip)) as zf:
                    zf.extractall(dest + "\\geodatabase")
                os.remove(destGDBzip)
        else:
            gdbSuccessCount += 1
            arcpy.AddMessage("\tGeodatabase " + tileGDBID + " exists, moving on to next tile...")
if LAS:
    # this is a batch file that will be used to unzip the LAZ files....
    f1 = open(dest + "\\las\\unziplaz.bat", "w")
    # now go through the list and download the LAZ files...
    lasSuccessCount = 0
    for l in lasList:
        q250k = "q" + string.split(l, "-")[0]
        srcLAZ = dataPath + "/" + q250k + "/laz/" + l + ".laz"
        destLAZ = dest + "\\las\\" + l + ".laz"
        destLAS = dest + "\\las\\" + l + ".las"
        if not os.path.exists(destLAZ):
            arcpy.AddMessage("Copying LAZ " + destLAZ + "\n")
            try:
                with closing(urllib2.urlopen(srcLAZ)) as r:
                    with open(destLAZ, 'wb') as f:
                        shutil.copyfileobj(r, f)
                lasSuccessCount += 1
            except Exception:
                arcpy.AddMessage("\t WARNING: LAZ file for tile " + srcLAZ + " not found - moving on to next one...")
        if not os.path.exists(destLAS) and os.path.exists(destLAZ):
            f1.write(dest + "\\las\\laszip.exe -v -i " + destLAZ + " -o " + destLAS + "\n")
    # close the batch file
    f1.close()
    if lasSuccessCount > 0:
        # add a message telling the user what we are doing...
        arcpy.AddMessage("Unzipping LAZ Files....please be patient")
        # download LASZIP.EXE from the mngeo site...
        lazsrc = "ftp://ftp.lmic.state.mn.us/pub/data/elevation/lidar/tools/lastools/laszip.exe"
        lazdest = dest + "\\las\\laszip.exe"
        urllib.urlretrieve(lazsrc, lazdest)
        # call the batch file to unzip the LAZ files...
        os.system(dest + "\\las\\unziplaz.bat")
        # if the user has ArcGIS 10.1 then build a LAS dataset.
        # NOTE(review): this is a lexicographic string comparison -- it works
        # for 10.x vs "10" but would misorder e.g. "9.3"; confirm acceptable.
        if arcpy.GetInstallInfo()['Version'] > '10':
            lasDName = dest + "\\" + "las_data.lasd"
            if not os.path.exists(lasDName):
                arcpy.AddMessage("Creating LAS Dataset - " + lasDName)
                try:
                    arcpy.CreateLasDataset_management(dest + "\\las", lasDName, "NO_RECURSION", "#", "#", "COMPUTE_STATS", "RELATIVE_PATHS")
                except Exception:
                    arcpy.AddWarning("LAS files downloaded but attempt to bring into LAS dataset failed...")
            else:
                arcpy.AddMessage("LAS Dataset - " + lasDName + " found....")
    else:
        arcpy.AddWarning("No LAS files found in requested area....")
# if the user wants to merge the downloaded data then do that now....
if merge and (not GDB or gdbSuccessCount > 1):
    arcpy.AddMessage("Merging tiles to " + dest + "\\ELEVATION_DATA.GDB")
    merge_tiles(dest)
elif gdbCount == 1:
    arcpy.AddMessage("Only one GDB found...no merging necessary")
elif gdbCount == 0:
    arcpy.AddWarning("No Geodatabase Tiles found in area requested....")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment