@ColinTalbert
Created August 10, 2017 15:22
{
"metadata": {
"name": "",
"signature": "sha256:2d1f61ab2271314bbba4f39cdbdc74b25fbbe32a349c2a11e358da042f0ea71d"
},
"nbformat": 3,
"nbformat_minor": 0,
"worksheets": [
{
"cells": [
{
"cell_type": "heading",
"level": 3,
"metadata": {},
"source": [
"In CreateHabitatFromCSVs I showed our process from going from the raw 2D hydrodynamic data to a habitat map for a single flow.\n",
"\n",
"In this notebook I'm going to reuse that code to generate all of the output for all the representative flows and save it to disk as standard geotiff files."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"import os\n",
"import re\n",
"import time\n",
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"import scipy\n",
"import scipy.interpolate\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import seaborn as sns\n",
"sns.set_style(\"dark\")\n",
"\n",
"from shapely.geometry import mapping, Polygon, MultiPolygon, LineString, Point\n",
"from osgeo import gdal, gdalnumeric, ogr, osr\n",
"from PIL import Image, ImageDraw"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 2
},
{
"cell_type": "heading",
"level": 5,
"metadata": {},
"source": [
"Consolidate the various bits of code from CreateHabitatFromCSVs into several reusable functions"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"def flow_from_fname(fname):\n",
" s = os.path.split(f)[1].split('q')[1]\n",
" n = \"\"\n",
" for c in s:\n",
" if c.isdigit():\n",
" n += c\n",
" elif c == \"_\":\n",
" n += \".\"\n",
" return float(n)\n",
"\n",
"def var_from_fname(fname):\n",
" s = os.path.split(f)[1].split('q')[1]\n",
" return s[-6:-4]\n",
"\n",
"def get_fname(var, flow):\n",
" fname1 = os.path.join(csv_dir, 'del3_q' + str(flow).replace(\".\", \"_\") + var + '.csv')\n",
" if os.path.exists(fname1):\n",
" return fname1\n",
"\n",
" fname2 = os.path.join(csv_dir, 'del3_q' + str(int(flow)).replace(\".\", \"_\") + var + '.csv')\n",
" if os.path.exists(fname2):\n",
" return fname2\n",
"\n",
" raise Exception(fname1)\n",
" \n",
"def get_data_for_flow(flow):\n",
" covariate_lkup = {'da':'depth', 'va':'velocity', 'fa':'froude', 'sa':'shear', }\n",
" data = {}\n",
" for covariate in covariates:\n",
" fname = get_fname(covariate, flow)\n",
" data[covariate] = pd.io.parsers.read_csv(fname, skipinitialspace=False,\n",
" header=None,\n",
" names=['id', 'x', 'y', covariate_lkup[covariate]])\n",
"\n",
" # data[covariate].set_index(['id'], inplace=True)\n",
" # data[covariate].drop('id', axis=1, inplace=True)\n",
"\n",
" data = pd.concat(data.values(), axis=1)\n",
" data = data.T.groupby(level=0).first().T\n",
" data['shearstress'] = data.shear.apply(lambda x: 998.2 * x ** 2)\n",
" data['reynolds'] = data[['depth', 'velocity']].apply(lambda row: row.velocity * row.depth / 0.000001007, axis=1)\n",
" return data\n",
"\n",
"def get_var(data, var):\n",
" X = data.x.ravel()\n",
" Y = data.y.ravel()\n",
" Z = data[var].ravel()\n",
" return X, Y, Z\n",
"\n",
"def get_verts_wts(X1, Y1, x, y):\n",
" points = np.array(zip(X1, Y1))\n",
" tri = scipy.spatial.qhull.Delaunay(points)\n",
" simplex = tri.find_simplex(zip(x.ravel(), y.ravel()))\n",
" vertices = np.take(tri.simplices, simplex, axis=0)\n",
" temp = np.take(tri.transform, simplex, axis=0)\n",
" delta = zip(x.ravel(), y.ravel()) - temp[:, 2]\n",
" bary = np.einsum('njk,nk->nj', temp[:, :2, :], delta)\n",
" vtx, wts = vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True)))\n",
" return simplex, vtx, wts\n",
"\n",
"def get_initial_products(X_pnts, Y_pnts, extent, cell_size=1):\n",
" \"\"\"X_pnts, Y_pnts are 1d arrays of the X and Y coords of our nodes\n",
" extent is minX, maxX, minY, maxY or the output layer,\n",
" \"\"\"\n",
" width = int((extent[1] - extent[0]) / cell_size)\n",
" height = int((extent[3] - extent[2]) / cell_size)\n",
"\n",
" y, x = np.mgrid[extent[2]:extent[3]:complex(0, height),\n",
" extent[1]:extent[0]:complex(0, width) ]\n",
" x = np.fliplr(x)\n",
" y = np.flipud(y)\n",
"\n",
" # The mask is a grid with false outside a convex hull around our points\n",
" mask = get_mask(X_pnts, Y_pnts, x, y)\n",
" simplex, vtx, wts = get_verts_wts(X_pnts, Y_pnts, x[mask], y[mask])\n",
" return x, y, vtx, wts, mask\n",
"\n",
"def apply_products(Z, x, y, mask, vtx, wts):\n",
" ansx = np.empty(x.shape)\n",
" ansx.fill(np.nan)\n",
"\n",
" raw_ans = np.einsum('nj,nj->n', np.take(Z.ravel(), vtx), wts)\n",
" ansx[np.nonzero(mask)[0], np.nonzero(mask)[1]] = raw_ans\n",
"\n",
" return ansx\n",
"\n",
"\n",
"def world2Pixel(geoMatrix, x, y):\n",
" \"\"\"\n",
" Uses a gdal geomatrix (gdal.GetGeoTransform()) to calculate\n",
" the pixel location of a geospatial coordinate\n",
" \"\"\"\n",
" ulX = geoMatrix[0]\n",
" ulY = geoMatrix[3]\n",
" xDist = geoMatrix[1]\n",
" yDist = geoMatrix[5]\n",
" rtnX = geoMatrix[2]\n",
" rtnY = geoMatrix[4]\n",
" pixel = int((x - ulX) / xDist)\n",
" line = int((ulY - y) / xDist)\n",
" return (pixel, line)\n",
"\n",
"def get_mask(X_points, Y_points, x_grid, y_grid):\n",
" transform = get_transform(x_grid, y_grid)\n",
" points = np.array(zip(X_points, Y_points))\n",
" trans_points = [world2Pixel(transform, x, y) for x, y in points]\n",
" hull = Polygon(trans_points).convex_hull\n",
" pnts = np.asarray(hull.exterior)\n",
"\n",
" width = int(x_grid.max() - x_grid.min())\n",
" height = int(y_grid.max() - y_grid.min())\n",
" rasterPoly = Image.new(\"L\", (width, height), 0)\n",
" rasterize = ImageDraw.Draw(rasterPoly)\n",
" rasterize.polygon(list(map(tuple, pnts.astype(np.int32)[:-1])), fill=1, outline=0)\n",
" rasterize.line(list(map(tuple, pnts.astype(np.int32)[:-1])), fill=0, width=2)\n",
" mask = imageToArray(rasterPoly)\n",
" return mask == 1\n",
"\n",
"\n",
"def get_transform(x, y, cell_size=1.0):\n",
" origin = (x.min(), y.max())\n",
" origin = (x.min() - (cell_size / 2), y.max() + (cell_size / 2))\n",
" transform = [origin[0], cell_size, 0.0, origin[1], 0.0, cell_size * -1]\n",
" return transform\n",
"\n",
"# This function will convert the rasterized clipper shapefile\n",
"# to a mask for use within GDAL.\n",
"def imageToArray(i):\n",
" \"\"\"\n",
" Converts a Python Imaging Library array to a\n",
" gdalnumeric image.\n",
" \"\"\"\n",
" a = gdalnumeric.fromstring(i.tostring(), 'b')\n",
" a.shape = i.im.size[1], i.im.size[0]\n",
" return a\n",
"\n",
"\n",
"\n",
"def shp_to_mask(shp_fname, width, height, geotransform, srs):\n",
" shp = ogr.Open(shp_fname)\n",
" lyr = shp.GetLayer()\n",
" target_ds = gdal.GetDriverByName('MEM').Create('', width, height, 1, gdal.GDT_Byte)\n",
" target_ds.SetGeoTransform(geotransform)\n",
" target_ds.SetProjection(srs.ExportToWkt())\n",
"\n",
" target_ds.GetRasterBand(1).Fill(0)\n",
"\n",
" gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])\n",
" return target_ds.GetRasterBand(1).ReadAsArray()\n",
"\n",
"def save_one(outfname, x, y, array):\n",
" cell_size = 1.0\n",
"# origin = (x.min() - (cell_size / 2), y.max() + (cell_size / 2)) for pixel = area\n",
" origin = (x.min(), y.max())\n",
" transform = [origin[0], cell_size, 0.0, origin[1], 0.0, cell_size * -1]\n",
" out_srs = osr.SpatialReference()\n",
" out_srs.ImportFromEPSG(32618)\n",
"\n",
" array_to_raster(outfname, array, transform, out_srs)\n",
"\n",
"def array_to_raster(raster_fname, array, geotransform, srs, ndval=-3.4028234663852886e+38):\n",
"\n",
" rot_grid = array.astype(np.float32)\n",
" rot_grid = np.ma.masked_where(np.isnan(rot_grid), rot_grid)\n",
"\n",
" array_nd = np.where(rot_grid.mask, ndval, rot_grid)\n",
"\n",
" cols = rot_grid.shape[1]\n",
" rows = rot_grid.shape[0]\n",
"\n",
" driver = gdal.GetDriverByName('GTiff')\n",
" outRaster = driver.Create(raster_fname, cols, rows, 1, gdal.GDT_Float32)\n",
" outRaster.SetGeoTransform(geotransform)\n",
" outband = outRaster.GetRasterBand(1)\n",
" outband.WriteArray(array_nd)\n",
" outRaster.SetProjection(srs.ExportToWkt())\n",
" outband.SetNoDataValue(ndval)\n",
" outband.FlushCache()\n",
" outband.GetStatistics(0, 1)\n",
" histogram = outband.GetDefaultHistogram()\n",
" outband.SetDefaultHistogram(float(rot_grid.min()), float(rot_grid.max()), np.histogram(rot_grid.compressed(), bins=256)[0].tolist())\n",
"\n",
"def plot_one(img, title=\"not set\"):\n",
" fig = plt.figure(figsize=(12, 12))\n",
" ax = fig.add_subplot(111)\n",
"\n",
"\n",
" im = ax.imshow(img, cmap=plt.cm.jet, interpolation=\"nearest\") # vmin=regular_grids[label].min(), vmax=regular_grids[label].max(),\n",
"\n",
" ax.set_title(title)\n",
" try:\n",
" plt.colorbar(im)\n",
" except:\n",
" pass\n",
" plt.show()"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 3
},
{
"cell_type": "heading",
"level": 3,
"metadata": {},
"source": [
"This is the function that converts the covariate grids to final habitat"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"def cov_grids_to_hab(cov_grids, HSC):\n",
"\n",
" reclassed_grids = {}\n",
" for var, reclass_func in HSC.iteritems():\n",
" vf = np.vectorize(reclass_func)\n",
" reclassed_grids[var] = vf(cov_grids[var])\n",
" \n",
" grid_stack = np.dstack([grid for grid in reclassed_grids.itervalues()])\n",
" habitat = np.amin(grid_stack, axis=2)\n",
" return habitat"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 4
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"def process_one_group(data, outdir, extent, poly_clip, hsc):\n",
" \n",
" X, Y, Z = get_var(data, 'depth')\n",
" t1 = time.time()\n",
" print \"Getting initial products: \",\n",
" x, y, vtx, wts, mask = get_initial_products(X, Y, extent, cell_size=1)\n",
" print time.time() - t1\n",
" \n",
" cell_size = 1\n",
" origin = [x.min(), y.max()]\n",
" transform = [origin[0], cell_size, 0.0, origin[1], 0.0, cell_size * -1]\n",
" out_srs = osr.SpatialReference()\n",
" out_srs.ImportFromEPSG(32618)\n",
" cut_pixels = shp_to_mask(poly_clip, x.shape[1], x.shape[0], transform, out_srs)\n",
"\n",
" cov_grids = {}\n",
" \n",
" t1 = time.time()\n",
" print \"\\tdepth: \",\n",
" depth_grid = apply_products(Z, x, y, mask, vtx, wts)\n",
" depth_grid[depth_grid < 0] = np.nan\n",
" depth_grid[cut_pixels == 0] = np.nan\n",
"\n",
" fname = os.path.join(outdir, \"depth.tif\")\n",
" save_one(fname, x, y, depth_grid)\n",
" cov_grids['depth'] = depth_grid\n",
" print time.time() - t1\n",
" \n",
" for var in ['velocity', 'froude', 'shear', 'shearstress', 'reynolds']:\n",
" print \"\\t\" + var + \": \",\n",
" X, Y, Z = get_var(data, var)\n",
" t1 = time.time()\n",
" out_grid = apply_products(Z, x, y, mask, vtx, wts)\n",
" out_grid[depth_grid < 0] = np.nan\n",
" out_grid[cut_pixels == 0] = np.nan\n",
" fname = os.path.join(outdir, var + \".tif\")\n",
" save_one(fname, x, y, out_grid)\n",
" cov_grids[var] = out_grid\n",
" print time.time() - t1\n",
" \n",
" #and finally spit out a habitat map of this\n",
" habitat_grid = cov_grids_to_hab(cov_grids, hsc)\n",
" fname = os.path.join(outdir, \"habitat.tif\")\n",
" save_one(fname, x, y, habitat_grid)"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 5
},
{
"cell_type": "heading",
"level": 3,
"metadata": {},
"source": [
"These are the variables you'll need to change before running"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"##########################################################################\n",
"##########################################################################\n",
"##########################################################################\n",
"csv_dir = r\"X:\\Delaware_DSS\\2011_DSS\\DerivedData\\Persistence\\Source\\FromKelly20140818\"\n",
"poly_clip = r\"X:\\Delaware_DSS\\2005\\!Del_DSS_SupportingCaseFiles\\Delaware_3\\GISWORK\\del3polyclip.shp\"\n",
"\n",
"out_dir = r\"X:\\Delaware_DSS\\2011_DSS\\DerivedData\\Persistence\\Output_2005\"\n",
"\n",
"HSC = {'depth':lambda pixval: pixval > 0.06,\n",
" 'velocity':lambda pixval: 0.02 < pixval < 3.3,\n",
" 'froude':lambda pixval: pixval < 0.44,\n",
" 'shear':lambda pixval: pixval < 0.22,\n",
" 'shearstress':lambda pixval: pixval < 47.5}\n",
"\n",
"##########################################################################\n",
"##########################################################################\n",
"##########################################################################"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 7
},
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"Ok let's run these already!"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"files = [os.path.join(csv_dir, f) for f in os.listdir(csv_dir)]\n",
"flows = list(set([flow_from_fname(f) for f in files]))\n",
"flows.sort()\n",
"covariates = list(set([var_from_fname(f) for f in files]))\n",
"print \"flows =\", flows\n",
"print \"covariates =\", covariates\n",
"\n",
"data = get_data_for_flow(max(flows))\n",
"X, Y, Z = get_var(data, 'depth')\n",
"pad = 0\n",
"extent = [X.min() - pad, X.max() + pad, Y.min() - pad, Y.max() + pad]\n",
"\n",
"\n",
"for flow in flows:\n",
" out_flow_dir = os.path.join(out_dir, str(flow))\n",
" if not os.path.exists(out_flow_dir):\n",
" os.makedirs(out_flow_dir)\n",
" data = get_data_for_flow(flow)\n",
" print \"processing\", flow, \" xyz rows =\", data.shape[0]\n",
" process_one_group(data, out_flow_dir, extent, poly_clip, HSC)\n"
],
"language": "python",
"metadata": {},
"outputs": [
{
"output_type": "stream",
"stream": "stdout",
"text": [
"flows = [2.5, 14.3, 18.3, 23.6, 30.0, 39.0, 50.4, 64.9, 83.6, 107.6, 138.5, 178.3, 229.6, 295.6, 380.6, 490.0]\n",
"covariates = ['va', 'fa', 'sa', 'da']\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 2.5 xyz rows = 27856\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.72099995613\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 0.953000068665\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.01600003242\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98199987411\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99399995804\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.0119998455\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.96599984169\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 14.3 xyz rows = 30645\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.24900007248\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.882999897003\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.87299990654\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.77199983597\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.97000002861\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 18.3 xyz rows = 28028\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.19900012016\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.862999916077\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.78900003433\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.04999995232\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.9470000267\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 23.6 xyz rows = 27856\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.64599990845\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.83700013161\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.898999929428\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.03600001335\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 30.0 xyz rows = 27856\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.61000013351\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.25999999046\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00099992752\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99199986458\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00499987602\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99800014496\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.01099991798\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 39.0 xyz rows = 27856\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.64800000191\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.54999995232\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00999999046\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.01300001144\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98799991608\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.01099991798\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.97000002861\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 50.4 xyz rows = 29549\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.27399992943\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.46600008011\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98500013351\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99899983406\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 64.9 xyz rows = 28396\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"7.53699994087\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.96300005913\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98300004005\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00200009346\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00699996948\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.992000103\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 83.6 xyz rows = 28089\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"6.88700008392\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99300003052\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.92600011826\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99899983406\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.05599999428\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.94099998474\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 107.6 xyz rows = 28304\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"6.97799992561\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.69899988174\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99699997902\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99800014496\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 138.5 xyz rows = 28304\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"6.89100003242\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.5720000267\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.0\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98699998856\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99099993706\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.01000022888\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 178.3 xyz rows = 28153\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"6.91100001335\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.89499998093\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99300003052\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99800014496\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.9960000515\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00300002098\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00100016594\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 229.6 xyz rows = 11954\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"4.38199996948\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.01999998093\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99800014496\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99800014496\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.941999912262\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 295.6 xyz rows = 7824\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"4.19800019264\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.35500001907\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98900008202\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99900007248\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 380.6 xyz rows = 9215\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"4.18099999428\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.44000005722\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99799990654\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.93499994278\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.1130001545\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.960999965668\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.92000007629\n",
"processing"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 490.0 xyz rows = 14816\n",
"Getting initial products: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"4.60799980164\n",
"\tdepth: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.30900001526\n",
"\tvelocity: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.99199986458\n",
"\tfroude: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.99799990654\n",
"\tshear: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.0640001297\n",
"\tshearstress: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"2.00499987602\n",
"\treynolds: "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"1.98399996758\n"
]
}
],
"prompt_number": 6
},
{
"cell_type": "heading",
"level": 3,
"metadata": {},
"source": [
"how about a visualization just to make sure these worked?\n",
"Notice how flow 2.5 doesn't look right! Is it almost the same data as 39.0"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# %matplotlib inline\n",
"%matplotlib qt4\n",
"import math\n",
"import rasterio\n",
"\n",
"files = [os.path.join(csv_dir, f) for f in os.listdir(csv_dir)]\n",
"flows = list(set([flow_from_fname(f) for f in files]))\n",
"flows.sort()\n",
"covariates = list(set([var_from_fname(f) for f in files]))\n",
"print \"flows =\", flows\n",
"print \"covariates =\", covariates\n",
"\n",
"data = get_data_for_flow(max(flows))\n",
"X, Y, Z = get_var(data, 'depth')\n",
"pad = 0\n",
"extent = [X.min() - pad, X.max() + pad, Y.min() - pad, Y.max() + pad]\n",
"\n",
"\n",
"\n",
"def plot_one(ax, data, cm, vmin, vmax):\n",
" ax.imshow(data, cmap=cm, interpolation=\"nearest\", vmin=vmin, vmax=vmax)\n",
"cols = 3\n",
"rows = int(math.ceil(len(flows)/3.0))\n",
"\n",
"fig = plt.figure(figsize=(24, 16))\n",
"\n",
"ax1 = fig.add_subplot(rows, cols, 1)\n",
"first_flow = min(flows)\n",
"fname1 = os.path.join(out_dir, str(first_flow), \"habitat.tif\")\n",
"hab1 = rasterio.open(fname1).read_band(1).astype(np.float32)\n",
"\n",
"cm_hab = mpl.colors.ListedColormap(['white', 'red'])\n",
"cm_depth = plt.cm.jet\n",
"vmin=0\n",
"vmax=3\n",
" \n",
"plot_one(ax1, hab1, cm_hab, vmin, vmax)\n",
"ax1.set_title(str(first_flow))\n",
"ax1.get_xaxis().set_visible(False)\n",
"ax1.get_yaxis().set_visible(False)\n",
"\n",
"\n",
"inc = 2\n",
"for flow in flows[1:]:\n",
" print flow, inc, int(str(rows)+ str(cols)+ str(inc)),\n",
" fname = os.path.join(out_dir, str(flow), \"habitat.tif\")\n",
"# fname = os.path.join(out_dir, str(flow), \"depth.tif\")\n",
" hab = rasterio.open(fname).read_band(1).astype(np.float32)\n",
" ax = fig.add_subplot(rows, cols, inc, sharex=ax1, sharey=ax1)\n",
" plot_one(ax, hab, cm_hab, vmin, vmax)\n",
" \n",
" ax.set_title(str(flow))\n",
" ax.get_xaxis().set_visible(False)\n",
" ax.get_yaxis().set_visible(False)\n",
" inc +=1\n",
" \n",
"plt.show()\n"
],
"language": "python",
"metadata": {},
"outputs": [
{
"output_type": "stream",
"stream": "stdout",
"text": [
" flows = [2.5, 14.3, 18.3, 23.6, 30.0, 39.0, 50.4, 64.9, 83.6, 107.6, 138.5, 178.3, 229.6, 295.6, 380.6, 490.0]\n",
"covariates = ['va', 'fa', 'sa', 'da']\n",
"14.3"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
" 2 632 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"18.3 3 633 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"23.6 4 634 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"30.0 5 635 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"39.0 6 636 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"50.4 7 637 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"64.9 8 638 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"83.6 9 639 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"107.6 10 6310 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"138.5 11 6311 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"178.3 12 6312 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"229.6 13 6313 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"295.6 14 6314 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"380.6 15 6315 "
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"490.0 16 6316\n"
]
}
],
"prompt_number": 10
},
{
"cell_type": "code",
"collapsed": false,
"input": [],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 7
}
],
"metadata": {}
}
]
}