{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Triangulate the Survey"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 1. Extract the Survey Points"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"91245\n"
]
}
],
"source": [
"import pickle\n",
"\n",
"with open('../data/interim/survey_after.pickle', 'rb') as input_file:\n",
" survey_after = pickle.load(input_file)\n",
" \n",
"# Format [no, x, y, height, description]\n",
"print(len(survey_after))"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CPU times: user 8.06 s, sys: 21.7 ms, total: 8.08 s\n",
"Wall time: 8.09 s\n"
]
}
],
"source": [
"%%time\n",
"from rtree import index\n",
"\n",
"idx_pts = index.Index()\n",
"for i, vertex in enumerate(survey_after):\n",
" x, y = vertex[1], vertex[2]\n",
" idx_pts.insert(i, (x, y, x, y))"
]
},
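{
"cell_type": "markdown",
"metadata": {},
"source": [
"A note on the index above: every survey point is stored with a degenerate bounding box `(x, y, x, y)`, so `idx_pts.nearest((x, y, x, y), 1)` yields the id of the closest surveyed point. A minimal sketch of that behaviour on made-up coordinates (not survey data):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Throwaway index over three made-up points, purely to illustrate nearest().\n",
"from rtree import index\n",
"\n",
"demo = index.Index()\n",
"for i, (px, py) in enumerate([(0.0, 0.0), (10.0, 0.0), (10.0, 10.0)]):\n",
"    demo.insert(i, (px, py, px, py))\n",
"\n",
"# The entry nearest to (9.0, 1.0) should be id 1, the point at (10.0, 0.0).\n",
"print(list(demo.nearest((9.0, 1.0, 9.0, 1.0), 1))[0])"
]
},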
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 2. Boundary Edges"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[87448, 87450]\n",
"[87450, 90021]\n",
"[90021, 90017]\n",
"[90017, 90016]\n",
"[90016, 90009]\n",
"[90009, 90003]\n"
]
}
],
"source": [
"import fiona\n",
"from shapely.geometry import LineString\n",
"\n",
"\n",
"segments = []\n",
"\n",
"with fiona.open('../data/interim/design_concave_hull.shp') as source:\n",
" for item in source:\n",
" for shape in item['geometry']['coordinates']:\n",
" coords = []\n",
" for pt in shape:\n",
" coords.append(pt)\n",
" for i, pt in enumerate(coords):\n",
" if i == len(coords)-1:\n",
" continue\n",
" x1, y1 = pt[0], pt[1]\n",
" x2, y2 = coords[i+1][0], coords[i+1][1]\n",
" start = list(idx_pts.nearest((x1, y1, x1, y1), 1))[0]\n",
" end = list(idx_pts.nearest((x2, y2, x2, y2), 1))[0]\n",
" segments.append([start, end])\n",
" \n",
"for i, segment in enumerate(segments):\n",
" if i < 6:\n",
" print(segment)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 3. Breaklines"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1448\n"
]
}
],
"source": [
"print(len(segments))"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"84014\n",
"\n",
"CPU times: user 34.3 s, sys: 71.1 ms, total: 34.3 s\n",
"Wall time: 34.3 s\n"
]
}
],
"source": [
"%%time\n",
"with fiona.open('../data/processed/Breaklines Design.shp') as source:\n",
" for line in source:\n",
" coords = []\n",
" for pt in line['geometry']['coordinates']:\n",
" coords.append(pt)\n",
" for i, pt in enumerate(coords):\n",
" if i == len(coords)-1:\n",
" continue\n",
" x1, y1 = pt[0], pt[1]\n",
" x2, y2 = coords[i+1][0], coords[i+1][1]\n",
" start = list(idx_pts.nearest((x1, y1, x1, y1), 1))[0]\n",
" end = list(idx_pts.nearest((x2, y2, x2, y2), 1))[0]\n",
" segments.append([start, end])\n",
"\n",
"idx_segments = index.Index()\n",
"for i, segm in enumerate(segments):\n",
" x1, y1 = survey_after[segm[0]][1], survey_after[segm[0]][2]\n",
" x2, y2 = survey_after[segm[1]][1], survey_after[segm[1]][2]\n",
" line = LineString([(x1, y1), (x2, y2)])\n",
" idx_segments.insert(i, line.bounds)\n",
"\n",
"print(len(segments))\n",
"print()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 4. Write .poly file for Triangle"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with open('../data/interim/dtm.poly','w') as dest:\n",
" print('# DTM poly created by Andre Kruger', file=dest)\n",
" print('#', file=dest)\n",
" print('# Vertex Dimension Attribute Marker', file=dest)\n",
" print('# Count Count 0/1', file=dest)\n",
" print(len(survey_after), '2', '0', '0', sep='\\t', file=dest)\n",
" print('# Vertex X Y Attributes Marker', file=dest)\n",
" for i, pt in enumerate(survey_after):\n",
" #print(i, round(pt[0],3), round(pt[1],3), round(pt[2],3), sep='\\t', file=dest)\n",
" print(i, round(pt[1],3), round(pt[2],3), sep='\\t', file=dest)\n",
" print('#', file=dest)\n",
" print('# Segment Marker', file=dest)\n",
" print('# Count 0/1', file=dest)\n",
" print(len(segments), '0', sep='\\t', file=dest)\n",
" for i, segment in enumerate(segments):\n",
" print(i, segment[0], segment[1], sep='\\t', file=dest)\n",
" print('0', file=dest) "
]
},
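{
"cell_type": "markdown",
"metadata": {},
"source": [
"The file written above follows Triangle's PSLG .poly layout: a vertex header, one line per vertex, a segment header, one line per segment, and a closing hole count of 0. As a quick sanity check (assuming the file was written to the path above), the first few lines can be echoed back:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Peek at the head of the generated .poly file; a check only, not part of the pipeline.\n",
"with open('../data/interim/dtm.poly') as check:\n",
"    for i, line in enumerate(check):\n",
"        if i > 7:\n",
"            break\n",
"        print(line.rstrip())"
]
},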
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 5. Execute Triangle"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"#!triangle -h"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Opening ../data/interim/dtm.poly.\n",
"Constructing Delaunay triangulation by divide-and-conquer method.\n",
"Warning: A duplicate vertex at (-67404.124, -2810249.078) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-67232.581, -2810583.36) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-67203.27, -2810669.502) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-66990.742, -2811081.967) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-66765.055, -2808684.137) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-66742.285, -2808676.322) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-64464.145, -2819515.004) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-64106.605, -2818920.463) appeared and was ignored.\n",
"Warning: A duplicate vertex at (-64051.224, -2821652.244) appeared and was ignored.\n",
"Delaunay milliseconds: 84\n",
"Recovering segments in Delaunay triangulation.\n",
"Segment milliseconds: 43\n",
"Removing unwanted triangles.\n",
"Hole milliseconds: 1\n",
"\n",
"Writing ../data/interim/dtm.1.node.\n",
"Writing ../data/interim/dtm.1.ele.\n",
"Writing ../data/interim/dtm.1.poly.\n",
"Writing ../data/interim/dtm.1.edge.\n",
"Writing ../data/interim/dtm.1.neigh.\n",
"\n",
"Output milliseconds: 452\n",
"Total running milliseconds: 603\n",
"\n",
"Statistics:\n",
"\n",
" Input vertices: 91245\n",
" Input segments: 84014\n",
" Input holes: 0\n",
"\n",
" Mesh vertices: 91299\n",
" Mesh triangles: 181148\n",
" Mesh edges: 272446\n",
" Mesh exterior boundary edges: 1448\n",
" Mesh interior boundary edges: 82692\n",
" Mesh subsegments (constrained edges): 84140\n",
"\n"
]
}
],
"source": [
"!triangle -pzen ../data/interim/dtm.poly"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 6. Load the Generated Nodes"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"91245\n",
"91308\n"
]
}
],
"source": [
"nodes = []\n",
"\n",
"with open('../data/interim/dtm.1.node') as source:\n",
" for i, line in enumerate(source):\n",
" if i == 0 or line.lstrip()[0] == '#':\n",
" continue\n",
" data = line.split()\n",
" nodes.append([float(data[1]), float(data[2])])\n",
"\n",
"print(len(survey_after))\n",
"print(len(nodes))"
]
},
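{
"cell_type": "markdown",
"metadata": {},
"source": [
"The node file can hold more vertices than the survey because Triangle typically inserts Steiner points while recovering the constrained segments; those extra nodes have no surveyed height yet, which is what the next section resolves. For reference, the number of added vertices:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Vertices added by Triangle on top of the original survey points.\n",
"print(len(nodes) - len(survey_after))"
]
},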
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 7. Attach Heights to the Nodes\n",
"Todo: What to do about breaklines that crosses?"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[-64019.058, -2821685.367, 1608.083]\n",
"[-64018.777, -2821684.843, 1607.489]\n",
"[-64020.814, -2821684.409, 1608.027]\n",
"[-64018.304, -2821683.962, 1607.509]\n",
"[-64020.54, -2821683.898, 1607.447]\n",
"[-64022.569, -2821683.453, 1607.972]\n"
]
}
],
"source": [
"def point2line(line, point):\n",
" Cx, Cy = point[0], point[1]\n",
" Ax, Ay = line[0][0], line[0][1]\n",
" Bx, By = line[1][0], line[1][1]\n",
" L = ((Bx-Ax)**2+(By-Ay)**2)**0.5\n",
" r = ((Cx-Ax)*(Bx-Ax)+(Cy-Ay)*(By-Ay))/L**2\n",
" if r > 1 or r < 0: \n",
" return False\n",
" ch = ((Cx-Ax)**2+(Cy-Ay)**2)**0.5\n",
" Px = Ax+r*(Bx-Ax)\n",
" Py = Ay+r*(By-Ay)\n",
" dist = ((Cx-Px)**2+(Cy-Py)**2)**0.5\n",
" if r <= 1 and r >= 0:\n",
" return [dist, L, ch]\n",
" return False\n",
"\n",
"\n",
"#####\n",
"inspect = []\n",
"is_pt = []\n",
"#####\n",
"\n",
"for i, nd in enumerate(nodes):\n",
" x1, y1 = nd[0], nd[1]\n",
" ndx = list(idx_pts.nearest((x1, y1, x1, y1), 1))[0]\n",
" x2, y2, height = survey_after[ndx][1], survey_after[ndx][2], survey_after[ndx][3]\n",
" dist = ((x1-x2)**2+(y1-y2)**2)**0.5\n",
" nodes[i].append(height)\n",
" if dist > 0.001:\n",
" boundary = list(idx_segments.nearest((x1, y1, x1, y1), 1))\n",
" segm_list = []\n",
" for bdry in boundary:\n",
" segm = segments[bdry]\n",
" x_start, y_start = survey_after[segm[0]][0], survey_after[segm[0]][1]\n",
" x_end, y_end = survey_after[segm[1]][0], survey_after[segm[1]][1]\n",
" result = point2line(((x_start, y_start),(x_end, y_end)), (x1, y1))\n",
" if result is not False:\n",
" result.append(bdry)\n",
" segm_list.append(result)\n",
" segm_list.sort()\n",
" for item in segm_list:\n",
" ####\n",
" if item[0] < 0.001:\n",
" #####\n",
" inspect.append(item[3])\n",
" is_pt.append((x1, y1))\n",
" #####\n",
" print(round(item[0],3), end = '\\t')\n",
" print(item[1], item[2], item[3])\n",
" #print()\n",
" \n",
"for i, nd in enumerate(nodes):\n",
" if i < 6:\n",
" print(nd)"
]
},
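{
"cell_type": "markdown",
"metadata": {},
"source": [
"`point2line` projects point C onto segment AB and returns `[dist, L, ch]`: the perpendicular distance from C to AB, the length of AB, and the straight-line distance from A to C. It returns `False` when the projection falls outside the segment. A minimal check on synthetic coordinates (not survey data):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Synthetic 3-4-5 check of point2line.\n",
"print(point2line(((0.0, 0.0), (10.0, 0.0)), (4.0, 3.0)))   # expect [3.0, 10.0, 5.0]\n",
"print(point2line(((0.0, 0.0), (10.0, 0.0)), (12.0, 1.0)))  # projection beyond B, expect False"
]
},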
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"#### INSPECT\n",
"\n",
"from shapely.geometry import LineString\n",
"from shapely.geometry import Point\n",
"\n",
"with open('../data/interim/Lune.csv', 'w') as sink:\n",
" print('Line', file=sink)\n",
" for ndx in inspect:\n",
" segm = segments[ndx]\n",
" x1, y1 = survey[segm[0]][0], survey[segm[0]][1]\n",
" x2, y2 = survey[segm[1]][0], survey[segm[1]][1]\n",
" line = LineString([(x1, y1), (x2, y2)])\n",
" print(line.wkt, file=sink)\n",
" \n",
"with open('../data/interim/Point.csv', 'w') as sink:\n",
" print('Point', file=sink)\n",
" for pt in is_pt:\n",
" point = Point(pt[0], pt[1])\n",
" print(point.wkt, file=sink)"
]
},
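{
"cell_type": "markdown",
"metadata": {},
"source": [
"The two CSVs above hold plain WKT for inspection in a GIS. If a quick round-trip check is wanted in the notebook itself, something like this should work (it assumes the files were just written and skips their header rows):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Parse a few of the WKT points back with shapely; a check only.\n",
"from shapely import wkt\n",
"\n",
"with open('../data/interim/Point.csv') as check:\n",
"    next(check)                      # skip the 'Point' header row\n",
"    for i, row in enumerate(check):\n",
"        if i == 3:\n",
"            break\n",
"        print(wkt.loads(row).coords[0])"
]
},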
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 8. Generate the Triangles"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'ellps': 'WGS84',\n",
" 'k': 1,\n",
" 'lat_0': 0,\n",
" 'lon_0': 31,\n",
" 'no_defs': True,\n",
" 'proj': 'tmerc',\n",
" 'units': 'm',\n",
" 'x_0': 0,\n",
" 'y_0': 0}\n"
]
}
],
"source": [
"import pprint\n",
"from collections import OrderedDict\n",
"\n",
"\n",
"with fiona.open('../data/processed/hull.shp') as source:\n",
" source_crs = source.crs\n",
" pprint.pprint(source_crs)\n",
" \n",
"source_driver = 'ESRI Shapefile'\n",
"source_schema = {'geometry': 'Polygon',\n",
" 'properties': OrderedDict([('No', 'int:10'),\n",
" ('pointA', 'int:10'),\n",
" ('pointB', 'int:10'),\n",
" ('pointC', 'int:10'),\n",
" ('Slope', 'float')])}\n",
"\n",
"\n",
"with fiona.open('../data/interim/triangles_design.shp','w',\n",
" driver=source_driver,\n",
" crs=source_crs,\n",
" schema=source_schema) as sink:\n",
" with open('../data/interim/dtm.1.ele') as source:\n",
" for i, line in enumerate(source):\n",
" if i == 0 or line.lstrip()[0] == '#':\n",
" continue\n",
" no = i - 1\n",
" data = line.split()\n",
" pt1, pt2, pt3 = nodes[int(data[1])], nodes[int(data[2])], nodes[int(data[3])]\n",
" x1, y1 = pt1[0], pt1[1]\n",
" x2, y2 = pt2[0], pt2[1]\n",
" x3, y3 = pt3[0], pt3[1]\n",
" rec = {}\n",
" rec['properties'] = {}\n",
" rec['geometry'] = {}\n",
" rec['properties']['No'] = no\n",
" rec['properties']['pointA'] = data[1]\n",
" rec['properties']['pointB'] = data[2]\n",
" rec['properties']['pointC'] = data[3]\n",
" rec['properties']['Slope'] = 0.1\n",
" rec['geometry']['type'] = 'Polygon'\n",
" rec['geometry']['coordinates'] = [[(x1, y1), (x2, y2), (x3, y3), (x1, y1)]]\n",
" #pprint.pprint(rec)\n",
" sink.write(rec)\n",
" "
]
},
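{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `Slope` attribute is written as a placeholder (0.1). If a real value is wanted later, the plane through a triangle's three `[x, y, height]` vertices gives it directly as the magnitude of the plane's gradient. A sketch of that computation (not part of the original pipeline; `triangle_slope` is an illustrative helper and assumes a non-degenerate, non-vertical triangle):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"\n",
"def triangle_slope(p1, p2, p3):\n",
"    # Slope of the plane through three (x, y, z) points: |grad z| = sqrt(zx**2 + zy**2).\n",
"    a = np.asarray(p2, dtype=float) - np.asarray(p1, dtype=float)\n",
"    b = np.asarray(p3, dtype=float) - np.asarray(p1, dtype=float)\n",
"    nx, ny, nz = np.cross(a, b)      # normal of the plane spanned by the triangle\n",
"    return float(np.hypot(nx, ny) / abs(nz))\n",
"\n",
"\n",
"# Example on the first triangle listed in the .ele file (indices are zero-based because of -z).\n",
"with open('../data/interim/dtm.1.ele') as source:\n",
"    next(source)                     # skip the header line\n",
"    first = source.readline().split()\n",
"print(triangle_slope(nodes[int(first[1])], nodes[int(first[2])], nodes[int(first[3])]))"
]
},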
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 9. Use Matplotlib"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1200\n",
"1213.117 1630.98\n"
]
}
],
"source": [
"import math\n",
"import numpy as np\n",
"import matplotlib.tri as tri\n",
"import matplotlib.pyplot as plt\n",
"\n",
"\n",
"x = []\n",
"y = []\n",
"z = []\n",
"for nd in nodes:\n",
" x.append(nd[0])\n",
" y.append(nd[1])\n",
" z.append(nd[2])\n",
"x = np.array(x)\n",
"y = np.array(y)\n",
"z = np.array(z)\n",
" \n",
"elements = []\n",
"with open('../data/interim/dtm.1.ele') as source:\n",
" for i, line in enumerate(source):\n",
" if i == 0 or line.lstrip()[0] == '#':\n",
" continue\n",
" data = line.split()\n",
" pt1, pt2, pt3 = data[1], data[2], data[3]\n",
" elements.append([pt1, pt2, pt3])\n",
"elements = np.array(elements)\n",
"\n",
"# Contours\n",
"\n",
"minor = 0.5\n",
"major = 5\n",
"\n",
"lowest, highest = min(z), max(z)\n",
"start = (math.floor(lowest/100))*100\n",
"print(start)\n",
"values = np.arange(start, highest, step=minor)\n",
"levels = []\n",
"for item in values:\n",
" if item > lowest and item < highest:\n",
" levels.append(item)\n",
"del(values)\n",
"print(lowest, highest)\n",
"\n",
"x0, x1 = min(x), max(x)\n",
"y0, y1 = min(y), max(y)\n",
"extent = (x0, x1, y0, y1)\n",
"triang = tri.Triangulation(x, y, elements)\n",
"contours = plt.tricontour(triang, z, levels=levels, extent=extent)\n",
"\n",
"lines = list()\n",
"for i, line in enumerate(contours.collections):\n",
" linestrings = []\n",
" for path in line.get_paths():\n",
" if len(path.vertices) > 1:\n",
" linestrings.append(path.vertices)\n",
" lines.append([ i, levels[i], linestrings])\n",
"\n",
"#print(len(elements))\n",
"#print(len(triang.edges))\n",
"#print(triang.edges[5000])\n",
"#print(lines[0])\n",
"#print()"
]
},
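{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `tricontour` call above is only used to harvest the contour polylines; nothing is displayed. If a quick visual check is wanted, a sketch along these lines should work (figure size and the preview path are arbitrary choices, not part of the pipeline):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional preview of the triangulated surface contours.\n",
"fig, ax = plt.subplots(figsize=(10, 10))\n",
"ax.set_aspect('equal')\n",
"ax.tricontour(triang, z, levels=levels)\n",
"fig.savefig('../data/interim/contours_preview.png', dpi=150)\n",
"plt.close(fig)"
]
},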
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Pickle the DTM to file"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import pickle\n",
"\n",
"with open('../data/interim/DTM2.pickle', 'wb') as output_file:\n",
" pickle.dump((x, y, z, elements), output_file)"
]
},
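{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, the pickled tuple unpacks the same way when the DTM is reloaded downstream (the `_chk` names here are illustrative only):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Round-trip check of the pickled DTM.\n",
"with open('../data/interim/DTM2.pickle', 'rb') as input_file:\n",
"    x_chk, y_chk, z_chk, elements_chk = pickle.load(input_file)\n",
"print(len(x_chk), elements_chk.shape)"
]
},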
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1333.9029286092264\n",
"\n",
"[masked_array(data = 0.019813984233383573,\n",
" mask = False,\n",
" fill_value = 1e+20)\n",
", masked_array(data = -0.020668418865952016,\n",
" mask = False,\n",
" fill_value = 1e+20)\n",
"]\n"
]
}
],
"source": [
"surface = tri.LinearTriInterpolator(triang, z, trifinder=None)\n",
"print(surface.__call__(-65530.81559185, -2815079.13037146))\n",
"print()\n",
"print(surface.gradient(-65530.81559185, -2815079.13037146))"
]
},
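{
"cell_type": "markdown",
"metadata": {},
"source": [
"`surface.gradient` returns `(dz/dx, dz/dy)`, so the surface slope at a point follows as the magnitude of that vector. A small sketch (an illustration only; it is not how the hard-coded `Slope` field in Section 8 was populated):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# Slope magnitude from the interpolated gradient at one (x, y) location.\n",
"dzdx, dzdy = surface.gradient(-65530.81559185, -2815079.13037146)\n",
"slope = np.sqrt(dzdx**2 + dzdy**2)        # rise over run\n",
"print(float(slope), float(slope) * 100)   # as a ratio and as a percentage"
]
},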
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1-4\t2-7\t3-6\t4-6\t5-9\t6-9\t7-15\t8-14\t9-14\t10-14\t11-18\t12-17\t13-11\t14-12\t15-16\t16-13\t17-14\t18-20\t19-24\t20-22\t21-16\t22-15\t23-15\t24-19\t25-16\t26-18\t27-20\t28-15\t29-21\t30-18\t31-19\t32-23\t33-23\t34-30\t35-18\t36-11\t37-7\t38-5\t39-6\t40-4\t41-4\t42-3\t43-4\t44-4\t45-3\t46-4\t47-3\t48-3\t49-3\t51-4\t52-5\t57-3\t58-3\t63-3\t64-3\t70-3\t71-3\t74-3\t86-4\t90-5\t91-3\t97-3\t101-4\t139-3\t179-3\t181-3\t182-4\t184-3\t189-3\t192-3\t197-3\t200-3\t221-3\t222-5\t223-5\t224-5\t225-3\t226-4\t227-5\t228-5\t229-5\t230-8\t231-6\t232-5\t233-4\t234-6\t235-11\t236-10\t237-12\t238-8\t239-7\t240-6\t241-7\t242-5\t243-8\t244-4\t245-7\t246-4\t247-3\t248-4\t249-4\t250-3\t326-3\t329-3\t400-3\t456-3\t508-3\t535-4\t536-3\t537-3\t538-3\t539-3\t540-3\t541-3\t542-4\t543-3\t544-3\t545-3\t546-4\t547-4\t549-3\t550-3\t551-3\t554-4\t555-4\t556-3\t557-4\t558-4\t559-4\t560-4\t561-4\t562-3\t563-3\t564-3\t565-3\t583-3\t584-3\t588-3\t603-3\t617-3\t628-3\t629-3\t630-3\t631-3\t632-3\t633-3\t634-5\t635-4\t636-6\t637-7\t638-7\t639-7\t640-6\t641-5\t642-4\t643-5\t644-7\t645-5\t646-6\t647-7\t648-9\t649-7\t650-8\t651-6\t652-7\t653-7\t654-10\t655-11\t656-9\t657-5\t658-5\t659-4\t660-5\t661-7\t662-6\t663-5\t664-7\t665-5\t666-4\t667-4\t668-5\t669-3\t670-4\t671-4\t672-3\t673-4\t674-4\t675-4\t676-5\t677-6\t678-5\t679-3\t680-3\t683-3\t684-4\t685-4\t686-3\t689-4\t704-3\t705-3\t706-4\t707-3\t708-4\t709-3\t710-8\t711-6\t712-6\t713-4\t714-5\t715-4\t716-3\t717-3\t726-3\t727-3\t728-3\t738-3\t757-3\t758-3\t759-4\t760-3\t761-3\t762-3\t763-3\t764-4\t765-4\t766-3\t767-3\t768-3\t769-3\t770-3\t771-3\t772-3\t773-5\t774-3\t775-3\t776-3\t777-4\t778-8\t779-5\t780-6\t781-4\t782-4\t783-3\t784-3\t785-4\t786-6\t787-4\t788-4\t789-3\t"
]
}
],
"source": [
"for i, line in enumerate(lines):\n",
" if len(line[2]) > 2:\n",
" print(i, '-', len(line[2]), sep='', end='\\t')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 10. Write Contours to File"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with fiona.open('../data/processed/hull.shp') as source:\n",
" source_crs = source.crs\n",
" \n",
"source_driver = 'ESRI Shapefile'\n",
"source_schema = {'geometry': 'LineString',\n",
" 'properties': OrderedDict([('Height', 'float'),\n",
" ('Type', 'str:8')])}\n",
"\n",
"\n",
"with fiona.open('../data/interim/contours_design.shp','w',\n",
" driver=source_driver,\n",
" crs=source_crs,\n",
" schema=source_schema) as sink:\n",
" for i, line in enumerate(lines):\n",
" elevation = line[1]\n",
" if elevation%major == 0:\n",
" cntr = 'Major'\n",
" else:\n",
" cntr = 'Minor'\n",
" for j, item in enumerate(line[2]):\n",
" rec = {}\n",
" rec['properties'] = {}\n",
" rec['geometry'] = {}\n",
" rec['properties']['Height'] = elevation\n",
" rec['properties']['Type'] = cntr\n",
" rec['geometry']['type'] = 'LineString'\n",
" ln = item.tolist()\n",
" rec['geometry']['coordinates'] = ln\n",
" sink.write(rec)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 11. Write Edges to File"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[1809 1795]\n"
]
}
],
"source": [
"print(triang.edges[5000])"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with fiona.open('../data/processed/hull.shp') as source:\n",
" source_crs = source.crs\n",
" \n",
"source_driver = 'ESRI Shapefile'\n",
"source_schema = {'geometry': 'LineString',\n",
" 'properties': OrderedDict([('No', 'int:32'),\n",
" ('Height1', 'float'),\n",
" ('Height2', 'float')])}\n",
"\n",
"\n",
"with fiona.open('../data/interim/edges2.shp','w',\n",
" driver=source_driver,\n",
" crs=source_crs,\n",
" schema=source_schema) as sink:\n",
" for i, line in enumerate(triang.edges):\n",
" ndx1, ndx2 = line[0], line[1]\n",
" x1, y1, z1 = x[ndx1], y[ndx1], z[ndx1]\n",
" x2, y2, z2 = x[ndx2], y[ndx2], z[ndx2]\n",
" rec = {}\n",
" rec['properties'] = {}\n",
" rec['geometry'] = {}\n",
" rec['properties']['No'] = i\n",
" rec['properties']['Height1'] = z1\n",
" rec['properties']['Height2'] = z2\n",
" rec['geometry']['type'] = 'LineString'\n",
" rec['geometry']['coordinates'] = [[x1 ,y1, z1],[x2, y2, z2]]\n",
" sink.write(rec)"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"!say -v Tessa \"Finished\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}