# coding: utf-8 | |
# In[10]: | |
import pandas as pd | |
import fiona | |
import numpy as np | |
from bokeh.io import show, output_file | |
from bokeh.models import ColumnDataSource, HoverTool, LogColorMapper | |
from bokeh.palettes import Reds6 as palette | |
from bokeh.plotting import figure, save | |
from bokeh.resources import CDN | |
from shapely.geometry import Polygon, Point, MultiPoint, MultiPolygon | |
from shapely.prepared import prep | |
""" from: https://www.offenedaten-koeln.de/dataset/59a8a033-5ac8-4240-ab06-608a7f542472/resource/a677cd63-d887-4f0f-95cf-25781641e576 | |
converted with http://www.gdal.org/ogr2ogr.html: | |
ogr2ogr -lco ENCODING=UTF-8 -t_srs EPSG:4326 Stadtteil_WGS84.shp Stadtteil.shp """ | |
SHAPEFILE="/home/flopp/Documents/Coding/maps/offene/Stadtteil_WGS84.shp" | |
def read_data(filename):
    """Read scraped KVB bike-location data into a DataFrame.

    Parameters
    ----------
    filename : str
        Path to a CSV file (no header row) with the columns:
        scrape_date, scrape_time, scrape_weekday, u_id, bike_id,
        lat, lon, bike_name.

    Returns
    -------
    pandas.DataFrame
        De-duplicated free-floating bike observations (rows whose
        bike_name contains "BIKE"; station rows dropped), with
        scrape_date and scrape_time parsed to datetime.
    """
    colnames = ["scrape_date", "scrape_time", "scrape_weekday", "u_id",
                "bike_id", "lat", "lon", "bike_name"]
    # pandas opens and closes the file itself -- no separate open() needed.
    data = pd.read_csv(filename, names=colnames)
    # A bike reported at the same time and position twice is a duplicate scrape.
    data.drop_duplicates(inplace=True,
                         subset=['bike_id', 'scrape_time', 'lat', 'lon'])
    # Station entries do not contain "BIKE" in their name -> drop them.
    # na=False keeps boolean indexing valid if a bike_name is missing.
    data = data[data.bike_name.str.contains("BIKE", na=False)].reset_index()
    data.scrape_date = pd.to_datetime(data.scrape_date)  # to datetime object
    data.scrape_time = pd.to_datetime(data.scrape_time, format="%H-%M-%S")
    return data
def calc_points_per_poly(poly, points):
    """Count how many of *points* lie inside *poly*.

    Parameters
    ----------
    poly : shapely.geometry.Polygon
        District boundary polygon.
    points : iterable of shapely.geometry.Point
        Candidate bike positions.

    Returns
    -------
    int
        Number of points contained in the polygon.
    """
    # prep() builds a prepared geometry with a fast contains() predicate --
    # worthwhile because one polygon is tested against many points.
    prepared = prep(poly)
    # Generator sum avoids materialising the intermediate list that
    # len(list(filter(...))) built; len() already returns int, so the
    # redundant int() wrapper is gone as well.
    return sum(1 for point in points if prepared.contains(point))
# In[2]:
# Read bike location data
# bike data scraped: https://data-dive.com/cologne-bike-rentals-getting-data
data = read_data("/home/flopp/Documents/Coding/kvb/data/2017-03-01.csv")
# In[3]:
# create dataset with only one observation per hour
time = pd.DatetimeIndex(data.scrape_time)
# keep only the first scrape of each hour (minute == 0); .copy() makes
# data_hourly an independent frame so adding the 'hour' column below does
# not raise pandas' SettingWithCopyWarning (chained assignment on a view)
data_hourly = data[time.minute < 1].copy()
data_hourly['hour'] = pd.DatetimeIndex(data_hourly.scrape_time).hour
# number of observation periods = number of distinct hours present
periods = data_hourly['hour'].nunique()
# In[4]:
# one sublist of Shapely points per hourly period
# (assumes the hours present are exactly 0..periods-1 -- same assumption
# as the rest of the script; TODO confirm for partial-day data)
map_points = []
for hour in range(periods):
    in_hour = data_hourly[data_hourly.hour == hour]  # filter once per hour
    # lon is x, lat is y for Shapely points
    map_points.append([Point(x, y) for x, y in zip(in_hour.lon, in_hour.lat)])
# one MultiPoint per period holding every bike position of that hour
all_points = [MultiPoint(points) for points in map_points]
# In[22]:
# Extract features from shapefile.  A fiona Collection is a stream over
# the file, so materialise it ONCE into a list instead of re-iterating
# the open collection for every attribute; the with-block also closes
# the file handle, which the original left open.
with fiona.open(SHAPEFILE) as shp:
    features = list(shp)
district_name = [feat["properties"]["STT_NAME"] for feat in features]
district_area = [feat["properties"]["SHAPE_AREA"] for feat in features]
# exterior ring of each district polygon as a list of (lon, lat) pairs
district_xy = [list(feat["geometry"]["coordinates"][0]) for feat in features]
# split into per-axis coordinate lists for bokeh's patches()
district_x = [[pt[0] for pt in ring] for ring in district_xy]
district_y = [[pt[1] for pt in ring] for ring in district_xy]
district_poly = [Polygon(ring) for ring in district_xy]  # coords to Polygon
# In[23]:
# calc bikes per district for each period
num_bikes = []
bikes_per_area = []
for period in range(periods):
    # count the period's bike points falling inside each district polygon
    counts = [calc_points_per_poly(poly, all_points[period])
              for poly in district_poly]
    num_bikes.append(counts)
    # normalise by district area; *10000 rescales the shapefile's area
    # units to the density shown in the plot (TODO confirm unit)
    bikes_per_area.append([count / area * 10000
                           for count, area in zip(counts, district_area)])
# In[7]:
# Prepare data source for plot: one extra column per hourly period,
# keyed '0'..'23' so the JS callbacks can look them up by slider value.
rate_hours = {str(hour): rates for hour, rates in enumerate(bikes_per_area)}
# Use a fresh name instead of rebinding `data`, which still holds the raw
# observations DataFrame read above (the original shadowed it with a dict).
plot_data = dict(x=district_x, y=district_y, name=district_name,
                 rate=bikes_per_area[0], **rate_hours)  # merge dicts
source = ColumnDataSource(plot_data)  # one col per obsv. period
# In[8]:
# prepare plotting with bokeh
# light grey for (near-)zero plus a five-step red ramp
density_palette = ['#f2f2f2', '#fee5d9', '#fcbba1', '#fc9272',
                   '#fb6a4a', '#de2d26']
color_mapper = LogColorMapper(palette=density_palette)
p = figure(
    title="KVB bike density per district, Mar. 2017",
    tools="pan,wheel_zoom,reset,hover,save",
    x_axis_location=None,
    y_axis_location=None,
)
p.grid.grid_line_color = None
# one patch per district, filled by the log-mapped 'rate' column
p.patches(
    'x', 'y', source=source,
    fill_color={'field': 'rate', 'transform': color_mapper},
    fill_alpha=0.8,
    line_color="black",
    line_width=0.3,
)
# configure the hover tool created via the tools string above
hover = p.select_one(HoverTool)
hover.point_policy = "follow_mouse"
hover.tooltips = [
    ("District", "@name"),
    ("Bikes per km²", "@rate"),
    ("(Long, Lat)", "($x, $y)"),
]
# In[9]:
# Interactive version: a slider picks which hour's densities are shown.
from bokeh.layouts import column, row, widgetbox
from bokeh.models import CustomJS, Slider, Toggle
output_file("kvb_interactive.html")
# add slider with callback to update data source
# one step per hour of the day, matching the '0'..'23' columns in `source`
slider = Slider(start=0, end=23, value=0, step=1, title="Hour of day")
def update(source=source, slider=slider, window=None):
    """Swap the displayed 'rate' column to the hour chosen on the slider.

    This function is never executed as Python: CustomJS.from_py_func
    translates its body to JavaScript that runs in the browser.  `cb_obj`
    is therefore not a Python name -- it is the JS callback object (the
    slider) that bokeh injects at translation time, which is why linters
    flag it as undefined here.
    """
    data = source.data
    # slider value (0-23); presumably JS property-name coercion maps the
    # number onto the string keys '0'..'23' stored in the source -- verify
    v = cb_obj.get('value')
    data['rate'] = [x for x in data[v]]
    # old-style (pre-1.0) bokeh change notification
    source.trigger('change')
# translate update() to JS and run it whenever the slider value changes
slider.js_on_change('value', CustomJS.from_py_func(update))
show(column(p,widgetbox(slider),))  # writes & opens kvb_interactive.html
# Add Animation: Automatically loop through data source
# second output target: same map, animated by a JS timer instead
output_file("kvb_js_dynamic_animate.html")
# add button with callback to control animation
# Toggle starts/stops a JS interval timer (500 ms) that cycles through the
# hourly '0'..'23' columns of `source`.  Notes on the JS string below
# (runtime code, so the comments must live out here):
#  - `mytimer` is deliberately an implicit global, so the "stop" branch of
#    a LATER callback invocation can still clearInterval() it
#  - `j` is re-declared per invocation, so each press of "Play" restarts
#    the animation at period 0
#  - when j runs past the last stored hour, data[j] is undefined and j
#    wraps back to 0, looping the animation
callback = CustomJS(args=dict(p=p, source=source), code="""
    var data = source.data;
    var f = cb_obj.active;
    var j = 0;
    if(f == true){
        mytimer = setInterval(replace_data, 500);
    } else {
        clearInterval(mytimer);
    }
    function replace_data() {
        j++;
        if(data[j] === undefined) {
            j=0;
        }
        p.title.text = "Bike density per district in period: " +j;
        data['rate'] = data[j];
        source.change.emit();
    }
    """)
# `callback=` is the pre-1.0 bokeh widget API (newer bokeh uses js_on_click)
btn = Toggle(label="Play/Stop Animation", button_type="success", active=False, callback=callback)
show(column(widgetbox(btn,slider),p))  # writes & opens the animated page