Sergio Lucero (sergiolucero)
💭 coding the days away
import folium
import geopandas as gp
from fetch_airquality import fetch_data
df = fetch_data().rename(columns={'city': 'ciudad'})
df['medida'] = ['%s[%s]' %(p, u) for p, u in zip(df['parameter'], df['unit'])]
df = df.drop(['parameter', 'unit'], axis=1)
data = df.to_html(index=False, border=10, justify='right')
m = folium.Map(location=df[['lat','lng']].mean().tolist())  # folium wants a single (lat, lon) centre, not every point
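
A possible next step, since the preview stops here: drop one marker per station on the map. The 'value' column name (and reuse of 'ciudad' and 'medida') is an assumption about what fetch_data returns.

for _, row in df.iterrows():
    folium.Marker([row['lat'], row['lng']],   # popup text built from assumed columns
                  popup='%s %s: %s' % (row['ciudad'], row['medida'], row['value'])).add_to(m)
m.save('airquality.html')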
@sergiolucero
sergiolucero / chilechinaplotter.py
Last active September 20, 2017 17:58
from Chile to China
import folium
import geopandas as gp
# fetching the .shp with requests and calling .json() on it cannot work; read it
# with geopandas instead (needs the .dbf/.shx sidecars served alongside the .shp)
chile = gp.read_file('http://sergiolucerovera.pythonanywhere.com/static/chile.shp')
centroids = [(pt.y, pt.x) for pt in chile.centroid]   # folium expects (lat, lon)
cmap = folium.Map(location=centroids[0], zoom_start=15, tiles='Stamen Terrain')   # folium.Map, not folium(); 'Staro_Pramen' is not a tileset
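
Given the "from Chile to China" title, a plausible finish (the preview cuts off here, so this is only a sketch): overlay the shapefile and trace a line between the two countries.

folium.GeoJson(chile).add_to(cmap)                      # outline of Chile from the shapefile
folium.PolyLine([(-33.45, -70.67), (39.90, 116.40)],    # Santiago -> Beijing, as (lat, lon)
                color='red', weight=2).add_to(cmap)
cmap.save('chilechina.html')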
from dask.distributed import Client, local_client
from dask import delayed, compute
@delayed
def dostand(stand):
    # each stand is only scheduled here; compute() later triggers the actual work
    stand.compute_stand_values_by_area()
    stand.compute_stand_values_by_volume()
    stand.compute_NPV()
    return stand   # hand the mutated stand back so compute() can collect it
def standloop(standlist, option_dict):
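    # assumed continuation (the gist preview cuts off here): fan out one lazy
    # task per stand and compute them together; how option_dict is used is unknown
    tasks = [dostand(stand) for stand in standlist]
    return compute(*tasks)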
@sergiolucero
sergiolucero / bikemapper.py
Last active August 23, 2017 16:53
from Bici Las Condes to Folium
import folium
import pandas as pd
URL = 'https://www.bicilascondes.cl/availability_map/getJsonObject'
COLORS = ['red', 'darkred', 'lightred', 'yellow',
          'darkgreen', 'lightgreen', 'green']   # low is bad -> red, high is good -> green
bike_data = pd.read_json(URL)
bikemap = folium.Map(bike_data[['lat','lon']].mean().tolist(),   # data centroid
                     tiles='Stamen Terrain', zoom_start=15)
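
A sketch of how the stations might be drawn, assuming each row carries a bike count in a 'bikes' field (the field name is a guess at the feed's schema, so check bike_data.columns first):

for _, st in bike_data.iterrows():
    color = COLORS[min(int(st['bikes']), len(COLORS) - 1)]   # more bikes -> greener marker
    folium.CircleMarker([st['lat'], st['lon']], radius=6, color=color,
                        fill=True, popup=str(st['bikes'])).add_to(bikemap)
bikemap.save('bikemap.html')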
@sergiolucero
sergiolucero / concudemo.py
Last active July 14, 2017 15:52
Python concurrency 101
# deploy and time on Linode, LightSail, both my HPs, fpfi.cl, etc
import time
import multiprocessing
from dask import delayed, compute
nTasks = 100
def long_work():
    for i in range(100000):
        n = i*i*i*i*i*i   # simulated long work
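
A sketch of the comparison this gist seems to be heading toward: time the same nTasks workload run serially, through dask.delayed, and through a multiprocessing pool (the pure-Python loop is GIL-bound, so the default threaded dask scheduler is not expected to help much).

if __name__ == '__main__':
    t0 = time.time()
    for _ in range(nTasks):
        long_work()
    print('serial:', time.time() - t0)

    t0 = time.time()
    compute(*[delayed(long_work)() for _ in range(nTasks)])   # default threaded scheduler
    print('dask delayed:', time.time() - t0)

    t0 = time.time()
    with multiprocessing.Pool() as pool:
        pool.starmap(long_work, [()] * nTasks)   # empty argument tuples: call long_work() nTasks times
    print('multiprocessing:', time.time() - t0)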
@sergiolucero
sergiolucero / airqualitydemo.py
Created July 22, 2017 21:21
Open Air Quality
import openaq
api = openaq.OpenAQ()
api.cities(country='CL', limit=10)            # Chilean cities with monitoring stations
api.latest(city='Antofagasta', df=True)       # most recent readings, as a DataFrame
df_pm10 = api.measurements(city='Andacollo', location='Hospital',
                           parameter='pm10', df=True, limit=10)
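
A quick look at what came back; 'value' as the measurement column is an assumption about the frame py-openaq returns, so check df_pm10.columns first.

print(df_pm10.head())
df_pm10['value'].plot(title='Andacollo Hospital pm10')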
@sergiolucero
sergiolucero / shinycaller.py
Created July 30, 2017 22:52
python calls my shiny Rplotter
# starting point is https://github.com/smouksassi/ggplotwithyourdata
from pyvirtualdisplay import Display
from selenium import webdriver
RPLOTTER_URL = 'http://50.116.4.6:3838/'
with Display():
    browser = webdriver.Firefox()
    try:
        browser.get('http://www.google.com')
        print(browser.title)   # this should print "Google"
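        # assumed continuation (the preview cuts off): point the browser at the
        # shiny app itself and keep a screenshot as proof it rendered
        browser.get(RPLOTTER_URL)
        browser.save_screenshot('rplotter.png')
    finally:
        browser.quit()   # always release the headless browser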
@sergiolucero
sergiolucero / excelplotter.R
Created August 1, 2017 08:25
shiny excel gg(repel)plotter
library(shiny)
library(openxlsx)
library(lattice)
library(readxl)
library(ggplot2)
library(ggrepel)
setwd('C:/Users/Sergio/Sandbox/shiny_servers/excel_plotter')
#############################################################
read_excel_allsheets <- function(filename) {
sheets <- readxl::excel_sheets(filename)
@sergiolucero
sergiolucero / remediation.py
Created August 13, 2017 10:15
how to solve the groundwater remediation problem
from pflotran import Flow, Transport
from randomsoils import soil_generator
import pandas as pd
import numpy as np
nYears = 30; time_step = 1/12     # allow the pumping schedule to change every month
nRuns = 20                        # not defined in the preview; value assumed
grid = pd.read_csv('basic_grid_and_wells.csv')   # one read_csv cannot be unpacked into two frames
wells = grid[grid['type'] == 'well']             # assumes a column that distinguishes wells from grid cells
for ix in range(nRuns):           # run a few simulations
    modified_soil = soil_generator(grid)
    months = np.arange(0, nYears, time_step)     # pd.random does not exist; draw the schedule with numpy
    pumping_schedule = pd.DataFrame(np.random.rand(len(months), len(wells)),
                                    index=months, columns=wells.index)
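    # assumed continuation: score each scenario with a stand-in objective (total
    # pumped volume); the real evaluation would go through the Flow/Transport
    # models imported above, whose interface is not shown in the preview
    cost = pumping_schedule.values.sum()
    if ix == 0 or cost < best_cost:
        best_cost, best_schedule = cost, pumping_schedule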
@sergiolucero
sergiolucero / wikipullpopulation.py
Last active August 19, 2017 18:12
How to pull population from wikipedia pages
import wikipedia
CITIES = ['Paris', 'Barcelona', 'Tokyo', 'New York City', 'Amsterdam', 'Copenhagen', 'San Francisco']
AMBIGUOUS_CITIES = ['Santiago'] # need to dig deeper, 'population' is not contained in the summary
for city in CITIES:
    citywiki = wikipedia.page(city)
    cwsum = citywiki.summary
    poploc = cwsum.index('population')   # first and only?
    print(city, cwsum[poploc:poploc+30])
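
The 30-character slice is only a rough peek; a sketch of pulling the actual number out with a regex (the pattern is a guess at how the summaries phrase it):

import re

def population_from_summary(summary):
    # first number that follows the word 'population'; phrasing varies per article
    match = re.search(r'population[^0-9]*([\d][\d,\.]*)', summary, re.IGNORECASE)
    return match.group(1) if match else None

for city in CITIES:
    print(city, population_from_summary(wikipedia.page(city).summary))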