Skip to content

Instantly share code, notes, and snippets.

@bmander
bmander / digest_cuts.py
Created April 16, 2014 00:27
A one-off script for digesting the King County service cut spreadsheet into something that the refreq utility can use.
import csv
# Peak-period lengths, used to convert peak trip counts into trips per hour.
AM_PEAK_LEN=3.0 #hours, 6am-9am
PM_PEAK_LEN=3.0 #hours, 3pm-6pm
# Column indices into each spreadsheet row -- presumably positions in the
# King County service-cut CSV; confirm against the actual spreadsheet layout.
ROUTE_ROW=0 # route identifier column
CUR_TRIP_PEAK_AM=3 # current AM-peak trip count
CUR_TRIP_PEAK_PM=4 # current PM-peak trip count
PROP_TRIP_PEAK_AM=5 # proposed AM-peak trip count
PROP_TRIP_PEAK_PM=6 # proposed PM-peak trip count
@bmander
bmander / squishwords.py
Created June 5, 2014 19:59
A silly little script for translating documents to a compressed invented language.
# language corpus here: http://www.kilgarriff.co.uk/BNClists/all.al.gz
# a tale of two cities here: http://www.gutenberg.org/cache/epub/98/pg98.txt
import math
import re
# Module-level accumulators. NOTE(review): their exact contents are not
# visible in this fragment -- presumably word -> invented code, and
# word -> code length; confirm against the full script.
codes = {}
codelens = {}
def baseN(num,b,numerals="abcdefghijklmnopqrstuvwxyz"):
print "hello, world"
BufferedReader reader;
String line;
class ShapePoint {
public int shape_id;
public float lat;
public float lon;
// Sketch configuration constants -- presumably a Processing sketch; confirm.
int WIDTH = 500;            // canvas width, pixels
int HEIGHT = 500;           // canvas height, pixels
int ANTS_AT_A_TIME = 30;    // NOTE(review): use not visible in this fragment
boolean DRAW_PATH = false;  // toggle for path rendering -- presumably; confirm
float sum_array( float[] floats ) {
float ret = 0;
for(int i=0; i<floats.length; i++){
ret += floats[i];
// Clustering/routing configuration constants.
int NGROUPS=5;                      // number of point groups
float INFINITY = 100000000;         // sentinel "effectively infinite" distance
int RANDOM_POINTS = 75;             // how many random points to generate
boolean USE_RANDOM_POINTS = true;   // generate random points instead of loading them -- presumably; confirm
float TRUCK_SLACK = 1.1;            // NOTE(review): meaning not visible in this fragment
class PointGroup {
ArrayList points;
PVector mean;
@bmander
bmander / rift.py
Created December 9, 2010 00:38
Display maps color-coded by intersection connectivity.
import sqlite3
def main(osmdb_name, gdb_name,map_filename):
osmdb = sqlite3.connect(osmdb_name)
gdb = sqlite3.connect(gdb_name)
c = gdb.cursor()
c.execute( "select vertex1, count(*) from edges where vertex1 LIKE 'osm%' group by vertex1" )
@bmander
bmander / transitmapper.py
Created December 9, 2010 00:40
Print maps of stops coded by stop-time count.
from gtfs import Schedule
from gtfs.entity import Trip, StopTime, Stop
from sqlalchemy.orm import eagerload
import csv
import sys
from prender import processing
def cons(ary):
for i in range(len(ary)-1):
@bmander
bmander / ntuplechain.py
Created December 18, 2010 00:52
Create Markov chains using the Google ntuple dataset.
"""Get a bunch of 2gram files. Then split the 2grams with spaceize(). Then set up a database like
create table digram (word1 text, word2 text, year integer, match_count integer, page_count integer, volume_count integer);
.separator "\t"
.import spaceized.csv digram
then you're set up to use chain()"""
import csv
import sqlite3
@bmander
bmander / energy.py
Created January 1, 2011 01:14
Calculate total energy accumulation from insolation measurements.
# energy.py -- calculate total energy accumulation from insolation measurements.
#
# grab source data from http://www-k12.atmos.washington.edu/k12/grayskies/nw_weather.html
import csv
from datetime import datetime, timedelta
MINS_IN_DAY=1440 # minutes per day
# NOTE(review): file opened without a context manager; whether it is ever
# closed is not visible in this fragment -- confirm in the full script.
fp = open( "LFEM_data-.csv" )