Data Without Borders - Assignment 7
# CRIME MAPPING
#We’ve done some basic analysis of our Stop and Frisk data this semester, looking at
#basic statistics (e.g. number of stops by race or most common crimes by race) and
#timeseries information. We haven’t yet touched geography though. Let’s spend this
#week’s assignment answering the question: Where are Stop n Frisks happening in New
#York?
snf = read.csv("/Users/michelleboisson/Documents/ITP/* Data without Borders/snf_3.csv", as.is=TRUE)
geo = read.csv("/Users/michelleboisson/Documents/ITP/* Data without Borders/geo.csv", as.is=TRUE)
#Create one column with x and y together in both the snf and geo data frames
snf$xy <- paste(snf$x, ",", snf$y, sep="")
geo$xy <- paste(geo$xcoord, ",", geo$ycoord, sep="")
#before merging I want to check the size of snf
dim(snf)
#[1] 58089 14
#now merge snf and geo on $xy
snf.merged = merge(snf, geo, by="xy")
dim(snf.merged)
#[1] 56274 18
#It's smaller: merge() defaults to keeping only rows whose xy key appears in both
#data frames, so stops with coordinates that never show up in geo get dropped.
nrow(snf) - nrow(snf.merged)
# 1815 rows are missing!
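# A quick way to see what was dropped (a sketch, using the same snf and geo frames
# loaded above): look for xy keys in snf that have no counterpart in geo.
unmatched = !(snf$xy %in% geo$xy)
sum(unmatched)                      # how many stops failed to find a geocode
head(snf[unmatched, c("x", "y")])   # a few of the coordinates that didn't match
# to keep those rows anyway (with NA in the geo columns), merge with all.x=TRUE:
# snf.merged.all = merge(snf, geo, by="xy", all.x=TRUE)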
# MAPPING
library(maps)
map('county', 'new york', xlim=c(-74.25026, -73.70196), ylim=c(40.50553, 40.91289), mar=c(0,0,0,0))
points(snf.merged$lon, snf.merged$lat, col=rgb(200, 0, 0, 30, maxColorValue=255), pch=20, cex=0.3)
# I see clustering of stops in certain areas. The densest cluster seems to sit around the divide
# between upper Manhattan and the Bronx, with another large cluster on the border of Brooklyn and Queens.
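# To make the clustering a bit more concrete, here's a sketch (not part of the
# original assignment) that overlays kernel density contours using MASS::kde2d,
# which ships with a standard R install:
library(MASS)
ok = !is.na(snf.merged$lon) & !is.na(snf.merged$lat)
dens = kde2d(snf.merged$lon[ok], snf.merged$lat[ok], n=100)
contour(dens, add=TRUE, col="blue", drawlabels=FALSE)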
# COLORS
install.packages("RColorBrewer")
library(RColorBrewer)
colors <- brewer.pal(8, "Set3")
colors
#[1] "#8DD3C7" "#FFFFB3" "#BEBADA" "#FB8072" "#80B1D3" "#FDB462" "#B3DE69" "#FCCDE5"
unique.races = unique(snf.merged$race)
#[1] "B" "Q" "P" "W" "Z" "U" "A" "I"
#create data frame as a key for each race and its color
race.colors = data.frame(race = unique.races, colors = colors, stringsAsFactors=FALSE)
#(stringsAsFactors=FALSE keeps the hex colors as plain strings so points() uses them literally)
#merge the race.colors key into snf.merged
snf.merged = merge(snf.merged, race.colors, by="race")
#draw the map
map('county', 'new york', xlim=c(-74.25026, -73.70196), ylim=c(40.50553, 40.91289), mar=c(0,0,0,0))
#add the points with colors
points(snf.merged$lon, snf.merged$lat, col=snf.merged$colors, pch=20, cex=0.3)
# oh snap! that's so cool!
# There is definite clustering of the races in certain areas of New York City
# Now I want to add a legend
# first fill in the full names of the races.
fname = c("Black","White Hispanic","Pacific Islander","White","White Hispanic","Unknown","Asian","Native American")
race.colors$name = fname
legend("topleft", legend=as.vector(race.colors$name), fill=as.vector(race.colors$colors))
# I'm tired of looking up the calls to draw the map and then plot the points,
# so I'm writing a function
showMap = function() {
  map('county', 'new york', xlim=c(-74.25026, -73.70196), ylim=c(40.50553, 40.91289), mar=c(0,0,0,0))
  points(snf.merged$lon, snf.merged$lat, col=snf.merged$colors, pch=20, cex=0.3)
}
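# e.g. to redraw the race-colored map together with its legend in one go:
showMap()
legend("topleft", legend=as.vector(race.colors$name), fill=as.vector(race.colors$colors))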
#LET'S MAKE A MOVIE
library(animation)
#grab the days from the time string
days = substr(snf.merged$time,9,10)
#add it to our data frame
snf.merged$day = days
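# The substr() indexing assumes the time string starts with an ISO-style date
# (e.g. "2012-10-04 13:05:00", so characters 9-10 are the day of the month).
# A sketch of a more explicit version under that same assumption:
stop.dates = as.Date(substr(snf.merged$time, 1, 10))
snf.merged$day = format(stop.dates, "%d")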
#create the movie - plot the stops from each day, one day at a time
# create map
#map('county', 'new york', xlim=c(-74.25026, -73.70196), ylim=c(40.50553, 40.91289), mar=c(0,0,0,0))
legend("topleft", legend=as.vector(race.colors$race), fill=as.vector(race.colors$colors))
saveHTML( {
  for (i in 1:30) {
    # redraw the base map for this frame
    map('county', 'new york', xlim=c(-74.25026, -73.70196), ylim=c(40.50553, 40.91289), mar=c(0,0,0,0))
    # keep only the stops from day i of the month
    todays.snf = snf.merged[as.numeric(snf.merged$day) == i, ]
    points(todays.snf$lon, todays.snf$lat, col=todays.snf$colors, pch=20, cex=0.3)
    # record this frame (resetting the recorder on the first day)
    if (i == 1) {
      ani.record(reset=TRUE)
    } else {
      ani.record()
    }
  }
  ani.pause()
} )
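# Note: the legend() call above is drawn on the interactive device, not in the
# frames saveHTML captures. A sketch of one way to give every frame its own legend
# and a day label (ani.options(interval=...) sets the delay between frames in the player):
ani.options(interval = 0.5)
saveHTML( {
  for (i in 1:30) {
    map('county', 'new york', xlim=c(-74.25026, -73.70196), ylim=c(40.50553, 40.91289), mar=c(0,0,0,0))
    todays.snf = snf.merged[as.numeric(snf.merged$day) == i, ]
    points(todays.snf$lon, todays.snf$lat, col=todays.snf$colors, pch=20, cex=0.3)
    legend("topleft", legend=as.vector(race.colors$name), fill=as.vector(race.colors$colors))
    title(main = paste("Stops on day", i))
  }
} )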