Skip to content

Instantly share code, notes, and snippets.

@vsoch
Created February 6, 2014 20:55
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Star You must be signed in to star a gist
Save vsoch/8852307 to your computer and use it in GitHub Desktop.
mapASD.R: Use the foursquare API to create a resource map for a search term of interest
# foursquareASD will create a map of ASD locations across the United States
# by querying the Foursquare venues API near each US zip-code centroid.
library(RJSONIO)
library(RCurl)
# Point RCurl at a CA bundle so the https calls to the Foursquare API verify
options(RCurlOptions = list(cainfo = system.file("CurlSSL", "cacert.pem", package = "RCurl")))
# First, read in file with latitudes and longitudes of major cities
# Obtained from http://notebook.gaslampmedia.com/download-zip-code-latitude-longitude-city-state-county-csv/
# NOTE: original used head=TRUE, which only works via partial argument
# matching; spell out header=TRUE.
ll <- read.csv("zip_codes_states.csv", sep = ",", header = TRUE)
# Fill in your own Foursquare API credentials here
clientid <- "YOURCLIENTID"
clientsecret <- "YOURCLIENTSECRET"
# Accumulators for the venue fields collected in the query loop below
venue_name <- c()
venue_lat <- c()
venue_long <- c()
venue_city <- c()
venue_state <- c()
venue_country <- c()
venue_checkins <- c()
venue_users <- c()
# Query the Foursquare venues/explore endpoint once per zip-code centroid and
# accumulate the fields we care about from every returned venue.
# seq_len/seq_along (not 1:n) so an empty table or empty result list iterates
# zero times instead of c(1, 0).
for (i in seq_len(nrow(ll))) {
  lat <- ll$latitude[i]
  long <- ll$longitude[i]
  # Build the query URL, run it, and parse the JSON response
  query <- paste("https://api.foursquare.com/v2/venues/explore?client_id=", clientid,
                 "&client_secret=", clientsecret, "&ll=", lat, ",", long,
                 "&query=autism&v=20130815", sep = "")
  result <- getURL(query)
  data <- fromJSON(result)
  # For each result, save a bunch of fields, you can tweak this to your liking
  items <- data$response$groups[[1]]$items
  if (length(items) > 0) {
    for (r in seq_along(items)) {
      tmp <- items[[r]]$venue
      venue_name <- c(venue_name, tmp$name)
      venue_lat <- c(venue_lat, tmp$location$lat)
      venue_long <- c(venue_long, tmp$location$lng)
      venue_city <- c(venue_city, tmp$location$city)
      venue_state <- c(venue_state, tmp$location$state)
      venue_country <- c(venue_country, tmp$location$country)
      # stats[1]/stats[2] are the checkin and user counts from the API response
      venue_checkins <- c(venue_checkins, tmp$stats[1])
      venue_users <- c(venue_users, tmp$stats[2])
    }
  }
}
# SAVE RESULT so the scraped fields survive a crash or restart of the loop
save(venue_name, venue_lat, venue_long, venue_city, venue_state, venue_country,
     venue_checkins, venue_users, file = "venuesResult.RData")
# Note - depending on your internet connection (and the API), the above loop can hit a snag every now and then. I chose to let it hit, and then continue at the index where I left off. For this reason, there could be duplicate results.
# Put in nice data frame
# BUG FIX: the original referenced an undefined variable `locationvar` and
# bound 7 columns (venue_checkins twice) while assigning only 5 names below,
# misaligning every column. Build the lat/long key here and bind exactly the
# 5 columns the names refer to.
locationvar <- paste(venue_lat, venue_long, sep = ",")
data <- as.data.frame(cbind(locationvar, venue_checkins, venue_name, venue_lat, venue_long))
# Find (and drop) duplicate results from re-running the loop after failures
dsub <- subset(data, !duplicated(data))
names(dsub) <- c("latlong", "checkins", "name", "latitude", "longitude")
# Export to file so we can use in Google Fusion Table (drop the latlong key)
tabley <- dsub[, 2:5]
write.table(tabley, file = "import.csv", quote = TRUE, sep = ",", row.names = FALSE)
# You can now import this as a google fusion table
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment