Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Demo code showing how to collect and analyze the subreddits a particular user has participated in, using R instead of python (praw). I don't believe the code is exiting when it should, but this should at least serve as a jumping-off point for the person who asked for help: http://www.reddit.com/r/rstats/comments/1tq4au/accessing_the_reddit_api_thro…
#install.packages("rjson","RCurl")
library(RCurl)
library(rjson)
# Fetch one page (up to 100 items) of a reddit user's listing and
# extract the subreddit each item was posted in.
#
# Args:
#   user:  reddit username (string).
#   after: reddit fullname id to page from, or NULL for the first page.
#   cache: character vector of item ids already seen on earlier pages,
#          used to detect when the API has wrapped back to known data.
#
# Returns a list of four elements:
#   [[1]] character vector of subreddit names for the new items on this page
#   [[2]] the "after" id for the next page, or NULL when no more pages exist
#   [[3]] the updated id cache
#   [[4]] TRUE when paging is finished (cache hit or no next page)
get_User_subs_page = function(user, after = NULL, cache = c()) {
  baseurl <- "http://www.reddit.com/user/"
  params <- "limit=100"
  if (!is.null(after)) {
    params <- paste0(params, "&after=", after)
  }
  userCommentsUrl <- paste0(baseurl, user, "/.json?", params)
  jsonResponse <- getURLContent(userCommentsUrl, curl = getCurlHandle())
  StructuredResponse <- fromJSON(jsonResponse, unexpected.escape = "keep")

  # Extract subreddits from the retrieved item objects.
  n <- length(StructuredResponse$data$children)
  substemp <- character(n)  # preallocate; trimmed on early exit below
  done <- FALSE
  # seq_len(n) is safe for n == 0, where 1:n would wrongly yield c(1, 0)
  for (i in seq_len(n)) {
    obj <- StructuredResponse$data$children[[i]]$data
    if (obj$id %in% cache) {
      # We have seen this item before: the listing has wrapped around,
      # so report only the genuinely new items and signal completion.
      print("done")
      done <- TRUE
      return(list(substemp[seq_len(i - 1)], NULL, cache, done))
    }
    substemp[i] <- obj$subreddit
    cache <- c(cache, obj$id)
  }

  nextID <- StructuredResponse$data$after  # for paging
  # No "after" id means this was the last page; tell the caller to stop
  # instead of letting it re-request the first page.
  if (is.null(nextID)) {
    done <- TRUE
  }
  list(substemp, nextID, cache, done)
}
# Driver: page through the user's listing, accumulating one subreddit
# name per retrieved item, then tabulate how often each subreddit occurs.
USER <- "ENTER USERNAME HERE"
subreddits <- c()
afterID <- NULL
ids_cache <- c()
done <- FALSE
iter <- 0
while (!done) {
  iter <- iter + 1
  print(iter)
  subs_page <- get_User_subs_page(user = USER, after = afterID, cache = ids_cache)
  subreddits <- c(subreddits, subs_page[[1]])
  afterID <- subs_page[[2]]
  ids_cache <- subs_page[[3]]
  done <- subs_page[[4]]
  # A NULL afterID means the API has no further pages; stop now rather
  # than issuing another request that would restart from the first page.
  if (is.null(afterID)) {
    done <- TRUE
  }
  Sys.sleep(2)  # sleep 2 seconds to respect reddit API so we don't get blocked
}
# Count retrieved items per subreddit
aggregate(subreddits, by = list(subreddits), length)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.