public
Last active

Facebook likes grabber

  • Download Gist
fbGrab.py
Python
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96
#This is a really simple script:
##Grab the list of members of a Facebook group (no paging as yet...)
###For each member, try to grab their Likes
 
import urllib,csv,argparse
 
 
##INSTRUCTIONS:
#1) Download this file, eg to ~/Downloads/fbGrab.py
#2) Get a Facebook group ID and paste into gid below
#3) Get a Facebook auth token (click link from Facebook API page) and paste in FBTOKEN below
#4) Save the file
#5) in the terminal, cd to the directory containing the file (e.g. cd ~/Downloads)
#6) run the script with: python fbGrab.py
#7) the results are dropped into a CSV file in the same directory
 
 
#----
####CONFIGURE THESE BITS
#A group ID
#A group ID (numeric Facebook group identifier, as a string)
gid='YOURGROUPIDHERE'
#A current access token (short-lived OAuth token copied from the Graph API Explorer)
FBTOKEN='YOURTOKENHERE'
#How many group members to request in the single (unpaged) members call
glim=1000
#------
 
#Prefer the faster simplejson if installed; fall back to the stdlib json module
try: import simplejson as json
except ImportError: import json
 
#Grab a copy of a current token from an example Facebook API call.
#Something a bit like this:
#AAAAAAITEghMBAOMYrWLBTYpf9ciZBLXaw56uOt2huS7C4cCiOiegEZBeiZB1N4ZCqHgQZDZD
 
#parser = argparse.ArgumentParser(description='Generate social positioning map around a Facebook group')
 
#parser.add_argument('-gid',default='2311573955',help='Facebook group ID')
#gid='2311573955'
 
#parser.add_argument('-FBTOKEN',help='Facebook API token')
 
 
 
 
#Quick test - output file is simple 2 column CSV that we can render in Gephi
#One row per edge: (member uid, group id) and (member uid, liked page name).
fn = 'fbgroupliketest_%s.csv' % gid
writer = csv.writer(open(fn, 'wb+'), quoting=csv.QUOTE_ALL)

#Member uids we have already fetched likes for (dedupe guard)
uids = []
 
def getGroupMembers(gid):
gurl='https://graph.facebook.com/'+str(gid)+'/members?limit='+str(glim)+'&access_token='+FBTOKEN
data=json.load(urllib.urlopen(gurl))
if "error" in data:
print "Something seems to be going wrong - check OAUTH key?"
print data['error']['message'],data['error']['code'],data['error']['type']
exit(-1)
else:
return data
 
#Grab the likes for a particular Facebook user by Facebook User ID
def getLikes(uid,gid):
#Should probably implement at least a simple cache here
lurl="https://graph.facebook.com/"+str(uid)+"/likes?access_token="+FBTOKEN
ldata=json.load(urllib.urlopen(lurl))
print ldata
if len(ldata['data'])>0:
for i in ldata['data']:
if 'name' in i:
writer.writerow([str(uid),i['name'].encode('ascii','ignore')])
#We could colour nodes based on category, etc, though would require richer output format.
#In the past, I have used the networkx library to construct "native" graph based representations of interest networks.
if 'category' in i:
print str(uid),i['name'],i['category']
 
#For each user in the group membership list, get their likes
def parseGroupMembers(groupData,gid):
for user in groupData['data']:
uid=user['id']
writer.writerow([str(uid),str(gid)])
#x is just a fudge used in progress reporting
x=0
#Prevent duplicate fetches
if uid not in uids:
getLikes(user['id'],gid)
uids.append(uid)
#Really crude progress reporting
print x
x=x+1
#need to handle paging?
#parse next page URL and recall this function
 
 
#Entry point: fetch the member list for the configured group, then walk it,
#writing membership and Likes edges to the CSV opened above.
groupdata=getGroupMembers(gid)
parseGroupMembers(groupdata,gid)

Can I get this to run in a web-based environment? Please PM me.

Please sign in to comment on this gist.

Something went wrong with that request. Please try again.