Skip to content

Instantly share code, notes, and snippets.

@s5unty
Created May 2, 2011 14:10
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save s5unty/951656 to your computer and use it in GitHub Desktop.
Save s5unty/951656 to your computer and use it in GitHub Desktop.
Syncing FriendFeed to Follow5 using Python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set noexpandtab:
# digu appears to rate-limit updates made through the API (> 5 minutes), so I
# cannot guarantee that every update goes through.
# Do not manually delete the most recent n FriendFeed entries, or the script
# will raise and post duplicate tweets. The root cause is that two consecutive
# FriendFeed fetches may return inconsistent entry lists.
import json
import urllib
import urllib2
import base64
import time
import sys
import os
import re
from urllib2 import URLError
default_encoding = 'utf-8'
# Python 2 hack: force UTF-8 as the process-wide default encoding.
# reload(sys) restores setdefaultencoding(), which site.py deletes at startup.
if sys.getdefaultencoding() != default_encoding:
    reload(sys)
    sys.setdefaultencoding(default_encoding)
def follow5(username,password,msg):
API_KEY="<YOUR_API_KEY>" # 用你自己申请得到的 API_KEY
url="http://api.follow5.com/api/statuses/update.xml?api_key="+API_KEY
form = {
"status": msg
}
data = urllib.urlencode(form)
auth = 'Basic '+base64.b64encode(username+":"+password)
headers = {
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
"Authorization": auth
}
req = urllib2.Request(url, data, headers)
try:
result = urllib2.urlopen(req)
print result
except URLError, e:
print "URL error(%s)" % (e)
if result.code != 200:
return False
return True
def digu(username,password,msg):
#url = "http://api.minicloud.com.cn/1.1/statuses/update.json"
url = "http://api.minicloud.com.cn/statuses/update.json"
form = {
"content": msg
}
data = urllib.urlencode(form)
auth = 'Basic '+base64.b64encode(username+":"+password)
headers = {
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
"Authorization": auth
}
req = urllib2.Request(url, data, headers)
try:
result = urllib2.urlopen(req)
except URLError, e:
print "URL error(%s)" % (e)
if result.code != 200:
return False
return True
def googl(link):
    # Shorten *link* with the (undocumented) goo.gl shortener endpoint.
    # Returns the short URL, or the original *link* unchanged when the
    # request fails or the response cannot be parsed.
    url = 'http://goo.gl/api/shorten'
    form = {
        'security_token': 'null',
        'url': link
    }
    data = urllib.urlencode(form)
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
    }
    req = urllib2.Request(url, data, headers)
    try:
        result = urllib2.urlopen(req)
    except URLError:
        return link
    try:
        res = json.loads(result.read())
        return res['short_url']
    except (ValueError, KeyError):
        # BUG FIX: a malformed or non-JSON response used to crash the script;
        # fall back to the long URL, consistent with the URLError path.
        return link
def init(file, feed):
    # Load the cached feed from *file*; on first run, seed the cache with
    # *feed* and return it. Returns the parsed cache contents.
    if not os.path.exists(file):
        f = open(file, 'w+')
        f.write(json.dumps(feed, ensure_ascii=False))
        f.close()
    f = open(file, 'r')
    try:
        # BUG FIX: the cache is written with json.dumps, so it must be read
        # back with the JSON parser. eval() broke on JSON literals such as
        # true/false/null and executed arbitrary file contents as code.
        ret = json.load(f)
    finally:
        f.close()
    return ret
def deinit(file, feed, refresh):
    # Persist *feed* to the cache at *file*, but only when *refresh* signals
    # that something new was posted this run.
    if not refresh:
        return
    handle = open(file, 'w')
    try:
        handle.write(json.dumps(feed, ensure_ascii=False))
    finally:
        handle.close()
def tweet(link, body):
    # Build the status text for one feed entry: entries that originate from
    # FriendFeed itself or from the author's own Twitter are posted as-is;
    # anything else gets a shortened source link appended.
    own_sources = (
        "^http://friendfeed.com/./",
        "^http://twitter.com/s5unty/",
    )
    for pattern in own_sources:
        if re.search(pattern, link):
            return body
    return body + ' - ' + googl(link)
def main(username, password):
    # Fetch the latest FriendFeed entries and cross-post any entry that is
    # not yet in the local cache to follow5, then refresh the cache.
    cached = sys.path[0] + '/.friendfeed.log'
    origin = json.load(urllib.urlopen( \
        "http://friendfeed-api.com/v2/feed/<YOUR_FriendFeed_USERNAME>?raw=1&start=0&num=5"))
    master = init(cached, origin)
    refresh = False
    entries = master['entries']
    # Crude membership store: entry ids are matched as substrings of the
    # cache's string representation.
    seen = str(entries)
    for entry in origin['entries'][::-1]:
        # BUG FIX: re.search treated the entry id as a regex pattern, so any
        # metacharacter in an id would misfire; a plain substring test is the
        # intended check.
        if entry['id'] not in seen:
            refresh = True
            twitter = tweet(entry['rawLink'], entry['rawBody'])
            # digu(username, password, twitter)
            follow5(username, password, twitter)
    deinit(cached, origin, refresh)
if __name__ == "__main__":
    # Fill in your own follow5 credentials before running.
    main("<USERNAME>", "<PASSWORD>")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment