@kwellman
kwellman / zappa_cert.py
Created February 15, 2017 21:05
Generate and output Let's Encrypt certificate for Zappa project
import os, sys
from zappa.cli import ZappaCLI
from zappa.util import parse_s3_url
from zappa import letsencrypt
# get command line arguments
directory, stage, output_filename = sys.argv[1:]
# change working directory
owd = os.getcwd()
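The preview ends before the certificate work itself. One way the script could plausibly continue, shelling out to Zappa's own certify command rather than calling zappa.letsencrypt internals directly (whose helpers differ between Zappa versions), is sketched below; the --yes flag and the omission of output_filename handling are assumptions, not part of the original gist.

# plausible continuation (not shown in the preview above)
import subprocess

os.chdir(directory)
try:
    # `zappa certify <stage>` drives the same Let's Encrypt flow the imports
    # above point at; --yes (assumed) skips the interactive confirmation
    subprocess.check_call(['zappa', 'certify', stage, '--yes'])
finally:
    os.chdir(owd)
# writing the issued certificate to output_filename is left to the
# zappa.letsencrypt helpers imported above and is not reproduced here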
import urllib2, urllib, base64, json
from datetime import datetime
from time import mktime
import feedparser
def days_since_last_post(rss_url):
    d = feedparser.parse(rss_url)
    if not len(d.entries):
        return -1
    # get date of last blog post and convert to datetime object
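The preview cuts off before the date arithmetic. A plausible completion, using the published_parsed struct_time that feedparser attaches to entries (falling back to updated_parsed is an assumption about the feeds involved), would be:

    # plausible continuation (not part of the original preview)
    entry = d.entries[0]
    parsed = entry.get('published_parsed') or entry.get('updated_parsed')
    if parsed is None:
        return -1
    last_post = datetime.fromtimestamp(mktime(parsed))
    return (datetime.now() - last_post).days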
@kwellman
kwellman / make_regex.py
Created May 5, 2012 21:18
Mining twitter blog post (make_regex.py)
import re
def make_regex(query):
"""Returns a compiled regex. Use returned object like so regex.search(tweet)"""
s = r'(\s\S+){0,2}\s'.join(query.split())
return re.compile(s, re.IGNORECASE)
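For a query like 'wish there was', the joined pattern is wish(\s\S+){0,2}\sthere(\s\S+){0,2}\swas: the query words in order, with up to two extra words tolerated between each pair. A quick check (the sample tweets are invented):

regex = make_regex('wish there was')
print regex.search('I wish that there seriously was an app for this') is not None  # True
print regex.search('there was something I wish for') is not None                   # False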
@kwellman
kwellman / init_queries.py
Created May 5, 2012 21:14
Mining twitter blog post (init_queries.py)
def init_queries():
    phrases = [
        'wish there was',
        'why isn\'t there',
        'wish someone would create',
        'somebody needs to create',
        'somebody should create',
        'someone needs to create',
        'someone should create',
    ]
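The preview stops at the phrase list. A plausible completion (an assumption, reusing make_regex from the gist above) would pair each phrase with its compiled pattern:

    # plausible continuation (not in the original preview)
    return [(phrase, make_regex(phrase)) for phrase in phrases]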
@kwellman
kwellman / search_twitter.py
Created May 5, 2012 21:13
Mining twitter blog post (search_twitter.py)
import urllib, urllib2, json
def search_twitter(query, no_retweets=True):
    if no_retweets:
        # use the negation operator to filter out retweets
        query += ' -RT'
    url = 'http://search.twitter.com/search.json?%s' % urllib.urlencode({
        'q': query,
        'lang': 'en',  # restrict results to english tweets
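The preview breaks off inside the parameter dict. A plausible remainder, targeting the retired v1 Search API that returned its hits under a 'results' key (the rpp value and the lack of error handling are assumptions), would be:

        'rpp': 100,  # results per page (assumed value), capped at 100 by the old API
    })
    response = urllib2.urlopen(url)
    data = json.loads(response.read())
    return data.get('results', [])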
@kwellman
kwellman / mining_twitter.py
Created May 5, 2012 21:11
Mining twitter blog post
import re, urllib, urllib2, json
BLACKLIST = ['odesk', 'elance', '#jobs', 'now hiring']
def search_twitter(query, no_retweets=True):
    if no_retweets:
        # use the negation operator to filter out retweets
        query += ' -RT'
    url = 'http://search.twitter.com/search.json?%s' % urllib.urlencode({
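This variant adds a BLACKLIST of job-spam terms. A small helper of the kind the rest of the script presumably applies to each result (the helper name is invented here) would be:

def is_spam(tweet_text):
    """Return True if the tweet mentions any blacklisted job-spam term."""
    text = tweet_text.lower()
    return any(term in text for term in BLACKLIST)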
@kwellman
kwellman / gist:632478
Created October 18, 2010 16:00
readability_benchmarks.py
"""Quick and dirty benchmarking for readability functions.
"""
import re, time, os, json
from urllib import urlopen
from hn import grabContent
from lxml_readability import extract
import socket
socket.setdefaulttimeout(30)
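Only the imports and the socket timeout are visible. A minimal timing harness in the spirit of the description (the function and argument names are illustrative, not taken from the gist) might look like:

def benchmark(extract_func, html_pages):
    """Time extract_func over a list of HTML strings; return total seconds."""
    start = time.time()
    for html in html_pages:
        try:
            extract_func(html)
        except Exception:
            pass  # quick and dirty: skip pages a given extractor chokes on
    return time.time() - start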
@kwellman
kwellman / gist:632442
Created October 18, 2010 15:44
lxml_readability.py
"""An lxml Port of Nirmal Patel's port (http://nirmalpatel.com/fcgi/hn.py) of
Arc90's Readability to Python.
"""
import re
from lxml.html import fromstring, tostring
from lxml.html.clean import Cleaner
NEGATIVE = re.compile('comment|meta|footer|footnote|foot')
POSITIVE = re.compile('post|hentry|entry|content|text|body|article')
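In Arc90's Readability (and Nirmal Patel's port), regexes like these score candidate elements by their class and id attributes. A simplified sketch of that idea, not the gist's actual function, looks like:

def class_weight(node):
    """Score an lxml element by class/id: penalize page chrome, reward content."""
    score = 0
    for attr in ('class', 'id'):
        value = node.get(attr) or ''
        if NEGATIVE.search(value):
            score -= 50
        if POSITIVE.search(value):
            score += 25
    return score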
#!/usr/bin/python
"""Simple script to split the terminal window into regions using GNU Screen and
automatically run commands in the new regions.
"""
import os, sys
import optparse
parser = optparse.OptionParser(usage='splitscr.py [options] <command> ...')
parser.add_option('-s', '--newscreen', action='store_true', dest='new_screen', help='Create a new screen session for the splits')
parser.add_option('-v', '--vsplit', action='store_true', dest='vertical_split', help='Split vertically instead of horizontally')
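The preview ends at option parsing. Driving an existing GNU Screen session from a script is normally done with screen -X and its split, focus, and screen commands; a rough sketch of that step (assumed, not taken from the gist; split -v needs Screen 4.1+) is:

def run_in_split(command, vertical=False):
    """Split the current Screen window and run command in the new region."""
    os.system('screen -X split' + (' -v' if vertical else ''))
    os.system('screen -X focus')                 # move into the new region
    os.system('screen -X screen %s' % command)   # open a window running the command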