Skip to content

Instantly share code, notes, and snippets.

@thepaul
thepaul / SanerConfigParser.py
Created May 13, 2014 22:42
# the ConfigParser.*ConfigParser family is just comically terrible
import ConfigParser
# A thin wrapper around (Python 2) ConfigParser.SafeConfigParser that keeps
# the caller-supplied defaults in a plain dict on the instance instead of
# handing them to the parent constructor.
# NOTE(review): leading indentation was lost when this snippet was scraped;
# the lines below presumably nest under the class/def as usual — confirm
# against the original gist before reusing.
class SanerConfigParser(ConfigParser.SafeConfigParser):
# Same options as SafeConfigParser, except `defaults` is deliberately NOT
# forwarded to the parent __init__ — it is stashed in self.mydefaults so
# default handling can be done by this class's own lookup logic (not shown
# in this truncated preview).
def __init__(self, defaults=None, dict_type=dict, allow_no_value=False):
ConfigParser.SafeConfigParser.__init__(self, dict_type=dict_type,
allow_no_value=allow_no_value)
# normalize None to an empty dict so mydefaults is always a mapping
self.mydefaults = defaults or {}
# Sentinel object: lets later lookup code distinguish "no default supplied"
# from an explicit default of None. (Presumably module-level — the scraped
# indentation does not show its true scope; verify against the gist.)
no_default = object()
@thepaul
thepaul / ssh_tunnel_control.py
Created May 14, 2014 20:32
ssh tunnel up/down
import contextlib
import os
import subprocess
import tempfile
class SshTunnelDefinition:
def __init__(self, host, user=None, executable='ssh', localforwards=(),
remoteforwards=(), port=None, options=None):
self.executable = executable
@thepaul
thepaul / gist:aec592cb62294e587ef8
Created June 17, 2015 22:33
things in the Python standard library which use super()
argparse (all classes)
abc.ABCMeta
collections.Counter
fractions.Fraction
plistlib.Dict and plistlib.Plist
random.Random
unittest.TextTestResult and unittest.FunctionTestCase
weakref.KeyedRef
zipfile.ZipExtFile
ctypes.py_object
@thepaul
thepaul / gist:0e611da8b4fadec94568
Created July 9, 2015 21:29
wait for all subprocesses in a list to exit
import signal
def wait_for_subprocs(procs, cb=lambda proc: 0):
# do-nothing handler for SIGCHLD, just so it's something other than SIG_DFL.
# otherwise, Python won't interrupt syscalls.
oldhandler = signal.signal(signal.SIGCHLD, lambda *_: None)
try:
while procs:
signal.pause()
aliveprocs = []
@thepaul
thepaul / export2csv.py
Created June 22, 2009 22:41
export random database stuff to csv
# export2csv.py
#
# export random database stuff to csv
from __future__ import with_statement
import csv
def export2csv(cursor, outf):
"""
cursor should have an executed query already
diff --git a/tools/dockerz/Dockerfile b/tools/dockerz/Dockerfile
new file mode 100644
index 0000000..0eafb35
--- /dev/null
+++ b/tools/dockerz/Dockerfile
@@ -0,0 +1,17 @@
+FROM ubuntu:trusty
+MAINTAINER <pcannon@epochlabs.com>
+
+RUN apt-get -y update \
@thepaul
thepaul / have_pidfile.py
Created October 9, 2010 22:12
pidfile context manager
import contextlib
import os
@contextlib.contextmanager
def have_pidfile(fname):
f = open(fname, 'w')
f.write('%d\n' % os.getpid())
f.flush()
s = os.fstat(f.fileno())
dev, ino = s.st_dev, s.st_ino
@thepaul
thepaul / socket_info.py
Created October 9, 2010 23:04
output info about socket objects
import socket
from subprocess import check_output
from sys import stdout
from os import getpid
def info_about_socket(s, out=stdout):
"""
Write some information about the status and state of a socket object
to the file-like object 'out' (stdout, by default).
@thepaul
thepaul / gist:660680
Created November 3, 2010 01:52
index records by first field and output according to that index. The original use case called for commas, but set VSEP to the empty string if you just want multiple values separated with OFS.
# Group input records by their first field: for each distinct $1, accumulate
# every $2 into x[$1], joined with VSEP followed by OFS. An unset array entry
# is falsy in awk, so the ternary stores the bare $2 on the first occurrence
# and appends on every later one. The END block prints each key with its
# accumulated values. VSEP=, gives "key val1, val2, ..."; set VSEP= (empty)
# to separate values with OFS alone.
awk '{x[$1]=x[$1]?(x[$1] VSEP OFS $2):$2}END{for(e in x){print e,x[e]}}' VSEP=,
@thepaul
thepaul / ParallelBatcher.py
Created January 13, 2011 19:08
Do Twisted Deferred-producing jobs, X at a time
# ParallelBatcher.py
#
# Job pipeline for Twisted Matrix
# the paul 2011
#
# Sort of goes between defer.DeferredList and plain Deferred chaining.
# When you have lots of jobs to do which take time (most likely because
# they have to wait on some network action) but you don't want to do
# them all at the same time (maybe the remote network action is CPU- or
# bandwidth-intensive and you want to avoid overloading the remote