@korc
Last active September 30, 2015 04:06
pcap2pg
#!/usr/bin/python
import getopt
import psycopg2
import sys
import datetime
from utils import PSQLPipeCopierMixIn, PCapReader, _std_init, SharkReader, PCapProducer
sql_initdb="""
CREATE TABLE source (id serial primary key, name text not null);
CREATE TABLE pcap (
id bigserial PRIMARY KEY,
source_id int REFERENCES source(id) on UPDATE CASCADE,
"frame.number" bigint not null,
"frame.time" timestamptz not null default now(),
UNIQUE (source_id, "frame.number")
);
"""
class Copier(PSQLPipeCopierMixIn, object):  # mixin first so its attributes take precedence; object makes the class new-style, which the cached_property descriptors require
sql_table = "pcap"
__init__=_std_init
def init_db():
try: db=psycopg2.connect("")
except psycopg2.OperationalError as e:
print >>sys.stderr, """Error connecting to database: %s
Please set PG* env vars accordingly
ref: http://www.postgresql.org/docs/9.4/static/libpq-envars.html
Hint: psql should be able to connect to DB without any args"""%(e,)
raise SystemExit(2)
with db.cursor() as cur:
try: cur.execute("select 1 from pcap where 1=0")
except psycopg2.ProgrammingError:
cur.execute("abort")
cur.execute(sql_initdb)
db.commit()
return db
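# Example (illustrative; assumes libpq environment variables point at a reachable database,
# and that this file is saved as pcap2pg.py):
#   $ export PGHOST=localhost PGDATABASE=pcap PGUSER=postgres
#   $ python -c 'import pcap2pg; pcap2pg.init_db()'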
def main(args):
opts, fields=getopt.getopt(args, "q:r:w:l:")
linktype=update_cursor=copier=last_db_framenum=outfile=pfile=query=None
for opt, val in opts:
if opt=="-r":
pfile=PCapReader(val)
elif opt=="-q":
query=val
elif opt=="-w":
outfile=open(val, "wb")
elif opt=="-l":
linktype=int(val)
if linktype is None and pfile is not None:
linktype=pfile.dlt
elif linktype is None:
linktype=1
db=init_db()
with db.cursor() as cur:
cur.execute("select * from pcap WHERE 1=0")
cur.fetchone()
cur_cols=set(map(lambda x: x[0], cur.description))
for new_col in filter(lambda x: x not in cur_cols, fields):
if new_col in ("eth.src", "eth.dst"): new_coltype="macaddr"
elif new_col in ("ip.src", "ip.dst"): new_coltype="inet"
elif new_col in ("udp.srcport", "udp.dstport", "tcp.srcport", "tcp.dstport"):
new_coltype="int"
else: new_coltype="text"
cur.execute('alter table pcap add COLUMN "%s" %s'%(new_col, new_coltype))
db.commit()
if pfile:
pcap_source=pfile
read_dlt=pfile.dlt
frame_number=0
with db.cursor() as cur:
cur.execute("select id from source where name=%s", (pfile.fname,))
ret=cur.fetchone()
if ret:
source_id=ret[0]
else:
cur.execute("insert into source (name) values (%s) returning id", (pfile.fname,))
db.commit()
source_id=cur.fetchone()[0]
cur.execute('select max("frame.number") from pcap where source_id=%s', (source_id,))
last_db_framenum=cur.fetchone()[0]
if last_db_framenum is None: last_db_framenum=0
else:
if query is None: query="1=1"
pcap_source=db.cursor()
pcap_source.execute(('SELECT source_id, "frame.number" FROM pcap '
'WHERE %s '
'ORDER BY "frame.time", source_id, "frame.number"'%(query,)))
pcap_source_files={}
sharks={}
if outfile: outfile_producer=PCapProducer(linktype=linktype)
for pcap_data in iter(pcap_source):
if pfile:
capture_stamp, pkt_data=pcap_data
frame_number+=1
else:
source_id, frame_number=pcap_data
if source_id not in pcap_source_files:
with db.cursor() as src_cur:
src_cur.execute("select name from source where id=%s", (source_id,))
pcap_source_files[source_id]=PCapReader(src_cur.fetchone()[0])
capture_stamp, pkt_data = pcap_source_files[source_id][frame_number]
read_dlt=pcap_source_files[source_id].dlt
if outfile is not None:
outfile_producer.write(pkt_data, capture_stamp)
outfile.write(outfile_producer.read())
if fields:
try: shark=sharks[read_dlt]
except KeyError:
shark=sharks[read_dlt]=SharkReader(fields=fields, encap=read_dlt)
pkt_inf=shark.translate(capture_stamp, pkt_data)
else: pkt_inf={}
if "matched" in pkt_inf: del pkt_inf["matched"]
if "frame.protocols" in pkt_inf:
pkt_inf["frame.protocols"]=pkt_inf["frame.protocols"].split(":")
if last_db_framenum is not None and frame_number>last_db_framenum:
if update_cursor is not None:
db.commit()
update_cursor.close()
update_cursor=None
if copier is None: copier=Copier()
copier.add_data(pkt_inf, **{"source_id": source_id, "frame.number": frame_number,
"frame.time": datetime.datetime.fromtimestamp(capture_stamp)})
elif pkt_inf:
update_cursor=db.cursor()
cols=pkt_inf.keys()
update_sql='update pcap set %s where source_id=%%s and "frame.number"=%%s'%(
",".join(map(lambda f: '"%s"=%%s'%(f,), cols)))
update_args=tuple(map(lambda x: pkt_inf[x], cols))+(source_id, frame_number)
update_cursor.execute(update_sql, update_args)
if update_cursor is not None:
db.commit()
update_cursor.close()
if outfile is not None:
outfile_producer.close()
data=outfile_producer.read()
if data: outfile.write(data)
outfile.close()
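# Example invocations (a sketch; assumes a reachable PostgreSQL database and a local capture):
#   ./pcap2pg.py -r capture.pcap frame.protocols ip.src ip.dst   # import frames and dissect fields
#   ./pcap2pg.py -w all.pcap                                     # re-export every stored frame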
if __name__ == '__main__':
if len(sys.argv)<2:
print >>sys.stderr, """Usage: %s <OPTIONS> <columns..>
Where OPTIONS can be:
-w <file.pcap> write pcap file
-r <file.pcap> read pcap file
-q <query> sql query after WHERE
-l <type> data link type (default=autodetect or 1, cf. "pcap/bpf.h")
"""%sys.argv[0].rsplit("/", 1)[-1]
raise SystemExit(1)
main(sys.argv[1:])
#!/bin/sh
: ${py:=$(dirname "$0")/pcap2pg.py}
: ${wlan_fields:=frame.protocols wlan.da wlan.ra wlan.ta wlan.sa wlan.bssid radiotap.channel.freq radiotap.dbm_antsignal wlan.fc.type_subtype}
: ${eth_fields:=frame.protocols eth.src}
usage() {
cat <<EOF
Usage: ${0##*/} <mode> [<mode_args>]
where mode is
wlan [<pcap_file>]
ip
EOF
}
mode="$1"
test -n "$mode" || { usage >&2; exit 1; }
shift
set -e -x
case "$mode" in
wlan)
pcap="$1"
test -z "$pcap" || python "$py" -r "$pcap" $wlan_fields
;;
ip)
python "$py" -q "'{ip}'<@\"frame.protocols\" and \"ip.src\" is null" ip.src ip.dst ip.proto
;;
esac
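# Example invocations (sketch; assumes this wrapper is saved as pcap2pg.sh next to pcap2pg.py):
#   ./pcap2pg.sh wlan capture.pcap   # import an 802.11 capture with wlan/radiotap fields
#   ./pcap2pg.sh ip                  # backfill ip.* columns for rows still missing them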
#!/usr/bin/python
# License: Public Domain
import os, sys
import re, struct, json, random, datetime, logging
import subprocess, signal
import socket, ssl, select
import cStringIO as StringIO
import warnings
try:
import gzip,lzma
except ImportError as e:
print >>sys.stderr, "Error importing gzip,lzma:",`e`
class EndOfData(EOFError): pass
def read_fcntl(fd_r, size=8192):
import fcntl, errno
if not isinstance(fd_r, int): fd_r=fd_r.fileno()
old_flags=fcntl.fcntl(fd_r, fcntl.F_GETFL)
fcntl.fcntl(fd_r, fcntl.F_SETFL, old_flags|os.O_NONBLOCK)
buf=[]
while size>0:
try:
data=os.read(fd_r, size)
if not data: break
buf.append(data)
size-=len(data)
except OSError as e:
if buf or e.errno==errno.EWOULDBLOCK: break
else: raise
fcntl.fcntl(fd_r, fcntl.F_SETFL, old_flags)
return "".join(buf)
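# Example (sketch): drain whatever is currently readable from a pipe without blocking
#   proc=subprocess.Popen(["cat"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
#   proc.stdin.write("hello\n"); proc.stdin.flush()
#   print `read_fcntl(proc.stdout)`   # -> 'hello\n' (or '' if nothing has arrived yet)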
def iter_states(sm, ds="", buf=None, state="_start", start=None, strip=None, method="search"):
next_states=sm[state][0] if start is None else start
ds_iter=iter(ds)
if buf is None:
buf=ds_iter.next()
while buf:
matches=[]
for s,tests in map(lambda s: (s,sm[s][1]), next_states+(["_anytime"] if "_anytime" in sm else [])):
try:
if callable(getattr(tests, method)): tests=[tests]
except AttributeError: pass
for test in tests:
match=getattr(test, method)(buf)
if match:
matches.append((s, match))
if matches:
state,match=reduce(lambda a,b: b if b[1].start()<a[1].start() else a, matches)
_next=(yield (state,match,buf[:match.start()]))
buf=buf[match.end():] if strip is None else buf[match.end():].lstrip(strip)
if _next is None: _next=sm[state][0]
if _next is not None: next_states=_next
if not matches or not buf:
try: buf=buf+ds_iter.next()
except StopIteration:
try: sm=sm["_next"]
except KeyError:
break
else:
next_states=sm[start][0]
if not next_states: break
if buf: yield None, (state, next_states), buf
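# Example (a hedged sketch of the state-machine dict shape iter_states expects:
# sm[state] = (default_next_states, test_or_tests); ds is an iterable of data chunks):
#   sm={"_start": (["num"], None),
#       "num": (["num"], re.compile(r"\d+"))}
#   for state, match, skipped in iter_states(sm, ds=["foo 1 bar 22 "]):
#       if state is None: break          # final yield carries trailing unmatched data
#       print state, match.group()       # -> num 1, then num 22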
class PipeSock(object):
_timeout=None
debug=False
use_fcntl=True
def __init__(self, pipe, out_pipe=None, **attr):
self.pipe=pipe
if out_pipe is None: out_pipe=pipe
self.out_pipe=out_pipe
for k in attr: setattr(self, k, attr[k])
def settimeout(self, tmout): self._timeout=tmout
def gettimeout(self): return self._timeout
def recv(self, sz):
if self.use_fcntl: return read_fcntl(self.pipe, sz)
if select.select([self.pipe], [], [], self._timeout)[0]:
d=self.pipe.read(1)
if d=="": return ""
else: raise socket.timeout()
data=[d]
sz=sz-1
while sz>0 and select.select([self.pipe], [], [], 0)[0]:
d=self.pipe.read(1)
if d=="": break
sz=sz-1
data.append(d)
if self.debug: print >>sys.stderr, "<", `"".join(data)`
return "".join(data)
def send(self, data):
if self.debug: print >>sys.stderr, ">", `data`
return self.out_pipe.write(data)
class SqlCondition(object):
def __init__(self, cond=None, cond_join_word=" AND ", **args):
self.cond=cond or []
self.args=args
self.cond_join_word=cond_join_word
def __str__(self):
return self.cond_join_word.join(map(lambda s: "(%s)"%s, self.cond))
def where(self):
if not self.cond: return ""
return " WHERE %s"%(self,)
def append(self, cond, **kwargs):
self.cond.append(cond)
for k in kwargs: self.args[k]=kwargs[k]
class Byte(int): pass
class Int(int): pass
re_pattern_cls=type(re.compile(""))
def sock_token(sock, token, buf="", include_token=True):
if isinstance(token,(int,long)):
tst=lambda b: len(b)>=token
spl=lambda b: (b[:token],b[token:])
rcv_size=lambda b: token-len(buf)
elif isinstance(token, re_pattern_cls):
def tst(b):
match=token.search(b)
return match is not None
def spl(b):
match=token.search(b)
return ((b[:match.end() if include_token else match.start()], match), b[match.end():])
rcv_size=lambda b: 8192
else:
tst=lambda b: token in b
rcv_size=lambda b: 8192
strip_len=0 if include_token else len(token)
def spl(buf):
idx=buf.index(token)+len(token)
return buf[:idx-strip_len],buf[idx:]
while True:
if tst(buf): break
data=sock.recv(rcv_size(buf))
if data=="": raise EOFError("Socket closed before token (%s) %r"%(type(token).__name__,token), buf)
else: buf+=data
return spl(buf)
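# Example (sketch; works on any object with a recv() method; the token may be a byte
# count, a delimiter string, or a compiled regex):
#   line, rest = sock_token(sock, "\r\n", include_token=False)  # one CRLF-terminated line
#   body, rest = sock_token(sock, 128, buf=rest)                # then exactly 128 bytes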
def _std_dyn_init(self, *args, **kwargs):
for k,v in kwargs.iteritems(): setattr(self, k, v)
for idx,v in enumerate(args): setattr(self, self._init_args[idx], v)
def _std_init(self, *args, **attrs):
for idx,v in enumerate(args): setattr(self, self._init_args[idx], v)
for k in attrs: setattr(self, k, attrs[k])
def charrange(start,end):
return ''.join(map(chr,range(ord(start),ord(end)+1)))
def randomchars(size,alphabet=charrange('a','z')):
return ''.join(map(lambda x: random.sample(alphabet, 1)[0], range(size)))
def shrtn(s, maxsize=15, use_str=False):
if not isinstance(s,(str,unicode)): return repr(s)
return "%s%s"%((str if use_str else repr)(s[:maxsize]), ".." if len(s)>maxsize else "")
urlunquote_re=re.compile(r'%(?:u([0-9a-f]{4})|([0-9a-f]{2}))', re.I)
def urlunquote_func(m):
s=m.group(1)
return chr(int(m.group(2),16)) if s is None else unichr(int(s, 16)).encode("utf8")
def urlunquote(s): return urlunquote_re.sub(urlunquote_func, s)
class HexDump(object):
_init_args=("data",)
__init__=_std_dyn_init
cols=80
bpl=16
def __iter__(self):
bpl=self.bpl
data=self.data
for ofs in range(0, len(data), bpl):
line_data=data[ofs:ofs+bpl]
yield (ofs, " ".join(map(lambda x: "%02x"%(ord(x)), line_data)), "".join(map(lambda x: x if 0x20<=ord(x)<0x7f else ".", line_data)))
def __str__(self):
return "\n".join(map(lambda x: "%08x %s %s"%x, self))
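# Example:
#   print HexDump("\x00\x01ABC")
#   # 00000000 00 01 41 42 43 ..ABC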
import code
class GtkDebug(object):
def __init__(self, container):
# noinspection PyUnresolvedReferences
from gi.repository import Gtk # @UnresolvedImport
self.entry=Gtk.Entry()
container.pack_start(self.entry, False, True, 0)
self.entry.connect("activate", self.on_entry_activate)
self.interpreter=code.InteractiveInterpreter()
self.entry.show()
def on_entry_activate(self, entry):
self.interpreter.runsource(entry.get_text())
class CharGen(object):
__slots__=['alphabet','index','maxlen','maxindex','result_converter']
def __init__(self,alphabet=charrange('a','z'), maxlen=None, maxindex=None):
self.alphabet=alphabet
self.index=1
self.maxlen=maxlen
self.maxindex=maxindex
self.result_converter=self.join_str
def __iter__(self):
while True:
y=self.get_value_by_index(self.index)
if self.maxlen is not None and len(y)>self.maxlen: break
if self.maxindex is not None and self.index>self.maxindex: break
yield y
self.index+=1
def reset(self): self.index=1  # indexes are 1-based (see get_value_by_index)
def get_value(self): return self.get_value_by_index(self.index)
value=property(get_value)
@staticmethod
def join_str(val): return ''.join(val)
def get_value_by_index(self,i):
if i<1: raise ValueError,"Index must be a positive integer"
div=i
ret=[]
while True:
div=div-1
div,mod=divmod(div,len(self.alphabet))
ret.insert(0,self.alphabet[mod])
if div==0: break
return self.result_converter(ret)
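# Example: spreadsheet-style column naming (a, b, .., z, aa, ab, ..)
#   print list(CharGen(maxindex=28))[-3:]   # -> ['z', 'aa', 'ab']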
def dir2dict(path, fnpat=("*",), hidden=False, d=None):
import fnmatch
if isinstance(fnpat, basestring): fnpat=[fnpat]
if d is None: d={}
orig_path=path.split(os.path.sep)
def get_fileinfo(fname):
st=os.stat(fname)
return dict(size=st.st_size)
def clean_empty(dr):
if "files" in dr and not dr["files"]: del dr["files"]
if "dirs" in dr:
for n in dr["dirs"].keys():
clean_empty(dr["dirs"][n])
if not dr["dirs"][n]: del dr["dirs"][n]
if not dr["dirs"]: del dr["dirs"]
for dirpath, dirnames, filenames in os.walk(path):
if not hidden: dirnames[:]=filter(lambda n: not n.startswith("."), dirnames)
path_comp=dirpath.split(os.path.sep)[len(orig_path):]
d1=reduce(lambda a, b: a["dirs"][b], path_comp, d)
matching_files=filter(lambda fn: any(map(lambda pat: fnmatch.fnmatch(fn, pat), fnpat)), filenames)
d1.setdefault("files", {}).update(map(lambda fn: (fn, get_fileinfo(os.path.join(dirpath, fn))), matching_files))
d1.setdefault("dirs", {}).update(map(lambda n: (n, {}), dirnames))
clean_empty(d)
return d
def dbg_func(func):
def gen(*args, **kwargs):
print >>sys.stderr, "%s(%s%s%s):"%(func.__name__, ", ".join(map(repr, args)), ", " if args and kwargs else "", ", ".join(map(lambda (k,v): "%s=%r"%(k,v), kwargs.items()))),
sys.stderr.flush()
ret=func(*args,**kwargs)
print >>sys.stderr, `ret`
return ret
gen.orig_func=func
return gen
def undbg_func(func):
return func.orig_func
class StreamHandler(object):
debug=False
read_buf_size=8192
def __init__(self, stream=None, endian=">", **attrs):
self.endian=endian
self.int_fmt=struct.Struct(endian+"I")
if stream is None: stream=StringIO.StringIO()
elif isinstance(stream, str): stream=StringIO.StringIO(stream)
self.stream=stream
self.configure_rw(stream)
for k in attrs: setattr(self, k, attrs[k])
def configure_rw(self, stream):
if hasattr(stream, "fileno"): self.read, self.write=self._read_os, self._write_os
else:
if hasattr(stream, "recv"): self.read=self._read_recv
elif hasattr(stream, "read"): self.read=self._read_read
if hasattr(stream, "send"): self.write=self._write_send
elif hasattr(stream, "write"): self.write=self._write_write
def _read_os(self, *args, **kwargs): return os.read(self.stream.fileno(), *args, **kwargs)
def _write_os(self, *args, **kwargs): return os.write(self.stream.fileno(), *args, **kwargs)
def _read_recv(self, *args, **kwargs): return self.stream.recv(*args, **kwargs)
def _write_send(self, *args, **kwargs): return self.stream.send(*args, **kwargs)
def _read_read(self, *args, **kwargs): return self.stream.read(*args, **kwargs)
def _write_write(self, *args, **kwargs): return self.stream.write(*args, **kwargs)
def read_search(self, cond_list, buf="", eol=True):
no_match=False
while True:
if self.debug:
print >>sys.stderr,"%s: %r -> [%s]?"%(self.__class__.__name__, buf, ",".join(map(lambda x: getattr(x, "name", repr(x)), cond_list))),
sys.stderr.flush()
if not buf or no_match:
data=self.read(self.read_buf_size)
if self.debug: print >>sys.stderr, "data: %r (%d bytes)"%(data, len(data))
if data=="": raise EndOfData(buf)
buf=buf+data
cur_match=None
no_match=True
for idx,cond in enumerate(cond_list):
match=cond.search(buf)
if match and (not cur_match or match.start()<cur_match.start()):
cur_match,cur_cond,cur_idx=match,cond,idx
if cur_match:
skipped=buf[:cur_match.start()]  # capture the skipped prefix before truncating buf
buf=buf[cur_match.end():]
if not eol: buf=buf.rstrip("\n").rstrip("\r")
return SearchMatch(index=cur_idx, pat=cur_cond, match=cur_match, buf=buf, skipped=skipped)
def read_fully(self, sz):
ret=[]
while sz:
if self.debug:
print >>sys.stderr,"reading from %r %d bytes"%(self.stream, sz),
sys.stderr.flush()
data=self.read(sz)
if self.debug: print >>sys.stderr,"got: %r (%d bytes)"%(data, len(data))
if data=="": raise EndOfData("".join(ret))
sz=sz-len(data)
ret.append(data)
return "".join(ret)
def write_fully(self, data):
if self.debug:
print >>sys.stderr,"writing to %r data: %r (%d bytes)"%(self.stream, data, len(data)),
sys.stderr.flush()
while data:
bytes_written=self.write(data)
if bytes_written is None: break
if self.debug:
print >>sys.stderr, "%d.."%(bytes_written),
sys.stderr.flush()
data=data[bytes_written:]
if self.debug: print >>sys.stderr,"done"
def read_byte(self): return ord(self.read_fully(1))
def read_int(self): return self.int_fmt.unpack(self.read_fully(4))[0]
def read_pstring(self): return self.read_fully(self.read_int())
def read_intblob(self): return reduce(lambda a,b: (a<<8)+ord(b), self.read_pstring(), 0)
def write_byte(self, data): return self.write_fully(chr(data))
def write_int(self, data): return self.write_fully(self.int_fmt.pack(data))
def write_pstring(self, data): return self.write_fully(self.int_fmt.pack(len(data))+data)
def write_seq(self, *seq):
for elem in seq:
if isinstance(elem, Byte): self.write_byte(elem)
elif isinstance(elem, Int): self.write_int(elem)
elif isinstance(elem, str): self.write_pstring(elem)
else: raise ValueError("Don't know what to do with sequence type %r"%(type(elem).__name__))
def getvalue(self, *args, **kwargs): return self.stream.getvalue(*args, **kwargs)
def read_hex():
ret=[]
while True:
data=raw_input("hex> ")
if not data: break
ret.append(data.strip().split(" ")[0].replace(" ","").decode("hex"))
return "".join(ret)
def blob2long(blob): return reduce(lambda a,b: (a<<8)+ord(b), blob, 0)
def long2blob(val):
ret=[]
while True:
ret.append(chr(val&0xff))
val=val>>8
if not val: break
return "".join(reversed(ret))
def cached_property_prop(fn):
cached_attr_name="__cached__%s"%(fn.__name__,)
from functools import wraps
@wraps(fn)
def getter(self):
try: return getattr(self, cached_attr_name)
except AttributeError: value=fn(self)
setattr(self, cached_attr_name, value)
return value
return property(getter,
lambda self,value: setattr(self, cached_attr_name, value),
lambda self: delattr(self, cached_attr_name))
class cached_property_obj(object):
def __init__(self, fget):
self.__name__=fget.__name__
self.__doc__=fget.__doc__
self.__module__=fget.__module__
self.fget=fget
def __get__(self, instance, owner):
if instance is None: return self
try: return instance._property_cache[self.__name__]
except AttributeError:
instance._property_cache={}
except KeyError: pass
v=instance._property_cache[self.__name__]=self.fget(instance)
return v
def __set__(self, instance, value):
try: instance._property_cache[self.__name__]=value
except AttributeError: instance._property_cache={self.__name__:value}
def __delete__(self, instance):
try: del instance._property_cache[self.__name__]
except (AttributeError, KeyError): pass
cached_property=cached_property_obj
class cached_property_dict(object):
def __init__(self, fget):
self.fget=fget
self.__name__ = fget.__name__
def __get__(self, instance, owner):
if instance is None: return self
try: return instance.__dict__[self.__name__]
except KeyError: pass
ret=instance.__dict__[self.__name__]=self.fget(instance)
return ret
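# Example: all three cached_property variants memoize on first access
#   class Conn(object):
#       @cached_property
#       def db(self): return psycopg2.connect("")
#   c=Conn()
#   c.db       # connects once; subsequent reads return the cached connection
#   del c.db   # clears the cache so the next access reconnects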
def repr_provider(func):
"""
Usage:
class Something(...):
@repr_provider
def __repr__(self): return 'just instance-specific info' """
from functools import wraps
@wraps(func)
def gen(self):
repstr=func(self)
return "<%s.%s %sat 0x%x>"%(self.__class__.__module__,
self.__class__.__name__,
"%s "%(repstr,) if repstr else "",
id(self))
return gen
def exc_hook(exc_type, exc_val, tb):
"Usage: sys.excepthook=exc_hook"
exc_hook.prev_hook(exc_type, exc_val, tb)
if tb is not None:
import pdb
pdb.post_mortem(tb)
exc_hook.prev_hook=sys.excepthook
def colorize_log():
_log_colors=dict(map(
lambda (k,v): (k, "\033[%sm"%(v,)),
dict(blue="34", red="1;31", yellow="33", reset="1;0").items()))
logging.addLevelName(logging.INFO, "{blue}{level}{reset}".format(
level=logging.getLevelName(logging.INFO), **_log_colors))
logging.addLevelName(logging.WARNING, "{yellow}{level}{reset}".format(
level=logging.getLevelName(logging.WARNING), **_log_colors))
logging.addLevelName(logging.ERROR, "{red}{level}{reset}".format(
level=logging.getLevelName(logging.ERROR), **_log_colors))
class Re(object):
__init_args=("pat", "flags", "name")
flags=0
name=None
def __init__(self, *args, **attr):
for idx,v in enumerate(args): setattr(self, self.__init_args[idx], v)
for k in attr: setattr(self, k, attr[k])
self.rex=re.compile(self.pat, self.flags)
def __repr__(self): return "<Re r%r @%x>"%(self.pat, id(self))
def search(self, *args, **kwargs):
return SearchMatch(pat=self, match=self.rex.search(*args, **kwargs))
escdq_re=r'(?:[^"\\]|(?:\\\\)*\\"|\\[^"])*'
class SearchMatch(object):
match=None
def __init__(self, **attr):
for k in attr: setattr(self, k, attr[k])
def __getitem__(self, key): return self.match.group(key)
def __nonzero__(self): return False if self.match is None else True
@property
def name(self): return self.pat.name
@repr_provider
def __repr__(self):
return "%s %r, groups=%r"%(self.name or "",
self.match.group() if self.match else None,
self.match.groups() if self.match else None)
def start(self, *args, **kwargs): return self.match.start(*args, **kwargs)
def group(self, *args, **kwargs): return self.match.group(*args, **kwargs)
def end(self, *args, **kwargs): return self.match.end(*args, **kwargs)
def groupdict(self, *args, **kwargs): return self.match.groupdict(*args, **kwargs)
class StructProperty(object):
defaults=dict()
class Value(object):
def __init__(self, prop, value):
self._prop=prop
if value is None: return
unpacked_value=prop.struct.unpack_from(value[0], value[1]) \
if isinstance(value, tuple) else prop.struct.unpack(value)
for idx, name in enumerate(self._prop.names):
setattr(self, name, unpacked_value[idx])
def __str__(self):
return self._prop.struct.pack(*map(lambda n: getattr(self, n), self._prop.names))
def __len__(self):
return len(self._prop)
@repr_provider
def __repr__(self):
return ", ".join(map(lambda n: "%s=%r"%(n, getattr(self, n)), self._prop.names))
class IncompleteValue(Value):
# noinspection PyMissingConstructor
def __init__(self, prop, value):
self._prop=prop
self._partial_value=value
def __len__(self):
return len(self._partial_value)
def __add__(self, other):
full_len=len(self._partial_value)+len(other)
struct_size=len(self._prop)
if full_len==struct_size:
return self._prop(self._partial_value+other)
elif full_len>struct_size:
raise struct.error("unpack requires a string argument of length %d"%(struct_size,))
else:
return self.__class__(self._prop, self._partial_value+other)
def __neg__(self):
return len(self._prop)-len(self)
def __len__(self): return self.size
def __init__(self, fmt=None, *names, **defaults):
if fmt is not None:
self.fmt=fmt
self.names=names
self.struct=struct.Struct(self.fmt)
self.size=self.struct.size
self._prop_name="__prop_%d"%(id(self))
self.defaults=dict(self.defaults, **defaults)
def __call__(self, *args, **kwargs):
if args and not kwargs:
value=args[0]
if isinstance(value, self.Value): return value
if value is not None and not isinstance(value, tuple) and len(value)<len(self):
return self.IncompleteValue(self, value)
return self.Value(self, value)
else:
ret=self.Value(self, None)
for k in kwargs: setattr(ret, k, kwargs[k])
for k in self.defaults:
if k not in kwargs: setattr(ret, k, self.defaults[k])
return ret
# noinspection PyUnusedLocal
def __get__(self, instance, owner):
if instance is None: return self
try: return instance.__dict__[self._prop_name]
except KeyError: return None
def __set__(self, instance, value):
if value is None:
if self._prop_name in instance.__dict__:
self.__delete__(instance)
else: instance.__dict__[self._prop_name]=self(value)
def __delete__(self, instance):
del instance.__dict__[self._prop_name]
escape_binary_re=re.compile(r'[\x00-\x0f\x11-\x1f\x7f-\xff]+')
def psql_copy_escape(value, null="\\N"):
if value is None: return null
if isinstance(value, list):
return "{%s}"%(",".join(map(lambda x: psql_copy_escape(x, null="NULL"), value)))
if isinstance(value, datetime.datetime): return value.isoformat()
if isinstance(value, (int,long)): return str(value)
if isinstance(value, float): return "%.32e"%value
return escape_binary_re.sub(lambda m: "".join(map(lambda c: "\\x%02x"%(ord(c)), m.group(0))), value.replace("\\","\\\\")).replace("\n","\\n")
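# Examples:
#   psql_copy_escape(None)          # -> '\\N'  (the COPY NULL marker)
#   psql_copy_escape([1, None, 2])  # -> '{1,NULL,2}'
#   psql_copy_escape("a\tb")        # -> 'a\\x09b'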
class PSQLEscape(object):
bin_re=re.compile(r'[\x00-\x0f\x11-\x1f\x7f-\xff]+')
def __init__(self):
self.escape_funcs={}
def escape(self, value, **args):
try: return self.escape_funcs[type(value)](value, **args)
except KeyError:
for func in filter(lambda f: hasattr(f,"_clss"), map(lambda n: getattr(self,n), dir(self))):
for cls in func._clss:
if isinstance(value, cls):
self.escape_funcs[type(value)]=func
return func(value, **args)
raise NotImplementedError("Cannot escape type", type(value))
def escape_null(self, value, null="\\N", **args): return null
escape_null._clss=[type(None)]
def escape_list(self, value, **args):
return "{%s}"%(",".join(map(lambda x: self.escape(x, null="NULL"), value)))
escape_list._clss=[list,tuple,set]
def escape_datetime(self, value, **args): return value.isoformat()
escape_datetime._clss=[datetime.datetime]
def escape_float(self, value, **args): return "%.32e"%value
escape_float._clss=[float]
def escape_str(self, value, **args):
return self.bin_re.sub(
lambda m: "".join(map(lambda c: "\\x%02x"%(ord(c)), m.group(0))),
value.replace("\\","\\\\")
).replace("\n","\\n")
escape_str._clss=[str,unicode,buffer]
def escape_int(self, value, **args): return str(value)
escape_int._clss=[int,float,long]
class PSQLCopierMixIn:
commit_evt_count=10000
extra_opts=[]
dsn = ""
sql_select_pkey = """
SELECT column_name FROM information_schema.constraint_column_usage
WHERE constraint_name = (
SELECT constraint_name
FROM information_schema.table_constraints
WHERE constraint_type = 'PRIMARY KEY' AND table_name = %s
)
"""
@property
def sql_table(self): raise NotImplementedError("sql_table needs to be defined")
def start_copier(self):
self.psql_copier=subprocess.Popen(["psql","-c","COPY %s (%s) FROM STDIN"%(self.sql_table, ",".join(map(lambda x: '"%s"'%x,self.cols)))]+self.extra_opts, stdin=subprocess.PIPE, close_fds=True)
self.uncommitted_count=0
def stop_copier(self):
try: proc=self.psql_copier
except AttributeError: return
proc.stdin.close()
exit_status=proc.wait()
del self.psql_copier
if exit_status:
raise RuntimeError("stopping psql_copier failed", exit_status, proc)
def pg_typeof(self, obj):
with self.db.cursor() as cursor:
cursor.execute("select pg_typeof(%s)", (obj,))
coltype=cursor.fetchone()[0]
if coltype=="unknown": coltype="text"
return coltype
def restart_copier(self):
with self.copier_lock:
self.stop_copier()
self.start_copier()
@cached_property
def copier_lock(self):
from threading import Lock
return Lock()
@cached_property
def inf_template(self): return {}
@cached_property
def cols(self): return filter(lambda n: n not in self.auto_cols, self.get_existing_cols())
@cached_property
def db(self): return psycopg2.connect(self.dsn)
@cached_property
def cursor(self):
warnings.warn("Not closing this cursor can create a deadlock")
return self.db.cursor()
@cached_property
def auto_cols(self):
with self.db.cursor() as cursor:
cursor.execute(self.sql_select_pkey, (self.sql_table,))
ret=cursor.fetchone()
return set(ret) if ret else set()
@cached_property
def psql_escape_func(self): return PSQLEscape().escape
def get_existing_cols(self):
with self.db.cursor() as cursor:
cursor.execute("select * from \"%s\" where 1=0"%(self.sql_table))
cursor.fetchall()
return map(lambda x: x[0], cursor.description)
def coldict_to_vals(self, inf, skip_cols={}):
vals=map(lambda n: inf.pop(n, None), filter(lambda c: c not in skip_cols, self.cols))
if inf:
print >>sys.stderr, "New cols:", inf.keys()
with self.copier_lock:
self.stop_copier()
new_cols=inf.keys()
existing_cols=set(self.get_existing_cols())
with self.db.cursor() as cursor:
for colname in new_cols:
if colname not in existing_cols:
coltype=self.pg_typeof(inf[colname])
cursor.execute('alter table "%s" add "%s" %s'%(self.sql_table, colname, coltype))
self.cols.append(colname)
vals.append(inf.pop(colname))
self.db.commit()
self.start_copier()
return vals
def add_data(self, inf, **extras):
vals=self.coldict_to_vals(dict(self.inf_template, **dict(inf, **extras)))
output="\t".join(map(self.psql_escape_func, vals))
with self.copier_lock:
try: self.psql_copier
except AttributeError: self.start_copier()
self.psql_copier.stdin.write(output)
self.psql_copier.stdin.write("\n")
self.uncommitted_count+=1
if self.uncommitted_count>=self.commit_evt_count:
self.restart_copier()
def __del__(self):
self.stop_copier()
class PSQLPipeCopierMixIn(PSQLCopierMixIn):
class _PipeCopier(object):
def __init__(self, table, columns, dsn=""):
(rd, wr)=os.pipe()
self.pid=os.fork()
if self.pid:
self.stdin=os.fdopen(wr, "w")
os.close(rd)
else:
db=psycopg2.connect(dsn)
cur=db.cursor()
os.close(wr)
cur.copy_from(os.fdopen(rd), table, columns=map(lambda x: '"%s"'%(x,), columns))
db.commit()
os._exit(0)  # hard-exit the forked child so the parent's cleanup handlers don't run twice
def wait(self):
(pid, exit_st)=os.waitpid(self.pid, 0)
return exit_st>>8
def start_copier(self):
self.psql_copier=self._PipeCopier(self.sql_table, self.cols, self.dsn)
self.uncommitted_count=0
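# Example (sketch; assumes a table "events" whose columns match the dict keys):
#   class EventCopier(PSQLPipeCopierMixIn, object):
#       sql_table="events"
#   copier=EventCopier()
#   copier.add_data({"name": "probe", "count": 1})
#   copier.stop_copier()   # flush and wait for the COPY child process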
class SocksSocket(object):
socks_server=None
socks_timeout=15
debug=False
def __init__(self,*sock_args,**kwargs):
if "socks_server" in kwargs: self.socks_server=kwargs.pop("socks_server")
if "socks_timeout" in kwargs: self.socks_timeout=kwargs.pop("socks_timeout")
if "socks" in kwargs: self.socks=kwargs.pop("socks")
else:
self.socks=socket.socket()
self.socks.settimeout(self.socks_timeout)
self.socks_log=[]
self.socks.connect(self.socks_server)
self.socks_send("\x05\x01\x00")
self.supported_methods=self.socks_recv()
def socks_send(self,data):
self.socks_log.append(("SEND",data))
if self.debug:
print >>sys.stderr, "Sending: %r"%(data,),
sys.stdout.flush()
self.socks.send(data)
if self.debug: print >>sys.stderr, "Ok."
def socks_recv(self):
if self.debug:
print >>sys.stderr, "Receiving: ..",
sys.stdout.flush()
data=self.socks.recv(1500)
if self.debug: print >>sys.stderr, "OK: %r"%(data)
self.socks_log.append(("RECV",data))
return data
def connect(self,address):
ip,port=address
if ip.count(".")==3 and ip.replace(".","").isdigit() and max(map(int,ip.split(".")))<256:
ip_data="".join(map(chr,map(int,ip.split("."))))
ip_type='\x01'
else:
ip_data=ip
ip_type='\x03'+chr(len(ip))
self.socks_send("\x05\x01\x00"+ip_type+ip_data+struct.pack(">H",port))
self.connect_reply=self.socks_recv()
if self.connect_reply=='':
import errno
raise socket.error,(errno.ECONNREFUSED,"Connection refused")
def __getattr__(self,name):
if not name.startswith("_"):
return getattr(self.socks,name)
raise AttributeError,"Invalid attribute for SocksSocket: %r"%(name)
def open_filedesc(fname, mode="rb"):
"""Open a file based on a descriptor string
fname -- descriptor string:
a plain filename, system:<command>, tcp:ip:port[,options],
tcp-l:port[,options] or udp-l:port[,options]
options: ssl, ca, cert, key, ip (for tcp-l/udp-l)
if any of cert, ca or ssl is given, an SSL connection is attempted
if ca is given, the server certificate chain is verified as well
mode -- file mode (default "rb")
"""
params={}
if isinstance(fname, file):
return fname
if fname.startswith("system:"):
proc=subprocess.Popen(fname.split(":",1)[1], shell=True, stdout=subprocess.PIPE, close_fds=True)
return proc.stdout
if fname.startswith("tcp:"):
ip,port=fname.split(":", 2)[1:]
if "," in port:
port,params=port.split(",",1)
params=dict(map(lambda x: x if len(x)==2 else (x[0], True), map(lambda x: tuple(x.split("=",1)), params.split(","))))
sock=socket.socket()
sock.connect((ip,int(port)))
if params.get("cert") or params.get("ca") or params.get("ssl"):
return ssl.wrap_socket(sock, keyfile=params.get("key"), certfile=params.get("cert"), ca_certs=params.get("ca"), cert_reqs=ssl.CERT_REQUIRED if params.get("ca") else ssl.CERT_NONE).makefile(mode)
return sock.makefile(mode)
if fname.startswith("tcp-l:"):
port=fname.split(":", 1)[1]
if "," in port:
port,params=port.split(",",1)
params=dict(map(lambda x: x if len(x)==2 else (x[0], True), map(lambda x: tuple(x.split("=",1)), params.split(","))))
sock=socket.socket()
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((params.get("ip", "0.0.0.0"), int(port)))
sock.listen(1)
clsock,claddr=sock.accept()
logging.info("Accepted connection from: %r", claddr)
sock.close()
if params.get("cert") or params.get("ca") or params.get("ssl"):
return ssl.wrap_socket(clsock, server_side=True, keyfile=params.get("key"), certfile=params.get("cert"), ca_certs=params.get("ca"), cert_reqs=ssl.CERT_REQUIRED if params.get("ca") else ssl.CERT_NONE).makefile(mode)
return clsock.makefile(mode)
if fname.startswith("udp-l:"):
port=fname.split(":", 1)[1]
if "," in port:
port,params=port.split(",",1)
params=dict(map(lambda x: x if len(x)==2 else (x[0], True), map(lambda x: tuple(x.split("=",1)), params.split(","))))
sock=socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((params.get("ip", "0.0.0.0"), int(port)))
if params.get("cert") or params.get("ca") or params.get("ssl"):
import dtls
dtls.do_patch()
return ssl.wrap_socket(sock, server_side=True, keyfile=params.get("key"), certfile=params.get("cert"), ca_certs=params.get("ca"), cert_reqs=ssl.CERT_REQUIRED if params.get("ca") else ssl.CERT_NONE).makefile(mode)
return sock.makefile(mode)
file_ext=fname[fname.rfind(".")+1:]
if file_ext=="gz":
import gzip
return gzip.open(fname, mode)
elif file_ext in ("lzma","xz"):
import lzma
return lzma.LZMAFile(fname, mode)
return open(fname, mode)
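# Examples (illustrative descriptor strings):
#   open_filedesc("capture.pcap.gz")            # transparent gzip decompression
#   open_filedesc("system:tcpdump -w - -c 10")  # read a command's stdout
#   open_filedesc("tcp:10.0.0.1:9999,ssl")      # TLS client connection
#   open_filedesc("tcp-l:9999,ip=127.0.0.1")    # wait for a single TCP client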
def func_has_arg(func, argname):
if argname in inspect.getargspec(func)[0]:
return True
closure=func.func_closure if hasattr(func, "func_closure") else func.__closure__
if closure and any(map(lambda cell: argname in inspect.getargspec(cell.cell_contents)[0], closure)):
return True
return False
import inspect, types
try: import psycopg2.pool, psycopg2.extras
except ImportError:
print >>sys.stderr, "no psycopg2, postgres unavailable"
class PGConnBottlePlugin(object):
statement_timeout=30
minconn=0
maxconn=10
keyword="cursor"
dsn=""
_init_args=("dsn",)
__init__=_std_dyn_init
def setup(self, app):
app.conn_pool=psycopg2.pool.ThreadedConnectionPool(self.minconn, self.maxconn, self.dsn)
app.sql_statement_timeout=self.statement_timeout
def _raise_err(self, e):
if isinstance(e, (psycopg2.Error, KeyError)):
from bottle import HTTPResponse
raise HTTPResponse(to_json(dict(error=dict(type=type(e).__name__, message=str(e)))), status=400, **{"Content-Type": "application/json"})
else: raise
def apply(self, callback, context):
if not func_has_arg(callback, self.keyword):
return callback
from functools import wraps
@wraps(callback)
def wrapper(*args, **kwargs):
conn_pool=context["app"].conn_pool
cursor=conn_pool.getconn().cursor(cursor_factory=psycopg2.extras.DictCursor)
if self.statement_timeout:
cursor.execute("SET statement_timeout TO %s;"%(int(context["app"].sql_statement_timeout*1000)))
kwargs[self.keyword]=cursor
try: ret=callback(*args, **kwargs)
except Exception,e:
cursor.close()
conn_pool.putconn(cursor.connection)
self._raise_err(e)
if isinstance(ret, types.GeneratorType):
def result_generator(source, cursor, conn_pool):
while True:
try: ret_val=source.next()
except StopIteration: break
except Exception,e:
cursor.close()
conn_pool.putconn(cursor.connection)
self._raise_err(e)
yield ret_val
cursor.close()
conn_pool.putconn(cursor.connection)
return result_generator(ret, cursor, conn_pool)
else:
cursor.close()
conn_pool.putconn(cursor.connection)
return ret
return wrapper
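# Example (sketch; assumes bottle is installed and PG* env vars point at a database):
#   import bottle
#   app=bottle.Bottle()
#   app.install(PGConnBottlePlugin(""))
#   @app.route("/count")
#   def count(cursor):   # the "cursor" argument is what triggers the plugin
#       cursor.execute("select count(*) from pcap")
#       return to_json(count=cursor.fetchone()[0])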
class PCapReader(object):
pcap=None
cache_size=5
def __init__(self, fname):
self.fname=fname
self.frame_cache={}
self.open_pcap()
def __getitem__(self, key):
try: return self.frame_cache[key]
except KeyError: return self.get_frame(key)
def __iter__(self):
while True:
try: ret=self.pcap.next()
except StopIteration: break
cur_frame=self.cur_frame=self.cur_frame+1
self.frame_cache[cur_frame]=ret
try: del self.frame_cache[cur_frame-self.cache_size]
except KeyError: pass
yield ret
def get_frame(self, nr):
cur_frame,pcap=self.cur_frame,self.pcap
if nr<1: raise KeyError("Frame number must be a positive integer")
if cur_frame>nr:
cur_frame,pcap=0,self.open_pcap()
while cur_frame<(nr-self.cache_size):  # skip ahead, leaving cache_size frames to repopulate the cache
pcap.next()
cur_frame+=1
frame_cache={}
while cur_frame<nr:
try: data=pcap.next()
except StopIteration:
self.cur_frame,self.frame_cache=cur_frame,frame_cache
raise KeyError("End of PCAP")
cur_frame+=1
frame_cache[cur_frame]=data
self.cur_frame,self.frame_cache=cur_frame,frame_cache
return frame_cache[nr]
def open_pcap(self):
if self.pcap is not None: self.pcap.close()
import pcap
self.pcap=pcap.pcap(self.fname.encode("utf-8") if isinstance(self.fname, unicode) else self.fname)
self.dlt=self.pcap.datalink()
self.cur_frame=0
return self.pcap
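# Example (sketch; needs the pypcap module and a readable capture file):
#   rdr=PCapReader("capture.pcap")
#   for stamp, data in rdr:        # sequential read; keeps a small trailing frame cache
#       print stamp, len(str(data))
#   print rdr[3]                   # random access by 1-based frame number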
class PCapProducer(object):
linktype=1 # pcap.DLT_EN10MB
_pipe_opened=False
__init__=_std_dyn_init
@cached_property
def fd_r(self): return self.fd_pipe[0]
@cached_property
def fd_w(self): return self.fd_pipe[1]
@cached_property
def fd_pipe(self):
self._pipe_opened=True
return os.pipe()
@cached_property
def dump_pcap(self):
import pcap
return pcap.pcap(dumpfile="/proc/self/fd/%s"%(self.fd_r), dumptype=self.linktype)
def write(self, data, timestamp):
data=str(data)
self.dump_pcap.dump(data, type("header", (object,), dict(len=len(data), caplen=len(data), sec=int(timestamp), usec=int(timestamp*1e6-int(timestamp)*1e6))))  # 1e6, not 10e6: the usec field holds microseconds
def close(self):
self.dump_pcap.dump_close()
self.closed=True
def read_fcntl(self):
import fcntl,errno
r=self.fd_r
old_flags=fcntl.fcntl(r, fcntl.F_GETFL)
fcntl.fcntl(r, fcntl.F_SETFL, old_flags|os.O_NONBLOCK)
buf=[]
while True:
try: buf.append(os.read(r, 4096))
except OSError,e:
if e.errno==errno.EWOULDBLOCK: break
else: raise
fcntl.fcntl(r, fcntl.F_SETFL, old_flags)
return "".join(buf)
def read_dumb(self):
buf=[]
fd_r=self.fd_r
while select.select([fd_r],[],[], 0)[0]:
buf.append(os.read(fd_r,1))
return "".join(buf)
read=read_fcntl
def __del__(self):
if self._pipe_opened:
os.close(self.fd_w)
os.close(self.fd_r)
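# Example (sketch): build an in-memory pcap byte stream
#   import time
#   prod=PCapProducer(linktype=1)
#   prod.write("\x00"*60, time.time())
#   chunk=prod.read()   # pcap global header plus one record, as raw bytes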
class TSharkReader(object):
def __init__(self, fields, pcap_network=1, extra_args=[]):
self.fields=fields
self.cmd=subprocess.Popen(["tshark", "-l", "-i", "-", "-Tfields"]+reduce(lambda a,b: a+["-e", b], fields, extra_args), close_fds=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# (int)magic, (short)major, (short)minor, (int)thiszone, (int)sigfigs, (int)snaplen, (Int)pcap_network
self.cmd.stdin.write(struct.pack("<I2H4I", 0xa1b2c3d4, 2, 4, 0, 0, 65535, pcap_network))
#thread.start_new_thread(self.et_reader_thread, ())
def write_packet(self, pkt_time, pkt):
# record header: (int)ts_sec, (int)ts_usec, (int)incl_len, (int)orig_len, followed by the raw frame
self.cmd.stdin.write(struct.pack("<4I", int(pkt_time), int(1000000*(pkt_time%1)), len(pkt), len(pkt))+str(pkt))
self.cmd.stdin.flush()
def next(self):
line=self.cmd.stdout.readline().rstrip("\n").split("\t", len(self.fields)-1)
return dict(map(lambda (idx,k): (k,line[idx]), enumerate(self.fields)))
def __iter__(self):
while True:
try: yield self.next()
except IOError: break
def __del__(self):
try: self.cmd
except AttributeError: return
self.cmd.stdin.close()
self.cmd.stdout.close()
self.cmd.terminate()
self.cmd.wait()
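# Example (sketch; requires tshark on PATH; "frame_bytes" is a hypothetical raw ethernet frame):
#   import time
#   rdr=TSharkReader(["frame.len", "ip.src"])
#   rdr.write_packet(time.time(), frame_bytes)
#   print rdr.next()   # {'frame.len': ..., 'ip.src': ...}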
class SharkReader(object):
encap="EN10MB"
fields=[]
extra_args=[]
filter=None
data_parser=None
_proc_running=False
_init_args=("fields",)
__init__=_std_dyn_init
class DataParser(object):
header_re=re.compile(r'(?P<field_num>\d+) (?P<field_fmt>\S+) (?P<base_fmt>\S+) - ')
data_re=re.compile(r'^\d+(?P<fields>(?: \d+=(?:"")?"'+escdq_re+'")*)(?: (?P<matched>1|0))? -$', re.M)
field_re=re.compile(r'(?P<num>\d+)="(?:"")?(?P<value>'+escdq_re+r')"')
def __init__(self, data, fields):
self.data=data
self.fields={}
for m in self.header_re.finditer(data):
num=int(m.group("field_num"))
self.fields[num]={"fmt": m.group("field_fmt"), "base": m.group("base_fmt"), "name": fields[num]}
def parse(self, data):
m=self.data_re.match(data)
ret={}
if m is None: raise ValueError("Cannot parse data", data)
ret["matched"]=m.group("matched")
for fm in self.field_re.finditer(m.group("fields")):
num=int(fm.group("num"))
field_def=self.fields[num]
ret[field_def["name"]]=int(fm.group("value")) if field_def["fmt"] in ("FT_UINT16",) else fm.group("value")
return ret
@cached_property
def pkt_hdr(self): return struct.Struct("<4L")
@cached_property
def proc(self):
self._proc_running=True
return subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
@cached_property
def cmd(self):
return ["rawshark", "-l", "-d", "encap:%s"%self.encap, "-r", "-"]+(["-R", self.filter] if self.filter else [])+self.extra_args+reduce(lambda a,b: a+["-F", b], self.fields, [])
def translate(self, timestamp, data):
if self.data_parser is None:
self.data_parser=self.DataParser(self.proc.stdout.readline(), self.fields)
if not isinstance(data, str): data=str(data)
pcap_frame=self.pkt_hdr.pack(int(timestamp), int(timestamp*1e6-int(timestamp)*1e6), len(data), len(data))+data  # 1e6, not 10e6: microseconds
try:
self.proc.stdin.write(pcap_frame)
shark_ret=self.proc.stdout.readline()
except Exception,e:
exit_st=self.proc.poll()
if exit_st is not None:
errstr=self.proc.stderr.read()
raise RuntimeError(errstr)
raise e
return self.data_parser.parse(shark_ret)
def __del__(self):
if not self._proc_running: return
self.proc.stdin.close()
self.proc.stdout.close()
exit_st=self.proc.poll()
if exit_st is None:
self.proc.terminate()
exit_st=self.proc.wait()
if exit_st and exit_st!=-signal.SIGTERM:
print >>sys.stderr, "Exit status of %s:"%(" ".join(self.cmd)), exit_st
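# Example (sketch; requires rawshark on PATH; "frame_bytes" is a hypothetical raw ethernet frame):
#   import time
#   shark=SharkReader(fields=["ip.src", "ip.dst"], encap="EN10MB")
#   print shark.translate(time.time(), frame_bytes)   # -> {'ip.src': ..., 'ip.dst': ...}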
class StateMachine(object):
debug=False
quiet=False
strip_chars=""
class NeedMoreData(ValueError): pass
class EndOfData(EOFError): pass
class Re(object):
class Match(object):
_init_args=("match", "pattern")
__init__=_std_dyn_init
def __getitem__(self, key):
return self.match.group(key)
def start(self, *args, **kwargs): return self.match.start(*args, **kwargs)
def end(self, *args, **kwargs): return self.match.end(*args, **kwargs)
def group(self, *args, **kwargs): return self.match.group(*args, **kwargs)
flags=0
_init_args=("pattern", "flags")
def __init__(self, *args, **kwargs):
_std_dyn_init(self, *args, **kwargs)
self.regex=re.compile(self.pattern, self.flags)
def search(self, data):
ret=self.regex.search(data)
return None if ret is None else self.Match(ret, self.pattern)
class PatternList(object):
def __init__(self, *patlist):
self.patlist=patlist
def search(self, data):
ret=filter(None, map(lambda pat: pat.search(data), self.patlist))  # drop non-matching patterns before sorting
ret.sort(key=lambda x: x.start())
return None if not ret else ret[0]
@classmethod
def State(cls, patterns, *next_states):
if isinstance(patterns, (list, tuple)):
patterns=cls.PatternList(*patterns)
def func_gen(func):
func.patterns=patterns
func.next_states=next_states
return func
return func_gen
def run(self):
self._state=self.start
self._data=""
self._match=None
i_method=type(self.run)
while True:
if self.debug:
print >>sys.stderr, "executing:", self._state.__name__
try: next_states=self._state() if isinstance(self._state, i_method) else self._state(self)
except self.EndOfData: break
if next_states is None: next_states=self._state.next_states
next_states=map(lambda x: getattr(self, x) if isinstance(x, basestring) else x, next_states)
if self.debug:
print >>sys.stderr, "next states:", map(lambda x: x.__name__, next_states)
if self._data: print >>sys.stderr, "remaining data:", `self._data`
while True:
try: (match_idx, self.match),remaining_data=self._find_match(self._data, map(lambda x: x.patterns, next_states))
except self.NeedMoreData:
self._data=self._data+self._recv_data()
else:
if not self.quiet and self.match.start():
print >>sys.stderr, "Skipped data:",`self._data[:self.match.start()]`
self._data=remaining_data.lstrip(self.strip_chars) if self.strip_chars else remaining_data
self._state=next_states[match_idx]
break
def send(self, data):
if self.debug:
print >>sys.stderr, ">",`data`,
sys.stderr.flush()
while data:
ret=self.sock.send(data)
if self.debug:
print >>sys.stderr,ret,
sys.stderr.flush()
data=data[ret:]
if self.debug: print >>sys.stderr
return ret
def _find_match(self, data, tests):
ret_all=filter(lambda (idx,res): res is not None, map(lambda (idx, test): (idx, test.search(data)), enumerate(tests)))
if not ret_all: raise self.NeedMoreData
ret_all.sort(key=lambda ret: ret[1].start())
ret=ret_all[0]
return ret,data[ret[1].end():]
def _recv_data(self):
if self.debug:
print >>sys.stderr, "<",
sys.stderr.flush()
data=self.sock.recv(8192)
if self.debug: print >>sys.stderr, shrtn(data, 1024)
if data=="": raise self.EndOfData
return data
def start(self):
raise NotImplementedError("start method needs to be implemented and decorated with @StateMachine.State")
code_type=type(compile("True","","eval"))
class MyJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, (datetime.datetime,datetime.date)): return obj.isoformat()
if isinstance(obj, Exception):
return to_json(dict(type=type(obj).__name__, message=getattr(obj, "message", None), args=getattr(obj, "args", None)))
if isinstance(obj, code_type):
return repr(obj)
return json.JSONEncoder.default(self, obj)
def to_json(*args, **kwargs):
return json.dumps(args[0] if len(args)==1 and not kwargs else dict(*args, **kwargs), cls=MyJSONEncoder)
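# Example:
#   to_json(when=datetime.datetime(2015, 9, 30), n=1)
#   # -> '{"when": "2015-09-30T00:00:00", "n": 1}' (key order may vary)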
class CRC(object):
crc=0xffff
_init_args=("crc",)
__init__=_std_dyn_init
@property
def crc_i(self): return (~self.crc)&0xffff
def _calc_next(self,crc_i,c): return self.table[(c^crc_i)&0xff] ^ (crc_i>>8)
def calculate(self, buf):
crc_i=0
for c in buf: crc_i=self._calc_next(crc_i,ord(c))
return (~crc_i)&0xffff
def update(self, data):
# crc_i is a read-only property derived from self.crc, so accumulate locally
crc_i=self.crc_i
for c in data: crc_i=self._calc_next(crc_i, ord(c))
self.crc=(~crc_i)&0xffff
return self.crc
@classmethod
def g(cls, width, poly, reflect_in=True, cls_name=None):
table=[]
table_idx_width=8
reflect=lambda v,w: reduce(lambda a,i: a+(1<<(w-i-1) if v&(1<<i) else 0), xrange(w), 0)
crc_shift=(8-width) if width<8 else 0
msb_mask=0x1<<(width-1)
mask=((msb_mask-1)<<1)|1
for val in range(1<<table_idx_width):
if reflect_in: val=reflect(val, table_idx_width)
val=reduce(lambda a,b: (a<<1)^((poly<<crc_shift) if a&(msb_mask<<crc_shift) else 0),
xrange(table_idx_width), val<<(width-table_idx_width+crc_shift))
if reflect_in: val=reflect(val>>crc_shift, width)<<crc_shift
table.append(val&(mask<<crc_shift))
if cls_name is None: cls_name="%s_g"%cls.__name__
return type(cls_name, (cls,), {"table":table})
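# Example (sketch): derive a reflected 16-bit CRC class and feed it incrementally
#   CRC16=CRC.g(16, 0x8005)        # polynomial 0x8005, reflected input
#   c=CRC16()
#   c.update("123456789")
#   print "%04x"%c.crc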
if __name__ == '__main__':
import user # @UnusedImport