Skip to content

Instantly share code, notes, and snippets.

@lahwran
Created August 30, 2014 18:05
Show Gist options
  • Star 2 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save lahwran/e09bf80f9d7f10a9adb5 to your computer and use it in GitHub Desktop.
# to use:
# 1. put this file in ~/callstatement/callstatement.py
# 2. make a file ~/Library/Python/2.7/site-packages/derp.pth with this as the contents:
# "/Users/USER/callstatement/\nimport callstatement\n"
# 3. export PYTHONIOENCODING="callstatement_utf8"
# warning: EXTREMELY NASTY DO NOT USE IN PRODUCTION CODE
# WILL BREAK EVERYTHING I AM NOT RESPONSIBLE IF
# YOU'RE FOOLISH ENOUGH TO ACTUALLY TRY TO USE
# THIS
import codecs
import cStringIO
import encodings
import tokenize
import token
import collections
# Reverse lookup table: numeric token id -> symbolic name.  Entries from
# the ``tokenize`` module are layered over those from ``token`` so
# tokenize-only ids (e.g. NL, COMMENT) are covered too.
tokens = {value: name
          for name, value in vars(token).items()
          if type(value) == int}
tokens.update({value: name
               for name, value in vars(tokenize).items()
               if type(value) == int})
# ``token``'s useful contents are folded into ``tokens``; release the
# module name so later code can reuse ``token`` as a loop variable.
del token
class Token(collections.namedtuple("Token", "type string start end")):
    """A tokenize 4-tuple (type, string, start, end) as a named record.

    Subclassing (rather than using the namedtuple directly) gives each
    instance a ``__dict__``, so extra attributes such as ``startc``,
    ``endc`` and ``pair`` can be attached after construction.
    """
def itertokens(iterator):
    """Wrap raw tokenize tuples in Token records.

    Each yielded Token is annotated with its start/end columns and a
    ``pair`` of (type, string) for convenient matching downstream.
    Values sent into the generator are accepted and ignored.
    """
    for raw in iterator:
        tok = Token(*raw[:4])
        tok.startc = tok.start[1]
        tok.endc = tok.end[1]
        tok.pair = (tok.type, tok.string)
        yield tok
def translate_tokens(readline):
    """Rewrite the fake statement ``call f(args..., ...) as name:`` in a
    token stream into ``class name(___make_call___(args...), f):``,
    passing every other token through unchanged.

    ``readline`` is a readline-style callable, as expected by
    tokenize.generate_tokens.  Yields 5-element token sequences suitable
    for tokenize.untokenize.
    """
    #return tokenize.generate_tokens(readline)
    iterator = itertokens(tokenize.generate_tokens(readline))
    for token in iterator:
        if token.pair == (tokenize.NAME, 'call'):
            # Gather the whole statement header: from 'call' to the ':'.
            line = [token]
            while line[-1].pair != (tokenize.OP, ":"):
                ni = iterator.next()
                line.append(ni)
            # line[1] is the callee name.
            assert line[1].type == tokenize.NAME
            func = line[1].string
            # Optional "as NAME" suffix binds the call's result.
            uses_retval = line[-3].pair == (tokenize.NAME, "as")
            if uses_retval:
                assert line[-2].type == tokenize.NAME
                returnval = line[-2].string
                line = line[:-2]
            else:
                returnval = "_ignored_value"
            # The parenthesized argument list, parens included.
            funccall = line[2:-1]
            assert funccall[0].pair == (tokenize.OP, "(")
            assert funccall[-1].pair == (tokenize.OP, ")")
            # Drop the optional comma before the trailing '...'.
            if funccall[-2].pair == (tokenize.OP, ","):
                del funccall[-2]
            # The argument list must end with '...' (three OP dots).
            assert [x.pair for x in funccall[-4:-1]] == [
                (tokenize.OP, ".")] * 3
            funccall_front = funccall[:-4]
            ln, ls = token.start
            rvl = len(returnval)
            mc = "___make_call___"
            mcl = len(mc)
            # Emit the replacement header, column-accurate:
            #   class <returnval>(___make_call___
            genned_line = [
                [tokenize.NAME, "class", (ln, ls), (ln, ls+5), ''],
                [tokenize.NAME, returnval, (ln, ls+6), (ln, ls+6+rvl), ''],
                [tokenize.OP, "(", (ln, ls+6+rvl), (ln, ls+7+rvl), ''],
                [tokenize.NAME, mc, (ln, ls+7+rvl), (ln, ls+7+rvl+mcl), ''],
            ]
            # correct anything on the same line: shift the copied argument
            # tokens so their columns line up after the generated prefix.
            if funccall_front[0].start[0] == ln:
                gen_offset = (ls+7+rvl+mcl)
                offset = funccall_front[0].start[1]
                funccall_front_new = []
                for call_token in funccall_front:
                    sline, schar = call_token.start
                    eline, echar = call_token.end
                    if sline == ln:
                        schar -= offset
                        schar += gen_offset
                    if eline == ln:
                        echar -= offset
                        echar += gen_offset
                    funccall_front_new.append(
                        [call_token.type, call_token.string,
                         (sline, schar), (eline, echar), ''])
                funccall_front = funccall_front_new
            genned_line.extend(funccall_front)
            # Close the generated header: "), func):".
            ln, ls = genned_line[-1][3]
            lf = len(func)
            genned_line.extend([
                [tokenize.OP, ")", (ln, ls), (ln, ls+1), ""],
                [tokenize.OP, ",", (ln, ls+1), (ln, ls+2), ""],
                # NOTE(review): ``func`` is an identifier but is emitted
                # with type OP; untokenize seems position-driven here --
                # confirm this was intentional rather than a typo for NAME.
                [tokenize.OP, func, (ln, ls+3), (ln, ls+3+lf), ""],
                [tokenize.OP, ")", (ln, ls+3+lf), (ln, ls+4+lf), ""],
                [tokenize.OP, ":", (ln, ls+4+lf), (ln, ls+5+lf), ""],
            ])
            for t in genned_line:
                yield t
            #yield tokenize.NAME, 'class'
            #yield tokenize.OP, '>>'
            #yield tokenize.NAME, "open"
            #yield tokenize.OP, "("
            #yield tokenize.STRING,
            #yield tokenize.OP, ","
            #yield tokenize.STRING, "'a'"
            #yield tokenize.OP, ")"
            #yield tokenize.OP, ","
        else:
            # Any other token: pad the 4-tuple to untokenize's 5 fields.
            yield token + ("",)
def make_decode():
    """Build and return ``(decode, StreamReader)`` for the fake codec.

    Both run the source text through translate_tokens before handing it
    to the real utf-8 machinery, so the 'call' statement is rewritten
    transparently while a module is being decoded.
    """
    utf8 = encodings.search_function('utf8')
    def decode(input=None, errors="strict", stream=None, decode=True):
        # NOTE: the ``decode`` parameter deliberately shadows the function
        # name; when falsy the translated byte string is returned raw
        # instead of being utf-8 decoded.
        data = input
        if stream is not None:
            data = stream.read()
        stream = cStringIO.StringIO(data)
        # Round-trip: tokenize, rewrite 'call' headers, re-serialize.
        data = tokenize.untokenize(translate_tokens(stream.readline))
        if decode:
            # codecs decode contract: returns (unicode, length_consumed).
            return utf8.decode(data, errors)
        return data
    class StreamReader(encodings.utf_8.StreamReader):
        # Used when the interpreter reads a source file declared with this
        # encoding: translate the whole stream up front, then serve the
        # result as a plain in-memory stream.
        def __init__(self, *args, **kwargs):
            codecs.StreamReader.__init__(self, *args, **kwargs)
            # NOTE(review): decode() defaults to decode=True and so returns
            # a (unicode, length) tuple, which is then handed to
            # cStringIO.StringIO -- looks like this wants decode=False or
            # [0]; confirm against actual py2 behavior.
            data = decode(stream=self.stream)
            self.stream = cStringIO.StringIO(data)
    return decode, StreamReader
@codecs.register
def search_function(s):
    """Codec search hook: answer only for the magic encoding name.

    For 'callstatement_utf8' returns a CodecInfo that decodes through the
    call-statement translator while borrowing everything else from the
    stock utf-8 codec; any other name is declined with None.
    """
    if s == 'callstatement_utf8':
        utf8 = encodings.search_function('utf8')
        decode, StreamReader = make_decode()
        return codecs.CodecInfo(
            name='callstatement_utf8',
            encode=utf8.encode,
            decode=decode,
            incrementalencoder=utf8.incrementalencoder,
            incrementaldecoder=utf8.incrementaldecoder,
            streamreader=StreamReader,
            streamwriter=utf8.streamwriter
        )
    return None
class Metaclass(type):
    """Metaclass that turns a class *statement* into a function call.

    A direct ``class X(object)`` definition (as produced by makecall) is
    created normally.  Any further subclassing -- which is what the
    rewritten ``call`` statement generates -- instead invokes the last
    base with the stored arguments plus the class body as keyword args,
    and binds the call's return value to the class name.
    """
    def __new__(cls, name, bases, dct):
        if bases == (object,):
            # The carrier class itself: build it as an ordinary type.
            return super(Metaclass, cls).__new__(cls, name, bases, dct)
        # Strip the bookkeeping key the class statement always adds.
        dct.pop("__module__")
        stored_args = bases[0]._args
        target = bases[-1]
        return target(*stored_args, **dct)
def makecall(*args):
    """Capture *args in a throwaway carrier class.

    The returned class stashes the arguments in ``_args``; subclassing it
    (which the token translator arranges) trips Metaclass.__new__ and
    performs the actual call.
    """
    class Call(object):
        # Stash the positional arguments for Metaclass to pick up later.
        _args = args
        # py2: route creation of this class (and subclasses) through
        # Metaclass via the old-style __metaclass__ hook.
        __metaclass__ = Metaclass
    return Call
# Install the hook as a builtin so the generated
# ``class NAME(___make_call___(...), func):`` headers resolve in any
# module without an explicit import (py2's builtin namespace module).
import __builtin__
__builtin__.___make_call___ = makecall
if __name__ == "__main__":
decode, x = make_decode()
print decode("""
print '\xc3\xbcthis is a test'
print "this is another test"
print "this is yet another test"
print "derp"
def derp(arg, func):
print arg
func(123)
return "1234"
def derk(func):
func(321)
return "4321"
call derp(raw_input("herk"), ...) as result:
def func(value):
print value
print result
if test:
call derk(...) as result:
def func(value):
print value
# call derk(123, 456,
# 789, 10, 11, 12, ...)
# as result:
# def func(value):
# print value
print result
""")[0].encode("utf-8")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment