#ifndef BYTECONVERSION_H
#define BYTECONVERSION_H
#ifdef HASENDIAN_H
#include <endian.h>  /* system provides be32toh */
#else
#include <stdint.h>
#ifndef ISBIGENDIAN
/* Little-endian host: swap the bytes of a big-endian 32-bit value */
static uint32_t be32toh(uint32_t val) {
    return (val >> 24) | ((val >> 8) & 0xff00) | ((val << 8) & 0xff0000) | (val << 24);
}
#else
static uint32_t be32toh(uint32_t val) { return val; }  /* big-endian host */
#endif
#endif
#endif
#include "libfreenect.h"
#include "libfreenect_sync.h"
#include "libfreenect_cv.h"
IplImage *freenect_sync_get_depth_cv(int index)
{
static IplImage *image = 0;
static char *data = 0;
static char *data_out = 0;
if (!data_out)
import numpy as np

height, width = 480, 640
# Fake Kinect-style 11-bit depth values stored as 16-bit unsigned ints
depth = np.array(np.random.random((height, width)) * 2**11, dtype=np.uint16)
with open('out.pgm', 'wb') as fp:  # binary mode: P5 PGM carries raw pixel bytes
    fp.write('P5 %d %d 65535\n' % (width, height))
    fp.write(depth.astype('>u2').tostring())  # 16-bit PGM samples are big-endian
bwhite / pil_opencv.py
Created May 28, 2011 19:58
OpenCV and PIL Helpers (project tp)
###################################################################################################
import cv
import Image
import cStringIO as StringIO


def pil2cv(pil_image):
    """Convert a PIL image into an OpenCV (old cv API) image."""
    channels = 1 if pil_image.mode == 'L' else 3
    cv_im = cv.CreateImageHeader(pil_image.size, cv.IPL_DEPTH_8U, channels)
    cv.SetData(cv_im, pil_image.tostring())
    return cv_im
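
# A possible companion helper (assumed, not shown in the snippet above): wrap an
# OpenCV image's raw buffer back into a PIL image. Note this sketch ignores the
# BGR/RGB channel-order difference between the two libraries.
def cv2pil(cv_im):
    mode = 'L' if cv_im.nChannels == 1 else 'RGB'
    return Image.fromstring(mode, cv.GetSize(cv_im), cv_im.tostring())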
def break_crypto(ciphertext, num_bytes=1):
    """Brute force convergent encryption.

    Pseudocode to show the vulnerability for small messages.
    This is an example for small plaintexts (of roughly known byte size).

    Args:
        ciphertext: Plaintext that has been encrypted using symmetric
            encryption with the key being a hash of the plaintext.
        num_bytes: Number of bytes in the plaintext message.
    """
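    # A sketch of the attack loop (assumed, not from the original gist): enumerate
    # every possible plaintext of num_bytes bytes, derive the convergent key by
    # hashing the candidate, and check whether encrypting it reproduces the
    # ciphertext. encrypt(key, plaintext) is a hypothetical stand-in for whatever
    # symmetric cipher produced the ciphertext.
    import itertools
    import hashlib
    for candidate in itertools.product(range(256), repeat=num_bytes):
        plaintext = ''.join(chr(c) for c in candidate)
        key = hashlib.sha256(plaintext).digest()
        if encrypt(key, plaintext) == ciphertext:
            return plaintext
    return None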
import json


def filter_by_completed(users, results, num_tasks=100):
    # Keep only users who finished every task
    new_users = {}
    for user_id, user_data in users.items():
        assert user_data.get('tasks_finished', 0) <= num_tasks
        if user_data.get('tasks_finished', 0) == num_tasks:
            new_users[user_id] = user_data
    # Keep only results from those users (assumes each result records its user_id)
    new_results = dict((result_id, result_data)
                       for result_id, result_data in results.items()
                       if result_data.get('user_id') in new_users)
    return new_users, new_results
bwhite / pack_data.py
Created February 22, 2012 20:01
Make a data packed .py file
import zlib
import base64
import cPickle as pickle
import argparse


def models_to_py(output_path, paths):
    # Embed each input file as a (path, contents) pair inside a generated .py module
    data = [(x, open(x).read()) for x in paths]
    with open(output_path, 'w') as fp:
        fp.write('import zlib, base64, cPickle\ndata = cPickle.loads(zlib.decompress(base64.b64decode("')
        fp.write(base64.b64encode(zlib.compress(pickle.dumps(data, -1))))
        fp.write('")))\n')
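
# Example use (file names are hypothetical): pack two model files into a module,
# then import that module to get `data` back as a list of (path, contents) pairs.
# models_to_py('models_packed.py', ['model_a.pkl', 'model_b.pkl'])
# import models_packed
# print dict(models_packed.data).keys()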
bwhite / verbose.py
Created June 27, 2012 16:30
Hadoopy Cookbook: Enable verbose job output
# At the start of a file where you want verbose job/hadoopy output, put the following.
# This is normally used in driver files, which contain one or more launch commands.
import logging
logging.basicConfig(level=logging.INFO)
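
# For example, a driver might enable logging as above and then launch a job.
# The paths and script name below are hypothetical placeholders:
# import hadoopy
# hadoopy.launch_frozen('/input/path', '/output/path', 'wordcount.py')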
bwhite / compress.py
Created June 27, 2012 16:37
Hadoopy Cookbook: Compress intermediate and final output
# To enable map output compression (pre-shuffle) and reduce output compression, use the following jobconfs
# For Snappy (in CDH3+, splittable and fast)
jobconfs = ['mapred.map.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec',
'mapred.compress.map.output=true',
'mapred.output.compress=true',
'mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec']
# For GZip (in nearly all Hadoop distros, not splittable)
jobconfs = ['mapred.map.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec',
            'mapred.compress.map.output=true',
            'mapred.output.compress=true',
            'mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec']
bwhite / random_uniform.py
Created June 27, 2012 19:39
Hadoopy Cookbook: Uniform random sampling of key/value pairs
#!/usr/bin/env python
# (C) Copyright 2012 Brandyn A. White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of