Skip to content

Instantly share code, notes, and snippets.

@elsonidoq
elsonidoq / safe_write_example.py
Created May 2, 2014 17:54
Why is safe_write useful?
with safe_write('delete_me') as f:
f.write('some contents')
with safe_write('delete_me') as f:
1/0
# delete_me contents still are "some contents"
@elsonidoq
elsonidoq / safe_write.py
Last active August 29, 2015 14:00
Class for making sure a file is not wiped if an exception is raised after you opened it
from tempfile import mktemp
import shutil
class safe_write(object):
def __init__(self, fname):
self.fname = fname
self.tmp_fname = mktemp()
def __enter__(self):
self.stream = open(self.tmp_fname, 'w')
@elsonidoq
elsonidoq / find_dependencies.py
Last active November 28, 2016 03:56
Find dependencies of .so files on linux machines, and copies them on a specified directory. It is very simple, and does not have a nice heuristic for picking among different versions of the same file. However, you can use the option --skip-patterns to add a comma-separated list of strings that should not appear on the path of the file (you can f…
import shutil
from optparse import OptionParser
import sys
import functools
import re
import subprocess
import os
READELF = '/usr/bin/readelf'
def get_dependencies(fname, skip_patterns):
@elsonidoq
elsonidoq / gist:4230231
Created December 7, 2012 02:23
Mutual information test output
X ~ N(0,1)
y1 = 1 <=> x > 0
y2 = 1 with probability 0.5
I(y1;x) = H(X) - H(X|Y1) = 0.97
I(y1;x) = H(Y1) - H(Y1|X) = 0.97
I(y2;x) = H(X) - H(X|Y2) = 0.08
I(y2;x) = H(Y2) - H(Y2|X) = 0.08
@elsonidoq
elsonidoq / gist:4230224
Created December 7, 2012 02:21
Test for mutual information
from scipy import asarray
from random import gauss, randint
def test():
x= asarray([gauss(0,1) for i in range(1000)])
y1= asarray([int(e>0) for e in x])
y2= asarray([randint(0,1) for e in x])
hx, bx= histogram(x, bins=x.size/10, density=True)
dx= digitize(x,bx)
@elsonidoq
elsonidoq / gist:4230222
Created December 7, 2012 02:21
Python implementation of mutual information for continuous variables
from math import log
def log2(x):
    """Return the base-2 logarithm of x (as log(x)/log(2)).

    Replaces the original `log2 = lambda ...` assignment: PEP 8 (E731)
    prefers a `def`, which also gives the callable a proper __name__
    for tracebacks. Same call interface and result as the lambda.
    """
    return log(x, 2)
from scipy import histogram, digitize, stats, mean, std
from collections import defaultdict
def mutual_information(x, y):
    """Mutual information between x and y.

    Computed via the identity I(X;Y) = H(Y) - H(Y|X), delegating to the
    sibling `entropy` and `conditional_entropy` helpers.
    NOTE(review): presumably x is a continuous sample and y a discrete
    label vector, matching `conditional_entropy` — confirm with callers.
    """
    marginal = entropy(y)
    conditional = conditional_entropy(x, y)
    return marginal - conditional
def conditional_entropy(x, y):
"""
@elsonidoq
elsonidoq / test_mutual_information.py
Created December 7, 2012 02:16
Test mutual information
from scipy import asarray
from random import gauss, randint
def test():
x= asarray([gauss(0,1) for i in range(1000)])
y1= asarray([int(e>0) for e in x])
y2= asarray([randint(0,1) for e in x])
hx, bx= histogram(x, bins=x.size/10, density=True)
dx= digitize(x,bx)
@elsonidoq
elsonidoq / mutual_information.py
Created December 6, 2012 18:07
Mutual information
from math import log
def log2(x):
    """Return the base-2 logarithm of x (as log(x)/log(2)).

    Replaces the original `log2 = lambda ...` assignment: PEP 8 (E731)
    prefers a `def`, which also gives the callable a proper __name__
    for tracebacks. Same call interface and result as the lambda.
    """
    return log(x, 2)
from scipy import histogram, digitize, stats, mean, std
from collections import defaultdict
def conditional_entropy(x, y):
"""
x: vector de numeros reales
y: vector de numeros enteros
@elsonidoq
elsonidoq / biggest_nonsparse_matrix.py
Created September 8, 2012 17:06
The largest matrix that does not raise MemoryError
from scipy import *
min=10000; max=min*100
s= (min+max)/2
while True:
if max - min < 2: break
avg= (min+max)/2
try:
m=zeros((s,s))
del m