from __future__ import division
import multiprocessing
import scipy.spatial.distance
import numpy as np
import sklearn.datasets
from time import time
from multiprocessing import Pool
from itertools import combinations
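Only the imports of this snippet survived the capture; they point at a parallel pairwise-distance benchmark (a Pool mapped over index pairs from combinations, timed against a serial loop). A minimal sketch of that pattern, building on the imports above and assuming fork-based multiprocessing so workers inherit the data matrix; all names and sizes below are illustrative:

def pair_distance(pair):
    # Euclidean distance between two rows of the shared matrix X.
    i, j = pair
    return scipy.spatial.distance.euclidean(X[i], X[j])

if __name__ == '__main__':
    X, _ = sklearn.datasets.make_blobs(n_samples=500, n_features=16)
    pairs = list(combinations(range(X.shape[0]), 2))

    start = time()
    serial = [pair_distance(p) for p in pairs]
    print('serial:   %.2f s' % (time() - start))

    start = time()
    pool = Pool(multiprocessing.cpu_count())  # workers inherit X via fork
    parallel = pool.map(pair_distance, pairs)
    pool.close()
    print('parallel: %.2f s' % (time() - start))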
'''
A logistic regression example using the meta-graph checkpointing
features of TensorFlow.
Author: João Felipe Santos, based on code by Aymeric Damien
(https://github.com/aymericdamien/TensorFlow-Examples/)
'''
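The body of that gist is not part of this capture. As a rough sketch of what TF1-era meta-graph checkpointing looks like (the checkpoint path and tensor names here are illustrative, not from the original):

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 784], name='x')
W = tf.Variable(tf.zeros([784, 10]), name='W')
b = tf.Variable(tf.zeros([10]), name='b')
pred = tf.nn.softmax(tf.matmul(x, W) + b, name='pred')

saver = tf.train.Saver()
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # The default write_meta_graph=True also stores the graph structure
    # in model.ckpt.meta alongside the variable values.
    saver.save(sess, 'model.ckpt')

# Restoring rebuilds the graph from the .meta file, so no model-building
# code is needed at load time:
tf.reset_default_graph()
with tf.Session() as sess:
    new_saver = tf.train.import_meta_graph('model.ckpt.meta')
    new_saver.restore(sess, 'model.ckpt')
    pred = tf.get_default_graph().get_tensor_by_name('pred:0')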
from __future__ import print_function
from keras.models import Sequential
from keras.layers import Dense
from keras.utils.io_utils import HDF5Matrix
import numpy as np
def create_dataset():
    import h5py
    X = np.random.randn(200, 10).astype('float32')
    y = np.random.randint(0, 2, size=(200, 1))
    f = h5py.File('test.h5', 'w')
    # Store the arrays under named datasets so HDF5Matrix can find them.
    f.create_dataset('my_data', data=X)
    f.create_dataset('my_labels', data=y)
    f.close()
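The rest of the gist is cut off; a plausible continuation trains a small Keras model directly from the HDF5 file. The model below is a guess (dataset names follow create_dataset above), but the shuffle='batch' restriction is real: HDF5Matrix only supports contiguous reads.

create_dataset()

# HDF5Matrix reads slices lazily from disk instead of loading everything.
X_train = HDF5Matrix('test.h5', 'my_data')
y_train = HDF5Matrix('test.h5', 'my_labels')

model = Sequential()
model.add(Dense(16, activation='relu', input_dim=10))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='sgd')

# shuffle must be 'batch' (shuffle whole batches) or False with HDF5Matrix.
model.fit(X_train, y_train, batch_size=32, shuffle='batch')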
jfsantos / AudioDisplay.jl
Created November 4, 2014 01:10
Rendering audio in IJulia notebooks
using WAV

function inline_audioplayer(filepath)
    # Emit an HTML5 <audio> tag so IJulia renders an inline player.
    markup = """<audio controls="controls">
                <source src="$filepath" />
                Your browser does not support the audio element.
                </audio>"""
    display(MIME("text/html"), markup)
end

# Usage: inline_audioplayer("example.wav")
jfsantos / test_ccall.jl
Last active October 16, 2018 13:42
Passing structures and arrays back and forth in Julia with ccall
# Julia-side mirror of a C struct { double* val; long len; }
# (`immutable` is the pre-0.6 spelling of `struct`).
immutable Cdvec
    val::Ptr{Cdouble}
    len::Clong
end

a = [1.0, 2.0, 3.0]
b = [3.1, 4.2, 7.3]
Ca = Cdvec(pointer(a), length(a))
Cb = Cdvec(pointer(b), length(b))
# These can then be passed by value to a C function via ccall, e.g. with a
# hypothetical library: ccall((:dvec_dot, "libdvec"), Cdouble, (Cdvec, Cdvec), Ca, Cb)
import torch
from torch.autograd import Variable

def train_fn(model, optimizer, criterion, batch):
    x, y, lengths = batch
    x = Variable(x.cuda())
    y = Variable(y.cuda(), requires_grad=False)
    # Flag padded positions: timesteps within each sequence's true length
    # get 0; padding beyond it stays 1.
    mask = Variable(torch.ByteTensor(x.size()).fill_(1).cuda(),
                    requires_grad=False)
    for k, l in enumerate(lengths):
        mask[:l, k, :] = 0
    # The gist is truncated here; the original presumably computed a
    # masked loss with `criterion` and stepped `optimizer`.
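For context, train_fn expects time-major batches of shape (T, B, F) plus the true sequence lengths; the mask[:l, k, :] indexing only makes sense in that layout. A hypothetical collate function producing such batches:

def collate_time_major(samples):
    # samples: list of (features, target) pairs, features of shape (T_i, F);
    # assumes fixed-size targets.
    lengths = [feats.size(0) for feats, _ in samples]
    x = torch.zeros(max(lengths), len(samples), samples[0][0].size(1))
    for k, (feats, _) in enumerate(samples):
        x[:lengths[k], k, :] = feats  # left-align each sequence, zero-pad the rest
    y = torch.stack([target for _, target in samples])
    return x, y, lengths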
import glob
import logging
import os
import numpy as np
import re
import soundfile
from numpy.lib.stride_tricks import as_strided
from maracas.maracas import asl_meter
from audio_tools import iterate_invert_spectrogram
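Here too only the imports remain; as_strided is the usual tool for slicing a signal into overlapping analysis frames without copying. A minimal sketch of that pattern (frame and hop sizes are arbitrary, the input file name is hypothetical, and a mono file is assumed):

def frame_signal(x, frame_len=512, hop=256):
    # Overlapping, zero-copy views into a contiguous 1-D signal.
    n_frames = 1 + (len(x) - frame_len) // hop
    stride = x.strides[0]
    return as_strided(x, shape=(n_frames, frame_len),
                      strides=(hop * stride, stride))

x, fs = soundfile.read('speech.wav')
frames = frame_signal(np.ascontiguousarray(x))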
import torch
from torch.autograd import Variable
import numpy as np
import pickle
import os
from glob import glob
from tqdm import tqdm
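The body of this script is missing as well; glob plus tqdm plus pickle usually adds up to a preprocess-every-file loop, roughly like the following (the path layout and the feature step are placeholders):

for path in tqdm(sorted(glob('data/*.wav'))):
    # Stand-in for whatever per-file feature extraction the script did.
    features = torch.from_numpy(np.zeros((100, 40), dtype='float32'))
    with open(os.path.splitext(path)[0] + '.pkl', 'wb') as f:
        pickle.dump(features, f)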
jfsantos / error.log
Created April 11, 2017 16:24
Issue when compiling PyTorch on a crouton env (ASUS Chromebook Flip)
-- Build files have been written to: /home/jfsantos/pytorch/torch/lib/build/libshm
[ 50%] Built target torch_shm_manager
[ 75%] Building CXX object CMakeFiles/shm.dir/core.cpp.o
/home/jfsantos/pytorch/torch/lib/libshm/core.cpp:149:1: error: invalid conversion from 'void* (*)(void*, long int)' to 'void* (*)(void*, ptrdiff_t) {aka void* (*)(void*, int)}' [-fpermissive]
};
^
/home/jfsantos/pytorch/torch/lib/libshm/core.cpp:149:1: error: invalid conversion from 'void* (*)(void*, void*, long int)' to 'void* (*)(void*, void*, ptrdiff_t) {aka void* (*)(void*, void*, int)}' [-fpermissive]
CMakeFiles/shm.dir/build.make:62: recipe for target 'CMakeFiles/shm.dir/core.cpp.o' failed
make[2]: *** [CMakeFiles/shm.dir/core.cpp.o] Error 1
CMakeFiles/Makefile2:67: recipe for target 'CMakeFiles/shm.dir/all' failed
from torch.utils.data import Dataset

class DummyDataset(Dataset):
    def __init__(self, items):
        super(DummyDataset, self).__init__()
        self.items = items

    def __getitem__(self, index):
        return self.items[index]

    def __len__(self):
        # DataLoader needs the dataset length to iterate and shuffle.
        return len(self.items)
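A quick usage check with DataLoader (batch size and items are arbitrary):

from torch.utils.data import DataLoader

dataset = DummyDataset(list(range(10)))
loader = DataLoader(dataset, batch_size=4, shuffle=True)
for batch in loader:
    print(batch)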