I hereby claim:
- I am jbed on github.
- I am jason128 (https://keybase.io/jason128) on keybase.
- I have a public key ASAKteCJXb25zZO9-tv9oZpD0S74odBhXcqpZfU2XEnx4Ao
To claim this, I am signing this object:
# Unsupervised pre-training reference:
# http://jmlr.org/papers/volume11/erhan10a/erhan10a.pdf
# Download the CIFAR-10 data here: http://www.cs.toronto.edu/~kriz/cifar.html
import cPickle as pickle  # Python 2 only; on Python 3 use plain `import pickle`
import numpy as np
from matplotlib import pyplot as plt
from os.path import join
from sklearn.cluster import KMeans
with open(join('data','cifar-10-batches-py','data_batch_1'),'rb') as f: |
# Bag-of-words / cosine-similarity snippet dependencies.
import re
import math
from collections import Counter
# Tokenizer used by sentence_to_vector. This constant was referenced but
# never defined in the snippet (NameError at call time); \w+ is the
# standard pattern for this classic text-cosine-similarity recipe.
WORD = re.compile(r"\w+")


def sentence_to_vector(text):
    """Return a Counter mapping each word token in *text* to its count.

    Tokens are maximal runs of word characters (``\\w+``); punctuation is
    discarded and matching is case-sensitive.
    """
    words = WORD.findall(text)
    return Counter(words)
# NOTE(review): sklearn.cluster.k_means_ is a private module that was
# removed in scikit-learn 0.24; on modern scikit-learn use the public
# sklearn.cluster.kmeans_plusplus instead of _k_init.
from sklearn.cluster.k_means_ import _k_init
from operator import itemgetter
import numpy as np
def quasi_random_sampling(X, n_samples=25):
    """Pick ``n_samples`` quasi-random seed points from the rows of X.

    NOTE(review): the visible body only defines the ``row_norms`` helper
    and then falls off the end, so the function currently returns None.
    The step that feeds the norms into sklearn's ``_k_init`` k-means++
    seeding appears to have been truncated from this snippet — recover it
    before relying on this function.
    """
    def row_norms(X, squared=True):
        # Euclidean row norms; squared=True skips the final sqrt, which
        # is the form the k-means++ seeding consumes.
        squared_norms = (X ** 2).sum(axis=1)
        if squared:
            return squared_norms
        else:
            return np.sqrt(squared_norms)
# Inception-v3 building blocks (Szegedy et al., http://arxiv.org/abs/1512.00567)
from lasagne.layers import InputLayer
from lasagne.layers import Conv2DLayer
from lasagne.layers import Pool2DLayer
from lasagne.layers import DenseLayer
from lasagne.layers import GlobalPoolLayer
from lasagne.layers import ConcatLayer
from lasagne.layers.normalization import batch_norm
from lasagne.nonlinearities import softmax
I hereby claim:
To claim this, I am signing this object:
# PCA whitening involves finding the inverse square root of the covariance
# matrix of a set of observations, which is prohibitively expensive when
# dealing with natural images.
# Starting with a path to a single image (img_path).
import numpy as np
from PIL import Image
from sklearn import preprocessing
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization
# AlexNet with batch normalization in Keras.
# Input image is 224x224.
model = Sequential()
# 64 filters with an 11x11 kernel over a 3-channel input — presumably the
# old Keras 0.x positional signature Convolution2D(nb_filter, stack_size,
# nb_row, nb_col); modern Keras uses Conv2D(64, (11, 11), padding=...).
# TODO(review): confirm the targeted Keras version before running.
model.add(Convolution2D(64, 3, 11, 11, border_mode='full'))