
Keybase proof

I hereby claim:

  • I am jbed on github.
  • I am jason128 (https://keybase.io/jason128) on keybase.
  • I have a public key ASAKteCJXb25zZO9-tv9oZpD0S74odBhXcqpZfU2XEnx4Ao

To claim this, I am signing this object:

@JBed
JBed / Inception.py
Created February 11, 2016 22:42
Inception-v3
# Inception-v3: http://arxiv.org/abs/1512.00567
from lasagne.layers import InputLayer
from lasagne.layers import Conv2DLayer
from lasagne.layers import Pool2DLayer
from lasagne.layers import DenseLayer
from lasagne.layers import GlobalPoolLayer
from lasagne.layers import ConcatLayer
from lasagne.layers.normalization import batch_norm
from lasagne.nonlinearities import softmax
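The Inception.py preview stops at the imports. As an illustration only (not the gist's code), here is a minimal sketch of one Inception-style block built from the layers imported above; the function name, branch widths, and pooling choice are assumptions, not taken from the gist.

def inception_block(incoming, nf_1x1=64, nf_3x3=96, nf_pool=32):
    # parallel branches over the same input, all preserving spatial size
    branch_1x1 = batch_norm(Conv2DLayer(incoming, nf_1x1, 1))
    branch_3x3 = batch_norm(Conv2DLayer(incoming, nf_3x3, 3, pad=1))
    branch_pool = Pool2DLayer(incoming, 3, stride=1, pad=1, mode='max')
    branch_pool = batch_norm(Conv2DLayer(branch_pool, nf_pool, 1))
    # concatenate the branch feature maps along the channel axis
    return ConcatLayer([branch_1x1, branch_3x3, branch_pool])

# example usage: net = inception_block(InputLayer((None, 3, 299, 299)))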
from sklearn.cluster.k_means_ import _k_init
from operator import itemgetter
import numpy as np
def quasi_random_sampling(X, n_samples=25):
    def row_norms(X, squared=True):
        squared_norms = (X**2).sum(axis=1)
        if squared: return squared_norms
        else: return np.sqrt(squared_norms)
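The preview cuts the function off here. A hedged sketch of the idea it appears to implement, written as a pure-numpy stand-in for sklearn's private _k_init (function name and details are mine, not the gist's): pick each new sample with probability proportional to its squared distance from the points already chosen, so the selected points spread evenly over the data set.

def kmeanspp_sample_indices(X, n_samples=25, rng=None):
    rng = np.random.RandomState(rng)
    n = X.shape[0]
    chosen = [rng.randint(n)]                      # first point uniformly at random
    d2 = ((X - X[chosen[0]])**2).sum(axis=1)       # squared distance to nearest chosen point
    for _ in range(1, n_samples):
        idx = rng.choice(n, p=d2 / d2.sum())       # D^2-weighted draw, as in k-means++
        chosen.append(idx)
        d2 = np.minimum(d2, ((X - X[idx])**2).sum(axis=1))
    return np.array(chosen)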
@JBed
JBed / cos_similarity.py
Created August 4, 2015 18:53
simple cosine similarity
import re, math
from collections import Counter

# WORD is not defined in the preview; a simple word-matching pattern is assumed here
WORD = re.compile(r'\w+')

def sentence_to_vector(text):
    words = WORD.findall(text)
    return Counter(words)
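The preview ends before the similarity itself. A minimal sketch of how cosine similarity is typically computed between two such Counter vectors (not necessarily the gist's exact version):

def cosine_similarity(vec1, vec2):
    common = set(vec1) & set(vec2)
    numerator = sum(vec1[w] * vec2[w] for w in common)
    norm1 = math.sqrt(sum(c * c for c in vec1.values()))
    norm2 = math.sqrt(sum(c * c for c in vec2.values()))
    return numerator / (norm1 * norm2) if norm1 and norm2 else 0.0

# example: cosine_similarity(sentence_to_vector("a cat sat"), sentence_to_vector("the cat sat"))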
@JBed
JBed / f_whitening.py
Last active April 9, 2020 16:08
1/f whitening for large natural images
# PCA whitening involves finding the inverse square root of the covariance matrix
# of a set of observations, which is prohibitively expensive when dealing
# with large natural images.
# Starting with a path to a single image (img_path):
import numpy as np
from PIL import Image
from sklearn import preprocessing
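A hedged sketch of the frequency-domain shortcut the comments describe (function name and details are mine, not the gist's): natural images have a roughly 1/f amplitude spectrum and their covariance is approximately diagonalized by the Fourier basis, so multiplying each Fourier coefficient by its radial frequency whitens the image without ever forming a covariance matrix.

def whiten_1_over_f(img_path):
    img = np.asarray(Image.open(img_path).convert('L'), dtype=np.float64)
    img -= img.mean()
    F = np.fft.fft2(img)
    fy = np.fft.fftfreq(img.shape[0])[:, None]
    fx = np.fft.fftfreq(img.shape[1])[None, :]
    rho = np.sqrt(fx**2 + fy**2)   # radial frequency of each coefficient; 0 at DC
    return np.real(np.fft.ifft2(F * rho))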
@JBed
JBed / gist:4ca8012dad91bf055e55
Created July 11, 2015 22:45
k-means unsupervised pre-training in python
# http://jmlr.org/papers/volume11/erhan10a/erhan10a.pdf
import cPickle as pickle
import numpy as np
from matplotlib import pyplot as plt
from os.path import join
from sklearn.cluster import KMeans
# download data here: http://www.cs.toronto.edu/~kriz/cifar.html
with open(join('data','cifar-10-batches-py','data_batch_1'),'rb') as f:
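The preview cuts off at the with-statement. A hedged sketch of where such a script typically goes from there (names and parameters are mine, not the gist's), assuming the imports above: unpickle the CIFAR-10 batch, cut out small random patches, cluster them with k-means, and keep the centroids as first-layer filters for later supervised training.

def kmeans_pretrain(batch_path, patch_size=6, n_patches=20000, n_filters=64):
    with open(batch_path, 'rb') as f:
        batch = pickle.load(f)
    images = batch['data'].reshape(-1, 3, 32, 32).astype(np.float64)
    rng = np.random.RandomState(0)
    patches = []
    for _ in range(n_patches):
        i = rng.randint(images.shape[0])
        y = rng.randint(32 - patch_size)
        x = rng.randint(32 - patch_size)
        patches.append(images[i, :, y:y + patch_size, x:x + patch_size].ravel())
    patches = np.array(patches)
    # per-patch contrast normalization before clustering
    patches -= patches.mean(axis=1, keepdims=True)
    patches /= patches.std(axis=1, keepdims=True) + 1e-8
    km = KMeans(n_clusters=n_filters).fit(patches)
    return km.cluster_centers_.reshape(n_filters, 3, patch_size, patch_size)

# the learned centroids can be inspected with plt.imshow to check they look like edge/color filters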
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization
#AlexNet with batch normalization in Keras
#input image is 224x224
model = Sequential()
model.add(Convolution2D(64, 3, 11, 11, border_mode='full'))
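For comparison, and as an assumption rather than part of the gist, roughly the same opening layers in the current Keras API; note that modern Keras has no 'full' border mode, so 'same' padding is used here as an approximation.

from tensorflow import keras
from tensorflow.keras import layers

modern = keras.Sequential([
    keras.Input(shape=(224, 224, 3)),
    layers.Conv2D(64, kernel_size=11, padding='same'),
    layers.BatchNormalization(),
    layers.Activation('relu'),
    layers.MaxPooling2D(pool_size=3, strides=2),
])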