
@amueller
amueller / learning_gabor_filters.py
Created April 19, 2012 11:39
Learning Gabor filters with scikit-learn and ICA or k-means
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_mldata
from sklearn.decomposition import FastICA, PCA
from sklearn.cluster import KMeans
# fetch natural image patches
image_patches = fetch_mldata("natural scenes data")
X = image_patches.data
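The preview cuts off here, and the mldata.org "natural scenes data" set it fetches is no longer hosted. A minimal sketch of the same idea, assuming 16x16 grayscale patches cut from one of scikit-learn's bundled sample images as a stand-in, then visualising FastICA components and k-means centroids as filters:

import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_sample_image
from sklearn.feature_extraction.image import extract_patches_2d
from sklearn.decomposition import FastICA
from sklearn.cluster import KMeans

# stand-in for the natural-scene patches: 16x16 grayscale patches from a
# bundled sample image (an assumption, not the gist's data source)
image = load_sample_image("china.jpg").mean(axis=2)
patches = extract_patches_2d(image, (16, 16), max_patches=5000, random_state=0)
X = patches.reshape(len(patches), -1)
X = X - X.mean(axis=1, keepdims=True)  # remove each patch's DC component

ica = FastICA(n_components=49, random_state=0).fit(X)
km = KMeans(n_clusters=49, random_state=0).fit(X)

# plot the learned filters; both sets should come out roughly Gabor-like
for title, filters in [("ICA", ica.components_), ("k-means", km.cluster_centers_)]:
    fig, axes = plt.subplots(7, 7, figsize=(6, 6))
    fig.suptitle(title)
    for ax, f in zip(axes.ravel(), filters):
        ax.imshow(f.reshape(16, 16), cmap="gray")
        ax.axis("off")
plt.show()

Subtracting each patch's mean matters here; without it the leading components tend to capture overall brightness rather than oriented edge structure.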
@syhw
syhw / dnn_compare_optims.py
Created July 21, 2014 09:07
comparing SGD vs SAG vs Adadelta vs Adagrad
"""
A deep neural network with or w/o dropout in one file.
"""
import numpy
import theano
import sys
import math
from theano import tensor as T
from theano import shared
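The gist's full Theano network is long, so rather than reproduce it, here is a hedged NumPy sketch of just the update rules being compared (SGD, Adagrad, Adadelta; SAG is left to scikit-learn's own solvers), applied to a toy least-squares problem:

import numpy as np

# toy objective: mean over i of 0.5 * (x_i . w - y_i)^2
rng = np.random.RandomState(0)
X = rng.randn(100, 10)
true_w = rng.randn(10)
y = X.dot(true_w)

def grad(w):
    return X.T.dot(X.dot(w) - y) / len(y)

def sgd(lr=0.1, n_iter=200):
    w = np.zeros(10)
    for _ in range(n_iter):
        w -= lr * grad(w)
    return w

def adagrad(lr=1.0, eps=1e-6, n_iter=200):
    w, acc = np.zeros(10), np.zeros(10)
    for _ in range(n_iter):
        g = grad(w)
        acc += g ** 2                     # accumulate squared gradients
        w -= lr * g / np.sqrt(acc + eps)  # per-parameter step sizes
    return w

def adadelta(rho=0.95, eps=1e-6, n_iter=200):
    w = np.zeros(10)
    acc_g, acc_dx = np.zeros(10), np.zeros(10)
    for _ in range(n_iter):
        g = grad(w)
        acc_g = rho * acc_g + (1 - rho) * g ** 2     # running average of g^2
        dx = -np.sqrt(acc_dx + eps) / np.sqrt(acc_g + eps) * g
        acc_dx = rho * acc_dx + (1 - rho) * dx ** 2  # running average of dx^2
        w += dx
    return w

for name, w in [("SGD", sgd()), ("Adagrad", adagrad()), ("Adadelta", adadelta())]:
    print(name, "distance to optimum:", np.linalg.norm(w - true_w))

These are full-batch gradients for simplicity; the gist itself compares the optimizers on minibatch training of a deep network, which this toy problem does not reproduce.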
@goldingn
goldingn / CUR4FIC
Last active January 3, 2016 05:49
Playing with CUR decomposition (versus k-means) as a method for picking inducing points in sparse Gaussian processes
# clear the workspace
rm(list = ls())
# load the relevant libraries
# install.packages("rCUR")
library(rCUR) # for CUR decomposition
# install.packages("irlba")
library(irlba) # for fast svd
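To keep this listing in one language, here is a Python sketch of the comparison the R gist plays with: choosing inducing inputs for a sparse GP by CUR-style statistical leverage scores (a deterministic simplification of what rCUR does) versus taking k-means centres. The toy data and the number of inducing points are assumptions:

import numpy as np
from sklearn.cluster import KMeans

rng = np.random.RandomState(0)
X = rng.randn(500, 2).dot(np.array([[2.0, 0.3], [0.3, 0.5]]))  # toy 2-D inputs
n_inducing = 20

# CUR-style row selection: leverage scores from the top-k singular vectors
# flag the most "influential" rows of X
U, s, Vt = np.linalg.svd(X, full_matrices=False)
k = 2
leverage = (U[:, :k] ** 2).sum(axis=1)
cur_idx = np.argsort(leverage)[::-1][:n_inducing]
Z_cur = X[cur_idx]

# k-means alternative: cluster centres as inducing inputs
Z_km = KMeans(n_clusters=n_inducing, random_state=0).fit(X).cluster_centers_

print("CUR-selected inducing points:\n", Z_cur[:5])
print("k-means inducing points:\n", Z_km[:5])

One visible difference in this toy setup: leverage scores favour extreme points at the edge of the data, while k-means centres sit inside dense regions.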
@GaelVaroquaux
GaelVaroquaux / bench_dbscan.py
Last active December 20, 2015 10:19
Benchmarking scikit-learn 0.14.X release
import numpy as np
import time
from sklearn import cluster
from sklearn import datasets
lfw = datasets.fetch_lfw_people()
X_lfw = lfw.data[:, :5]
eps = 8. # This choice of EPS gives 44 clusters
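A sketch of how the benchmark might continue from this preview: timing a DBSCAN fit on the first five LFW features and counting the clusters found (the continuation is an assumption, not necessarily the gist's exact code):

import time
from sklearn import cluster, datasets

lfw = datasets.fetch_lfw_people()
X_lfw = lfw.data[:, :5]
eps = 8.  # as in the gist, this choice of eps gives 44 clusters

t0 = time.time()
db = cluster.DBSCAN(eps=eps).fit(X_lfw)
print("DBSCAN fit time: %.2fs" % (time.time() - t0))

labels = db.labels_
n_clusters = len(set(labels)) - (1 if -1 in labels else 0)  # -1 marks noise
print("number of clusters:", n_clusters)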
@amueller
amueller / gist:4299381
Created December 15, 2012 21:26
Plotting PCAs of pairs of MNIST digit classes
import numpy as np
import matplotlib.pyplot as plt
from itertools import product
from sklearn.decomposition import RandomizedPCA
from sklearn.datasets import fetch_mldata
from sklearn.utils import shuffle
mnist = fetch_mldata("MNIST original")
X_train, y_train = mnist.data[:60000] / 255., mnist.target[:60000]
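The preview stops after loading the training split, and both fetch_mldata and RandomizedPCA have since been removed from scikit-learn. A sketch of the per-pair PCA plots under those caveats, using fetch_openml and PCA(svd_solver="randomized") as stand-ins and only three digit classes for brevity:

import matplotlib.pyplot as plt
from itertools import combinations
from sklearn.decomposition import PCA
from sklearn.datasets import fetch_openml

# fetch_openml is the usual modern replacement for fetch_mldata's MNIST
mnist = fetch_openml("mnist_784", as_frame=False)
X, y = mnist.data / 255., mnist.target  # targets come back as strings

digits = ["0", "1", "2"]  # a small subset; the gist iterates over all pairs
pairs = list(combinations(digits, 2))
fig, axes = plt.subplots(1, len(pairs), figsize=(4 * len(pairs), 4))
for ax, (a, b) in zip(axes, pairs):
    mask = (y == a) | (y == b)
    X_pair, y_pair = X[mask], y[mask]
    # project the two classes jointly onto their first two principal components
    proj = PCA(n_components=2, svd_solver="randomized").fit_transform(X_pair)
    for digit, color in [(a, "C0"), (b, "C1")]:
        sel = y_pair == digit
        ax.scatter(proj[sel, 0], proj[sel, 1], s=2, c=color, label=digit)
    ax.set_title("%s vs %s" % (a, b))
    ax.legend()
plt.tight_layout()
plt.show()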