Skip to content

Instantly share code, notes, and snippets.

View mehdidc's full-sized avatar

Mehdi Cherti mehdidc

View GitHub Profile
from sklearn.svm import SVR
def smooth_image(x, y, z, w=100, h=100, model=SVR()):
    """Fit a regressor on scattered samples (x, y) -> z and resample it on a
    regular w x h grid to produce a smoothed image.

    NOTE(review): `model=SVR()` is a mutable default argument — the same
    estimator instance (and its fitted state) is shared across every call;
    prefer `model=None` plus `model = SVR() if model is None else model`.
    Also assumes `np` (numpy) is imported at module level — not visible in
    this fragment; confirm against the full gist.
    """
    # Stack the scattered coordinates into an (n_samples, 2) design matrix.
    X = np.vstack((x, y)).T
    model.fit(X, z)
    # Regular grid spanning the bounding box of the input points.
    x, y = np.meshgrid(
        np.linspace(x.min(), x.max(), w),
        np.linspace(y.min(), y.max(), h)
    )
    x = x.flatten()
    # NOTE(review): the gist preview is truncated here — the flattening of
    # `y`, the prediction over the grid, and the return value are not visible.
# Code adapted from https://github.com/kylemcdonald/Parametric-t-SNE
import numpy as np
import theano.tensor as T
def Hbeta(D, beta):
    """Compute the Shannon entropy H and probability row P induced by the
    distance row `D` at precision `beta` (t-SNE perplexity helper).

    Returns the pair (H, P) where P is the normalized Gaussian affinity
    vector exp(-beta * D) / sum(exp(-beta * D)).
    """
    # Unnormalized Gaussian affinities for this precision.
    weights = np.exp(-beta * D)
    total = np.sum(weights)
    # Entropy of the normalized distribution:
    # H = log(sum w) + beta * <D, w> / sum w.
    entropy = np.log(total) + beta * np.sum(D * weights) / total
    return entropy, weights / total
@mehdidc
mehdidc / ranking.py
Created June 27, 2016 01:48 — forked from agramfort/ranking.py
Pairwise ranking using scikit-learn LinearSVC
"""
Implementation of pairwise ranking using scikit-learn LinearSVC
Reference: "Large Margin Rank Boundaries for Ordinal Regression", R. Herbrich,
T. Graepel, K. Obermayer.
Authors: Fabian Pedregosa <fabian@fseoane.net>
Alexandre Gramfort <alexandre.gramfort@inria.fr>
"""
from nolearn.lasagne import NeuralNet, BatchIterator
from lasagne import layers, nonlinearities, updates, init, objectives
import numpy as np
class EarlyStopping(object):
    # nolearn/lasagne training callback intended to stop training once the
    # monitored criterion stops improving for `patience` consecutive epochs.
    # NOTE(review): the class body is truncated in this gist preview — the
    # `if` branch below has no visible body, so everything past this point
    # (best-score tracking, the stop condition) cannot be confirmed here.
    def __init__(self, patience=100, criterion='valid_loss',
                 criterion_smaller_is_better=True):
        # patience: number of epochs without improvement tolerated before
        # training is halted.
        self.patience = patience
        if criterion_smaller_is_better is True:
from nolearn.lasagne import NeuralNet, BatchIterator
from lasagne import layers, nonlinearities, updates, init, objectives
from nolearn.lasagne.base import objective
from lasagne.objectives import aggregate
from lasagne.regularization import regularize_layer_params, l2, l1
import numpy as np
from keras.layers import Dense, Input, Dropout
from keras.models import Sequential
from keras.optimizers import Adadelta
from sklearn.datasets import make_blobs
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.regularizers import l2, l1
import matplotlib.pyplot as plt
from keras.layers import Dense, Input, Dropout
from keras.models import Sequential
from keras.optimizers import Adadelta
from sklearn.datasets import make_blobs
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.regularizers import l2, l1
import matplotlib.pyplot as plt
from keras.layers import Dense, Input, Dropout
from keras.models import Sequential
from keras.optimizers import Adadelta
from sklearn.datasets import make_blobs
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping, ModelCheckpoint, LearningRateScheduler
from keras.regularizers import l2, l1
import matplotlib.pyplot as plt
from nolearn.lasagne import NeuralNet, BatchIterator
from lasagne import layers, nonlinearities, updates, init, objectives
import numpy as np
import theano
class EarlyStopping(object):
    # Duplicate of the EarlyStopping callback from an earlier gist on this
    # page: stops nolearn/lasagne training after `patience` epochs without
    # improvement of the monitored criterion.
    # NOTE(review): truncated in this preview — only the start of __init__
    # is visible; `criterion` / `criterion_smaller_is_better` handling and
    # the callback logic are cut off.
    def __init__(self, patience=100, criterion='valid_loss',
                 criterion_smaller_is_better=True):
        # patience: epochs without improvement tolerated before stopping.
        self.patience = patience
from skopt import gp_minimize, forest_minimize, dummy_minimize, gbrt_minimize
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
from skopt.benchmarks import branin
# Seed evaluations for the optimizer: three candidate points and their
# objective values under the 2-D branin benchmark.
x0 = [[1, 2], [3, 4], [5, 6]]
# Bug fix: under Python 3, map() returns a lazy single-pass iterator;
# skopt's minimizers expect `y0` to be a concrete sequence aligned with
# `x0`, so materialize it as a list (matches the Python 2 behavior the
# original gist relied on).
y0 = list(map(branin, x0))
res = gp_minimize(branin,