Skip to content

Instantly share code, notes, and snippets.

View CustomScikitLearnClass.py
from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin
class CustomClassifier(BaseEstimator, ClassifierMixin):
    """Skeleton for a scikit-learn compatible classifier.

    Stub implementation: ``__init__`` declares no hyperparameters and
    ``fit`` is a no-op placeholder.
    """

    def __init__(self):
        # no hyperparameters yet
        pass

    def fit(self, X, y=None):
        # placeholder — training logic not present in the visible snippet
        pass
View TreeEmbeddingLogisticRegression.py
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import OneHotEncoder
class TreeEmbeddingLogisticRegression(BaseEstimator, ClassifierMixin):
    """Logistic regression fit on tree-embedding features.

    Captures all keyword arguments verbatim; presumably they configure
    the underlying boosting / logistic models — TODO confirm against the
    rest of the class (snippet is truncated here).
    """

    def __init__(self, **kwargs):
        # store the raw kwargs for later use
        self.kwargs = kwargs
View ExponentialDecayRegressor.py
from scipy.optimize import curve_fit
from sklearn.base import BaseEstimator, RegressorMixin
import statsmodels.api as sm
class ExponentialDecayRegressor(BaseEstimator, RegressorMixin):
    """Fits an exponential decay curve.

    Parameters
    ----------
    starting_values : sequence of float, optional
        Initial parameter guesses for the curve fit; defaults to
        ``[1.0, 1e-5, 1.0]``.
    **kwargs
        Extra options (not stored in the visible snippet).
    """

    def __init__(self, starting_values=None, **kwargs):
        # Fixes two defects in the original signature:
        #   1. a trailing comma after **kwargs, which is a SyntaxError
        #      (`def __init__(self, ..., **kwargs,)` does not parse);
        #   2. a mutable list default ([1., 1.e-5, 1.]) shared across
        #      every instance — replaced with the None-sentinel idiom.
        # Passing the same values explicitly behaves exactly as before.
        if starting_values is None:
            starting_values = [1.0, 1.0e-5, 1.0]
        self.starting_values = starting_values
View QuantileRegression.py
from sklearn.base import BaseEstimator, RegressorMixin
import statsmodels.api as sm
class QuantileRegression(BaseEstimator, RegressorMixin):
    """Scikit-learn style wrapper around statsmodels quantile regression."""

    def __init__(self, quantile=0.5, **kwargs):
        # which conditional quantile to fit (0.5 == the median)
        self.quantile = quantile
        # extra options, presumably forwarded to statsmodels — TODO confirm
        self.kwargs = kwargs
        # fitted model object; populated later (None until then)
        self.model = None
View ThompsonSampler.py
class ThompsonSampler:
    """Thompson Sampling with one Beta distribution per option.

    Each option's Beta posterior is updated as rewards for that option
    are observed.
    """

    def __init__(self, env, n_learning=0):
        # boilerplate data storage.
        # NOTE(review): n_learning is accepted but not stored in the
        # visible snippet — presumably handled further down (truncated).
        self.env = env
View finetune_cnn_dogbreeds.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from torch.utils.data import Dataset, DataLoader, SubsetRandomSampler
from torchvision.datasets.folder import ImageFolder, default_loader
from torchvision.datasets.utils import check_integrity
from torchvision import transforms
from torchvision import models
import matplotlib.pyplot as plt
View concrete_dropout.py
#!/usr/bin/env python3
# author: Conor McDonald
# torch==0.4.1
# numpy==1.14.3
import torch
import torch.nn as nn
import torch.nn.functional as F
View thompsonsamper.py
# Thompson Sampling bandit built on BaseSampler (defined elsewhere in the gist).
# NOTE(review): indentation was stripped by the page scrape; code kept byte-identical.
class ThompsonSampler(BaseSampler):
def __init__(self, env):
# all setup is delegated to BaseSampler
super().__init__(env)
# Choose an arm by drawing one sample from each arm's Beta(a, b) posterior.
def choose_k(self):
# sample from posterior (this is the thompson sampling approach)
# this leads to more exploration because machines with > uncertainty can then be selected as the machine
self.theta = np.random.beta(self.a, self.b)
# select machine with highest posterior p of payout
# NOTE(review): snippet appears truncated here — the argmax/selection line is not visible
View egreedy.py
# Epsilon-greedy bandit built on BaseSampler (defined elsewhere in the gist).
# NOTE(review): indentation was stripped by the page scrape; code kept byte-identical.
class eGreedy(BaseSampler):
def __init__(self, env, n_learning, e):
super().__init__(env, n_learning, e)
# Explore with a random arm during the first n_learning trials,
# afterwards exploit the arm with the highest estimated theta.
def choose_k(self):
# e% of the time take a random draw from machines
# random k for n learning trials, then the machine with highest theta
self.k = np.random.choice(self.variants) if self.i < self.n_learning else np.argmax(self.theta)
# NOTE(review): no return statement visible — snippet may be truncated (cf. RandomSampler.choose_k in this gist)
View RandomSampler.py
class RandomSampler(BaseSampler):
    """Baseline bandit policy: pick an arm uniformly at random."""

    def __init__(self, env):
        super().__init__(env)

    def choose_k(self):
        # uniform draw over the available arms; no learning involved
        self.k = np.random.choice(self.variants)
        return self.k