
anirudhshenoy / final_code.py
Created December 13, 2019 12:27
Code for the Blog at:
from torch import nn
from time import time
import torch
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
from skimage.util.shape import view_as_windows
torch.manual_seed(42)
anirudhshenoy / memory_strided_im2col.py
Last active December 13, 2019 10:22
Im2Col with Memory Strides
def memory_strided_im2col(x, kernel):
    # Assuming padding = 0, stride = 1
    output_shape = (x.shape[0] - kernel.shape[0]) + 1
    # view_as_windows yields every kernel-sized window without copying;
    # reshape flattens each window into one row of the im2col matrix.
    # Note: the row width is kernel.shape[0] ** 2 (window area), not * 2.
    return view_as_windows(x, kernel.shape).reshape(output_shape * output_shape,
                                                    kernel.shape[0] ** 2)
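
Once the input is unrolled this way, the whole convolution collapses into a single matrix-vector product. A usage sketch (assumes the imports from final_code.py above and square 2D NumPy inputs):

import numpy as np
from skimage.util.shape import view_as_windows

x = np.random.rand(8, 8)
kernel = np.random.rand(3, 3)

cols = memory_strided_im2col(x, kernel)        # shape: (36, 9)
out = (cols @ kernel.flatten()).reshape(6, 6)  # 8 - 3 + 1 = 6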
anirudhshenoy / naive_im2col.py
Last active December 13, 2019 10:07
Naive Implementation of Im2Col
def im2col(x, kernel):
    kernel_shape = kernel.shape[0]
    rows = []
    # Assuming padding = 0, stride = 1
    # (the original range(x.shape[0] - 1) only holds for 2x2 kernels;
    # the general bound is x.shape[0] - kernel_shape + 1)
    for row in range(x.shape[0] - kernel_shape + 1):
        for col in range(x.shape[1] - kernel_shape + 1):
            window = x[row: row + kernel_shape, col: col + kernel_shape]
            rows.append(window.flatten())
    return np.array(rows)  # preview truncates here; each row is one flattened window
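
The two im2col variants should agree, and the strided one skips the Python-level loops. A quick check and timing sketch (the array sizes are arbitrary):

import numpy as np
from time import time

x = np.random.rand(256, 256)
kernel = np.random.rand(3, 3)

assert np.allclose(im2col(x, kernel), memory_strided_im2col(x, kernel))

start = time(); im2col(x, kernel); naive_t = time() - start
start = time(); memory_strided_im2col(x, kernel); strided_t = time() - start
print(f'naive: {naive_t:.4f}s  strided: {strided_t:.4f}s')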
anirudhshenoy / conv_2d.py
Created December 12, 2019 12:21
2D Convolution
def conv_2d(x, kernel, bias):
    kernel_shape = kernel.shape[0]
    # Assuming padding = 0, stride = 1
    output_shape = x.shape[0] - kernel_shape + 1
    result = np.zeros((output_shape, output_shape))
    # Loop over every valid output position (the original range(x.shape[0] - 1)
    # is only correct for 2x2 kernels)
    for row in range(output_shape):
        for col in range(output_shape):
            window = x[row: row + kernel_shape, col: col + kernel_shape]
            # preview truncates here; the elementwise multiply-accumulate step:
            result[row, col] = np.sum(window * kernel) + bias
    return result
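
A quick equivalence check ties the three gists together: the loop-based conv_2d and the im2col matrix product should give the same output (a sketch assuming the functions above and square NumPy inputs):

import numpy as np

x = np.random.rand(8, 8)
kernel = np.random.rand(3, 3)

direct = conv_2d(x, kernel, bias=0.0)
via_im2col = (memory_strided_im2col(x, kernel) @ kernel.flatten()).reshape(6, 6)
assert np.allclose(direct, via_im2col)  # both compute the same valid cross-correlation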
anirudhshenoy / pseudo_labeler_sklearn.py
Created December 3, 2019 06:29
Pseudo Labeling Wrapper for Sklearn
# Concept similar to : https://www.analyticsvidhya.com/blog/2017/09/pseudo-labelling-semi-supervised-learning-technique/
class pseudo_labeling():
    def __init__(self, model, unlabelled_data, sample_rate=0.01,
                 upper_threshold=0.7, lower_threshold=0.3, verbose=False):
        self.sample_rate = sample_rate
        self.model = model
        self.unlabelled_data = unlabelled_data
        self.verbose = verbose
        self.upper_threshold = upper_threshold
        self.lower_threshold = lower_threshold  # preview truncates after this assignment
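
The preview cuts off before the training logic. A minimal sketch of the pseudo-labeling idea such a wrapper implements (a hypothetical helper, not the gist's actual method; assumes a scikit-learn binary classifier with predict_proba):

import numpy as np

def fit_with_pseudo_labels(wrapper, X, y):
    # Hypothetical illustration of the concept, not the gist's implementation
    wrapper.model.fit(X, y)
    probs = wrapper.model.predict_proba(wrapper.unlabelled_data)[:, 1]
    # Keep only unlabelled points the model is confident about
    confident = (probs > wrapper.upper_threshold) | (probs < wrapper.lower_threshold)
    pseudo_y = (probs[confident] > 0.5).astype(int)
    # Retrain on the labelled data plus the confidently pseudo-labelled points
    wrapper.model.fit(np.vstack([X, wrapper.unlabelled_data[confident]]),
                      np.concatenate([y, pseudo_y]))
    return wrapper.model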
anirudhshenoy / semi_supervised_training.py
Last active December 2, 2019 07:13
Semi Supervised Training for MNIST
# Based on https://github.com/peimengsui/semi_supervised_mnist
from tqdm import tqdm_notebook
T1 = 100
T2 = 700
af = 3
def alpha_weight(step):
    if step < T1:
        return 0.0
    # preview truncates here; the rest of the standard pseudo-label ramp schedule:
    elif step > T2:
        return af
    else:
        return ((step - T1) / (T2 - T1)) * af
import torch
from torch import nn
import torch.nn.functional as F
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(1, 20, kernel_size=5)
        self.conv2 = nn.Conv2d(20, 40, kernel_size=5)
        self.conv2_drop = nn.Dropout2d()
        # preview truncates here; a plausible completion for 28x28 MNIST inputs
        self.fc1 = nn.Linear(640, 150)  # 40 channels * 4 * 4 after two conv+pool stages
        self.fc2 = nn.Linear(150, 10)

    def forward(self, x):
        x = F.relu(F.max_pool2d(self.conv1(x), 2))
        x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
        x = x.view(-1, 640)
        x = F.relu(self.fc1(x))
        return self.fc2(x)
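
Downstream of the preview, the training loop combines the supervised loss with a pseudo-label loss scaled by alpha_weight. A minimal sketch of that step, using the imports above (the names and optimizer settings are assumptions, not the gist's verbatim code):

model = Net()
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

def semi_supervised_step(x_labelled, y_labelled, x_unlabelled, step):
    # Pseudo-labels come from the model's own current predictions
    with torch.no_grad():
        pseudo = model(x_unlabelled).argmax(dim=1)
    loss = F.cross_entropy(model(x_labelled), y_labelled) + \
           alpha_weight(step) * F.cross_entropy(model(x_unlabelled), pseudo)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    return loss.item()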
anirudhshenoy / hyperopt_stacking.py
Created November 19, 2019 08:32
Running hyperopt for a stacking classifier
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
def run_voting_clf(model_weights):
    y_pred_prob = 0
    # Weighted average of each model's positive-class probability
    for model_name, model in model_dict.items():
        y_pred_prob += model.predict_proba(test_features)[:, 1] * model_weights[model_name]
    y_pred_prob += simple_nn.predict(test_features.todense()).ravel() * model_weights['simple_nn']
    y_pred_prob /= sum(model_weights.values())
    f1 = print_model_metrics(y_test, y_pred_prob, return_metrics=True, verbose=0)[0]
    # preview truncates here; hyperopt minimizes, so return the negated F1
    return {'loss': -f1, 'status': STATUS_OK}
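
To actually search the weight space, the objective plugs into fmin. A usage sketch (the uniform [0, 1] bounds and max_evals value are assumptions):

space = {name: hp.uniform(name, 0, 1)
         for name in list(model_dict.keys()) + ['simple_nn']}
trials = Trials()
best_weights = fmin(fn=run_voting_clf, space=space,
                    algo=tpe.suggest, max_evals=100, trials=trials)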
anirudhshenoy / stacking.py
Created November 19, 2019 08:18
Stacking all the models
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import SGDClassifier  # missing in the preview; needed for lr below
from xgboost import XGBClassifier
from sklearn.svm import SVC

# Define all models
lr = SGDClassifier(loss='log', alpha=0.1, penalty='elasticnet')
svm = SVC(C=10, kernel='poly', degree=2, probability=True)
nb = MultinomialNB(alpha=10000, class_prior=[0.5, 0.5])
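
The preview cuts off before the remaining models. A plausible continuation that gathers everything into the model_dict consumed by run_voting_clf above (the rf/knn/xgb hyperparameters and the train_features/y_train names are placeholders, not the gist's values):

rf = RandomForestClassifier(n_estimators=100)  # placeholder hyperparameters
knn = KNeighborsClassifier(n_neighbors=5)      # placeholder hyperparameters
xgb = XGBClassifier()                          # placeholder hyperparameters

model_dict = {'lr': lr, 'svm': svm, 'nb': nb, 'rf': rf, 'knn': knn, 'xgb': xgb}
for name, model in model_dict.items():
    model.fit(train_features, y_train)  # assumed names from the blog's pipeline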
anirudhshenoy / simple_mlp.py
Created November 19, 2019 07:55
2 Layer MLP for tabular data
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import RMSprop, Adam
from tensorflow.keras.callbacks import ModelCheckpoint
batch_size = 128
epochs = 30
simple_nn = Sequential()
simple_nn.add(Dense(150, activation='relu', input_shape=(119,)))
simple_nn.add(Dropout(0.2))                    # preview truncates here; plausible completion
simple_nn.add(Dense(1, activation='sigmoid'))  # binary output to match the predict_proba usage above
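
Compiling and training the network; a usage sketch (the checkpoint path, optimizer choice, and validation split are assumptions):

simple_nn.compile(loss='binary_crossentropy', optimizer=Adam(), metrics=['accuracy'])
checkpoint = ModelCheckpoint('simple_nn.h5', save_best_only=True)
simple_nn.fit(train_features.todense(), y_train,
              batch_size=batch_size, epochs=epochs,
              validation_split=0.1, callbacks=[checkpoint])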