Michael Li (wayofnumbers)

wayofnumbers / collect_images_urls_from_google_search
Created September 11, 2019 04:07
JavaScript command to download images from Google Images search results
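The gist body is not shown in this listing. As a hypothetical companion to such a URL-collecting snippet, here is a short Python sketch for the download step, assuming the URLs were saved one per line to urls.txt (the file name and layout are assumptions, not the gist's contents):
# Hypothetical companion sketch: download every URL listed in urls.txt
# (one per line) into a local images/ folder.
import pathlib
import requests

dest = pathlib.Path('images')
dest.mkdir(exist_ok=True)

for i, url in enumerate(pathlib.Path('urls.txt').read_text().splitlines()):
    if not url.strip():
        continue
    try:
        resp = requests.get(url.strip(), timeout=10)
        resp.raise_for_status()
        (dest / f'img_{i:04d}.jpg').write_bytes(resp.content)
    except requests.RequestException:
        pass  # skip URLs that fail to download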
wayofnumbers / download-file-from-kaggle-kernel.py
Last active September 12, 2019 15:27
Download files from Kaggle Kernels
# Put this snippet into your Kaggle kernel; make sure the file you want is under kaggle/working/
import os
os.chdir(r'kaggle/working/')
from IPython.display import FileLink
FileLink(r'export.pkl')  # renders a clickable download link in the notebook output
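If more than one file needs a link, IPython.display also provides FileLinks, which renders a link for every file under a directory; a minimal sketch:
from IPython.display import FileLinks
FileLinks('.')   # one clickable link per file in the current directory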
wayofnumbers / FMNIST-import.py
Created October 22, 2019 21:53
FMNIST-Import
# import standard PyTorch modules
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.tensorboard import SummaryWriter # TensorBoard support
# import torchvision module to handle image manipulation
import torchvision
import torchvision.transforms as transforms
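As a purely illustrative follow-up (not part of the gist), many notebooks pin the versions and seed right after these imports so runs are comparable:
print(torch.__version__, torchvision.__version__)   # confirm the installed versions
torch.manual_seed(50)                  # fix the RNG seed; 50 is an arbitrary choice
torch.set_printoptions(linewidth=120)  # wider tensor printouts for readability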
wayofnumbers / FMNIST-Data.py
Created October 22, 2019 21:54
FMNIST-Data
# Use standard FashionMNIST dataset
train_set = torchvision.datasets.FashionMNIST(
    root = './data/FashionMNIST',
    train = True,
    download = True,
    transform = transforms.Compose([
        transforms.ToTensor()
    ])
)
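A quick, illustrative sanity check on the dataset built above:
print(len(train_set))        # 60000 training images
image, label = train_set[0]
print(image.shape, label)    # torch.Size([1, 28, 28]) and an integer class id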
wayofnumbers / FMNIST-network.py
Created October 22, 2019 21:55
FMNIST-network
# Build the neural network by extending nn.Module
class Network(nn.Module):
    def __init__(self):
        super().__init__()
        # define layers
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5)
        self.conv2 = nn.Conv2d(in_channels=6, out_channels=12, kernel_size=5)
        self.fc1 = nn.Linear(in_features=12*4*4, out_features=120)
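        # The gist preview cuts off here. A sketch of how the class could continue
        # for 28x28 FashionMNIST inputs; the fc2/out sizes are assumptions, not
        # necessarily the gist's exact values.
        self.fc2 = nn.Linear(in_features=120, out_features=60)
        self.out = nn.Linear(in_features=60, out_features=10)   # 10 Fashion-MNIST classes

    def forward(self, t):
        # conv -> relu -> max-pool twice, then flatten and run the fully connected layers
        t = F.max_pool2d(F.relu(self.conv1(t)), kernel_size=2, stride=2)
        t = F.max_pool2d(F.relu(self.conv2(t)), kernel_size=2, stride=2)
        t = F.relu(self.fc1(t.reshape(-1, 12*4*4)))   # 12 channels of 4x4 after pooling
        t = F.relu(self.fc2(t))
        return self.out(t)                            # raw logits; pair with cross_entropy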
wayofnumbers / FMNIST-RunBuilder.py
Created October 22, 2019 21:56
FMNIST-RunBuilder
# import modules to build RunBuilder and RunManager helper classes
from collections import OrderedDict
from collections import namedtuple
from itertools import product
# Read in the hyper-parameters and return a list of Run namedtuples, one for
# each combination of hyper-parameter values
class RunBuilder():
    @staticmethod
    def get_runs(params):
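        # The preview stops at the signature. A plausible body (a sketch, not
        # necessarily the gist's exact code): build a Run namedtuple from the
        # dict keys and iterate the Cartesian product of the values.
        Run = namedtuple('Run', params.keys())
        return [Run(*values) for values in product(*params.values())]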
wayofnumbers / FMNIST-RunManager.py
Created October 22, 2019 21:56
FMNIST-RunManager
# Helper class to track loss, accuracy, epoch time, run time, hyper-parameters,
# etc. Also records to TensorBoard and writes results to csv and json
class RunManager():
    def __init__(self):
        # tracking every epoch: count, loss, accuracy, time
        self.epoch_count = 0
        self.epoch_loss = 0
        self.epoch_num_correct = 0
        self.epoch_start_time = None
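        # The preview ends here. A sketch of how the rest of the tracker could
        # look; the attribute names below are assumptions.
        self.run_params = None        # the current Run namedtuple
        self.run_count = 0
        self.run_data = []            # one dict of results per epoch
        self.run_start_time = None

        self.network = None
        self.loader = None
        self.tb = None                # SummaryWriter instance for TensorBoard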
wayofnumbers / FMNIST-params.py
Created October 22, 2019 21:57
FMNIST-params
# Put all hyper-parameters into an OrderedDict, easily expandable
params = OrderedDict(
    lr = [.01, .001],
    batch_size = [100, 1000],
    shuffle = [True, False]
)
epochs = 3
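For this dict, RunBuilder.get_runs(params) yields the 2 x 2 x 2 = 8 combinations; for example:
runs = RunBuilder.get_runs(params)
print(len(runs))   # 8
print(runs[0])     # e.g. Run(lr=0.01, batch_size=100, shuffle=True)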
wayofnumbers / FMNIST-train-loop.py
Created October 22, 2019 21:58
FMNIST-train-loop
m = RunManager()

# get all runs from params using the RunBuilder class
for run in RunBuilder.get_runs(params):

    # if params changes, the lines below should reflect those changes too
    network = Network()
    loader = torch.utils.data.DataLoader(train_set, batch_size = run.batch_size)
    optimizer = optim.Adam(network.parameters(), lr=run.lr)
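    # The preview stops here. A sketch of how the rest of the loop could proceed;
    # the RunManager method names are assumptions based on the class's stated purpose.
    m.begin_run(run, network, loader)
    for epoch in range(epochs):
        m.begin_epoch()
        for images, labels in loader:
            preds = network(images)               # forward pass
            loss = F.cross_entropy(preds, labels)
            optimizer.zero_grad()
            loss.backward()                       # compute gradients
            optimizer.step()                      # update weights

            m.track_loss(loss)
            m.track_num_correct(preds, labels)
        m.end_epoch()
    m.end_run()
m.save('results')                                 # write the csv/json summaries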
wayofnumbers / base-nn-myLinear.py
Last active October 31, 2019 20:47
Simple PyTorch model showcasing how to build your own nn.Linear
# We'll use fast.ai to showcase how to build your own 'nn.Linear' module
%matplotlib inline
from fastai.basics import *
import sys
# create and download/prepare our MNIST dataset
path = Config().data_path()/'mnist'
path.mkdir(parents=True, exist_ok=True)  # exist_ok so re-running the cell doesn't error
!wget http://deeplearning.net/data/mnist/mnist.pkl.gz -P {path}
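# The preview ends at the download. Consistent with the gist's description, a
# minimal sketch of the next steps: load the pickle, then build a hand-rolled
# nn.Linear with nn.Parameter (names and sizes below are illustrative).
import gzip, pickle, math

with gzip.open(path/'mnist.pkl.gz', 'rb') as f:
    ((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding='latin-1')
x_train, y_train = map(tensor, (x_train, y_train))   # fastai re-exports torch's tensor()

class MyLinear(nn.Module):
    def __init__(self, n_in, n_out):
        super().__init__()
        # trainable weight and bias, registered via nn.Parameter
        self.weight = nn.Parameter(torch.randn(n_out, n_in) / math.sqrt(n_in))
        self.bias = nn.Parameter(torch.zeros(n_out))

    def forward(self, x):
        return x @ self.weight.t() + self.bias   # same affine map nn.Linear computes

model = MyLinear(784, 10)   # 28*28 flattened pixels in, 10 digit classes out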