Andy Brock (ajbrock), GitHub gists
# Manual BN
# Calculate mean and variance as mean-of-squares minus mean-squared
import torch

def manual_bn(x, gain=None, bias=None, return_mean_var=False, eps=1e-5):
    # Calculate expected value of x (m) and expected value of x**2 (m2)
    m = torch.mean(x, [0, 2, 3], keepdim=True)
    m2 = torch.mean(x ** 2, [0, 2, 3], keepdim=True)
    # Calculate variance as mean of squares minus mean squared
    var = m2 - m ** 2
    # Normalize, then optionally apply gain and bias (completion inferred
    # from the signature; the gist body is truncated in this listing)
    out = (x - m) / torch.sqrt(var + eps)
    if gain is not None:
        out = out * gain
    if bias is not None:
        out = out + bias
    if return_mean_var:
        return out, m, var
    return out
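
# Quick sanity check (hypothetical usage, not part of the gist): after
# normalization each channel should have mean ~0 and variance ~1.
x = torch.randn(8, 16, 32, 32)
y = manual_bn(x)
print(y.mean([0, 2, 3]), y.var([0, 2, 3], unbiased=False))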
import numpy as np

# Corpus available here: https://pastebin.com/WqD6fAgu
# Corpus taken from https://dominionstrategy.com/all-cards/

# Read all cards into memory
with open('dominion_cards.html', 'r') as rfile:
    x = rfile.readlines()

# Convenience function to count words, used later
# (simple whitespace split; body inferred, truncated in this listing)
def count_words(text):
    return len(text.split())
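
# For example (hypothetical usage), the total word count across the corpus:
total_words = sum(count_words(line) for line in x)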
import torch

# Dict to store hooks and flop count
data_dict = {'conv_flops': 0, 'hooks': []}

def count_conv_flops(self, input, output):
    # Flop contribution from channelwise connections
    flops_c = self.out_channels * self.in_channels / self.groups
    # Flop contribution from number of spatial locations we convolve over
    flops_s = output.size(2) * output.size(3)
    # Flop contribution from the kernel's spatial extent (inferred completion)
    flops_k = self.kernel_size[0] * self.kernel_size[1]
    # Accumulate this layer's total into the running count
    data_dict['conv_flops'] += flops_c * flops_s * flops_k
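
# A minimal registration sketch (assuming a standard nn.Module model; this
# loop and the name add_flop_hooks are not part of the original snippet):
import torch.nn as nn

def add_flop_hooks(model):
    # Attach the counter to every Conv2d and keep the handles for later removal
    for module in model.modules():
        if isinstance(module, nn.Conv2d):
            data_dict['hooks'].append(module.register_forward_hook(count_conv_flops))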
## Wide ResNet with Shift and incorrect hyperparams.
# Based on code by xternalz: https://github.com/xternalz/WideResNet-pytorch
# WRN by Sergey Zagoruyko and Nikos Komodakis
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable as V
import torch.optim as optim
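
# The gist body is truncated in this listing. For context, a "shift" layer in
# the sense of Wu et al., "Shift: A Zero FLOP, Zero Parameter Alternative to
# Spatial Convolutions" (arXiv:1711.08141) can be sketched as grouped one-pixel
# translations of the feature map. This sketch is an assumption about the Shift
# op, not code recovered from the gist:
def shift(x, groups=4):
    out = torch.zeros_like(x)
    c = x.size(1) // groups
    out[:, 0*c:1*c, :, 1:] = x[:, 0*c:1*c, :, :-1]   # shift right
    out[:, 1*c:2*c, :, :-1] = x[:, 1*c:2*c, :, 1:]   # shift left
    out[:, 2*c:3*c, 1:, :] = x[:, 2*c:3*c, :-1, :]   # shift down
    out[:, 3*c:4*c, :-1, :] = x[:, 3*c:4*c, 1:, :]   # shift up
    out[:, 4*c:] = x[:, 4*c:]                        # any remainder unshifted
    return out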
# PyTorch code for implementing the mixture of softmaxes layer from
# "Breaking the Softmax Bottleneck: A High-Rank RNN Language Model"
# https://arxiv.org/abs/1711.03953
import torch
import torch.nn.functional as F

context = self.fc(out)
# Non-log version
priors = F.softmax(context[:, -self.n_components:], dim=1)
mixtures = torch.stack([priors[:, i].unsqueeze(1)
                        * F.softmax(context[:, i * self.nClasses:(i + 1) * self.nClasses], dim=1)
                        for i in range(self.n_components)], 1)
out = torch.log(mixtures.sum(1))
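
# A numerically stabler log-space variant (a sketch reusing the names above;
# not part of the original gist): the log-probability follows from
# logsumexp_i(log prior_i + log softmax_i).
log_priors = F.log_softmax(context[:, -self.n_components:], dim=1)
log_mixtures = torch.stack([log_priors[:, i].unsqueeze(1)
                            + F.log_softmax(context[:, i * self.nClasses:(i + 1) * self.nClasses], dim=1)
                            for i in range(self.n_components)], 1)
out = torch.logsumexp(log_mixtures, 1)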
x=["SyBPtQfAZ","H1S8UE-Rb","S1sRrN-CW","Syt0r4bRZ","HkPCrEZ0Z","rJ5C67-C-","H1T2hmZAb","Hymt27b0Z",
"HJ5AUm-CZ","r1nzLmWAb","HkGJUXb0-","SkERSm-0-","BJlrSmbAZ","HJXyS7bRb","SyhRVm-Rb","SkwAEQbAb",
"B1mvVm-C-","S1TgE7WR-","H1DkN7ZCZ","SJ71VXZAZ","ryk77mbRZ","HJIhGXWCZ","BJInMmWC-","H1I3M7Z0b",
"Bk-ofQZRb","SJx9GQb0-","BJoBfQ-0b","SJyVzQ-C-","HJNGGmZ0Z","H1kMMmb0-","HkGbzX-AW","rJIgf7bAZ",
"SyCyMm-0W","r1ayG7WRZ","H1Nyf7W0Z","HkCvZXbC-","ByED-X-0W","ByuI-mW0W","H1BHbmWCZ","SkqV-XZRZ",
"rk07ZXZRb","HJCXZQbAZ","H1bbbXZC-","rkaqxm-0b","S1XolQbRW","B1TYxm-0-","Bkftl7ZCW","SyBBgXWAZ",
"SkrHeXbCW","S1ANxQW0b","ByOExmWAb","By4Nxm-CW","r1l4eQW0Z","B12QlQWRW","ry831QWAb","B1EGg7ZCb",
"HyMTkQZAb","rJ6iJmWCW","rkZB1XbRZ","HJnQJXbC-","Sy3fJXbA-","HJ8W1Q-0Z","HknbyQbC-","BkrsAzWAb",
"ryH20GbRW","r1HhRfWRZ","B1KFAGWAZ","Byht0GbRZ","B1hYRMbCW","S1q_Cz-Cb","BJ7d0fW0b","HyydRMZC-",
"SyZI0GWCZ","rJSr0GZR-","ryZERzWCZ","rkeZRGbRW","ryazCMbR-","Hyig0zb0Z","H11lAfbCW","HkXWCMbRW",
%% knn Custom K-Nearest Neighbors Function
% class = knn_all(x,labels,K,weighted)
%
% A Brock, 29.9.15
%
% This function is a modification of the knn function to compute the KNN class of
% each member of a dataset. It uses parallel processing to quickly compute the class
% of each member of x based on that member's K nearest neighbors within x. The user
% can choose whether to use the weighted version of the algorithm, which allows the
% K nearest neighbors to "vote" for the desired class, with the weight of each
% vote scaled by that neighbor's proximity to the query point.
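
# A NumPy illustration of the same self-classification scheme (a hypothetical
# Python port for reference; the MATLAB function above is truncated in this listing):
import numpy as np

def knn_all(x, labels, K, weighted=False):
    # Pairwise distances; a point never votes for itself
    d = np.linalg.norm(x[:, None, :] - x[None, :, :], axis=-1)
    np.fill_diagonal(d, np.inf)
    nn = np.argsort(d, axis=1)[:, :K]  # indices of each point's K nearest neighbors
    preds = np.empty(len(x), dtype=labels.dtype)
    for i in range(len(x)):
        votes = labels[nn[i]]
        # Weighted voting scales each vote by inverse distance
        w = 1.0 / d[i, nn[i]] if weighted else np.ones(K)
        classes = np.unique(votes)
        preds[i] = classes[np.argmax([w[votes == c].sum() for c in classes])]
    return preds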
# Inception Score Calculator
#
# A Brock, 2017
#
# This snippet assumes you have two functions defined:
# 1. sample_net, which takes in a batch x num_latents random vector and returns batch samples,
# 2. eval_net, which takes in batch samples and returns a batch x #classes prediction vector.
num_latents = 100
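
# A minimal sketch of the score itself under those assumptions (sample count
# and batching here are illustrative, and eval_net is assumed to return
# probabilities; this is not the original gist body):
import numpy as np
import torch

num_samples, batch_size = 50000, 100
preds = []
for _ in range(num_samples // batch_size):
    z = torch.randn(batch_size, num_latents)
    preds.append(eval_net(sample_net(z)).detach().cpu().numpy())
preds = np.concatenate(preds, 0)
# IS = exp( E_x[ KL( p(y|x) || p(y) ) ] )
p_y = preds.mean(0, keepdims=True)
kl = (preds * (np.log(preds + 1e-10) - np.log(p_y + 1e-10))).sum(1)
inception_score = np.exp(kl.mean())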
# Resample.py
# Andrew Brock, 2017
# This code resamples a 3d grid using catmull-rom spline interpolation, and is GPU accelerated.
# Resample along the trailing dimension
# Assumes a more-than-1D array? Or does it directly assume a 3D array? We'll find out.
#
# TODO: Some things could be shared (such as the mgrid call, which can presumably be done once? hmm)
# between resample1d calls.
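
# For reference, the 1D Catmull-Rom basis the resampler builds on (a standalone
# NumPy sketch; the gist itself is GPU-accelerated and works on 3D grids):
import numpy as np

def catmull_rom_1d(p0, p1, p2, p3, t):
    # Interpolate between p1 and p2 at parameter t in [0, 1], using the two
    # surrounding control points p0 and p3 (standard Catmull-Rom weights)
    return 0.5 * (2 * p1
                  + (-p0 + p2) * t
                  + (2 * p0 - 5 * p1 + 4 * p2 - p3) * t ** 2
                  + (-p0 + 3 * p1 - 3 * p2 + p3) * t ** 3)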