emilemathieu / makefile (created October 30, 2020): makefile for LaTeX compilation
LATEX=pdflatex
LATEXOPT=--shell-escape
NONSTOP=--interaction=batchmode # alternative: --interaction=nonstopmode
LATEXMK=latexmk
LATEXMKOPT=-pdf
MAIN=main

# rebuild from scratch: clean, then compile via latexmk driving pdflatex
all: clean $(MAIN).pdf

$(MAIN).pdf: $(MAIN).tex
	$(LATEXMK) $(LATEXMKOPT) -pdflatex="$(LATEX) $(LATEXOPT) $(NONSTOP) %O %S" $(MAIN)

clean:
	$(LATEXMK) -C $(MAIN)
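Running make removes stale build artifacts and recompiles main.pdf; latexmk re-runs pdflatex as many times as needed to resolve cross-references and citations.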
import io
import math
import os

import numpy as np
import pandas as pd
import torch
import yaml
from scipy import stats

from geoflow.utils import walklevel
from utils import (query, load_experiments, lower_ci, upper_ci, inter_ci,
                   mean, convert_to_latex, compute_ci_and_format)
def process_data(data):
    # rows_name: mapping from raw column keys to display names,
    # assumed to be defined at module level (not shown in this excerpt)
    data = data.unstack(level=-1)                   # move the last index level to columns
    data.columns = data.columns.droplevel(level=0)  # drop the residual outer column level
    bold_rows_name = dict((key, "\\bf " + value) for key, value in rows_name.items())  # LaTeX-bold display names
    data = data.reindex(columns=list(rows_name.keys()))
    return data
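For intuition, here is roughly what the unstack/droplevel steps do on a toy MultiIndex frame (the rows_name mapping and the values are made up):

import pandas as pd

rows_name = {'acc': 'Accuracy', 'nll': 'NLL'}  # hypothetical mapping

idx = pd.MultiIndex.from_product([['model_a', 'model_b'], ['acc', 'nll']])
data = pd.DataFrame({'value': [0.91, 0.35, 0.88, 0.41]}, index=idx)

data = data.unstack(level=-1)                   # metrics become columns
data.columns = data.columns.droplevel(level=0)  # drop the 'value' level
data = data.reindex(columns=list(rows_name.keys()))
print(data)  # rows: models; columns: acc, nll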
class CrossEntropyLoss(object):
    """ Mean softmax cross-entropy over a batch of raw scores. """
    def __call__(self, Y, labels):
        loss = 0
        for i, y in enumerate(Y):
            # -log softmax(y)[label] == -y[label] + log(sum(exp(y)))
            loss += - y[labels[i]] + np.log(np.sum(np.exp(y)))
        return loss / len(labels)

    def grad(self, Y, labels):
        output_grad = np.empty_like(Y)
        for i, y in enumerate(Y):
            output_grad[i] = np.exp(y) / np.sum(np.exp(y))  # softmax(y)
            output_grad[i, labels[i]] -= 1                  # subtract the one-hot target
        return output_grad / len(labels)
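A quick sanity check on a made-up batch of three samples and four classes (the score values are arbitrary):

import numpy as np

Y = np.array([[2.0, 1.0, 0.1, -1.0],
              [0.5, 2.5, 0.3, 0.0],
              [1.0, 0.2, 3.0, 0.1]])
labels = np.array([0, 1, 2])

criterion = CrossEntropyLoss()
print(criterion(Y, labels))       # scalar: mean negative log-likelihood
print(criterion.grad(Y, labels))  # (softmax - one_hot) / batch size, shape (3, 4)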
class Linear(Module):
    """ Applies a linear transformation to the incoming data: y = Ax + b
    Parameters
    ----------
    in_features : int
        size of each input sample
    out_features : int
        size of each output sample
    Variables
    ----------
    _weight : ndarray of shape (out_features, in_features)
        the learnable weights of the module
    _bias : ndarray of shape (out_features,)
        the learnable bias of the module
    """
# Elsewhere in the gist, the optimizer is applied to each layer's parameters;
# it maps (current value, gradient) to an updated value, keyed by layer id and name:
layer._weight = optimizer(id(layer), 'weight', layer._weight, layer._grad_weight)
layer._bias = optimizer(id(layer), 'bias', layer._bias, layer._grad_bias)
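The excerpt stops before Linear's method bodies. Below is a minimal sketch consistent with the shapes documented above; the init scale and input caching are assumptions, and Module is the gist's base class (definition not shown):

import numpy as np

class Linear(Module):
    def __init__(self, in_features, out_features):
        self._weight = np.random.randn(out_features, in_features) * 0.1  # assumed init scale
        self._bias = np.zeros(out_features)

    def forward(self, X):
        self._input = X  # cache the input for use in a backward pass
        return X @ self._weight.T + self._bias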
import torch.nn as nn

class MyNet(nn.Module):
    def __init__(self):
        super().__init__()  # must be called before assigning submodules
        self.features = nn.Sequential(
            nn.Conv2d(1, 10, kernel_size=5),
            nn.MaxPool2d(2, 2),
            nn.ReLU(),
            nn.Conv2d(10, 20, kernel_size=5),
            nn.MaxPool2d(2, 2),
            nn.ReLU()
        )

    def forward(self, x):
        return self.features(x)
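A quick shape check on a dummy MNIST-sized batch: with two 5x5 convolutions and two 2x2 poolings, 28x28 inputs come out as 20 channels of 4x4:

import torch

net = MyNet()
x = torch.randn(4, 1, 28, 28)  # (batch, channels, height, width)
print(net(x).shape)            # torch.Size([4, 20, 4, 4])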
class Optimizer(object):
    def __init__(self):
        self.state = {}  # per-parameter state, keyed by (layer_id, weight_type)

    def __call__(self, layer_id, weight_type, value, grad):
        raise NotImplementedError()

class SGD(Optimizer):
    def __init__(self, lr=0.1, momentum=0):
        super().__init__()
        self.lr = lr
        self.momentum = momentum

    def __call__(self, layer_id, weight_type, value, grad):
        # classic momentum update: v <- momentum * v - lr * grad, then value + v
        velocity = self.momentum * self.state.get((layer_id, weight_type), 0.0) - self.lr * grad
        self.state[(layer_id, weight_type)] = velocity
        return value + velocity
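A hedged sketch of how an SGD instance drives the per-layer update lines shown under Linear; the stand-in layer and its values are made up:

import numpy as np

optimizer = SGD(lr=0.01, momentum=0.9)

class _Layer:  # stand-in object with one weight matrix and its gradient
    pass

layer = _Layer()
layer._weight = np.ones((2, 3))
layer._grad_weight = np.full((2, 3), 0.5)

layer._weight = optimizer(id(layer), 'weight', layer._weight, layer._grad_weight)
print(layer._weight)  # first step moves each entry by -lr * grad = -0.005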
class Sequential(Module):
    """ Special instance of neural network which can be constructed as a sequence of layers
    """
    def __init__(self, *modules):
        self._modules = list(modules)

    def forward(self, X):
        for module in self._modules:
            X = module.forward(X)
        return X
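Putting the NumPy pieces together, a quick end-to-end forward pass using the hypothetical Linear sketched above (shapes and labels are illustrative):

import numpy as np

net = Sequential(Linear(4, 8), Linear(8, 3))
X = np.random.randn(5, 4)          # batch of 5 samples, 4 features each
scores = net.forward(X)            # shape (5, 3)

criterion = CrossEntropyLoss()
labels = np.array([0, 2, 1, 1, 0])
print(criterion(scores, labels))   # scalar: mean loss over the batch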