anna-hope / init_dotfiles.sh
Created November 23, 2023 23:48
Initialize dotfiles on a new computer
git clone --bare git@github.com:anna-hope/dotfiles.git "$HOME/.cfg"
function dotfiles {
    /usr/bin/git --git-dir="$HOME/.cfg/" --work-tree="$HOME" "$@"
}
mkdir -p .dotfiles-backup
dotfiles checkout
if [ $? = 0 ]; then
    echo "Checked out the dotfiles."
else
    echo "Backing up pre-existing dot files."
    # The preview cuts off here; the standard bare-repo recipe moves the
    # conflicting files into the backup dir and retries the checkout:
    dotfiles checkout 2>&1 | grep -E "^\s+\." | awk '{print $1}' | xargs -I{} mv {} .dotfiles-backup/{}
    dotfiles checkout
fi
dotfiles config status.showUntrackedFiles no

anna-hope / random_image.go
Created March 29, 2022 04:22
Random image in Go
package main

import (
    "image"
    "image/color"
    "math/rand"
    "time"

    "golang.org/x/tour/pic"
)
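The preview stops at the imports. A plausible completion consistent with them, in the style of the Go tour's image exercise (the type name and sizes are assumptions, not the gist's actual code):

// Image implements image.Image with a random color per pixel.
type Image struct {
    w, h int
    rng  *rand.Rand
}

func (m Image) ColorModel() color.Model { return color.RGBAModel }

func (m Image) Bounds() image.Rectangle { return image.Rect(0, 0, m.w, m.h) }

// At returns a fresh random color for every pixel queried.
func (m Image) At(x, y int) color.Color {
    return color.RGBA{
        R: uint8(m.rng.Intn(256)),
        G: uint8(m.rng.Intn(256)),
        B: uint8(m.rng.Intn(256)),
        A: 255,
    }
}

func main() {
    rng := rand.New(rand.NewSource(time.Now().UnixNano()))
    pic.ShowImage(Image{w: 256, h: 256, rng: rng})
}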

anna-hope / rot13.go
Created March 29, 2022 03:56
rot13 in Go
package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}
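The preview cuts off inside the struct. The usual completion of this Go tour exercise is a Read method that rot13-shifts each byte of the wrapped reader; a sketch (the rot13 helper and the main example are assumptions):

// rot13 shifts ASCII letters by 13 places, leaving other bytes untouched.
func rot13(b byte) byte {
    switch {
    case b >= 'A' && b <= 'Z':
        return 'A' + (b-'A'+13)%26
    case b >= 'a' && b <= 'z':
        return 'a' + (b-'a'+13)%26
    }
    return b
}

func (r13 rot13Reader) Read(p []byte) (int, error) {
    n, err := r13.r.Read(p)
    for i := 0; i < n; i++ {
        p[i] = rot13(p[i])
    }
    return n, err
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}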

anna-hope / torchtext_pandas_datasets.py
Created May 30, 2019 18:36
Torchtext dataset and iterator wrappers for Pandas DataFrames
from typing import Union, Dict

import pandas as pd
from torchtext.data import Field, Example, Iterator, BucketIterator, Dataset
from tqdm import tqdm


class DataFrameExampleSet:
    def __init__(self, df: pd.DataFrame, fields: Dict[str, Field]):
        self._df = df
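        # Plausible continuation (assumed, not the gist's actual code): keep the
        # field mapping in the {column: (name, Field)} form Example.fromdict
        # expects, then yield one Example per DataFrame row.
        self._fields = {field_name: (field_name, field)
                        for field_name, field in fields.items()}

    def __iter__(self):
        for _, row in tqdm(self._df.iterrows(), total=len(self._df)):
            yield Example.fromdict(row, self._fields)

    def __len__(self):
        return len(self._df)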

anna-hope / structured_self_attention.py
Last active April 22, 2019 06:05
Structured Self-Attention in PyTorch (Lin et al. 2017)
# Implementation of Structured Self-Attention mechanism
# from Lin et al. 2017 (https://arxiv.org/pdf/1703.03130.pdf)
# Anton Melnikov
import torch
import torch.nn as nn


class StructuredAttention(nn.Module):
    def __init__(self, *, input_dim: int, hidden_dim: int, attention_hops: int):
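        # Plausible continuation (assumed): the two projections from the paper,
        # A = softmax(Ws2 @ tanh(Ws1 @ H^T)), applied over the sequence dimension.
        super().__init__()
        self.ws1 = nn.Linear(input_dim, hidden_dim, bias=False)
        self.ws2 = nn.Linear(hidden_dim, attention_hops, bias=False)

    def forward(self, h: torch.Tensor) -> torch.Tensor:
        # h: (batch, seq_len, input_dim)
        a = torch.softmax(self.ws2(torch.tanh(self.ws1(h))), dim=1)
        # a: (batch, seq_len, hops); weight and sum the hidden states per hop
        m = torch.bmm(a.transpose(1, 2), h)  # (batch, hops, input_dim)
        return m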

anna-hope / get_conv_out.py
Last active October 17, 2018 15:15
Get output dimension after all conv layers
import numpy as np


def get_conv_out(dim_in, padding, kernel_size, stride):
    # Standard formula: floor((dim_in + 2 * padding - kernel_size) / stride) + 1
    out_dim = (dim_in + 2 * padding - kernel_size) / stride
    out_dim += 1
    return int(np.floor(out_dim))


# Presumably (padding, kernel_size, stride) per layer, matching the
# function's argument order; the preview truncates the list here.
layers = [(0, 7, 1),
          (0, 3, 3),
          (0, 7, 1)]
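Chaining the helper over the layer list gives the temporal size after all the conv layers, e.g. (the input length here is made up):

dim = 1014
for padding, kernel_size, stride in layers:
    dim = get_conv_out(dim, padding, kernel_size, stride)
print(dim)  # output width after the stacked conv layers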

anna-hope / hierarchical_attention_net.py
Last active April 1, 2018 18:10
Hierarchical Attention Network (Yang et al. 2016) in PyTorch
# Implementation of the Hierarchical Attention Network from Yang et al. 2016
# https://www.cs.cmu.edu/~diyiy/docs/naacl16.pdf
# Anton Melnikov
import torch
from torch import nn
import torch.nn.functional as F


class SequenceClassifierAttention(nn.Module):
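    # Plausible continuation (assumed, not the gist's actual code): the
    # attention pooling step from the paper,
    # u = tanh(W h + b), alpha = softmax(u . u_w), s = sum(alpha * h).
    def __init__(self, hidden_dim: int):
        super().__init__()
        self.projection = nn.Linear(hidden_dim, hidden_dim)
        self.context_vector = nn.Parameter(torch.randn(hidden_dim))

    def forward(self, h: torch.Tensor) -> torch.Tensor:
        # h: (batch, seq_len, hidden_dim)
        u = torch.tanh(self.projection(h))
        scores = u @ self.context_vector             # (batch, seq_len)
        alpha = F.softmax(scores, dim=1)             # weights over time steps
        return (alpha.unsqueeze(-1) * h).sum(dim=1)  # (batch, hidden_dim)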

anna-hope / vdcnn.py
Last active April 20, 2018 16:55
VDCNN (Conneau et al. 2017)
# Very Deep Convolutional Network (http://aclweb.org/anthology/E17-1104)
# PyTorch implementation by Anton Melnikov
from typing import Iterable, Tuple
import torch
from torch import nn
import torch.nn.functional as F
from torch.autograd import Variable  # written for pre-0.4 PyTorch, where Variable was separate from Tensor
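The model definition is cut off. As a rough sketch only (layer names and sizes are assumptions; ConvBlock is the block from the conv_block_vdcnn.py gist below), the paper's architecture stacks an embedding lookup, a first temporal convolution, convolutional blocks whose pooling halves the temporal size while the channels double, then k-max pooling into fully connected layers:

class VDCNN(nn.Module):
    def __init__(self, vocab_size: int, num_classes: int, embedding_dim: int = 16):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embedding_dim)
        self.conv = nn.Conv1d(embedding_dim, 64, kernel_size=3, padding=1)
        # Each stage doubles the channels and halves the temporal dimension.
        self.blocks = nn.Sequential(
            ConvBlock(64, 64, kernel_size=3), nn.MaxPool1d(3, stride=2, padding=1),
            ConvBlock(64, 128, kernel_size=3), nn.MaxPool1d(3, stride=2, padding=1),
            ConvBlock(128, 256, kernel_size=3), nn.MaxPool1d(3, stride=2, padding=1),
            ConvBlock(256, 512, kernel_size=3),
        )
        self.k = 8  # k-max pooling size
        self.fc = nn.Sequential(
            nn.Linear(512 * self.k, 2048), nn.ReLU(),
            nn.Linear(2048, 2048), nn.ReLU(),
            nn.Linear(2048, num_classes),
        )

    def forward(self, x):
        x = self.embedding(x).transpose(1, 2)  # (batch, embedding_dim, seq_len)
        x = self.conv(x)
        x = self.blocks(x)
        # Keep the k largest activations per channel (a common approximation
        # of k-max pooling, which would preserve their original order).
        x = x.topk(self.k, dim=2).values
        return self.fc(x.flatten(1))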

anna-hope / conv_block_vdcnn.py
Last active April 18, 2018 14:04
VDCNN Convolutional Block (Conneau et al. 2017)
# Convolutional block from http://aclweb.org/anthology/E17-1104
# Anton Melnikov
import numpy as np
import torch
from torch import nn


class ConvBlock(nn.Module):
    def __init__(self, in_channels, out_channels, *,
                 kernel_size, stride=1):
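        # Plausible continuation (assumed): per the paper, each block is
        # conv -> temporal batch norm -> ReLU, twice, with "same" padding so
        # the temporal dimension is preserved.
        super().__init__()
        padding = kernel_size // 2
        self.layers = nn.Sequential(
            nn.Conv1d(in_channels, out_channels, kernel_size,
                      stride=stride, padding=padding),
            nn.BatchNorm1d(out_channels),
            nn.ReLU(),
            nn.Conv1d(out_channels, out_channels, kernel_size,
                      stride=1, padding=padding),
            nn.BatchNorm1d(out_channels),
            nn.ReLU(),
        )

    def forward(self, x):
        return self.layers(x)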

anna-hope / make_training_examples.py
Last active March 18, 2018 18:03
Make torchtext training examples from pandas.DataFrame on the fly
from typing import Dict  # missing from the preview, but Dict below needs it

import pandas as pd
from torchtext import data
from tqdm import tqdm


def make_examples(df: pd.DataFrame, fields: Dict[str, data.Field]):
    fields = {field_name: (field_name, field)
              for field_name, field in fields.items()}
    for _, row in tqdm(df.iterrows()):
        example = data.Example.fromdict(row, fields)
        yield example  # preview cut off here; yielding is the natural completion for on-the-fly examples
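A hypothetical way to consume the generator with torchtext's Dataset (the fields and the toy DataFrame are invented for illustration):

TEXT = data.Field(tokenize=str.split)
LABEL = data.LabelField()

df = pd.DataFrame({"text": ["good movie", "terrible movie"],
                   "label": ["pos", "neg"]})
examples = list(make_examples(df, {"text": TEXT, "label": LABEL}))
dataset = data.Dataset(examples, [("text", TEXT), ("label", LABEL)])
TEXT.build_vocab(dataset)
LABEL.build_vocab(dataset)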