Skip to content

Instantly share code, notes, and snippets.

View seanie12's full-sized avatar
🎲

Seanie Lee seanie12

🎲
View GitHub Profile
import argparse
import os
import pickle
import numpy as np
import torch
import torch.nn as nn
from sklearn.metrics import accuracy_score
from torch.utils.data import DataLoader, Dataset
from tqdm import tqdm
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from torch_scatter import scatter_max
from pytorch_transformers import BertModel, BertTokenizer
def return_mask_lengths(ids):
import linecache
import subprocess
import torch
from torch.utils.data import Dataset, DataLoader
class EEGDataset(Dataset):
    """Torch ``Dataset`` backed by a single file on disk.

    NOTE(review): only the constructor is visible in this fragment;
    ``__len__`` and ``__getitem__`` are defined outside this view —
    confirm against the full source before relying on this class.
    """

    def __init__(self, filename):
        # Path to the backing data file. Reading appears to be deferred
        # to later methods (presumably __getitem__) — TODO confirm.
        self.filename = filename
import os
import random
from tqdm import tqdm
import numpy as np
def batch_loader(iterable, batch_size, shuffle=False):
    """Split *iterable* into mini-batches of size *batch_size*.

    NOTE(review): only the head of this function is visible in this
    fragment — the batching loop itself is cut off, so the yield/return
    behavior cannot be confirmed from here.

    Args:
        iterable: sized sequence to batch; reordered IN PLACE when
            ``shuffle`` is True.
        batch_size: number of items per batch (not used in the visible
            portion).
        shuffle: if True, shuffle ``iterable`` in place before batching.
    """
    length = len(iterable)
    if shuffle:
        # In-place shuffle: the caller's own sequence is reordered too.
        random.shuffle(iterable)
class RLTrainer(CatTrainer):
    # NOTE(review): only the start of __init__ is visible in this
    # fragment; the rest of the class lives outside this view.
    def __init__(self, args):
        """Extend CatTrainer with a pretrained BiDAF QA model.

        Args:
            args: parsed argument namespace; the visible code reads
                ``args.qa_hidden_size`` (hidden size for BiDAF) and
                ``args.qa_file`` (path to a saved BiDAF state_dict).
        """
        super(RLTrainer, self).__init__(args)
        # BiDAF reading-comprehension model; embedding_size=100 suggests
        # 100-d pretrained word vectors — TODO confirm against the
        # embedding file actually used. self.vocab_size is presumably
        # set by CatTrainer.__init__ above.
        self.qa_model = BiDAF(embedding_size=100,
                              vocab_size=self.vocab_size,
                              hidden_size=args.qa_hidden_size,
                              drop_prob=0.2)
        # Load the checkpoint onto CPU so this works on GPU-less hosts;
        # torch.load unpickles — only load trusted checkpoint files.
        state_dict = torch.load(args.qa_file, map_location="cpu")
        self.qa_model.load_state_dict(state_dict)
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from pytorch_pretrained_bert import BertModel
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from torch_scatter import scatter_max
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
from pytorch_pretrained_bert import BertModel
dropout = 0.1
def mask_logits(inputs, mask):
#! /bin/bash
# Download MRQA (v2) dev-set shards into the directory given as $1.
# Usage: ./download.sh OUTPUT_DIR
set -e

# Output directory is the first positional argument. Quote every
# expansion so paths containing spaces or glob characters work; the
# unquoted originals would word-split and glob-expand.
OUTPUT=$1
mkdir -p "$OUTPUT"

# Fetch each dataset's dev split from the MRQA S3 bucket.
wget https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/SQuAD.jsonl.gz -O "$OUTPUT/SQuAD.jsonl.gz"
wget https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/NewsQA.jsonl.gz -O "$OUTPUT/NewsQA.jsonl.gz"
import torch
import torch.nn as nn
import torch.nn.functional as F
from pytorch_pretrained_bert import BertModel
from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence
from torch_scatter import scatter_max
import numpy as np
from torch.distributions.categorical import Categorical
INF = 1e12
# old files
# Paths to the SQuAD question-generation splits: "para-*" files hold the
# source paragraphs, "tgt-*" files hold the target questions — exact file
# format not visible here, confirm against the data-loading code.
train_src_file = "./squad/para-train.txt"
train_trg_file = "./squad/tgt-train.txt"
dev_src_file = "./squad/para-dev.txt"
dev_trg_file = "./squad/tgt-dev.txt"
test_src_file = "./squad/para-test.txt"
test_trg_file = "./squad/tgt-test.txt"
# Pickled artifacts: embedding matrix and word-to-index vocabulary map.
embedding = "./data/embedding.pkl"
word2idx_file = "./data/word2idx.pkl"