"""traverse all the datastore vectors, delete the ones that
are never hit (excluding itself)
"""
import argparse
import numpy as np
import faiss
import ctypes
import time
import pickle
from typing import Union, Tuple, Optional
from torch_geometric.typing import (OptPairTensor, Adj, Size, NoneType,
                                    OptTensor)
import torch
from torch import Tensor
import torch.nn.functional as F
from torch.nn import Parameter, Linear
from torch_sparse import SparseTensor, set_diag
from torch_geometric.nn.conv import MessagePassing
This file has been truncated.
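Since the implementation itself is truncated, here is a minimal standalone sketch of the pruning idea the docstring describes (the key file name, the value of k, and the use of an exact index are assumptions, not from the original file): build a FAISS index over the datastore, retrieve each vector's nearest neighbours, count how often each vector is hit by vectors other than itself, and keep only those hit at least once.

import numpy as np
import faiss

keys = np.load('keys.npy').astype('float32')  # (n, d); hypothetical file name
n, d = keys.shape

index = faiss.IndexFlatL2(d)  # exact L2 search over the datastore
index.add(keys)

k = 9  # 8 neighbours plus the query itself
_, neighbours = index.search(keys, k)

hits = np.zeros(n, dtype=np.int64)
for qid, row in enumerate(neighbours):
    for nid in row:
        if nid != qid:  # "excluding itself"
            hits[nid] += 1

keep_mask = hits > 0  # vectors retrieved at least once by another vector
print(f'keeping {keep_mask.sum()} of {n} vectors')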
{
    "accuracy": 1.0,
    "accuracy_up_to_parametric_type": 1.0,
    "count": 29092,
    "count_on_lattice": 29092,
    "normalized_least_upper_bound_depth": 1.0,
    "per_type_stats": {
        "A": {
            "accuracy": 1.0,
            "accuracy_up_to_parametric_type": 1.0,
from argparse import ArgumentParser

import nltk

nltk.download('punkt')


def process_seq(seq):
    tokens = nltk.word_tokenize(seq.lower())
    return tokens


def load_align(align_file):
    result = {}
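A quick sanity check of process_seq above (the input string is made up, not from the original file); NLTK's punkt tokenizer splits punctuation into separate tokens:

print(process_seq("Hello, World!"))  # -> ['hello', ',', 'world', '!']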
from argparse import ArgumentParser
import os
import json

if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument("-in", "--input", dest="input_file",
                        help="path to input file", required=True)
    parser.add_argument("-out", "--output", dest="output_dir", required=True)
    args = parser.parse_args()
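A hypothetical invocation, given the flags declared above (the script and file names are assumptions, not from the gist):

    python preprocess.py --input data/train.txt --output out/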
if self.config.LABEL_SMOOTHING == 0:
    loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                          logits=logits)
else:
    onehot_labels = tf.one_hot(indices=labels, depth=tf.shape(logits)[-1])
    loss = tf.losses.softmax_cross_entropy(
        onehot_labels=onehot_labels,
        logits=logits,
        label_smoothing=self.config.LABEL_SMOOTHING,  # typically 0.1
        reduction=tf.losses.Reduction.NONE)  # (batch, time)
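For reference, per the TensorFlow documentation, tf.losses.softmax_cross_entropy with a nonzero label_smoothing rewrites the one-hot targets as onehot * (1 - eps) + eps / num_classes. A standalone numpy illustration (eps and the class count here are illustrative, not from the original file):

import numpy as np

eps, num_classes = 0.1, 4
onehot = np.array([0.0, 0.0, 1.0, 0.0])
smoothed = onehot * (1 - eps) + eps / num_classes
print(smoothed)  # [0.025 0.025 0.925 0.025]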
from argparse import ArgumentParser
import os
import pickle

import tensorflow as tf
from tensorflow.contrib.framework.python.framework import checkpoint_utils

vars_to_rename = {
    # 'model/old_name': 'model/new_name',
    'model/decoder/attention_wrapper/lstm_cell/bias': 'model/decoder/attention_wrapper/multi_rnn_cell/cell_0/lstm_cell/bias',
    'model/decoder/attention_wrapper/lstm_cell/kernel': 'model/decoder/attention_wrapper/multi_rnn_cell/cell_0/lstm_cell/kernel',
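The file is truncated above, mid-dict. For orientation, a minimal sketch of the usual rename loop with checkpoint_utils (checkpoint paths are hypothetical; this is not the gist's own loop): load every variable from the old checkpoint, re-create it under its possibly renamed name, and save a new checkpoint.

checkpoint = '/path/to/model.ckpt'  # hypothetical path
with tf.Session() as sess:
    for name, _ in checkpoint_utils.list_variables(checkpoint):
        value = checkpoint_utils.load_variable(checkpoint, name)
        # re-create the variable under its new name (or keep the old one)
        tf.Variable(value, name=vars_to_rename.get(name, name))
    sess.run(tf.global_variables_initializer())
    tf.train.Saver().save(sess, '/path/to/renamed.ckpt')  # hypothetical path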
from argparse import ArgumentParser
import os
import git
import json
import gzip

# Based on Appendix D in: https://arxiv.org/pdf/1711.00740.pdf
# and on the preprocessed dataset at: https://aka.ms/iclr18-prog-graphs-dataset
repos_train = {
    'akka.net': ('https://github.com/akkadotnet/akka.net', '719335a1'),
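The repos_train dict is truncated above. A minimal sketch of how such (url, commit) pairs are typically consumed with GitPython (out_dir is an assumption, not from the original file): clone each repository and pin the working tree to the recorded commit.

out_dir = 'repos'  # hypothetical output directory
for name, (url, sha) in repos_train.items():
    repo = git.Repo.clone_from(url, os.path.join(out_dir, name))
    repo.git.checkout(sha)  # check out the pinned commit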