import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
from transformers import BertTokenizer, BertForMaskedLM
from datasets import load_dataset
import random
import numpy as np
from tqdm import tqdm

# Set random seed for reproducibility
random.seed(42)
np.random.seed(42)
torch.manual_seed(42)
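
# A minimal sketch of the masked-LM fine-tuning these imports point to; the
# checkpoint, dataset, 15% masking rate, and hyperparameters are standard
# BERT defaults assumed here, not values recovered from the original gist.
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = BertForMaskedLM.from_pretrained("bert-base-uncased")

class MLMDataset(Dataset):
    def __init__(self, texts, max_len=128):
        texts = [t for t in texts if t.strip()]
        self.enc = tokenizer(texts, truncation=True, max_length=max_len,
                             padding="max_length", return_tensors="pt")

    def __len__(self):
        return self.enc["input_ids"].size(0)

    def __getitem__(self, idx):
        ids = self.enc["input_ids"][idx].clone()
        labels = ids.clone()
        special = ((ids == tokenizer.cls_token_id) | (ids == tokenizer.sep_token_id)
                   | (ids == tokenizer.pad_token_id))
        mask = (torch.rand(ids.shape) < 0.15) & ~special   # BERT-style 15% masking
        ids[mask] = tokenizer.mask_token_id
        labels[~mask] = -100                               # loss only on masked positions
        return ids, labels

texts = load_dataset("wikitext", "wikitext-2-raw-v1", split="train[:1%]")["text"]
loader = DataLoader(MLMDataset(texts), batch_size=16, shuffle=True)
optimizer = torch.optim.AdamW(model.parameters(), lr=5e-5)
model.train()
for ids, labels in tqdm(loader):
    loss = model(input_ids=ids, labels=labels).loss
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()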

# Credit: Claude-3.5-Sonnet-200k AI chatbot
import numpy as np
from transformers import AutoTokenizer, AutoModel
from transformers import LongformerModel, LongformerTokenizer
import torch
import pandas as pd
import os
import umap
import matplotlib.pyplot as plt
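
# A rough sketch of the pipeline these imports suggest: embed long documents
# with Longformer, then project the embeddings to 2-D with UMAP. The model
# checkpoint, mean pooling, and the "documents.csv"/"text" column are
# assumptions, not recovered from the original gist.
tokenizer = LongformerTokenizer.from_pretrained("allenai/longformer-base-4096")
model = LongformerModel.from_pretrained("allenai/longformer-base-4096")
model.eval()

texts = pd.read_csv("documents.csv")["text"].tolist()   # hypothetical input file
embeddings = []
with torch.no_grad():
    for t in texts:
        enc = tokenizer(t, truncation=True, max_length=4096, return_tensors="pt")
        out = model(**enc)
        embeddings.append(out.last_hidden_state.mean(dim=1).squeeze(0).numpy())

proj = umap.UMAP(n_components=2).fit_transform(np.stack(embeddings))
plt.scatter(proj[:, 0], proj[:, 1], s=5)
plt.title("Longformer document embeddings (UMAP)")
plt.show()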

import numpy as np
import matplotlib.pyplot as plt
import time
from qiskit import QuantumCircuit
from qiskit_aer import AerSimulator
from qiskit.quantum_info import Operator

# Define basic quantum gates
def get_X():
    return np.array([[0, 1], [1, 0]])
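
# A quick sanity check of the hand-rolled gate against Qiskit's own matrix,
# plus a simulator run; the single-qubit circuit here is just for illustration.
qc = QuantumCircuit(1)
qc.x(0)
assert np.allclose(Operator(qc).data, get_X())   # Qiskit's X matches our matrix

qc_meas = QuantumCircuit(1, 1)
qc_meas.x(0)
qc_meas.measure(0, 0)
counts = AerSimulator().run(qc_meas, shots=1000).result().get_counts()
print(counts)  # X|0> always measures 1, so expect {'1': 1000}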

# Credit: gpt-4o, Claude-3.5-Sonnet-200k, Gemini-Pro-1.5
# References:
# [Protein Discovery with Discrete Walk-Jump Sampling](http://arxiv.org/abs/2306.12360)
# [Diffusion Forcing: Next-token Prediction Meets Full-Sequence Diffusion](http://arxiv.org/abs/2407.01392)
import torch
import torch.nn as nn
import torch.nn.functional as F
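
# A toy sketch of the "walk-jump" idea from the first reference: Langevin
# "walk" steps on the smoothed (noised) data distribution, using a trained
# denoiser's score via Tweedie's formula, then a single denoising "jump".
# The denoiser is a stand-in nn.Module; sigma, step count, and step size
# are placeholders.
def walk_jump_sample(denoiser: nn.Module, shape, sigma=0.5, steps=100, lr=1e-2):
    y = torch.randn(shape) * sigma                    # start on the noisy manifold
    for _ in range(steps):                            # walk: Langevin MCMC in y-space
        with torch.no_grad():
            score = (denoiser(y) - y) / sigma ** 2    # Tweedie: score from denoiser output
        y = y + lr * score + (2 * lr) ** 0.5 * torch.randn_like(y)
    with torch.no_grad():
        return denoiser(y)                            # jump: one-shot denoise back to x-space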

import ceviche
import matplotlib.pyplot as plt
import pandas as pd
from skimage.draw import disk as circle
from autograd.scipy.signal import convolve as conv
from scipy.optimize import minimize
from autograd import grad
import autograd.numpy as anp
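
# A rough sketch of the ceviche inverse-design loop these imports point to:
# differentiate a field objective through the FDFD solve with autograd and
# feed it to scipy's L-BFGS-B, assuming the fdfd_ez/solve interface from
# ceviche's documentation. Frequency, grid, and source placement below are
# placeholders, not values from the original gist.
omega = 2 * anp.pi * 200e12          # angular frequency (placeholder)
dL = 40e-9                           # grid spacing in meters (placeholder)
Nx, Ny, npml = 120, 120, 10

sim = ceviche.fdfd_ez(omega, dL, anp.ones((Nx, Ny)), [npml, npml])

source = anp.zeros((Nx, Ny))
source[npml + 5, Ny // 2] = 1.0      # point source on the left
probe = anp.zeros((Nx, Ny))
probe[Nx - npml - 5, Ny // 2] = 1.0  # intensity probe on the right

def objective(rho):
    # Map the flat design variable to permittivity, solve, and score the probe.
    sim.eps_r = 1.0 + 11.0 * rho.reshape(Nx, Ny)
    _, _, Ez = sim.solve(source)
    return -anp.sum(anp.abs(Ez * probe) ** 2)

res = minimize(objective, 0.5 * anp.ones(Nx * Ny), jac=grad(objective),
               method="L-BFGS-B", bounds=[(0.0, 1.0)] * (Nx * Ny),
               options={"maxiter": 20})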

# [Nash Learning from Human Feedback](http://arxiv.org/abs/2312.00886)
import os
import math
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, Dataset
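
# A toy, tabular illustration of the Nash-MD-style update described in the
# paper referenced above: mirror-descent on a preference game against a
# geometric mixture with a reference policy. The preference matrix, step
# size, and mixing weight below are made-up values for illustration.
P = np.array([[0.5, 0.7, 0.2],     # P[i, j] = Pr(response i preferred over j)
              [0.3, 0.5, 0.6],
              [0.8, 0.4, 0.5]])
pi = np.ones(3) / 3                # current policy over 3 candidate responses
ref = np.ones(3) / 3               # reference policy (regularization anchor)
eta, beta = 0.5, 0.1               # step size and mixing weight

for _ in range(200):
    mix = pi ** (1 - beta) * ref ** beta   # regularized geometric mixture
    mix /= mix.sum()
    adv = P @ mix                          # preference of each response vs. the mixture
    pi = mix * np.exp(eta * adv)           # multiplicative (mirror-descent) update
    pi /= pi.sum()

print(pi)  # converges toward the regularized Nash equilibrium of the game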

# Reference: [Half-Quadratic Quantization of Large Machine Learning Models](https://mobiusml.github.io/hqq_blog/)
import numpy as np

# Define the shrinkage function for soft-thresholding
def shrink(x, beta, p):
    return np.sign(x) * np.maximum(np.abs(x) - (np.abs(x) ** (p - 1)) / beta, 0)

# Define the quantization and dequantization operators
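# The gist cuts off after the comment above; following the HQQ blog's
# formulation, a plausible completion is the uniform affine pair below plus
# the alternating half-quadratic solve for the zero-point. Initialization
# and hyperparameters here are assumptions.
def quant(W, s, z):
    return np.round(W / s + z)          # map weights onto the integer grid

def dequant(W_q, s, z):
    return s * (W_q - z)                # map grid points back to real values

def fit_zero_point(W, s, p=0.7, beta=1e4, iters=20):
    z = -W.min() / s                    # rough starting zero-point (assumption)
    for _ in range(iters):
        # Sparse-error step via the shrink operator, then the closed-form
        # zero-point update, alternated as in the blog post.
        W_e = shrink(W - dequant(quant(W, s, z), s, z), beta, p)
        z = np.mean(quant(W, s, z) - (W - W_e) / s)
    return z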

# [Mamba: Linear-Time Sequence Modeling with Selective State Spaces](https://arxiv.org/abs/2312.00752)
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, Dataset
from torch.nn import functional as F
from einops import rearrange, repeat
from tqdm import tqdm
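
# A toy, sequential version of the selective-scan recurrence at the core of
# the paper referenced above; real implementations fuse this loop into a
# hardware-aware parallel scan. Shapes and names are illustrative only.
def selective_scan(x, A, B, C, delta):
    # x: (batch, length, d_inner), A: (d_inner, d_state)
    # B, C: (batch, length, d_state), delta: (batch, length, d_inner)
    bsz, length, _ = x.shape
    h = x.new_zeros(bsz, A.shape[0], A.shape[1])      # hidden state (b, d, n)
    ys = []
    for t in range(length):
        dA = torch.exp(delta[:, t, :, None] * A)      # ZOH-discretized state matrix
        dB = delta[:, t, :, None] * B[:, t, None, :]  # input-dependent (selective) B
        h = dA * h + dB * x[:, t, :, None]            # state update
        ys.append((h * C[:, t, None, :]).sum(-1))     # readout y_t = C_t h_t
    return torch.stack(ys, dim=1)                     # (batch, length, d_inner)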

import torch
from torch import nn
import torch.nn.functional as F
import torchvision
import torchvision.transforms as transforms
from tqdm import tqdm

# Custom fast linear layer
class FastLinear(nn.Module):
    def __init__(self, in_features, out_features):
        # The gist truncates mid-definition; a minimal completion as a
        # standard linear layer with scaled random init:
        super().__init__()
        self.weight = nn.Parameter(torch.randn(out_features, in_features) / in_features ** 0.5)
        self.bias = nn.Parameter(torch.zeros(out_features))

    def forward(self, x):
        return F.linear(x, self.weight, self.bias)
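
# A guess at how FastLinear might be exercised, given the torchvision imports
# above: a small MLP on MNIST. Widths, batch size, and optimizer are placeholders.
model = nn.Sequential(nn.Flatten(), FastLinear(784, 256), nn.ReLU(), FastLinear(256, 10))
train_set = torchvision.datasets.MNIST("data", train=True, download=True,
                                       transform=transforms.ToTensor())
loader = torch.utils.data.DataLoader(train_set, batch_size=128, shuffle=True)
opt = torch.optim.Adam(model.parameters(), lr=1e-3)
for x, y in tqdm(loader):
    opt.zero_grad()
    loss = F.cross_entropy(model(x), y)
    loss.backward()
    opt.step()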

sequence_id,sequence,experiment_type,dataset_name,reads,signal_to_noise,SN_filter,reactivity_0001,reactivity_0002,reactivity_0003,reactivity_0004,reactivity_0005,reactivity_0006,reactivity_0007,reactivity_0008,reactivity_0009,reactivity_0010,reactivity_0011,reactivity_0012,reactivity_0013,reactivity_0014,reactivity_0015,reactivity_0016,reactivity_0017,reactivity_0018,reactivity_0019,reactivity_0020,reactivity_0021,reactivity_0022,reactivity_0023,reactivity_0024,reactivity_0025,reactivity_0026,reactivity_0027,reactivity_0028,reactivity_0029,reactivity_0030,reactivity_0031,reactivity_0032,reactivity_0033,reactivity_0034,reactivity_0035,reactivity_0036,reactivity_0037,reactivity_0038,reactivity_0039,reactivity_0040,reactivity_0041,reactivity_0042,reactivity_0043,reactivity_0044,reactivity_0045,reactivity_0046,reactivity_0047,reactivity_0048,reactivity_0049,reactivity_0050,reactivity_0051,reactivity_0052,reactivity_0053,reactivity_0054,reactivity_0055,reactivity_0056,reactivity_0057,reactivity_0058,reactivity_005
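
# A hypothetical sketch of loading the reactivity table excerpted above and
# melting the wide reactivity_* columns to long form; the file name is a
# placeholder, and the header itself is truncated in the source.
import pandas as pd

df = pd.read_csv("train_data.csv")
react_cols = [c for c in df.columns if c.startswith("reactivity_")]
long = df.melt(id_vars=["sequence_id", "sequence", "experiment_type"],
               value_vars=react_cols, var_name="position", value_name="reactivity")
long["position"] = long["position"].str.split("_").str[1].astype(int)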