# NOTE (scraping artifact): GitHub viewer boilerplate, kept as a comment so the file parses:
# "This file contains bidirectional Unicode text that may be interpreted or compiled
#  differently than what appears below. To review, open the file in an editor that
#  reveals hidden Unicode characters. Learn more about bidirectional Unicode characters."
# A minimal example of flash attention implemented in NumPy
# Contact: bingquanxia AT qq.com

# Standard library
import unittest
from typing import List

# Third-party (both are used by the example below)
import numpy as np
import torch
# NOTE (scraping artifact): GitHub viewer boilerplate, kept as a comment so the file parses:
# "This file contains bidirectional Unicode text that may be interpreted or compiled
#  differently than what appears below. To review, open the file in an editor that
#  reveals hidden Unicode characters. Learn more about bidirectional Unicode characters."
# coding=utf-8
# Contact: bingquanxia@qq.com

# Third-party
import numpy as np
import torch
import torch.nn as nn
def get_len_mask(b: int, max_len: int, feat_lens: torch.Tensor, device: torch.device) -> torch.Tensor:
# NOTE (scraping artifact): GitHub viewer boilerplate, kept as a comment so the file parses:
# "This file contains bidirectional Unicode text that may be interpreted or compiled
#  differently than what appears below. To review, open the file in an editor that
#  reveals hidden Unicode characters. Learn more about bidirectional Unicode characters."
# Standard library
from argparse import ArgumentParser

# Third-party (kept: used by the CLI / pretty-printing code below)
import editdistance
from rich.console import Console
from rich.text import Text
def edit_dist_dp(gt, hyp):
    """
    A Dynamic Programming based Python program for edit distance problem