Max Berrendorf (mberr)

  • DeepL SE
  • Cologne, Germany
@mberr
mberr / rational_activation.py
Created May 4, 2022 21:42
A simple PyTorch implementation of rational activation functions.
import torch
from torch import nn


class RationalActivation(nn.Module):
    """
    A rational activation function with trainable parameters.

    Inspired by https://arxiv.org/abs/2205.01549.

    .. seealso::
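The preview above cuts off inside the docstring. As a rough, self-contained sketch of the idea, a ratio of two learnable polynomials, with illustrative degrees and an absolute-value denominator to avoid division by zero; none of these choices are taken from the gist:

import torch
from torch import nn


class RationalActivationSketch(nn.Module):
    """A rational activation: the ratio of two learnable polynomials P(x) / Q(x)."""

    def __init__(self, p_degree: int = 5, q_degree: int = 4):
        super().__init__()
        # Learnable polynomial coefficients; the degrees are illustrative defaults.
        self.p = nn.Parameter(torch.randn(p_degree + 1) * 0.1)
        self.q = nn.Parameter(torch.randn(q_degree + 1) * 0.1)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Evaluate the numerator polynomial with Horner's scheme.
        num = torch.zeros_like(x)
        for c in self.p:
            num = num * x + c
        # Evaluate the denominator polynomial the same way.
        den = torch.zeros_like(x)
        for c in self.q:
            den = den * x + c
        # Use 1 + |Q(x)| to keep the denominator strictly positive.
        return num / (1.0 + den.abs())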
@mberr
mberr / similarity_normalization.py
Last active February 28, 2022 18:20
Several Similarity Matrix Normalization Methods written in PyTorch
"""Several similarity matrix normalization methods."""
import torch
def csls(
sim: torch.FloatTensor,
k: Optional[int] = 1,
) -> torch.FloatTensor:
"""
Apply CSLS normalization to a similarity matrix.
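The preview ends before the function body. A minimal sketch of the standard CSLS rescaling, csls[i, j] = 2 * sim[i, j] minus the mean top-k similarity of row i and of column j (the helper name is made up; the gist's actual implementation may differ):

import torch


def csls_sketch(sim: torch.FloatTensor, k: int = 1) -> torch.FloatTensor:
    """Rescale a similarity matrix with Cross-domain Similarity Local Scaling (CSLS)."""
    # Mean similarity to the k nearest neighbours along each direction.
    row_knn = sim.topk(k=k, dim=1).values.mean(dim=1, keepdim=True)  # shape: (n, 1)
    col_knn = sim.topk(k=k, dim=0).values.mean(dim=0, keepdim=True)  # shape: (1, m)
    # csls[i, j] = 2 * sim[i, j] - row_knn[i] - col_knn[j]
    return 2 * sim - row_knn - col_knn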
@mberr
mberr / optimal_threshold_f1.py
Last active March 31, 2022 08:46
Determine optimal threshold for Macro F1 score
"""Determine optimal threshold for Macro F1 score."""
from typing import Tuple
import numpy
from sklearn.metrics._ranking import _binary_clf_curve
def f1_scores(
precision: numpy.ndarray,
recall: numpy.ndarray,
) -> numpy.ndarray:
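The preview stops at the signature of f1_scores. A hedged sketch of the overall recipe for the binary case, using the public sklearn.metrics.precision_recall_curve instead of the private _binary_clf_curve the gist imports (the function names below are illustrative, and the gist itself targets the macro-averaged score):

from typing import Tuple

import numpy
from sklearn.metrics import precision_recall_curve


def f1_scores_sketch(precision: numpy.ndarray, recall: numpy.ndarray) -> numpy.ndarray:
    """Compute F1 = 2 * P * R / (P + R), guarding against division by zero."""
    denominator = precision + recall
    denominator[denominator == 0] = 1.0  # F1 is 0 wherever P = R = 0
    return 2 * precision * recall / denominator


def optimal_threshold_sketch(y_true: numpy.ndarray, y_score: numpy.ndarray) -> Tuple[float, float]:
    """Return (best threshold, best F1) over all candidate thresholds."""
    precision, recall, thresholds = precision_recall_curve(y_true, y_score)
    # precision/recall have one more entry than thresholds; drop the final point.
    f1 = f1_scores_sketch(precision[:-1], recall[:-1])
    best = int(f1.argmax())
    return float(thresholds[best]), float(f1[best])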
import logging

import torch
from torch import nn

logger = logging.getLogger(__name__)


# pylint: disable=abstract-method
class ExtendedModule(nn.Module):
    """Extends nn.Module by a few utility methods."""
from typing import Callable

import pandas


def latex_bold(text: str) -> str:
    """Format text in bold font using LaTeX."""
    return rf"\textbf{{{text}}}"


def highlight_max(
    data: pandas.Series,
    float_formatter: Callable[[float], str] = "{:2.2f}".format,
    highlighter: Callable[[str], str] = latex_bold,
) -> pandas.Series:
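The preview ends at the signature of highlight_max. A self-contained sketch of what such a helper could do, formatting a numeric pandas Series and wrapping its maximum in LaTeX bold (the body is an assumption, not the gist's code):

from typing import Callable

import pandas


def latex_bold(text: str) -> str:
    """Wrap text in a LaTeX \\textbf command."""
    return rf"\textbf{{{text}}}"


def highlight_max_sketch(
    data: pandas.Series,
    float_formatter: Callable[[float], str] = "{:2.2f}".format,
    highlighter: Callable[[str], str] = latex_bold,
) -> pandas.Series:
    """Format a numeric series as strings and wrap its maximum in the highlighter."""
    formatted = data.map(float_formatter)
    # idxmax() gives the index label of the (first) maximum value.
    formatted[data.idxmax()] = highlighter(float_formatter(data.max()))
    return formatted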
@mberr
mberr / random_sentences.py
Created October 5, 2020 15:00
Generate a list of random sentences comprising random words.
import random
import string
from typing import Sequence


def random_sentence_list(
    num_sentences: int = 1,
    word_sep: str = ' ',
    min_num_words: int = 1,
    max_num_words: int = 1,
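The preview cuts the signature off. A runnable sketch under the assumption that words are random lowercase strings of bounded length (the extra length parameters and their defaults are guesses, not taken from the gist):

import random
import string
from typing import List, Sequence


def random_sentence_list_sketch(
    num_sentences: int = 1,
    word_sep: str = ' ',
    min_num_words: int = 1,
    max_num_words: int = 5,
    min_word_length: int = 2,
    max_word_length: int = 8,
    alphabet: Sequence[str] = string.ascii_lowercase,
) -> List[str]:
    """Generate sentences of random words drawn from the given alphabet."""
    sentences = []
    for _ in range(num_sentences):
        num_words = random.randint(min_num_words, max_num_words)
        words = [
            ''.join(random.choices(alphabet, k=random.randint(min_word_length, max_word_length)))
            for _ in range(num_words)
        ]
        sentences.append(word_sep.join(words))
    return sentences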
@mberr
mberr / maximize_memory_utilization.py
Created June 17, 2020 15:04
Find the maximal parameter value for a given CUDA device by successive halving.
"""Find the maximal parameter value for a given CUDA device by successive halving."""
from typing import Callable, Tuple, TypeVar

import torch

R = TypeVar('R')


def maximize_memory_utilization(
    func: Callable[..., R],
    parameter_name: str,
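The preview ends mid-signature. A minimal sketch of the successive-halving idea, retrying with half the parameter value on a CUDA out-of-memory error (signature details beyond func and parameter_name are assumptions):

from typing import Any, Callable, Tuple, TypeVar

import torch

R = TypeVar('R')


def maximize_memory_utilization_sketch(
    func: Callable[..., R],
    parameter_name: str,
    parameter_max_value: int,
    **kwargs: Any,
) -> Tuple[R, int]:
    """Retry func with successively halved parameter values until it fits into memory."""
    value = parameter_max_value
    while value > 0:
        try:
            return func(**{parameter_name: value}, **kwargs), value
        except RuntimeError as error:
            # Only treat CUDA out-of-memory errors as a signal to halve the parameter.
            if 'out of memory' not in str(error):
                raise
            torch.cuda.empty_cache()
            value //= 2
    raise MemoryError(f"Even {parameter_name}=1 did not fit into memory.")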