# Install this tool with the following command:
# curl -LsSf https://gist.githubusercontent.com/xmodar/7bcb7cbcc9a263ef8f758e1bad9a80eb/raw/uvn.sh | bash
uvn() {
    if [[ "$1" == "-h" ]]; then
        echo "Manage Python virtual environments with uv (astral.sh/uv)"
        echo ""
        echo "Author: @xmodar"
        echo "Link : https://gist.github.com/xmodar/7bcb7cbcc9a263ef8f758e1bad9a80eb"
        echo "Also : https://github.com/xmodar/uvn"
        echo ""
/**
 * Lambert W-function when k = 0
 * {@link https://gist.github.com/xmodar/baa392fc2bec447d10c2c20bbdcaf687}
 * {@link https://link.springer.com/content/pdf/10.1007/s10444-017-9530-3.pdf}
 */
export function lambertW(x: number, log = false): number {
  if (log) return lambertWLog(x); // x is actually log(x)
  if (x >= 0) return lambertWLog(Math.log(x)); // handles [0, Infinity]
  const xE = x * Math.E;
  if (isNaN(x) || xE < -1) return NaN; // handles NaN and [-Infinity, -1 / Math.E)
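The preview cuts off before the numerical iteration. For reference, W0(x) is the solution w >= -1 of w * exp(w) = x on [-1/e, inf); the sketch below (in Python, like most of the gists on this page) computes it with a plain Newton iteration. It only illustrates the underlying math and is not the gist's lambertWLog routine, which works in log-space for numerical stability.

import math

def lambert_w0(x: float, tol: float = 1e-12) -> float:
    """Principal branch W0(x): the w >= -1 solving w * exp(w) = x (illustrative sketch)."""
    if math.isnan(x) or x * math.e < -1.0:
        return float('nan')  # outside the domain [-1/e, inf)
    if x * math.e == -1.0:
        return -1.0          # branch point: W0(-1/e) = -1
    # Initial guess: series expansion near the branch point, log-like guess elsewhere.
    w = -1.0 + math.sqrt(2.0 * (1.0 + math.e * x)) if x < 0 else math.log1p(x)
    for _ in range(100):
        ew = math.exp(w)
        step = (w * ew - x) / (ew * (w + 1.0))  # Newton step for f(w) = w * exp(w) - x
        w -= step
        if abs(step) <= tol * (1.0 + abs(w)):
            break
    return w

print(lambert_w0(1.0))  # ~0.5671432904097838, the omega constant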
/** {@link https://gist.github.com/xmodar/d3a17bf51b8399534c5f8d27104a2a38} */
export const operator = {
  lt: <T>(a: T, b: T) => a < b,
  le: <T>(a: T, b: T) => a <= b,
  eq: <T>(a: T, b: T) => a === b,
  ne: <T>(a: T, b: T) => a !== b,
  ge: <T>(a: T, b: T) => a >= b,
  gt: <T>(a: T, b: T) => a > b,
  not: <T>(a: T) => !a,
  abs: (a: number) => Math.abs(a),
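This mirrors Python's standard operator module, which exposes the same comparisons and unary helpers as named functions:

import operator

pairs = [(1, 2), (3, 3), (5, 4)]
print([operator.lt(a, b) for a, b in pairs])  # [True, False, False]
print(operator.abs(-7), operator.not_(0))     # 7 True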
"""Resnet + SVM"""
import torch
from torch import nn
import torchvision.transforms as T
from torchvision import models


class SVM(nn.Module):
    """Multi-Class SVM with Gaussian Kernel (Radial Basis Function)
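The class body is cut off by the preview. For context, the Gaussian (RBF) kernel it names is k(a, b) = exp(-gamma * ||a - b||^2); below is a minimal sketch of that kernel applied to hypothetical ResNet features, not the gist's actual SVM implementation:

import torch

def rbf_kernel(x, y, gamma=1.0):
    """Gaussian / RBF kernel matrix: k(a, b) = exp(-gamma * ||a - b||^2)."""
    return torch.exp(-gamma * torch.cdist(x, y) ** 2)

features = torch.randn(8, 512)   # e.g., pooled ResNet features for 8 images
centers = torch.randn(10, 512)   # hypothetical support points, one per class
print(rbf_kernel(features, centers).shape)  # torch.Size([8, 10])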
"""Invertible BatchNorm"""
import torch
from torch import nn


class NonZero(nn.Module):
    """Parameterization to force the values to be nonzero"""

    def __init__(self, eps=1e-5, preserve_sign=True):
        super().__init__()
        self.eps, self.preserve_sign = eps, preserve_sign
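The forward pass is not shown. One plausible reading of the docstring (an assumption, not the gist's code) is to push every value at least `eps` away from zero, optionally preserving its sign, so that a BatchNorm scale parameterized this way remains invertible:

import torch

def nonzero_sketch(value, eps=1e-5, preserve_sign=True):
    """Clamp magnitudes to at least `eps`; treat exact zeros as positive (guessed behavior)."""
    sign = value.sign() if preserve_sign else torch.ones_like(value)
    sign = torch.where(sign == 0, torch.ones_like(sign), sign)
    return sign * value.abs().clamp(min=eps)

print(nonzero_sketch(torch.tensor([0.0, 3e-6, -2e-6, 1.0])))
# tensor([ 1.0000e-05,  1.0000e-05, -1.0000e-05,  1.0000e+00])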
"""InvTorch: Core Invertible Utilities https://github.com/xmodar/invtorch"""
import itertools
import collections

import torch
from torch import nn
import torch.utils.checkpoint

__all__ = ['invertible_checkpoint', 'InvertibleModule']
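The preview stops at the exports. As background for what invertible_checkpoint improves on: PyTorch's stock activation checkpointing, shown below, recomputes a block's forward pass from its saved inputs during backward; an invertible module can instead reconstruct its inputs from its outputs, so even the inputs need not be stored. This snippet is only the stock mechanism, not invtorch's API:

import torch
from torch import nn
from torch.utils.checkpoint import checkpoint

block = nn.Sequential(nn.Linear(64, 64), nn.ReLU())
x = torch.randn(8, 64, requires_grad=True)
y = checkpoint(block, x, use_reentrant=False)  # activations recomputed in backward
y.sum().backward()
print(x.grad.shape)  # torch.Size([8, 64])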
"""Deconvolution https://api.semanticscholar.org/CorpusID:208192734"""
import torch
from torch import nn


class Deconv(nn.Module):
    """Inverse conv https://gist.github.com/ModarTensai/7921460648230eda5053fe06b7cd2f4d"""

    def __init__(self, conv, output_padding=0):
        dim = len(conv.padding)
        if isinstance(output_padding, int):
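The constructor reads its hyperparameters off an existing conv layer. As a rough point of reference (an assumption, not the gist's Deconv), a transposed convolution built with the same kernel size, stride, and padding undoes the conv's shape change, though it is only the adjoint rather than a true inverse:

import torch
from torch import nn

conv = nn.Conv2d(3, 8, kernel_size=3, stride=2, padding=1)
deconv = nn.ConvTranspose2d(8, 3, kernel_size=3, stride=2, padding=1, output_padding=1)

x = torch.randn(1, 3, 32, 32)
y = conv(x)
print(y.shape, deconv(y).shape)  # torch.Size([1, 8, 16, 16]) torch.Size([1, 3, 32, 32])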
import torch


def sinusoidal(positions, features=16, periods=10000):
    """Encode `positions` using sinusoidal positional encoding

    Args:
        positions: tensor of positions
        features: half the number of features per position
        periods: used frequencies for the sinusoidal functions
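The function body is cut off by the preview. Below is a minimal sketch, assuming the gist follows the standard "Attention Is All You Need" formulation with `features` sine/cosine pairs per position (so 2 * features output channels); the gist's actual implementation may differ in detail:

import torch

def sinusoidal_sketch(positions, features=16, periods=10000):
    """sin(p / periods**(i / features)) and cos(...) for i = 0 .. features - 1."""
    freqs = periods ** (-torch.arange(features, dtype=torch.float32) / features)
    angles = positions.float().unsqueeze(-1) * freqs  # (..., features)
    return torch.cat([angles.sin(), angles.cos()], dim=-1)

print(sinusoidal_sketch(torch.arange(5)).shape)  # torch.Size([5, 32])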
import torch

__all__ = ['softmax_mask']


class SoftmaxMask(torch.autograd.Function):
    """Differentiable mask for logits before a softmax operation"""

    @staticmethod
    def forward(ctx, *args, **kwargs):
        inputs, = args
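The rest of the Function is not visible. One common way to realize such a mask (a guess at the intent, not the gist's code) is to fill masked logits with -inf in the forward pass, so they receive zero probability after the softmax, and to zero their gradients in the backward pass:

import torch

class SoftmaxMaskSketch(torch.autograd.Function):
    """Masked logits become -inf for the softmax; their gradients are zeroed (hedged sketch)."""

    @staticmethod
    def forward(ctx, inputs, mask):
        ctx.save_for_backward(mask)
        return inputs.masked_fill(~mask, float('-inf'))

    @staticmethod
    def backward(ctx, grad_output):
        mask, = ctx.saved_tensors
        return grad_output.masked_fill(~mask, 0.0), None  # no gradient for the mask

logits = torch.randn(2, 4, requires_grad=True)
mask = torch.tensor([[True, True, False, True], [True, False, True, True]])
probs = SoftmaxMaskSketch.apply(logits, mask).softmax(dim=-1)
probs[:, 0].sum().backward()
print(logits.grad[~mask])  # masked positions receive zero gradient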
import torch


def randg(*args, like=None, **kwargs):
    """Sample from Gumbel(location=0, scale=1)"""
    generator = kwargs.pop('generator', None)
    requires_grad = kwargs.pop('requires_grad', False)
    if like is None:
        samples = torch.empty(*args, **kwargs)
    else:
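The remainder is cut off. The standard construction, and presumably what follows, draws U ~ Uniform(0, 1) and returns -log(-log(U)), which is distributed as Gumbel(0, 1); this is the building block of the Gumbel-max and Gumbel-softmax tricks. A hedged sketch:

import torch

def gumbel_sketch(*size, generator=None, **kwargs):
    """Sample Gumbel(0, 1) as -log(-log(U)) with U ~ Uniform(0, 1)."""
    uniform = torch.rand(*size, generator=generator, **kwargs)
    tiny = torch.finfo(uniform.dtype).tiny  # avoid log(0)
    return -torch.log(-torch.log(uniform.clamp(min=tiny)))

# Gumbel-max trick: argmax(logits + Gumbel noise) samples a class from softmax(logits).
logits = torch.tensor([1.0, 2.0, 0.5])
print(torch.argmax(logits + gumbel_sketch(3)))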