lambertw.ts
/**
 * Lambert W-function when k = 0
 * {@link https://gist.github.com/xmodar/baa392fc2bec447d10c2c20bbdcaf687}
 * {@link https://link.springer.com/content/pdf/10.1007/s10444-017-9530-3.pdf}
 */
export function lambertW(x: number, log = false): number {
  if (log) return lambertWLog(x); // x is actually log(x)
  if (x >= 0) return lambertWLog(Math.log(x)); // handles [0, Infinity]
  const xE = x * Math.E;
  if (isNaN(x) || xE < -1) return NaN; // handles NaN and [-Infinity, -1 / Math.E)
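The preview cuts off before the negative branch. As a hedged sketch of the same computation (not the gist's code), here is Halley's iteration for the principal branch W0 in Python; the helper name, initial guess, and iteration cap are assumptions:

import math

def lambert_w0(x, tol=1e-12):
    """Principal branch W0 via Halley's iteration (sketch, not the gist's code)."""
    if math.isnan(x) or x < -1 / math.e:
        return float('nan')  # W0 is only real on [-1/e, inf)
    w = math.log1p(x)  # assumed initial guess; adequate away from the branch point
    for _ in range(100):
        e = math.exp(w)
        f = w * e - x  # we seek the root of f(w) = w * exp(w) - x
        step = f / (e * (w + 1) - (w + 2) * f / (2 * w + 2))
        w -= step
        if abs(step) <= tol * (1 + abs(w)):
            break
    return w

print(lambert_w0(1.0))  # ~0.5671432904097838, the omega constant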
python.ts
/** {@link https://gist.github.com/xmodar/d3a17bf51b8399534c5f8d27104a2a38} */
export const operator = {
  lt: <T>(a: T, b: T) => a < b,
  le: <T>(a: T, b: T) => a <= b,
  eq: <T>(a: T, b: T) => a === b,
  ne: <T>(a: T, b: T) => a !== b,
  ge: <T>(a: T, b: T) => a >= b,
  gt: <T>(a: T, b: T) => a > b,
  not: <T>(a: T) => !a,
  abs: (a: number) => Math.abs(a),
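The object appears to port Python's built-in operator module to TypeScript; for reference, the Python counterparts behave like this:

import operator

# Python's operator module, which the TypeScript object mirrors
assert operator.lt(1, 2) and operator.ge(3, 3)
assert operator.ne('a', 'b') and operator.eq(4, 4)
assert operator.not_(0) and operator.abs(-5) == 5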
resnet_svm.py
"""Resnet + SVM""" | |
import torch | |
from torch import nn | |
import torchvision.transforms as T | |
from torchvision import models | |
class SVM(nn.Module): | |
"""Multi-Class SVM with Gaussian Kernel (Radial Basis Function) |
invertible_batchnorm.py
"""Invertible BatchNorm""" | |
import torch | |
from torch import nn | |
class NonZero(nn.Module): | |
"""Parameterization to force the values to be nonzero""" | |
def __init__(self, eps=1e-5, preserve_sign=True): | |
super().__init__() | |
self.eps, self.preserve_sign = eps, preserve_sign |
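The preview stops before NonZero's forward; presumably it keeps the affine scale invertible by bounding magnitudes away from zero. A hedged functional sketch of that behavior (the body is an assumption consistent with the docstring and arguments):

import torch

def nonzero(values, eps=1e-5, preserve_sign=True):
    """Push magnitudes below eps up to eps so the tensor has no zeros (sketch)."""
    ones = torch.ones_like(values)
    sign = torch.where(values < 0, -ones, ones) if preserve_sign else ones
    return torch.where(values.abs() < eps, sign * eps, values)

scale = torch.tensor([0.0, 1e-7, -1e-7, 0.5])
print(nonzero(scale))  # -> [1e-05, 1e-05, -1e-05, 0.5]; no exact zeros remain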
invtorch.py
"""InvTorch: Core Invertible Utilities https://github.com/xmodar/invtorch""" | |
import itertools | |
import collections | |
import torch | |
from torch import nn | |
import torch.utils.checkpoint | |
__all__ = ['invertible_checkpoint', 'InvertibleModule'] |
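invertible_checkpoint builds on the observation that an invertible layer need not keep its inputs for the backward pass: they can be recomputed from the outputs. A toy illustration of that property (not InvTorch's actual API; the coupling block is a standard example):

import torch
from torch import nn

class AdditiveCoupling(nn.Module):
    """Toy invertible block: (x1, x2) -> (x1, x2 + f(x1))."""

    def __init__(self, features):
        super().__init__()
        self.f = nn.Linear(features, features)

    def forward(self, x1, x2):
        return x1, x2 + self.f(x1)

    def inverse(self, y1, y2):
        # inputs are exactly recoverable, so an invertible checkpoint
        # may free them in forward and rebuild them during backward
        return y1, y2 - self.f(y1)

block = AdditiveCoupling(4)
x1, x2 = torch.randn(2, 4), torch.randn(2, 4)
y1, y2 = block(x1, x2)
r1, r2 = block.inverse(y1, y2)
assert torch.allclose(r1, x1) and torch.allclose(r2, x2, atol=1e-6)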
deconv.py
"""Deconvolution https://api.semanticscholar.org/CorpusID:208192734""" | |
import torch | |
from torch import nn | |
class Deconv(nn.Module): | |
"""Inverse conv https://gist.github.com/ModarTensai/7921460648230eda5053fe06b7cd2f4d""" | |
def __init__(self, conv, output_padding=0): | |
dim = len(conv.padding) | |
if isinstance(output_padding, int): |
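The output_padding argument exists because, with stride > 1, several input sizes produce the same conv output size; the flag picks which size the inverse should reconstruct, exactly as in nn.ConvTranspose2d. A quick shape check of that ambiguity:

import torch
from torch import nn

conv = nn.Conv2d(3, 8, kernel_size=3, stride=2, padding=1, bias=False)
y = conv(torch.randn(1, 3, 7, 7))  # -> (1, 8, 4, 4)

# inputs of size 7 and 8 both yield size-4 outputs here;
# output_padding selects which one the inverse targets
tconv = nn.ConvTranspose2d(8, 3, kernel_size=3, stride=2, padding=1,
                           output_padding=0, bias=False)
print(tconv(y).shape)  # torch.Size([1, 3, 7, 7]); output_padding=1 gives 8x8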
point_pe.py
import torch


def sinusoidal(positions, features=16, periods=10000):
    """Encode `positions` using sinusoidal positional encoding

    Args:
        positions: tensor of positions
        features: half the number of features per position
        periods: used frequencies for the sinusoidal functions
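The docstring implies each position gets features sine values plus features cosine values. A hedged sketch consistent with that contract; only the signature comes from the preview, the body is a guess at a standard implementation:

import torch

def sinusoidal(positions, features=16, periods=10000):
    """Sketch: 2 * features encoding values per position."""
    positions = positions.float().unsqueeze(-1)    # (..., 1)
    exponents = torch.arange(features) / features  # (features,)
    angles = positions / (periods ** exponents)    # (..., features)
    return torch.cat([angles.sin(), angles.cos()], dim=-1)

print(sinusoidal(torch.arange(5)).shape)  # torch.Size([5, 32])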
softmax_mask.py
import torch

__all__ = ['softmax_mask']


class SoftmaxMask(torch.autograd.Function):
    """Differentiable mask for logits before a softmax operation"""

    @staticmethod
    def forward(ctx, *args, **kwargs):
        inputs, = args
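The custom autograd.Function presumably controls how gradients flow through the mask; functionally, masking logits before a softmax means sending excluded entries to -inf so they receive zero probability. A plain sketch of that effect (names are assumptions):

import torch

def masked_softmax(logits, mask, dim=-1):
    """Sketch: excluded positions (mask == False) get zero probability."""
    return logits.masked_fill(~mask, float('-inf')).softmax(dim)

logits = torch.randn(2, 4)
mask = torch.tensor([[True, True, False, True],
                     [True, False, False, True]])
probs = masked_softmax(logits, mask)
print(probs[~mask])  # tensor([0., 0., 0.])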
gumbel_softmax.py
import torch


def randg(*args, like=None, **kwargs):
    """Sample from Gumbel(location=0, scale=1)"""
    generator = kwargs.pop('generator', None)
    requires_grad = kwargs.pop('requires_grad', False)
    if like is None:
        samples = torch.empty(*args, **kwargs)
    else:
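Gumbel(0, 1) noise is what makes the Gumbel-softmax trick work: argmax(logits + g) is a categorical draw from softmax(logits). A minimal sketch of the sampling identity in its inverse-CDF form (helper name and temperature are assumptions):

import torch

def gumbel_like(logits):
    """Gumbel(0, 1) samples via the inverse CDF: g = -log(-log(u))."""
    uniform = torch.rand_like(logits).clamp_min(1e-20)  # avoid log(0)
    return -torch.log(-torch.log(uniform))

logits = torch.randn(3, 5)
tau = 0.5  # temperature: lower -> closer to one-hot
soft = ((logits + gumbel_like(logits)) / tau).softmax(-1)
hard = soft.argmax(-1)  # Gumbel-max: distributed like draws from softmax(logits)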
import_form_url.py
import tempfile
import urllib.request
import importlib.util
from pathlib import Path


def import_from_url(url):
    """Import a module from a given URL"""
    with tempfile.TemporaryDirectory() as path:
        path = Path(path) / Path(url).name
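The preview ends right after building the temporary path; a hedged completion of the remaining steps (download, then import by file path) would look roughly like this, though the gist's actual body may differ:

import tempfile
import urllib.request
import importlib.util
from pathlib import Path

def import_from_url(url):
    """Sketch: download a module to a temp dir and import it by path."""
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / Path(url).name
        urllib.request.urlretrieve(url, path)  # fetch the source file
        spec = importlib.util.spec_from_file_location(path.stem, path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)  # run the module's top-level code
        return module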