Skip to content

Instantly share code, notes, and snippets.


Modar M. Alfadly xmodar

View GitHub Profile
View lambertw.ts
* Lambert W-function when k = 0
* {@link}
* {@link}
export function lambertW(x: number, log = false): number {
if (log) return lambertWLog(x); // x is actually log(x)
if (x >= 0) return lambertWLog(Math.log(x)); // handles [0, Infinity]
const xE = x * Math.E;
if (isNaN(x) || xE < -1) return NaN; // handles NaN and [-Infinity, -1 / Math.E)
xmodar / python.ts
Last active Mar 21, 2022
JavaScript utilities to mimic Python
View python.ts
/** {@link} */
export const operator = {
lt: <T>(a: T, b: T) => a < b,
le: <T>(a: T, b: T) => a <= b,
eq: <T>(a: T, b: T) => a === b,
ne: <T>(a: T, b: T) => a !== b,
ge: <T>(a: T, b: T) => a >= b,
gt: <T>(a: T, b: T) => a > b,
not: <T>(a: T) => !a,
abs: (a: number) => Math.abs(a),
"""Resnet + SVM"""
import torch
from torch import nn
import torchvision.transforms as T
from torchvision import models
class SVM(nn.Module):
"""Multi-Class SVM with Gaussian Kernel (Radial Basis Function)
"""Invertible BatchNorm"""
import torch
from torch import nn
class NonZero(nn.Module):
    """Parameterization to force the values to be nonzero.

    Args:
        eps: smallest allowed magnitude (default ``1e-5``).
        preserve_sign: presumably keeps the sign of values clamped to
            ``eps`` — TODO confirm against the (not visible) forward pass.
    """

    def __init__(self, eps=1e-5, preserve_sign=True):
        # Bug fix: nn.Module.__init__ must run before the module is used;
        # it creates the _parameters/_buffers/_modules registries, without
        # which parameters()/.to()/etc. raise AttributeError. The original
        # skipped this call.
        super().__init__()
        self.eps, self.preserve_sign = eps, preserve_sign
"""InvTorch: Core Invertible Utilities"""
import itertools
import collections
import torch
from torch import nn
import torch.utils.checkpoint
__all__ = ['invertible_checkpoint', 'InvertibleModule']
import torch
from torch import nn
class Deconv(nn.Module):
"""Inverse conv"""
def __init__(self, conv, output_padding=0):
dim = len(conv.padding)
if isinstance(output_padding, int):
xmodar /
Last active Oct 21, 2021
Positional encoding for point clouds
import torch
def sinusoidal(positions, features=16, periods=10000):
"""Encode `positions` using sinusoidal positional encoding
positions: tensor of positions
features: half the number of features per position
periods: used frequencies for the sinusoidal functions
xmodar /
Created Oct 8, 2021
Differentiable mask for logits before a softmax operation
import torch
__all__ = ['softmax_mask']
class SoftmaxMask(torch.autograd.Function):
"""Differentiable mask for logits before a softmax operation"""
def forward(ctx, *args, **kwargs):
inputs, = args
import torch
def randg(*args, like=None, **kwargs):
"""Sample from Gumbel(location=0, scale=1)"""
generator = kwargs.pop('generator', None)
requires_grad = kwargs.pop('requires_grad', False)
if like is None:
samples = torch.empty(*args, **kwargs)
import tempfile
import urllib.request
import importlib.util
from pathlib import Path
def import_from_url(url):
"""Import a module from a given URL"""
with tempfile.TemporaryDirectory() as path:
path = Path(path) / Path(url).name