Sampling
@lpinner · Last active November 8, 2018 04:54

from enum import Enum

import numpy as np


class Transform(Enum):
    """Distance-weighting transforms supported by sampling_kernel."""
    linear = 'linear'
    log = 'log'
    equal = 'equal'
    halfcauchy = 'halfcauchy'


def rescale(array, out_min=0, out_max=1):
    """Linearly rescale array values to the range [out_min, out_max]."""
    arr_min, arr_max = np.min(array), np.max(array)
    m = (out_max - out_min) / (arr_max - arr_min)
    b = out_min - m * arr_min
    return m * array + b
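
# Quick check of rescale (illustrative, not part of the original gist):
# [0, 5, 10] maps onto the default output range [0, 1].
assert np.allclose(rescale(np.array([0.0, 5.0, 10.0])), [0.0, 0.5, 1.0])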


def sampling_kernel(y, x, radius, shape: tuple = None,
                    transform: Transform = Transform.linear):
    """Generate a circular sampling-probability kernel centred at (y, x).

    Weights are equal within the circle, or decay with distance using a
    linear, log, or half-Cauchy transform.
    """
    if shape is None:
        shape = (radius * 2, radius * 2)
    Y, X = np.ogrid[:shape[0], :shape[1]]
    cdist = np.sqrt((X - x) ** 2 + (Y - y) ** 2)  # distance of each cell from (y, x)
    mask = cdist <= radius  # cells inside the circular window
    if transform == Transform.equal:
        kernel = 1.0 * mask
    elif transform == Transform.halfcauchy:
        qs = 0.1, 0.99  # quantiles
        q1, q2 = (np.tan(np.pi / 2 * q) for q in qs)  # half-Cauchy ppf
        cdist = rescale(cdist, q1, q2)
        kernel = 2.0 / np.pi / (1.0 + cdist * cdist)  # half-Cauchy pdf
        kernel *= mask
    elif transform == Transform.log:
        kernel = np.log(cdist)
        kernel[y, x] = 0.0  # replace the -inf from log(0) at the centre cell
        kernel = mask * (kernel[mask].max() - kernel)
    elif transform == Transform.linear:
        kernel = mask * (cdist[mask].max() - cdist)
    return kernel / kernel.sum()  # sample probabilities must sum to 1
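
# A minimal usage sketch (illustrative, not part of the original gist): build a
# half-Cauchy kernel on a 100 x 100 grid centred at (50, 50) with radius 25,
# and check that the probabilities sum to 1. All values here are arbitrary.
demo_kernel = sampling_kernel(y=50, x=50, radius=25, shape=(100, 100),
                              transform=Transform.halfcauchy)
assert np.isclose(demo_kernel.sum(), 1.0)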


def random_sample(nsamples, probability):
    """Draw nsamples (y, x) indices without replacement, weighted by probability."""
    # https://stackoverflow.com/a/45863403/737471
    linear_idx = np.random.choice(probability.size, nsamples, replace=False,
                                  p=probability.ravel())
    y, x = np.unravel_index(linear_idx, probability.shape)
    return y, x
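

# End-to-end sketch (illustrative, not part of the original gist): draw ten
# distinct cell coordinates from a linear distance-weighted kernel. The grid
# size, centre, radius, and sample count are arbitrary choices.
if __name__ == '__main__':
    prob = sampling_kernel(y=25, x=25, radius=20, shape=(50, 50),
                           transform=Transform.linear)
    ys, xs = random_sample(10, prob)
    print(list(zip(ys, xs)))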