""" Remix of some ideas of perlin noise and simplex noise... """
import math
import numpy
_permutation = numpy.array([ 6, 36, 184, 45, 107, 67, 2, 34, 82, 181, 110, 188, 69, 152, 142, 0,
224, 154, 236, 203, 157, 73, 171, 115, 138, 66, 252, 165, 155, 60, 229, 95,
141, 202, 58, 132, 76, 94, 160, 53, 170, 242, 235, 81, 139, 84, 240, 153,
130, 148, 228, 97, 105, 201, 135, 237, 62, 232, 163, 136, 143, 255, 147, 178,
204, 127, 227, 249, 253, 77, 145, 91, 19, 72, 24, 108, 144, 233, 101, 40,
20, 37, 200, 29, 54, 27, 55, 46, 234, 50, 213, 64, 17, 231, 25, 10,
70, 146, 205, 222, 80, 162, 59, 86, 12, 125, 192, 124, 246, 83, 114, 93,
206, 63, 99, 214, 15, 174, 173, 9, 208, 190, 166, 161, 3, 167, 88, 85,
168, 121, 239, 26, 11, 182, 175, 35, 92, 44, 180, 43, 22, 61, 87, 225,
211, 118, 176, 111, 244, 217, 248, 209, 133, 57, 159, 150, 247, 186, 194, 137,
56, 199, 251, 140, 18, 103, 149, 158, 187, 212, 193, 230, 220, 223, 134, 13,
120, 128, 23, 41, 90, 129, 189, 106, 74, 89, 250, 33, 164, 98, 78, 151,
31, 71, 119, 126, 243, 218, 47, 207, 177, 49, 226, 48, 7, 109, 14, 195,
116, 219, 254, 215, 198, 238, 169, 1, 32, 16, 172, 123, 112, 197, 113, 4,
210, 28, 104, 39, 185, 221, 79, 117, 131, 8, 183, 38, 216, 122, 96, 68,
196, 21, 30, 100, 42, 5, 102, 156, 52, 65, 75, 245, 191, 241, 51, 179],
dtype = numpy.uint8)
_permutation2 = numpy.array([215, 173, 219, 224, 240, 41, 254, 17, 39, 116, 197, 168, 142, 40, 175, 76,
3, 57, 118, 188, 132, 88, 194, 59, 137, 115, 221, 123, 54, 129, 237, 136,
42, 94, 239, 6, 73, 128, 198, 46, 205, 51, 24, 178, 2, 149, 235, 82,
212, 243, 163, 253, 110, 0, 66, 30, 244, 19, 53, 69, 122, 214, 38, 127,
150, 83, 61, 109, 213, 157, 103, 23, 218, 179, 45, 26, 206, 36, 241, 91,
135, 167, 154, 162, 139, 186, 200, 108, 44, 29, 49, 222, 211, 18, 155, 92,
87, 228, 22, 153, 8, 191, 230, 209, 35, 160, 64, 126, 233, 114, 172, 190,
187, 74, 56, 95, 4, 234, 125, 70, 130, 89, 165, 90, 20, 185, 50, 229,
112, 147, 113, 144, 105, 249, 15, 181, 68, 85, 251, 52, 220, 195, 99, 134,
166, 133, 104, 106, 247, 245, 169, 9, 75, 37, 177, 192, 246, 43, 232, 141,
223, 27, 101, 47, 217, 159, 138, 65, 255, 193, 98, 146, 158, 60, 71, 199,
204, 145, 161, 201, 151, 102, 63, 32, 86, 77, 81, 1, 120, 34, 231, 236,
55, 164, 242, 21, 124, 10, 58, 250, 252, 216, 143, 148, 170, 13, 238, 176,
79, 11, 202, 208, 183, 84, 226, 62, 227, 12, 203, 31, 5, 152, 248, 72,
7, 33, 96, 174, 14, 78, 100, 16, 121, 80, 156, 97, 180, 25, 131, 207,
210, 111, 189, 48, 117, 225, 182, 196, 67, 119, 107, 93, 28, 184, 171, 140],
dtype=numpy.uint8)
_permutation3 = numpy.array([118, 7, 174, 144, 162, 169, 186, 238, 127, 246, 126, 90, 208, 179, 211, 111,
234, 121, 237, 28, 180, 191, 54, 216, 99, 230, 29, 35, 182, 60, 193, 157,
248, 124, 50, 100, 205, 185, 220, 189, 225, 89, 250, 12, 137, 66, 141, 76,
247, 62, 31, 254, 16, 213, 0, 30, 38, 197, 212, 159, 37, 145, 8, 10,
4, 63, 183, 80, 123, 45, 115, 177, 112, 136, 74, 229, 151, 148, 242, 235,
86, 221, 178, 192, 70, 34, 204, 61, 200, 165, 27, 103, 163, 41, 146, 140,
56, 92, 198, 164, 125, 77, 48, 166, 113, 82, 150, 97, 249, 64, 155, 6,
227, 81, 217, 68, 104, 245, 231, 243, 222, 147, 93, 67, 202, 143, 42, 224,
223, 255, 131, 171, 105, 71, 184, 173, 138, 188, 170, 176, 167, 78, 21, 83,
32, 251, 154, 241, 107, 95, 96, 91, 129, 133, 152, 139, 252, 84, 3, 196,
1, 209, 11, 43, 236, 226, 9, 239, 13, 18, 24, 232, 160, 215, 101, 175,
161, 134, 58, 194, 47, 106, 168, 117, 116, 39, 36, 172, 219, 52, 40, 109,
85, 53, 98, 88, 233, 240, 46, 49, 158, 244, 135, 5, 75, 218, 201, 20,
25, 65, 156, 55, 57, 23, 44, 228, 122, 15, 142, 87, 149, 195, 153, 73,
22, 132, 51, 2, 69, 108, 14, 72, 120, 19, 110, 17, 94, 190, 102, 199,
203, 210, 26, 130, 181, 33, 206, 114, 128, 253, 79, 119, 207, 59, 214, 187],
dtype=numpy.uint8)
def _generate_gradients():
    angles = numpy.linspace(0, 2 * math.pi, len(_permutation), endpoint = False)
    angles = angles[_permutation]
    return numpy.cos(angles), numpy.sin(angles)
_gradients_x, _gradients_y = _generate_gradients()
def _hash_grid_perlin(size, x_offset = 0, y_offset = 0, seed = 0):
""" Create a grid of hashes based on https://www.cs.utah.edu/~aek/research/noise.pdf """
#TODO Don't calculate the position values mod 256, preferably xor fold them from 32 bit or at least 16 bit
seed = _permutation[seed % len(_permutation)]
x_array = numpy.arange(x_offset, size + x_offset, dtype=numpy.uint8)
numpy.add(x_array, seed, out=x_array)
x_array = _permutation[x_array]
y_array = numpy.arange(y_offset, size + y_offset, dtype=numpy.uint8)
return _permutation[numpy.add(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])]
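
# For reference, a scalar sketch of the per-cell value that the vectorized grid
# above computes (this helper is not part of the original gist; it assumes
# coordinates wrap mod 256, as the uint8 arithmetic above does).
def _hash_perlin_scalar(x, y, seed=0):
    s = int(_permutation[seed % len(_permutation)])
    return _permutation[(int(_permutation[(x + s) & 0xff]) + y) & 0xff]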
def _hash_grid_better(size, x_offset = 0, y_offset = 0, seed = 0):
""" Create a grid of hashes based on https://www.cs.utah.edu/~aek/research/noise.pdf """
#TODO Don't calculate the position values mod 256, preferably xor fold them from 32 bit or at least 16 bit
x_array = numpy.arange(x_offset, size + x_offset, dtype=numpy.uint8)
x_array = _permutation[x_array]
y_array = numpy.arange(y_offset, size + y_offset, dtype=numpy.uint8)
y_array = _permutation2[y_array]
numpy.bitwise_xor(x_array, _permutation3[seed % len(_permutation)], out = x_array)
return numpy.bitwise_xor(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])
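
# Scalar restatement of the cell hash above (a helper not in the original gist):
# three independent permutation lookups combined with xor, i.e. the
# P[X] ^ P[Y] ^ P[seed] scheme mentioned in the gradient_noise docstring below.
def _hash_better_scalar(x, y, seed=0):
    return (_permutation[x & 0xff]
            ^ _permutation2[y & 0xff]
            ^ _permutation3[seed % len(_permutation)])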
def _hash_grid_fnv1a(size, x_offset = 0, y_offset = 0, seed = 0):
""" This is a hacky (but hopefully fast) way to make an array that contains a
FNV-1a hashes of seed, x index and y index """
hash_dtype = numpy.uint32
fnv_prime = 16777619
fnv_offset_basis = 2166136261
# first FNV iteration for the seeed value
init_value = (fnv_offset_basis ^ seed) * fnv_prime
init_value = hash_dtype(init_value & int(hash_dtype(-1)))
# Second round (mixing in x coordinates)
x_array = numpy.arange(x_offset, size + x_offset, dtype=hash_dtype)
numpy.bitwise_and(x_array, 0xff, out=x_array)
numpy.bitwise_xor(init_value, x_array, out=x_array)
numpy.multiply(x_array, fnv_prime, out=x_array)
# Last step, hash in the y coordinates
y_array = numpy.arange(y_offset, size + y_offset, dtype=hash_dtype)
numpy.bitwise_and(y_array, 0xff, out=y_array)
ret = numpy.bitwise_xor(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])
numpy.multiply(ret, fnv_prime, out=ret)
numpy.bitwise_and(ret, 0xff, out=ret)
return ret
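
# Scalar FNV-1a reference (not in the original gist) for one (x, y) pair,
# mirroring the vectorized steps above: the seed, x byte and y byte are folded
# in one at a time with xor-then-multiply, and the low byte is returned.
def _hash_fnv1a_scalar(x, y, seed=0):
    h = (2166136261 ^ seed) * 16777619 & 0xffffffff
    h = (h ^ (x & 0xff)) * 16777619 & 0xffffffff
    h = (h ^ (y & 0xff)) * 16777619 & 0xffffffff
    return h & 0xff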
def _hash_grid_djb2(size, x_offset = 0, y_offset = 0, seed = 0):
""" Using DJB2 (http://www.cse.yorku.ca/~oz/hash.html) to hash the coordinates """
# Hash in seed
seed = int(seed)
hash = 5381
hash = (hash * 33 ^ (seed & 0xff)) & numpy.uint32(-1)
hash = (hash * 33) & numpy.uint32(-1)
x_array = numpy.arange(x_offset, size + x_offset, dtype=numpy.uint32)
numpy.bitwise_and(x_array, 0xff, out=x_array)
numpy.bitwise_xor(hash, x_array, out = x_array)
y_array = numpy.arange(y_offset, size + y_offset, dtype=numpy.uint32)
numpy.bitwise_and(y_array, 0xff, out=y_array)
numpy.multiply(x_array, 33)
ret = numpy.bitwise_xor(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])
numpy.bitwise_and(ret, 0xff, out=ret)
return ret
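
# Scalar restatement (a helper not in the original gist) of the djb2-xor chain
# the code above computes per element: h = h*33 ^ byte, folding in the seed,
# x and y bytes in turn, keeping only the low byte of the result.
def _hash_djb2_scalar(x, y, seed=0):
    h = 5381
    for byte in (seed & 0xff, x & 0xff, y & 0xff):
        h = ((h * 33) ^ byte) & 0xffffffff
    return h & 0xff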
def _hash_grid_blackmoons(size, x_offset = 0, y_offset = 0, seed = 0):
""" From http://pastebin.com/4VvceBAc """
x_array = numpy.arange(x_offset, size + x_offset, dtype=numpy.uint32)
y_array = numpy.arange(y_offset, size + y_offset, dtype=numpy.uint32)
numpy.multiply(y_array, 75326, out=y_array)
n = numpy.add(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])
n2 = numpy.left_shift(n, 13)
numpy.bitwise_xor(n, n2, out=n)
numpy.square(n, out=n2)
numpy.multiply(n2, 15731, out=n2)
numpy.add(n2, 789221, out=n2)
numpy.multiply(n, n2, out=n)
numpy.add(n, 1376312589, out=n)
numpy.bitwise_and(n, 0xff, out=n)
return n
def _hash_grid_murmur(size, x_offset = 0, y_offset = 0, seed = 0):
""" https://en.wikipedia.org/wiki/MurmurHash """
c1 = 0xcc9e2d51
c2 = 0x1b873593
r1 = 15
r2 = 13
m = 5
n = 0xe6546b64
def rot32(a, k):
b = numpy.left_shift(a, k)
numpy.right_shift(a, (32 - k), out=a)
numpy.bitwise_or(a, b, out=a)
def k(a):
numpy.multiply(a, c1, out=a)
rot32(a, r1)
numpy.multiply(a, c2, out=a)
def hash_mix(a):
rot32(a, r2)
numpy.multiply(a, m, out=a)
numpy.add(a, n, out=a)
x_array = numpy.arange(x_offset, size + x_offset, dtype=numpy.uint32)
k(x_array)
numpy.bitwise_xor(x_array, seed, out=x_array)
hash_mix(x_array)
y_array = numpy.arange(y_offset, size + y_offset, dtype=numpy.uint32)
k(y_array)
ret = numpy.bitwise_xor(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])
hash_mix(ret)
tmp = numpy.right_shift(ret, 16)
numpy.bitwise_xor(ret, tmp, out=ret)
numpy.multiply(ret, 0x85ebca6b, out=ret)
tmp = numpy.right_shift(ret, 13)
numpy.bitwise_xor(ret, tmp, out=ret)
numpy.multiply(ret, 0xc2b2ae35, out=ret)
tmp = numpy.right_shift(ret, 16)
numpy.bitwise_xor(ret, tmp, out=ret)
numpy.bitwise_and(ret, 0xff, out=ret)
return ret
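
# Scalar sketch (not in the original gist) of the final avalanche ("fmix32")
# steps applied above; this is the part of MurmurHash3 that spreads entropy
# into the low byte that the noise actually uses.
def _murmur_fmix32_scalar(h):
    h ^= h >> 16
    h = (h * 0x85ebca6b) & 0xffffffff  # mask to 32 bits to mimic uint32 wraparound
    h ^= h >> 13
    h = (h * 0xc2b2ae35) & 0xffffffff
    h ^= h >> 16
    return h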
def _hash_grid_cube(size, x_offset = 0, y_offset = 0, seed = 0):
""" My own attempt, fiddling until the result is good enough. """
x_array = numpy.arange(x_offset, size + x_offset, dtype=numpy.uint32)
numpy.multiply(x_array, 15731, out=x_array)
tmp = numpy.right_shift(x_array, 14)
numpy.bitwise_xor(x_array, tmp, out=x_array)
numpy.bitwise_xor(x_array, seed, out=x_array)
y_array = numpy.arange(y_offset, size + y_offset, dtype=numpy.uint32)
numpy.multiply(y_array, 5381, out=x_array)
numpy.right_shift(y_array, 13, out=tmp)
numpy.bitwise_xor(y_array, tmp, out=y_array)
ret = numpy.bitwise_xor(x_array[numpy.newaxis,:], y_array[:,numpy.newaxis])
numpy.multiply(ret, 789221, out=ret)
tmp = numpy.right_shift(ret, 20)
numpy.bitwise_xor(ret, tmp, out=ret)
numpy.bitwise_and(ret, 0xff, out=ret)
return ret
def _hash_grid_md5(size, x_offset = 0, y_offset = 0, seed = 0):
    import hashlib
    ret = numpy.empty((size, size), dtype=numpy.uint8)
    for y, yc in enumerate(range(y_offset, size + y_offset)):
        for x, xc in enumerate(range(x_offset, size + x_offset)):
            hasher = hashlib.md5()
            hasher.update(seed.to_bytes(4, byteorder="big"))
            hasher.update(xc.to_bytes(4, byteorder="big"))
            hasher.update(yc.to_bytes(4, byteorder="big"))
            ret[x, y] = hasher.digest()[0]
    return ret
_hash_grid = _hash_grid_better
def gradient_noise(size, scale, x_offset = 0, y_offset = 0, seed = 0):
""" Generate a square numpy array with a single octave of gradient noise.
The noise itself is a hybrid between perlin noise and simplex noise, using square grid,
and sums of surflets ( https://briansharpe.wordpress.com/2012/03/09/modifications-to-classic-perlin-noise/ )
For hashing we also use a non standard function (P[X] ^ P[Y] ^ P[seed] instead of P[Y + P[X]],
https://www.cs.utah.edu/~aek/research/noise.pdf)
`size` The output array will have shape `(size, size)`.
`scale` How many noise cells will there be along the side of the output array. Integer.
`x_offset`, `y_offset` Determine the offset of the left top corner of output array in noise blocks. Integer.
`seed` Controls the noise seed."""
coords = numpy.linspace(0, scale, size, endpoint=False, dtype=numpy.float)
integers = coords.astype(dtype=numpy.uint32)
fractions = numpy.subtract(coords, integers, out=coords)
# Generate quarters of the surflets in the top left of each cell
# Later we will rotate this array to obtain the remaining quarters
fractions_squared = numpy.square(fractions)
surflets = numpy.add(fractions_squared[numpy.newaxis,:], fractions_squared[:,numpy.newaxis])
del fractions_squared
numpy.fmin(surflets, 1.0, out = surflets)
numpy.sqrt(surflets, out = surflets)
numpy.subtract(1.0, surflets, out = surflets)
numpy.square(surflets, out = surflets)
#return surflets
hashes = _hash_grid(scale + 1, x_offset, y_offset, seed)
# Generate the gradient dot products
grid_gradients_x = _gradients_x[hashes]
grid_gradients_y = _gradients_y[hashes]
output = numpy.zeros((size, size), dtype = fractions.dtype)
for i, (x_offset, y_offset) in enumerate([(0, 0), (0, 1), (1, 1), (1, 0)]): # Rotating counterclockwise
indices = numpy.ix_(integers + y_offset, integers + x_offset)
gx = grid_gradients_x[indices]
gy = grid_gradients_y[indices]
del indices
numpy.multiply(gx, (fractions - x_offset)[numpy.newaxis,:], out = gx)
numpy.multiply(gy, (fractions - y_offset)[:,numpy.newaxis], out = gy)
dot_product = numpy.add(gx, gy, out = gx)
del gy
#return dot_product
rotated_surflets = numpy.rot90(surflets, i)
noise_quarter = numpy.multiply(dot_product, rotated_surflets, out=dot_product)
numpy.add(output, noise_quarter, out = output)
return output
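
# Usage sketch (not part of the original gist; the function name and parameters
# here are illustrative only): two 512x512 chunks with 16 noise cells per side,
# generated with the x_offset parameter so they cover adjacent regions.
def _example_single_octave():
    left = gradient_noise(512, 16, x_offset=0, seed=42)
    right = gradient_noise(512, 16, x_offset=16, seed=42)
    # The chunks should line up along their shared edge, since the hash grid
    # depends only on absolute cell coordinates.
    return numpy.hstack((left, right))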
def multioctave_noise_iterator(noise_func, size, scale = 1, persistence = 0.5, octaves = None, x_offset = 0, y_offset = 0, seed = 0):
""" Yield a sequence of noise octaves of shape (size, size).
For details see multioctave_noise(). """
if octaves is None:
octaves = math.ceil(math.log2(size))
amplitude = 1
for i in range(octaves):
octave = noise_func(size, scale, x_offset * scale, y_offset * scale, seed)
numpy.multiply(octave, amplitude, out=octave)
yield octave
del octave
amplitude *= persistence
scale *= 2
def multioctave_noise(noise_func, size, scale = 1, persistence = 0.5, octaves = None, x_offset = 0, y_offset = 0, seed = 0):
""" Generate a square numpy array with multioctave perlin noise.
`size` The output array will have shape `(size, size)`.
`scale` How many noise cells of the lowest frequency will there be along the side of the output array. Integer.
`persistence` Coefficient for exponential dropoff of amplitudes in higher frequency octaves.
`octaves` Number of octaves to add, default value is to use as many octaves as will be visible in the output (ceil(log2(size))).
`x_offset`, `y_offset` Determine the offset of the left top corner of output array in lowest frequency blocks. Integer.
`seed` Controls the noise seed."""
# TODO: convert this to iterator to allow eg. turbulence textures.
output = numpy.zeros((size, size));
for octave in multioctave_noise_iterator(noise_func, size, scale, persistence, octaves, x_offset, y_offset, seed):
numpy.add(octave, output, out=output)
return output
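
# Sketch of a turbulence-style texture built from the octave iterator above
# (a hypothetical helper, not part of the original gist): it sums the absolute
# values of the octaves instead of the raw values, as the TODO in
# multioctave_noise hints at.
def turbulence(noise_func, size, scale = 1, persistence = 0.5, octaves = None, x_offset = 0, y_offset = 0, seed = 0):
    output = numpy.zeros((size, size))
    for octave in multioctave_noise_iterator(noise_func, size, scale, persistence, octaves, x_offset, y_offset, seed):
        numpy.add(output, numpy.absolute(octave), out=output)
    return output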
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    import timeit
    import re

    _hash_grid = _hash_grid_perlin
    hash_grids = [_hash_grid_md5, # Too slow
                  _hash_grid_perlin,
                  _hash_grid_better,
                  _hash_grid_fnv1a,
                  _hash_grid_djb2, # Failed miserably
                  _hash_grid_blackmoons,
                  _hash_grid_murmur,
                  _hash_grid_cube]
    f, axarr = plt.subplots(3, len(hash_grids))
    for i, _hash_grid in enumerate(hash_grids):
        name = _hash_grid.__name__
        name = re.sub(r"^_hash_grid_", "", name)
        print(i + 1, "-", name)

        start_time = timeit.default_timer()
        h = _hash_grid(8192)
        elapsed = timeit.default_timer() - start_time
        print("hash elapsed {:.2}s".format(elapsed))
        axarr[0, i].imshow(h[:256, :256], interpolation="nearest")
        axarr[0, i].set_title(name + "({:.01f}ms)".format(1000 * elapsed))

        start_time = timeit.default_timer()
        p = gradient_noise(4096, 128)
        elapsed = timeit.default_timer() - start_time
        print("elapsed {:.2}s".format(elapsed))
        axarr[1, i].imshow(p[:512,:512], interpolation="nearest")
        axarr[1, i].set_title("Complete noise")

        fft = numpy.fft.fftshift(numpy.fft.fftn(p))
        fft_crop = 0.05
        s = fft.shape[0]
        fft_crop_min = int(s * (0.5 - fft_crop))
        fft_crop_max = int(s * (0.5 + fft_crop))
        fft = fft[fft_crop_min:fft_crop_max, fft_crop_min:fft_crop_max]
        axarr[2, i].imshow(numpy.absolute(fft), interpolation="nearest")
        axarr[2, i].set_title("Complete noise FFT")
    plt.show()