Skip to content

Instantly share code, notes, and snippets.

@kwinkunks
Last active October 18, 2019 08:14
Show Gist options
Save kwinkunks/512c431900af931b5f00423f1cce817c to your computer and use it in GitHub Desktop.
Two functions implementing very naive 3d spatial analysis of a point cloud
import numpy as np
import scipy.signal as ss
def to_volume(points, max_mb=10):
    """
    Convert an N x 3 array of points in a point cloud to a 3D image
    or 'volume'. The degree of upscaling is controlled by ``max_mb``,
    which is the approximate target size of the 3D image in memory.

    Args:
        points (ndarray): the x, y, z data for N points as an
            array-like of shape (N, 3). If there are more columns,
            then x, y, z must be the first three.
        max_mb (int): the APPROXIMATE maximum size of the volume,
            given in MB. The function will make the biggest cube
            it can, given the max size constraint.

    Returns:
        volume (ndarray): the resulting volume, dtype uint8. Voxels
            containing at least one point are 1, all others 0.
        coords (ndarray): the array of indices corresponding to the
            input points. Each row gives the position in the volume
            corresponding to that point in the point cloud.

    Raises:
        ValueError: if the point cloud's extent is too small to ever
            reach ``max_mb`` at the coarsest scale step.

    Example:
        >>> img, coords = to_volume(df[['X', 'Y', 'Z']].values, max_mb=25)

    TODO:
        - Tests.
        - Allow user to specify scale directly.
    """
    # Full-precision coordinates; ignore any extra columns beyond x, y, z.
    # (Bug fix: the indexing step previously used ALL columns, which broke
    # broadcasting against the 3-element min vector.)
    xyz = np.asarray(points)[:, :3]
    X, Y, Z = xyz.T

    # Per-axis minimum and range.
    xn, yn, zn = X.min(), Y.min(), Z.min()
    xr, yr, zr = X.max() - xn, Y.max() - yn, Z.max() - zn

    # Find the biggest scale factor (searched in steps of 100) whose
    # volume reaches ~90% of the requested memory budget. One voxel is
    # one uint8 byte, so bytes / 2**20 gives MB.
    size = 0
    scale = 1_000_000
    while size < max_mb * 0.9:
        scale -= 100
        if scale <= 0:
            # The loop previously hung (or divided by zero) here.
            raise ValueError("max_mb is too large for the extent of the point cloud.")
        # Cast to int so these are valid array-shape components even
        # when the input coordinates are floats.
        xc, yc, zc = int(xr // scale), int(yr // scale), int(zr // scale)
        size = xc * yc * zc / 2**20

    # Integer voxel index for each point. (Bug fix: this line previously
    # divided by an undefined name ``factor``; the divisor is ``scale``.)
    coords = ((xyz - np.array([xn, yn, zn])) // scale).astype(int)
    x, y, z = coords.T

    # Make the binary occupancy image.
    img = np.zeros((xc + 1, yc + 1, zc + 1), dtype=np.uint8)
    img[x, y, z] = 1
    return img, coords
def continuity_cubes(img, coords, s=7):
    """
    Poor-man's structural analysis for points: do it in an image.

    Makes 'continuity cubes'. Given a 3D image and the point
    coordinates (the output of ``to_volume()``, above), produce
    spatial correlation attributes by convolving the image with
    structured kernels, then sampling each result at the points.

    Args:
        img (ndarray): the image volume.
        coords (ndarray): the (N, 3) integer voxel positions of the
            points, as returned by ``to_volume()``.
        s (int): the size of the kernel (default 7 voxels).
            This will be used to make kernels of shape (s, s, s).

    Returns:
        tuple: A 4-tuple of ndarrays containing:
            - x continuity (longness)
            - y continuity (wideness)
            - z continuity (tallness)
            - xy continuity (flatness)

    Example:
        >>> df['x_cont'], df['y_cont'], df['z_cont'], df['xy_cont'] = continuity_cubes(img, coords)
    """
    mid = s // 2
    centre = slice(mid, mid + 1)

    # One line kernel per axis: a 1/s-weighted line of voxels through
    # the centre of the cube, oriented along that axis.
    kernels = []
    for axis in range(3):
        kernel = np.zeros((s, s, s))
        index = [centre, centre, centre]
        index[axis] = slice(None)
        kernel[tuple(index)] = 1 / s
        kernels.append(kernel)

    # Plus a horizontal-plane (xy) kernel, weighted 1/s**2.
    plane = np.zeros((s, s, s))
    plane[:, :, mid] = 1 / s**2
    kernels.append(plane)

    # Convolve, then sample every attribute cube at the point positions.
    x, y, z = coords.T
    return tuple(ss.convolve(img, kernel, mode='same')[x, y, z]
                 for kernel in kernels)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment