@sevagh
Created April 22, 2021 15:33
demo of cupy out of memory issue with large ndarray
import numpy as np
import scipy.fft
import cupyx
import cupy
# use a CUDA managed-memory (unified memory) pool so large ndarrays can be streamed
# to the device even when they exceed GPU memory
cupy.cuda.set_allocator(cupy.cuda.MemoryPool(cupy.cuda.memory.malloc_managed).malloc)
if __name__ == '__main__':
    # ~8 GB of float64 (1000 x 1,000,000 x 8 bytes); too large for my 8GB gpu vmem
    big_array = np.random.standard_normal((1000, 1000000))
    print(f'size: {big_array.nbytes / 1e9} GB')
    print(f'shape: {big_array.shape}')
    print(f'dtype: {big_array.dtype}')
    cupy.show_config()
    # copy to the device and perform a giant 2D IFFT; this is the step that runs out of memory
    big_array = cupyx.scipy.fft.ifft2(cupy.array(big_array))
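
A possible workaround, sketched below (not part of the original gist): the 2D inverse DFT is separable, so it can be applied one axis at a time in blocks, keeping only one block on the GPU at any moment. The function name chunked_ifft2 and the block sizes are illustrative assumptions, not CuPy API.

import numpy as np
import cupy

def chunked_ifft2(x, row_block=100, col_block=100000):
    # hypothetical helper, not CuPy API: compute a 2D IFFT axis-by-axis in blocks
    out = np.empty(x.shape, dtype=np.complex128)
    # IFFT along axis 1, a block of rows at a time
    for i in range(0, x.shape[0], row_block):
        gpu_rows = cupy.asarray(x[i:i + row_block])
        out[i:i + row_block] = cupy.asnumpy(cupy.fft.ifft(gpu_rows, axis=1))
    # IFFT along axis 0, a block of columns at a time
    for j in range(0, x.shape[1], col_block):
        gpu_cols = cupy.asarray(out[:, j:j + col_block])
        out[:, j:j + col_block] = cupy.asnumpy(cupy.fft.ifft(gpu_cols, axis=0))
    return out

For the 1000 x 1,000,000 array above, these block sizes keep per-block device usage to a few gigabytes; other shapes would need different block sizes.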