Skip to content

Instantly share code, notes, and snippets.

@justinfx
Last active January 18, 2016 16:05
Show Gist options
  • Save justinfx/10458017 to your computer and use it in GitHub Desktop.
Save justinfx/10458017 to your computer and use it in GitHub Desktop.
Get memory usage of CUDA GPU, using ctypes and libcudart
import ctypes
# Path to location of libcudart — hard-coded for CUDA 5.5 on OS X
# ("/Developer/NVIDIA/..."); adjust for other platforms/CUDA versions.
_CUDA = "/Developer/NVIDIA/CUDA-5.5/lib/libcudart.5.5.dylib"
# Load the CUDA runtime shared library; raises OSError if the path is wrong.
cuda = ctypes.cdll.LoadLibrary(_CUDA)
# cudaMemGetInfo(size_t* free, size_t* total) -> cudaError_t.
# restype=int: ctypes calls int() on the raw return value (the error code).
cuda.cudaMemGetInfo.restype = int
cuda.cudaMemGetInfo.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
# cudaGetErrorString(cudaError_t) -> const char* (human-readable message).
cuda.cudaGetErrorString.restype = ctypes.c_char_p
cuda.cudaGetErrorString.argtypes = [ctypes.c_int]
def cudaMemGetInfo(mb=False):
    """
    Return (free, total) memory stats for the CUDA GPU.

    Default units are bytes. If mb is True, return units in MB.

    Raises RuntimeError if the CUDA runtime reports an error.
    """
    free = ctypes.c_size_t()
    total = ctypes.c_size_t()
    ret = cuda.cudaMemGetInfo(ctypes.byref(free), ctypes.byref(total))
    if ret != 0:
        # Fix: original referenced an undefined name `status` here, which
        # raised NameError on the error path instead of the intended
        # RuntimeError. The error code is `ret`.
        err = cuda.cudaGetErrorString(ret)
        raise RuntimeError("CUDA Error (%d): %s" % (ret, err))
    if mb:
        scale = 1024.0 ** 2
        return free.value / scale, total.value / scale
    else:
        return free.value, total.value
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment