Skip to content

Instantly share code, notes, and snippets.

@s-mawjee
Last active May 20, 2024 17:35
Show Gist options
  • Save s-mawjee/ad0d8e0c7e07265cae097899fe48c023 to your computer and use it in GitHub Desktop.
Get Nvidia GPU information via Python code instead of watching nvidia-smi in the terminal. Useful when training ML models; can be added to the training loop.
import nvidia_smi
_GPU = False
_NUMBER_OF_GPU = 0
def _check_gpu():
global _GPU
global _NUMBER_OF_GPU
nvidia_smi.nvmlInit()
_NUMBER_OF_GPU = nvidia_smi.nvmlDeviceGetCount()
if _NUMBER_OF_GPU > 0:
_GPU = True
def _print_gpu_usage(detailed=False):
if not detailed:
for i in range(_NUMBER_OF_GPU):
handle = nvidia_smi.nvmlDeviceGetHandleByIndex(i)
info = nvidia_smi.nvmlDeviceGetMemoryInfo(handle)
print(f'GPU-{i}: GPU-Memory: {_bytes_to_megabytes(info.used)}/{_bytes_to_megabytes(info.total)} MB')
def _bytes_to_megabytes(bytes):
return round((bytes/1024)/1024,2)
if __name__ == '__main__':
print('Checking for Nvidia GPU\n')
_check_gpu()
if _GPU:
_print_gpu_usage()
else:
print("No GPU found.")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment