Gets the Resource Utilization Statistics For Your Google Colab
def get_colab_usage(pip_install=False, import_libs=True, return_fn=True):
    """Retrieve Google Colab resource utilization statistics.

    Args:
        pip_install (bool, optional): Whether to perform pip installs
        import_libs (bool, optional): Whether to import libraries
        return_fn (bool, optional): Whether or not to return the usage fn

    Returns:
        The print_resource_usage fn (only if the return_fn flag is set to
        True), which can be used to check resource utilization stats at any
        later point in this session without needing any further pip
        installs, library imports, or fn definitions.
    """
    if pip_install:
        # Memory footprint support libraries/code
        !ln -sf /opt/bin/nvidia-smi /usr/bin/nvidia-smi
        !pip install gputil
        !pip install psutil
        !pip install humanize

    if import_libs:
        import psutil
        import humanize
        import os
        import GPUtil as GPU

    def print_resource_usage():
        """Function that actually retrieves resource utilization statistics"""
        # Get the active GPU
        # TODO >>> GPU not guaranteed to be the only one <<< TODO
        gpu = GPU.getGPUs()[0]

        # Get the current process
        process = psutil.Process(os.getpid())

        # Get available general RAM
        gen_ram = humanize.naturalsize(psutil.virtual_memory().available)

        # Get the resident memory size of the current process
        proc_size = humanize.naturalsize(process.memory_info().rss)

        # Get GPU memory stats
        gpu_free_mem = gpu.memoryFree
        gpu_used_mem = gpu.memoryUsed
        gpu_util_mem = gpu.memoryUtil * 100
        gpu_total_mem = gpu.memoryTotal

        # Print interpretable resource utilization statistics
        print("\n------------------------------------------------------")
        print("              RESOURCE USAGE STATISTICS               ")
        print("------------------------------------------------------\n")
        print("Gen RAM Free: {:8} | "
              "Proc size   : {}"
              "".format(gen_ram, proc_size))
        print("GPU RAM Free: {:4.0f} MB | "
              "Used        : {:5.0f} MB | "
              "Util        : {:5.0f}% | "
              "Total       : {:5.0f}MB\n"
              "".format(gpu_free_mem, gpu_used_mem,
                        gpu_util_mem, gpu_total_mem))

    # Call the fn once at definition time
    print_resource_usage()

    if return_fn:
        return print_resource_usage
# This will print the resource utilization statistics and bind the returned
# fn to `get_usage`, which can then be called like a regular function with
# no arguments required.
get_usage = get_colab_usage(pip_install=True, return_fn=True)
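
The TODO inside print_resource_usage notes that the first GPU is not guaranteed to be the only one. A minimal sketch of a variant that reports every visible GPU is shown below; the helper name print_all_gpu_usage is hypothetical and it assumes gputil, psutil, and humanize are already installed as above.

def print_all_gpu_usage():
    """Report RAM and per-GPU memory stats for every GPU GPUtil can see (hypothetical helper)."""
    import os
    import psutil
    import humanize
    import GPUtil as GPU

    # General RAM and current-process footprint, as in print_resource_usage
    gen_ram = humanize.naturalsize(psutil.virtual_memory().available)
    proc_size = humanize.naturalsize(psutil.Process(os.getpid()).memory_info().rss)
    print("Gen RAM Free: {:8} | Proc size   : {}".format(gen_ram, proc_size))

    # Iterate over all visible GPUs instead of taking only index 0
    for gpu in GPU.getGPUs():
        print("GPU {} ({}): Free {:5.0f} MB | Used {:5.0f} MB | "
              "Util {:5.0f}% | Total {:5.0f} MB"
              "".format(gpu.id, gpu.name, gpu.memoryFree, gpu.memoryUsed,
                        gpu.memoryUtil * 100, gpu.memoryTotal))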
Sample of Expected Output


------------------------------------------------------
             RESOURCE USAGE STATISTICS                
------------------------------------------------------

Gen RAM Free: 26.4 GB  | Proc size   : 162.4 MB
GPU RAM Free: 11441 MB | Used        :     0 MB | Util        :     0% | Total       : 11441MB
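
Since the fn is returned and bound to `get_usage`, any later cell in the same session can reproduce this output with a plain call and no further installs or imports:

get_usage()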
