Skip to content

Instantly share code, notes, and snippets.

@PolarNick239
Last active December 12, 2018 12:30
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save PolarNick239/496aaa3f39a032720dde980cf25bc5f7 to your computer and use it in GitHub Desktop.
import ctypes
import os
import subprocess
import time
from collections import namedtuple
def get_memory(key="total"):
    """Return a host memory statistic, in megabytes.

    key: one of "total", "used", "free" for physical RAM, or
    "swaptotal"/"swapused"/"swapfree" for swap (POSIX only).
    Raises KeyError for an unrecognized key.  On an unsupported OS
    prints a message and returns None (original best-effort behavior).
    """
    # See https://doeidoei.wordpress.com/2009/03/22/python-tip-3-checking-available-ram-with-python/
    if os.name == "posix":
        # Parse `free -m` (values already in MiB).
        # "Mem:"  columns: total(1) used(2) free(3) shared(4) buff/cache(5) available(6)
        # "Swap:" columns: total(1) used(2) free(3)
        mem_key_to_index = {
            "total": 1,
            "used": 2,
            "free": 6,  # "available" column, kept for backward compatibility
        }
        swap_key_to_index = {"total": 1, "used": 2, "free": 3}
        lines = os.popen("free -m").readlines()
        if key.startswith("swap"):
            # BUGFIX: the swap line has only 4 columns; the old code reused
            # the RAM index table, so "swapfree" indexed column 6 and crashed.
            value = lines[2].split()[swap_key_to_index[key[4:]]]
        else:
            value = lines[1].split()[mem_key_to_index[key]]
        return int(value)
    elif os.name == "nt":
        # Use GlobalMemoryStatusEx: the legacy GlobalMemoryStatus struct has
        # 32-bit fields and caps every counter at 4 GB on modern machines.
        c_ulong = ctypes.c_ulong
        c_ulonglong = ctypes.c_ulonglong

        class MEMORYSTATUSEX(ctypes.Structure):
            _fields_ = [
                ("dwLength", c_ulong),
                ("dwMemoryLoad", c_ulong),
                ("ullTotalPhys", c_ulonglong),
                ("ullAvailPhys", c_ulonglong),
                ("ullTotalPageFile", c_ulonglong),
                ("ullAvailPageFile", c_ulonglong),
                ("ullTotalVirtual", c_ulonglong),
                ("ullAvailVirtual", c_ulonglong),
                ("ullAvailExtendedVirtual", c_ulonglong),
            ]

        memory_status = MEMORYSTATUSEX()
        memory_status.dwLength = ctypes.sizeof(MEMORYSTATUSEX)
        ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(memory_status))
        if key == "total":
            result = memory_status.ullTotalPhys
        elif key == "used":
            result = memory_status.ullTotalPhys - memory_status.ullAvailPhys
        elif key == "free":
            result = memory_status.ullAvailPhys
        else:
            raise KeyError("key={}, while expected one of: [total, used, free]".format(key))
        # Windows API reports bytes; convert to MiB like the POSIX branch.
        return int(result / 1024 ** 2)
    else:
        print("I only work with Win or Linux :P")
# One nvidia-smi sample for a single GPU: memory in MiB, temperature in C,
# fan and utilization in percent.  A field is None when the GPU does not
# report that metric.
_GPU_FIELDS = [
    'total_mem', 'avail_mem', 'used_mem',
    'temp', 'percent_fan',
    'usage_gpu', 'usage_mem',
]
GPUUsageInfo = namedtuple('GPUUsageInfo', _GPU_FIELDS)
def _parse_gpu_line(line):
    """Parse one CSV row of nvidia-smi output into a GPUUsageInfo.

    BUGFIX: nvidia-smi marks unavailable metrics as "[Not Supported]" or
    "[N/A]"; the old substring check ('not' in value) missed "[N/A]" and
    int() then raised.  Any non-numeric field now becomes None.
    """
    values = []
    for value in line.split(", "):
        try:
            values.append(int(value))
        except ValueError:
            # Non-numeric marker => metric unsupported on this GPU.
            values.append(None)
    return GPUUsageInfo(*values)


def query_nvidia_smi():
    """Query every GPU via nvidia-smi.

    Returns a list of GPUUsageInfo, one entry per GPU.
    Raises Exception when nvidia-smi is not installed or exits non-zero.
    """
    params = ["memory.total", "memory.free", "memory.used",
              "temperature.gpu", "fan.speed",
              "utilization.gpu", "utilization.memory"]
    try:
        output = subprocess.check_output(["nvidia-smi",
                                          "--query-gpu={}".format(','.join(params)),
                                          "--format=csv,noheader,nounits"])
    except FileNotFoundError:
        raise Exception("No nvidia-smi")
    except subprocess.CalledProcessError:
        raise Exception("nvidia-smi call failed")
    output = output.decode('utf-8').strip()
    # One CSV line per GPU.
    return [_parse_gpu_line(line) for line in output.split("\n")]
if __name__ == '__main__':
    # BUGFIX: the format string had six slots but seven values are passed
    # on every print, so str.format silently dropped the last column
    # (average GPU memory utilization).  Use seven slots.
    line_format = "[{: <20}, {: <20}, {: <20}, {: <20}, {: <20}, {: <20}, {: <20}],"
    print(line_format.format("[Time, hh:mm:ss]", "[Used memory, Mb]", "[Used swap, Mb]",
                             "[Avg VRAM used, Mb]", "[Avg GPU temp, C]",
                             "[Avg GPU util, %]", "[Avg GPU mem util, %]"))
    # Sample once a minute, forever (stop with Ctrl+C).
    while True:
        gpus_info = query_nvidia_smi()
        # Average each metric over all GPUs; guard the divisor so an empty
        # GPU list prints zeros instead of raising ZeroDivisionError.
        gpu_count = max(len(gpus_info), 1)
        gpu_used_mem = sum(gpu.used_mem for gpu in gpus_info) / gpu_count
        gpu_temp = sum(gpu.temp for gpu in gpus_info) / gpu_count
        gpu_usage = sum(gpu.usage_gpu for gpu in gpus_info) / gpu_count
        gpu_mem_util = sum(gpu.usage_mem for gpu in gpus_info) / gpu_count
        cur_time = time.strftime("%H:%M:%S", time.localtime())
        print(line_format.format(cur_time, get_memory("used"), get_memory("swapused"),
                                 gpu_used_mem, gpu_temp, gpu_usage, gpu_mem_util))
        time.sleep(60)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment