Skip to content

Instantly share code, notes, and snippets.

@ipmb
Created March 3, 2017 03:59
Show Gist options
  • Save ipmb/c45d44589b37272de354820069de93fb to your computer and use it in GitHub Desktop.
Send system stats to AWS Cloudwatch with Python
# -*- coding: utf-8 -*-
import os
# pip install boto3 psutil requests
import boto3
import psutil
import requests
NAMESPACE = 'Sys'
os.environ.setdefault('AWS_DEFAULT_REGION', 'us-east-1')
def get_metadata(path):
    """Fetch a value from the EC2 instance metadata service.

    :param path: metadata key relative to ``/latest/meta-data/``
                 (e.g. ``'instance-id'``)
    :returns: the metadata value as text
    :raises requests.RequestException: if the metadata service is
        unreachable within the 0.5s timeout (e.g. not running on EC2)
    """
    url = 'http://169.254.169.254/latest/meta-data/' + path
    # Use .text (not .content): on Python 3 .content is bytes, which
    # boto3 rejects as a CloudWatch Dimension 'Value'.
    return requests.get(url, timeout=0.5).text
def extract_metric_tuple(metric, prefix, unit, dimensions=None):
    """Expand a psutil namedtuple into a list of metric dicts.

    Each field becomes one dict with keys ``name`` (prefix + capitalized
    field), ``value``, ``dimensions`` and ``unit``. The unit is inferred
    from the field name where possible, falling back to *unit*.

    :param metric: a namedtuple (anything with ``_asdict()``)
    :param prefix: string prepended to each capitalized field name
    :param unit: default CloudWatch unit for fields with no inferred unit
    :param dimensions: optional dict of extra CloudWatch dimensions
    :returns: list of metric dicts
    """
    def _unit_for(field):
        # Infer the CloudWatch unit from psutil's field-naming scheme.
        if field == 'percent':
            return 'Percent'
        if field.endswith('_time'):
            return 'Milliseconds'
        if field.endswith(('_sent', '_recv')):
            return 'Bytes'
        return unit

    return [
        dict(name=prefix + field.capitalize(),
             value=value,
             dimensions=dimensions,
             unit=_unit_for(field))
        for field, value in metric._asdict().items()
    ]
def collect_metrics():
    """Gather system statistics via psutil as a list of metric dicts.

    Covers users, processes, CPU times (total and per-CPU), CPU stats,
    virtual/swap memory, per-disk I/O and per-NIC I/O counters.

    :returns: list of dicts with keys ``name``, ``value``, ``unit`` and
        (for per-device metrics) ``dimensions``
    """
    metrics = [
        dict(name='Users', value=len(psutil.users()), unit='Count'),
        dict(name='Processes', value=len(psutil.pids()), unit='Count'),
    ]
    metrics += extract_metric_tuple(
        psutil.cpu_times(), 'CpuTimeTotal', 'Seconds')
    # One set of CPU-time metrics per core, tagged with a Cpu dimension.
    for cpu_index, per_cpu in enumerate(psutil.cpu_times(percpu=True)):
        metrics += extract_metric_tuple(
            per_cpu, 'CpuTime', 'Seconds',
            dimensions={'Cpu': str(cpu_index)})
    metrics += extract_metric_tuple(psutil.cpu_stats(), 'CpuStats', 'Count')
    metrics += extract_metric_tuple(
        psutil.virtual_memory(), 'VirtualMem', 'Bytes')
    metrics += extract_metric_tuple(psutil.swap_memory(), 'SwapMem', 'Bytes')
    for disk_name, counters in psutil.disk_io_counters(perdisk=True).items():
        metrics += extract_metric_tuple(
            counters, 'DiskIo', 'Count', dimensions={'Disk': disk_name})
    for nic_name, counters in psutil.net_io_counters(pernic=True).items():
        metrics += extract_metric_tuple(
            counters, 'NetIo', 'Count', dimensions={'Interface': nic_name})
    return metrics
def send_metrics(metrics):
    """Publish collected metrics to AWS CloudWatch.

    Tags every metric with instance-identifying dimensions fetched from
    the EC2 metadata service, plus any per-metric dimensions.

    :param metrics: list of dicts with keys ``name``, ``value``, ``unit``
        and optionally ``dimensions`` (as built by ``collect_metrics``)
    """
    cloudwatch = boto3.client('cloudwatch')
    dimensions = [
        {'Name': 'InstanceId', 'Value': get_metadata('instance-id')},
        {'Name': 'ImageId', 'Value': get_metadata('ami-id')},
        {'Name': 'InstanceType', 'Value': get_metadata('instance-type')},
    ]
    metric_payload = []
    for metric in metrics:
        metric_data = {'MetricName': metric['name'],
                       'Value': metric['value'],
                       # Copy so per-metric dimensions don't accumulate
                       # on the shared instance dimension list.
                       'Dimensions': list(dimensions),
                       'Unit': metric['unit']}
        if metric.get('dimensions'):
            for k, v in metric['dimensions'].items():
                metric_data['Dimensions'].append({'Name': k, 'Value': v})
        metric_payload.append(metric_data)
    # Bug fix: the original passed `metric_data` (only the last metric)
    # instead of the full payload. Also, PutMetricData accepts at most
    # 20 MetricDatum items per call, so send in batches of 20.
    for start in range(0, len(metric_payload), 20):
        cloudwatch.put_metric_data(
            Namespace=NAMESPACE,
            MetricData=metric_payload[start:start + 20])
def main():
    """Collect system metrics and push them to CloudWatch."""
    send_metrics(collect_metrics())


if __name__ == '__main__':
    main()
@andreivmaksimov
Copy link

Thanks for the gist! It was helpful as a starting point. It has a small typo (you need to pass metric_payload to the put_metric_data function instead of metric_data) and a bug: you cannot upload more than 20 metrics at a time.

Current working copy is:

# -*- coding: utf-8 -*-

import os

# pip install boto3 psutil requests
import boto3
import psutil
import requests

NAMESPACE = 'Sys'
os.environ.setdefault('AWS_DEFAULT_REGION', 'us-east-1')


def get_metadata(path):
    """Fetch a value from the EC2 instance metadata service.

    :param path: metadata key relative to ``/latest/meta-data/``
                 (e.g. ``'instance-id'``)
    :returns: the metadata value as text
    :raises requests.RequestException: if the metadata service is
        unreachable within the 0.5s timeout (e.g. not running on EC2)
    """
    url = 'http://169.254.169.254/latest/meta-data/' + path
    # Use .text (not .content): on Python 3 .content is bytes, which
    # boto3 rejects as a CloudWatch Dimension 'Value'.
    return requests.get(url, timeout=0.5).text


def extract_metric_tuple(metric, prefix, unit, dimensions=None):
    """Expand a psutil namedtuple into a list of metric dicts.

    Each field becomes one dict with keys ``name`` (prefix + capitalized
    field), ``value``, ``dimensions`` and ``unit``. The unit is inferred
    from the field name where possible, falling back to *unit*.

    :param metric: a namedtuple (anything with ``_asdict()``)
    :param prefix: string prepended to each capitalized field name
    :param unit: default CloudWatch unit for fields with no inferred unit
    :param dimensions: optional dict of extra CloudWatch dimensions
    :returns: list of metric dicts
    """
    def _unit_for(field):
        # Map psutil's field-naming conventions to CloudWatch units.
        if field == 'percent':
            return 'Percent'
        if field.endswith('_time'):
            return 'Milliseconds'
        if field.endswith(('_sent', '_recv')):
            return 'Bytes'
        return unit

    return [
        dict(name=prefix + field.capitalize(),
             value=value,
             dimensions=dimensions,
             unit=_unit_for(field))
        for field, value in metric._asdict().items()
    ]


def collect_metrics():
    """Gather system statistics via psutil as a list of metric dicts.

    Covers users, processes, CPU times (total and per-CPU), CPU stats,
    virtual/swap memory, per-disk I/O and per-NIC I/O counters.

    :returns: list of dicts with keys ``name``, ``value``, ``unit`` and
        (for per-device metrics) ``dimensions``
    """
    metrics = [
        dict(name='Users', value=len(psutil.users()), unit='Count'),
        dict(name='Processes', value=len(psutil.pids()), unit='Count'),
    ]
    metrics += extract_metric_tuple(
        psutil.cpu_times(), 'CpuTimeTotal', 'Seconds')
    # One set of CPU-time metrics per core, tagged with a Cpu dimension.
    for cpu_index, per_cpu in enumerate(psutil.cpu_times(percpu=True)):
        metrics += extract_metric_tuple(
            per_cpu, 'CpuTime', 'Seconds',
            dimensions={'Cpu': str(cpu_index)})
    metrics += extract_metric_tuple(psutil.cpu_stats(), 'CpuStats', 'Count')
    metrics += extract_metric_tuple(
        psutil.virtual_memory(), 'VirtualMem', 'Bytes')
    metrics += extract_metric_tuple(psutil.swap_memory(), 'SwapMem', 'Bytes')
    for disk_name, counters in psutil.disk_io_counters(perdisk=True).items():
        metrics += extract_metric_tuple(
            counters, 'DiskIo', 'Count', dimensions={'Disk': disk_name})
    for nic_name, counters in psutil.net_io_counters(pernic=True).items():
        metrics += extract_metric_tuple(
            counters, 'NetIo', 'Count', dimensions={'Interface': nic_name})
    return metrics


def chunks(l, n):
    """Yield successive n-sized chunks from l.

    :param l: a sequence supporting ``len()`` and slicing
    :param n: maximum chunk size (the final chunk may be shorter)
    """
    # range, not xrange: xrange does not exist on Python 3, and range
    # behaves identically for this iteration on Python 2.
    for i in range(0, len(l), n):
        yield l[i:i + n]


def send_metrics(metrics):
    """Publish collected metrics to AWS CloudWatch.

    Tags every metric with instance-identifying dimensions fetched from
    the EC2 metadata service, plus any per-metric dimensions, and sends
    them in batches of 20 (the PutMetricData per-call limit).

    :param metrics: list of dicts with keys ``name``, ``value``, ``unit``
        and optionally ``dimensions`` (as built by ``collect_metrics``)
    """
    cloudwatch = boto3.client('cloudwatch')
    instance_dims = [
        {'Name': 'InstanceId', 'Value': get_metadata('instance-id')},
        {'Name': 'ImageId', 'Value': get_metadata('ami-id')},
        {'Name': 'InstanceType', 'Value': get_metadata('instance-type')},
    ]
    payload = []
    for metric in metrics:
        # Start from a copy so per-metric dimensions don't accumulate
        # on the shared instance dimension list.
        dims = list(instance_dims)
        extra = metric.get('dimensions')
        if extra:
            dims.extend({'Name': name, 'Value': value}
                        for name, value in extra.items())
        payload.append({'MetricName': metric['name'],
                        'Value': metric['value'],
                        'Dimensions': dims,
                        'Unit': metric['unit']})

    # PutMetricData accepts at most 20 MetricDatum items per call.
    for batch in chunks(payload, 20):
        cloudwatch.put_metric_data(Namespace=NAMESPACE, MetricData=batch)


def main():
    """Collect system metrics and push them to CloudWatch."""
    collected = collect_metrics()
    send_metrics(collected)


if __name__ == '__main__':
    main()

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment