Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Save halfb00t/45033549b5e50d87145fd0a38a92ffcf to your computer and use it in GitHub Desktop.
Save halfb00t/45033549b5e50d87145fd0a38a92ffcf to your computer and use it in GitHub Desktop.
Python Script to Upload Memcache Metrics to CloudWatch
import sys, time, subprocess, socket, telnetlib
from datetime import datetime
from collections import defaultdict
from boto.ec2.cloudwatch import CloudWatchConnection
# Map raw Memcached `stats` field names to CloudWatch metric definitions.
# Memcached name: (AWS Name, AWS Metric Type, Calculation Method)
#   'gauge'    -> publish the current value as-is
#   'sampling' -> publish the delta since the previous polling cycle
MAPPINGS = {
    'uptime': ('Uptime', 'Count', 'gauge'),
    'curr_connections': ('CurrConnections', 'Count', 'gauge'),
    'cmd_get': ('CmdGet', 'Count', 'sampling'),
    'cmd_set': ('CmdSet', 'Count', 'sampling'),
    'cmd_flush': ('CmdFlush', 'Count', 'sampling'),
    'cmd_touch': ('CmdTouch', 'Count', 'sampling'),
    'get_hits': ('GetHits', 'Count', 'sampling'),
    'get_misses': ('GetMisses', 'Count', 'sampling'),
    'delete_hits': ('DeleteHits', 'Count', 'sampling'),
    'delete_misses': ('DeleteMisses', 'Count', 'sampling'),
    'incr_hits': ('IncrHits', 'Count', 'sampling'),
    'incr_misses': ('IncrMisses', 'Count', 'sampling'),
    'decr_hits': ('DecrHits', 'Count', 'sampling'),
    'decr_misses': ('DecrMisses', 'Count', 'sampling'),
    'cas_hits': ('CasHits', 'Count', 'sampling'),
    'cas_misses': ('CasMisses', 'Count', 'sampling'),
    'touch_hits': ('TouchHits', 'Count', 'sampling'),
    'touch_misses': ('TouchMisses', 'Count', 'sampling'),
    'bytes_read': ('BytesReadIntoMemcached', 'Bytes', 'sampling'),
    'bytes_written': ('BytesWrittenOutFromMemcached', 'Bytes', 'sampling'),
    'evictions': ('Evictions', 'Count', 'sampling'),
    'bytes': ('BytesUsedForCacheItems', 'Bytes', 'gauge'),
    'curr_items': ('CurrItems', 'Count', 'gauge')
}
# Stats not readable from Memcached 'stats' directly,
# we need to fetch or calculate these explicitly
# (same tuple layout as MAPPINGS: AWS name, unit, calculation method).
DERIVED_STATS = {
    'new_connections': ('NewConnections', 'Count', 'sampling'),
    'new_items': ('NewItems', 'Count', 'sampling'),
    'cpu_util': ('CPUUtilization', 'Percent', 'gauge'),
    'freeable_mem': ('FreeableMemory', 'Kilobytes', 'gauge'),
    'network_in': ('NetworkBytesIn', 'Bytes', 'sampling'),
    'network_out': ('NetworkBytesOut', 'Bytes', 'sampling'),
    'swap_usage': ('SwapUsage', 'Kilobytes', 'gauge')
}
def client():
    """Return the shared Telnet connection to the local Memcached,
    creating it lazily on first use."""
    global _client
    if _client is not None:
        return _client
    _client = telnetlib.Telnet('127.0.0.1', '11211')
    return _client
def command(client, cmd):
    """Write a command to telnet and return the response up to the
    terminating 'END' marker."""
    payload = "%s\n" % cmd
    client.write(payload)
    response = client.read_until('END')
    return response
def get_stats():
    """Fetch the raw output of the Memcached 'stats' command."""
    return command(client(), 'stats')
def parse_stats(new_stats):
    """Parse raw Memcached 'stats' output into {stat_name: int_value}.

    Only keys present in MAPPINGS are kept; every other line (including
    the trailing 'END' sentinel) is ignored.
    """
    parsed_stats = {}
    for stat in new_stats.split('\n'):
        if not stat.startswith('STAT'):
            continue
        parts = stat.split()
        # Guard against truncated lines ('STAT' without key/value),
        # which would otherwise raise IndexError.
        if len(parts) < 3:
            continue
        key, value = parts[1], parts[2]
        # Filter down to the metrics we actually publish.
        if key in MAPPINGS:
            parsed_stats[key] = int(value)
    return parsed_stats
def calculate_regular_stats(current_stats, old_stats):
    """Convert two consecutive stat snapshots into values to publish.

    'sampling' metrics are reported as the delta between pollings;
    'gauge' metrics are reported at their current value.
    """
    calculated_stats = {}
    for key, value in current_stats.items():
        if MAPPINGS[key][2] == 'sampling':
            # BUG FIX: a counter that first appears after the initial
            # cycle is missing from old_stats (which is a plain dict by
            # then) — diff against 0 instead of raising KeyError.
            calculated_stats[key] = value - old_stats.get(key, 0)
        else:
            # Gauge: the instantaneous value is what we want.
            calculated_stats[key] = value
    return calculated_stats
def _calculate_new_connections(current_stats, old_stats):
# Not sure how amazon is calculating this
pass
def _calculate_new_items(current_stats, old_stats):
# Not sure how amazon is calculating this
pass
def _get_network_in(current_stats, old_stats):
# Not sure how amazon is calculating this
pass
def _get_network_out(current_stats, old_stats):
# Not sure how amazon is calculating this
pass
def _get_cpu_usage():
# Get the 3rd field of the ps aux command which should be CPU usage
# of the process as a percentage
command = "ps aux | grep '/usr/bin/memcached' | grep -v grep | awk '{print $3}'"
# Create our new process
proc = subprocess.Popen(command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
# Filter the result
result = proc.stdout.readline().rstrip()
if result is not None:
return float(result)
else:
return 0.0
def _parse_memory():
    """Parse /proc/meminfo into a defaultdict with 'swap_usage'
    (SwapCached) and 'freeable_mem' (MemFree + Cached + Buffers),
    both in kilobytes."""
    mem_info = defaultdict(int)
    # Read the system's memory info via a subprocess.
    proc = subprocess.Popen("cat /proc/meminfo", shell=True,
                            stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    labels = ('SwapCached:', 'MemFree:', 'Cached:', 'Buffers:')
    # Scan each line for the fields we care about.
    for line in proc.stdout.readlines():
        for label in labels:
            if not line.startswith(label):
                continue
            # Second column is the value in kB.
            kilobytes = int(line.split()[1])
            if 'swap' in label.lower():
                mem_info['swap_usage'] = kilobytes
            else:
                mem_info['freeable_mem'] += kilobytes
    return mem_info
def calculate_derived_stats():
    """Collect host-level stats (memory, CPU) that Memcached itself
    does not report."""
    derived = {}
    derived.update(_parse_memory())
    derived['cpu_util'] = _get_cpu_usage()
    return derived
def compile_stats(old_stats):
    """Poll Memcached, diff against *old_stats*, merge in derived host
    stats, and remember the new snapshot for the next cycle."""
    global previous_stats
    current_stats = parse_stats(get_stats())
    # Remember this snapshot so the next polling cycle can diff against it.
    previous_stats = current_stats
    final_stats = {}
    final_stats.update(calculate_regular_stats(current_stats, old_stats))
    final_stats.update(calculate_derived_stats())
    return final_stats
def send_stats():
# Uncomfortable with the global here
global previous_stats
stats = compile_stats(previous_stats)
for key, value in stats.iteritems():
name = "{0}:{1}".format(hostname, key)
if key in MAPPINGS.keys():
unit = MAPPINGS[key][1]
elif key in DERIVED_STATS.keys():
unit = DERIVED_STATS[key][1]
else:
unit = "count"
print namespace, name, value, unit
conn.put_metric_data(namespace=namespace, name=name, value=float(value), unit=unit)
# Module state: the lazily-created telnet connection and the previous
# stats snapshot. defaultdict(int) so the first diff cycle sees 0 for
# every counter instead of raising KeyError.
_client = None
previous_stats = defaultdict(int)
# NOTE(review): CloudWatchConnection() relies on boto picking up AWS
# credentials from its environment/config — verify on deploy.
conn = CloudWatchConnection()
namespace = "Memcached"
hostname = socket.gethostname()
# Poll once per minute: fire when the wall clock hits second 0, then
# sleep 1s so we cannot fire twice within the same second.
while True:
    if datetime.now().second == 0:
        send_stats()
    time.sleep(1)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment