@bound2
Last active May 31, 2017 09:47
#!/usr/bin/env python
import json
import re

# Sample "nodetool info" output from an Apache Cassandra node, used as test input.
raw_input = """
ID : fa5bd3ba-5e60-4499-be7c-3b9556b89e7b
Gossip active : true
Thrift active : true
Native Transport active: true
Load : 827.55 KiB
Generation No : 1494935993
Uptime (seconds) : 777385
Heap Memory (MB) : 192.55 / 990.00
Off Heap Memory (MB) : 0.00
Data Center : eu-west
Rack : 1b
Exceptions : 0
Key Cache : entries 195, size 18.92 KiB, capacity 49 MiB, 5438 hits, 5630 requests, 0.966 recent hit rate, 14400 save period in seconds
Row Cache : entries 0, size 0 bytes, capacity 0 bytes, 0 hits, 0 requests, NaN recent hit rate, 0 save period in seconds
Counter Cache : entries 0, size 0 bytes, capacity 24 MiB, 0 hits, 0 requests, NaN recent hit rate, 7200 save period in seconds
Chunk Cache : entries 17, size 1.06 MiB, capacity 215 MiB, 1425 misses, 7146 requests, 0.801 recent hit rate, NaN microseconds miss latency
Percent Repaired : 100.0%
Token : (invoke with -T/--tokens to see all 256 tokens)
"""
def parse_status(raw_data, filters=None):
    """Parse colon-separated status lines into a dict, optionally keeping
    only lines that match at least one of the given regex filters."""
    status = dict()
    for line in raw_data.splitlines():
        if len(line.strip()) == 0:
            continue
        if filters is not None and apply_filters(text=line, filters=filters) == 0:
            continue
        # Split on the first colon only, so values that contain colons stay intact.
        key, value = line.split(":", 1)
        status[key.strip()] = value.strip()
    return status


def apply_filters(text, filters):
    """Return the number of filter patterns that match the given text."""
    match_count = 0
    for f in filters:
        if re.search(f, text, re.IGNORECASE):
            match_count += 1
    return match_count


def to_json(data):
    return json.dumps(data, indent=4, sort_keys=True)


filters = ["cache", "token", "Gossip Active"]
print(to_json(parse_status(raw_input, filters)))
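
For reference, running the script against the sample data with the filters above should print output along these lines: only lines matching at least one case-insensitive pattern are kept, and keys are sorted alphabetically.

{
    "Chunk Cache": "entries 17, size 1.06 MiB, capacity 215 MiB, 1425 misses, 7146 requests, 0.801 recent hit rate, NaN microseconds miss latency",
    "Counter Cache": "entries 0, size 0 bytes, capacity 24 MiB, 0 hits, 0 requests, NaN recent hit rate, 7200 save period in seconds",
    "Gossip active": "true",
    "Key Cache": "entries 195, size 18.92 KiB, capacity 49 MiB, 5438 hits, 5630 requests, 0.966 recent hit rate, 14400 save period in seconds",
    "Row Cache": "entries 0, size 0 bytes, capacity 0 bytes, 0 hits, 0 requests, NaN recent hit rate, 0 save period in seconds",
    "Token": "(invoke with -T/--tokens to see all 256 tokens)"
}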