Last active
February 15, 2018 19:54
-
-
Save clayg/5357d25a4a464df1ff5083cd2fe56a2b to your computer and use it in GitHub Desktop.
Dig around in Swift's internal ".logs" account (list nodes, list logs, grep logs via the internal client).
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
import argparse | |
from ConfigParser import ConfigParser | |
from StringIO import StringIO | |
import logging | |
import re | |
import sys | |
import thread | |
import threading | |
from Queue import Queue | |
from datetime import datetime, timedelta | |
from swift.common.internal_client import InternalClient | |
from swift.container.sync import ic_conf_body | |
from swift.common.wsgi import ConfigString | |
# fix monkey-patch lp bug #1380815
# Restore real thread/threading references (and a usable module lock) on the
# logging module, which eventlet monkey-patching can leave broken on py2.
logging.threading = threading
logging.thread = thread
logging._lock = threading.RLock()
# Midnight at the start of today / yesterday; default bounds for the
# --start/--end search window.
TODAY = datetime(*datetime.today().timetuple()[:3])
YESTERDAY = TODAY - timedelta(days=1)
# Log object names and CLI date arguments both use this layout.
DATE_FORMAT = '%Y/%m/%d'


def parse_date(string):
    """Parse a YYYY/MM/DD string into a midnight datetime.

    Raises argparse.ArgumentTypeError on malformed input so argparse can
    report a friendly usage error.
    """
    try:
        parsed = datetime.strptime(string, DATE_FORMAT)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
    # strptime with a date-only format already leaves the time fields zeroed,
    # so no rebuild of the datetime is needed.
    return parsed
def add_start_end(parser):
    """Attach the shared node filter and date-window options to *parser*."""
    parser.add_argument('-n', '--node', action='append',
                        help='limit log search to node(s)')
    # Both date options share type/default handling; register them in order.
    date_options = [
        ('-S', '--start', YESTERDAY, 'YYYY/MM/DD to start search (inclusive)'),
        ('-E', '--end', TODAY, 'YYYY/MM/DD to end search (exclusive)'),
    ]
    for short_flag, long_flag, default, help_text in date_options:
        parser.add_argument(short_flag, long_flag, default=default,
                            type=parse_date, help=help_text)
# Top-level CLI: global options plus one subcommand per action.  Statement
# order matters here: it fixes both the --help listing and which subparsers
# receive the shared options.
parser = argparse.ArgumentParser()
parser.add_argument('--request-node-count', type=int,
                    help='override request_node_count')
subparsers = parser.add_subparsers()
# list-nodes takes no extra options; list-logs and grep-logs share the
# node/date-window options added by add_start_end().
list_nodes_parser = subparsers.add_parser('list-nodes')
list_logs_parser = subparsers.add_parser('list-logs')
add_start_end(list_logs_parser)
grep_logs_parser = subparsers.add_parser('grep-logs')
add_start_end(grep_logs_parser)
parser.add_argument('--workers', help='number of workers', type=int,
                    default=24)
# grep-logs only: what to look for and whether to treat it as a regex.
grep_logs_parser.add_argument('-e', '--pattern', help='something to look for',
                              default='')
grep_logs_parser.add_argument('-r', '--regex', help='treat pattern as regex',
                              action='store_true')
def mb(bytes_):
    """Format a byte count as mebibytes, e.g. ``'1.50 MiB'``."""
    return '{0:.2f} MiB'.format(bytes_ / 2.0 ** 20)
def gb(bytes_):
    """Format a byte count as gibibytes, e.g. ``'0.50 GiB'``."""
    return '{0:.2f} GiB'.format(bytes_ / 2.0 ** 30)
def do_list_nodes(swift, args):
    """Print per-node container name/count/size for the .logs account,
    followed by a grand total."""
    grand_total = 0
    for container in swift.iter_containers('.logs'):
        print('%s - %s - %s' % (
            container['name'], container['count'], gb(container['bytes'])))
        grand_total += container['bytes']
    print('Total: %s' % gb(grand_total))
# Wire the list-nodes subcommand to its handler (dispatched via args.func).
list_nodes_parser.set_defaults(func=do_list_nodes)
def get_aggr_logs(swift, args):
    """Collect (date, node, log) tuples for logs inside the date window.

    Honors args.node (restrict to the named nodes) and skips ssnoded logs;
    args.start is inclusive and args.end exclusive.
    """
    matched = []
    for node in swift.iter_containers('.logs'):
        node_name = node['name']
        if args.node and node_name not in args.node:
            continue
        for log in swift.iter_objects('.logs', node_name):
            log_name = log['name']
            if log_name.endswith('ssnoded.log.gz'):
                continue
            # Object names are prefixed with a YYYY/MM/DD date.
            log_date = parse_date(log_name[:10])
            if args.start <= log_date < args.end:
                matched.append((log_date, node_name, log))
    return matched
def extract_date_from_name(name):
    """Return the midnight datetime for a log object's YYYY/MM/DD prefix.

    Fixes the original body, which called the non-existent
    ``datetime.stptime()`` with no arguments and would always raise
    AttributeError.
    """
    # Same 10-character prefix and layout as the module-level DATE_FORMAT.
    return datetime.strptime(name[:10], '%Y/%m/%d')
def do_list_logs(swift, args):
    """Print each matching log with its size, then the grand total."""
    total_bytes = 0
    for log_date, node_name, log in sorted(get_aggr_logs(swift, args)):
        print('%s/%s - %s' % (node_name, log['name'], mb(log['bytes'])))
        total_bytes += log['bytes']
    print('Total: %s' % gb(total_bytes))
# Wire the list-logs subcommand to its handler (dispatched via args.func).
list_logs_parser.set_defaults(func=do_list_logs)
def output_lines(out_q):
    """Print lines from *out_q* until the ``None`` shutdown sentinel.

    Runs in a dedicated thread (see do_grep_logs) so grep workers never
    block on stdout.
    """
    while True:
        line = out_q.get()
        if line is None:
            # Bug fix: the original tested ``if not line`` and therefore
            # also stopped on an empty matched line, silently dropping
            # everything still queued behind it.  Only None means shutdown.
            break
        print(line)
def consumer(in_q, out_q, swift, args):
    """Worker loop: stream log lines and forward matches to *out_q*.

    Consumes (container, object) pairs from *in_q* until a None sentinel.
    args.pattern is a compiled regex when args.regex is set, otherwise a
    plain substring.
    """
    for container, obj in iter(in_q.get, None):
        for line in swift.iter_object_lines('.logs', container, obj):
            matched = (args.pattern.search(line) if args.regex
                       else args.pattern in line)
            if matched:
                out_q.put(line)
def do_grep_logs(swift, args):
    """Fan grep work out across worker threads and print matching lines."""
    logs_to_search = get_aggr_logs(swift, args)
    if args.regex:
        # Compile once; consumer() then calls .search() per line.
        args.pattern = re.compile(args.pattern)

    # Single printer thread keeps stdout writes serialized.
    out_q = Queue()
    printer = threading.Thread(target=output_lines, args=(out_q,))
    printer.start()

    in_q = Queue()
    workers = [
        threading.Thread(target=consumer, args=(in_q, out_q, swift, args))
        for _ in range(args.workers)
    ]
    for worker in workers:
        worker.start()

    # Feed the work, then one None sentinel per worker so each exits.
    for log_date, node_name, log in sorted(logs_to_search):
        in_q.put((node_name, log['name']))
    for worker in workers:
        in_q.put(None)
    for worker in workers:
        worker.join()

    # All producers are done; shut the printer down and wait for it.
    out_q.put(None)
    printer.join()
# Wire the grep-logs subcommand to its handler (dispatched via args.func).
grep_logs_parser.set_defaults(func=do_grep_logs)
def get_config_string(args):
    """Render an internal-client config and return it as a ConfigString.

    Starts from swift's stock ic_conf_body and, when requested, overrides
    request_node_count in the DEFAULT section.
    """
    parser = ConfigParser()
    parser.readfp(StringIO(ic_conf_body))
    if args.request_node_count is not None:
        # ConfigParser.set() requires string values (SafeConfigParser
        # raises TypeError otherwise); the original passed the raw int,
        # which only worked by accident on py2's RawConfigParser.
        parser.set('DEFAULT', 'request_node_count',
                   str(args.request_node_count))
    conf_out = StringIO()
    parser.write(conf_out)
    return ConfigString(conf_out.getvalue())
def main():
    """CLI entry point: parse args, build the internal client, dispatch."""
    args = parser.parse_args()
    swift = InternalClient(get_config_string(args), 'test', 1)
    # Each subcommand registered its handler via set_defaults(func=...).
    args.func(swift, args)


if __name__ == "__main__":
    sys.exit(main())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment