@simpleadm
Last active February 1, 2019 06:39
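"""Check recent Loggly alerts.

Queries the Loggly events API for CRITICAL-level log entries (optionally
filtered by tenant tag and time range), then prints a per-tag summary:
flow-event alerts grouped by entity type, and failed scheduled tasks grouped
by task name, origin and consumers.
"""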
import requests
import json
import argparse
import urllib.parse
from itertools import groupby
from datetime import datetime


def parse_args():
    parser = argparse.ArgumentParser()
    parser.description = """
    Examples:
    python3 check_alerts.py -c eplehuset --start="-1d"
    python3 check_alerts.py --start="2018-09-24 00:00:00" --until="2018-09-26 00:00:00"
    """
    parser.add_argument("-t", "--token", help="loggly api token")
    parser.add_argument("-c", "--client", help="tag name (tenant name)")
    parser.add_argument("-s", "--start",
                        help="search logs from, e.g. -1d, -30m, -12h, 'yyyy-MM-dd HH:mm:ss.SSS' (Default: -1h)",
                        default='-1h')
    parser.add_argument("-u", "--until",
                        help="search logs to, e.g. now, -1d, -30m, -12h, 'yyyy-MM-dd HH:mm:ss.SSS' (Default: now)",
                        default='now')
    return parser.parse_args()


def display_entities_by_type(result):
    # group flow-event alerts by entity type and print the affected entity ids
    group_key = lambda x: x.get('entity_type')
    result.sort(key=group_key)
    for key, group in groupby(result, group_key):
        print('%s:' % key)
        group = list(group)
        ids = list(set([str(g.get('source_system_entity_id')) for g in group]))
        ids.sort()
        print(' %s\n' % ','.join(ids))


def display_scheduled_tasks(result):
    # group failed scheduled tasks by "task_name(origin->consumers)" and print task ids with timestamps
    group_key = lambda x: '%s(%s->%s)' % (x.get('task_name'), x.get('origin'), ','.join(x.get('consumers')))
    result.sort(key=group_key)
    for key, group in groupby(result, group_key):
        print('%s:' % key)
        group = list(group)
        group.sort(key=lambda x: x.get('time'))
        ids = ['%s (%s)' % (g.get('task_id'), g.get('time').strftime("%Y-%m-%d %H:%M:%S")) for g in group]
        print(' %s\n' % '\n '.join(ids))


def process_data(data):
    results = {}
    scheduled_results = {}
    events = data.get('events')
    for event in events:
        tags = event.get('tags', [])
        raw = json.loads(event.get('raw'))
        c = raw.get('Context', {})
        # keep only push event alerts
        if not c:
            # no Context: either a failed scheduled task or an unrelated message
            if raw.get("args") and raw.get("Message") == "Scheduled task was failed":
                result_key = '|'.join(sorted(tags))
                r_args = raw.get("args", [])
                tenant = r_args[0]
                origin = r_args[1]
                consumers = list(r_args[3].keys())
                scheduled_results.setdefault(result_key, [])
                scheduled_results[result_key].append({
                    'time': datetime.strptime(raw.get('asctime'), "%Y-%m-%d %H:%M:%S,%f"),
                    'task': raw.get('Task'),
                    'task_name': raw.get('task_name'),
                    'task_id': raw.get('task_id'),
                    'origin': origin,
                    'tenant': tenant,
                    'tags': tags,
                    'consumers': consumers,
                })
                continue
            print("\x1b[1;31m> Message was skipped! loggly id: %s, message: '%s', tags: %s\x1b[0m" % (
                event.get('id', '-'), raw.get('Message', '-'), tags))
            continue
        tenant = c.get('tenant')
        result_key = '|'.join(sorted(tags))  # tenant
        results.setdefault(result_key, [])
        entity = c.get('entity', {})
        order_id = entity.get('orderid') if isinstance(entity, dict) else None
        source_system_entity_id = c.get('source_system_entity_id', '')
        if not source_system_entity_id and order_id:
            source_system_entity_id = order_id
        target_system_entity_id = c.get('target_system_entity_id', '')
        if not target_system_entity_id and order_id:
            target_system_entity_id = order_id
        results[result_key].append({
            'entity_type': c.get('entity_type', 'nf'),
            'source_system': c.get('source_system', ''),
            'source_system_entity_id': source_system_entity_id,
            'target_system': c.get('target_system', ''),
            'target_system_entity_id': target_system_entity_id,
            'time': raw.get('asctime'),
            # 'task': raw.get('Task'),
            # 'entity': entity,
            'tenant': tenant,
            'tags': tags,
        })

    print("###################")
    print("### Flow events ###")
    print("###################")
    # print(results)
    for key, result in results.items():
        print("\n\x1b[1;36m%s\x1b[0m" % key)
        display_entities_by_type(result)
        print('---' * 20)

    print("#######################")
    print("### Scheduled tasks ###")
    print("#######################")
    # print(scheduled_results)
    for key, result in scheduled_results.items():
        print("\n\x1b[1;36m%s\x1b[0m" % key)
        display_scheduled_tasks(result)
        print('---' * 20)


args = parse_args()

base_url = 'https://convertflowe.loggly.com/apiv2/events/iterate?'
loggly_token = args.token if args.token else ''
tenant = args.client  # None

# https://www.loggly.com/docs/search-query-language/
logs_query = 'json.levelname:"CRITICAL"'  # *
# https://www.loggly.com/docs/search-query-language/#time
if tenant:
    logs_query += ' tag:' + tenant

get_vars = {
    'q': logs_query,
    'from': args.start,  # -1d -30m -12h
    'until': args.until,
    'size': '1000'
}

response = requests.get(base_url + urllib.parse.urlencode(get_vars),
                        headers={'Authorization': 'Bearer ' + loggly_token})
# print(response.text)
events = response.json()

if not isinstance(events, dict) or not events.get('events'):
    print("Wrong api response:")
    print(events)
    exit(1)

process_data(events)
# check alerts from an export file instead of the live API:
# file_name = 'loggly_events_2018-09-26 07_02_54.803445.json'
#
# with open(file_name) as f:
#     data = json.load(f)
#     process_data(data)
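
# to produce such an export for later re-checking (a minimal sketch, not part of
# the original script, assuming the 'events' dict fetched above):
# export_name = 'loggly_events_%s.json' % str(datetime.now()).replace(':', '_')
# with open(export_name, 'w') as f:
#     json.dump(events, f)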