Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save mdcollins05/af7fd537d0619253e526591e704ee3e1 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
import requests
import json
import sys

# Python 2 only: force UTF-8 as the default string encoding so that incident
# text containing non-ASCII characters can be concatenated and written out.
# `reload` and `sys.setdefaultencoding` do not exist on Python 3 (which
# handles Unicode natively), so guard the hack by interpreter version.
if sys.version_info[0] == 2:
    reload(sys)  # noqa: F821 -- builtin on Python 2 only
    sys.setdefaultencoding('utf-8')

# Your PagerDuty API key. A read-only key will work for this.
api_key = 'ENTER_API_KEY'

# The API base url
base_url = 'https://api.pagerduty.com'

# The service ID you want to query. Leave this blank to query all services.
service_id = ''

# The start date that you want to query.
since = '2016-05-01'

# The end date that you would like to search.
until = '2016-07-15'

# Common headers for every request: token auth plus the PagerDuty v2
# API version selector in the Accept header.
headers = {
    'Authorization': 'Token token={0}'.format(api_key),
    'Content-type': 'application/json',
    'Accept': 'application/vnd.pagerduty+json;version=2'
}
def get_incidents(since, until, offset, service_id=None, total_incidents=None):
    """Fetch every incident between `since` and `until`, 100 per page.

    Parameters
    ----------
    since, until : ISO-8601 date strings bounding the search window.
    offset : pagination offset to start from (pass 0 on the first call).
    service_id : optional PagerDuty service ID to restrict the query.
    total_incidents : internal accumulator; leave as None.

    Returns a list of incident dicts as returned by the PagerDuty v2 API.
    """
    # Avoid the mutable-default-argument trap: the original `=[]` default is
    # shared across calls, so a second export would re-include earlier results.
    if total_incidents is None:
        total_incidents = []
    params = {
        'since': since,
        'until': until,
        'limit': 100,
    }
    if service_id is not None:
        # requests serializes list values as repeated query parameters,
        # which is how the PagerDuty v2 API expects `service_ids[]`.
        params['service_ids[]'] = [service_id]
    # Iterate pages with a loop rather than recursion so a very large result
    # set cannot exhaust the call stack.
    while True:
        params['offset'] = offset
        # BUG FIX: the filters must travel in the URL query string
        # (`params=`). The original sent them as a JSON GET *body*
        # (`data=json.dumps(...)`), which the API ignores, silently
        # disabling since/until/offset/limit/service filtering.
        r = requests.get(
            '{0}/incidents'.format(base_url),
            headers=headers,
            params=params
        )
        payload = r.json()  # decode once instead of three times per page
        total_incidents.extend(payload['incidents'])
        if not payload['more']:
            return total_incidents
        offset += 100
def get_incident_details(incident_id, incident_number, service, file_name):
    """Append one CSV row describing a single incident to `file_name`.

    Row layout: incident number, service, trigger time, resolve time,
    then (when present on the triggering channel) quoted summary, details
    and body columns.

    Parameters
    ----------
    incident_id : PagerDuty incident ID used to fetch the log entries.
    incident_number : human-facing incident number, already a string.
    service : service name to place in the second column.
    file_name : path of the CSV file to append to.
    """
    start_time = ''
    end_time = ''
    summary = ''
    # Defensive init: guarantees the names exist even if a trigger entry
    # sets a has_* flag without the matching channel field.
    details = None
    body = None
    has_details = False
    has_summary = False
    has_body = False
    output = incident_number + ',' + service + ','
    r = requests.get(
        '{0}/incidents/{1}/log_entries?include[]=channels'.format(
            base_url, incident_id
        ),
        headers=headers
    )
    for log_entry in r.json()['log_entries']:
        if log_entry['type'] == 'trigger_log_entry':
            # Track the most recent trigger timestamp (ISO-8601 strings
            # compare correctly lexicographically).
            if log_entry['created_at'] > start_time:
                start_time = log_entry['created_at']
            # NOTE(review): indentation was lost in this paste; assuming the
            # channel fields are read for every trigger entry -- confirm
            # against the original script.
            if 'summary' in log_entry['channel']:
                has_summary = True
                summary = log_entry['channel']['summary']
            if 'details' in log_entry['channel']:
                has_details = True
                details = log_entry['channel']['details']
            if 'body' in log_entry['channel']:
                has_body = True
                body = log_entry['channel']['body']
        elif log_entry['type'] == 'resolve_log_entry':
            end_time = log_entry['created_at']
    output += start_time + ','
    output += end_time
    if has_summary:
        output += ',"' + summary + '"'
    if has_details:
        output += ',"' + str(details) + '"'
    if has_body:
        # Strip CSV-hostile characters from the (potentially multi-line) body.
        output += ',"' + str(body).replace(",", "-").replace("\"", "'").replace("\n", "").replace("\r", "") + '"'
    output += '\n'
    # BUG FIX: the original opened the file up front and never closed it,
    # leaking one file handle per incident. `with` closes it deterministically.
    with open(file_name, 'a') as f:
        f.write(output)
def main():
    """Export all matching incidents to a per-run CSV file.

    Uses the module-level configuration (`since`, `until`, `service_id`)
    and writes/appends rows to `pagerduty_export_<since>.csv`.
    """
    # An empty service_id means "query all services".
    if service_id != '':
        incidents = get_incidents(since, until, 0, service_id)
    else:
        incidents = get_incidents(since, until, 0)
    for incident in incidents:
        get_incident_details(
            incident['id'], str(incident['incident_number']),
            incident['service']['summary'],
            'pagerduty_export_' + since + '.csv'
        )
    # BUG FIX: `print '...'` is Python-2-only statement syntax and is a
    # SyntaxError on Python 3; the single-argument call form behaves
    # identically on both interpreters.
    print('Export completed successfully!')


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment