Skip to content

Instantly share code, notes, and snippets.

@paragbaxi
Created March 11, 2014 01:48
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save paragbaxi/9478022 to your computer and use it in GitHub Desktop.
Examples to auto download next QualysGuard batch
import argparse
import csv
import datetime
import logging
import os
import subprocess
import sys
import unicodedata
from collections import defaultdict

import qualysapi
from lxml import etree, objectify
from progressbar import AnimatedMarker, ProgressBar, SimpleProgress
def download_apps(tag=None):
    """Download the full list of web applications from QualysGuard WAS.

    Pages through the /search/was/webapp API 1000 records at a time,
    advancing past ``lastId`` until ``hasMoreRecords`` is no longer 'true'.

    Args:
        tag: Optional tag name; when given, only apps carrying that tag
            are returned.

    Returns:
        List of defaultdicts, each holding the app's 'name' and 'id'.
    """
    # Count applicable apps up front so the progress bar has a maximum.
    app_count = count_apps(tag)
    last_record = '0'
    apps = []
    print('Downloading applications:')
    # Show progress bar.
    pbar = ProgressBar(widgets=[SimpleProgress()], maxval=app_count).start()
    while True:
        # Request the next page of web apps, keyed past the last seen id.
        query_uri = '/search/was/webapp'
        data = '''
<ServiceRequest>
    <filters>
        <Criteria field="createdDate" operator="GREATER">2000-02-21T00:00:00Z</Criteria>
        <Criteria field="id" operator="GREATER">%s</Criteria>
    </filters>
    <preferences>
        <limitResults>1000</limitResults>
    </preferences>
</ServiceRequest>''' % (last_record)
        if tag:
            # Insert an additional tag criterion immediately after the
            # opening '<filters>' element.
            # (Fixed: previously spliced at a hard-coded character offset
            # and read the global c_args.tag instead of the tag parameter.)
            tag_criteria = '<Criteria field="tags.name" operator="EQUALS">%s</Criteria>' % (tag)
            data = data.replace('<filters>', '<filters>' + tag_criteria, 1)
        logging.debug('data = \n%s' % data)
        search_apps = qgc.request(query_uri, data)
        # Parse the response, associating each web app id with its name.
        tree = objectify.fromstring(search_apps)
        for webapp in tree.data.WebApp:
            app = defaultdict(str)
            app_name = webapp.name.text
            # App name may be in unicode (Python 2); fold it to ASCII.
            if isinstance(app_name, unicode):
                app_name = unicodedata.normalize('NFKD', app_name).encode('ascii', 'ignore')
            app['name'] = app_name
            app['id'] = webapp.id.text
            apps.append(app)
        pbar.update(len(apps))
        if tree.hasMoreRecords.text == 'true':
            last_record = tree.lastId.text
        else:
            break
    # Close out the progress bar and move to a fresh line.
    # (Fixed: a bare 'print' statement was split across two lines.)
    pbar.finish()
    print('\n')
    logging.info('apps = %s' % (apps))
    return apps
def _qg_ascii(node):
    """Normalize an objectify node's text to a stripped ASCII byte string."""
    return unicodedata.normalize('NFKD', unicode(node)).encode('ascii', 'ignore').strip()


def qg_remediation_tickets(asset_group, states, qids=None):
    """Return a defaultdict of all vulnerabilities in the given states.

    Pages through the QualysGuard MSP ticket_list API (1000 records per
    call) via curl, following TRUNCATION's 'last' ticket number.

    Args:
        asset_group: Asset group name to query.
        states: Ticket states filter string passed straight to the API.
        qids: Optional QID filter string appended to the request.

    Returns:
        defaultdict mapping each QID to a dict with keys 'hosts' (list of
        per-host dicts: ip/dns/netbios/vuln_id/result), 'title',
        'severity', 'solution', 'threat', and 'impact'; or False when the
        API reports no tickets at all.

    Vulnerability data map shape:
        {'qid_number': {
            'hosts': [{'ip': '10.28.0.1', 'dns': 'hostname',
                       'netbios': 'blah', 'vuln_id': 'ticket_number',
                       'result': '...'}, ...],
            'solution': '', 'impact': '', 'threat': '', 'severity': '',
            'title': ''},
         'qid_number2': ...}
    """
    global qg_username, qg_password
    # Accumulate all vulnerabilities keyed by QID.
    vulns = defaultdict(dict)
    # Start searching at initial ticket #1.
    since_ticket_number = 1
    while True:
        command_parameter = 'show_vuln_details=1&states=%s&asset_groups=%s&since_ticket_number=%s' % (states, asset_group.replace(' ', '+'), since_ticket_number)
        if qids is not None:
            command_parameter += '&qids=%s' % (qids)
        # NOTE(security): credentials embedded in the URL are visible to
        # other local users via the process list; prefer curl's --netrc /
        # -K config-file options or a native HTTP library.
        args_sub = [
            'curl',
            '-H', 'X-Requested-With: QGIR',
            '-d', command_parameter,
            'https://%s:%s@qualysapi.qualys.com/msp/ticket_list.php' % (qg_username, qg_password)
        ]
        logging.debug('args_sub: %s' % (args_sub))
        # Call API.
        # TODO: Incorporate timeout of 5 minutes.
        xml_output = subprocess.check_output(args_sub)
        logging.debug('qg_remediation_tickets.xml_output =')
        logging.debug(xml_output)
        # Objectify XML.
        tree = objectify.fromstring(xml_output)
        # Parse vulnerabilities.
        try:
            for ticket in tree.TICKET_LIST.TICKET:
                # Optional per-host fields fall back to '' when absent.
                try:
                    netbios = _qg_ascii(ticket.DETECTION.NBHNAME)
                except AttributeError:
                    netbios = ''
                try:
                    dns = _qg_ascii(ticket.DETECTION.DNSNAME)
                except AttributeError:
                    dns = ''
                try:
                    result = _qg_ascii(ticket.DETAILS.RESULT)
                except AttributeError:
                    result = ''
                vuln_id = _qg_ascii(ticket.NUMBER)
                ip = _qg_ascii(ticket.DETECTION.IP)
                qid = _qg_ascii(ticket.VULNINFO.QID)
                # setdefault creates the host list on the first sighting of
                # a QID, replacing the old duplicated try/except-KeyError
                # append blocks.
                vulns[qid].setdefault('hosts', []).append({
                    'ip': ip,
                    'dns': dns,
                    'netbios': netbios,
                    'vuln_id': vuln_id,
                    'result': result,
                })
                # Add the per-QID metadata (overwritten identically on
                # every ticket for the same QID).
                vulns[qid]['title'] = _qg_ascii(ticket.VULNINFO.TITLE)
                vulns[qid]['severity'] = _qg_ascii(ticket.VULNINFO.SEVERITY)
                vulns[qid]['solution'] = qg_html_to_ascii(_qg_ascii(ticket.DETAILS.SOLUTION))
                vulns[qid]['threat'] = qg_html_to_ascii(_qg_ascii(ticket.DETAILS.DIAGNOSIS))
                vulns[qid]['impact'] = qg_html_to_ascii(_qg_ascii(ticket.DETAILS.CONSEQUENCE))
        except AttributeError:
            # TICKET_LIST/TICKET missing: the API returned no tickets.
            # NOTE(review): this also discards vulns gathered from earlier
            # pages if a later page is empty — confirm intended behavior.
            logging.debug('No QualysGuard tickets to report.')
            return False
        # All vulnerabilities on this page added.
        try:
            # API truncates at 1000 records; TRUNCATION carries the resume
            # point when more pages remain.
            since_ticket_number = tree.TRUNCATION.get('last')
        except AttributeError:
            # No more API calls necessary.
            break
    # vulns now holds all remediation tickets.
    return vulns
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment