@milo2012
Last active September 17, 2019 18:29
This script parses an nmap XML file and extracts HTTP/HTTPS servers. The problem with Nmap scanning is that it does not always correctly identify every host running an HTTP/HTTPS service. This script attempts to fix that by probing the remaining open ports directly.
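For reference, the XML input is expected to come from an nmap service scan (-sV populates the service and tunnel fields that the script reads, -oX writes XML). The target range below is only an illustration:

    nmap -sV -oX scan.xml 192.168.1.0/24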
# This script parses an nmap XML file and extracts HTTP/HTTPS servers.
# Nmap does not always correctly identify every host running an HTTP/HTTPS
# service; this script attempts to fix that by probing the remaining open
# ports directly.
from libnmap.parser import NmapParser
import requests, optparse, os, sys, glob, multiprocessing
from requests.packages.urllib3.exceptions import InsecureRequestWarning

# Suppress certificate warnings from the unverified HTTPS probes
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

timeout = 1.0       # per-request timeout in seconds
numOfThreads = 2    # default number of worker processes (override with -n)
def checkHTTP(ip):
    # Try plain HTTP first, then HTTPS; return the status code and URL of the
    # first prefix that answers, or (None, last URL tried) if neither does.
    tmpStatusCode = None
    burp0_url = ip
    urlPrefixList = ["http://", "https://"]
    burp0_headers = {"User-Agent": "Mozilla/5.0", "Accept": "*/*",
                     "Accept-Language": "en-US,en;q=0.5",
                     "Accept-Encoding": "gzip, deflate", "Connection": "close"}
    for prefix in urlPrefixList:
        burp0_url = prefix + ip
        try:
            s = requests.Session()
            r1 = s.get(burp0_url, headers=burp0_headers, verify=False, timeout=timeout)
            tmpStatusCode = r1.status_code
            break  # stop at the first prefix that answers
        except (requests.exceptions.ReadTimeout,
                requests.exceptions.ConnectTimeout,
                requests.exceptions.SSLError,
                requests.exceptions.ConnectionError):
            pass
    return tmpStatusCode, burp0_url
if __name__ == "__main__":
    parser = optparse.OptionParser(
        description="This script parses an nmap XML file and extracts HTTP/HTTPS servers.\n"
                    "Nmap does not always correctly identify every host running an HTTP/HTTPS "
                    "service; this script attempts to fix that by probing the remaining open ports directly.",
        version="%prog 1.0")
    parser.add_option('-f', '--file', action="store", dest="filename", help="nmap xml file")
    parser.add_option('-n', '--num', action="store", dest="numOfThreads", help="number of worker processes")
    options, remainder = parser.parse_args()
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    if options.numOfThreads:
        numOfThreads = int(options.numOfThreads)

    httpsList = []
    httpList = []
    totalList = []
    fileList = []

    # Accept either a single XML file or a substring matching several *.xml files
    if os.path.exists(options.filename):
        fileList.append(options.filename)
    else:
        tmpFileList = glob.glob("*" + options.filename + "*.xml")
        for x in tmpFileList:
            fileList.append(x)

    targetList = []
    for filename in fileList:
        rep = NmapParser.parse_fromfile(filename)
        for scanned_hosts in rep.hosts:
            for x in scanned_hosts.services:
                hostPort = scanned_hosts.address + ":" + str(x.port)
                if hostPort not in totalList:
                    totalList.append(hostPort)
                if x.service == "http":
                    # Services nmap already identified as HTTP/HTTPS
                    if x.tunnel == "ssl":
                        httpsList.append("https://" + hostPort)
                    else:
                        httpList.append("http://" + hostPort)
                else:
                    # Everything else gets probed directly for a web server
                    if hostPort not in targetList:
                        targetList.append(hostPort)

    # Probe the unidentified ports in parallel and print the ones that answered
    p = multiprocessing.Pool(processes=numOfThreads)
    tmpResultList = p.map(checkHTTP, targetList)
    p.close()
    for tmpStatusCode, tmpUrl in tmpResultList:
        if tmpStatusCode is not None:
            print(tmpUrl + " " + str(tmpStatusCode))
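A minimal usage sketch, assuming the gist is saved as nmap_http_check.py (hypothetical filename):

    python nmap_http_check.py -f scan.xml -n 10

Each output line is a probed URL followed by the HTTP status code it returned; ports that answered on neither http:// nor https:// are omitted.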