Skip to content

Instantly share code, notes, and snippets.

@opexxx
Forked from LuD1161/amass.sh
Created September 7, 2021 18:43
Show Gist options
  • Save opexxx/9ca0ed02ed8ad657c98cc52a625eb6b3 to your computer and use it in GitHub Desktop.
Save opexxx/9ca0ed02ed8ad657c98cc52a625eb6b3 to your computer and use it in GitHub Desktop.
Scripts for different tools are here; run setup_bbty.sh to copy them into the scripts folder.
#!/bin/bash
# amass.sh - run an active amass enumeration for a domain and log the output.
#
# Usage: amass.sh <domain> <resultDir>
#   $1 - domain to enumerate
#   $2 - existing directory where results and log.txt are written
#
# Outputs:
#   $resultDir/$domain.amass.txt          - amass findings
#   $resultDir/$domain.amass.txt.finished - marker file signalling completion
set -euo pipefail

domain=$1
resultDir=$2
resultfile="$resultDir/$domain.amass.txt"

touch "$resultfile"

{
  echo ""
  echo "AMASS Logs Now"
  echo ""
} >> "$resultDir/log.txt"

# tee with two targets appends the same output to the result file and the log.
amass enum -active -d "$domain" | tee -a "$resultfile" "$resultDir/log.txt"

touch "$resultfile.finished"
#!/bin/bash
# aquatone.sh - screenshot hosts found by amass+subfinder, then re-run aquatone
# against the merged nmap XML so only known-open ports are visited.
#
# Usage: aquatone.sh <domain> <resultDir> [typeOfRecon]
#   $1 - domain (used to locate input files in $resultDir)
#   $2 - result directory containing $domain.amass_subfinder.txt / $domain.final.xml
#   $3 - kept for interface compatibility; currently unused
set -euo pipefail

domain=$1
resultDir=$2
typeOfRecon=${3:-}

{
  echo ""
  echo "AQUATONE Logs Now"
  echo ""
} >> "$resultDir/log.txt"

# First pass: probe every discovered subdomain on a wide port range.
# (The original also 'cat'ed the input file to stdout as debug noise - removed.)
aquatone -scan-timeout 3000 -threads 10 -ports xlarge \
  -chrome-path /usr/bin/chromium -out "$resultDir/aquatone" \
  < "$resultDir/$domain.amass_subfinder.txt" | tee -a "$resultDir/log.txt"

echo "nmap aquatone starting"

# Second pass: feed the merged nmap XML (-nmap) for targeted screenshots.
aquatone -scan-timeout 3000 -nmap -threads 10 \
  -chrome-path /usr/bin/chromium -out "$resultDir/aquatone-nmap" \
  < "$resultDir/$domain.final.xml" | tee -a "$resultDir/log.txt"

touch "$resultDir/$domain.aquatone.finished"
#!/bin/bash
# Spray default credentials against services listed in an nmap gnmap file.
# Expects brutespray.py, nmap.gnmap and log.txt in the current directory.
userList=/usr/share/wordlist/user.txt
passList=/usr/share/wordlist/pass.txt

python brutespray.py --file nmap.gnmap -U "$userList" -P "$passList" --threads 5 --hosts 5 | tee -a log.txt
#!/bin/bash
# eyewitness.sh - screenshot every host that massdns resolved for a domain.
#
# Usage: eyewitness.sh <domain> <resultDir>
#   $1 - domain (used to locate $domain.massdns_ip.out.txt)
#   $2 - result directory; screenshots land in $resultDir/eyewitness
set -euo pipefail

domain=$1
resultDir=$2

{
  echo ""
  echo "EYEWITNESS Logs Now"
  echo ""
} >> "$resultDir/log.txt"

python ~/tools/EyeWitness/EyeWitness.py --no-prompt --max-retries 3 --jitter 3 \
  --timeout 5 -f "$resultDir/$domain.massdns_ip.out.txt" --threads 15 \
  --headless -d "$resultDir/eyewitness" | tee -a "$resultDir/log.txt"

# Was "$2/..." in the original; use $resultDir for consistency with the rest.
touch "$resultDir/$domain.eyewitness.finished"
#!/bin/bash
# masscan (file mode) - full TCP port scan of every IP in a file, producing a
# cleaned list plus a naturally-sorted "ip,port" CSV.
#
# Usage: masscan.sh <ipFile> <domain> <resultDir>
#   $1 - file with one IP per line
#   $2 - domain (used only for output file names)
#   $3 - result directory
set -euo pipefail

ipFile=$1
domain=$2
resultDir=$3

{
  echo ""
  echo "MASSCAN Logs Now"
  echo ""
} >> "$resultDir/log.txt"

{
  echo ""
  echo "#####################################"
  echo "Testing $ipFile of $domain"
  echo "#####################################"
  echo ""
} | tee -a "$resultDir/log.txt"

outputFile="$resultDir/$domain.masscan.txt"
finishedFile="$resultDir/$domain.masscan.finished"

masscan -iL "$ipFile" -p1-65535 --rate=10000 -oL "$outputFile" | tee -a "$resultDir/log.txt"

# Strip masscan's comment header/footer and blank lines from the -oL output.
sed -i -e "/#/d" -e "/^$/d" "$outputFile"

# -oL lines look like "open tcp <port> <ip> <ts>"; fields 3,4 are port and ip,
# emitted as "ip,port" and version-sorted so IPs group naturally.
cut -d" " -f3,4 "$outputFile" | awk '{print($2","$1)}' | sort -V > "$resultDir/$domain.masscan-sorted.txt"

# Reading via stdin makes wc print just the count (no filename to cut away).
noOfports=$(wc -l < "$outputFile")
echo "Output in $outputFile Found : $noOfports" | tee -a "$resultDir/log.txt"

touch "$finishedFile"
#!/bin/bash
# masscan (single-host mode) - full TCP port scan of one IP, hostname or URL.
#
# Usage (one host per invocation, typically fanned out via xargs):
#   cat ~/tools/results/$domain.out.txt | xargs -L1 masscan/masscan.sh $domain
#   e.g. cat ~/tools/results/yahoo-2018.07.07/yahoo.out.txt | xargs -L1 masscan/masscan.sh yahoo.com
#
#   $1 - domain (for log messages)
#   $2 - result directory
#   $3 - IP address, hostname, or http(s):// URL
set -euo pipefail

domain=$1
resultDir=$2
ip=$3

{
  echo ""
  echo "MASSCAN Logs Now"
  echo ""
} >> "$resultDir/log.txt"

{
  echo ""
  echo "#####################################"
  echo "Testing $ip of $domain"
  echo "#####################################"
  echo ""
} | tee -a "$resultDir/log.txt"

# IPv4 dotted-quad check, taken from https://stackoverflow.com/a/13778973
if [[ $ip =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  # BUG FIX: the original wrote -oL to "$resultdir/..." (lowercase d), an
  # undefined variable, so results landed in the filesystem root / cwd.
  masscan --wait 10 -p1-65535 "$ip" --max-rate 10000 \
    -oL "$resultDir/$ip.scan" | tee -a "$resultDir/log.txt"
else
  # Strip an optional http:// or https:// scheme, resolve the name, and scan
  # the first IPv4 address returned.
  strip=${ip#http://}
  strip=${strip#https://}
  host "$strip" | tee -a "$resultDir/log.txt"
  masscan --wait 10 -p1-65535 \
    "$(dig +short "$strip" | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | head -1)" \
    --max-rate 10000 -oL "$resultDir/$strip.scan" | tee -a "$resultDir/log.txt"
fi

# Per masscan(8): the program waits a default of 10 seconds after transmitting
# to collect late responses before saving results; tunable with --wait.
# http://manpages.ubuntu.com/manpages/artful/man8/masscan.8.html
#!/bin/bash
# Bruteforce subdomains with subbrute and resolve them with massdns.
# Usage: massdns_subbrute.sh <domain>
set -euo pipefail

domain=$1

# BUG FIX: the output was written as $domain_massdns_output.txt, which the
# shell parses as the (undefined) variable $domain_massdns_output followed by
# ".txt" - braces make the intended "<domain>_massdns_output.txt" explicit.
./subbrute.py ./all.txt "$domain" | massdns -r ../massdns_lists/resolvers.txt -t A -a -o -w "${domain}_massdns_output.txt" -
#!/bin/bash
# massdns.sh - resolve the combined amass+subfinder subdomain list, splitting
# the answers into a unique-names file and an IPv4 file.
#
# Usage: massdns.sh <domain> <resultDir>
#   Requires $resultDir/$domain.amass_subfinder.txt to exist.
set -euo pipefail

echo "This will work on only those, on which you've run subfinder"

domain=$1
resultDir=$2
resultfile="$resultDir/$domain.massdns_ip"
massdns_temp="$resultfile.temp.txt"
massdns_ips="$resultfile.ips.txt"
massdns_out="$resultfile.out.txt"

{
  echo ""
  echo "MASSDNS Logs Now"
  echo ""
} >> "$resultDir/log.txt"

massdns -r ~/tools/wordlists/resolvers.txt -t A -o S -w "$massdns_temp" \
  "$resultDir/$domain.amass_subfinder.txt" | tee -a "$resultDir/log.txt"

# Simple-text output: field 1 is the queried name (drop the trailing dot),
# field 3 is the answer; keep unique names and IPv4 answers respectively.
cut -f1 -d" " "$massdns_temp" | sed -e "s/\.$//g" | sort -u >> "$massdns_out"
cut -d" " -f3 "$massdns_temp" | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" >> "$massdns_ips"

rm "$massdns_temp"
touch "$resultfile.finished"
#!/bin/bash
# Usage : nmap.sh $nmap_cleaned_file $resultDir example.com basic|advanced
# Run nmap on the found ip and port in sorted file
#
# Output file's layout for nmap script
# 104.16.199.134 8080,8443,8880
# 162.159.0.102 53,80,443,2052
# i.e IP<space>port#1,port#2
#
# This is done so that nmap scans for services only on these specified ip and ports
#
# nmap -Pn -n -v -sV IP PORTS -oA IP.nmap.txt
# Taken from https://security.stackexchange.com/a/189540
#
# After a comparison between -A with -T4, -A alone, and no -A, the conclusion was that -T4 doesn't add much to the speed
# as --script-timeout option is already set
#
# without -A and without -T4 | with only -A | both
# 37.34 seconds | 47.51 seconds | 48.15 seconds
# The comparative results are pasted at the bottom in the comments
#
# BUG FIX: $resultDir was only assigned inside nmap_run, so these top-level
# log lines appended to "/log.txt". Bind it from $2 before logging.
resultDir=$2
{
  echo ""
  echo "NMAP Logs Now"
  echo ""
} >> "$resultDir/log.txt"

#######################################
# Scan each "IP port1,port2,..." line of a file with nmap, then merge the
# per-IP XML reports into a single final XML document.
# Arguments:
#   $1 - input file, one "IP<space>comma-separated-ports" entry per line
#   $2 - result directory (per-IP reports go to $2/nmap/)
#   $3 - domain, used for the merged output file name
#   $4 - scan profile: "basic" (5s script timeout) or "advanced" (adds -A)
# Outputs:
#   $2/$3.final.xml and a $2/$3.nmap.finished marker
#######################################
nmap_run()
{
  filePath=$1
  resultDir=$2
  domain=$3

  # -p: don't fail if the directory already exists from a previous run.
  mkdir -p "$resultDir/nmap"

  if [ ! -f "$filePath" ]; then
    echo "Error: Must supply file"
    exit 1
  fi

  while read -r line; do
    IP=$(echo "$line" | cut -d" " -f1)
    ports=$(echo "$line" | cut -d" " -f2)
    echo "Scanning $IP : $ports : $4"
    if [ "$4" == "basic" ]; then
      nmap -Pn -n -v -sV --script-timeout 5s --script "default and safe" "$IP" -p"$ports" -oA "$resultDir/nmap/$IP" | tee -a "$resultDir/log.txt"
    elif [ "$4" == "advanced" ]; then
      nmap -Pn -n -v -sV --script-timeout 10s --script "default and safe" -A "$IP" -p"$ports" -oA "$resultDir/nmap/$IP" | tee -a "$resultDir/log.txt"
    else
      # Unknown profile - abort rather than scan with the wrong options.
      exit 1
    fi
  done < "$filePath"

  # Merge the individual per-IP nmap XML reports into one document.
  python ~/tools/scripts/nmap_merge.py -sources="$resultDir/nmap/" > "$resultDir/$domain.final.xml"

  touch "$resultDir/$domain.nmap.finished"
}

nmap_run "$1" "$2" "$3" "$4"
#
# without -A and without -T4 | with only -A | both
# 37.34 seconds | 47.51 seconds | 48.15 seconds
#
# nmap -Pn -n -v -sV --script-timeout 5s --script "default and safe" 52.x.x.209 -p80,443,2000,5060,8008,8010,8020
# Starting Nmap 7.40 ( https://nmap.org ) at 2018-07-19 20:15 IST
# NSE: Loaded 141 scripts for scanning.
# NSE: Script Pre-scanning.
# Initiating NSE at 20:15
# Completed NSE at 20:15, 0.00s elapsed
# Initiating NSE at 20:15
# Completed NSE at 20:15, 0.00s elapsed
# Initiating SYN Stealth Scan at 20:15
# ~~~~~~~~~~~~ SNIP ~~~~~~~~~~~~~~~~~~~~~~~
# Host is up (0.076s latency).
# PORT STATE SERVICE VERSION
# 80/tcp open http Amazon CloudFront httpd
# |_http-server-header: CloudFront
# |_http-title: ERROR: The request could not be satisfied
# 443/tcp open ssl/https CloudFront
# |_http-server-header: CloudFront
# |_http-title: ERROR: The request could not be satisfied
# 2000/tcp open tcpwrapped
# 5060/tcp open tcpwrapped
# 8008/tcp open http Fortinet FortiGuard block page
# | http-methods:
# |_ Supported Methods: GET HEAD POST OPTIONS
# |_http-title: Did not follow redirect to https://52.x.x.209:8010/
# 8010/tcp open ssl/http-proxy FortiGate Web Filtering Service
# |_hadoop-datanode-info:
# |_hadoop-jobtracker-info:
# |_hadoop-tasktracker-info:
# |_hbase-master-info:
# |_ssl-date: TLS randomness does not represent time
# 8020/tcp open http-proxy FortiGate Web Filtering Service
# |_hadoop-datanode-info:
# |_hadoop-jobtracker-info:
# |_hadoop-tasktracker-info:
# |_hbase-master-info:
# | http-open-proxy: Potentially OPEN proxy.
# |_Methods supported:CONNECTION
# |_http-title: Web Filter Block Override
# Service Info: Device: security-misc
# NSE: Script Post-scanning.
# Initiating NSE at 20:16
# Completed NSE at 20:16, 0.00s elapsed
# Initiating NSE at 20:16
# Completed NSE at 20:16, 0.00s elapsed
# Read data files from: /usr/bin/../share/nmap
# Service detection performed. Please report any incorrect results at https://nmap.org/submit/ .
# Nmap done: 1 IP address (1 host up) scanned in 37.34 seconds
# Raw packets sent: 7 (308B) | Rcvd: 7 (308B)
# nmap -Pn -n -v -sV --script-timeout 5s --script "default and safe" -A 52.x.x.209 -p80,443,2000,5060,8008,8010,8020
# Starting Nmap 7.40 ( https://nmap.org ) at 2018-07-19 20:19 IST
# NSE: Loaded 141 scripts for scanning.
# NSE: Script Pre-scanning.
# Initiating NSE at 20:19
# Completed NSE at 20:19, 0.00s elapsed
# Initiating NSE at 20:19
# Completed NSE at 20:19, 0.00s elapsed
# Initiating SYN Stealth Scan at 20:19
# ~~~~~~~~~~~~ SNIP ~~~~~~~~~~~~~~~~~~~~~~~
# Host is up (0.12s latency).
# PORT STATE SERVICE VERSION
# 80/tcp open http Amazon CloudFront httpd
# |_http-server-header: CloudFront
# |_http-title: ERROR: The request could not be satisfied
# 443/tcp open ssl/https CloudFront
# |_http-server-header: CloudFront
# |_http-title: ERROR: The request could not be satisfied
# 2000/tcp open tcpwrapped
# 5060/tcp open tcpwrapped
# 8008/tcp open http Fortinet FortiGuard block page
# | http-methods:
# |_ Supported Methods: GET HEAD POST OPTIONS
# |_http-title: Did not follow redirect to https://52.x.x.209:8010/
# 8010/tcp open ssl/http-proxy FortiGate Web Filtering Service
# |_hadoop-datanode-info:
# |_hadoop-tasktracker-info:
# | http-methods:
# |_ Supported Methods: GET OPTIONS
# |_http-title: Web Filter Block Override
# | ssl-cert: Subject: commonName=FortiGate/organizationName=Fortinet/stateOrProvinceName=California/countryName=US
# | Issuer: commonName=support/organizationName=Fortinet/stateOrProvinceName=California/countryName=US
# | Public Key type: rsa
# | Public Key bits: 2048
# | Signature Algorithm: sha256WithRSAEncryption
# | Not valid before: 2015-07-16T00:33:11
# | Not valid after: 2038-01-19T03:14:07
# | MD5: f134 4f9d 87cb 3cd1 cfd6 0a92 8484 9d40
# |_SHA-1: e40b d3dd 8721 fb56 e4a9 7d1d 571d 1b43 67b6 779f
# |_ssl-date: TLS randomness does not represent time
# 8020/tcp open http-proxy FortiGate Web Filtering Service
# |_hadoop-datanode-info:
# |_hadoop-jobtracker-info:
# |_hadoop-tasktracker-info:
# |_hbase-master-info:
# | http-open-proxy: Potentially OPEN proxy.
# |_Methods supported:CONNECTION
# |_http-title: Web Filter Block Override
# Warning: OSScan results may be unreliable because we could not find at least 1 open and 1 closed port
# Device type: general purpose|PBX
# Running (JUST GUESSING): Linux 3.X|4.X (90%), Vodavi embedded (87%)
# OS CPE: cpe:/o:linux:linux_kernel:3 cpe:/o:linux:linux_kernel:4 cpe:/h:vodavi:xts-ip
# Aggressive OS guesses: Linux 3.2 - 4.6 (90%), Vodavi XTS-IP PBX (87%)
# No exact OS matches for host (test conditions non-ideal).
# Uptime guess: 0.935 days (since Wed Jul 18 21:53:33 2018)
# Network Distance: 6 hops
# TCP Sequence Prediction: Difficulty=259 (Good luck!)
# IP ID Sequence Generation: All zeros
# Service Info: Device: security-misc
# TRACEROUTE (using port 5060/tcp)
# HOP RTT ADDRESS
# 1 0.59 ms 159.65.144.254
# 2 0.55 ms 138.197.249.22
# 3 13.34 ms 124.124.67.162
# 4 ... 5
# 6 52.42 ms 52.222.168.209
# NSE: Script Post-scanning.
# Initiating NSE at 20:19
# Completed NSE at 20:19, 0.00s elapsed
# Initiating NSE at 20:19
# Completed NSE at 20:19, 0.00s elapsed
# Read data files from: /usr/bin/../share/nmap
# OS and Service detection performed. Please report any incorrect results at https://nmap.org/submit/ .
# Nmap done: 1 IP address (1 host up) scanned in 47.51 seconds
# Raw packets sent: 102 (8.756KB) | Rcvd: 47 (2.652KB)
# nmap -Pn -n -v -sV --script-timeout 5s --script "default and safe" -A -T4 52.x.x.209 -p80,443,2000,5060,8008,8010,8020
# Starting Nmap 7.40 ( https://nmap.org ) at 2018-07-19 20:20 IST
# NSE: Loaded 141 scripts for scanning.
# NSE: Script Pre-scanning.
# Initiating NSE at 20:20
# Completed NSE at 20:20, 0.00s elapsed
# Initiating NSE at 20:20
# Completed NSE at 20:20, 0.00s elapsed
# Initiating SYN Stealth Scan at 20:20
# ~~~~~~~~~~~~ SNIP ~~~~~~~~~~~~~~~~~~~~~~~
# Host is up (0.12s latency).
# PORT STATE SERVICE VERSION
# 80/tcp open http Amazon CloudFront httpd
# |_http-server-header: CloudFront
# |_http-title: ERROR: The request could not be satisfied
# 443/tcp open ssl/https CloudFront
# |_http-server-header: CloudFront
# |_http-title: ERROR: The request could not be satisfied
# 2000/tcp open tcpwrapped
# 5060/tcp open tcpwrapped
# 8008/tcp open http Fortinet FortiGuard block page
# | http-methods:
# |_ Supported Methods: GET HEAD POST OPTIONS
# |_http-title: Did not follow redirect to https://52.x.x.209:8010/
# 8010/tcp open ssl/http-proxy FortiGate Web Filtering Service
# |_hadoop-datanode-info:
# |_hadoop-jobtracker-info:
# |_hadoop-tasktracker-info:
# |_hbase-master-info:
# |_http-title: Web Filter Block Override
# |_ssl-date: TLS randomness does not represent time
# 8020/tcp open http-proxy FortiGate Web Filtering Service
# |_hadoop-datanode-info:
# |_hadoop-jobtracker-info:
# |_hadoop-tasktracker-info:
# |_hbase-master-info:
# | http-open-proxy: Potentially OPEN proxy.
# |_Methods supported:CONNECTION
# |_http-title: Web Filter Block Override
# Warning: OSScan results may be unreliable because we could not find at least 1 open and 1 closed port
# Device type: general purpose|PBX
# Running (JUST GUESSING): Linux 3.X|4.X (90%), Vodavi embedded (87%)
# OS CPE: cpe:/o:linux:linux_kernel:3 cpe:/o:linux:linux_kernel:4 cpe:/h:vodavi:xts-ip
# Aggressive OS guesses: Linux 3.2 - 4.6 (90%), Vodavi XTS-IP PBX (87%)
# No exact OS matches for host (test conditions non-ideal).
# Uptime guess: 0.687 days (since Thu Jul 19 03:52:30 2018)
# Network Distance: 6 hops
# TCP Sequence Prediction: Difficulty=258 (Good luck!)
# IP ID Sequence Generation: All zeros
# Service Info: Device: security-misc
# TRACEROUTE (using port 8020/tcp)
# HOP RTT ADDRESS
# 1 0.45 ms 159.65.144.254
# 2 0.49 ms 138.197.249.18
# 3 13.02 ms 124.124.67.154
# 4 ... 5
# 6 52.47 ms 52.x.x.209
# NSE: Script Post-scanning.
# Initiating NSE at 20:21
# Completed NSE at 20:21, 0.00s elapsed
# Initiating NSE at 20:21
# Completed NSE at 20:21, 0.00s elapsed
# Read data files from: /usr/bin/../share/nmap
# OS and Service detection performed. Please report any incorrect results at https://nmap.org/submit/ .
# Nmap done: 1 IP address (1 host up) scanned in 48.15 seconds
# Raw packets sent: 106 (8.980KB) | Rcvd: 47 (2.684KB)
#!/usr/bin/env python2.7
# gnxmerge.py - Glens Nmap XML merger
# Merge multiple nmap XML files
# Project URL: https://bitbucket.org/memoryresident/gnxtools
# Author URL: https://www.glenscott.net
import sys, argparse, copy, os, time
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
def handle_opts():
    """Parse command-line options and return the comma-separated sources as a list."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Glens Nmap XML Merger gnxmerge - Merges <host> sections from multiple Nmap XML files',
        usage='%(prog)s -sources=./file.xml,file2.xml,/path/to/files')
    parser.add_argument(
        '-s', '-sources', action='store', dest='sources', required=True,
        help='Comma separated list of paths to files and/or folder contents to merge. An .xml extension is not mandatory; all files and contents of target folder will be (non-recursively) processed, regardless of extension. If files are present which are not valid XML, they will be skipped with warnings.')
    parser.add_argument(
        '-v', '--version', action='version',
        version='%(prog)s 1.0.1 : Glens Nmap XML Merger (http://bitbucket.org/memoryresident/gnxtools )')
    opts = parser.parse_args()
    # Only the sources list is needed for now; extend if more options appear.
    return opts.sources.split(",")
def start_xml(script_start_time):
    """Build and return the root <nmaprun> element, mimicking nmap's own preamble.

    script_start_time: integer epoch recorded in the 'start' attribute.
    """
    # Reconstruct the invoking command line for the 'args' attribute
    # (leading space matches the original concatenation).
    scriptargs = ''
    for arg in sys.argv:
        scriptargs += ' ' + str(arg)

    nmaprun = ET.Element('nmaprun', {
        'scanner': sys.argv[0],
        'args': scriptargs,
        'start': str(script_start_time),
        'version': '1.0',
        'xmloutputversion': '1.04',
    })

    # Provenance comment precedes the usual verbose/debug children.
    nmaprun.append(ET.Comment('Generated by Glens nmap XML merger (gnxmerge)'))
    ET.SubElement(nmaprun, 'verbose', {'level': '0'})
    ET.SubElement(nmaprun, 'debug', {'level': '0'})
    return nmaprun
def finalise_xml(nmaprun_merged_results, script_start_time):
    """Append a <runstats> summary section to the merged root element.

    nmaprun_merged_results: [nmaprun_element, total_hosts, total_seconds, total_files]
    script_start_time: integer epoch when the script started (for the summary).
    Returns the same nmaprun element, now with runstats/finished and
    runstats/hosts children (nmap's closing bookkeeping format).
    """
    nmaprun = nmaprun_merged_results[0]
    total_hosts = nmaprun_merged_results[1]
    total_seconds = nmaprun_merged_results[2]
    total_files = nmaprun_merged_results[3]
    # (removed a dead ET.tostring(nmaprun) call whose result was never used)

    total_script_time = int(time.time()) - script_start_time
    summary_string = (
        'Nmap XML merge done at ' + time.strftime("%c") + "; " + str(
            total_hosts) + ' total hosts found in ' + str(
            total_files) + ' files; Merge completed in ' + str(
            total_script_time) + ' seconds')

    runstats = ET.SubElement(nmaprun, 'runstats')
    ET.SubElement(runstats, 'finished', {
        'time': str(int(time.time())),
        'timestr': time.strftime("%c"),
        'elapsed': str(total_seconds),
        'summary': summary_string,
        'exit': 'success',
    })
    # All merged hosts were scanned successfully, so "down" is always 0.
    ET.SubElement(runstats, 'hosts', {
        'up': str(total_hosts),
        'down': '0',
        'total': str(total_hosts),
    })
    return nmaprun
def merge_hosts(nmaprun, file_list):
    """Copy every <host> element from each input file into nmaprun.

    Files that fail to parse (or whose hosts lack starttime/endtime
    attributes) are skipped and reported in bad_file_list.

    Returns a 4-tuple:
      ([nmaprun, total_hosts, total_seconds, len(file_list)],
       bad_file_list, files_processed, total_hosts)
    """
    total_hosts = 0
    total_seconds = 0
    bad_file_list = []

    for current_file in file_list:
        try:
            current_nmap_file_blob = ET.ElementTree(file=current_file)
            for current_host in current_nmap_file_blob.findall('host'):
                total_hosts += 1
                # Accumulate per-host scan duration from its epoch attributes.
                total_seconds += calc_seconds(
                    current_host.attrib['starttime'],
                    current_host.attrib['endtime'])
                nmaprun.append(copy.deepcopy(current_host))
        except Exception:
            # Was a bare 'except:' - narrowed so KeyboardInterrupt/SystemExit
            # still propagate; anything else marks the file as unusable.
            bad_file_list.append(current_file)

    files_processed = len(file_list) - len(bad_file_list)
    nmaprun_merge_results = [nmaprun, total_hosts, total_seconds,
                             len(file_list)]
    return nmaprun_merge_results, bad_file_list, files_processed, total_hosts
def calc_seconds(starttime, finishtime):
    """Return the whole-second duration between two epoch timestamps (str or int)."""
    return int(finishtime) - int(starttime)
def input_file_list(sources_list):
    """Expand source paths into a flat file list.

    Each entry that is a directory contributes its (non-recursive) contents;
    anything else is passed through as-is.
    """
    file_list = []
    for target in sources_list:
        if os.path.isdir(target):
            for entry in os.listdir(target):
                # BUG FIX: the original concatenated target + entry directly,
                # producing broken paths whenever target lacked a trailing
                # slash; os.path.join handles both cases.
                file_list.append(os.path.join(target, entry))
        else:
            file_list.append(target)
    return file_list
def output_results(nmap_file_preamble, nmaprun, merge_job_output):
    """Emit the merged XML on stdout and a processing summary on stderr.

    merge_job_output: (results_list, bad_file_list, files_processed, total_hosts)
    as returned by merge_hosts.
    """
    bad_file_list = merge_job_output[1]
    files_processed = merge_job_output[2]
    total_hosts = merge_job_output[3]

    print(nmap_file_preamble)
    print(ET.tostring(nmaprun))

    # Warnings go to stderr so stdout stays a clean, redirectable XML stream;
    # the input folder may legitimately contain non-XML nmap 'normal' outputs.
    for badfile in bad_file_list:
        sys.stderr.write("\n WARNING:" + badfile + " skipped (not xml?)")

    sys.stderr.write("\n\nMerged " + str(total_hosts) + " hosts from " + str(
        files_processed) + " xml files. " + str(
        len(bad_file_list)) + " invalid files skipped.")
def main():
    """Entry point: merge all <host> sections from the given sources into one XML."""
    # int() because time.time() returns a float, and the official nmap format
    # expects an integer epoch (ndiff chokes on floats). finalise_xml does the
    # same for its own timestamps.
    script_start_time = int(time.time())

    nmap_file_preamble = ('<?xml version="1.0"?> \n'
                          '<!DOCTYPE nmaprun PUBLIC "-//IDN nmap.org//DTD Nmap XML 1.04//EN" "https://svn.nmap.org/nmap/docs/nmap.dtd"> \n'
                          '<?xml-stylesheet href="https://svn.nmap.org/nmap/docs/nmap.xsl" type="text/xsl"?> \n'
                          '<!-- nmap results file generated by gnxmerge.py: Glens Nmap XML Merger https://bitbucket.org/memoryresident/gnxtools --> \n\n'
                          )

    # Pipeline: options -> file list -> skeleton XML -> merged hosts ->
    # runstats appended -> printed results.
    file_list = input_file_list(handle_opts())
    skeleton = start_xml(script_start_time)
    merge_job_output = merge_hosts(skeleton, file_list)
    finalised = finalise_xml(merge_job_output[0], script_start_time)
    output_results(nmap_file_preamble, finalised, merge_job_output)


if __name__ == "__main__":
    main()
#!/bin/bash
# searchsploit.sh - look up known exploits for every per-IP nmap XML report.
#
# Usage: searchsploit.sh <domain> <resultDir>
#   Expects per-IP reports in $resultDir/nmap/*.xml (produced by nmap.sh).
set -euo pipefail

domain=$1
resultDir=$2

{
  echo ""
  echo "SEARCHSPLOIT Logs Now"
  echo ""
} >> "$resultDir/log.txt"

# Glob instead of parsing ls; NUL-delimited xargs keeps unusual names intact.
printf '%s\0' "$resultDir"/nmap/*.xml \
  | xargs -0 -I % searchsploit -v --nmap % \
  | tee -a "$resultDir/log.txt" "$resultDir/$domain.searchsploit.txt"

touch "$resultDir/$domain.searchsploit.finished"
#!/bin/bash
# subfinder.sh - enumerate subdomains with subfinder and clean the raw output.
#
# Usage: subfinder.sh <domain> <resultDir>
# Outputs: $resultDir/$domain.subfinder.txt (cleaned) plus a .finished marker.
set -euo pipefail

domain=$1
resultDir=$2
resultfile="$resultDir/$domain.subfinder.txt"

touch "$resultfile"
{
  echo ""
  echo "SUBFINDER Logs Now"
  echo ""
} >> "$resultDir/log.txt"

# One tee with two targets replaces the original chained tee | tee.
subfinder -d "$domain" | tee -a "$resultfile" "$resultDir/log.txt"

echo "Running sed to clean output" | tee -a "$resultDir/log.txt"

# Everything up to (and including) the "Unique subdomains" banner line is
# subfinder chatter; the actual results follow it. '|| true' keeps set -e /
# pipefail from aborting when the banner is absent.
till_here=$(grep -n "Unique subdomains" "$resultfile" | cut -d":" -f1 | head -1 || true)

if [ -n "$till_here" ]; then
  # Drop the banner block, blank lines, and any leading dot on each name.
  sed -i -e "1,$till_here d" -e '/^$/d' -e 's/^\.//' "$resultfile"
else
  # BUG FIX: with an empty $till_here the original built the invalid sed
  # expression "1, d"; fall back to cleaning without the banner deletion.
  sed -i -e '/^$/d' -e 's/^\.//' "$resultfile"
fi

touch "$resultfile.finished"
#!/bin/bash
# subjack.sh - check enumerated subdomains for subdomain-takeover candidates.
#
# Usage: subjack.sh <domain> <resultDir>
#   Expects $resultDir/$domain.amass_subfinder.txt as the wordlist.
set -euo pipefail

domain=$1
resultDir=$2

{
  echo ""
  echo "SUBJACK Logs Now"
  echo ""
} >> "$resultDir/log.txt"

subjack -v -c ~/go/src/github.com/haccer/subjack/fingerprints.json \
  -w "$resultDir/$domain.amass_subfinder.txt" -t 40 \
  -o "$resultDir/$domain.subjack.txt" -ssl | tee -a "$resultDir/log.txt"

touch "$resultDir/$domain.subjack.finished"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment