@deeso
Created June 3, 2022 00:04
Perform a VirusTotal search for potential phishing domains
import json
import sys
import requests
from datetime import datetime, timedelta
from argparse import ArgumentParser
DAYS = 30
LAST_SEEN = (datetime.now() - timedelta(days=DAYS)).strftime("%Y-%m-%d")
parser = ArgumentParser()
parser.add_argument('-key', default=None, type=str, help="VT API Key")
parser.add_argument('-query', default=None, type=str, help="query to use to search")
parser.add_argument('-url_substr', default=None, type=str, help="domain to fill in the query")
parser.add_argument('-limit', default=10, type=int, help="number of domains")
parser.add_argument('-last_seen', default=LAST_SEEN, type=str, help="last time URLs were seen" )
parser.add_argument('-positives', default=10, type=int, help="number of positive engine matches" )
parser.add_argument('-out', default="vt_url_search.json", type=str, help="file to dump the results to" )
# VT Intelligence query template: URLs returning HTTP 200 that contain the given
# substring, were last seen on or after the given date, have at least the given
# number of positive detections, and are flagged as phishing by some engine.
QUERY = 'entity:url response_code:200 url:{url_substr} ls:{last_seen}+ positives:{positives}+ engines:phishing'
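# For example, with hypothetical arguments -url_substr paypal -positives 10,
# the rendered query would look like:
#   entity:url response_code:200 url:paypal ls:<YYYY-MM-DD>+ positives:10+ engines:phishing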
if __name__ == "__main__":
    args = parser.parse_args()
    kargs = vars(args)
    if kargs['key'] is None:
        print("API key is required")
        sys.exit(-1)
    if kargs['url_substr'] is None and kargs['query'] is None:
        print("Must specify a query or a URL substring")
        sys.exit(-1)

    # Build the query from the template when one was not supplied directly, then
    # encode the literal '+' modifiers so they survive URL interpolation.
    query = kargs['query']
    if query is None:
        query = QUERY.format(**kargs)
    query = query.replace('+', '%2B')

    payload = {
        'limit': kargs['limit'],
        'query': query
    }
    url = "https://www.virustotal.com/api/v3/intelligence/search?query={query}&limit={limit}".format(**payload)
    headers = {
        'x-apikey': kargs['key']
    }
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        print("VirusTotal search failed ({}): {}".format(response.status_code, response.text))
        sys.exit(-1)
    data = response.json()

    # Flatten each returned URL object down to the fields of interest.
    results = []
    for dp in data['data']:
        attributes = dp['attributes']
        result = {}
        result['url'] = attributes['url']
        result['title'] = attributes.get('title', '')
        result['final_url'] = attributes['last_final_url']
        result['detections'] = attributes['last_analysis_stats']['malicious']
        result['response_code'] = attributes['last_http_response_code']
        results.append(result)

    with open(kargs['out'], 'w') as outfile:
        outfile.write(json.dumps(results, indent=4))
    # print(json.dumps(results, indent=4))
    # print(response.text)
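
# Example invocations (the script filename and all argument values below are
# hypothetical placeholders; substitute your own API key and search terms):
#   python vt_url_search.py -key <YOUR_VT_API_KEY> -url_substr paypal -positives 5 -limit 25
#   python vt_url_search.py -key <YOUR_VT_API_KEY> -query 'entity:url engines:phishing ls:2022-05-01+' -out phishing_urls.json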