
@vijay922
Created August 4, 2020 07:39
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
version = sys.version_info
if version < (3, 0):
    print('This Python version is not supported; Python 3 is required')
    sys.exit()
import requests
import json,ast
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import configparser
cf = configparser.ConfigParser()
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
cf.read(r"awvs_config.ini",encoding='utf-8')
secs=cf.sections()
awvs_url =cf.get('awvs_url_key','awvs_url')
apikey = cf.get('awvs_url_key','api_key')
headers = {'Content-Type': 'application/json',"X-Auth": apikey}
add_count_suss=0
error_count=0
target_scan=False
target_list=[]
pages = 0  # pagination cursor (offset) for /api/v1/targets; advanced by 10 per page
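# The script expects an awvs_config.ini next to it. A minimal sketch of that file,
# using the section/key names read above and in main(); the values shown are only
# placeholder assumptions, not AWVS defaults:
#
#   [awvs_url_key]
#   awvs_url = https://127.0.0.1:3443
#   api_key = <your Acunetix API key, sent as the X-Auth header>
#   domain_file = url.txt
#
#   [scan_seting]
#   excluded_paths = ["/logout"]      ; must be a list literal (parsed with ast.literal_eval)
#   limit_crawler_scope = true
#   scan_speed = fast
#   cookie =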
def get_target_list():  # Get all targets in the scanner
    global pages, target_list
    while 1:
        target_dict = {}
        get_target_url = awvs_url + '/api/v1/targets?c={}&l=10'.format(str(pages))
        r = requests.get(get_target_url, headers=headers, timeout=30, verify=False)
        result = json.loads(r.content.decode())
        try:
            for targetsid in range(len(result['targets'])):
                target_dict = {'target_id': result['targets'][targetsid]['target_id'],
                               'address': result['targets'][targetsid]['address']}
                target_list.append(target_dict)
            pages = pages + 10
            if len(result['targets']) == 0:
                break
        except Exception as e:
            return r.text
def addTask(url, target):  # Add a single target to the scanner and return its target_id
    try:
        url = ''.join((url, '/api/v1/targets/add'))
        data = {"targets": [{"address": target, "description": "This url is added for script"}], "groups": []}
        r = requests.post(url, headers=headers, data=json.dumps(data), timeout=30, verify=False)
        result = json.loads(r.content.decode())
        return result['targets'][0]['target_id']
    except Exception as e:
        return e
def scan(url, target, Crawl, user_agent, profile_id, proxy_address, proxy_port, scan_speed, limit_crawler_scope, excluded_paths, scan_cookie, is_to_scan):
    scanUrl = ''.join((url, '/api/v1/scans'))
    target_id = addTask(url, target)
    if target_id:
        try:
            configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent, scan_speed,
                          limit_crawler_scope, excluded_paths, scan_cookie, target)  # Configure the target's parameters
            if is_to_scan:
                data = {"target_id": target_id, "profile_id": profile_id, "incremental": False,
                        "schedule": {"disable": False, "start_date": None, "time_sensitive": False}}
                response = requests.post(scanUrl, data=json.dumps(data), headers=headers, timeout=30, verify=False)
                result = json.loads(response.content)
                return result['target_id']
            else:
                print(target, 'Target added successfully (no scan started)')
                return 2
        except Exception as e:
            print(e)
def configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent, scan_speed, limit_crawler_scope, excluded_paths, scan_cookie, target):  # Configure the target
    configuration_url = ''.join((url, '/api/v1/targets/{0}/configuration'.format(target_id)))
    if scan_cookie != '':
        data = {"scan_speed": scan_speed, "login": {"kind": "none"}, "ssh_credentials": {"kind": "none"},
                "sensor": False, "user_agent": user_agent, "case_sensitive": "auto",
                "limit_crawler_scope": limit_crawler_scope, "excluded_paths": excluded_paths,
                "authentication": {"enabled": False},
                "proxy": {"enabled": Crawl, "protocol": "http", "address": proxy_address, "port": proxy_port},
                "technologies": [], "custom_headers": [],
                "custom_cookies": [{"url": target, "cookie": scan_cookie}],
                "debug": False, "client_certificate_password": "", "issue_tracker_id": "", "excluded_hours_id": ""}
    else:
        data = {"scan_speed": scan_speed, "login": {"kind": "none"}, "ssh_credentials": {"kind": "none"},
                "sensor": False, "user_agent": user_agent, "case_sensitive": "auto",
                "limit_crawler_scope": limit_crawler_scope, "excluded_paths": excluded_paths,
                "authentication": {"enabled": False},
                "proxy": {"enabled": Crawl, "protocol": "http", "address": proxy_address, "port": proxy_port},
                "technologies": [], "custom_headers": [], "custom_cookies": [],
                "debug": False, "client_certificate_password": "", "issue_tracker_id": "", "excluded_hours_id": ""}
    r = requests.patch(url=configuration_url, data=json.dumps(data), headers=headers, timeout=30, verify=False)
    # print(configuration_url, r.text)
def delete_targets():  # Delete all scan targets from the scanner
    global awvs_url, apikey, headers
    while 1:
        quer = '/api/v1/targets'
        try:
            r = requests.get(awvs_url + quer, headers=headers, timeout=30, verify=False)
            result = json.loads(r.content.decode())
            if int(result['pagination']['count']) == 0:
                print('All scan targets have been deleted; the target list is now empty')
                return 0
            for targetsid in range(len(result['targets'])):
                targets_id = result['targets'][targetsid]['target_id']
                targets_address = result['targets'][targetsid]['address']
                # print(targets_id, targets_address)
                try:
                    del_log = requests.delete(awvs_url + '/api/v1/targets/' + targets_id, headers=headers, timeout=30, verify=False)
                    if del_log.status_code == 204:
                        print(targets_address, ' target deleted successfully')
                except Exception as e:
                    print(targets_address, e)
        except Exception as e:
            print(awvs_url + quer, e)
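# Optional helper (not part of the original script): a minimal sketch for listing
# queued scans so progress can be checked after they are started. It assumes the
# same AWVS REST API used above also answers GET /api/v1/scans with a JSON body
# containing a 'scans' list; the 'target'/'current_session' field names below are
# assumptions about that response shape.
def list_scans():
    try:
        r = requests.get(awvs_url + '/api/v1/scans', headers=headers, timeout=30, verify=False)
        result = json.loads(r.content.decode())
        for scan_item in result.get('scans', []):
            # print the target address and the current scan status, if present
            print(scan_item.get('target', {}).get('address'),
                  scan_item.get('current_session', {}).get('status'))
        return result
    except Exception as e:
        print(awvs_url + '/api/v1/scans', e)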
def main():
    global add_count_suss, error_count, target_scan
    ######################################## AWVS scan configuration parameters ########################################
    Crawl = False  # Default False, not enabled
    proxy_address = '127.0.0.1'  # Do not delete; only used when proxy scanning is enabled
    proxy_port = '777'  # Do not delete; only used when proxy scanning is enabled
    input_urls = cf.get('awvs_url_key', 'domain_file')
    excluded_paths = ast.literal_eval(cf.get('scan_seting', 'excluded_paths'))
    limit_crawler_scope = cf.get('scan_seting', 'limit_crawler_scope')
    scan_speed = cf.get('scan_seting', 'scan_speed')
    scan_cookie = cf.get('scan_seting', 'cookie').replace('\n', '').strip()  # Strip newlines and surrounding whitespace
    mod_id = {
        "1": "11111111-1111-1111-1111-111111111111",  # Full scan
        "2": "11111111-1111-1111-1111-111111111112",  # High-risk vulnerabilities
        "3": "11111111-1111-1111-1111-111111111116",  # XSS vulnerabilities
        "4": "11111111-1111-1111-1111-111111111113",  # SQL injection vulnerabilities
        "5": "11111111-1111-1111-1111-111111111115",  # Weak password detection
        "6": "11111111-1111-1111-1111-111111111117",  # Crawl only
        "7": "11111111-1111-1111-1111-111111111120",  # Malware scan
        "8": "11111111-1111-1111-1111-111111111120"   # Only add targets; this profile is never used
    }
    if target_scan == False:
        print("""Choose the type of scan:
        1 [Start full scan]
        2 [Start scanning for high-risk vulnerabilities]
        3 [Start scanning for XSS vulnerabilities]
        4 [Start scanning for SQL injection vulnerabilities]
        5 [Start weak password detection]
        6 [Start Crawl Only: crawl only, you will be prompted for the passive scanner address]
        7 [Start malware scan]
        8 [Only add the targets to the scanner, do not start any scan] """)
    else:
        print("""Scan the targets already in the scanner; choose the scan type:
        1 [Full scan]
        2 [Scan for high-risk vulnerabilities]
        3 [Scan for XSS vulnerabilities]
        4 [Scan for SQL injection vulnerabilities]
        5 [Weak password detection]
        6 [Crawl Only: crawler only, you will be prompted for the passive scanner address]
        7 [Malware scan] """)
    scan_type = str(input('Please enter the number: '))
    try:
        is_to_scan = True
        if target_scan == False:
            if '8' == scan_type:
                is_to_scan = False
        profile_id = mod_id[scan_type]
    except Exception as e:
        print('Invalid input, please check:', e)
        sys.exit()
    if profile_id == '11111111-1111-1111-1111-111111111117':
        proxy_address = str(input('Enter the passive scanner listening IP address (e.g. 127.0.0.1): '))
        proxy_port = str(input('Enter the passive scanner listening port (e.g. 7777): '))
        Crawl = True
    ######################################## Scan configuration parameters ########################################
    targets = open(input_urls, 'r', encoding='utf-8').read().split('\n')
    user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.21 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.21"  # Default User-Agent header for scans
    if Crawl:  # Crawl only: proxy traffic to the passive scanner (e.g. xray)
        profile_id = "11111111-1111-1111-1111-111111111117"
    if target_scan == False:
        for target in targets:
            target = target.strip()
            # if '://' not in target and 'http' not in target:
            if 'http' not in target[0:7]:
                target = 'http://' + target
            target_state = scan(awvs_url, target, Crawl, user_agent, profile_id, proxy_address, int(proxy_port),
                                scan_speed, limit_crawler_scope, excluded_paths, scan_cookie, is_to_scan)
            if target_state and target_state != 2:
                open('./add_log/success.txt', 'a', encoding='utf-8').write(target + '\n')
                add_count_suss = add_count_suss + 1
                print("{0} has been added to the scan queue, count:".format(target), str(add_count_suss))
            elif target_state == 2:
                pass
            else:
                open('./add_log/error_url.txt', 'a', encoding='utf-8').write(target + '\n')
                error_count = error_count + 1
                print("{0} failed to add".format(target), str(error_count))
    elif target_scan == True:
        get_target_list()
        scanUrl2 = ''.join((awvs_url, '/api/v1/scans'))
        for target_for in target_list:
            data = {"target_id": target_for['target_id'], "profile_id": profile_id, "incremental": False,
                    "schedule": {"disable": False, "start_date": None, "time_sensitive": False}}
            configuration(awvs_url, target_for['target_id'], proxy_address, proxy_port, Crawl, user_agent, scan_speed,
                          limit_crawler_scope, excluded_paths, scan_cookie,
                          target_for['address'])  # Re-apply the configuration when scanning existing targets
            try:
                response = requests.post(scanUrl2, data=json.dumps(data), headers=headers, timeout=30, verify=False)
                result = json.loads(response.content)
                if 'profile_id' in str(result) and 'target_id' in str(result):
                    print(target_for['address'], 'added to the scanner queue; scan started')
            except Exception as e:
                print(str(target_for['address']) + ' scan failed ', e)
if __name__ == '__main__':
print( """
********************************************************************
.@@@@@.***. ..***. .***]@@@@`.
*/@^*@@^.** ..***. ..*..@@.@@@^
.[@//@@^**...*]]]]]/@@^..*** *@@,@@/.
*@@@^*.*...@@,*,@@*=@******.**@@\*
**[[\*.....**\@@@*..=@@@@@@\*,.[\`*. .
.**@@@`***,@@[[` .*..***[[@@@@@@***.*.
.*....*.,]**..*. .*.*****..*,\@@`*.**.
.*..*..*.. .*.*.**/@@@@@@@\]**/@@@@@@@`..****,@@`.*.. *.*,]]***.
***,]]]*** .****=@/..*****=@@`*****.*=@^***.*,@\***. .*=@/*=@]**
**@@`*@@@. .**.*@@^*@@@^ **=@@@@@^****,@@*.. ***\@^.. .*\@@.=@@`*.
.,[@//@@/* .*.,@@@^*@@@`*.*=@`\@@`**.*=@@*. **@@. .***=@@^***
****@@@@*. .*=@^*\@`*.*****/@@`******,@@/* .**,@^. ...=@@^***.
**.*@@@*.* .@@@@@@@@@\/@@@@[\@@@\]/@@@*,]/\]]`*.*=@^*****..=@@^..*.
****/@@***. .@@@@@@@`,/@/.\`**@^*/@`*.,@@@@@@@@@`**@@*..***......**.
.....**.** .@@@@@@@@@@@@@@@@@@@@@@@**\@@@@@@@@@^*=@@.*.*.**....**.
*=@@@@@@^.=@@@@@@@@@@@[`.*=@@@@@@@@@`.@@`
..@@@@@****,[@@@@@@[**.****,@@@@@@/`=@@`
..,\@@\***. ... .... ..... .*[**]@@@@`.....*.*.*.
.***.**@@\` .*=@@@@@*@@@\*******.
.***.**@@[[` ..**.****.,@@@@\*.**.
.*.**=@@**. ...*,@@....
.*.**@@^*... .***.@@....
.*.*@@`.*. .**.*@@*.*.
.**.*@@^,*. ***,@@*.**.
.****\@^***. .**.@@/****.
=@@**. .*=@@^
Edited:@vijay922
********************************************************************
1 [Add URLs to AWVS Scanner in batches]
2 [Delete all targets in the scanner with one key]
3 [Scan the target already in the scanner]
""")
    selection = int(input('Please enter the number: '))
    if selection == 1:
        main()
    elif selection == 2:
        delete_targets()
    elif selection == 3:
        target_scan = True
        main()
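# Usage notes (a sketch based on how this script reads its inputs):
#   - Put awvs_config.ini next to the script and fill in awvs_url, api_key and
#     domain_file (the domain file lists one URL per line).
#   - Create the ./add_log/ directory beforehand: success.txt and error_url.txt
#     are appended there when adding targets in batch mode.
#   - Run the script with Python 3 and pick an option from the menu above.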
@test502git

why?

M0dred commented Feb 11, 2023

Copy paste without credit!
