#Basic settings
[awvs_url_key]
awvs_url=https://192.168.73.128:13443/
api_key=1986ad8c0a5b3df4d7028d5f3c06e936c5d7211fb0ba444de82ed618e96251b54
#File listing the URLs to scan
domain_file=url.txt
#Scan configuration
[scan_seting]
##Scan speed, from slowest to fastest: sequential, slow, moderate, fast. Faster scans miss more findings; slower scans are more thorough.
scan_speed=slow
#Cookie used for scanning, applied to all URLs. Leave blank to let the scanner's crawler obtain cookies automatically.
#Example: cookie=BIDUPSID=D40B5A304EFD449C3F8DED17FDF633A0; PSTM=1592016294
cookie=
#Paths excluded from the scan, typically used to avoid triggering logout, exit, or delete actions after a cookie has been set
#An empty list (excluded_paths=[]) is also accepted
excluded_paths=['quit','exit','logout','Logout','delete','DELETE']
##Limit the crawl to the target address and its subdirectories. Values: True (default) / False; False is recommended.
limit_crawler_scope=False
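
For context, below is a minimal sketch of how a driver script could consume this config against the Acunetix (AWVS) v1 REST API. The config filename (config.ini), the endpoint paths, the target-configuration field names (scan_speed, limit_crawler_scope, excluded_paths, custom_cookies), and the stock Full Scan profile_id are assumptions not stated in the gist; verify them against your AWVS version.

    # A minimal sketch, assuming the Acunetix (AWVS) v1 REST API.
    # Endpoint paths, field names, and the Full Scan profile_id are
    # assumptions -- check them against your AWVS version.
    import ast
    import configparser

    import requests
    import urllib3

    urllib3.disable_warnings()  # AWVS on :13443 typically uses a self-signed cert

    config = configparser.ConfigParser()
    config.read("config.ini")  # assumed filename for the config shown above

    base = config["awvs_url_key"]["awvs_url"].rstrip("/")
    headers = {
        "X-Auth": config["awvs_url_key"]["api_key"],
        "Content-Type": "application/json",
    }
    scan = config["scan_seting"]  # section name kept as spelled in the config

    with open(config["awvs_url_key"]["domain_file"]) as f:
        urls = [line.strip() for line in f if line.strip()]

    for url in urls:
        # Register the URL as a scan target.
        r = requests.post(f"{base}/api/v1/targets", headers=headers,
                          json={"address": url, "description": "batch scan",
                                "criticality": 10},
                          verify=False)
        r.raise_for_status()
        target_id = r.json()["target_id"]

        # Apply the settings from [scan_seting] to the target.
        target_config = {
            "scan_speed": scan["scan_speed"],
            "limit_crawler_scope": scan.getboolean("limit_crawler_scope"),
            "excluded_paths": ast.literal_eval(scan["excluded_paths"]),
        }
        if scan["cookie"]:
            target_config["custom_cookies"] = [{"url": url,
                                                "cookie": scan["cookie"]}]
        requests.patch(f"{base}/api/v1/targets/{target_id}/configuration",
                       headers=headers, json=target_config,
                       verify=False).raise_for_status()

        # Launch a scan (the profile_id below is the ID commonly used for
        # the stock Full Scan profile).
        requests.post(f"{base}/api/v1/scans", headers=headers,
                      json={"target_id": target_id,
                            "profile_id": "11111111-1111-1111-1111-111111111111",
                            "schedule": {"disable": False, "start_date": None,
                                         "time_sensitive": False}},
                      verify=False).raise_for_status()

Note that verify=False disables TLS verification because the scanner's default listener on port 13443 ships with a self-signed certificate; excluded_paths is parsed with ast.literal_eval because the config stores it as a Python-style list literal.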