@Sachinart
Last active March 19, 2024 19:31
Python script that fetches endpoints from every URL listed in an input file (for example, katana .js output). The script reads its URL list from URLS.txt, so point that file at your .js URLs; every extracted endpoint is saved to output.txt. Script by Chirag Artani.
import requests
import re
import threading

# Lock so concurrent threads don't interleave their writes to output.txt
write_lock = threading.Lock()


def extract_endpoints_from_url(url, output_file):
    try:
        response = requests.get(url)
        if response.status_code == 200:
            content = response.text
            # Regular expression to find endpoints (assumes they start with '/'
            # and are wrapped in single quotes, double quotes, or backticks).
            regex = r'(?<=["\'`])\/[a-zA-Z0-9_?&=\/\-\#.]*?(?=["\'`])'
            endpoints = re.findall(regex, content)
            if endpoints:
                with write_lock:
                    for endpoint in endpoints:
                        output_file.write(endpoint + "\n")
                    output_file.write("\n")
                return endpoints
            else:
                print(f"No endpoints found in {url}\n")
                return []
        else:
            print(f"Failed to fetch {url}: {response.status_code}\n")
            return []
    except Exception as e:
        print(f"Failed to fetch {url}: {e}\n")
        return []


def main():
    # Read URLs from URLS.txt
    with open('URLS.txt', 'r') as file:
        urls = file.read().splitlines()

    all_endpoints = []

    # Function to be run in each thread
    def worker(url):
        with open("output.txt", 'a') as output_file:  # Open file in append mode
            endpoints = extract_endpoints_from_url(url, output_file)
        if endpoints:
            all_endpoints.extend(endpoints)

    threads = []
    for url in urls:
        thread = threading.Thread(target=worker, args=(url,))
        thread.start()
        threads.append(thread)

    # Wait for all threads to complete
    for thread in threads:
        thread.join()

    print("\nAll Endpoints found:")
    print(all_endpoints)


if __name__ == "__main__":
    main()
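
As a quick sanity check of the endpoint regex, the small sketch below runs the same pattern against a hand-written sample of JavaScript. The sample content and its endpoints are illustrative assumptions, not taken from any real target or from the gist itself:

import re

# Illustrative JS content (assumed sample, not fetched from a real URL).
sample_js = 'fetch("/api/v1/users?id=1"); const logo = `/static/logo.png`;'

# Same pattern as extract_endpoints_from_url: a path starting with '/'
# and wrapped in single quotes, double quotes, or backticks.
regex = r'(?<=["\'`])\/[a-zA-Z0-9_?&=\/\-\#.]*?(?=["\'`])'

print(re.findall(regex, sample_js))
# Prints: ['/api/v1/users?id=1', '/static/logo.png']

Anything the lookbehind/lookahead pair cannot bracket with a quote or backtick (for example, a path built by string concatenation) will not be matched, which is the main limitation of this extraction approach.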