@bmatthewshea
Last active August 5, 2021 02:55
Python script that checks apex domains from a text file. For each domain it checks the bare apex and the "www." subdomain, over both SSL and non-SSL (four checks per domain), and prints the results in CSV format.
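Concretely, each listed domain triggers four GET requests. For a line reading example.com the script tries:

http://example.com
http://www.example.com
https://example.com
https://www.example.com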
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
## Brady Shea - 04AUG2021 - (Python 3 script)
## Reads in a text file of plain apex domains, one per line, like:
## "example.com"
## To save the output to a file, pipe the script through tee:
## python3 -u ./sites-up-down.py | tee ./domain-check.csv
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Suppress the warning urllib3 emits for every verify=False request
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
## Settings - can change these
sitelist = "sitelist-test.txt"
time_out = 2  # per-request timeout in seconds
##
def filelist_lookup():
    counter = 0
    with open(sitelist, 'r') as file_contents:
        lines = file_contents.readlines()
    # CSV header row
    print("Group,Domain,URI_type,Response_Code,Link Up/Down,Errors,Link,Redirect")
    for line in lines:
        counter += 1
        nosubdomain = line.strip()
        if not nosubdomain:  # skip blank lines
            continue
        wwwsubdomain = "www." + nosubdomain
        # Four checks per domain: bare apex and "www." over http and https
        dorequests("http://", nosubdomain, counter)
        dorequests("http://", wwwsubdomain, counter)
        dorequests("https://", nosubdomain, counter)
        dorequests("https://", wwwsubdomain, counter)
def dorequests(uri_type, domain, count):
    up_down = "link down"
    full_link = uri_type + domain
    try:
        with requests.Session() as req:
            response = req.get(full_link, verify=False, allow_redirects=False, timeout=time_out)
    # ReadTimeout is a subclass of Timeout, so it must be caught first
    except requests.exceptions.ReadTimeout:
        print_line(count, domain, uri_type[:-3], "000", up_down, "ERROR = READ TIMEOUT", full_link, "n/a")
    except requests.exceptions.Timeout:
        print_line(count, domain, uri_type[:-3], "000", up_down, "ERROR = TIMEOUT", full_link, "n/a")
    except requests.ConnectionError:
        print_line(count, domain, uri_type[:-3], "000", up_down, "ERROR = CONNECTION/SSL/TLS", full_link, "n/a")
    except requests.exceptions.TooManyRedirects:
        print_line(count, domain, uri_type[:-3], "000", up_down, "ERROR = TOO MANY REDIRECTS", full_link, "n/a")
    except KeyboardInterrupt:
        print("Keyboard exception caught. Exiting.")
        raise SystemExit()
    except requests.exceptions.RequestException as e:
        raise SystemExit(e)
    else:  # Got a reply
        if response.ok:
            up_down = "link up"
        if response.status_code in (301, 302):  # Redirect: show the Location target
            print_line(count, domain, uri_type[:-3], response.status_code, up_down, "n/a", full_link, response.headers.get('Location', 'n/a'))
        else:
            print_line(count, domain, uri_type[:-3], response.status_code, up_down, "n/a", full_link, "n/a")
def print_line(ct, dom, uri, rscode, updown, errors, fullurl, redirect):
    print("Site {0},{1},{2},{3},{4},{5},{6},{7}".format(ct, dom, uri, rscode, updown, errors, fullurl, redirect))

filelist_lookup()
sitelist-test.txt:
example.com
example.net
example.org
example.edu
example.us
example.me
bmatthewshea commented Aug 5, 2021

Tested with a site list of 350+ domains and it works. I had some read-timeout issues at first. Please report any errors you find.
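If you hit similar read timeouts, the per-request timeout is the time_out variable near the top of the script, so raising it is a one-line change. A minimal tweak (the shipped default is 2 seconds; the 5 below is just an example value):

time_out = 5  # example only: seconds to wait per request before reporting a timeout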

Example output (Four checks per site / .csv format):

[screenshot: SHEA22-2021-08-04_215032 - example CSV output]
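The rows follow the print_line format in the script; against example.com the four checks would produce lines shaped roughly like this (status codes and redirect targets are illustrative, not captured output):

Group,Domain,URI_type,Response_Code,Link Up/Down,Errors,Link,Redirect
Site 1,example.com,http,200,link up,n/a,http://example.com,n/a
Site 1,www.example.com,http,301,link up,n/a,http://www.example.com,https://www.example.com/
Site 1,example.com,https,200,link up,n/a,https://example.com,n/a
Site 1,www.example.com,https,200,link up,n/a,https://www.example.com,n/a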
