-
-
Save fny/2361272 to your computer and use it in GitHub Desktop.
Pythonic site monitor
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
# sample usage: checksites.py eriwen.com nixtutor.com yoursite.org | |
import pickle, os, sys, logging | |
from httplib import HTTPConnection, socket | |
from smtplib import SMTP | |
def email_alert(message, status):
    '''Send a status-change alert by email via Gmail's SMTP server.

    status becomes the Subject line and message the body; the recipient
    is a carrier SMS gateway so the alert arrives as a text message.
    NOTE(review): credentials are hard-coded and must be edited before use.
    '''
    sender = 'you@gmail.com'
    recipient = 'yourphone@txt.att.net'
    smtp = SMTP('smtp.gmail.com:587')
    smtp.starttls()
    smtp.login('you', 'password')
    payload = 'Subject: %s\r\n%s' % (status, message)
    smtp.sendmail(sender, recipient, payload)
    smtp.quit()
def get_site_status(url):
    '''Return 'up' if a HEAD request to url answers HTTP 200, else 'down'.'''
    response = get_response(url)
    # get_response returns None on socket errors; the original expressed this
    # as getattr(...) inside a try/except AttributeError — an explicit None
    # check says the same thing directly.
    if response is not None and response.status == 200:
        return 'up'
    return 'down'
def get_response(url):
    '''Return the response object for a HEAD / request to url.

    Returns None on socket-level failures (DNS error, refused connection,
    unreachable host). Any other failure — e.g. a malformed URL — is logged
    and aborts the program, as in the original.
    '''
    try:
        conn = HTTPConnection(url)
        conn.request('HEAD', '/')
        return conn.getresponse()
    except socket.error:
        # Host unreachable / refused / DNS failure: caller treats this as down.
        return None
    except Exception:
        # BUG FIX: the original logging.error('Bad URL:', url) passed url as a
        # %-format argument with no placeholder, raising a formatting error
        # inside the logging machinery instead of logging the URL.
        # Also narrowed from a bare except (which swallowed KeyboardInterrupt)
        # and replaced bare exit() with sys.exit() (site-module independent).
        logging.error('Bad URL: %s', url)
        sys.exit(1)
def get_headers(url):
    '''Return the response headers for url (list of (name, value) pairs).

    Returns the string 'Headers unavailable' when no response could be
    obtained from the host.
    '''
    response = get_response(url)
    if response is None:
        # get_response could not reach the host; original reached this path
        # via getattr(...) raising AttributeError on None.
        return 'Headers unavailable'
    return response.getheaders()
def compare_site_status(prev_results):
    '''Build a status-checker closure over the prev_results dict.

    The returned function probes a single URL, prints its current state,
    and — when the state differs from the one recorded in prev_results —
    logs a warning and emails the response headers. prev_results is
    updated in place with the freshly observed state.
    '''
    def is_status_changed(url):
        status = get_site_status(url)
        friendly_status = '%s is %s' % (url, status)
        # Parenthesised form prints identically under Python 2 for one arg.
        print(friendly_status)
        changed = url in prev_results and prev_results[url] != status
        if changed:
            logging.warning(status)
            # Mail the headers so the recipient can see what is going on.
            email_alert(str(get_headers(url)), friendly_status)
        prev_results[url] = status
    return is_status_changed
def is_internet_reachable():
    '''Return False only when both probe sites look down.

    Checks Google first and short-circuits, exactly like the original
    `and` chain; Yahoo is only probed if Google appears down.
    '''
    probes = ('www.google.com', 'www.yahoo.com')
    return any(get_site_status(host) != 'down' for host in probes)
def load_old_results(file_path):
    '''Load the pickled results dict from file_path.

    Returns an empty dict when the file does not exist yet (first run).
    FIX: the file handle is now closed via a with-statement even if
    pickle.load raises; the original leaked it on that path.
    '''
    pickledata = {}
    if os.path.isfile(file_path):
        with open(file_path, 'rb') as picklefile:
            pickledata = pickle.load(picklefile)
    return pickledata
def store_results(file_path, data):
    '''Pickle the results dict to file_path for comparison on the next run.

    FIX: the with-statement guarantees the file is flushed and closed on
    all paths, including when pickle.dump raises.
    '''
    with open(file_path, 'wb') as output:
        pickle.dump(data, output)
def main(urls):
    '''Check each site in urls and persist the results between runs.'''
    # Timestamped warnings/errors accumulate in a log file beside the script.
    logging.basicConfig(level=logging.WARNING, filename='checksites.log',
            format='%(asctime)s %(levelname)s: %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S')
    # Previous run's statuses, keyed by URL.
    pickle_file = 'data.pkl'
    previous = load_old_results(pickle_file)
    if not is_internet_reachable():
        logging.error('Either the world ended or we are not connected to the net.')
    else:
        check = compare_site_status(previous)
        # Explicit loop instead of map(): same effect, intent is clearer.
        for url in urls:
            check(url)
    # Persist the (possibly updated) statuses for the next run.
    store_results(pickle_file, previous)
if __name__ == '__main__':
    # Entry point: site hostnames come from the command line; argv[0] is
    # the script name, so skip it.
    main(sys.argv[1:])
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Basically, this script just checks if the internet is available, then checks each site. If the previous result is available and is different, it sends an email with the headers received, so you can get a good idea of what’s going on. Even cooler, you can use your cell phone carrier’s email-to-SMS gateway address to get text messages whenever your sites’ availability changes.
NOTE: You must have some sort of mailer daemon installed. See How to setup Gmail with SMTP. You can try it out by editing the appropriate parts of the script and then doing:
Scheduling it up with cron
I’ve already shown you the ins and outs of basic cron scheduling. We can have this run every 5 minutes by typing crontab -e and then adding:
What do you think? Tell me how you’d make it more “pythonic” or otherwise improve it in the comments.