Skip to content

Instantly share code, notes, and snippets.

@sineer
Last active August 29, 2015 14:07
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save sineer/3b0c96cc62b90e340384 to your computer and use it in GitHub Desktop.
Save sineer/3b0c96cc62b90e340384 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
import pickle, os, sys, logging, json, socket
from httplib import HTTPConnection, socket
from smtplib import SMTP
def email_alert(subject, message):
    """Send an email alert via Gmail SMTP.

    subject/message are plain strings; the message is assembled as a raw
    RFC-822-style text blob. Raises smtplib/socket errors to the caller.
    """
    fromaddr = 'monitor@gmail.com'
    toaddrs = ['monitor@gmail.com']
    # toaddrs = ['devs@u.com']
    toaddrs = ", ".join(toaddrs)
    msg = """\
From: %s
To: %s
Subject: %s
%s
""" % (fromaddr, toaddrs, subject, message)
    server = SMTP('smtp.gmail.com:587')
    try:
        server.starttls()
        # SECURITY: credentials are hardcoded in source — move to a config
        # file or environment variable before sharing this script.
        server.login('monitor@gmail.com', 'SUPER SECRET PASSWORD!')
        server.sendmail(fromaddr, toaddrs, msg)
    finally:
        # Always close the SMTP session, even if login/sendmail fails.
        server.quit()
    logging.info("EMAIL SENT TO: " + toaddrs)
def get_site_status(url):
    """Classify url as 'up' or 'down' based on a HEAD request response."""
    response = get_response(url)
    # get_response returns None on failure; a missing/zero status means down.
    status_code = getattr(response, 'status', None)
    if status_code:
        return 'up'
    logging.error("URL: " + str(url) + " ERROR! SITE IS DOWN!!!")
    return 'down'
def get_store_app_file_info(url, path):
    """Query the STORE app_file_info API and verify the advertised file.

    On a 200 response, decodes the JSON body, extracts filename/filesize
    and delegates to test_ftp_file_download(). Returns that result, or
    'file_not_found' when a JSON key is missing, or
    'get_app_file_info_error' for any other failure (bad response, bad
    JSON, None response from a connection error).
    """
    response = get_app_file_info_response(url, path)
    try:
        if getattr(response, 'status') == 200:
            data = json.loads(response.read())
            filename = data[u'filename']
            filesize = data[u'filesize']
            return test_ftp_file_download(url, filename, filesize)
    except KeyError:
        logging.error("URL: " +str(url) + " KeyError (FILE NOT FOUND?)")
        return 'file_not_found'
    except Exception:
        # Narrowed from a bare except: covers AttributeError (response is
        # None) and ValueError (bad JSON) — fall through to the error path.
        pass
    logging.error("URL: " + str(url) + " GET_APP_FILE_INFO ERROR!")
    return 'get_app_file_info_error'
def test_ftp_file_download(url, filename, filesize):
    """Download filename from url's file server and check its size.

    Returns 'ftp_ok' when the payload length matches filesize,
    'ftp_bad_filesize' on a mismatch, 'ftp_error' on any download failure.
    """
    try:
        # Keep the try body minimal: only the download can legitimately fail.
        data = download_ftp_file(url, filename, filesize)
    except Exception:
        # Narrowed from a bare except; download errors become a status code.
        logging.error("FTP ERROR! URL: " + url)
        return 'ftp_error'
    if len(data) == filesize:
        logging.info("URL: " + str(url) + " FTP O.K.")
        return 'ftp_ok'
    logging.error("URL: " + str(url) + " FTP BAD FILESIZE ERROR!" + \
        "EXPECTED: " + str(filesize) + " BUT LEN(DATA)= " + str(len(data)))
    return 'ftp_bad_filesize'
def download_ftp_file(url, filename, filesize):
    """Download filename from the custom file server at url:55335.

    Protocol (as implemented here): send 'g\\0', then the NUL-terminated
    filename; the server echoes the filename, a NUL, then the raw file
    bytes. Returns the payload with the echoed-filename prefix stripped.
    filesize is accepted for interface compatibility but not used here;
    the caller (test_ftp_file_download) performs the size check.
    """
    port = 55335
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(10)
        s.connect((url, port))
        s.sendall('g\0')
        s.sendall(filename + '\0')
        # Accumulate chunks in a list and join once — avoids the quadratic
        # cost of repeated string concatenation on large files.
        chunks = []
        while 1:
            chunk = s.recv(4096)
            if not chunk:
                break
            chunks.append(chunk)
        data = ''.join(chunks)
    finally:
        # Always release the socket, even when connect/recv raises.
        s.close()
    # REMOVE RECEIVED FILENAME FROM DATA
    return data.split('\0', 1)[1]
def get_app_file_info_response(url, path):
    '''Return response object from STORE app_file_info request.

    Returns None on any connection or request failure.
    '''
    try:
        # timeout added for consistency with get_response(); without it a
        # hung server would stall the whole monitoring run.
        conn = HTTPConnection(url, timeout=5)
        conn.request('GET', path)
        return conn.getresponse()
    except socket.error:
        return None
    except Exception:
        # Narrowed from a bare except (e.g. malformed URL); treat as down.
        return None
def get_response(url):
    '''Return response object from a HEAD request to url.

    Returns None on any connection or request failure.
    '''
    try:
        conn = HTTPConnection(url, timeout=5)
        conn.request('HEAD', '/')
        return conn.getresponse()
    except socket.error:
        return None
    except Exception:
        # Narrowed from a bare except (e.g. malformed URL); treat as down.
        return None
def compare_site_status(prev_results):
    """Build a checker that reports status changes against prev_results.

    Returns a closure taking a URL; it probes the site (and, when the
    site is up, the STORE FTP service), emails an alert when the status
    differs from the previous run, and records the new status in
    prev_results (mutated in place).
    """
    def is_status_changed(url):
        status = get_site_status(url)
        friendly_status = '%s is %s' % (url, status)
        if status != 'down':
            # Site is reachable — also probe the STORE file service.
            path = '/api/query.php?q=app_file_info&appid=Tablet&action=download'
            status = get_store_app_file_info(url, path)
            friendly_status = '%s FTP STATUS: %s' % (url, status)
        changed = url in prev_results and prev_results[url] != status
        if changed:
            # Email status messages
            email_alert(friendly_status, friendly_status)
        prev_results[url] = status
    return is_status_changed
def is_internet_reachable():
    """Return False only when both probe hosts (yahoo.com, u.com) are down.

    Checks the second host only if the first is unreachable, so one dead
    probe host does not cause a false negative.
    """
    if get_site_status('www.yahoo.com') != 'down':
        return True
    return get_site_status('www.u.com') != 'down'
def load_old_results(file_path):
    '''Attempt to load the most recent pickled results.

    Returns the unpickled data from file_path, or an empty dict when the
    file does not exist (e.g. the first run).
    '''
    pickledata = {}
    if os.path.isfile(file_path):
        # with-statement guarantees the handle is closed even if
        # pickle.load raises on a corrupt file.
        with open(file_path, 'rb') as picklefile:
            pickledata = pickle.load(picklefile)
    return pickledata
def store_results(file_path, data):
    '''Pickle data to file_path so the next run can compare against it.

    Overwrites any existing file at file_path.
    '''
    # with-statement guarantees flush+close even if pickle.dump raises.
    with open(file_path, 'wb') as output:
        pickle.dump(data, output)
def main(filename):
    """Run one monitoring pass.

    filename is the argv slice; filename[0] is a path to a text file
    containing one URL per line. Checks each site, emails on status
    changes, and persists results to data.pkl for the next run.
    """
    # Fetch URLs from file (one per line).
    with open(filename[0]) as f:
        urls = [line.strip() for line in f]
    # Setup logging to store time
    logging.basicConfig(level=logging.DEBUG, filename='checksites.log',
        format='%(asctime)s %(levelname)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    # Load previous data
    pickle_file = 'data.pkl'
    pickledata = load_old_results(pickle_file)
    logging.info("Monitoring these URLs: " + str(urls))
    # Check sites only if Internet is_available
    if is_internet_reachable():
        status_checker = compare_site_status(pickledata)
        # Explicit loop instead of map(): map is for building lists, and
        # under Python 3 a discarded map() would silently never run.
        for url in urls:
            status_checker(url)
    else:
        logging.error('Either the world ended or we are not connected to the net.')
    # Store results in pickle file
    store_results(pickle_file, pickledata)
# Script entry point: run one monitoring pass over the URL file named on
# the command line.
if __name__ == '__main__':
    # First arg is script name, skip it
    main(sys.argv[1:])
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment