iss-notify tells the time until the next pass of the ISS
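The script below targets Python 2: it relies on urllib2 and on the original BeautifulSoup package (version 3, typically installed with "pip install BeautifulSoup"). Assuming it is saved as iss-notify.py, it can be run directly with "python iss-notify.py"; the script's own comments assume it is run about once a week, for example from cron or launchd.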
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Heavily 'inspired' by Dr. Drang (https://github.com/drdrang/heavens-above)
import urllib2
from BeautifulSoup import BeautifulSoup
from datetime import datetime, date, timedelta
from time import strptime
import os
import sys
from os.path import expanduser
import pickle
# home contains the path to the current user's home directory
home = expanduser("~")
# This will be run weekly. The ISS information is given for a 10-day window.
# To avoid duplicating events, we'll filter out events more than 7 days
# in the future.
nextWeek = date.today() + timedelta(days=7)
lastWeek = date.today() - timedelta(days=7)
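# lastWeek is compared against the cache file's modification date below to
# decide whether the cached pass data is stale.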
now = datetime.today()

def modification_date(filename):
    """Get the modification date of a file and return a date object."""
    t = os.path.getmtime(filename)
    return date.fromtimestamp(t)

def parseRow(row):
    """Parse a row of Heavens Above data and return the start and end times (datetime),
    the intensity (float), and the beginning, peak, and end sky positions (strings)."""
    cols = row.findAll('td')
    dStr = cols[0].a.string
    t1Str = ':'.join(cols[2].string.split(':')[0:3])
    t3Str = ':'.join(cols[8].string.split(':')[0:3])
    intensity = float(cols[1].string)
    alt1 = cols[3].string.replace(u'°', '')
    az1 = cols[4].string.replace('E', 'O')  # convert compass 'E' (East) to German 'O' (Ost)
    alt2 = cols[6].string.replace(u'°', '')
    az2 = cols[7].string.replace('E', 'O')
    alt3 = cols[9].string.replace(u'°', '')
    az3 = cols[10].string.replace('E', 'O')
    loc1 = '%s-%s' % (az1, alt1)
    loc2 = '%s-%s' % (az2, alt2)
    loc3 = '%s-%s' % (az3, alt3)
    startStr = '%s %s %s' % (dStr, date.today().year, t1Str)
    start = datetime(*strptime(startStr, '%d %b %Y %H:%M:%S')[0:6])
    endStr = '%s %s %s' % (dStr, date.today().year, t3Str)
    end = datetime(*strptime(endStr, '%d %b %Y %H:%M:%S')[0:6])
    return (start, end, intensity, loc1, loc2, loc3)

def getPasses():
    """Fetch the 10-day ISS pass table from Heavens Above, keep the passes within
    the next seven days, cache them as a pickle in the home directory, and return them."""
    # Heavens Above URL; satid 25544 is the ISS, lat/lng/loc/alt/tz describe the observer (Bremen).
    haurl = "http://www.heavens-above.com/PassSummary.aspx?satid=25544&lat=53.08333&lng=8.8&loc=Bremen&alt=3&tz=CET"
    out = u"Suche in den Interwebs …\n"  # "Searching the interwebs ..."
    print out.encode("utf-8")
    # Get the 10-day ISS page.
    req = urllib2.Request(haurl)
    response = urllib2.urlopen(req)
    iHtml = response.read()
    # In the past, Beautiful Soup hasn't been able to parse the Heavens Above HTML.
    # To get around this problem, we extract just the table of ISS data and set
    # it in a well-formed HTML skeleton. If there is no table of ISS data, create
    # an empty table.
    try:
        table = iHtml.split(r'<table class="standardTable"', 1)[1]
        table = table.split(r'>', 1)[1]
        table = table.split(r'</table>', 1)[0]
    except IndexError:
        table = '<tr><td></td></tr>'
    html = '''<html>
<head>
</head>
<body>
<table>
%s
</table>
</body>
</html>''' % table
    # Parse the HTML.
    soup = BeautifulSoup(html)
    # Collect only the data rows of the table.
    rows = soup.findAll('table')[0].findAll('tr')[2:]
    passes_dict = []
    # Go through the data rows.
    for row in rows:
        (start, end, intensity, loc1, loc2, loc3) = parseRow(row)
        # if intensity <= maxMag and int(loc2.split('-')[1]) >= minAlt and start.date() < nextWeek and start.hour > earliest:
        # Only add passes that start within the next 7 days.
        if start.date() < nextWeek:
            passes_dict.append({"begin_time": start, "end_time": end, "magnitude": intensity, "loc1": loc1, "loc2": loc2, "loc3": loc3})
    # Cache the collected passes so later runs can skip the download.
    f = open(os.path.join(home, ".iss-notitfy-passes.pickle"), 'w')
    pickle.dump(passes_dict, f)
    f.close()
    return passes_dict
passes = []
data_available = False
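# Use the cached pickle if it exists and is less than a week old; otherwise
# (no cache yet, or the cache is stale) fetch fresh data from Heavens Above.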
try:
    f = open(os.path.join(home, ".iss-notitfy-passes.pickle"))
except IOError:
    # "No local data found."
    sys.stdout.write(u"Keine lokalen Daten gefunden. ")
    passes = getPasses()
else:
    modDate = modification_date(os.path.join(home, ".iss-notitfy-passes.pickle"))
    if modDate >= lastWeek:
        passes = pickle.load(f)
        f.close()
    else:
        # "Local data out of date."
        sys.stdout.write(u"Lokale Daten veraltet. ")
        f.close()
        passes = getPasses()
# Loop through the passes and find the first upcoming one
for apass in passes:
    next_pass = apass["begin_time"]
    # Time remaining until this pass begins (a datetime.timedelta).
    delta = next_pass - now
    past = delta.days
    if past >= 0:
        days_to_next_pass = delta.days
        seconds_to_next_pass = delta.seconds
        data_available = True
        break
    else:
        data_available = False
if data_available:
    location = apass["loc1"]
    magnitude = apass["magnitude"]
    # How long will this pass last?
    duration = apass["end_time"] - apass["begin_time"]
    pass_length = duration.seconds
    minutes_to_next_pass = seconds_to_next_pass / 60
    hours_to_next_pass = minutes_to_next_pass / 60
    restminuten = minutes_to_next_pass % 60
    # "Next pass in %d days, %d hours and %d minutes.
    #  Direction %s°, apparent magnitude %2.1f mag, duration %d seconds."
    out = u"Nächster Überflug in %d Tagen, %d Stunden und %d Minuten.\nRichtung %s°, scheinbare Helligkeit %2.1f mag, Dauer %d Sekunden." % (days_to_next_pass, hours_to_next_pass, restminuten, location, magnitude, pass_length)
    print out.encode("utf-8")
else:
out = u"Leider keine Überflüge in den nächsten Tagen"
print out.encode("utf-8")
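
The parsed passes are cached as a pickle in the home directory. A minimal companion sketch, assuming that file already exists from a previous run, for inspecting the cache from a separate Python 2 session:

import os
import pickle

# Load the cached list of passes written by iss-notify.
f = open(os.path.join(os.path.expanduser("~"), ".iss-notitfy-passes.pickle"))
passes = pickle.load(f)
f.close()
# Each entry is a dict with begin_time/end_time (datetime objects), magnitude (float)
# and the begin/peak/end sky positions loc1/loc2/loc3 (strings).
for p in passes:
    print p["begin_time"], p["magnitude"], p["loc1"]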