-
-
Save JamesHarrison/fd75fb768d0825d3a9b4db5622656f1b to your computer and use it in GitHub Desktop.
Script to calculate a visible-horizon altitude profile from a 360° photo taken at twilight with a visible moon, using the known time and Earth location of the capture
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
# Script to determine a horizon profile from a 360 degree photo taken at twilight with a visible moon, | |
# with no prior knowledge other than the approx time and location of the photo | |
from skyfield.api import load, Topos | |
import numpy as np | |
from PIL import Image | |
#from astropy.io import fits | |
import pandas as pd | |
import matplotlib.pyplot as plt | |
import cv2 | |
import glob | |
# NOTE: This is useful only for *relative comparisons*. While absolute values are emitted, they are absolutely not calibrated or corrected for a number of sources of error. | |
# This script merely assumes those errors will be broadly identical or small enough to not impact for simple usage. | |
# TODO: Rewrite this so it doesn't make me feel sad. It's a monstrosity, code-wise. | |
class HorizonVisibilityAnalyser(object):
    """Estimate a per-azimuth horizon-altitude profile from a 360-degree
    equirectangular photo with a visible moon.

    The moon's predicted apparent alt/az (skyfield, JPL de421 ephemeris)
    anchors the image to true north; the horizon row is then detected in
    narrow azimuth slices by simple brightness thresholding.

    NOTE: Useful only for *relative comparisons*. Absolute values are not
    calibrated or corrected for projection, lens or levelling errors; the
    script assumes those errors are broadly identical or small.
    """

    def __init__(self, filename, topos):
        # Angular half-height of the band searched for the moon, absorbing
        # imperfect camera levelling (degrees).
        self.moon_buffer_degrees = 2.5
        # Half-width of each azimuth slice averaged for the horizon (degrees).
        self.angular_buffer_degrees = 3
        self.filename = filename
        # skyfield Topos of the observer; combined with the Earth ephemeris
        # in generate_csv().
        self.topos = topos
        # Load the photo up front; its size drives all pixel<->degree maths.
        self.image = Image.open(filename)
        self.xsize, self.ysize = self.image.size

    def get_horizon(self, pixels):
        """Return the mean horizon row (pixels from the top) of an image slice.

        For each column the horizon is taken as the lowest (max-y) pixel
        that survives thresholding; the per-column rows are then averaged.

        Raises ValueError if no pixel in the slice passes the threshold.
        """
        gray = cv2.cvtColor(pixels, cv2.COLOR_BGR2GRAY)
        blurred = cv2.medianBlur(gray, 3)
        # Fixed binary threshold: sky is assumed bright (>160) at twilight.
        ret, thresholded = cv2.threshold(blurred, 160, 255, cv2.THRESH_BINARY)
        # Morphological opening with a 3x3 kernel removes speckle noise
        # without compromising the binarisation.
        kernel = np.ones((3, 3), np.uint8)
        opened = cv2.morphologyEx(thresholded, cv2.MORPH_OPEN, kernel)
        # Vectorised per-column "lowest bright pixel" search (replaces a
        # per-pixel Python loop over np.ndenumerate).
        mask = opened == 255
        col_has_pixel = mask.any(axis=0)
        if not col_has_pixel.any():
            raise ValueError('no horizon pixels found in slice')
        row_index = np.arange(mask.shape[0])[:, None]
        lowest_per_col = np.where(mask, row_index, -1).max(axis=0)
        return float(lowest_per_col[col_has_pixel].mean())

    def generate_csv(self):
        """Locate the moon, derive the image's north offset from the moon's
        predicted azimuth, then write a per-azimuth horizon-altitude CSV
        to '<filename>.csv'.

        Raises RuntimeError if no bright-enough moon candidate is found.
        """
        # Housekeeping for skyfield calcs: timescale + JPL de421 ephemeris.
        ts = load.timescale()
        planets = load('de421.bsp')
        moon = planets['Moon']
        earth = planets['Earth']
        # TODO: derive the observer location from EXIF if possible.
        home = earth + self.topos
        # TODO: read capture time off EXIF data (EXIF is ISO8601 local time;
        # don't forget to map it to UTC).
        photo_capture_time = ts.utc(2019, 4, 14, 19, 9)
        # ICRF coords of the moon at capture time, then its apparent
        # alt/az as observed from the capture location.
        astrometric = home.at(photo_capture_time).observe(moon)
        apparent = astrometric.apparent()
        alt, az, dist = apparent.altaz()
        print(alt, az, dist)
        # Cast to numpy ndarray for pixel work.
        pix = np.array(self.image)
        # The predicted moon altitude limits the search to a thin band of
        # hopefully-mostly-sky. Assumes the equirectangular image spans
        # 180 degrees vertically — the most egregious approximation here.
        horizon_pixel_row = int(self.ysize / 2)
        moon_pixel_path = horizon_pixel_row - (horizon_pixel_row / (90 / alt.degrees))
        pixels_per_alt_degree = self.ysize / 180.0
        pixels_per_az_degree = self.xsize / 360.0
        # Buffer a bit because neither the levelling nor the model is perfect.
        moon_pixel_path_max = int(moon_pixel_path + (pixels_per_alt_degree * self.moon_buffer_degrees))
        moon_pixel_path_min = int(moon_pixel_path - (pixels_per_alt_degree * self.moon_buffer_degrees))
        # Crop the main image down to the region of interest.
        potential_moon_pixels = pix[moon_pixel_path_min:moon_pixel_path_max, 0:self.xsize]
        print(potential_moon_pixels.shape)
        # Grayscale, then blur to eliminate noise.
        gray_moon_pixels = cv2.cvtColor(potential_moon_pixels, cv2.COLOR_BGR2GRAY)
        blurred_moon_pixels = cv2.medianBlur(gray_moon_pixels, 5)
        # Binarize using a gaussian adaptive threshold, then denoise with a
        # 5x5 morphological opening that preserves the binarisation.
        thimg = cv2.adaptiveThreshold(blurred_moon_pixels, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 11, 4)
        kernel = np.ones((5, 5), np.uint8)
        opened_thimg = cv2.morphologyEx(thimg, cv2.MORPH_OPEN, kernel)
        # findContours returns (image, contours, hierarchy) on OpenCV 3.x but
        # (contours, hierarchy) on 2.x/4.x; indexing [-2] works on all.
        contours = cv2.findContours(opened_thimg, 1, 2)[-2]
        x_coords = []
        y_coords = []
        # The moon may fragment into several contours, so average the
        # centroids of every candidate that passes screening.
        for contour in contours:
            moments = cv2.moments(contour)
            if moments['m00'] == 0:
                # Degenerate (zero-area) contour — centroid undefined.
                continue
            x = int(moments['m10'] / moments['m00'])
            y = int(moments['m01'] / moments['m00'])
            # Final check against noise: the moon should be near-white.
            if all(pix[y + moon_pixel_path_min][x][c] > 200 for c in range(3)):
                x_coords.append(x)
                y_coords.append(y)
        if not x_coords:
            raise RuntimeError('moon not found in {}'.format(self.filename))
        cx = sum(x_coords) / len(x_coords)
        # cy is unused below but kept for debugging the detection band.
        cy = (sum(y_coords) / len(y_coords)) + moon_pixel_path_min
        # cx is the moon's column; its predicted azimuth then fixes which
        # column corresponds to true north.
        north_offset_pixels = az.degrees * pixels_per_az_degree
        north_col = int(cx - north_offset_pixels)
        print(cx, pixels_per_az_degree, north_col, north_offset_pixels)
        azimuths = []
        altitudes = []
        # Sample every 3 degrees, averaging the horizon over a narrow slice
        # of columns (wrapped, since azimuth is cyclic).
        for i in range(0, 359, 3):
            start_col = int(north_col + (pixels_per_az_degree * i) - (pixels_per_alt_degree * self.angular_buffer_degrees))
            end_col = int(north_col + (pixels_per_az_degree * i) + (pixels_per_alt_degree * self.angular_buffer_degrees))
            section_pixels = np.take(pix, range(start_col, end_col), axis=1, mode='wrap')
            horizon = self.get_horizon(section_pixels)
            azimuths.append(i)
            altitudes.append(180 - (horizon / pixels_per_alt_degree))
        df = pd.DataFrame({'Azimuth': azimuths, 'Altitude': altitudes})
        df.to_csv('{}.csv'.format(self.filename))
import glob

# Observer location; add your own here. The capture time is currently
# hard-coded in generate_csv — ideally both would come from EXIF.
topos = Topos('52.2 N', '1.1 W')

# Consume every JPEG in the working directory, best-effort: a failure on
# one file is reported and processing continues with the next.
for path in glob.glob('*.JPG'):
    try:
        HorizonVisibilityAnalyser(path, topos).generate_csv()
    except Exception as e:
        print("FAILED TO PROCESS {}".format(path))
        print(e)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment