Last active
October 7, 2023 15:37
-
-
Save ieb/9c337d68a4492db1571e to your computer and use it in GitHub Desktop.
Displaying AIS Data on Google Maps using the Google Maps API
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
# | |
# A Python AIVDM/AIVDO decoder | |
# | |
# This file is Copyright (c) 2010 by the GPSD project | |
# BSD terms apply: see the file COPYING in the distribution root for details. | |
# | |
# This decoder works by defining a declarative pseudolanguage in which | |
# to describe the process of extracting packed bitfields from an AIS | |
# message, a set of tables which contain instructions in the pseudolanguage, | |
# and a small amount of code for interpreting it. | |
# | |
# Known bugs: | |
# * Doesn't join parts A and B of Type 24 together yet. | |
# * Only handles the broadcast case of type 22. The problem is that the | |
# addressed field is located *after* the variant parts. Grrrr... | |
# * Message type 26 is presently unsupported. It hasn't been observed | |
# in the wild yet as of Jan 2010; not a lot of point in trying util | |
# we have test data. We'd need new machinery to constrain how many | |
# bits the data spec eats in order to recover the radio bits after it. | |
# * No support for IMO236 and IMO289 special messages in types 6 and 8 yet. | |
# | |
# Decoding for 1-15, 18-21, and 24 have been tested against live data. | |
# Decoding for 16-17, 22-23, and 25-27 have not. | |
# Here are the pseudoinstructions in the pseudolanguage. | |
class bitfield: | |
"Object defining the interpretation of an AIS bitfield." | |
# The only un-obvious detail is the use of the oob (out-of-band) | |
# member. This isn't used in data extraction, but rather to cut | |
# down on the number of custom formatting hooks. With this we | |
# handle the case where the field should be reported as an integer | |
# or "n/a". | |
def __init__(self, name, width, dtype, oob, legend, | |
validator=None, formatter=None, conditional=None): | |
self.name = name # Fieldname, for internal use and JSON | |
self.width = width # Bit width | |
self.type = dtype # Data type: signed/unsigned/string/raw | |
self.oob = oob # Out-of-band value to be shown as n/a | |
self.legend = legend # Human-friendly description of field | |
self.validator = validator # Validation checker | |
self.formatter = formatter # Custom reporting hook. | |
self.conditional = conditional # Evaluation guard for this field | |
class spare: | |
"Describes spare bits,, not to be interpreted." | |
def __init__(self, width, conditional=None): | |
self.width = width | |
self.conditional = conditional # Evaluation guard for this field | |
class dispatch: | |
"Describes how to dispatch to a message type variant on a subfield value." | |
def __init__(self, fieldname, subtypes, compute=lambda x: x, conditional=None): | |
self.fieldname = fieldname # Value of view to dispatch on | |
self.subtypes = subtypes # Possible subtypes to dispatch to | |
self.compute = compute # Pass value through this pre-dispatch | |
self.conditional = conditional # Evaluation guard for this field | |
# Message-type-specific information begins here. There are four | |
# different kinds of things in it: (1) string tables for expanding | |
# enumerated-type codes, (2) hook functions, (3) instruction tables, | |
# and (4) field group declarations. This is the part that could, in | |
# theory, be generated from a portable higher-level specification in | |
# XML; only the hook functions are actually language-specific, and | |
# your XML definition could in theory embed several different ones for | |
# code generation in Python, Java, Perl, etc. | |
cnb_status_legends = ( | |
"Under way using engine", | |
"At anchor", | |
"Not under command", | |
"Restricted manoeuverability", | |
"Constrained by her draught", | |
"Moored", | |
"Aground", | |
"Engaged in fishing", | |
"Under way sailing", | |
"Reserved for HSC", | |
"Reserved for WIG", | |
"Reserved", | |
"Reserved", | |
"Reserved", | |
"Reserved", | |
"Not defined", | |
) | |
def cnb_rot_format(n): | |
if n == -128: | |
return "n/a" | |
elif n == -127: | |
return "fastleft" | |
elif n == 127: | |
return "fastright" | |
else: | |
return str((n / 4.733) ** 2); | |
def cnb_latlon_format(n): | |
return str(n / 600000.0) | |
def cnb_speed_format(n): | |
if n == 1023: | |
return "n/a" | |
elif n == 1022: | |
return "fast" | |
else: | |
return str(n / 10.0); | |
def cnb_course_format(n): | |
return str(n / 10.0); | |
def cnb_second_format(n): | |
if n == 60: | |
return "n/a" | |
elif n == 61: | |
return "manual input" | |
elif n == 62: | |
return "dead reckoning" | |
elif n == 63: | |
return "inoperative" | |
else: | |
return str(n); | |
# Common Navigation Block is the format for AIS types 1, 2, and 3 | |
cnb = ( | |
bitfield("status", 4, 'unsigned', 0, "Navigation Status", | |
formatter=cnb_status_legends), | |
bitfield("turn", 8, 'signed', -128, "Rate of Turn", | |
formatter=cnb_rot_format), | |
bitfield("speed", 10, 'unsigned', 1023, "Speed Over Ground", | |
formatter=cnb_speed_format), | |
bitfield("accuracy", 1, 'unsigned', None, "Position Accuracy"), | |
bitfield("lon", 28, 'signed', 0x6791AC0, "Longitude", | |
formatter=cnb_latlon_format), | |
bitfield("lat", 27, 'signed', 0x3412140, "Latitude", | |
formatter=cnb_latlon_format), | |
bitfield("course", 12, 'unsigned', 0xe10, "Course Over Ground", | |
formatter=cnb_course_format), | |
bitfield("heading", 9, 'unsigned', 511, "True Heading"), | |
bitfield("second", 6, 'unsigned', None, "Time Stamp", | |
formatter=cnb_second_format), | |
bitfield("maneuver", 2, 'unsigned', None, "Maneuver Indicator"), | |
spare(3), | |
bitfield("raim", 1, 'unsigned', None, "RAIM flag"), | |
bitfield("radio", 19, 'unsigned', None, "Radio status"), | |
) | |
epfd_type_legends = ( | |
"Undefined", | |
"GPS", | |
"GLONASS", | |
"Combined GPS/GLONASS", | |
"Loran-C", | |
"Chayka", | |
"Integrated navigation system", | |
"Surveyed", | |
"Galileo", | |
) | |
type4 = ( | |
bitfield("year", 14, "unsigned", 0, "Year"), | |
bitfield("month", 4, "unsigned", 0, "Month"), | |
bitfield("day", 5, "unsigned", 0, "Day"), | |
bitfield("hour", 5, "unsigned", 24, "Hour"), | |
bitfield("minute", 6, "unsigned", 60, "Minute"), | |
bitfield("second", 6, "unsigned", 60, "Second"), | |
bitfield("accuracy", 1, "unsigned", None, "Fix quality"), | |
bitfield("lon", 28, "signed", 0x6791AC0, "Longitude", | |
formatter=cnb_latlon_format), | |
bitfield("lat", 27, "signed", 0x3412140, "Latitude", | |
formatter=cnb_latlon_format), | |
bitfield("epfd", 4, "unsigned", None, "Type of EPFD", | |
validator=lambda n: n >= 0 and n <= 8 or n == 15, | |
formatter=epfd_type_legends), | |
spare(10), | |
bitfield("raim", 1, "unsigned", None, "RAIM flag "), | |
bitfield("radio", 19, "unsigned", None, "SOTDMA state"), | |
) | |
ship_type_legends = ( | |
"Not available", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Wing in ground (WIG) - all ships of this type", | |
"Wing in ground (WIG) - Hazardous category A", | |
"Wing in ground (WIG) - Hazardous category B", | |
"Wing in ground (WIG) - Hazardous category C", | |
"Wing in ground (WIG) - Hazardous category D", | |
"Wing in ground (WIG) - Reserved for future use", | |
"Wing in ground (WIG) - Reserved for future use", | |
"Wing in ground (WIG) - Reserved for future use", | |
"Wing in ground (WIG) - Reserved for future use", | |
"Wing in ground (WIG) - Reserved for future use", | |
"Fishing", | |
"Towing", | |
"Towing: length exceeds 200m or breadth exceeds 25m", | |
"Dredging or underwater ops", | |
"Diving ops", | |
"Military ops", | |
"Sailing", | |
"Pleasure Craft", | |
"Reserved", | |
"Reserved", | |
"High speed craft (HSC) - all ships of this type", | |
"High speed craft (HSC) - Hazardous category A", | |
"High speed craft (HSC) - Hazardous category B", | |
"High speed craft (HSC) - Hazardous category C", | |
"High speed craft (HSC) - Hazardous category D", | |
"High speed craft (HSC) - Reserved for future use", | |
"High speed craft (HSC) - Reserved for future use", | |
"High speed craft (HSC) - Reserved for future use", | |
"High speed craft (HSC) - Reserved for future use", | |
"High speed craft (HSC) - No additional information", | |
"Pilot Vessel", | |
"Search and Rescue vessel", | |
"Tug", | |
"Port Tender", | |
"Anti-pollution equipment", | |
"Law Enforcement", | |
"Spare - Local Vessel", | |
"Spare - Local Vessel", | |
"Medical Transport", | |
"Ship according to RR Resolution No. 18", | |
"Passenger - all ships of this type", | |
"Passenger - Hazardous category A", | |
"Passenger - Hazardous category B", | |
"Passenger - Hazardous category C", | |
"Passenger - Hazardous category D", | |
"Passenger - Reserved for future use", | |
"Passenger - Reserved for future use", | |
"Passenger - Reserved for future use", | |
"Passenger - Reserved for future use", | |
"Passenger - No additional information", | |
"Cargo - all ships of this type", | |
"Cargo - Hazardous category A", | |
"Cargo - Hazardous category B", | |
"Cargo - Hazardous category C", | |
"Cargo - Hazardous category D", | |
"Cargo - Reserved for future use", | |
"Cargo - Reserved for future use", | |
"Cargo - Reserved for future use", | |
"Cargo - Reserved for future use", | |
"Cargo - No additional information", | |
"Tanker - all ships of this type", | |
"Tanker - Hazardous category A", | |
"Tanker - Hazardous category B", | |
"Tanker - Hazardous category C", | |
"Tanker - Hazardous category D", | |
"Tanker - Reserved for future use", | |
"Tanker - Reserved for future use", | |
"Tanker - Reserved for future use", | |
"Tanker - Reserved for future use", | |
"Tanker - No additional information", | |
"Other Type - all ships of this type", | |
"Other Type - Hazardous category A", | |
"Other Type - Hazardous category B", | |
"Other Type - Hazardous category C", | |
"Other Type - Hazardous category D", | |
"Other Type - Reserved for future use", | |
"Other Type - Reserved for future use", | |
"Other Type - Reserved for future use", | |
"Other Type - Reserved for future use", | |
"Other Type - no additional information", | |
) | |
type5 = ( | |
bitfield("ais_version", 2, 'unsigned', None, "AIS Version"), | |
bitfield("imo_id", 30, 'unsigned', 0, "IMO Identification Number"), | |
bitfield("callsign", 42, 'string', None, "Call Sign"), | |
bitfield("shipname", 120, 'string', None, "Vessel Name"), | |
bitfield("shiptype", 8, 'unsigned', None, "Ship Type", | |
#validator=lambda n: n >= 0 and n <= 99, | |
formatter=ship_type_legends), | |
bitfield("to_bow", 9, 'unsigned', 0, "Dimension to Bow"), | |
bitfield("to_stern", 9, 'unsigned', 0, "Dimension to Stern"), | |
bitfield("to_port", 6, 'unsigned', 0, "Dimension to Port"), | |
bitfield("to_starbord", 6, 'unsigned', 0, "Dimension to Starboard"), | |
bitfield("epfd", 4, 'unsigned', 0, "Position Fix Type", | |
validator=lambda n: n >= 0 and n <= 8 or n == 15, | |
formatter=epfd_type_legends), | |
bitfield("month", 4, 'unsigned', 0, "ETA month"), | |
bitfield("day", 5, 'unsigned', 0, "ETA day"), | |
bitfield("hour", 5, 'unsigned', 24, "ETA hour"), | |
bitfield("minute", 6, 'unsigned', 60, "ETA minute"), | |
bitfield("draught", 8, 'unsigned', 0, "Draught", | |
formatter=lambda n: n/10.0), | |
bitfield("destination", 120, 'string', None, "Destination"), | |
bitfield("dte", 1, 'unsigned', None, "DTE"), | |
spare(1), | |
) | |
type6_dac_or_fid_unknown = ( | |
bitfield("data", 920, 'raw', None, "Data"), | |
) | |
type6_dispatch = {} | |
type6_dispatch[0] = type6_dac_or_fid_unknown | |
# DAC 235 and 250 (UK, Rep. of Ireland) | |
type6_dac235_dispatch = {} | |
type6_dac235_dispatch[0] = type6_dac_or_fid_unknown | |
type6_dac235_fid10 = ( | |
bitfield("ana_int", 10, 'unsigned', None, "Supply voltage"), | |
bitfield("ana_ext1", 10, 'unsigned', None, "Analogue (Ext#1)"), | |
bitfield("ana_ext2", 10, 'unsigned', None, "Analogue (Ext#2)"), | |
bitfield("racon", 2, 'unsigned', None, "RACON status"), | |
bitfield("light", 2, 'unsigned', None, "Light status"), | |
bitfield("health", 1, 'unsigned', None, "Health"), | |
bitfield("stat_ext", 8, 'unsigned', None, "Status (ext)"), | |
bitfield("off_pos", 1, 'unsigned', None, "Position status"), | |
) | |
type6_dac235_dispatch[10] = type6_dac235_fid10 | |
type6_dac235 = ( | |
dispatch("fid", type6_dac235_dispatch, lambda m: m if m in type6_dac235_dispatch else 0), | |
) | |
type6_dispatch[235] = type6_dac235 | |
type6_dispatch[250] = type6_dac235 | |
type6 = ( | |
bitfield("seqno", 2, 'unsigned', None, "Sequence Number"), | |
bitfield("dest_mmsi", 30, 'unsigned', None, "Destination MMSI"), | |
bitfield("retransmit", 1, 'unsigned', None, "Retransmit flag"), | |
spare(1), | |
bitfield("dac", 10, 'unsigned', 0, "DAC"), | |
bitfield("fid", 6, 'unsigned', 0, "Functional ID"), | |
dispatch("dac", type6_dispatch, lambda m: m if m in type6_dispatch else 0), | |
) | |
type7 = ( | |
spare(2), | |
bitfield("mmsi1", 30, 'unsigned', 0, "MMSI number 1"), | |
spare(2), | |
bitfield("mmsi2", 30, 'unsigned', 0, "MMSI number 2"), | |
spare(2), | |
bitfield("mmsi3", 30, 'unsigned', 0, "MMSI number 3"), | |
spare(2), | |
bitfield("mmsi1", 30, 'unsigned', 0, "MMSI number 4"), | |
spare(2), | |
) | |
# | |
# Type 8 have subtypes identified by DAC (Designated Area Code) and FID (Functional ID) | |
# | |
def type8_latlon_format(n): | |
return str(n / 60000.0) | |
type8_dac_or_fid_unknown = ( | |
bitfield("data", 952, 'raw', None, "Data"), | |
) | |
type8_dispatch = {} | |
type8_dispatch[0] = type8_dac_or_fid_unknown | |
# DAC 1 (international) | |
type8_dac1_dispatch = {} | |
type8_dac1_dispatch[0] = type8_dac_or_fid_unknown | |
# DAC 1, FID 11: IMO236 Met/Hydro message | |
def type8_dac1_fid11_airtemp_format(n): | |
return str(n * 0.1 - 60) | |
def type8_dac1_fid11_dewpoint_format(n): | |
return str(n * 0.1 - 20) | |
def type8_dac1_fid11_pressure_format(n): | |
return str(n + 800) | |
def type8_dac1_fid11_visibility_format(n): | |
return str(n * 0.1) | |
def type8_dac1_fid11_waterlevel_format(n): | |
return str(n * 0.1 - 10) | |
def type8_dac1_fid11_cspeed_format(n): | |
return str(n * 0.1) | |
def type8_dac1_fid11_waveheight_format(n): | |
return str(n * 0.1) | |
type8_dac1_fid11_seastate_legend = ( | |
"Calm", | |
"Light air", | |
"Light breeze" | |
"Gentle breeze", | |
"Moderate breeze", | |
"Fresh breeze", | |
"Strong breeze", | |
"High wind", | |
"Gale", | |
"Strong gale", | |
"Storm", | |
"Violent storm", | |
"Hurricane force", | |
"Reserved", | |
"Reserved", | |
"Reserved" | |
) | |
def type8_dac1_fid11_watertemp_format(n): | |
return str(n * 0.1 - 10) | |
type8_dac1_fid11_preciptype_legend = ( | |
"Reserved", | |
"Rain", | |
"Thunderstorm", | |
"Freezing rain", | |
"Mixed/ice", | |
"Snow", | |
"Reserved", | |
"Reserved" | |
) | |
def type8_dac1_fid11_salinity_format(n): | |
return str(n * 0.1) | |
type8_dac1_fid11_ice_legend = ( | |
"Yes", | |
"No" | |
) | |
type8_dac1_fid11 = ( | |
bitfield("lat", 24, "signed", 2**24-1, "Latitude", | |
formatter=type8_latlon_format), | |
bitfield("lon", 25, "signed", 2**25-1, "Longitude", | |
formatter=type8_latlon_format), | |
bitfield("day", 5, 'unsigned', 0, "ETA day"), | |
bitfield("hour", 5, 'unsigned', 24, "ETA hour"), | |
bitfield("minute", 6, 'unsigned', 60, "ETA minute"), | |
bitfield("wspeed", 7, 'unsigned', 127, "Wind speed"), | |
bitfield("wgust", 7, 'unsigned', 127, "Wind gust"), | |
bitfield("wdir", 9, 'unsigned', 511, "Wind direction"), | |
bitfield("wgustdir", 9, 'unsigned', 511, "Wind gust direction"), | |
bitfield("airtemp", 11, 'unsigned', 2047, "Air temperature", | |
formatter=type8_dac1_fid11_airtemp_format), | |
bitfield("humidity", 7, 'unsigned', 127, "Relative humidity"), | |
bitfield("dewpoint", 10, 'unsigned', 1023, "Dew point", | |
formatter=type8_dac1_fid11_dewpoint_format), | |
bitfield("pressure", 9, 'unsigned', 511, "Atmospheric pressure", | |
formatter=type8_dac1_fid11_pressure_format), | |
bitfield("pressuretend", 2, 'unsigned', 3, "Atmospheric pressure tendency"), | |
bitfield("visibility", 8, 'unsigned', 255, "Horizontal visibility", | |
formatter=type8_dac1_fid11_visibility_format), | |
bitfield("waterlevel", 9, 'unsigned', 511, "Water level", | |
formatter=type8_dac1_fid11_waterlevel_format), | |
bitfield("leveltrend", 2, 'unsigned', 3, "Water level trend"), | |
bitfield("cspeed", 8, 'unsigned', 255, "Surface current speed", | |
formatter=type8_dac1_fid11_cspeed_format), | |
bitfield("cdir", 9, 'unsigned', 511, "Surface current direction"), | |
bitfield("cspeed2", 8, 'unsigned', 255, "Current speed #2", | |
formatter=type8_dac1_fid11_cspeed_format), | |
bitfield("cdir2", 9, 'unsigned', 511, "Current direction #2"), | |
bitfield("cdepth2", 5, 'unsigned', 31, "Current measuring level #2"), | |
bitfield("cspeed3", 8, 'unsigned', 255, "Current speed #3", | |
formatter=type8_dac1_fid11_cspeed_format), | |
bitfield("cdir3", 9, 'unsigned', 511, "Current direction #3"), | |
bitfield("cdepth3", 5, 'unsigned', 31, "Current measuring level #3"), | |
bitfield("waveheight", 8, 'unsigned', 255, "Significant wave height", | |
formatter=type8_dac1_fid11_waveheight_format), | |
bitfield("waveperiod", 6, 'unsigned', 63, "Significant wave period"), | |
bitfield("wavedir", 9, 'unsigned', 511, "Significant wave direction"), | |
bitfield("swellheight", 8, 'unsigned', 255, "Swell height", | |
formatter=type8_dac1_fid11_waveheight_format), | |
bitfield("swellperiod", 6, 'unsigned', 63, "Swell period"), | |
bitfield("swelldir", 9, 'unsigned', 511, "Swell direction"), | |
bitfield("seastate", 4, 'unsigned', 15, "Sea state", | |
formatter=type8_dac1_fid11_seastate_legend), | |
bitfield("watertemp", 10, 'unsigned', 1023, "Water temperature", | |
formatter=type8_dac1_fid11_watertemp_format), | |
bitfield("preciptype", 3, 'unsigned', 7, "Precipitation type", | |
formatter=type8_dac1_fid11_preciptype_legend), | |
bitfield("salinity", 9, 'unsigned', 511, "Salinity", | |
formatter=type8_dac1_fid11_salinity_format), | |
bitfield("ice", 2, 'unsigned', 3, "Ice?", | |
formatter=type8_dac1_fid11_ice_legend), | |
spare(6) | |
) | |
type8_dac1_dispatch[11] = type8_dac1_fid11 | |
type8_dac1 = ( | |
dispatch("fid", type8_dac1_dispatch, lambda m: m if m in type8_dac1_dispatch else 0), | |
) | |
type8_dispatch[1] = type8_dac1 | |
type8 = ( | |
spare(2), | |
bitfield("dac", 10, 'unsigned', 0, "DAC"), | |
bitfield("fid", 6, 'unsigned', 0, "Functional ID"), | |
dispatch("dac", type8_dispatch, lambda m: m if m in type8_dispatch else 0), | |
) | |
def type9_alt_format(n): | |
if n == 4094: | |
return ">=4094" | |
else: | |
return str(n) | |
def type9_speed_format(n): | |
if n == 1023: | |
return "n/a" | |
elif n == 1022: | |
return "fast" | |
else: | |
return str(n); | |
type9 = ( | |
bitfield("alt", 12, 'unsigned', 4095, "Altitude", | |
formatter=type9_alt_format), | |
bitfield("speed", 10, 'unsigned', 1023, "SOG", | |
formatter=type9_speed_format), | |
bitfield("accuracy", 1, 'unsigned', None, "Position Accuracy"), | |
bitfield("lon", 28, 'signed', 0x6791AC0, "Longitude", | |
formatter=cnb_latlon_format), | |
bitfield("lat", 27, 'signed', 0x3412140, "Latitude", | |
formatter=cnb_latlon_format), | |
bitfield("course", 12, 'unsigned', 0xe10, "Course Over Ground", | |
formatter=cnb_course_format), | |
bitfield("second", 6, 'unsigned', 60, "Time Stamp", | |
formatter=cnb_second_format), | |
bitfield("regional", 8, 'unsigned', None, "Regional reserved"), | |
bitfield("dte", 1, 'unsigned', None, "DTE"), | |
spare(3), | |
bitfield("assigned", 1, 'unsigned', None, "Assigned"), | |
bitfield("raim", 1, 'unsigned', None, "RAIM flag"), | |
bitfield("radio", 19, 'unsigned', None, "Radio status"), | |
) | |
type10 = ( | |
spare(2), | |
bitfield("dest_mmsi", 30, 'unsigned', None, "Destination MMSI"), | |
spare(2), | |
) | |
type12 = ( | |
bitfield("seqno", 2, 'unsigned', None, "Sequence Number"), | |
bitfield("dest_mmsi", 30, 'unsigned', None, "Destination MMSI"), | |
bitfield("retransmit", 1, 'unsigned', None, "Retransmit flag"), | |
spare(1), | |
bitfield("text", 936, 'string', None, "Text"), | |
) | |
type14 = ( | |
spare(2), | |
bitfield("text", 968, 'string', None, "Text"), | |
) | |
type15 = ( | |
spare(2), | |
bitfield("mmsi1", 30, 'unsigned', 0, "First interrogated MMSI"), | |
bitfield("type1_1", 6, 'unsigned', 0, "First message type"), | |
bitfield("offset1_1", 12, 'unsigned', 0, "First slot offset"), | |
spare(2), | |
bitfield("type1_2", 6, 'unsigned', 0, "Second message type"), | |
bitfield("offset1_2", 12, 'unsigned', 0, "Second slot offset"), | |
spare(2), | |
bitfield("mmsi2", 30, 'unsigned', 0, "Second interrogated MMSI"), | |
bitfield("type2_1", 6, 'unsigned', 0, "Message type"), | |
bitfield("offset2_1", 12, 'unsifned', 0, "Slot offset"), | |
spare(2), | |
) | |
type16 = ( | |
spare(2), | |
bitfield("mmsi1", 30, 'unsigned', 0, "Interrogated MMSI 1"), | |
bitfield("offset1", 12, 'unsigned', 0, "First slot offset"), | |
bitfield("increment1",10, 'unsigned', 0, "First slot increment"), | |
bitfield("mmsi2", 30, 'unsigned', 0, "Interrogated MMSI 2"), | |
bitfield("offset2", 12, 'unsigned', 0, "Second slot offset"), | |
bitfield("increment2",10, 'unsigned', 0, "Second slot increment"), | |
spare(2), | |
) | |
def short_latlon_format(n): | |
return str(n / 600.0) | |
type17 = ( | |
spare(2), | |
bitfield("lon", 18, 'signed', 0x1a838, "Longitude", | |
formatter=short_latlon_format), | |
bitfield("lat", 17, 'signed', 0xd548, "Latitude", | |
formatter=short_latlon_format), | |
spare(5), | |
bitfield("data", 736, 'raw', None, "DGNSS data"), | |
) | |
type18 = ( | |
bitfield("reserved", 8, 'unsigned', None, "Regional reserved"), | |
bitfield("speed", 10, 'unsigned', 1023, "Speed Over Ground", | |
formatter=cnb_speed_format), | |
bitfield("accuracy", 1, 'unsigned', None, "Position Accuracy"), | |
bitfield("lon", 28, 'signed', 0x6791AC0, "Longitude", | |
formatter=cnb_latlon_format), | |
bitfield("lat", 27, 'signed', 0x3412140, "Latitude", | |
formatter=cnb_latlon_format), | |
bitfield("course", 12, 'unsigned', 0xE10, "Course Over Ground", | |
formatter=cnb_course_format), | |
bitfield("heading", 9, 'unsigned', 511, "True Heading"), | |
bitfield("second", 6, 'unsigned', None, "Time Stamp", | |
formatter=cnb_second_format), | |
bitfield("regional", 2, 'unsigned', None, "Regional reserved"), | |
bitfield("cs", 1, 'unsigned', None, "CS Unit"), | |
bitfield("display", 1, 'unsigned', None, "Display flag"), | |
bitfield("dsc", 1, 'unsigned', None, "DSC flag"), | |
bitfield("band", 1, 'unsigned', None, "Band flag"), | |
bitfield("msg22", 1, 'unsigned', None, "Message 22 flag"), | |
bitfield("assigned", 1, 'unsigned', None, "Assigned"), | |
bitfield("raim", 1, 'unsigned', None, "RAIM flag"), | |
bitfield("radio", 20, 'unsigned', None, "Radio status"), | |
) | |
type19 = ( | |
bitfield("reserved", 8, 'unsigned', None, "Regional reserved"), | |
bitfield("speed", 10, 'unsigned', 1023, "Speed Over Ground", | |
formatter=cnb_speed_format), | |
bitfield("accuracy", 1, 'unsigned', None, "Position Accuracy"), | |
bitfield("lon", 28, 'signed', 0x6791AC0, "Longitude", | |
formatter=cnb_latlon_format), | |
bitfield("lat", 27, 'signed', 0x3412140, "Latitude", | |
formatter=cnb_latlon_format), | |
bitfield("course", 12, 'unsigned', 0xE10, "Course Over Ground", | |
formatter=cnb_course_format), | |
bitfield("heading", 9, 'unsigned', 511, "True Heading"), | |
bitfield("second", 6, 'unsigned', None, "Time Stamp", | |
formatter=cnb_second_format), | |
bitfield("regional", 4, 'unsigned', None, "Regional reserved"), | |
bitfield("shipname", 120, 'string', None, "Vessel Name"), | |
bitfield("shiptype", 8, 'unsigned', None, "Ship Type", | |
#validator=lambda n: n >= 0 and n <= 99, | |
formatter=ship_type_legends), | |
bitfield("to_bow", 9, 'unsigned', 0, "Dimension to Bow"), | |
bitfield("to_stern", 9, 'unsigned', 0, "Dimension to Stern"), | |
bitfield("to_port", 6, 'unsigned', 0, "Dimension to Port"), | |
bitfield("to_starbord", 6, 'unsigned', 0, "Dimension to Starboard"), | |
bitfield("epfd", 4, 'unsigned', 0, "Position Fix Type", | |
validator=lambda n: n >= 0 and n <= 8 or n == 15, | |
formatter=epfd_type_legends), | |
bitfield("assigned", 1, 'unsigned', None, "Assigned"), | |
bitfield("raim", 1, 'unsigned', None, "RAIM flag"), | |
bitfield("radio", 20, 'unsigned', None, "Radio status"), | |
) | |
type20 = ( | |
spare(2), | |
bitfield("offset1", 12, 'unsigned', 0, "Offset number"), | |
bitfield("number1", 4, 'unsigned', 0, "Reserved slots"), | |
bitfield("timeout1", 3, 'unsigned', 0, "Time-out"), | |
bitfield("increment1", 11, 'unsigned', 0, "Increment"), | |
bitfield("offset2", 12, 'unsigned', 0, "Offset number 2"), | |
bitfield("number2", 4, 'unsigned', 0, "Reserved slots"), | |
bitfield("timeout2", 3, 'unsigned', 0, "Time-out"), | |
bitfield("increment2", 11, 'unsigned', 0, "Increment"), | |
bitfield("offset3", 12, 'unsigned', 0, "Offset number 3"), | |
bitfield("number3", 4, 'unsigned', 0, "Reserved slots"), | |
bitfield("timeout3", 3, 'unsigned', 0, "Time-out"), | |
bitfield("increment3", 11, 'unsigned', 0, "Increment"), | |
bitfield("offset4", 12, 'unsigned', 0, "Offset number 4"), | |
bitfield("number4", 4, 'unsigned', 0, "Reserved slots"), | |
bitfield("timeout4", 3, 'unsigned', 0, "Time-out"), | |
bitfield("increment4", 11, 'unsigned', 0, "Increment"), | |
) | |
aide_type_legends = ( | |
"Unspecified", | |
"Reference point", | |
"RACON", | |
"Fixed offshore structure", | |
"Spare, Reserved for future use.", | |
"Light, without sectors", | |
"Light, with sectors", | |
"Leading Light Front", | |
"Leading Light Rear", | |
"Beacon, Cardinal N", | |
"Beacon, Cardinal E", | |
"Beacon, Cardinal S", | |
"Beacon, Cardinal W", | |
"Beacon, Port hand", | |
"Beacon, Starboard hand", | |
"Beacon, Preferred Channel port hand", | |
"Beacon, Preferred Channel starboard hand", | |
"Beacon, Isolated danger", | |
"Beacon, Safe water", | |
"Beacon, Special mark", | |
"Cardinal Mark N", | |
"Cardinal Mark E", | |
"Cardinal Mark S", | |
"Cardinal Mark W", | |
"Port hand Mark", | |
"Starboard hand Mark", | |
"Preferred Channel Port hand", | |
"Preferred Channel Starboard hand", | |
"Isolated danger", | |
"Safe Water", | |
"Special Mark", | |
"Light Vessel / LANBY / Rigs", | |
) | |
type21 = ( | |
bitfield("aid_type", 5, 'unsigned', 0, "Aid type", | |
formatter=aide_type_legends), | |
bitfield("name", 120, 'string', None, "Name"), | |
bitfield("accuracy", 1, 'unsigned', 0, "Position Accuracy"), | |
bitfield("lon", 28, 'signed', 0x6791AC0, "Longitude", | |
formatter=cnb_latlon_format), | |
bitfield("lat", 27, 'signed', 0x3412140, "Latitude", | |
formatter=cnb_latlon_format), | |
bitfield("to_bow", 9, 'unsigned', 0, "Dimension to Bow"), | |
bitfield("to_stern", 9, 'unsigned', 0, "Dimension to Stern"), | |
bitfield("to_port", 6, 'unsigned', 0, "Dimension to Port"), | |
bitfield("to_starboard", 6, 'unsigned', 0, "Dimension to Starboard"), | |
bitfield("epfd", 4, 'unsigned', 0, "Position Fix Type", | |
validator=lambda n: n >= 0 and n <= 8 or n == 15, | |
formatter=epfd_type_legends), | |
bitfield("second", 6, 'unsigned', 0, "UTC Second"), | |
bitfield("off_position", 1, 'unsigned', 0, "Off-Position Indicator"), | |
bitfield("regional", 8, 'unsigned', 0, "Regional reserved"), | |
bitfield("raim", 1, 'unsigned', 0, "RAIM flag"), | |
bitfield("virtual_aid", 1, 'unsigned', 0, "Virtual-aid flag"), | |
bitfield("assigned", 1, 'unsigned', 0, "Assigned-mode flag"), | |
spare(1), | |
bitfield("name", 88, 'string', 0, "Name Extension"), | |
) | |
type22 = ( | |
spare(2), | |
bitfield("channel_a", 12, 'unsigned', 0, "Channel A"), | |
bitfield("channel_b", 12, 'unsigned', 0, "Channel B"), | |
bitfield("txrx", 4, 'unsigned', 0, "Tx/Rx mode"), | |
bitfield("power", 1, 'unsigned', 0, "Power"), | |
bitfield("ne_lon", 18, 'signed', 0x1a838, "NE Longitude", | |
formatter=short_latlon_format), | |
bitfield("ne_lat", 17, 'signed', 0xd548, "NE Latitude", | |
formatter=short_latlon_format), | |
bitfield("sw_lon", 18, 'signed', 0x1a838, "SW Longitude", | |
formatter=short_latlon_format), | |
bitfield("sw_lat", 17, 'signed', 0xd548, "SW Latitude", | |
formatter=short_latlon_format), | |
bitfield("addressed", 1, 'unsigned', 0, "Addressed"), | |
bitfield("band_a", 1, 'unsigned', 0, "Channel A Band"), | |
bitfield("band_a", 1, 'unsigned', 0, "Channel A Band"), | |
bitfield("zonesize", 3, 'unsigned', 0, "Zone size"), | |
spare(23), | |
) | |
station_type_legends = ( | |
"All types of mobiles", | |
"Reserved for future use", | |
"All types of Class B mobile stations", | |
"SAR airborne mobile station", | |
"Aid to Navigation station", | |
"Class B shipborne mobile station", | |
"Regional use and inland waterways", | |
"Regional use and inland waterways", | |
"Regional use and inland waterways", | |
"Regional use and inland waterways", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
"Reserved for future use", | |
) | |
type23 = ( | |
spare(2), | |
bitfield("ne_lon", 18, 'signed', 0x1a838, "NE Longitude", | |
formatter=short_latlon_format), | |
bitfield("ne_lat", 17, 'signed', 0xd548, "NE Latitude", | |
formatter=short_latlon_format), | |
bitfield("sw_lon", 18, 'signed', 0x1a838, "SW Longitude", | |
formatter=short_latlon_format), | |
bitfield("sw_lat", 17, 'signed', 0xd548, "SW Latitude", | |
formatter=short_latlon_format), | |
bitfield("stationtype",4, 'unsigned', 0, "Station Type", | |
validator=lambda n: n >= 0 and n <= 31, | |
formatter=station_type_legends), | |
bitfield("shiptype", 8, 'unsigned', 0, "Ship Type", | |
#validator=lambda n: n >= 0 and n <= 99, | |
formatter=ship_type_legends), | |
spare(22), | |
bitfield("txrx", 2, 'unsigned', 0, "Tx/Rx mode"), | |
bitfield("interval", 4, 'unsigned', 0, "Reporting interval"), | |
bitfield("txrx", 4, 'unsigned', 0, "Quiet time"), | |
) | |
type24a = ( | |
bitfield("shipname", 120, 'string', None, "Vessel Name"), | |
spare(8), | |
) | |
type24b1 = ( | |
bitfield("callsign", 42, 'string', None, "Call Sign"), | |
bitfield("to_bow", 9, 'unsigned', 0, "Dimension to Bow"), | |
bitfield("to_stern", 9, 'unsigned', 0, "Dimension to Stern"), | |
bitfield("to_port", 6, 'unsigned', 0, "Dimension to Port"), | |
bitfield("to_starbord", 6, 'unsigned', 0, "Dimension to Starboard"), | |
spare(8), | |
) | |
type24b2 = ( | |
bitfield('mothership_mmsi', 30, 'unsigned', 0, "Mothership MMSI"), | |
spare(8), | |
) | |
type24b = ( | |
bitfield("shiptype", 8, 'unsigned', None, "Ship Type", | |
validator=lambda n: n >= 0 and n <= 99, | |
formatter=ship_type_legends), | |
bitfield("vendorid", 42, 'string', None, "Vendor ID"), | |
dispatch("mmsi", {0:type24b1, 1:type24b2}, lambda m: 1 if `m`[:2]=='98' else 0), | |
) | |
type24 = ( | |
bitfield('partno', 2, 'unsigned', None, "Part Number"), | |
dispatch('partno', {0:type24a, 1:type24b}), | |
) | |
type25 = ( | |
bitfield("addressed", 1, 'unsigned', None, "Addressing flag"), | |
bitfield("structured", 1, 'unsigned', None, "Dimension to Bow"), | |
bitfield("dest_mmsi", 30, 'unsigned', 0, "Destinstion MMSI", | |
conditional=lambda i, v: v["addressed"]), | |
bitfield("app_id", 16, 'unsigned', 0, "Application ID", | |
conditional=lambda i, v: v["structured"]), | |
bitfield("data", 0, 'raw', None, "Data"), | |
) | |
# No type 26 handling yet, | |
type27 = ( | |
bitfield("accuracy", 1, 'unsigned', None, "Position Accuracy"), | |
bitfield("raim", 1, 'unsigned', None, "RAIM flag"), | |
bitfield("status", 4, 'unsigned', 0, "Navigation Status", | |
formatter=cnb_status_legends), | |
bitfield("lon", 18, 'signed', 0x1a838, "Longitude", | |
formatter=short_latlon_format), | |
bitfield("lat", 17, 'signed', 0xd548, "Latitude", | |
formatter=short_latlon_format), | |
bitfield("speed", 6, 'unsigned', 63, "Speed Over Ground", | |
formatter=cnb_speed_format), | |
bitfield("course", 9, 'unsigned', 511, "Course Over Ground"), | |
bitfield("GNSS", 1, 'unsigned', None, "GNSS flag"), | |
spare(1), | |
) | |
aivdm_decode = ( | |
bitfield('msgtype', 6, 'unsigned', 0, "Message Type", | |
validator=lambda n: n > 0 and n <= 27), | |
bitfield('repeat', 2, 'unsigned', None, "Repeat Indicator"), | |
bitfield('mmsi', 30, 'unsigned', 0, "MMSI"), | |
# This is the master dispatch on AIS message type | |
dispatch('msgtype', {0:None, 1:cnb, 2:cnb, 3:cnb, 4:type4, | |
5:type5, 6:type6, 7:type7, 8:type8, 9:type9, | |
10:type10, 11:type4, 12:type12, 13:type7, 14:type14, | |
15:type15, 16:type16,17:type17, 18:type18,19:type19, | |
20:type20, 21:type21,22:type22, 23:type23,24:type24, | |
25:type25, 26:None, 27:type27}), | |
) | |
# Length ranges. We use this for integrity checking. | |
# When a range is a tuple, it's (minimum, maximum). | |
lengths = { | |
1: 168, | |
2: 168, | |
3: 168, | |
4: 168, | |
5: 424, | |
6: (88, 1008), | |
7: (72, 168), | |
8: (56, 1008), | |
9: 168, | |
10: 72, | |
11: 168, | |
12: (72, 1008), | |
13: (72, 168), | |
14: (40, 1008), | |
15: (88, 168), | |
16: (96, 144), | |
17: (80, 816), | |
18: 168, | |
19: 312, | |
20: (72, 160), | |
21: (272, 360), | |
22: 168, | |
23: 160, | |
24: (160, 168), | |
25: 168, | |
26: (60, 1004), | |
27: 96, | |
} | |
field_groups = ( | |
# This one occurs in message type 4 | |
(3, ["year", "month", "day", "hour", "minute", "second"], | |
"time", "Timestamp", | |
lambda y, m, d, h, n, s: "%02d-%02d-%02dT%02d:%02d:%02dZ" % (y, m, d, h, n, s)), | |
# This one is in message 5 | |
(13, ["month", "day", "hour", "minute", "second"], | |
"eta", "Estimated Time of Arrival", | |
lambda m, d, h, n, s: "%02d-%02dT%02d:%02d:%02dZ" % (m, d, h, n, s)), | |
) | |
# Message-type-specific information ends here. | |
# | |
# Next, the execution machinery for the pseudolanguage. There isn't much of | |
# this: the whole point of the design is to embody most of the information | |
# about the AIS format in the pseudoinstruction tables. | |
from array import array | |
BITS_PER_BYTE = 8 | |
class BitVector: | |
"Fast bit-vector class based on Python built-in array type." | |
def __init__(self, data=None, length=None): | |
self.bits = array('B') | |
self.bitlen = 0 | |
if data is not None: | |
self.bits.extend(data) | |
if length is None: | |
self.bitlen = len(data) * 8 | |
else: | |
self.bitlen = length | |
def extend_to(self, length): | |
"Extend vector to given bitlength." | |
if length > self.bitlen: | |
self.bits.extend([0]*((length - self.bitlen +7 )/8)) | |
self.bitlen = length | |
def from_sixbit(self, data, pad=0): | |
"Initialize bit vector from AIVDM-style six-bit armoring." | |
self.bits.extend([0] * len(data)) | |
for ch in data: | |
ch = ord(ch) - 48 | |
if ch > 40: | |
ch -= 8 | |
for i in (5, 4, 3, 2, 1, 0): | |
if (ch >> i) & 0x01: | |
self.bits[self.bitlen/8] |= (1 << (7 - self.bitlen % 8)) | |
self.bitlen += 1 | |
self.bitlen -= pad | |
def ubits(self, start, width): | |
"Extract a (zero-origin) bitfield from the buffer as an unsigned int." | |
fld = 0 | |
for i in range(start/BITS_PER_BYTE, (start + width + BITS_PER_BYTE - 1) / BITS_PER_BYTE): | |
fld <<= BITS_PER_BYTE | |
fld |= self.bits[i] | |
end = (start + width) % BITS_PER_BYTE | |
if end != 0: | |
fld >>= (BITS_PER_BYTE - end) | |
fld &= ~(-1 << width) | |
return fld | |
def sbits(self, start, width): | |
"Extract a (zero-origin) bitfield from the buffer as a signed int." | |
fld = self.ubits(start, width); | |
if fld & (1 << (width-1)): | |
fld = -(2 ** width - fld) | |
return fld | |
def __len__(self): | |
return self.bitlen | |
def __repr__(self): | |
"Used for dumping binary data." | |
return str(self.bitlen) + ":" + "".join(map(lambda d: "%02x" % d, self.bits[:(self.bitlen + 7)/8])) | |
import sys, exceptions, re | |
class AISUnpackingException(exceptions.Exception): | |
def __init__(self, lc, fieldname, value): | |
self.lc = lc | |
self.fieldname = fieldname | |
self.value = value | |
def __repr__(self): | |
return "%d: validation on fieldname %s failed (value %s)" % (self.lc, self.fieldname, self.value) | |
def aivdm_unpack(lc, data, offset, values, instructions): | |
"Unpack fields from data according to instructions." | |
cooked = [] | |
for inst in instructions: | |
if offset >= len(data): | |
break | |
elif inst.conditional is not None and not inst.conditional(inst,values): | |
continue | |
elif isinstance(inst, spare): | |
offset += inst.width | |
elif isinstance(inst, dispatch): | |
i = inst.compute(values[inst.fieldname]) | |
# This is the recursion that lets us handle variant types | |
cooked += aivdm_unpack(lc, data, offset, values, inst.subtypes[i]) | |
elif isinstance(inst, bitfield): | |
if inst.type == 'unsigned': | |
value = data.ubits(offset, inst.width) | |
elif inst.type == 'signed': | |
value = data.sbits(offset, inst.width) | |
elif inst.type == 'string': | |
value = '' | |
# The try/catch error here is in case we run off the end | |
# of a variable-length string field, as in messages 12 and 14 | |
try: | |
for i in range(inst.width/6): | |
newchar = "@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^- !\"#$%&'()*+,-./0123456789:;<=>?"[data.ubits(offset + 6*i, 6)] | |
if newchar == '@': | |
break | |
else: | |
value += newchar | |
except IndexError: | |
pass | |
value = value.replace("@", " ").rstrip() | |
elif inst.type == 'raw': | |
# Note: Doesn't rely on the length. | |
value = BitVector(data.bits[offset/8:], len(data)-offset) | |
values[inst.name] = value | |
if inst.validator and not inst.validator(value): | |
raise AISUnpackingException(lc, inst.name, value) | |
offset += inst.width | |
# An important thing about the unpacked representation this | |
# generates is that it carries forward the meta-information from | |
# the field type definition. This stuff is then available for | |
# use by report-generating code. | |
cooked.append([inst, value]) | |
return cooked | |
def packet_scanner(source): | |
"Get a span of AIVDM packets with contiguous fragment numbers." | |
payloads = {'A':'', 'B':''} | |
raw = '' | |
well_formed = False | |
lc = 0 | |
while True: | |
lc += 1; | |
line = source.readline() | |
if not line: | |
return | |
raw += line | |
line = line.strip() | |
# Strip off USCG metadata | |
line = re.sub(r"(?<=\*[0-9A-F][0-9A-F]),.*", "", line) | |
# Compute CRC-16 checksum | |
packet = line[1:-3] # Strip leading !, trailing * and CRC | |
csum = 0 | |
for c in packet: | |
csum ^= ord(c) | |
csum = "%02X" % csum | |
# Ignore comments | |
if not line.startswith("!"): | |
continue | |
# Assemble fragments from single- and multi-line payloads | |
fields = line.split(",") | |
try: | |
expect = fields[1] | |
fragment = fields[2] | |
channel = fields[4] | |
if fragment == '1': | |
payloads[channel] = '' | |
well_formed = True | |
payloads[channel] += fields[5] | |
try: | |
# This works because a mangled pad literal means | |
# a malformed packet that will be caught by the CRC check. | |
pad = int(fields[6].split('*')[0]) | |
except ValueError: | |
pad = 0 | |
crc = fields[6].split('*')[1].strip() | |
except IndexError: | |
if skiperr: | |
sys.stderr.write("%d: malformed line: %s\n" % (lc, line.strip())) | |
well_formed = False | |
else: | |
raise AISUnpackingException(lc, "checksum", crc) | |
if csum != crc: | |
if skiperr: | |
sys.stderr.write("%d: bad checksum %s, expecting %s: %s\n" % (lc, `crc`, csum, line.strip())) | |
well_formed = False | |
else: | |
raise AISUnpackingException(lc, "checksum", crc) | |
if fragment < expect or not well_formed: | |
continue | |
# Render assembled payload to packed bytes | |
bits = BitVector() | |
bits.from_sixbit(payloads[channel], pad) | |
yield (lc, raw, bits) | |
raw = '' | |
def postprocess(cooked): | |
"Postprocess cooked fields from a message." | |
# Handle type 21 name extension | |
if cooked[0][1] == 21 and len(cooked) > 19: | |
cooked[4][1] += cooked[19][1] | |
cooked.pop(-1) | |
return cooked | |
def parse_ais_messages(source, scaled=False, skiperr=False, verbose=0): | |
"Generator code - read forever from source stream, parsing AIS messages." | |
values = {} | |
for (lc, raw, bits) in packet_scanner(source): | |
values['length'] = bits.bitlen | |
# Without the following magic, we'd have a subtle problem near | |
# certain variable-length messages: DSV reports would | |
# sometimes have fewer fields than expected, because the | |
# unpacker would never generate cooked tuples for the omitted | |
# part of the message. Presently a known issue for types 15 | |
# and 16 only. (Would never affect variable-length messages in | |
# which the last field type is 'string' or 'raw'). | |
bits.extend_to(168) | |
# Magic recursive unpacking operation | |
try: | |
cooked = aivdm_unpack(lc, bits, 0, values, aivdm_decode) | |
# We now have a list of tuples containing unpacked fields | |
# Collect some field groups into ISO8601 format | |
for (offset, template, label, legend, formatter) in field_groups: | |
segment = cooked[offset:offset+len(template)] | |
if map(lambda x: x[0], segment) == template: | |
group = formatter(*map(lambda x: x[1], segment)) | |
group = (label, group, 'string', legend, None) | |
cooked = cooked[:offset]+[group]+cooked[offset+len(template):] | |
# Apply the postprocessor stage | |
cooked = postprocess(cooked) | |
# Now apply custom formatting hooks. | |
if scaled: | |
for (i, (inst, value)) in enumerate(cooked): | |
if value == inst.oob: | |
cooked[i][1] = "n/a" | |
elif inst.formatter: | |
if type(inst.formatter) == type(()): | |
# Assumes 0 is the legend for the "undefined" value | |
if value >= len(inst.formatter): | |
value = 0 | |
cooked[i][1] = inst.formatter[value] | |
elif type(formatter) == type(lambda x: x): | |
cooked[i][1] = inst.formatter(value) | |
expected = lengths.get(values['msgtype'], None) | |
# Check length; has to be done after so we have the type field | |
bogon = False | |
if expected is not None: | |
if type(expected) == type(0): | |
expected_range = (expected, expected) | |
else: | |
expected_range = expected | |
actual = values['length'] | |
if not (actual >= expected_range[0] and actual <= expected_range[1]): | |
bogon = True | |
if skiperr: | |
sys.stderr.write("%d: type %d expected %s bits but saw %s: %s\n" % (lc, values['msgtype'], expected, actual, raw.strip().split())) | |
else: | |
raise AISUnpackingException(lc, "length", actual) | |
# We're done, hand back a decoding | |
values = {} | |
yield (raw, cooked, bogon) | |
raw = '' | |
except KeyboardInterrupt: | |
raise KeyboardInterrupt | |
except GeneratorExit: | |
raise GeneratorExit | |
except AISUnpackingException, e: | |
if skiperr: | |
sys.stderr.write("%s: %s\n" % (`e`, raw.strip().split())) | |
continue | |
else: | |
raise | |
except: | |
(exc_type, exc_value, exc_traceback) = sys.exc_info() | |
sys.stderr.write("%d: Unknown exception: %s\n" % (lc, raw.strip().split())) | |
if skiperr: | |
continue | |
else: | |
raise exc_type, exc_value, exc_traceback | |
# The rest is just sequencing and report generation. | |
if __name__ == "__main__": | |
import sys, getopt | |
try: | |
(options, arguments) = getopt.getopt(sys.argv[1:], "cdhjmqst:vx") | |
except getopt.GetoptError, msg: | |
print "ais.py: " + str(msg) | |
raise SystemExit, 1 | |
dsv = False | |
dump = False | |
histogram = False | |
json = False | |
malformed = False | |
quiet = False | |
scaled = False | |
types = [] | |
frequencies = {} | |
verbose = 0 | |
skiperr = True | |
for (switch, val) in options: | |
if switch == '-c': # Report in DSV format rather than JSON | |
dsv = True | |
elif switch == '-d': # Dump in a more human-readable format | |
dump = True | |
elif switch == '-h': # Make a histogram of type frequencies | |
histogram = True | |
elif switch == '-j': # Dump JSON | |
json = True | |
elif switch == '-m': # Dump malformed AIVDM/AIVDO packets raw | |
malformed = True | |
elif switch == '-q': # Suppress output | |
quiet = True | |
elif switch == '-s': # Report AIS in scaled form | |
scaled = True | |
elif switch == '-t': # Filter for a comma-separated list of types | |
types = map(int, val.split(",")) | |
elif switch == '-v': # Dump raw packet before JSON or DSV. | |
verbose += 1 | |
elif switch == '-x': # Skip decoding errors | |
skiperr = False | |
if not dsv and not histogram and not json and not malformed and not quiet: | |
dump = True | |
try: | |
for (raw, parsed, bogon) in parse_ais_messages(sys.stdin, scaled, skiperr, verbose): | |
msgtype = parsed[0][1] | |
if types and msgtype not in types: | |
continue | |
if verbose >= 1 or (bogon and malformed): | |
sys.stdout.write(raw) | |
if not bogon: | |
if json: | |
print "{" + ",".join(map(lambda x: '"' + x[0].name + '":' + str(x[1]), parsed)) + "}" | |
elif dsv: | |
print "|".join(map(lambda x: str(x[1]), parsed)) | |
elif histogram: | |
key = "%02d" % msgtype | |
frequencies[key] = frequencies.get(key, 0) + 1 | |
if msgtype == 6 or msgtype == 8: | |
dac = 0; fid = 0 | |
if msgtype == 8: | |
dac = parsed[3][1] | |
fid = parsed[4][1] | |
elif msgtype == 6: | |
dac = parsed[6][1] | |
fid = parsed[7][1] | |
key = "%02d_%04d_%02d" % (msgtype, dac, fid) | |
frequencies[key] = frequencies.get(key, 0) + 1 | |
elif dump: | |
for (inst, value) in parsed: | |
print "%-25s: %s" % (inst.legend, value) | |
print "%%" | |
sys.stdout.flush() | |
if histogram: | |
keys = frequencies.keys() | |
keys.sort() | |
for msgtype in keys: | |
print "%-33s\t%d" % (msgtype, frequencies[msgtype]) | |
except KeyboardInterrupt: | |
pass | |
# End |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
''' | |
Reads ship files producing a lat lon map based on csv files containing history of each ships path. | |
Column 5 of the csv is latidude as a float going from -90 to +90 | |
Column 6 of the csv is longigure as a float going from -180 to +180 | |
python aisbox.py aislog/ 45 60 -10 10 | |
''' | |
from os import path, listdir | |
import sys | |
import csv | |
import math | |
from copy import copy | |
class SubTile(object): | |
''' | |
A tile is a 2d space of interest, with max min co-ordinates containing cells and the space is split | |
into a regular grid. There are methods to determine if a point is inside the grid, which cell within the | |
grid the lower bound of each cell as 2 arrays and the coordinates of an enitre row. | |
''' | |
def __init__(self, nlong=100, nlat=100, maxlat=90.0, minlat=-90.0, maxlon=180.0, minlon=-180.0): | |
''' | |
Create a subtile | |
:param nlong: the number of longitude tiles. | |
:param nlat: the number of latitude tiles | |
:param maxlat: the maximum latitude | |
:param minlat: the minimum latitude | |
:param maxlon: the maximim longitude | |
:param minlon: the minimum longitude | |
''' | |
self.nlong = nlong | |
self.nlat = nlat | |
self.minlat = float(minlat) | |
self.maxlat = float(maxlat) | |
self.minlon = float(minlon) | |
self.maxlon = float(maxlon) | |
self.latcel = (self.maxlat-self.minlat)/float(self.nlat) | |
self.loncel = (self.maxlon-self.minlon)/float(self.nlong) | |
self._longcells = self._fill(self.nlong,self.minlon, self.loncel) | |
self._latcells = self._fill(self.nlat, self.minlat, self.latcel) | |
def _fill(self, n, start, cellsize ): | |
''' | |
Fill an array with regulararly spaced values | |
:param n: the number of elements | |
:param start: value of the first element | |
:param cellsize: increment of the value of each subsequent element | |
''' | |
l = [0] * n | |
for i in range(n): | |
l[i] = start+(cellsize*i) | |
return l | |
def size(self): | |
''' | |
Number of cells in a tile. | |
''' | |
return self.nlong*self.nlat | |
def inside(self, lat, lon): | |
''' | |
True if lat and lon are inside this subtile. | |
:param lat: | |
:param lon: | |
''' | |
return lat <= self.maxlat and lat > self.minlat and lon <= self.maxlon and lon > self.minlon | |
def cell(self, lat, lon): | |
''' | |
the cell number of lat an lon, or None if lat and lon are not within the tile. | |
:param lat: | |
:param lon: | |
''' | |
if self.inside(lat, lon): | |
latcel = int((lat-self.minlat)/self.latcel) | |
loncel = int((lon-self.minlon)/self.loncel) | |
return latcel*self.nlong+loncel | |
return None | |
def longcells(self, reversed=False): | |
''' | |
list of the lower value of each longitude cell | |
:param reversed: reverse the list if true. | |
''' | |
if reversed: | |
t = copy(self._longcells) | |
t.reverse() | |
return t | |
return self._longcells | |
def latcells(self, reversed=False): | |
''' | |
List of latitude cell lover values | |
:param reversed: reverse the list if true | |
''' | |
if reversed: | |
t = copy(self._latcells) | |
t.reverse() | |
return t | |
return self._latcells | |
def get_row(self, lat): | |
''' | |
Get a row of cells at the specified latitude | |
:param lat: | |
''' | |
if self.inside(lat, self.maxlon): | |
latcel = int((lat-self.minlat)/self.latcel) | |
return (latcel*self.nlong,(latcel+1)*self.nlong) | |
return (0,0) | |
class ClusterMap(object): | |
''' | |
A rectanculat map of cells | |
''' | |
def __init__(self, sub_tile, defcell=0): | |
''' | |
Create using a sub tile object | |
:param sub_tile: the subtile object | |
:param defcell: the default value of each cell. | |
''' | |
self.sub_tile = sub_tile | |
self.cluster = [defcell] * sub_tile.size() | |
def inc(self, lat, lon, by=1): | |
''' | |
Increment a cell at lat lon, assuming the cell is a numeric | |
:param lat: | |
:param lon: | |
:param by: | |
''' | |
i = self.sub_tile.cell(lat, lon) | |
if i is not None: | |
self.cluster[i] += by | |
else: | |
print "Outside range %s %s " % (lat, lon) | |
def get(self, lat, lon): | |
''' | |
Get the value at the cell at lat lon | |
:param lat: | |
:param lon: | |
''' | |
i = self.sub_tile.cell(lat, lon) | |
if i is not None: | |
return self.cluster[i] | |
return None | |
def get_row(self, latrow): | |
''' | |
Get the row at latitude latrow | |
:param latrow: | |
''' | |
(start,end) = self.sub_tile.get_row(latrow) | |
return self.cluster[start:end] | |
if __name__ == '__main__': | |
location = sys.argv[1] | |
sub_tile = SubTile(minlat=sys.argv[2], maxlat=sys.argv[3], minlon=sys.argv[4], maxlon=sys.argv[5]) | |
cluster_map = ClusterMap(sub_tile) | |
final_map = ClusterMap(sub_tile) | |
for historyfile in listdir(location): | |
if historyfile[-3:] == 'csv': | |
with open(path.join(location, historyfile)) as f: | |
reader = csv.reader(f) | |
headers = None | |
line = 0 | |
lastlat = None | |
lastlon = None | |
for row in reader: | |
line += 1 | |
if headers is None: | |
headers = row | |
else: | |
lat = row[4] | |
lon = row[5] | |
#print "%s %s" % (lat, lon) | |
try: | |
lat = float(lat) | |
lon = float(lon) | |
if lat < -90 or lat > 90 or lon < -180 or lon > 180: | |
print "Bad Data %s %s %s:%s" % ( lat, lon, historyfile, line) | |
else: | |
cluster_map.inc(lat, lon) | |
lastlat = lat | |
lastlon = lon | |
except IndexError: | |
print "ERROR: Index bad %s %s" % (lat, lon) | |
except ValueError: | |
pass | |
if lastlat is not None and lastlon is not None: | |
final_map.inc(lat,lon) | |
writer = csv.writer(sys.stdout) | |
writer.writerow(sub_tile.longcells()) | |
for latcell in sub_tile.latcells(reversed=True): | |
row = cluster_map.get_row(latcell) | |
row.insert(0,latcell) | |
writer.writerow(row) | |
writer = csv.writer(sys.stdout) | |
writer.writerow(sub_tile.longcells()) | |
for latcell in sub_tile.latcells(reversed=True): | |
row = final_map.get_row(latcell) | |
row.insert(0,latcell) | |
writer.writerow(row) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/*jslint node: true */ | |
"use strict"; | |
/** | |
* This code runs on the server, recieving a NMEA stream of AIS data via the update method. | |
* If connected to a real NMEA reciever over a serial line, just listen to the data event of the serial | |
* reader and push the data recieved into the update method. | |
* It will write json files containing ships and their tracks for everything recieved indexed by MMSI number. | |
* Runs in Node.js, requires "aisdecoder" : "0.0.1", "geolib" : "1.3.5", and "underscore" : "", NPM modules. | |
* | |
* License is Apache Software License 2.0 | |
* (c) Ian Boston 2014 | |
*/ | |
var EventEmitter = require('events').EventEmitter; | |
var util = require('util'); | |
var _ = require('underscore'); | |
var aisdecoder = require ('aisdecoder'); | |
var geolib = require ('geolib'); | |
var DEBUG = false; | |
if (!DEBUG) { | |
util.debug = function(args) { | |
}; | |
} | |
function AISMFactory() { | |
var factory = this; | |
function AISModel(position, range) { | |
this.decoder = new aisdecoder.AisDecoder(); | |
this.ships = {}; | |
this.messages = []; | |
this.position = position || { lat : 51.925013, lon: 1.300260 }; | |
this.range = range || 50000; | |
} | |
util.inherits(AISModel, EventEmitter); | |
AISModel.prototype.update_ship = function(aisobject) { | |
if (this.ships[aisobject.mmsi] === undefined) { | |
this.ships[aisobject.mmsi] = new AISShipModel(aisobject); | |
} else { | |
this.ships[aisobject.mmsi].update(aisobject); | |
} | |
} | |
AISModel.prototype.inrange = function(aisobject) { | |
var ship = this.ships[aisobject.mmsi]; | |
if (ship === undefined || ship.info.lat === undefined || ship.info.lon === undefined) { | |
return false; | |
} | |
if ( geolib.isPointInCircle( | |
{ latitude: ship.info.lat, longitude: ship.info.lon}, | |
{ latitude: this.position.lat, longitude: this.position.lon}, | |
this.range) ) { | |
if (!ship.basestation) { | |
util.log(JSON.stringify(ship)); | |
} | |
// 235039652 244060919 235018953 235018953 244700538 | |
return true; | |
} | |
return false; | |
} | |
AISModel.prototype.update = function(data) { | |
var self = this; | |
self.messages.push(data); | |
var aisobject = self.decoder.decode(data) | |
if (aisobject !== undefined) { | |
if (aisobject.mmsi !== undefined) { | |
self.update_ship(aisobject); | |
if ( self.inrange(aisobject) ) { | |
// dump emit all messages prior to this one, as this one is | |
// in range and the previous messages may be relevant. | |
// then clear the messages/ | |
// AIS messages are spread over a number of lines. | |
// bunched togather. | |
self.messages.forEach(function(message, i, array) { | |
self.emit("data", message); | |
}); | |
} | |
self.messages = []; | |
} else { | |
util.log("No mmsi "+JSON.stringify(aisobject)); | |
} | |
} | |
} | |
AISModel.prototype.set_position = function(position) { | |
this.position = position; | |
} | |
function AISShipModel(aisobject) { | |
this.mmsi = aisobject.mmsi; | |
this.basestation = false; | |
this.unknown_message_count = 0; | |
this.unknown_message_type = 0; | |
this.not_implemented = 0; | |
this.info = {}; | |
this.history = []; | |
this.update(aisobject); | |
} | |
/** | |
* Save current data in history if required | |
*/ | |
AISShipModel.prototype.save = function(positionUpdate) { | |
if (positionUpdate) { | |
this.history.push({ lat: this.info.lat, lon: this.info.lon, heading: this.info.heading, speed: this.info.speed }); | |
} | |
} | |
AISShipModel.prototype.update = function(aisobject) { | |
var self = this; | |
if (aisobject.type === 'PositionReportClassA') { | |
self.info = _.extend(self.info, aisobject); | |
self.save(true); | |
} else if (aisobject.type === 'ClassBCSPositionReport') { | |
// 29 Mar 08:24:52 - {"type":"ClassBCSPositionReport","mmsi":808663857,"repeat":0, | |
// "lon":113.443885,"lat":23.090231666666668,"course":130.4,"heading":130,"speed":0.1, | |
// "accuracy":true,"regional":3,"cs":true,"display":true,"dsc":false,"band":false,"msg22":false, | |
// "raim":false,"radio":530291,"assigned":0,"second":63} | |
self.info = _.extend(self.info, aisobject); | |
self.save(true); | |
} else if (aisobject.type === 'BaseStationReport') { | |
// 29 Mar 08:29:27 - {"type":"BaseStationReport","mmsi":4132201,"repeat":0, | |
// "lon":122.17332,"lat":30.810088333333333,"accuracy":false,"raim":false,"radio":83009,"epfd":7} | |
self.info = _.extend(self.info, aisobject); | |
self.basestation = true; | |
self.save(true); | |
} else if (aisobject.type === 'StaticAndVoyageRelatedData') { | |
// 29 Mar 08:31:13 - {"type":"StaticAndVoyageRelatedData","mmsi":259216000, | |
// "repeat":0,"imo":9344760,"callsign":"LIAT","shipname":"MASTRAFJORD","shiptype":60, | |
// "destination":"ARSVAAGEN/MORTAVIKA ","ais_version":0,"to_bow":66,"to_stern":63,"to_port":10, | |
// "to_starboard":9,"epfd":1,"draught":4.5,"dte":0} | |
self.info = _.extend(self.info, aisobject); | |
self.save(false); | |
} else if (aisobject.type === 'UnknownMessageType') { | |
self.unknown_message_count++; | |
self.save(false); | |
} else if (aisobject.type === 'ExtendedClassBCSPositionReport') { | |
// 29 Mar 08:33:16 - {"type":"ExtendedClassBCSPositionReport","mmsi":413769914,"repeat":0, | |
// "lon":121.49704,"lat":31.37528,"course":197.2,"speed":0.1,"accuracy":true,"regional":0, | |
// "second":40,"shipname":"HAI XUN 1077","shiptype":55,"to_bow":7,"to_stern":10,"to_port":2, | |
// "to_starboard":2,"epfd":1,"raim":false,"dte":1,"assigned":false} | |
self.info = _.extend(self.info, aisobject); | |
self.save(true); | |
} else if (aisobject.type === 'UTCAndDateResponse') { | |
// 29 Mar 15:22:47 - {"type":"UTCAndDateResponse","mmsi":4132108,"repeat":0, | |
// "lon":121.71751666666667,"lat":32.01835166666667,"accuracy":true,"raim":false,"radio":0,"epfd":7} | |
self.info = _.extend(self.info, aisobject); | |
self.save(false); | |
} else if (aisobject.type === 'Not implemented: Static Data Report') { | |
self.not_implemented++; | |
} else { | |
util.log(JSON.stringify(aisobject)); | |
self.unknown_message_type++; | |
} | |
return self; | |
} | |
factory.AISModel = AISModel; | |
} | |
module.exports = new AISMFactory(); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
''' | |
Created on Nov 12, 2012 | |
@author: ieb | |
''' | |
import re | |
import logging | |
from ais import AISUnpackingException, BitVector, aivdm_unpack, aivdm_decode,\ | |
postprocess, field_groups, lengths | |
import sys | |
import traceback | |
import time | |
import os | |
import csv | |
import json | |
from base import NMEATarget, NMEASource | |
import datetime | |
from os import path | |
from aisbox import SubTile | |
class AISParser(object): | |
''' | |
Parses an AIS message using ais.py from GPSD. Much of the code here is taken from that file | |
and recast into a class. | |
''' | |
def __init__(self, skiperr=True, scaled=True): | |
self.skiperr = skiperr | |
self.scaled = scaled | |
self.payloads = None | |
self.raw = None | |
self.well_formed = False | |
self.lc = 0 | |
def _parseline(self, line): | |
if self.payloads is None: | |
self.payloads = {'A':'', 'B':''} | |
if self.raw is None: | |
self.raw = '' | |
if not line: | |
logging.error("Parser: Not a line") | |
return (None,None,None) | |
self.lc += 1 | |
self.raw += line | |
line = line.strip() | |
# Strip off USCG metadata | |
line = re.sub(r"(?<=\*[0-9A-F][0-9A-F]),.*", "", line) | |
# Compute CRC-16 checksum | |
packet = line[1:-3] # Strip leading !, trailing * and CRC | |
csum = 0 | |
crc = -1 | |
for c in packet: | |
csum ^= ord(c) | |
csum = "%02X" % csum | |
# Ignore comments | |
if not line.startswith("!"): | |
logging.error("Parser: Not a data line") | |
return (None,None,None) | |
# Assemble fragments from single- and multi-line payloads | |
fields = line.split(",") | |
try: | |
expect = fields[1] | |
fragment = fields[2] | |
channel = fields[4] | |
if fragment == '1': | |
self.payloads[channel] = '' | |
self.well_formed = True | |
self.payloads[channel] += fields[5] | |
try: | |
# This works because a mangled pad literal means | |
# a malformed packet that will be caught by the CRC check. | |
pad = int(fields[6].split('*')[0]) | |
except ValueError: | |
pad = 0 | |
crc = fields[6].split('*')[1].strip() | |
except IndexError: | |
if self.skiperr: | |
logging.error("%d: malformed line: %s\n" % (self.lc, line.strip())) | |
self.well_formed = False | |
else: | |
raise AISUnpackingException(self.lc, "checksum", crc) | |
except KeyError: | |
logging.error("Key Error with %s " % line) | |
if csum != crc: | |
if self.skiperr: | |
logging.error("%d: bad checksum %s, expecting %s: %s\n" % (self.lc, `crc`, csum, line.strip())) | |
self.well_formed = False | |
else: | |
raise AISUnpackingException(self.lc, "checksum", crc) | |
if not self.well_formed: | |
logging.error("Parser: Not well formed %s" % (self.raw)) | |
return (None,None,None) | |
if fragment < expect: | |
logging.info("Parser: Fragment %s %s %s on channel %s payload %s" % (fragment, expect, self.raw, channel, self.payloads[channel])) | |
return (None,None,None) | |
# Render assembled payload to packed bytes | |
bits = BitVector() | |
bits.from_sixbit(self.payloads[channel], pad) | |
del(self.payloads[channel]) | |
r = self.raw | |
self.raw = '' | |
return (self.lc, r, bits) | |
def parse_ais_messages(self, line): | |
"Parse the message." | |
self.values = {} | |
(lc, raw, bits) = self._parseline(line) | |
if raw is None: | |
return (None, None, None) | |
self.values['length'] = bits.bitlen | |
# Without the following magic, we'd have a subtle problem near | |
# certain variable-length messages: DSV reports would | |
# sometimes have fewer fields than expected, because the | |
# unpacker would never generate cooked tuples for the omitted | |
# part of the message. Presently a known issue for types 15 | |
# and 16 only. (Would never affect variable-length messages in | |
# which the last field type is 'string' or 'raw'). | |
bits.extend_to(168) | |
# Magic recursive unpacking operation | |
try: | |
cooked = aivdm_unpack(lc, bits, 0, self.values, aivdm_decode) | |
# We now have a list of tuples containing unpacked fields | |
# Collect some field groups into ISO8601 format | |
for (offset, template, label, legend, formatter) in field_groups: | |
segment = cooked[offset:offset+len(template)] | |
if map(lambda x: x[0], segment) == template: | |
group = formatter(*map(lambda x: x[1], segment)) | |
group = (label, group, 'string', legend, None) | |
cooked = cooked[:offset]+[group]+cooked[offset+len(template):] | |
# Apply the postprocessor stage | |
cooked = postprocess(cooked) | |
# Now apply custom formatting hooks. | |
if self.scaled: | |
for (i, (inst, value)) in enumerate(cooked): | |
if value == inst.oob: | |
cooked[i][1] = "n/a" | |
elif inst.formatter: | |
if type(inst.formatter) == type(()): | |
# Assumes 0 is the legend for the "undefined" value | |
if value >= len(inst.formatter): | |
value = 0 | |
cooked[i][1] = inst.formatter[value] | |
elif type(formatter) == type(lambda x: x): | |
cooked[i][1] = inst.formatter(value) | |
expected = lengths.get(self.values['msgtype'], None) | |
# Check length; has to be done after so we have the type field | |
bogon = False | |
if expected is not None: | |
if type(expected) == type(0): | |
expected_range = (expected, expected) | |
else: | |
expected_range = expected | |
actual = self.values['length'] | |
if not (actual >= expected_range[0] and actual <= expected_range[1]): | |
bogon = True | |
if self.skiperr: | |
logging.info("%d: type %d expected %s bits but saw %s: %s\n" % (lc, self.values['msgtype'], expected, actual, raw.strip().split())) | |
else: | |
raise AISUnpackingException(lc, "length", actual) | |
# We're done, hand back a decoding | |
self.values = {} | |
return (raw, cooked, bogon) | |
except KeyboardInterrupt: | |
raise KeyboardInterrupt | |
except GeneratorExit: | |
raise GeneratorExit | |
except AISUnpackingException, e: | |
if self.skiperr: | |
logging.error("%s: %s\n" % (`e`, raw.strip().split())) | |
else: | |
raise | |
except: | |
(exc_type, exc_value, exc_traceback) = sys.exc_info() | |
logging.error("%d: Unknown exception: %s\n" % (lc, raw.strip().split())) | |
if self.skiperr: | |
pass | |
else: | |
raise exc_type, exc_value, exc_traceback | |
logging.error("Parser: Parser failed ") | |
return (None, None, None) | |
class AISFilter(AISParser, NMEASource, NMEATarget): | |
def __init__(self, source, config): | |
super(AISFilter, self).__init__(skiperr=True, scaled=True) | |
source.add(self) | |
self._source = source | |
self._inrange = {} | |
self._tile = SubTile(minlat=config[0]["lat"], | |
minlon=config[0]["lon"], | |
maxlat=config[1]["lat"], | |
maxlon=config[1]["lon"]) | |
pass | |
def recieve(self, message): | |
(raw, parsed, bogon) = self.parse_ais_messages(message) | |
if raw is None: | |
return | |
if bogon: | |
return | |
lat = None | |
lon = None | |
mmsi = None | |
for x in parsed: | |
if x[0].name == "lat": | |
lat = x[1] | |
if x[0].name == "lon": | |
lon = x[1] | |
if x[0].name == "mmsi": | |
mmsi = x[1] | |
if lat is not None and lon is not None: | |
if self._tile.inside(lat, lon): | |
self._send(message) | |
self._inrange[mmsi] = True | |
elif mmsi in self._inrange: | |
del(self._inrange[mmsi]) | |
elif mmsi is not None and mmsi in self._inrange: | |
self._send(message) | |
def start(self): | |
self._source.start() | |
''' | |
Prints an AIS Message in log format, maintaining a list of seen ships in CSV files, and a list of | |
current ships. Extends NMEATarget allowing this to be added to a source. | |
Extends a AISPartse to allow the messages to be parsed into a readable format. | |
''' | |
class AISPrinter(AISParser, NMEATarget): | |
TLOG = ("mmsi","shipname","status","lat","lon","course","speed","shiptype","callsign","destination") | |
def __init__(self, config): | |
super(AISPrinter, self).__init__(skiperr=config['skiperr'] if "skiperr" in config else True, scaled=config['scaled'] if "scaled" in config else True) | |
self.verbose = config['verbose'] if "verbose" in config else False | |
self.json = config['logjsondata'] if "logjsondata" in config else False | |
self.dsv = config['dsv'] if "dsv" in config else False | |
self.histogram = config['histogram'] if "histogram" in config else False | |
self.malformed = config['malformed'] if "malformed" in config else False | |
self.dump = config['dump'] if "dump" in config else False | |
self.with_history = config['history'] if "history" in config else False | |
self.frequencies = {} | |
self.basepath = path.abspath(config['basepath']) | |
self.running = True | |
self.ships_json = path.join(self.basepath,"ships.json") | |
self.history_csv = path.join(self.basepath,"history.csv") | |
if not path.exists(path.dirname(self.ships_json)): | |
os.makedirs(path.dirname(self.ships_json)) | |
if not path.exists(path.dirname(self.history_csv)): | |
os.makedirs(path.dirname(self.history_csv)) | |
if os.path.exists(self.ships_json): | |
f = open(self.ships_json) | |
self.ships = json.load(f) | |
f.close() | |
else: | |
self.ships = {} | |
self.dumpt = time.time() | |
if self.with_history: | |
self.allrecords = open(self.history_csv,"a") | |
self.allrecordscsv = csv.writer(self.allrecords) | |
header = [ "time" ] | |
header.extend(AISPrinter.TLOG) | |
self.allrecordscsv.writerow(header) | |
self.allrecords.flush() | |
def close(self): | |
self.allrecords.flush() | |
self.allrecords.close() | |
def _get_details(self, parsed): | |
details = {} | |
for (bf,v) in parsed: | |
details[bf.name] = v | |
return details | |
def _add(self, a, b): | |
try: | |
return (a if a is not None else 0) + (b if b is not None else 0) | |
except: | |
return 0 | |
def _save_details(self, details): | |
try: | |
mmsi = details.get("mmsi") | |
key = "%s" % mmsi | |
if mmsi is not None: | |
if key not in self.ships: | |
self.ships[key] = {} | |
length = self._add(details.get("to_bow"), details.get("to_stern")) | |
if length > 0: | |
details['loa'] = length | |
beam = self._add(details.get("to_port"), details.get("to_starbord")) | |
if length > 0: | |
details['beam'] = beam | |
details['_ts'] = long(time.time()*1000); | |
self.ships[key].update(details) | |
if self.dumpt < time.time(): | |
self.dumpt = time.time() + 60 | |
# this ensures that anything inside the ships dict does not | |
# result in ships.json becoming corrupted | |
todump = {} | |
for (k,v) in self.ships.iteritems(): | |
try: | |
kv = int(k) | |
todump["%s" % kv] = json.loads(json.dumps(v)) | |
except: | |
pass | |
f = open("%s.n" % self.ships_json,"w") | |
json.dump(todump,f,indent=2) | |
f.close() | |
os.rename("%s.n" % self.ships_json, self.ships_json) | |
except: | |
logging.error(traceback.format_exc()) | |
def _append_history(self, details): | |
mmsi = details.get("mmsi") | |
if mmsi != "n/a": | |
key = "%s" % mmsi | |
shipinfo = self.ships.get(key) or {} | |
f = None | |
try: | |
history_file = path.join(self.basepath,"%s.csv" % mmsi) | |
if not os.path.exists(history_file): | |
f = open(history_file,"w") | |
cw = csv.writer(f) | |
header = [ "time" ] | |
header.extend(AISPrinter.TLOG) | |
cw.writerow(header) | |
else: | |
f = open(history_file,"a") | |
cw = csv.writer(f) | |
history = [ datetime.datetime.now().isoformat()] | |
for h in AISPrinter.TLOG: | |
d = details.get(h) or shipinfo.get(h) | |
history.append(d) | |
cw.writerow(history) | |
self.allrecordscsv.writerow(history) | |
self.allrecords.flush() | |
except: | |
logging.error(traceback.format_exc()) | |
finally: | |
if f is not None: | |
f.close() | |
def cluster_details(self, details): | |
''' | |
Aggegates the details and maintains some overall stats of location. | |
lat and lon are floats | |
lat goes from +90 to -90 | |
lon goes from +180 to -180 | |
If we split the globe into cells 10 by 10, then count the number of hits in each cell that will give us an idea of the distribution. | |
''' | |
pass | |
def recieve(self, message): | |
if self.running: | |
try: | |
(raw, parsed, bogon) = self.parse_ais_messages(message) | |
if raw is None: | |
return | |
msgtype = parsed[0][1] | |
if self.verbose >= 1 or (bogon and self.malformed): | |
logging.error(raw) | |
if not bogon: | |
details = self._get_details(parsed) | |
self.cluster_details(details) | |
self._save_details(details) | |
if self.with_history: | |
self._append_history(details) | |
if self.json: | |
logging.error( "{" + ",".join(map(lambda x: '"' + x[0].name + '":' + str(x[1]), parsed)) + "}" ) | |
elif self.dsv: | |
logging.error( "|".join(map(lambda x: str(x[1]), parsed))) | |
elif self.histogram: | |
key = "%02d" % msgtype | |
self.frequencies[key] = self.frequencies.get(key, 0) + 1 | |
if msgtype == 6 or msgtype == 8: | |
dac = 0; fid = 0 | |
if msgtype == 8: | |
dac = parsed[3][1] | |
fid = parsed[4][1] | |
elif msgtype == 6: | |
dac = parsed[6][1] | |
fid = parsed[7][1] | |
key = "%02d_%04d_%02d" % (msgtype, dac, fid) | |
self.frequencies[key] = self.frequencies.get(key, 0) + 1 | |
elif self.dump: | |
for (inst, value) in parsed: | |
logging.error( "%-25s: %s" % (inst.legend, value)) | |
logging.error( "%%" ) | |
except: | |
logging.error(traceback.format_exc()) | |
pass |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
''' | |
Created on Nov 14, 2012 | |
@author: ieb | |
''' | |
import logging | |
import threading | |
''' | |
A NMEATartget is class that receives NMEA Sentences. | |
It must assume that the sentences may be recieved from multiple threads. | |
Recievers are in generall not threaded. | |
''' | |
class NMEATarget(object): | |
def recieve(self, message): | |
raise Exception("Please implement recieve method on %s " % self.__class__) | |
''' | |
A NMEASource is a source of NMEA messages. It provides methods to add and remove targets to which | |
Implementations can send messages. Messages are NMEA sentences. | |
Sources are in general threaded. | |
''' | |
class NMEASource(object): | |
TO_KMH = 1.15077945 | |
TO_MPS = 0.514444444 | |
TO_FEET = 3.2808399 | |
def add(self, receiver): | |
if not isinstance(receiver, NMEATarget): | |
raise Exception("Only NMEATargets (%s) may be added to NMEASources (%) " % ( receiver.__class__, self.__class__)) | |
if not hasattr(self, "receivers2"): | |
self.receivers2 = set() | |
self.receivers2.add(receiver) | |
self.receivers = frozenset(self.receivers2) | |
logging.error("Added Client") | |
return self | |
def remove(self, receiver): | |
if not hasattr(self, "receivers2"): | |
self.receivers2 = set() | |
self.receivers2.remove(receiver) | |
self.receivers = frozenset(self.receivers2) | |
logging.error("Removed Client") | |
return self | |
def _send(self, message): | |
if not hasattr(self, "receivers"): | |
return | |
if message is None or len(message) == 0: | |
return | |
s = self.receivers | |
for r in s: | |
r.recieve(message) | |
@staticmethod | |
def addChecksum(packet): | |
csum = 0 | |
for c in packet[1:]: | |
csum ^= ord(c) | |
return "%s*%02X" % (packet, csum) | |
''' | |
i2C uses smbus python module that will read from teh i2C buss | |
sudo apt-get install python-smbus. | |
sudo apt-get install i2c-tools. | |
''' | |
class FileReader(threading.Thread, NMEASource): | |
def __init__(self, filename, *args, **kwargs): | |
super(FileReader, self).__init__(*args, **kwargs) | |
self.filename = filename | |
self.running = True | |
def _clean(self, op): | |
if len(op) == 0: | |
return op | |
while( op[-1] == '\n' or op[-1] == '\r'): | |
op = op[:-1] | |
return op | |
def run(self): | |
if (self.running): | |
with open(self.filename) as sourcef: | |
logging.error("Reading %s " % self.filename) | |
while(self.running): | |
self._send(self._clean(sourcef.readline())) | |
self.running = False | |
logging.error("Closed %s " % self.filename) | |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!DOCTYPE html> | |
<html> | |
<!-- | |
This file displays a Google Map canvas and then gets various json files from the server rendering the contents on the map using the map api. | |
You must specify your Goolge Maps API key where indicated. | |
For those interested in drawing AIS tracks, grep for ships.json | |
License is Apache Software License 2.0 | |
(c) Ian Boston 2014 | |
--> | |
<head> | |
<meta name="viewport" content="initial-scale=1.0, user-scalable=no" /> | |
<style type="text/css"> | |
html { height: 100% } | |
body { height: 100%; margin: 0; padding: 0 } | |
#map_canvas { height: 100% } | |
</style> | |
<script type="text/javascript" | |
src="https://maps.googleapis.com/maps/api/js?key=<replace_with_your_key>&sensor=true"> | |
</script> | |
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.8.2/jquery.min.js"></script> | |
<script type="text/javascript"> | |
function initialize() { | |
var mapOptions = { | |
center: new google.maps.LatLng(51.911491, 1.304249), | |
zoom: 12, | |
mapTypeId: google.maps.MapTypeId.ROADMAP | |
}; | |
var map = new google.maps.Map(document.getElementById("map_canvas"), | |
mapOptions); | |
var ships = {}; | |
var boat = undefined; | |
var getMarkerInfo = function(val) { | |
var path = google.maps.SymbolPath.CIRCLE; | |
var scale = 4; | |
var speed = val['speed']; | |
if ( speed > 0.5 ) { | |
var course = val['course']; | |
if ( course < 0 ) { | |
course = course +360; | |
} | |
course = Math.PI*(course/180); | |
var vertex = [ -5,0,5,0,0,-15, 0, -speed*5-15, 0,-15 ] | |
var lvertex = vertex.length; | |
var ccos = Math.cos(course); | |
var csin = Math.sin(course); | |
for ( var i = 0; i < vertex.length; i+=2) { | |
var x = vertex[i+1]; | |
var y = vertex[i]; | |
vertex[i+1] = x * ccos + y * csin ; | |
vertex[i] = y * ccos - x * csin; | |
} | |
path = "M "+vertex[0]+" "+vertex[1]; | |
for ( var i = 2; i < vertex.length; i+=2) { | |
path = path+" L "+vertex[i]+" "+vertex[i+1]; | |
} | |
scale = 0.5; | |
path = path+" z"; | |
} | |
return { | |
"path" : path, | |
"scale" : scale, | |
"color" : "red" | |
}; | |
}; | |
/** | |
* Calculates Equirectangular approximation to distance using Pythagoras | |
*/ | |
var distance = function(lat1,lon1, lat2, lon2) { | |
var R = 3447; // nm | |
var lat1 = lat1*Math.PI/180; | |
var lon1 = lon1*Math.PI/180; | |
var lat2 = lat2*Math.PI/180; | |
var lon2 = lon2*Math.PI/180; | |
var x = (lon2-lon1) * Math.cos((lat1+lat2)/2); | |
var y = (lat2-lat1); | |
return Math.sqrt(x*x + y*y) * R; | |
}; | |
/** | |
* Calculates great circle distance using haversign approach, good for 32bit | |
* down a few NM | |
*/ | |
var greate_circle_distance_haversign = function(lat1, lon1, lat2, lon2) { | |
var R = 3447; // nm | |
var lat1 = lat1*Math.PI/180; | |
var lon1 = lon1*Math.PI/180; | |
var lat2 = lat2*Math.PI/180; | |
var lon2 = lon2*Math.PI/180; | |
var dLat = (lat2-lat1); | |
var dLon = (lon2-lon1); | |
var a = Math.sin(dLat/2) * Math.sin(dLat/2) + | |
Math.sin(dLon/2) * Math.sin(dLon/2) * Math.cos(lat1) * Math.cos(lat2); | |
var c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a)); | |
return R * c; | |
}; | |
/** | |
* Calculates great circle distance using the spherical | |
* cosine rule on 64bit down to a few m. | |
*/ | |
var great_circle_distance_cosin = function(lat1, lon1, lat2, lon2) { | |
var R = 3447; // nm | |
var lat1 = lat1*Math.PI/180; | |
var lon1 = lon1*Math.PI/180; | |
var lat2 = lat2*Math.PI/180; | |
var lon2 = lon2*Math.PI/180; | |
return Math.acos(Math.sin(lat1)*Math.sin(lat2) + | |
Math.cos(lat1)*Math.cos(lat2) * | |
Math.cos(lon2-lon1)) * R; | |
}; | |
/** | |
* The intial bearing on the great circle route from lat1:lon1 to lat2:lon2 | |
*/ | |
var initial_bearing = function(lat1, lon1, lat2, lon2) { | |
var lat1 = lat1*Math.PI/180; | |
var lon1 = lon1*Math.PI/180; | |
var lat2 = lat2*Math.PI/180; | |
var lon2 = lon2*Math.PI/180; | |
var dLat = (lat2-lat1); | |
var dLon = (lon2-lon1); | |
var y = Math.sin(dLon) * Math.cos(lat2); | |
var x = Math.cos(lat1)*Math.sin(lat2) - | |
Math.sin(lat1)*Math.cos(lat2)*Math.cos(dLon); | |
return ((Math.atan2(y, x)*180/Math.PI)+360) % 360; | |
}; | |
/** | |
* The final bearing on the great circle route from lat1:lon1 to lat2:lon2 (at the lat2:lon2 end) | |
*/ | |
var reverse_bearing = function(lat1, lon1, lat2, lon2) { | |
return (initial_bearing(lat2,lon2,lat1,lon1)+180) % 360; | |
}; | |
/** | |
* The midpoint of the great circle route. | |
*/ | |
var midpoint = function(lat1, lon1, lat2, lon2) { | |
var lat1 = lat1*Math.PI/180; | |
var lon1 = lon1*Math.PI/180; | |
var lat2 = lat2*Math.PI/180; | |
var lon2 = lon2*Math.PI/180; | |
var dLat = (lat2-lat1); | |
var dLon = (lon2-lon1); | |
var Bx = Math.cos(lat2) * Math.cos(dLon); | |
var By = Math.cos(lat2) * Math.sin(dLon); | |
var lat3 = Math.atan2(Math.sin(lat1)+Math.sin(lat2), | |
Math.sqrt( (Math.cos(lat1)+Bx)*(Math.cos(lat1)+Bx) + By*By ) ); | |
var lon3 = lon1 + Math.atan2(By, Math.cos(lat1) + Bx); | |
return { | |
"lat" : lat3, | |
"lon" : lon3 | |
}; | |
}; | |
var getTitle = function(val) { | |
title = val['mmsi']+" "+val['shipname']; | |
if ( val['speed'] > 0.1 ) { | |
title = title + " " + val['course'] + "T" + val['speed'] + "kn"; | |
} | |
var tnow = new Date().getTime(); | |
var age = Math.floor((tnow - val["_ts"])/1000); | |
if ( age < 60 ) { | |
title = title + " T-" + age +"s"; | |
} else { | |
age = Math.floor(age/60); | |
if ( age < 60 ) { | |
title = title + " T-" + age +"m"; | |
} else { | |
age = Math.floor(age/60); | |
title = title + " T-" + age +"h"; | |
} | |
} | |
return title; | |
} | |
var getBoatTitle = function(val) { | |
title = "Your Boat"; | |
if ( val['speed'] > 0.1 ) { | |
title = title + " " + val['course'] + "T" + val['speed'] + "kn"; | |
} | |
var tnow = new Date().getTime(); | |
var age = Math.floor((tnow - val["_ts"])/1000); | |
if ( age < 60 ) { | |
title = title + " T-" + age +"s"; | |
} else { | |
age = Math.floor(age/60); | |
if ( age < 60 ) { | |
title = title + " T-" + age +"m"; | |
} else { | |
age = Math.floor(age/60); | |
title = title + " T-" + age +"h"; | |
} | |
} | |
return title; | |
} | |
/** | |
* Move a marker and create a trail | |
*/ | |
var moveMarker = function(val) { | |
var mmsi = val['mmsi']; | |
var marker = ships[mmsi]['_marker']; | |
var posnow = new google.maps.LatLng(val['lat'], val['lon']); | |
var lastpos = marker.getPosition(); | |
marker.setPosition(posnow); | |
var markerinfo = getMarkerInfo(val); | |
marker.setIcon({ | |
path: markerinfo['path'], | |
fillColor: markerinfo['color'], | |
fillOpacity: 0.5, | |
scale: markerinfo['scale'], | |
strokeColor: markerinfo['color'], | |
strokeWeight: 1, | |
}); | |
if (ships[mmsi]['_wake'] === undefined) { | |
var wake = new google.maps.Polyline({ | |
path : [ lastpos, posnow ], | |
map : map, | |
strokeColor: "green", | |
strokeOpacity: 1.0, | |
strokeWeight: 1 | |
}); | |
ships[mmsi]['_wake'] = wake; | |
} else { | |
var wake = ships[mmsi]['_wake']; | |
var positons = wake.getPath(); | |
positons.push(posnow); | |
while (positons.getLength() > 20) { | |
positons.removeAt(0); | |
} | |
} | |
}; | |
/** | |
* Move a marker and create a trail | |
*/ | |
var moveBoat = function(boat) { | |
var marker = boat['_marker']; | |
var posnow = new google.maps.LatLng(boat['lat'], boat['lon']); | |
var lastpos = marker.getPosition(); | |
marker.setPosition(posnow); | |
var markerinfo = getMarkerInfo(boat); | |
marker.setIcon({ | |
path: markerinfo['path'], | |
fillColor: "blue", | |
fillOpacity: 0.5, | |
scale: markerinfo['scale'], | |
strokeColor: "blue", | |
strokeWeight: 1, | |
}); | |
if (boat['_wake'] === undefined) { | |
var wake = new google.maps.Polyline({ | |
path : [ lastpos, posnow ], | |
map : map, | |
strokeColor: "green", | |
strokeOpacity: 1.0, | |
strokeWeight: 2 | |
}); | |
boat['_wake'] = wake; | |
} else { | |
var wake = boat['_wake']; | |
var positons = wake.getPath(); | |
positons.push(posnow); | |
while (positons.getLength() > 20) { | |
positons.removeAt(0); | |
} | |
} | |
}; | |
var getContent = function(mmsi) { | |
var val = ships[mmsi]; | |
var contentString = '<div id="content">'+ | |
'<div id="siteNotice">'+ | |
'<img src="http://photos3.marinetraffic.com/ais/showphoto.aspx?mmsi='+mmsi+'" width="300" />' + | |
'</div>'+ | |
'<h2 id="firstHeading" class="firstHeading">'+val['shipname']+'</h2>'+ | |
'<div id="bodyContent">'+ | |
'<p><ul>'; | |
$.each(val, function(skey,sval) { | |
contentString = contentString + "<li>"+skey+":"+sval+"</li>"; | |
}); | |
contentString = contentString + '</ul></p>'+ | |
'</div>'+ | |
'</div>'; | |
return contentString; | |
} | |
var getBoatContent = function() { | |
var contentString = '<div id="content">'+ | |
'<div id="siteNotice">'+ | |
'' + | |
'</div>'+ | |
'<h2 id="firstHeading" class="firstHeading">Your Boat</h2>'+ | |
'<div id="bodyContent">'+ | |
'<p><ul>'; | |
$.each(boat, function(skey,sval) { | |
contentString = contentString + "<li>"+skey+":"+sval+"</li>"; | |
}); | |
contentString = contentString + '</ul></p>'+ | |
'</div>'+ | |
'</div>'; | |
return contentString; | |
} | |
/** | |
* | |
*/ | |
var createMarker = function(mmsi, val) { | |
var shiplatlong = new google.maps.LatLng(val['lat'],val['lon']); | |
var markerinfo = getMarkerInfo(val); | |
var marker = new google.maps.Marker({ | |
position: shiplatlong, | |
map: map, | |
icon : { | |
path: markerinfo['path'], | |
fillColor: markerinfo['color'], | |
fillOpacity: 0.5, | |
scale: markerinfo['scale'], | |
strokeColor: markerinfo['color'], | |
strokeWeight: 1, | |
}, | |
title: getTitle(val) | |
}); | |
var infowindow = new google.maps.InfoWindow(); | |
google.maps.event.addListener(marker, 'click', function() { | |
infowindow.setContent(getContent(mmsi)); | |
infowindow.open(map,marker); | |
}); | |
return marker; | |
}; | |
var createBoat = function(boat) { | |
var shiplatlong = new google.maps.LatLng(boat['lat'],boat['lon']); | |
var markerinfo = getMarkerInfo(boat); | |
var marker = new google.maps.Marker({ | |
position: shiplatlong, | |
map: map, | |
icon : { | |
path: markerinfo['path'], | |
fillColor: "blue", | |
fillOpacity: 0.5, | |
scale: markerinfo['scale'], | |
strokeColor:"blue", | |
strokeWeight: 1, | |
}, | |
title: getBoatTitle(boat) | |
}); | |
var infowindow = new google.maps.InfoWindow(); | |
google.maps.event.addListener(marker, 'click', function() { | |
infowindow.setContent(getBoatContent()); | |
infowindow.open(map,marker); | |
}); | |
$.each(boat['waypoints'], function(n,wp) { | |
console.log("Marking Waypoint "+wp); | |
var wplatlong = new google.maps.LatLng(wp['lat'],wp['lon']); | |
var marker = new google.maps.Marker({ | |
position: wplatlong, | |
map: map, | |
icon : { | |
path: google.maps.SymbolPath.CIRCLE, | |
fillColor: "red", | |
fillOpacity: 0.5, | |
scale: 2, | |
strokeColor:"red", | |
strokeWeight: 1, | |
}, | |
title: "WP"+wp['wpn'] | |
}); | |
}); | |
return marker; | |
}; | |
var removeShip = function(val) { | |
if ( val['_marker'] !== undefined ) { | |
val['_marker'].setMap(null); | |
} | |
if ( val['_wake'] !== undefined ) { | |
val['_wake'].setMap(null); | |
} | |
} | |
var dump = function(val) { | |
var ret = ""; | |
$.each(val, function( key, val) { | |
ret = ret + " " + key + ":" + val + "," | |
}); | |
return ret; | |
} | |
var track = undefined; | |
var loadShips = function() { | |
var now = new Date(); | |
$.getJSON('track.json', { "noCache": now.getTime() }, function(data) { | |
$.each(data, function(n,latlon) { | |
if ( track === undefined ) { | |
var posnow = new google.maps.LatLng(latlon['lat'],latlon['lon']); | |
track = new google.maps.Polyline({ | |
path : [ posnow ], | |
map : map, | |
strokeColor: "yellow", | |
strokeOpacity: 1.0, | |
strokeWeight: 2 | |
}); | |
} else { | |
var path = track.getPath(); | |
if (n >= path.getLength()) { | |
path.push(new google.maps.LatLng(latlon['lat'],latlon['lon'])); | |
} | |
} | |
}); | |
}); | |
// represents the boat you are on | |
$.getJSON('boat.json', { "noCache": now.getTime() }, function(data) { | |
if ( boat === undefined ) { | |
boat = data; | |
boat["_marker"] = createBoat(boat); | |
} else { | |
var marker = boat['_marker']; | |
var lastpos = marker.getPosition(); | |
var d = distance(lastpos.lat(), lastpos.lng(), data['lat'], data['lon']) | |
console.log("Distance "+d); | |
if (d > 0.01) { // greater than 0.1nm | |
moveBoat(boat); | |
} | |
marker.setTitle(getBoatTitle(boat)); | |
// update the ships data. | |
$.each(data, function(kk,vv) { | |
boat[kk] = vv; | |
}); | |
} | |
}); | |
// all other ships tracked by AIS | |
$.getJSON('ships.json', { "noCache": now.getTime() }, function(data) { | |
$.each(data, function(key,val) { | |
if ( val['_ts'] === undefined ) { | |
return; | |
} | |
var remove = false; | |
if ( val['speed'] === undefined || val['speed'] < 0.1 ) { | |
// not moving and no update in the last 24h | |
remove = ((now.getTime()-val['_ts']) > 24*3600*1000) | |
} else { | |
// moving but no update in the last hour | |
remove = ((now.getTime()-val['_ts']) > 3600*1000) | |
} | |
if ( ships[key] !== undefined ) { | |
if ( remove ) { | |
removeShip(ships[key]); | |
ships[key] = undefined; | |
} else { | |
var marker = ships[key]['_marker']; | |
var lastpos = marker.getPosition(); | |
var d = distance(lastpos.lat(), lastpos.lng(), val['lat'], val['lon']) | |
if (d > 0.1) { // greater than 0.1nm | |
console.log(val['mmsi']+" "+val['shipsname']+" travelled "+d+" at "+val['speed']); | |
moveMarker(val); | |
} | |
marker.setTitle(getTitle(val)); | |
// update the ships data. | |
$.each(val, function(kk,vv) { | |
ships[key][kk] = vv; | |
}); | |
} | |
} else { | |
if ( remove ) { | |
return; | |
} | |
val["_marker"] = createMarker(key, val) | |
// save | |
ships[key] = val; | |
} | |
}); // each | |
}); // getjson | |
} // end function | |
var reloadShips = function() { | |
console.log("Reload ships"); | |
loadShips(); | |
window.setTimeout(reloadShips,15000); | |
} | |
reloadShips(); | |
} | |
</script> | |
</head> | |
<body onload="initialize()"> | |
<div id="map_canvas" style="width:100%; height:100%"></div> | |
</body> | |
</html> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
''' | |
Created on Nov 9, 2012 | |
This is the main NMEA server for use when connected up to real hardware. | |
@author: ieb | |
''' | |
import logging | |
import sys | |
from nmeaserver import NMEAMux, NMEATcpHandler, ThreadedTCPServer | |
import json | |
from nmeahardware import NMEASerialReader, NMEAI2CReader, OutputDemux,\ | |
OutputFile | |
from aisprocessor import AISPrinter, AISFilter | |
from base import FileReader | |
from demo.boat import Boat | |
from demo.instruments import WindInstrument, DepthInstrument, SpeedInstrument,\ | |
CompassInstrument, GPSInstrument | |
import time | |
def addLogging(reader, config): | |
if "box" in config: | |
reader = AISFilter(reader, config['box']) | |
if "logging" in config and config["logging"]["type"] == "ais": | |
logging.error("Creating AIS Printer") | |
printer = AISPrinter(config['logging']) | |
reader.add(printer) | |
return reader | |
if __name__ == '__main__': | |
if len(sys.argv) != 1: | |
logging.error("You must provide a config file as the first argument"); | |
f = open(sys.argv[1]) | |
print "Loading Config %s " % sys.argv[1] | |
config = json.load(f) | |
f.close() | |
logging.error("Starting Mux") | |
demux = None | |
output = None | |
mux = None | |
if "demux" in config: | |
demux = OutputDemux(config["demux"]) | |
if "file" in config: | |
output = OutputFile(config["file"]) | |
else: | |
mux = NMEAMux() | |
tostop = [] | |
if "boat" in config: | |
boat = Boat(config['boat']) | |
for listener in config["listeners"]: | |
reader = None | |
if listener['type'] == 'serial': | |
reader = addLogging(NMEASerialReader(listener['port'],listener['speed']), listener) | |
elif listener['type'] == 'i2c': | |
reader = addLogging(NMEAI2CReader(listener['port'],listener['speed']), listener) | |
elif listener['type'] == 'demo-file': | |
reader = addLogging(FileReader(listener['file']), listener) | |
elif listener['type'] == 'demo-wind': | |
reader = WindInstrument(listener, boat) | |
elif listener['type'] == 'demo-depth': | |
reader = DepthInstrument(listener, boat) | |
elif listener['type'] == 'demo-speed': | |
reader = SpeedInstrument(listener, boat) | |
elif listener['type'] == 'demo-compass': | |
reader = CompassInstrument(listener, boat) | |
elif listener['type'] == 'demo-gps': | |
reader = GPSInstrument(listener, boat) | |
if reader is not None: | |
if demux is not None: | |
reader.add(demux.get_channel_target(listener['channel'])) | |
elif output is not None: | |
reader.add(output) | |
else: | |
reader.add(mux) | |
reader.start() | |
tostop.append(reader) | |
try: | |
if demux is not None: | |
demux.run() | |
elif output is not None: | |
while True: | |
time.sleep(10) | |
else: | |
NMEATcpHandler.source = mux | |
# Create the server, binding to localhost on port 9999 | |
server = ThreadedTCPServer((config["server"]["host"], config["server"]["port"]), NMEATcpHandler) | |
logging.error("Serving") | |
# Activate the server; this will keep running until you | |
# interrupt the program with Ctrl-C | |
server.serve_forever() | |
finally: | |
if demux is not None: | |
demux.close() | |
for x in tostop: | |
x.running = False | |
logging.error("Done") |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
''' | |
@author: ieb | |
''' | |
import serial | |
import sys | |
import time | |
def writeBytes(source, dest, wchannel, nbytes, linebuffer, rate, log): | |
if linebuffer is None: | |
return None | |
while(len(linebuffer) < nbytes): | |
l = source.readline() | |
if len(l) == 0: | |
nbytes = len(linebuffer) | |
dest.write(chr(wchannel+ord('1'))) | |
dest.flush() | |
dest.write(chr(nbytes)) | |
dest.flush() | |
for c in linebuffer[:nbytes]: | |
dest.write(c) | |
dest.flush() | |
log.write(chr(wchannel+ord('1'))) | |
log.write(chr(nbytes)) | |
for c in linebuffer[:nbytes]: | |
log.write(c) | |
print "Final Sent %s to %s : %s " % (nbytes,wchannel, linebuffer[:(nbytes)]) | |
return None | |
#if l[-2:-2] != "\r\n": | |
# if l[-2:-1] == "\n\r": | |
# l = l[:-2] + "\r\n" | |
# elif l[-1:-1] == "\n": | |
# l = l[:-1] + "\r\n" | |
# elif l[-1:-1] == "\r": | |
# l = l + "\n" | |
# else: | |
# l = l + "\r\n" | |
linebuffer = linebuffer + l | |
dest.write(chr(wchannel+ord('1'))) | |
dest.flush() | |
dest.write(chr(nbytes)) | |
dest.flush() | |
for c in linebuffer[:nbytes]: | |
dest.write(c) | |
dest.flush() | |
log.write(chr(wchannel+ord('1'))) | |
log.write(chr(nbytes)) | |
for c in linebuffer[:nbytes]: | |
log.write(c) | |
print "Sent %s to %s : %s " % (nbytes,wchannel, linebuffer[:(nbytes)]) | |
return linebuffer[nbytes:] | |
def lineBuffersEmpty(lb): | |
for l in lb: | |
if l is not None: | |
return False | |
return True | |
if __name__ == '__main__': | |
source = sys.argv[1] | |
dest = sys.argv[2] | |
sactive = False | |
if dest[:8] == "/dev/tty": | |
sactive = True | |
desth = serial.Serial(dest, 9600, timeout=1) | |
else: | |
desth = open(dest,"wb") | |
if sactive: | |
time.sleep(5) | |
if sactive > 0: | |
print "<<< %s" % desth.readline(); | |
print "<<< %s" % desth.readline(); | |
print "Writing \n" | |
with open(source,"rb") as sourceh: | |
content = sourceh.read() | |
for c in content: | |
desth.write(c) | |
desth.flush() | |
time.sleep(0.01) | |
if c == '\n': | |
if sactive and desth.inWaiting() > 0: | |
print "<<< %s" % desth.readline(); | |
if sactive: | |
for i in range(1,10): | |
if sactive and desth.inWaiting() > 0: | |
print "<<< %s" % desth.readline(); | |
time.sleep(1) | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment