|
# frozen_string_literal: true |
|
|
|
require 'csv'
require 'date'
require 'json'
require 'logger'
require 'net/http'
|
|
|
LOG = Logger.new($stdout, progname: 'ratp') |
|
|
|
NA_UI = '[ND]' |
|
|
|
Transport = Struct.new(:type, :number, :stop, :destination) do |
|
def to_s |
|
"#{type[:ui]} #{number} @ #{stop} -> #{destination}" |
|
end |
|
end |
|
|
|
Line = Struct.new(:type, :id) do |
|
def to_s |
|
"#{type[:ui]} #{id}" |
|
end |
|
end |
|
|
|
StopAtRoute = Struct.new(:line, :route_id, :stop_name) |
|
Request = Struct.new(:route_id, :stop_id, :direction) |
|
|
|
class Type |
|
METRO = { ui: 'metro', api: 'metro' }.freeze |
|
BUS = { ui: 'bus', api: 'bus' }.freeze |
|
RER = { ui: 'rer', api: 'rail' }.freeze |
|
TRAM = { ui: 'tram', api: 'tram' }.freeze |
|
end |
|
|
|
class ConfigurationError < StandardError |
|
end |
|
|
|
def read_csv(url) |
|
uri = URI(url) |
|
response = nil |
|
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http| |
|
request = Net::HTTP::Get.new(uri) |
|
response = http.request(request) |
|
end |
|
return nil unless response.is_a? Net::HTTPSuccess |
|
|
|
csv_data = CSV.new(response.body, headers: true, col_sep: ';', encoding: 'utf-8') |
|
|
|
while (row = csv_data.shift) |
|
yield row |
|
end |
|
end |
|
|
|
def get_line_details(lines) |
|
LOG.info('Reading line details') |
|
|
|
ids = {} |
|
pictos = {} |
|
|
|
url = 'https://data.iledefrance-mobilites.fr/explore/dataset/referentiel-des-lignes/download/?format=csv&timezone=Europe/Paris&lang=fr&use_labels_for_header=true&csv_separator=%3B' |
|
read_csv(url) do |row| |
|
# TODO: check if RATP can be taken as input |
|
lines.each do |line| |
|
next unless row['TransportMode'] == line.type[:api] && row['ShortName_Line'] == line.id && ( |
|
row['OperatorName'] == 'RATP' || row['OperatorName'] == 'SNCF' |
|
) |
|
|
|
ids[line] = row['ID_Line'] |
|
pictos[row['ID_Line']] = row['Picto'] |
|
end |
|
end |
|
|
|
lines.each do |line| |
|
raise ConfigurationError, "Cannot find line #{line}" if ids[line].nil? |
|
end |
|
|
|
LOG.info('End line details') |
|
|
|
[ids, pictos] |
|
end |
|
|
|
def get_stop_ids(route_id_stop_name_pairs) |
|
LOG.info('Reading stop ids') |
|
|
|
stop_ids = [] |
|
line_and_stop_name_map = {} |
|
stops_at_route = {} |
|
|
|
url = 'https://data.iledefrance-mobilites.fr/explore/dataset/arrets-lignes/download/?format=csv&timezone=Europe/Berlin&lang=fr&use_labels_for_header=true&csv_separator=%3B' |
|
|
|
read_csv(url) do |row| |
|
route_id_stop_name_pairs.each do |pair| |
|
if row['route_id'] == "IDFM:#{pair.route_id}" |
|
stops_at_route[pair.route_id] = [] if stops_at_route[pair.route_id].nil? |
|
stops_at_route[pair.route_id].push(row['stop_name']) |
|
end |
|
|
|
next unless row['route_id'] == "IDFM:#{pair.route_id}" && row['stop_name'] == pair.stop_name |
|
|
|
cur_stop_id = row['stop_id'].delete('^0-9') |
|
stop_ids.push(cur_stop_id) |
|
line_and_stop_name_map[pair.route_id] = {} if line_and_stop_name_map[pair.route_id].nil? |
|
if line_and_stop_name_map[pair.route_id][pair.stop_name].nil? |
|
line_and_stop_name_map[pair.route_id][pair.stop_name] = [] |
|
end |
|
line_and_stop_name_map[pair.route_id][pair.stop_name].push(cur_stop_id) |
|
end |
|
end |
|
|
|
route_id_stop_name_pairs.each do |pair| |
|
if line_and_stop_name_map[pair.route_id].nil? || line_and_stop_name_map[pair.route_id][pair.stop_name].nil? |
|
raise ConfigurationError, |
|
"Unable to find stop #{pair.stop_name} for #{pair.line}. Possible values: #{stops_at_route[pair.route_id]}" |
|
end |
|
end |
|
|
|
LOG.info('End stop ids') |
|
|
|
[stop_ids.uniq, line_and_stop_name_map] |
|
end |
|
|
|
def request_info(stop_id) |
|
uri = URI("https://prim.iledefrance-mobilites.fr/marketplace/stop-monitoring?MonitoringRef=STIF:StopPoint:Q:#{stop_id}:") |
|
|
|
response = nil |
|
Net::HTTP.start(uri.host, uri.port, { use_ssl: true, read_timeout: 5, open_timeout: 5 }) do |http| |
|
request = Net::HTTP::Get.new(uri, { |
|
accept: 'application/json', |
|
apiKey: IDFM_API_KEY |
|
}) |
|
response = http.request(request) |
|
end |
|
|
|
json = JSON.parse(response.body) |
|
|
|
json['Siri']['ServiceDelivery']['StopMonitoringDelivery'][0]['MonitoredStopVisit'] |
|
end |
|
|
|
def get_all_timings(stop_ids) |
|
all_timings = [] |
|
|
|
semaphore = Mutex.new |
|
get_update_data = lambda { |stop_id| |
|
begin |
|
useful_data = request_info(stop_id) |
|
semaphore.synchronize do |
|
all_timings.push(*useful_data) |
|
end |
|
rescue StandardError => e |
|
warn("ERROR: RATP: Unable to read timings for #{stop_id}: #{e}") |
|
end |
|
} |
|
|
|
threads = [] |
|
stop_ids.each do |stop_id| |
|
task = Thread.new(stop_id) do |this| |
|
get_update_data.call(this) |
|
end |
|
threads << task |
|
end |
|
|
|
threads.map(&:join) |
|
|
|
all_timings |
|
end |
|
|
|
def find_results(entries, requests) |
|
results_per_key = {} |
|
|
|
requests.each_key do |transport| |
|
found = false |
|
|
|
requests[transport].each do |queried_item| |
|
key = "#{queried_item.route_id}-#{queried_item.stop_id}-#{queried_item.direction}" |
|
entries.each do |item| |
|
id = item['ItemIdentifier'] |
|
dir = id.split('.')[2] |
|
dir = id.split('.')[6] unless %w[A R].include?(dir) |
|
dir_name = item['MonitoredVehicleJourney']['DirectionName'][0]['value'].downcase.delete('^a-z') |
|
|
|
route_id = item['MonitoredVehicleJourney']['LineRef']['value'].delete('STIF:Line::').delete_suffix(':') |
|
stop_id = item['MonitoringRef']['value'].delete('^0-9') |
|
|
|
unless queried_item.route_id == route_id && queried_item.stop_id == stop_id && |
|
(queried_item.direction == dir || queried_item.direction.downcase.delete('^a-z') == dir_name) |
|
next |
|
end |
|
|
|
dest_name = item['MonitoredVehicleJourney']['DestinationName'][0]['value'] |
|
|
|
at_stop = item['MonitoredVehicleJourney']['MonitoredCall']['VehicleAtStop'] |
|
if at_stop |
|
remaining_time = 'Arrêt' |
|
else |
|
now = DateTime.now |
|
upcoming = DateTime.parse(item['MonitoredVehicleJourney']['MonitoredCall']['ExpectedDepartureTime']) |
|
remaining = [0, ((upcoming.to_time - now.to_time) / 60).round(half: :down)].max |
|
remaining_time = "#{remaining} mn" |
|
end |
|
|
|
results_per_key[key] = [] if results_per_key[key].nil? |
|
|
|
results_per_key[key].append( |
|
{ |
|
route: route_id, |
|
dest: dest_name, |
|
time: remaining_time |
|
} |
|
) |
|
|
|
found = true |
|
end |
|
end |
|
|
|
next if found |
|
|
|
# TODO: fix key to be route id |
|
|
|
results_per_key[transport.to_s] = [] |
|
results_per_key[transport.to_s].append( |
|
{ |
|
route: requests[transport][0].route_id, |
|
dest: '', |
|
time: 'N/D' |
|
} |
|
) |
|
results_per_key[transport.to_s].append( |
|
{ |
|
route: requests[transport][0].route_id, |
|
dest: '', |
|
time: 'N/D' |
|
} |
|
) |
|
end |
|
|
|
results_per_key |
|
end |
Bonjour, effectivement il y a un problème sur l'API que j'utilise pour récupérer les données. L'auteur de cette API nous a promis de regarder, mais rien n'a encore été fait.
Pendant ce temps, je suis en train de migrer le code vers l'API d'IDFM, mais cette API est mal documentée et il y manque des informations.
Prochaine mise à jour d'ici une semaine :-)