
@tamil4kites
Created February 8, 2018 11:51
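# Driver snippet: builds an AddressUploadUtil from the command-line arguments
# (input file, company id, environment, user name, password, output file) and
# appends 'update' so it becomes the @method argument of the class below.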
require_relative './address_upload_util.rb'
address_uploader = AddressUploadUtil.new ARGV.push('update')
address_uploader.process_and_upload(["xlsx", "csv"])
require 'roo'
require 'json'
require 'uri'
require 'csv'
require 'net/http'
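# Utility that reads address rows from an .xlsx or .csv file and creates or
# updates each address through the FourKites geo-api, one HTTP request per row.
# Failures are appended to the output CSV given on the command line.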
class AddressUploadUtil
  attr_reader :output_file_name
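  # Column headings expected in the input file, keyed by the symbol used internally.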
  HEADING_HASH = { location_id: 'Location Id', ship_to: 'Ship-To', stop_name: "Stop Name", address_line_1: "Address Line 1",
                   address_line_2: "Address Line 2", city: "City", state: "State", zip: "ZIP", country: "Country",
                   load_unload_time: "Load/Unload Time (in minutes)", latitude: "LAT", longitude: "LONG", geofence_radius: "Rad (in miles)",
                   polygon_coordinates: "Polygon (array of lat/long pairs)", mon_open: "Monday Open", mon_close: "Monday Close",
                   tues_open: "Tuesday Open", tues_close: "Tuesday Close", wed_open: "Wednesday Open", wed_close: "Wednesday Close",
                   thurs_open: "Thursday Open", thurs_close: "Thursday Close", fri_open: "Friday Open", fri_close: "Friday Close",
                   sat_open: "Saturday Open", sat_close: "Saturday Close", sun_open: "Sunday Open", sun_close: "Sunday Close" }
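  # Positional column order assumed for 'update' CSV files (the first column is
  # the id of the address record to update).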
  ROW_HEADING = [:address_id, :location_id, :ship_to, :stop_name, :address_line_1, :address_line_2, :city, :state, :zip, :country,
                 :load_unload_time, :latitude, :longitude, :geofence_radius, :polygon_coordinates, :tags, :mon_open, :mon_close,
                 :tues_open, :tues_close, :wed_open, :wed_close, :thurs_open, :thurs_close, :fri_open, :fri_close,
                 :sat_open, :sat_close, :sun_open, :sun_close]
  def initialize(arguments)
    @input_file_name = arguments[0]
    @company_id = arguments[1]
    @env = arguments[2]
    @user_name = arguments[3]
    @password = arguments[4]
    @output_file_name = arguments[5]
    @method = arguments[6]
    url = case @env
          when "prod" then "https://geo-api.fourkites.com"
          when "staging" then "https://geo-api-staging.fourkites.com"
          when "uat" then "http://geo-api-uat.fourkites.com"
          else "https://geo-api-dev.fourkites.com"
          end
    @address_create_url = "#{url}/api/v1/address?address_manager_feature=true&company_id=#{@company_id}"
    @address_update_url = "#{url}/api/v1/address/%s/?address_manager_feature=true&company_id=#{@company_id}"
    @file_extn = File.extname(@input_file_name)
  end
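  # Validates the command-line arguments, then iterates over the input rows and
  # sends one create/update request per row. Errors are logged to the output CSV
  # instead of aborting the whole run.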
  def process_and_upload(file_types)
    file_types.each { |ft| ft.prepend "." }
    if arguments_correct?(file_types)
      puts "Address upload started successfully. Errors will be saved in #{@output_file_name} after this process completes."
      create_output_file
      process_individual_records do |row_hash, row_num|
        populate_nil_values row_hash
        puts "Row #{row_num} in progress"
        begin
          update_address row_hash, row_num
        rescue => e
          log_error_in_output_file row_num, "Exception happened when executing script. Error - #{e.message}"
        end
      end
    else
      puts "Wrong arguments passed. See below for the correct command:"
      puts "ruby address_upload.rb <input_file_name (#{file_types})> <company_id> <environment (dev/staging/prod/uat)> <user_name> <password> <output csv file name>"
      puts "eg: ruby scripts/updated_address_uploader.rb /Users/username/Desktop/address_upload_3.csv test-shipper prod xxxx@fourkites.com xxxxxx /Users/user/outputs/run1_output.csv"
      puts "Check whether the input file and output file paths are correct."
    end
  end
  def get_tags(address_hash)
    tags = []
    if address_hash[:tags] != ""
      tags = address_hash[:tags].split(/\s*,\s*/) # split on commas, ignoring surrounding whitespace
    end
    tags
  end
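  # Builds the JSON payload for a single address in the shape the geo-api expects.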
  def get_request_payload_json(address_hash)
    geofence_points = get_geofence_coordinates(address_hash[:polygon_coordinates])
    cut_off_time_array, cut_off_time_enabled = get_cut_off_times(address_hash)
    tags = get_tags(address_hash)
    json_hash = { "enabledCheckboxLabel" => "Business hours for this stop", "locationId" => address_hash[:location_id],
                  "curatedAddressLine1" => address_hash[:address_line_1], "curatedAddressLine2" => address_hash[:address_line_2],
                  "curatedCity" => address_hash[:city], "curatedState" => address_hash[:state],
                  "curatedPostal" => address_hash[:zip], "country" => address_hash[:country],
                  "latitude" => address_hash[:latitude], "longitude" => address_hash[:longitude],
                  "name" => address_hash[:stop_name], "unloadTimeInMinutes" => address_hash[:load_unload_time],
                  "tags" => tags, "isCutoffTimeEnabled" => cut_off_time_enabled, "reverseGeocode" => false,
                  "geofencePoints" => geofence_points, "cutoffTimes" => cut_off_time_array }
    # Only send a radius when the cell actually holds a value; blank cells become "" in populate_nil_values.
    unless address_hash[:geofence_radius].to_s.strip.empty?
      json_hash["geofenceRadius"] = convert_miles_to_meter(address_hash[:geofence_radius].to_f)
    end
    json_hash = { address: json_hash }
    json_hash.to_json
  end
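  # Pads "H:MM" times to "HH:MM", e.g. "9:30" becomes "09:30".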
  def get_proper_time_format(time)
    if time.length == 4
      time = "0#{time}"
    end
    time
  end
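  # Converts the per-day open/close columns into the cutoffTimes array. Blank
  # cells default to 00:00/23:59, and a day whose open and close times are both
  # 00:00 is treated as a non-working day.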
  def get_cut_off_times(address_hash)
    is_cut_off_time_enabled = false
    open_close_days_arr = [[:sun_open, :sun_close], [:mon_open, :mon_close], [:tues_open, :tues_close], [:wed_open, :wed_close],
                           [:thurs_open, :thurs_close], [:fri_open, :fri_close], [:sat_open, :sat_close]]
    cut_off_times = []
    0.upto(6) do |day_num|
      start_cut_off_time = address_hash[open_close_days_arr[day_num][0]]
      end_cut_off_time = address_hash[open_close_days_arr[day_num][1]]
      is_cut_off_time_enabled ||= !(start_cut_off_time.empty? && end_cut_off_time.empty?)
      start_cut_off_time = "00:00" if start_cut_off_time.empty?
      end_cut_off_time = "23:59" if end_cut_off_time.empty?
      is_working_day = is_working_day?(start_cut_off_time, end_cut_off_time)
      cut_off_times << { dayEnum: day_num, startCutoffTime: get_proper_time_format(start_cut_off_time),
                         endCutoffTime: get_proper_time_format(end_cut_off_time), isWorkingDay: is_working_day }
    end
    [cut_off_times, is_cut_off_time_enabled]
  end
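  # 1 mile = 1609.344 meters; the result is rounded to 4 decimal places.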
  def convert_miles_to_meter(miles)
    meters = miles * 1.609344 * 1000
    meters.round(4)
  end
  def create_output_file
    CSV.open(@output_file_name, "wb") do |csv|
      csv << ["Row Number", "Status", "Error"]
    end
  end
  def arguments_correct?(file_exten)
    arguments_incorrect = @input_file_name.nil? || !file_exten.include?(@file_extn) || !File.file?(@input_file_name) ||
                          @company_id.nil? || @env.nil? || !['staging', 'prod', 'dev', 'uat'].include?(@env) ||
                          @user_name.nil? || @password.nil? ||
                          @output_file_name.nil? || !File.directory?(File.dirname(@output_file_name))
    !arguments_incorrect
  end
  def populate_nil_values(row_hash)
    row_hash.each do |key, value|
      row_hash[key] = "" if value.nil?
    end
  end
  def log_error_in_output_file(current_row_num, error_msg)
    CSV.open(@output_file_name, "ab") do |csv|
      csv << [current_row_num, "failure", error_msg]
    end
  end
  def is_working_day?(start_cutoff_time, end_cutoff_time)
    !((start_cutoff_time == "00:00" && end_cutoff_time == "00:00") || (start_cutoff_time == "0:00" && end_cutoff_time == "0:00"))
  end
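  # Parses a polygon string such as "(lat1,long1),(lat2,long2)" into an array of
  # {latitude:, longitude:} hashes; returns nil when no polygon is given.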
  def get_geofence_coordinates(geofence_points)
    unless geofence_points.nil?
      coordinates_array = geofence_points.split(",").map do |coordinate|
        if coordinate.start_with? "("
          coordinate[1, coordinate.length - 1]
        else
          coordinate[0, coordinate.length - 1]
        end
      end
      geofence_points = []
      coordinates_array.each_slice(2) { |lat, long| geofence_points << { latitude: lat, longitude: long } }
    end
    geofence_points
  end
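  # POSTs a new address or PUTs an update depending on whether the row carries an
  # address id. Non-200 responses and exceptions are recorded in the output CSV.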
  def send_http_request(request_payload_json, current_row_num, address_id)
    formatted_address_update_url = (address_id == "") ? @address_create_url : sprintf(@address_update_url, address_id)
    uri = URI.parse(formatted_address_update_url)
    http = Net::HTTP.new(uri.host, uri.port)
    if @env != 'uat'
      http.use_ssl = true
      http.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end
    request = (address_id == "") ? Net::HTTP::Post.new(uri.request_uri) : Net::HTTP::Put.new(uri.request_uri)
    request.body = request_payload_json
    request["content-type"] = 'application/json'
    request["cache-control"] = 'no-cache'
    request.basic_auth(@user_name, @password)
    response = http.request(request)
    response_hash = JSON.parse(response.read_body)
    if response_hash["statusCode"] != 200
      error_msg = ""
      if !response_hash["errors"].nil?
        response_hash["errors"].each { |error| error_msg = "#{error} #{error_msg}" }
      elsif !response_hash["error"].nil?
        error_msg = response_hash["error"]
      elsif !response_hash["message"].nil?
        error_msg = response_hash["message"]
      else
        error_msg = "Unknown error. Check input fields and try to create manually"
      end
      log_error_in_output_file current_row_num, error_msg
    end
  rescue StandardError => e
    log_error_in_output_file current_row_num, "Error happened when sending request to server. Error - #{e.message}"
  ensure
    # Sleep for 2 seconds before sending the next HTTP request to reduce load on the server.
    # If batch create is implemented this sleep can be removed.
    sleep 2
  end
  private
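  # Yields (row_hash, row_number) for each data row of the input file. XLSX rows
  # are mapped through HEADING_HASH by Roo; CSV rows are mapped positionally for
  # 'update' runs or by header label for 'create' runs.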
  def process_individual_records
    if @file_extn == ".xlsx"
      spread_sheet = Roo::Spreadsheet.open(@input_file_name)
      spread_sheet.each_with_index(HEADING_HASH) do |row_hash, row_num|
        row_num += 1
        next if row_num == 1 # skip the heading row
        yield(row_hash, row_num)
      end
    elsif @file_extn == ".csv"
      CSV.foreach(@input_file_name, headers: true).with_index do |row, row_num|
        row_num += 2 # account for the heading row and the 0-based index
        row_hash = @method == "create" ? get_row_hash_create(row) : get_row_hash_update(row)
        yield(row_hash, row_num)
      end
    end
  end
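  # Maps a CSV row to a hash by column position using ROW_HEADING; used for
  # 'update' runs, where the first column is the address id.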
  def get_row_hash_update(row)
    row_hash = {}
    row_no = 0
    row.each do |heading, value|
      row_heading = ROW_HEADING[row_no]
      row_hash[row_heading] = value unless row_heading.nil?
      row_no += 1
    end
    row_hash
  end
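  # Maps a CSV row to a hash by header label using HEADING_HASH; used for 'create' runs.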
  def get_row_hash_create(row)
    row_hash = {}
    row.each do |heading, value|
      row_heading = HEADING_HASH.key(heading)
      row_hash[row_heading] = value unless row_heading.nil?
    end
    row_hash
  end
  def update_address(address_hash, current_row_num)
    request_payload_json = get_request_payload_json address_hash
    send_http_request(request_payload_json, current_row_num, address_hash[:address_id])
  end
end
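# Driver snippet for update runs against a CSV input file; 'update' is again
# appended so it becomes the @method argument.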
require_relative './address_upload_util.rb'
address_updater = AddressUploadUtil.new ARGV.push('update')
address_updater.process_and_upload(["csv"])