Skip to content

Instantly share code, notes, and snippets.

Last active February 12, 2018 08:07
  • Star 0 You must be signed in to star a gist
  • Fork 2 You must be signed in to fork a gist
Star You must be signed in to star a gist
Save scrapehero/fbf04332e2a26c326dab9b53e23a8dee to your computer and use it in GitHub Desktop.
Python 2 code to extract store locations from
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Written as part of
import requests
from lxml import html
import re
from time import time
import json
import argparse
def get_store(store):
    """Normalize one raw store record from the locator JSON API.

    :param store: dict with keys 'Name', 'OperatingHours', 'Address'
        and 'TelephoneNumber' (shapes as observed in the API response).
    :returns: flat dict of the fields of interest.
    """
    store_name = store['Name']
    store_timings = store['OperatingHours']['Hours']
    address = store['Address']
    street = address['AddressLine1']
    city = address['City']
    county = address.get('County')
    zipcode = address['PostalCode']
    state = address['Subdivision']
    country = address.get('CountryName')
    # 'TelephoneNumber' comes back as a list of entries for some stores
    # and as a plain dict for others -- the original code had both forms
    # (one silently overriding the other); handle both explicitly.
    phone = store['TelephoneNumber']
    if isinstance(phone, list):
        contact = phone[0]['PhoneNumber'] if phone else None
    else:
        contact = phone.get('PhoneNumber')
    open_timing = []
    stores_open = []  # kept for output-schema compatibility; never populated here
    for store_timing in store_timings:
        timing = store_timing['TimePeriod']['Summary']
        week_day = store_timing['FullName']
        open_timing.append({"Week Day": week_day, "Open Hours": timing})
    data = {
        'Store_Name': store_name,
        'Street': street,
        'City': city,
        'County': county,
        'Zipcode': zipcode,
        'State': state,
        'Contact': contact,
        'Timings': open_timing,
        'Stores_Open': stores_open,
        'Country': country,
    }
    return data
def parse(zipcode):
    """Fetch and parse store locations near *zipcode*.

    NOTE(review): the base URLs were stripped when this gist was captured --
    both '{0}...' format strings below are missing their scheme/host/path
    prefix and must be restored before this can run.

    :param zipcode: zip code string to search around.
    :returns: list of store dicts (see ``get_store``), or None if the
        listing response contained no parseable JSON.
    """
    # First request the front page to scrape the access key needed by
    # the store-listing endpoint.
    stores_url = '{0}&capabilities=&concept='.format(zipcode)
    front_page_response = requests.get(stores_url)
    raw_access_key = re.findall(r"accesskey\s+?\:\"(.*)\"", front_page_response.text)
    if not raw_access_key:
        # Original code printed this and then crashed on an undefined
        # name; bail out early instead.
        print("Access key not found")
        return []
    accesskey = raw_access_key[0]
    access_time = int(time())
    stores_listing_url = '{0}&range=100&locale=en-US&key={1}&callback=jQuery2140816666152355445_1500385885308&_={2}'.format(zipcode, accesskey, access_time)
    listing_response = requests.get(stores_listing_url)
    # The endpoint returns JSONP: strip the jQuery callback wrapper.
    content = re.findall(r"\((.*)\)", listing_response.text)
    locations = []
    try:
        json_data = json.loads(content[0])
        total_stores = json_data['Locations']['@count']
        if total_stores != 0:
            stores = json_data["Locations"]["Location"]
            if total_stores > 1:
                # Multiple locations: a list of store dicts.
                for store in stores:
                    locations.append(get_store(store))
            else:
                # Single location: the API returns a bare dict, not a list.
                locations.append(get_store(stores))
        return locations
    except (ValueError, IndexError):
        # ValueError: malformed JSON; IndexError: no "(...)" wrapper found.
        print("No json content found in response")
if __name__ == "__main__":
    # Command-line entry point: `python script.py <zipcode>` writes the
    # scraped locations to <zipcode>-locations.json in the current dir.
    argparser = argparse.ArgumentParser()
    argparser.add_argument('zipcode', help='Zip code')
    args = argparser.parse_args()
    zipcode = args.zipcode
    print("Fetching Location details")
    scraped_data = parse(zipcode)
    print("Writing data to output file")
    with open('%s-locations.json' % (zipcode), 'w') as fp:
        json.dump(scraped_data, fp, indent=4)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment