Created
September 6, 2018 15:21
-
-
Save MuddyBootsCode/be61ef92053306ba8828acac3fd33485 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from dev.lib.events_library import * | |
import dev.ac636.globals | |
def explore_stlouis():
    """Scrape the Explore St. Louis events calendar for the next 8 days.

    For each day (today inclusive) this fetches the day's event-listing page,
    extracts each event's name, venue, date, time, price, picture and
    description, follows the event link to read the street address, and
    appends qualifying events (per ``check_date``) to
    ``dev.ac636.globals.entries``. If no event has been collected yet, a day
    is queued via ``manual_review``. A failure while fetching/parsing a day's
    page is logged and that day is skipped (best-effort scraping).
    """
    base_url = 'https://explorestlouis.com/events/'
    venue = 'Explore St. Louis'
    venue_address = 'Multiple'
    venue_city = 'St. Louis'
    venue_default_price = -1  # -1 is unknown, 0 is FREE
    print("Processing venue " + venue)

    # Build the list of the coming 8 days, formatted YYYY-MM-DD as the site
    # expects in its /events/<date> URLs.
    today = datetime.datetime.now()
    day_list = [
        datetime.datetime.strftime(today + datetime.timedelta(days=offset), '%Y-%m-%d')
        for offset in range(8)
    ]

    # NOTE(review): this flag is deliberately NOT reset per day (matches the
    # original behavior): manual_review fires only until the first event is
    # found across the whole window — confirm that is intended.
    has_event = False
    for day in day_list:
        # Build the day URL fresh each iteration. The original mutated
        # venue_url in place and "reset" it with str.replace(day, ""),
        # which is fragile (e.g. if the reset line is ever skipped).
        day_url = base_url + day
        try:
            soup = BeautifulSoup(urllib.request.urlopen(
                urllib.request.Request(
                    day_url,
                    headers={
                        'User-Agent': 'Mozilla/5.0'
                    }
                )
            ).read(), 'lxml')
            for event in soup.find_all('div', class_="type-tribe_events"):
                event_url = event.find('a')['href']
                picture = event.find('img')['src']
                name = event.find('h2', class_="entry-title").text.strip()
                # Keep the per-event venue details in their own variable so
                # the top-level `venue` name used by print/manual_review is
                # not clobbered (it was in the original).
                event_venue = event.find('div', class_="tribe-events-venue-details").text.strip()
                try:
                    price = event.find('div', class_="tribe-events-event-cost").text.strip()
                except AttributeError:
                    # No cost element on the listing -> price unknown.
                    price = venue_default_price
                date_info = parser.parse(
                    soup.find('h2', class_="tribe-events-page-title").text.replace("Events for", ""))
                description = event.find('div', class_="entry-summary").text.strip()
                # Strip the non-breaking space (U+00A0) from the time text.
                # The original replaced the literal two characters "\xa",
                # which never occur, so the NBSP leaked through.
                time = event.find('span', class_="tribe-event-date-start").text.split(",")[-1].replace("\xa0", "")
                if check_date(date_info.year, date_info.month, date_info.day):
                    # Fetch the event's own page for the street address.
                    event_soup = BeautifulSoup(urllib.request.urlopen(
                        urllib.request.Request(
                            event_url,
                            headers={
                                'User-Agent': 'Mozilla/5.0'
                            }
                        )
                    ).read(), 'lxml')
                    venue_address = event_soup.find('span', class_='adr').text.replace("\n", " ")
                    dev.ac636.globals.entries.append({
                        "name": name,
                        "venue": event_venue,
                        "venue_address": venue_address,
                        "date": datetime.datetime.strftime(date_info, '%m/%d/%Y'),
                        "time": time,
                        "venue_city": venue_city,
                        "event_url": event_url,
                        "description": description,
                        "picture": picture,
                        "price": dollar_signs(price),
                    })
                    has_event = True
            if not has_event:
                manual_review(dev.ac636.globals.entries, day_url, venue, venue_address, venue_city)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; scraping stays best-effort per day.
        except Exception:
            print("ERROR: Failed to retrieve calendar: " + day_url)
            print(sys.exc_info())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment