Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
The source code from this video on building a Twitter bot: https://youtu.be/fA2SVTIYKes
from bs4 import BeautifulSoup
import json
# with open('all_eps.json','r') as out:
# all_eps = json.load(out)
all_eps = json.load(open('all_eps.json'))
all_tweets = []
for ep in all_eps:
print(ep['long_title'])
soup = BeautifulSoup(ep['script_html'], 'html.parser')
pre = soup.find('pre')
if pre != None:
all_elements = list(pre.children)
dialog_counter = 0
niles_positions = []
for dialog in all_elements:
if '<b>Niles:' in str(dialog):
niles_positions.append(dialog_counter)
dialog_counter = dialog_counter + 1
for niles_position in niles_positions:
if '<b>Frasier:' in str(all_elements[niles_position+2]):
niles_text = str(all_elements[niles_position+1])
frasier_text = str(all_elements[niles_position+3])
if len(niles_text) > 3 and len(frasier_text) > 3:
niles_text = " ".join(niles_text.split())
frasier_text = " ".join(frasier_text.split())
tweet_text = f"Niles: {niles_text}\n\nFrasier: {frasier_text}\n\nS{ep['season']}E{ep['episode']} - {ep['title']}"
if len(tweet_text) <= 280:
all_tweets.append(tweet_text)
print(tweet_text)
print('--------------')
json.dump(all_tweets,open('all_tweets.json','w'),indent=2)
import tweepy # installed via pip
import json # local package
import random # built in
def lambda_handler(event, context):
    """AWS Lambda entry point: post one random pre-built Frasier tweet.

    Loads the tweet pool from all_tweets.json (bundled with the deployment
    package), picks one at random, and posts it to Twitter via tweepy.
    `event` and `context` are the standard Lambda arguments; neither is used.

    Returns a dict with 'statusCode' and a JSON-encoded 'body' message.
    """
    # Use a context manager so the file handle is closed promptly
    # (json.load(open(...)) leaks the handle until GC).
    with open('all_tweets.json') as infile:
        data = json.load(infile)
    random_tweet = random.choice(data)

    # SECURITY: hard-coded placeholder credentials. In production these
    # should come from environment variables or AWS Secrets Manager.
    consumer_key = "xxx"
    consumer_secret = "xxx"
    access_token = "xxx"
    access_token_secret = "xxx"

    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    tweet_post_result = api.update_status(random_tweet)

    return {
        'statusCode': 200,
        'body': json.dumps('It worked!!')
    }
import requests
from bs4 import BeautifulSoup
import re
import json
# Scrape every Frasier transcript from kacl780.net into all_eps.json.
# For each episode listed on the index page, fetch its page, locate the
# transcript's <pre> element, and store its prettified HTML.

r = requests.get('http://www.kacl780.net/frasier/transcripts/')
soup = BeautifulSoup(r.text, 'html.parser')

all_eps = []
for div in soup.find_all('div', {'class': 'SeasonList'}):
    for li in div.find_all('li'):
        # Each <li> begins with a "SEASON.EPISODE" text node, then the link.
        label = list(li.children)[0]
        season = label.split('.')[0]
        episode = label.split('.')[1]

        script_link = li.find('a')
        url = 'http://www.kacl780.net' + script_link['href']
        ep_data = {
            'season': season,
            'episode': episode,
            'script_url': url,
            'title': script_link.text,
            'long_title': script_link['title'],
            'script_html': ''
        }

        # Fetch the episode page and locate the transcript heading.
        print('doing', url)
        ep_r = requests.get(url)
        ep_soup = BeautifulSoup(ep_r.text, 'html.parser')
        h2 = ep_soup.find('h2', text=re.compile(r'^Transcript'))
        if h2 is None:
            # Some episode pages use an alternate heading.
            h2 = ep_soup.find('h2', text=re.compile(r'^Quotes & Scene Summary'))

        if h2 is None:
            print("Hey error with this one:")
            print(ep_data)
        else:
            # The <pre> is usually 1-3 siblings after the heading (whitespace
            # text nodes in between). Walk up to three siblings looking for it,
            # stopping safely if the document ends first.
            pre = None
            sibling = h2.nextSibling
            for _ in range(3):
                if sibling is None:
                    break
                if sibling.name == 'pre':
                    pre = sibling
                    break
                sibling = sibling.nextSibling

            if pre is not None:
                ep_data['script_html'] = pre.prettify()
            else:
                print('~~~~~~')
                print('PROBLEMS ERRRO OH NO!!!')
                print(ep_data)
                print('~~~~~~')

        all_eps.append(ep_data)

with open('all_eps.json', 'w') as out:
    json.dump(all_eps, out, indent=2)
import requests # installed via pip
import tweepy # installed via pip
import json # local package
def parse_title(recipe):
    """Extract a clean recipe name from a markdown recipe string.

    Takes the first line, strips markdown '#' heading markers, drops any
    parenthesized qualifier, and trims surrounding whitespace.

    Example: "# Hard Corn Shells (Traditional; US)" -> "Hard Corn Shells"
    """
    heading = recipe.split('\n', 1)[0].replace('#', '')
    return heading.split('(', 1)[0].strip()
def lambda_handler(event, context):
    """AWS Lambda entry point: tweet a randomly generated taco recipe.

    Fetches a random taco from the tacofancy API, assembles a description
    from its five components, truncates the description if the full tweet
    would exceed Twitter's 280-character limit, and posts it via tweepy.
    `event` and `context` are the standard Lambda arguments; neither is used.

    Returns a dict with 'statusCode' and a JSON-encoded 'body' message.
    """
    r = requests.get('http://taco-randomizer.herokuapp.com/random/')
    data = json.loads(r.text)

    seasoning = parse_title(data['seasoning']['recipe'])
    base_layer = parse_title(data['base_layer']['recipe'])
    condiment = parse_title(data['condiment']['recipe'])
    mixin = parse_title(data['mixin']['recipe'])
    shell = parse_title(data['shell']['recipe'])

    # Build the description once; the original rebuilt the identical string
    # inside the truncation branch.
    attribution = '\n\n -- https://github.com/evz/tacofancy-api'
    description = f"{base_layer} with {seasoning}, {condiment} and {mixin} in {shell} "
    tweet_string = description + attribution
    if len(tweet_string) > 280:
        # Keep the first 230 chars of the description plus an ellipsis so the
        # attribution link always fits within the limit.
        tweet_string = description[0:230] + '...' + attribution

    # SECURITY: hard-coded placeholder credentials. In production these
    # should come from environment variables or AWS Secrets Manager.
    consumer_key = "xxx"
    consumer_secret = "xxx"
    access_token = "xxx"
    access_token_secret = "xxx"

    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    tweet_post_result = api.update_status(tweet_string)

    return {
        'statusCode': 200,
        'body': json.dumps('It worked!!')
    }
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment