Skip to content

Instantly share code, notes, and snippets.

Forked from miguelgrinberg/pycon-stats.py
Last active August 29, 2015 14:26
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Star You must be signed in to star a gist
Save dunglehome/dd54cde9cc931e1246a1 to your computer and use it in GitHub Desktop.
Generate statistics about PyCon 2014 videos
import argparse
import re
from multiprocessing.pool import ThreadPool as Pool
import requests
import bs4
# Base URL of the PyVideo site (the paste had stripped the literal; restored
# here).  index_url points at the PyCon US 2014 category listing page.
root_url = 'http://pyvideo.org'
index_url = root_url + '/category/50/pycon-us-2014'
def get_video_page_urls():
    """Return the relative URLs of all video detail pages on the index page.

    Fetches ``index_url`` and collects the ``href`` of every anchor inside a
    video-summary block.  (The ``soup.select(...)`` call head had been
    stripped by the paste and is restored here.)
    """
    response = requests.get(index_url)
    soup = bs4.BeautifulSoup(response.text)
    # Video detail links all start with /video; attrs.get() returns None
    # instead of raising on a malformed anchor with no href.
    return [a.attrs.get('href')
            for a in soup.select('div.video-summary-data a[href^=/video]')]
def get_video_data(video_page_url):
    """Scrape one video's PyVideo page plus its YouTube page.

    Returns a dict with keys: title, speakers (list of names), youtube_url,
    views, likes, dislikes.  The ``soup.select(...)`` call heads and the
    YouTube href prefix had been stripped by the paste and are restored here.
    """
    video_data = {}
    response = requests.get(root_url + video_page_url)
    soup = bs4.BeautifulSoup(response.text)
    video_data['title'] = soup.select('div#videobox h3')[0].get_text()
    video_data['speakers'] = [
        a.get_text()
        for a in soup.select('div#sidebar a[href^=/speaker]')]
    # Initialize the counters so the dict is complete even when the YouTube
    # scrape below cannot find them.
    video_data['views'] = 0
    video_data['likes'] = 0
    video_data['dislikes'] = 0
    # The sidebar links out to YouTube; the anchor text is the URL itself.
    video_data['youtube_url'] = soup.select(
        'div#sidebar a[href^=http://www.youtube.com]')[0].get_text()
    # A desktop User-Agent is needed or YouTube serves a page layout that
    # lacks the counter elements scraped below.
    response = requests.get(
        video_data['youtube_url'],
        headers={'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) '
                               'AppleWebKit/537.36 (KHTML, like Gecko) '
                               'Chrome/32.0.1700.77 Safari/537.36'})
    soup = bs4.BeautifulSoup(response.text)
    # NOTE(review): the pre-initialized counters plus the original
    # "some or all of the counters could not be scraped" comment indicate a
    # try/except was lost in the paste; restored here.
    try:
        video_data['views'] = int(re.sub(
            '[^0-9]', '',
            soup.select('.watch-view-count')[0].get_text().split()[0]))
        video_data['likes'] = int(re.sub(
            '[^0-9]', '',
            soup.select('#watch-like-dislike-buttons')[0].get_text().split()[0]))
        video_data['dislikes'] = int(re.sub(
            '[^0-9]', '',
            soup.select('#watch-like-dislike-buttons')[2].get_text().split()[0]))
    except IndexError:
        # some or all of the counters could not be scraped
        pass
    return video_data
def parse_args():
    """Define and evaluate the command-line interface.

    Returns the parsed argparse.Namespace with attributes
    sort, max, csv and workers.
    """
    cli = argparse.ArgumentParser(
        description='Show PyCon 2014 video statistics.')
    cli.add_argument(
        '--sort', metavar='FIELD', choices=['views', 'likes', 'dislikes'],
        help='sort by the specified field. Options are views, likes and dislikes.')
    cli.add_argument(
        '--max', metavar='MAX', type=int,
        help='show the top MAX entries only.')
    cli.add_argument(
        '--csv', action='store_true', default=False,
        help='output the data in CSV format.')
    cli.add_argument(
        '--workers', type=int, default=8,
        help='number of workers to use, 8 by default.')
    return cli.parse_args()
def show_video_stats(options):
    """Fetch stats for every video concurrently and print a report.

    options: argparse.Namespace from parse_args() with attributes
    sort, max, csv and workers.
    """
    # A thread pool is appropriate: the work is network-bound scraping, so
    # the GIL is released while each worker waits on HTTP responses.
    pool = Pool(options.workers)
    video_page_urls = get_video_page_urls()
    results = pool.map(get_video_data, video_page_urls)
    # --sort is optional; sorting with a None key would raise KeyError in
    # the lambda, so only sort when a field was requested (descending).
    if options.sort:
        results = sorted(results, key=lambda video: video[options.sort],
                         reverse=True)
    # 'limit' rather than 'max' to avoid shadowing the builtin; clamp to
    # the number of results actually fetched.
    limit = options.max
    if limit is None or limit > len(results):
        limit = len(results)
    if options.csv:
        print(u'"title","speakers", "views","likes","dislikes"')
    else:
        print(u'Views +1 -1 Title (Speakers)')
    for i in range(limit):
        if options.csv:
            print(u'"{0}","{1}",{2},{3},{4}'.format(
                results[i]['title'], ', '.join(results[i]['speakers']),
                results[i]['views'], results[i]['likes'],
                results[i]['dislikes']))
        else:
            print(u'{0:5d} {1:3d} {2:3d} {3} ({4})'.format(
                results[i]['views'], results[i]['likes'],
                results[i]['dislikes'], results[i]['title'],
                ', '.join(results[i]['speakers'])))

if __name__ == '__main__':
    show_video_stats(parse_args())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment