Last active
April 9, 2018 16:35
-
-
Save denvera/0a4dc6431349b483418f86fecb709094 to your computer and use it in GitHub Desktop.
Download all screencasts currently free (until 10-4-2018) from www.destroyallsoftware.com
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/python
# Download all screencasts currently free (until 10-4-2018) from www.destroyallsoftware.com
# Scrapes the catalog page, then streams each episode's 1080p MP4 into
# a per-season directory named "<season>/<number>. <title>.mp4".
from bs4 import BeautifulSoup
import os
import requests

# Browser-like User-Agent: the site checks it and rejects the default
# python-requests agent string.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36"
}
cookies = {}

# Fetch and parse the screencast catalog page.
html = requests.get("https://www.destroyallsoftware.com/screencasts/catalog",
                    cookies=cookies, headers=headers).text
soup = BeautifulSoup(html, "html.parser")

for season in soup.find_all('div', class_='container season'):
    season_title = season.find('h1', class_="season_title twelve columns").a['name']
    print("Season: {}".format(season_title))
    try:
        os.makedirs(season_title)
    except OSError:
        # Directory already exists; narrowed from a bare except that
        # would also have hidden unrelated failures.
        pass
    for episode in season.find_all('div', class_="episode"):
        # .strip() keeps surrounding whitespace/newlines from the HTML
        # out of the log line and, for number/title, the file name.
        number = episode.find('div', class_="number one column").text.strip()
        title = episode.find('div', class_="title five columns").text.strip()
        subtitle = episode.find('div', class_="subtitle four columns").text.strip()
        link = episode.find('a')['href']
        download_link = "https://www.destroyallsoftware.com{}/download?resolution=1080p".format(link)
        print("Downloading Episode: {}: {} - {} [{}]".format(number, title, subtitle, download_link))
        # Replace characters that are not safe in file names.
        title = title.replace(":", " ").replace("/", " ")
        # Stream the download; the with-block closes the connection
        # (the original leaked the streamed response).
        with requests.get(download_link, cookies=cookies, headers=headers, stream=True) as r:
            if r.status_code == 200:
                with open("{}/{}. {}.mp4".format(season_title, number, title), 'wb') as f:
                    for chunk in r.iter_content(64 * 1024):
                        f.write(chunk)
            else:
                # Report failures instead of skipping them silently.
                print("Failed ({}) for {}".format(r.status_code, download_link))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Hey - It did yesterday when I ran it, I see User-Agent is checked now, so it sets a reasonable looking User-Agent header.
Will also need
pip install BeautifulSoup4
and `pip install requests`