@shivapoudel
Last active June 19, 2021 13:39
Scraped wallpapers from Quotefancy.com
import os
import shutil

import requests
from bs4 import BeautifulSoup

# URL handles to scrape.
url_handles = [
    'motivational-quotes',
    'inspirational-entrepreneurship-quotes',
    'startup-quotes',
    'albert-einstein-quotes',
    'steve-jobs-quotes',
    'swami-vivekananda-quotes',
    'robin-s-sharma-quotes',
    'elon-musk-quotes',
    'focus-quotes',
    'zig-ziglar-quotes',
    'inspirational-quotes',
]

# The root directory for the downloaded wallpapers.
root_dir = 'Quotefancy'
if not os.path.exists(root_dir):
    os.mkdir(root_dir)

count = 0
for url_handle in url_handles:
    response = requests.get('https://quotefancy.com/' + url_handle)
    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        hyperlinks = soup.find_all('a')

        # Collect the wallpaper IDs embedded in the linked <img> tags.
        wallpapers_ids = []
        for link in hyperlinks:
            img = link.find('img')
            if img and img.get('data-wallpaper-id'):
                wallpapers_ids.append(img['data-wallpaper-id'])

        # Download every wallpaper that is not already on disk.
        for wallpapers_id in wallpapers_ids:
            file_name = root_dir + '-' + wallpapers_id + '-3840x2160.jpg'
            file_path = os.path.join(root_dir, file_name)
            if not os.path.isfile(file_path):
                response = requests.get('https://quotefancy.com/download/' + wallpapers_id + '/original/wallpaper.jpg', stream=True)
                if response.status_code == 200:
                    with open(file_path, 'wb') as out_file:
                        shutil.copyfileobj(response.raw, out_file)
                    count += 1
                del response

print('All', count, 'wallpapers were successfully scraped!')
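The script above issues a bare requests.get for every page and every wallpaper. As a minimal sketch, assuming the same https://quotefancy.com/download/<id>/original/wallpaper.jpg URL scheme used in the gist, the download step could be wrapped in a small helper that adds a timeout, a User-Agent header, and a simple retry with backoff. The helper name download_wallpaper and its parameters are hypothetical and only for illustration; they are not part of the original gist.

import os
import shutil
import time

import requests

def download_wallpaper(wallpaper_id, dest_path, retries=3, timeout=10):
    # Hypothetical defensive variant of the download step in the gist.
    url = 'https://quotefancy.com/download/' + wallpaper_id + '/original/wallpaper.jpg'
    headers = {'User-Agent': 'Mozilla/5.0 (wallpaper scraper)'}
    for attempt in range(retries):
        try:
            response = requests.get(url, headers=headers, stream=True, timeout=timeout)
            if response.status_code == 200:
                with open(dest_path, 'wb') as out_file:
                    shutil.copyfileobj(response.raw, out_file)
                return True
        except requests.RequestException:
            pass  # network error or timeout; fall through to the retry
        time.sleep(2 ** attempt)  # back off before the next attempt
    return False

# Example usage with a made-up wallpaper ID:
# download_wallpaper('26643', os.path.join('Quotefancy', 'Quotefancy-26643-3840x2160.jpg'))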