
@AndrewMohawk
Created December 19, 2021 21:41
Download Flickr Public Photos/Videos
#!/usr/bin/python3
import os
import sys
import threading
import urllib.request

import flickr_api

# Flickr API credentials (placeholders) -- replace with your own key/secret.
flickr_api.set_keys(api_key='coolapikey', api_secret='secretsthebest')
def fetch_flickr_user(username):
    """Look up a Flickr user by username, returning None if the lookup fails."""
    try:
        user = flickr_api.Person.findByUserName(username)
        print(f"Fetched user {user.username} {user.realname} {user.id} {user.photosurl}")
        return user
    except Exception as e:
        print(f"User not found: {e}")
        return None
def getLargestSizeVideo(photo):
    """
    Returns the label of the largest available size for the current photo/video.
    """
    # Only consider sizes that match the item's media type (photo vs video).
    sizes = {k: v for k, v in photo.getSizes().items() if v["media"] == photo.media}
    max_size = None
    max_area = None
    for sl, s in sizes.items():
        # Skip entries without dimensions and the streaming "Video Player" entry.
        if s["height"] is None or s["width"] is None or sl == "Video Player":
            continue
        area = int(s["height"]) * int(s["width"])
        if max_area is None or area > max_area:
            max_size = sl
            max_area = area
    if max_size is None:
        # Fall back to a default size label if no measurable size was found.
        if 700 in sizes:
            return 700
        return list(sizes.keys())[-1]
    return max_size
def download_photo(photo, path, thisCount):
    try:
        if photo.media == "video":
            size_label = getLargestSizeVideo(photo)
        else:
            size_label = photo._getLargestSizeLabel()  # largest available size
        photo_file = photo.getPhotoFile(size_label)
        file_ext = '.' + photo_file.split('.')[-1]
        if photo.media == "video":
            # Videos are saved with an .flv extension.
            filename = os.path.join(path, str(thisCount) + "_" + photo.title.replace(" ", "_") + ".flv")
        else:
            filename = os.path.join(path, str(thisCount) + "_" + photo.title.replace(" ", "_") + "-" + size_label.replace(" ", "_") + file_ext)
        if os.path.isfile(filename):
            print(f"\t[!] File {filename} already exists, skipping download")
            return
        # Download the photo/video bytes.
        r = urllib.request.urlopen(photo_file)
        with open(filename, 'wb') as f:
            f.write(r.read())
        # Save metadata (title, EXIF, details) alongside the media file.
        metaInfo = filename + ".txt"
        with open(metaInfo, "w") as text_file:
            text_file.write(f"\nINFO {photo.title}\n")
            text_file.write("*" * 50)
            exif = "".join(f"{e['tag']}: {e['raw']}\n" for e in photo.getExif())
            text_file.write("\nEXIF\n")
            text_file.write("*" * 50 + "\n")
            text_file.write(str(exif))
            details = photo.getInfo()
            text_file.write("\nDETAILS\n")
            text_file.write("*" * 50 + "\n")
            text_file.write(str(details))
        print(f"[+] Downloaded photo {photo.title} count: {thisCount}\n\t[-] Image: {filename}\n\t[-] Metadata: {metaInfo}")
        return True
    except Exception as e:
        print(f"Error trying to get {photo}: {e}")
def fetch_all_public_photos(user, page=1, totalcount=0, path="."):
    """Download every public photo/video for a user, 50 per page, one thread per item."""
    photos = user.getPublicPhotos(per_page=50, page=page)
    if page == 1:
        print(f"Found {photos.info.total} photos over {photos.info.pages} pages of {photos.info.perpage} per page.. starting threads for each download")
        print("-" * 50)
    else:
        print("-" * 50)
        print(f"Starting page {page} of {photos.info.pages} ({totalcount} photos downloaded of {photos.info.total} ({round(totalcount / photos.info.total * 100, 2)}%))")
        print("-" * 50)
    threads = []
    for photo in photos:
        totalcount += 1
        t = threading.Thread(target=download_photo, args=(photo, path, totalcount))
        t.start()
        threads.append(t)
    # Wait for the current page's downloads to finish before fetching the next page.
    for t in threads:
        t.join()
    if photos.info.pages > page:
        fetch_all_public_photos(user, page + 1, totalcount, path)
    return photos
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Usage: flickrdownload.py <username> [path]")
        sys.exit(1)
    username = sys.argv[1]
    path = "."
    if len(sys.argv) > 2:
        path = sys.argv[2]
    print(f"Fetching username {username} and saving to {path}...")
    user = fetch_flickr_user(username)
    if user:
        print(f"Fetching all public photos for {username}")
        fetch_all_public_photos(user, path=path, page=1)
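
For reference, a minimal sketch of the flickr_api calls the script builds on, run interactively without downloading anything. It assumes valid API credentials have been set and that 'someflickruser' is replaced with a real public account name (both are placeholders, not values from the gist):

import flickr_api

# Placeholder credentials -- substitute your own Flickr API key/secret.
flickr_api.set_keys(api_key='coolapikey', api_secret='secretsthebest')

# Look up a user and list the first page of their public items.
user = flickr_api.Person.findByUserName('someflickruser')  # hypothetical username
photos = user.getPublicPhotos(per_page=10, page=1)
print(f"{photos.info.total} public items across {photos.info.pages} pages")
for photo in photos:
    print(photo.media, photo.title)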