Skip to content

Instantly share code, notes, and snippets.

@antonydevanchi
Last active March 6, 2022 04:56
Show Gist options
  • Star 6 You must be signed in to star a gist
  • Fork 2 You must be signed in to fork a gist
  • Save antonydevanchi/56be1c739e47c72e11ddabf853ddc6db to your computer and use it in GitHub Desktop.
Save antonydevanchi/56be1c739e47c72e11ddabf853ddc6db to your computer and use it in GitHub Desktop.
Download all images from undraw.co/illustrations
# Prerequisite: install svgexport globally via npm ("npm -g i svgexport").
# Recolor while converting: every SVG element filled with undraw's default
# purple (#6c63ff) is repainted #ff6347 in the exported PNG.
svgexport hiking_d24r.svg hiking_d24r.png "svg *[fill=\"#6c63ff\"]{fill: #ff6347}"
# One-liner: fetch listing pages 1-29, extract the image URLs with pup2 + jq,
# and hand each URL to aria2c to download into ./imgs.
for ((i=1;i<30;i++)); do curl -s https://undraw.co/illustrations/load/$i | pup2 'a[data-src] json{}' | jq '.[] .children[] .children[] .children[] .src' -r -M | xargs -i aria2c {} -d ./imgs; done;
@WesleyBatista
Copy link

The snippet is not working anymore...

new version using jq, sed, xargs and curl:

for ((i=1;i<60;i++)); do curl -s https://undraw.co/api/illustrations\?page\=$i | jq '.illustrations[] | (.title, .image)' -r -M | sed -e 's/\(.*\)/\L\1/; s/\ /_/g'|xargs -n2 -L2 bash -c 'curl --silent --output ./imgs/$1 $2 > /dev/null' bash;done;

ℹ️ to change the default color of the images replace #6c63ff with whatever color you want.

@antonydevanchi
Copy link
Author

@WesleyBatista thank you for the contributed fix!

@wohfab
Copy link

wohfab commented Mar 24, 2020

Running on MacOS with jq, sed, xargs, curl installed and getting the following error:

parse error: Invalid numeric literal at line 1, column 10

@heypoom
Copy link

heypoom commented Apr 2, 2020

I can't get the above script to work, so here's my Python script. The requests dependency is required.

#!/usr/bin/env python3

import os
import json
import requests
from multiprocessing.pool import ThreadPool

def build_index():
    """Collect every illustration from the undraw.co JSON API.

    Pages through https://undraw.co/api/illustrations until the API reports
    hasMore == False.

    Returns:
        list of [title, url] pairs, one per illustration.
    """
    page = 1
    urls = []

    while True:
        res = requests.get("https://undraw.co/api/illustrations?page={}".format(page))
        json_body = res.json()

        for item in json_body['illustrations']:
            title = item['title']
            url = item['image']

            print("Title: %s => URL: %s" % (title, url))
            urls.append([title, url])

        # Bug fix: check hasMore BEFORE reading nextPage.  On the final page
        # nextPage may be null, and the original's '"%d" % page' would raise
        # a TypeError before the hasMore check was ever reached.
        if not json_body['hasMore']:
            print("Finished Gathering JSON.")
            return urls

        page = json_body['nextPage']
        print("Proceeding to Page %d" % page)
        
def download_from_entry(entry):
    """Download one illustration to ./images/.

    Args:
        entry: a [title, url] pair as produced by build_index().

    Returns:
        The file name on success, or None when the file already exists or
        the server does not answer with HTTP 200.
    """
    title, url = entry
    file_name = "%s.svg" % title.lower().replace(' ', '_')
    path = "./images/%s" % file_name

    # Make sure the target directory exists; the original crashed on open()
    # if ./images was missing.
    os.makedirs("./images", exist_ok=True)

    print("Downloading %s" % file_name)

    # Bug fix: the original tested os.path.exists(file_name) but wrote to
    # ./images/<file_name>, so the skip-if-present check never matched.
    if not os.path.exists(path):
        res = requests.get(url, stream=True)

        # Bug fix: 'is 200' compares object identity (a CPython int-caching
        # accident); '==' is the correct value comparison.
        if res.status_code == 200:
            with open(path, 'wb') as f:
                for chunk in res:
                    f.write(chunk)

            return file_name

# Driver: gather the index, then fan the downloads out over 20 threads.
urls = build_index()

print("Downloading %d files." % len(urls))

# imap_unordered yields each result as soon as its download finishes.
results = ThreadPool(20).imap_unordered(download_from_entry, urls)

# Bug fix: download_from_entry returns None for skipped or failed entries;
# the original printed "Downloaded None" for those and always reported
# len(urls) as the final count even when downloads failed.
downloaded = 0
for path in results:
    if path is not None:
        downloaded += 1
        print("Downloaded %s" % path)

print("Downloaded %d files." % downloaded)

@wohfab
Copy link

wohfab commented Apr 3, 2020

I can't get the above script to work, so here's my Python script. The requests dependency is required.

This is great. Thank you very much! Is there a way to change the SVG fill color on request, like in the original script?

@Polyterative
Copy link

Polyterative commented May 12, 2021

I was able to get the script to work by changing the JSON key used for the illustrations list (the API now returns it under 'illos').

Thanks @heypoom

#!/usr/bin/env python3

import os
import json
import requests
from multiprocessing.pool import ThreadPool


def build_index():
    """Collect every illustration from the undraw.co JSON API.

    Pages through https://undraw.co/api/illustrations, reading the 'illos'
    array of each response, until the API reports hasMore == False.

    Returns:
        list of [title, url] pairs, one per illustration.
    """
    page = 1
    URLs = []

    while True:
        res = requests.get("https://undraw.co/api/illustrations?page={}".format(page))
        json_body = res.json()

        for item in json_body['illos']:
            title = item['title']
            url = item['image']

            print("Title: %s => URL: %s" % (title, url))
            URLs.append([title, url])

        # Bug fix: test hasMore BEFORE touching nextPage.  On the last page
        # nextPage may be null, and '"%d" % page' would raise a TypeError
        # before the loop could terminate normally.
        if not json_body['hasMore']:
            print("Finished Gathering JSON.")
            return URLs

        page = json_body['nextPage']
        print("Proceeding to Page %d" % page)


def download_from_entry(entry):
    """Download one illustration to ./images/.

    Args:
        entry: a [title, url] pair as produced by build_index().

    Returns:
        The file name on success, or None when the file already exists or
        the server does not answer with HTTP 200.
    """
    title, url = entry
    file_name = "%s.svg" % title.lower().replace(' ', '_')
    path = "./images/%s" % file_name

    # The original crashed on open() when ./images did not exist yet.
    os.makedirs("./images", exist_ok=True)

    print("Downloading %s" % file_name)

    # Bug fix: the original checked os.path.exists(file_name) but wrote to
    # ./images/<file_name>, so already-downloaded files were never skipped.
    if not os.path.exists(path):
        res = requests.get(url, stream=True)

        if res.status_code == 200:
            with open(path, 'wb') as f:
                for chunk in res:
                    f.write(chunk)

            return file_name


# Driver: gather the index, then fan the downloads out over 20 threads.
urls = build_index()

print("Downloading %d files." % len(urls))

# imap_unordered yields each result as soon as its download finishes.
results = ThreadPool(20).imap_unordered(download_from_entry, urls)

# Bug fix: download_from_entry returns None for skipped or failed entries;
# the original printed "Downloaded None" for those and always reported
# len(urls) as the final count even when downloads failed.
downloaded = 0
for path in results:
    if path is not None:
        downloaded += 1
        print("Downloaded %s" % path)

print("Downloaded %d files." % downloaded)

@antonydevanchi
Copy link
Author

Community — so sweet ^_^
Thank you!

@rseyferth
Copy link

rseyferth commented Feb 1, 2022

This updated one-liner worked for me:

for ((i=1;i<66;i++)); do curl -s https://undraw.co/api/illustrations\?page\=$i | jq '.illos[] | (.title, .image)' -r -M | sed -e 's/\(.*\)/\1/; s/\ /_/g'|xargs -n2 -L2 bash -c 'curl --silent --output ./$1.svg $2 > /dev/null' bash;done;

@isaacgr
Copy link

isaacgr commented Mar 6, 2022

I can't get the above script to work, so here's my Python script. The requests dependency is required.

#!/usr/bin/env python3

import os
import json
import requests
from multiprocessing.pool import ThreadPool

def build_index():
    """Collect every illustration from the undraw.co JSON API.

    Pages through https://undraw.co/api/illustrations until the API reports
    hasMore == False.

    Returns:
        list of [title, url] pairs, one per illustration.
    """
    page = 1
    urls = []

    while True:
        res = requests.get("https://undraw.co/api/illustrations?page={}".format(page))
        json_body = res.json()

        # Bug fix: the response key is 'illos', not 'illustrations' — the
        # original raised KeyError against the current API.
        for item in json_body['illos']:
            title = item['title']
            url = item['image']

            print("Title: %s => URL: %s" % (title, url))
            urls.append([title, url])

        # Bug fix: check hasMore BEFORE reading nextPage.  On the final page
        # nextPage may be null, and '"%d" % page' would raise a TypeError
        # before the hasMore check was reached.
        if not json_body['hasMore']:
            print("Finished Gathering JSON.")
            return urls

        page = json_body['nextPage']
        print("Proceeding to Page %d" % page)
        
def download_from_entry(entry):
    """Download one illustration to ./images/.

    Args:
        entry: a [title, url] pair as produced by build_index().

    Returns:
        The file name on success, or None when the file already exists or
        the server does not answer with HTTP 200.
    """
    title, url = entry
    file_name = "%s.svg" % title.lower().replace(' ', '_')
    path = "./images/%s" % file_name

    # The original crashed on open() when ./images did not exist yet.
    os.makedirs("./images", exist_ok=True)

    print("Downloading %s" % file_name)

    # Bug fix: the original tested os.path.exists(file_name) but wrote to
    # ./images/<file_name>, so the skip-if-present check never matched.
    if not os.path.exists(path):
        res = requests.get(url, stream=True)

        # Bug fix: 'is 200' compares object identity (a CPython int-caching
        # accident); '==' is the correct value comparison.
        if res.status_code == 200:
            with open(path, 'wb') as f:
                for chunk in res:
                    f.write(chunk)

            return file_name

# Driver: gather the index, then fan the downloads out over 20 threads.
urls = build_index()

print("Downloading %d files." % len(urls))

# imap_unordered yields each result as soon as its download finishes.
results = ThreadPool(20).imap_unordered(download_from_entry, urls)

# Bug fix: download_from_entry returns None for skipped or failed entries;
# the original printed "Downloaded None" for those and always reported
# len(urls) as the final count even when downloads failed.
downloaded = 0
for path in results:
    if path is not None:
        downloaded += 1
        print("Downloaded %s" % path)

print("Downloaded %d files." % downloaded)

This worked. Great stuff. Only thing is it should be
for item in json_body['illos']:

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment