A Soundcloud playlist scraper for use with Sinusbot. Makes adding bulk lists easier.
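To use it, fill in the four constants near the top of the script (the Soundcloud user_id and client_id, plus the Sinusbot URL and bearer token), then run the file with Python 3; requests is the only third-party dependency.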
#!/usr/bin/python3 -u
"""
A Soundcloud playlist scraper for use with Sinusbot. Makes adding bulk lists easier.
Why pull from YouTube when Soundcloud has album art? It's much nicer. Perhaps better quality; probably not.
Written by: Dooley_labs <dooleylabs.com> | https://gist.github.com/elderlabs/c72df033169322fa069eb9564d7d13bb
"""
import json
import requests
import signal
from http.client import responses
from time import sleep
SOUNDCLOUD_USER_ID = '### your Soundcloud user_id goes here; find it in the network tab of inspect element ###'
SOUNDCLOUD_CLIENT_ID = '### your Soundcloud client_id token goes here; find it in the network tab of inspect element ###'
SINUSBOT_URL = 'http://127.0.0.1:8087'
SINUSBOT_TOKEN = '### your Sinusbot user authorization bearer token goes here ###'
LOGO = '''
  _____    _____   _____    _
 / ____|  / ____| |  __ \  | |
| (___   | |      | |  | | | |
 \___ \  | |      | |  | | | |
 ____) | | |____  | |__| | | |____
|_____/   \_____| |_____/  |______|

Soundcloud Download - for Sinusbot
------------------------------------
'''

def scdl():
    print(LOGO)
    # Pull the account's liked tracks from the Soundcloud v2 API
    sc_headers = requests.utils.default_headers()
    sc_headers.update({'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36'})
    r = requests.get(
        f'https://api-v2.soundcloud.com/users/{SOUNDCLOUD_USER_ID}/track_likes?client_id={SOUNDCLOUD_CLIENT_ID}&limit=200&offset=0&linked_partitioning=1', headers=sc_headers)
    if r.status_code != 200:
        return print(f' ERROR | Soundcloud API: {r.status_code} - {responses[r.status_code]} - Failed to pull liked tracks')
    else:
        print(' INFO | Soundcloud API: Pulled liked tracks')
    data = r.json()['collection']
    url_list = []
    for x in data:
        url_list.append(x['track']['permalink_url'])
    # Queue each track as a download job on the Sinusbot instance
    i = 0
    sinusbot_session = requests.Session()
    sinusbot_session.headers = requests.utils.default_headers()
    sinusbot_session.headers.update({'Authorization': f'Bearer {SINUSBOT_TOKEN}', 'Content-Type': 'application/json'})
    for x in url_list:
        r = sinusbot_session.post(f'{SINUSBOT_URL}/api/v1/bot/jobs', json={'url': x})
        i += 1
        # Pause for 10 seconds after every 5 jobs so the bot isn't flooded
        if i == 5:
            for y in range(10, 0, -1):
                print(f' INFO | SCDL Worker: Will resume job creation in {y}s', end=" \r", flush=True)
                sleep(1)
            i = 0
        if r.status_code != 200:
            print(f' ERROR | Sinusbot API: {r.status_code} - {responses[r.status_code]} - {x}')
        else:
            print(f' INFO | Sinusbot API: 200 - {x}')

def shutdown(signum, frame):
    print('\nExited')
    exit(0)

if __name__ == '__main__':
    signal.signal(signal.SIGINT, shutdown)
    signal.signal(signal.SIGUSR1, shutdown)
    scdl()
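As written, the request in scdl() only asks for the first page of likes (limit=200, offset=0), so profiles with more than 200 liked tracks are truncated. Below is a minimal pagination sketch, assuming the v2 API's linked_partitioning responses include a next_href cursor URL that can be followed until it is absent; fetch_all_likes is a hypothetical helper and not part of the original script.

import requests
from http.client import responses

def fetch_all_likes(user_id, client_id, headers):
    # Hypothetical helper: walk next_href cursors until every liked track is collected.
    url = (f'https://api-v2.soundcloud.com/users/{user_id}/track_likes'
           f'?client_id={client_id}&limit=200&linked_partitioning=1')
    permalinks = []
    while url:
        r = requests.get(url, headers=headers)
        if r.status_code != 200:
            print(f' ERROR | Soundcloud API: {r.status_code} - {responses[r.status_code]}')
            break
        page = r.json()
        permalinks.extend(item['track']['permalink_url'] for item in page['collection'])
        url = page.get('next_href')  # assumption: absent/None on the last page
        if url and 'client_id=' not in url:
            url = f'{url}&client_id={client_id}'  # next_href may omit the client_id
    return permalinks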