@wlinds
Created March 18, 2024 21:51
Find available SoundCloud names
import os, requests, time, csv
from bs4 import BeautifulSoup
delay_seconds = 0.2
url_to_check = "https://soundcloud.com"
error_title_to_check = "Something went wrong on SoundCloud"
csv_filename = "results.csv"
start_sequence = "/wla"

def get_names(delay, base_url, error_title, csv_filename, start_sequence):
    """Walk candidate profile URLs (base_url + suffix), logging each attempt to CSV,
    and return the first suffix that looks unclaimed plus the number of taken names checked."""
    successful_requests = 0

    # Create the CSV with a header row on the first run.
    if not os.path.isfile(csv_filename):
        with open(csv_filename, 'w', newline='', encoding='utf-8') as csv_file:
            csv_writer = csv.writer(csv_file)
            csv_writer.writerow(['URL', 'Status Code', 'Title'])

    with open(csv_filename, 'a', newline='', encoding='utf-8') as csv_file:
        csv_writer = csv.writer(csv_file)
        while True:
            url_to_check = base_url + start_sequence
            response = requests.get(url_to_check)
            csv_writer.writerow([url_to_check, response.status_code, get_page_title(response.content)])
            csv_file.flush()

            # With no error title to match, treat a plain 404 as an unclaimed name.
            if not error_title and response.status_code == 404:
                return start_sequence[1:], successful_requests

            # SoundCloud's error page title means no profile exists at this URL.
            if check_title(response.content, error_title):
                return start_sequence[1:], successful_requests

            successful_requests += 1
            time.sleep(delay)
            start_sequence = increment_string(start_sequence)
            print(successful_requests, start_sequence, response.status_code)

def check_title(html_content, error_title):
    soup = BeautifulSoup(html_content, 'html.parser')
    title_tag = soup.find('title')
    if title_tag and error_title.lower() in title_tag.text.lower():
        return True
    return False

def increment_string(s):
    # Advance the last character by one; 'z' and '9' roll over to 'a',
    # carrying the increment into the preceding character.
    last_char = s[-1]
    if last_char == 'z':
        return increment_string(s[:-1]) + 'a'
    elif last_char == '9':
        return increment_string(s[:-1]) + 'a'
    else:
        return s[:-1] + chr(ord(last_char) + 1)
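
# Illustration only (derived from increment_string above, not output from a live run):
#   increment_string("/wla") -> "/wlb"
#   increment_string("/wlz") -> "/wma"   # 'z' rolls over and carries into the preceding character
#   increment_string("/wl9") -> "/wma"   # '9' rolls over the same way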

def get_page_title(html_content):
    soup = BeautifulSoup(html_content, 'html.parser')
    title_tag = soup.find('title')
    return title_tag.text if title_tag else ''

result, total_requests = get_names(delay_seconds, url_to_check, error_title_to_check, csv_filename, start_sequence)
print(f"Error String: {result}, Total Requests: {total_requests}")