@fluffy-critter
Last active February 8, 2021 19:46
Python script to incrementally update a Stepmania SIMfile collection from z-i-v

Zenius I Vanisher Mirroring

The best source for Stepmania charts is Zenius-I-Vanisher. Unfortunately, there's no easy way to incrementally update collections.

Usage

To run this script you'll need Python 3.6 or later, and you'll need to install Requests and Beautiful Soup with e.g. pip install requests beautifulsoup4. Put the script into your Stepmania folder and run it with e.g.

python3 zenius.py

By default it will download the latest 20 official SIMfiles, but you can give it one or more alternate category URLs to pull from, for example:

python3 zenius.py https://zenius-i-vanisher.com/v5.2/simfiles.php?category=top_official https://zenius-i-vanisher.com/v5.2/viewsimfilecategory.php?categoryid=41
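
The script also supports a dry-run mode, which prints what would be downloaded without actually fetching anything:

python3 zenius.py --dry-run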

Currently it doesn't detect whether a SIMfile has been updated; it only downloads ones that appear for the first time. To force a changed SIMfile to be re-downloaded, manually delete its zip file from the zips directory.
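
For example, if the SIMfile with ID 12345 (a made-up ID here; the real ID appears in the zip's filename) has been updated on ZIV, you can force a fresh copy with:

rm zips/12345.zip
python3 zenius.py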

Notes

ZIV is an amazing resource. Don't abuse this tool, and consider donating to them to keep it running!

import argparse
import os
import urllib.parse
import zipfile
from urllib.request import urlretrieve

import requests
from bs4 import BeautifulSoup


def parse_args(*args):
    """ Parse the command-line arguments """
    parser = argparse.ArgumentParser(description="Mirror simfiles from ZIV")

    parser.add_argument('categories', type=str, nargs='*',
                        help='ZIV category pages to mirror',
                        default=['https://zenius-i-vanisher.com/v5.2/simfiles.php?category=latest20official'])

    feature = parser.add_mutually_exclusive_group(required=False)
    feature.add_argument('--dry-run', '-n',
                         help="Only perform a dry run; don't download anything",
                         dest='dry_run', action='store_true')
    feature.add_argument('--no-dry-run',
                         help="Download simfiles normally",
                         dest='dry_run', action='store_false')
    feature.set_defaults(dry_run=False)

    return parser.parse_args(*args)


def mirror(cat_url, args):
    """ Download all of the simfiles on a ZIV category page """
    request = requests.get(cat_url)
    page = BeautifulSoup(request.text, features="html.parser")

    if 'viewsimfilecategory.php' in cat_url:
        # A single-category page names the group in its page header
        simgroup = page.find('div', {'class': 'headertop'}).h1
    else:
        simgroup = None

    for row in page.find_all('tr'):
        simfile = row.find("a", href=lambda href: href and "viewsimfile.php" in href)
        # On a listing page, a category header row sets the group for the
        # simfile rows that follow it
        simgroup = simgroup or row.find(
            "a", href=lambda href: href and "viewsimfilecategory.php" in href)
        if not (simfile and simgroup):
            continue

        # Normalize whitespace and strip path separators from the names
        songname = ' '.join(simfile.get_text().replace('/', '-').split())
        groupname = ' '.join(simgroup.get_text().replace('/', '-').split())
        print(f"collection: '{groupname}' simfile: '{songname}'")

        simlink = simfile['href']
        try:
            sim_id = urllib.parse.parse_qs(urllib.parse.urlparse(simlink).query)['simfileid'][0]
        except KeyError:
            print(f"WARNING: no simfileid found on URL {simlink}")
            continue

        url = f'https://zenius-i-vanisher.com/v5.2/download.php?type=ddrsimfile&simfileid={sim_id}'

        if args.dry_run:
            print(f"Dry run requested, not downloading {url}")
            continue

        # An existing zip marks the simfile as already downloaded
        filename = f'zips/{sim_id}.zip'
        if not os.path.isfile(filename):
            print(f'Downloading {url} -> {filename}')
            try:
                os.makedirs('zips', exist_ok=True)
                urlretrieve(url, filename)
            except KeyboardInterrupt:
                # Don't leave a partial zip behind, or the next run would
                # skip this simfile as already downloaded
                print('Download aborting...')
                if os.path.isfile(filename):
                    print(f'Removing partial file {filename}')
                    os.unlink(filename)
                raise

            try:
                with zipfile.ZipFile(filename, 'r') as zfile:
                    songdir = f'songs/{groupname}'
                    print(f'Extracting into {songdir}')
                    os.makedirs(songdir, exist_ok=True)
                    zfile.extractall(songdir)
            except zipfile.BadZipFile:
                print(f'Not a zip file: {filename}')


if __name__ == "__main__":
    args = parse_args()
    for url in args.categories:
        mirror(url, args)