Skip to content

Instantly share code, notes, and snippets.

@learnitall
Created February 28, 2024 19:06
Show Gist options
  • Save learnitall/efd5f808d3fb7a57fbd996027ea71c09 to your computer and use it in GitHub Desktop.
Save learnitall/efd5f808d3fb7a57fbd996027ea71c09 to your computer and use it in GitHub Desktop.
Update mc plugins defined in default.nix
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging
import re
import shlex
import subprocess
import requests
from bs4 import BeautifulSoup
# Bug fix: Logger.setLevel() returns None, so the original
# `logger = logging.getLogger().setLevel(...)` bound `logger` to None.
# Configure the root logger and keep a usable reference to it.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

# Hangar does not like the requests user agent, and sends back
# 404s for everything, so present a browser user agent instead.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36",
}
def load_versions():
    """Parse ./default.nix and extract the declared plugin entries.

    Scans for the ``plugins = [`` list and, for each closing ``}`` of a
    plugin attrset, parses the three preceding lines (expected to be the
    ``name``, ``url`` and ``hash`` attributes) into a dict.

    Returns:
        tuple: ``(plugins, content)`` where ``plugins`` is a list of dicts
        and ``content`` is the full text of default.nix (kept so the caller
        can do in-place string replacement).
    """
    with open("./default.nix", "r") as default_dot_nix:
        content = default_dot_nix.read()
    # Split the text we already read instead of re-reading the file
    # (the original seeked back and called readlines()).
    lines = content.splitlines()
    on_plugins_list = False
    plugins = []
    for i, line in enumerate(lines):
        line = line.strip()
        if line == "plugins = [":
            on_plugins_list = True
            continue
        if on_plugins_list and "]" in line:
            # Closing bracket of the plugins list: nothing more to parse.
            break
        if on_plugins_list and line == "}":
            # The three lines before the closing brace are the attrs,
            # e.g. `name = "geyser";` -> ("name", "geyser").
            plugins.append(
                dict(
                    l.strip().replace('"', "").replace(";", "").split(" = ")
                    for l in lines[i - 3 : i]
                )
            )
            logging.info("Found plugin " + str(plugins[-1]))
    return plugins, content
def get_redirect_url(url):
    """Return the URL that *url* redirects to, using curl.

    curl's ``-w '%{redirect_url}\\n'`` appends the redirect target plus a
    newline after the response headers, so the URL is the second-to-last
    element after splitting stdout on newlines.

    Raises:
        OSError: if curl exits non-zero.
        ValueError: if the output does not look like an https redirect.
    """
    # Pass argv as a list so the URL is never re-parsed by shlex.
    r = subprocess.run(
        ["curl", "-IsSfw", "%{redirect_url}\n", url],
        capture_output=True,
        text=True,
    )
    if r.returncode != 0:
        raise OSError(f"Unable to get redirect url for {url}: {r.stderr}")
    lines = r.stdout.split("\n")
    redirect_url = lines[-2]
    # Bug fix: elements of split("\n") never contain "\n", so the original
    # `redirect_url == "\n"` test was dead; check for emptiness instead.
    if not redirect_url or not redirect_url.startswith("https:"):
        raise ValueError(f"Weird url: {r}")
    return redirect_url
def get_sha256_base64_hash(url):
    """Prefetch *url* into the nix store and return its SRI-style hash.

    Runs ``nix-prefetch-url`` (which prints a base32 sha256) and converts
    the result to the ``sha256-<base64>`` form used in default.nix.

    Raises:
        OSError: if either nix invocation exits non-zero.
    """
    # List argv avoids shlex splitting on characters inside the URL.
    r = subprocess.run(
        ["nix-prefetch-url", url],
        capture_output=True,
        text=True,
    )
    if r.returncode != 0:
        raise OSError(f"Unable to prefetch url {url}: {r.stderr}")
    b32hash = r.stdout.strip()
    r = subprocess.run(
        ["nix", "hash", "to-base64", "--type", "sha256", b32hash],
        capture_output=True,
        text=True,
    )
    if r.returncode != 0:
        raise OSError(f"Unable to convert b32 hash {b32hash} to base64: {r.stderr}")
    b64hash = r.stdout.strip()
    return f"sha256-{b64hash}"
def handle_geyser_plugin(plugin):
    """Check a Geyser download URL for a newer version/build.

    Geyser URLs embed the version (5th path segment from the end) and the
    build number (3rd from the end); replacing both with "latest" yields a
    URL that redirects to the newest artifact.

    Args:
        plugin: dict with at least "name" and "url" keys (from load_versions).

    Returns:
        The new download URL, or None if the plugin is already current.
    """
    _parts = plugin["url"].split("/")
    version = _parts[-5]
    build = _parts[-3]
    latest_url = plugin["url"].replace(version, "latest").replace(build, "latest")
    redirect_url = get_redirect_url(latest_url)
    logging.debug(
        f"Plugin {plugin['name']}: "
        f"given={plugin['url']},latest={latest_url},"
        f"redirected_to={redirect_url}",
    )
    if redirect_url != plugin["url"]:
        _new_parts = redirect_url.split("/")
        new_version = _new_parts[-5]
        new_build = _new_parts[-3]
        # Bug fix: report the *new* version/build; the original logged the
        # old `version`/`build` and left new_version/new_build unused.
        logging.info(
            f"Geyser plugin {plugin['name']} has new version {new_version} and build {new_build}"
        )
        return redirect_url
    return None
def handle_hangar_plugin(plugin):
    """Check a Hangar-hosted plugin for a newer jar.

    Scrapes the plugin's Hangar homepage for the first link pointing at
    hangarcdn.papermc.io (the direct download).

    Args:
        plugin: dict with at least "name" and "url" keys (from load_versions).

    Returns:
        The new download URL, or None if the plugin is already current.

    Raises:
        ValueError: if the page cannot be fetched or no download link is found.
    """
    _parts = plugin["url"].split("/")
    org = _parts[4]
    name = _parts[5]
    homepage_url = f"https://hangar.papermc.io/{org}/{name}"
    homepage = requests.get(homepage_url, headers=headers)
    if homepage.status_code != 200:
        raise ValueError(
            f"Unable to get url {homepage_url}, got status code {homepage.status_code}"
        )
    soup = BeautifulSoup(homepage.text, features="html.parser")
    # Bug fix: BeautifulSoup passes href=None for anchors without an href
    # attribute, so the bare `in href` test raised TypeError; guard it.
    download_link = soup.find(
        "a", href=lambda href: href is not None and "hangarcdn.papermc.io" in href
    )
    # Bug fix: find() returns None when nothing matches; fail with a clear
    # message instead of an AttributeError on `.attrs`.
    if download_link is None:
        raise ValueError(f"Unable to find hangarcdn download link at {homepage_url}")
    download_url = download_link.attrs["href"]
    logging.debug(
        f"Plugin {plugin['name']}: " f"given={plugin['url']}, " f"latest={download_url}"
    )
    if download_url != plugin["url"]:
        new_jar = download_url.split("/")[-1]
        logging.info(f"Hangar plugin {plugin['name']} has new jar {new_jar}")
        return download_url
    return None
def handle_modrinth_plugin(plugin):
    """Check a Modrinth-hosted plugin for a newer Paper-compatible jar.

    Scrapes the plugin's Modrinth versions page and picks the first stable
    release whose supported-platforms list mentions "Paper".

    Args:
        plugin: dict with at least "name" and "url" keys (from load_versions).

    Returns:
        The new download URL, or None if the plugin is unchanged or no
        suitable release button was found on the page.

    Raises:
        ValueError: if the versions page cannot be fetched.
    """
    homepage_url = f"https://modrinth.com/plugin/{plugin['name']}/versions"
    homepage = requests.get(homepage_url, headers=headers)
    if homepage.status_code != 200:
        raise ValueError(
            f"Unable to get url {homepage_url}, got status code {homepage.status_code}"
        )
    download_url = None
    soup = BeautifulSoup(homepage.text, features="html.parser")
    # Find the first download button (class "version-button") that:
    # 1. Is marked as a stable release (nested <a> tag contains the class "release")
    # 2. Has a <span> tag containing the string "Paper" (targets a nested <div>
    #    tag with the class "version__supports", containing the supported distros).
    # NOTE(review): this matcher is tied to Modrinth's current page markup
    # (class names "version-button"/"release"/"version__supports") — verify
    # against the live site if scraping starts returning None.
    latest_paper_button = soup.find(
        lambda tag: len(tag.findChildren("a", class_="release", recursive=False)) > 0
        and len(tag.findChildren("span", string=re.compile("Paper"))) > 0,
        class_="version-button",
    )
    if latest_paper_button is not None:
        # The button's anchor carries the direct download link.
        download_url = latest_paper_button.find("a").attrs["href"]
    logging.debug(
        f"Plugin {plugin['name']}: " f"given={plugin['url']}, " f"latest={download_url}"
    )
    if download_url is not None and download_url != plugin["url"]:
        new_jar = download_url.split("/")[-1]
        logging.info(f"Modrinth plugin {plugin['name']} has new jar {new_jar}")
        return download_url
    return None
if __name__ == "__main__":
    # Map a URL substring to the handler that knows how to check that host.
    # Order matters: it mirrors the original if/elif chain.
    _HANDLERS = (
        ("geyser", handle_geyser_plugin),
        ("hangar", handle_hangar_plugin),
        ("modrinth", handle_modrinth_plugin),
    )
    plugins, default_dot_nix = load_versions()
    for plugin in plugins:
        for marker, handler in _HANDLERS:
            if marker in plugin["url"]:
                break
        else:
            raise TypeError("unknown plugin type", plugin)
        new_url = handler(plugin)
        if new_url is None:
            continue
        # Swap in the new URL and the freshly prefetched hash for it.
        default_dot_nix = default_dot_nix.replace(plugin["url"], new_url)
        default_dot_nix = default_dot_nix.replace(
            plugin["hash"], get_sha256_base64_hash(new_url),
        )
    # Write to a separate file so the original default.nix can be diffed.
    with open("default.nix.updated", "w") as default_dot_nix_new:
        default_dot_nix_new.write(default_dot_nix)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment