@ZoomTen
Last active April 29, 2023 14:08
Convert GBS to GBSX using a JSON definition.
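A rough usage sketch (the script filename and the JSON filename here are illustrative, not part of the gist):

    python gbs2gbsx.py katakis3d.json -o katakis3d.gbsx

The JSON file must match the schema defined in the script below; -o is optional, and when it is omitted the output name is derived from the GBS file named in the JSON, with the extension swapped to .gbsx.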
import io
import argparse
import jsonschema
import logging
import json
import os
import math
from binascii import crc32
log = logging.getLogger(__name__)
# JSONSchema for the json file
dist_schema = {
    "type": "object",
    "required": ["file", "meta", "tracks"],
    "properties": {
        "file": {
            "type": "string",
            "description": "[REQUIRED] The GBS rip file to be given tags.",
        },
        "meta": {
            "type": "object",
            "description": "[REQUIRED] Information on the game itself.",
            "$ref": "#/definitions/meta_properties"
        },
        "tracks": {
            "type": "array",
            "description": "[REQUIRED] A list of track info, ordered by playlist number.",
            "uniqueItems": True,
            "minItems": 1,
            "items": {
                "$ref": "#/definitions/track"
            }
        }
    },
    "definitions": {
        "track": {
            "type": "object",
            "additionalProperties": False,
            "required": [ "name", "length" ],
            "properties": {
                "name": {
                    "type": "string",
                    "description": "[REQUIRED] Track name"
                },
                "length": {
                    "type": "string",
                    "description": "[REQUIRED] Length of track in (hh:)mm:ss format, or in seconds.",
                    "pattern": "^([0-9]+:)+[0-9]+$|^[0-9]+$"
                }
            }
        },
        "meta_properties": {
            "additionalProperties": False,
            "required": [ "title", "date", "copyright", "author" ],
            "properties": {
                "title": {
                    "type": "string",
                    "description": "[REQUIRED] Title of the game."
                },
                "alternate_titles": {
                    "type": "array",
                    "description": "Alternate titles of the game, if any. This can be its Japanese title, for example.",
                    "$ref": "#/definitions/common_name_list"
                },
                "qualifiers": {
                    "type": "array",
                    "description": "Release qualifiers, if any. e.g. 'Unreleased', 'Prototype', 'Beta', etc.",
                    "$ref": "#/definitions/common_name_list"
                },
                "date": {
                    "type": "string",
                    "description": "[REQUIRED] Release date of the game. Uses the YYYY-MM-DD format. Can be shortened to just the month or year.",
                    "pattern": "^[0-9]+(-[0-9]{2,})?(-[0-9]{2,})?$|^\\?+$"
                },
                "copyright": {
                    "type": "array",
                    "description": "[REQUIRED] This is usually the copyright holder(s) (company or publisher) of the game.",
                    "$ref": "#/definitions/common_name_list"
                },
                "author": {
                    "type": "array",
                    "description": "[REQUIRED] Composer(s) of the soundtrack.",
                    "$ref": "#/definitions/common_name_list"
                },
                "ripper": {
                    "type": "array",
                    "description": "[RECOMMENDED] Ripper(s) of the GBS.",
                    "$ref": "#/definitions/common_name_list"
                },
                "tagger": {
                    "type": "array",
                    "description": "[RECOMMENDED] Tagger(s) of the GBS.",
                    "$ref": "#/definitions/common_name_list"
                },
                "comments": {
                    "type": "array",
                    "description": "Any remarks to be made ripping this GBS. Each element is its own line.",
                    "minItems": 1,
                    "items": {
                        "type": "string"
                    }
                }
            }
        },
        "common_name_list": {
            "uniqueItems": True,
            "minItems": 1,
            "items": {
                "type": "string"
            }
        }
    }
}

def zero_write(file_target, num_bytes):
    return file_target.write(b"\x00" * num_bytes)

def check_json(json_fn):
    """
    Check the validity of a JSON file according to this schema.
    :param json_fn: JSON file name
    """
    log.info("Validating json file %s" % json_fn)
    with open(json_fn) as json_file:
        return jsonschema.validate(
            instance=json.load(json_file),
            schema=dist_schema
        )

def determine_title_string(deserialized):
    meta = deserialized["meta"]
    title = meta["title"]
    suffix = ""
    # add alternate titles like [Akai] [Doukutsu Monogatari] etc...
    alt_titles = meta.get("alternate_titles")
    if alt_titles:
        title += " [%s]" % ('] ['.join(alt_titles))
    # add qualifiers like (Prototype)(Rev.B) etc..
    qualifiers = meta.get("qualifiers")
    if qualifiers:
        suffix += "(%s)" % (') ('.join(qualifiers))
return "%s %s" % (
title,
suffix
)
def determine_copyright_string(deserialized):
    meta = deserialized["meta"]
    return "%s %s" % (
        meta["date"], ", ".join(meta["copyright"])
    )

def determine_author_string(deserialized):
    return ", ".join(deserialized["meta"]["author"])

def determine_comment_string(deserialized):
    meta = deserialized["meta"]
    ripper = meta.get("ripper", [])
    tagger = meta.get("tagger", [])
    comments = meta.get("comments", [])
    cm = ""
    if ripper:
        cm += "Ripper%s: %s\r\n" % ("s" if len(ripper) > 1 else "", ", ".join(ripper))
    if tagger:
        cm += "Tagger%s: %s\r\n" % ("s" if len(tagger) > 1 else "", ", ".join(tagger))
    if comments:
        cm += "\r\n"
        cm += "\r\n".join(comments)
    return cm

def parse_stuff(deserialized, gbs_output_name):
    log.info("Preparing to write GBSX")
    file_n = deserialized["file"]
    tracks = deserialized["tracks"]
    if not gbs_output_name:
        gbs_output_name = os.path.splitext(file_n)[0] + ".gbsx"
    log.info("Output file is %s" % gbs_output_name)
    # negative values mark a "not present" field and are stored as all-0xFF bytes
    int_to_word = lambda x: b"\xff\xff" if x < 0 else x.to_bytes(2, "little")
    int_to_long = lambda x: b"\xff\xff\xff\xff" if x < 0 else x.to_bytes(4, "little")
    as_bytes = lambda x: x.encode("ISO-8859-1") + b"\x00"
    with open(file_n, "rb") as og_file:
        with io.BytesIO() as gbs_buffer:
            log.info("Input GBS is %s" % file_n)
            gbs_content = og_file.read()
            gbs_buffer.write(gbs_content)
            log.debug("Writing size of GBS minus header")
            code_data_size_multiplier = math.ceil((len(gbs_content) - 0x70) / 16)
            code_data_size_multiplier += 0x100 # i don't know why this is necessary
            gbs_buffer.seek(0x6e)
            gbs_buffer.write(int_to_word(code_data_size_multiplier))
            gbs_buffer.seek(0, io.SEEK_END)
            cur_pos = gbs_buffer.tell()
            code_data_size_multiplier -= 0x100 # ???
            target_pos = (code_data_size_multiplier * 16) + 0x70
            if cur_pos < target_pos:
                log.debug("Padding out GBS file")
                zero_write(gbs_buffer, target_pos - cur_pos)
            gbs_buffer.seek(0)
            gbs_content = gbs_buffer.read()
            og_file_size = len(gbs_content)
            og_crc = crc32(gbs_content)
            log.debug("GBS CRC is %08x" % og_crc)

    def track_length_to_seconds(trlen: str):
        scnds = 0
        factor = 1
        for i in reversed( [int(x) for x in trlen.split(":")] ):
            scnds += (i * factor)
            factor *= 60
        log.debug("Length \"%s\" -> %d seconds" % (trlen, scnds))
        return scnds

    string_offsets = {
        "title": -1,
        "author": -1,
        "copyright": -1,
        "comment": -1,
        "tracks": []
    }
    with io.BytesIO() as string_area:
        log.debug("Preparing string area")
        title_string = determine_title_string(deserialized)
        author_string = determine_author_string(deserialized)
        copy_string = determine_copyright_string(deserialized)
        comment_string = determine_comment_string(deserialized)
        log.debug("Writing title string: \"%s\"" % title_string)
        string_offsets["title"] = string_area.tell()
        string_area.write(as_bytes(title_string))
        log.debug("Writing author string: \"%s\"" % author_string)
        string_offsets["author"] = string_area.tell()
        string_area.write(as_bytes(author_string))
        log.debug("Writing copyright string: \"%s\"" % copy_string)
        string_offsets["copyright"] = string_area.tell()
        string_area.write(as_bytes(copy_string))
        log.debug("Writing comment string: \"%s\"" % comment_string)
        string_offsets["comment"] = string_area.tell()
        string_area.write(as_bytes(comment_string))
        for track in tracks:
            track_name_string = track["name"]
            log.debug("Writing track name: \"%s\"" % track_name_string)
            string_offsets["tracks"].append(string_area.tell())
            string_area.write(as_bytes(track_name_string))
        string_area.seek(0)
        log.info("Writing GBSX file")
        with io.BytesIO() as gbsx:
            log.debug("Writing GBSX magic")
            gbsx.write(b"GBSX")
            # size of extended header
            xthdr_size_offset = gbsx.tell()
            zero_write(gbsx, 4)
            xthdr_size = gbsx.tell()
            # ext header CRC
            xthdr_crc_offset = gbsx.tell()
            zero_write(gbsx, 4)
            # gbs file size
            og_file_size_offset = gbsx.tell()
            zero_write(gbsx, 4)
            # gbs file CRC
            og_crc_offset = gbsx.tell()
            zero_write(gbsx, 4)
            # StringArea + offset, title string
            log.debug("Writing title offset")
            gbsx.write(int_to_word(string_offsets["title"]))
            # StringArea + offset, author string
            log.debug("Writing author offset")
            gbsx.write(int_to_word(string_offsets["author"]))
            # StringArea + offset, copyright string
            log.debug("Writing copyright offset")
            gbsx.write(int_to_word(string_offsets["copyright"]))
            # StringArea + offset, comment string
            log.debug("Writing comment offset")
            gbsx.write(int_to_word(string_offsets["comment"]))
            # num entries in subsong info table
            log.debug("Writing number of tracks")
            gbsx.write(len(tracks).to_bytes(1, "little"))
            # reserved
            zero_write(gbsx, 3)
            # subsong info
            for track_num in range(len(tracks)):
                log.debug("Writing track #%d length" % track_num)
                gbsx.write(
                    int_to_long(track_length_to_seconds(tracks[track_num]["length"]) * 1024)
                )
                log.debug("Writing track #%d name" % track_num)
                gbsx.write(
                    int_to_word(string_offsets["tracks"][track_num])
                )
                zero_write(gbsx, 2) # reserved
            # string area
            log.debug("Writing string area")
            gbsx.write(string_area.read())
            xthdr_size = gbsx.tell() - xthdr_size
            # fill in xthdr size
            log.debug("Writing extended header size")
            gbsx.seek(xthdr_size_offset)
            gbsx.write(int_to_long(xthdr_size))
            # fill in og stuff
            log.debug("Writing GBS CRC info")
            gbsx.seek(og_crc_offset)
            gbsx.write(int_to_long(og_crc))
            log.debug("Writing GBS file size info")
            gbsx.seek(og_file_size_offset)
            gbsx.write(int_to_long(og_file_size))
            # fill in the crc
            log.debug("Writing extended header CRC")
            gbsx.seek(0)
            gbsx_data_recurs = gbsx.read()
            gbsx.seek(xthdr_crc_offset)
            gbsx.write(int_to_long(crc32(gbsx_data_recurs)))
            gbsx.seek(0)
            # output file
            with open(gbs_output_name, "wb") as output:
                log.info("Copying modified GBS file")
                output.write(gbs_content)
                log.info("Appending GBSX data")
                output.write(gbsx.read())
    log.info("Success!")

if __name__ == "__main__":
    logging.basicConfig(
        format="%(levelname)8s: %(message)s",
        level=logging.INFO
    )
    ap = argparse.ArgumentParser(
        description="Appends an extended header in the GBSX format for use with GBSPlay (commit eaaa371 at the latest)"
    )
    ap.add_argument(
        'json',
        help="Name of the JSON file to parse containing the GBSX tags."
    )
    ap.add_argument(
        '-o', '--output',
        default='',
        help="Output file. By default, the file name will be the same, only with a .gbsx file extension."
    )
    args = ap.parse_args()
    try:
        check_json(args.json)
    except Exception as e:
        log.critical("Failed to validate file %s!" % args.json)
        log.critical(str(e))
        exit(1)
    with open(args.json, "r") as json_file:
        js = json.load(json_file)
        parse_stuff(js, args.output)
{
  "file": "katakis3d.gbs",
  "meta": {
    "title": "Katakis 3D",
    "qualifiers": ["Unreleased"],
    "copyright": ["Similis"],
    "author": ["Tufan Uysal"],
    "date": "2001",
    "ripper": ["zlago"],
    "tagger": ["Zumi"],
    "comments": [
      "ripped by Sylvie (zlago).",
      "using a disasm of a sound test ROM, since apparently no one ripped this soundtrack and i just NEEDED a .gbs to listen to"
    ]
  },
  "tracks": [
    {
      "name": "Crush Boom Bang",
      "length": "0:06"
    },{
      "name": "Flight to Hell",
      "length": "1:08"
    },{
      "name": "The Big Thing",
      "length": "0:39"
    },{
      "name": "Electrical Motions",
      "length": "1:21"
    },{
      "name": "Protected Beat",
      "length": "0:19"
    },{
      "name": "Secret Cycles",
      "length": "1:52"
    },{
      "name": "Radioactive Attack",
      "length": "0:32"
    },{
      "name": "Enforcer",
      "length": "1:20"
    },{
      "name": "Someone Wanna Party",
      "length": "0:40"
    },{
      "name": "Rasit's Spiritual Dreams",
      "length": "1:08"
    },{
      "name": "Oriental Danger",
      "length": "0:40"
    },{
      "name": "Boomin' Back Katakis",
      "length": "3:51"
    },{
      "name": "Master of Universe",
      "length": "1:04"
    },{
      "name": "The Impregnable",
      "length": "0:51"
    },{
      "name": "Katakis (Remix)",
      "length": "3:07"
    },{
      "name": "Loud 'n Proud",
      "length": "0:43"
    },{
      "name": "30 Seconds to Go...",
      "length": "0:35"
    },{
      "name": "Beyond the Stars",
      "length": "4:17"
    }
  ]
}