@Kyuuhachi
Last active April 11, 2023 16:02
[DEPRECATED] Convert a BC7-encoded .dds file to a NISA Trails from Zero-style .itp file (and also .itp to .dds)

This script is deprecated; please use Cradle instead.

#!/usr/bin/env python
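"""Convert between BC7-encoded .dds files and NISA Trails from Zero-style
.itp files. Each file passed on the command line is converted to the
other format (see __main__ at the bottom of the file)."""
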
from pathlib import Path
import struct
import typing as T
import io
import argparse
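
# Thin struct wrappers: everything in both file formats is little-endian.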
def unpack(f: T.IO[bytes], fmt: str):
    return struct.unpack("<" + fmt, f.read(struct.calcsize("<" + fmt)))

def pack(f: T.IO[bytes], fmt: str, *args: T.Any):
    f.write(struct.pack("<" + fmt, *args))

def check(f: T.IO[bytes], fmt: str, *val: T.Any):
    v = unpack(f, fmt)
    assert v == val, v

def compress(data: bytes):
    # This is a total cop-out: mode 0 stores the data uncompressed.
    # The decompressor is as seen below.
    return struct.pack("<I", 0) + data
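
# My reading of the decompression scheme (an assumption drawn from the
# code below; the format is not officially documented): a 32-bit header
# selects the mode. Mode 0 stores the payload verbatim; any other mode
# is an LZ77-style stream where each 16-bit word packs an opcode in its
# low `mode` bits and a count/offset in the rest. op == 0 copies `num`
# literal bytes from the stream; otherwise it copies `op` bytes from
# `num+1` bytes back in the output, followed by one literal byte.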
def decompress(data: bytes):
    f = io.BytesIO(data)
    mode, = unpack(f, "I")
    if mode == 0:
        return f.read()
    out = bytearray()
    while f.tell() < len(data):
        x, = unpack(f, "H")
        op = ~(-1<<mode) & x
        num = x >> mode
        if op == 0:
            out.extend(f.read(num))
        else:
            for _ in range(op):
                out.append(out[~num])
            out.append(unpack(f, "B")[0])
    return out
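
# DDS layout handled here: the "DDS " magic, a 124-byte DDS_HEADER with
# an embedded DDS_PIXELFORMAT, a DDS_HEADER_DXT10 extension (the pixel
# format fourcc must be DX10), then the mip levels back to back. BC7
# packs each 4x4 pixel block into 16 bytes, i.e. one byte per pixel,
# which is why each mip level is (width>>i)*(height>>i) bytes.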
def read_dds(path: str|Path):
    pixdata = []
    with open(path, "rb") as f:
        check(f, "4sI", b"DDS ", 124)
        flags, height, width, pitch, depth, mipmaps = unpack(f, "IIIIII")
        check(f, "11I", *(0,)*11)
        pfsize, pfflags = unpack(f, "II")
        pfmagic, bitcount = unpack(f, "4sI")
        assert pfmagic == b"DX10", pfmagic
        check(f, "IIII", 0, 0, 0, 0)
        check(f, "IIIII", 0x1000, 0, 0, 0, 0)
        dxgi, *_ = unpack(f, "IIIII")
        assert dxgi == 98, f"format {dxgi} not supported, only 98=DXGI_FORMAT_BC7_UNORM is"
        for i in range(mipmaps):
            pixdata.append(f.read((width>>i)*(height>>i)))
    return width, height, pixdata

def write_dds(path: str|Path, data: tuple[int, int, list[bytes]]):
    width, height, pixdata = data
    with open(path, "wb") as f:
        pack(f, "4s", b"DDS ")
        flags = 0x1007 # Mandatory
        flags |= 0x80000 # DDSD_LINEARSIZE
        flags |= 0x20000 # DDSD_MIPMAPCOUNT
        pack(f, "IIIIIII", 124, flags, height, width, width*height, 1, len(pixdata)) # DDS_HEADER
        pack(f, "11I", *(0,)*11) # reserved
        pack(f, "II4sI", 32, 4, b"DX10", 0) # DDS_PIXELFORMAT
        pack(f, "IIII", 0, 0, 0, 0) # rest of DDS_PIXELFORMAT
        pack(f, "IIIII", 0x1000, 0, 0, 0, 0) # rest of DDS_HEADER
        pack(f, "IIIII", 98, 3, 0, 1, 0) # DDS_HEADER_DXT10
        for mip in pixdata:
            f.write(mip)
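
# ITP-FF files are a sequence of PNG-style chunks: a fourcc, a 32-bit
# payload length, then the payload. IHDR carries the dimensions and the
# format version, IMIP the mip count, IHAS a hash (ignored here), IALP
# a has-alpha flag, IDAT one mip level of pixel data, and IEND ends the
# file.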
def read_itp(path: str|Path):
    pixdata = []
    with open(path, "rb") as f:
        magic = f.read(4)
        if magic != b"ITP\xFF":
            print(f"Not an ITP-FF file: magic was {magic!r}.")
            if 999 <= int.from_bytes(magic, "little") <= 1006:
                print("This program only handles recent ITP versions, maybe try another converter?")
            exit()
        width, height = 0, 0
        has_mip = False
        n_mips = 0
        major, minor = 0, 0
        while True:
            fourcc, size = unpack(f, "4sI")
            g = io.BytesIO(f.read(size))
            match fourcc:
                case b"IHDR":
                    check(g, "I", 32)
                    width, height, file_size = unpack(g, "III")
                    # file_size is the size of the whole file, but I'll ignore it
                    major, minor = unpack(g, "HH")
                    check(g, "HHHHHH", 0, 6, 3, 0, 0, 0) # swizzle, unk, unk, 0, 0, 0
                    assert major == 3, major
                case b"IMIP":
                    check(g, "I", 12)
                    has_mip, n_mips, zero = unpack(g, "HHI")
                    has_mip = bool(has_mip)
                    n_mips += 1
                    assert has_mip == (n_mips > 1), (has_mip, n_mips > 1)
                    assert zero == 0, zero
                case b"IHAS":
                    check(g, "I", 16)
                    zero, _hash = unpack(g, "IQ")
                    assert zero == 0, zero
                case b"IALP":
                    check(g, "I", 8)
                    _has_alpha, = unpack(g, "I")
                case b"IDAT":
                    n = len(pixdata)
                    check(g, "IHH", 8, 0, n)
                    out = bytearray()
                    if minor == 5:
                        while g.tell() < len(g.getbuffer()):
                            csize, usize = unpack(g, "II")
                            chunk = decompress(g.read(csize))
                            assert len(chunk) == usize, (len(chunk), usize)
                            out.extend(chunk)
                    elif minor == 10:
                        check(g, "I", 0x80000001)
                        nchunks, total_csize, max_csize, total_usize = unpack(g, "IIII")
                        assert g.tell() + total_csize == len(g.getbuffer())
                        assert total_usize == (width>>n)*(height>>n)
                        csizes = []
                        for _ in range(nchunks):
                            csize, usize = unpack(g, "II")
                            chunk = decompress(g.read(csize))
                            assert len(chunk) == usize, (len(chunk), usize)
                            out.extend(chunk)
                            csizes.append(8+csize)
                        assert len(out) == total_usize
                        assert max(csizes) == max_csize
                    else:
                        raise ValueError(minor)
                    pixdata.append(bytes(out))
                case b"IEND":
                    break
                case _:
                    raise ValueError(fourcc)
    return width, height, pixdata
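
# write_itp always emits the version 3.10 layout (the minor == 10
# branch of read_itp), splitting each mip level into 0x40000-byte
# chunks, each wrapped in compress()'s mode-0 "uncompressed" framing.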
def write_itp(path: str|Path, data: tuple[int, int, list[bytes]]):
    width, height, pixdata = data
    with open(path, "wb") as f:
        pack(f, "4s", b"ITP\xFF")
        pack(f, "4sII", b"IHDR", 32, 32)
        pack(f, "III", width, height, 0) # file size not yet known, patched at the end
        lenpos = f.tell() - 4
        pack(f, "HHHHHHHH", 3, 10, 0, 6, 3, 0, 0, 0)
        pack(f, "4sII", b"IMIP", 12, 12)
        pack(f, "HHI", len(pixdata)>1, len(pixdata)-1, 0)
        # IHAS - hash, ignored intentionally
        # IALP - has-alpha flag. Not sure if it has any effect, and I don't know how to compute it anyway.
        for n, pix in enumerate(pixdata):
            encoded = io.BytesIO()
            chunksize = 0x40000
            nchunk = 0
            maxsize = 0
            for i in range(0, len(pix), chunksize):
                nchunk += 1
                uchunk = pix[i:i+chunksize]
                chunk = compress(uchunk)
                pack(encoded, "II", len(chunk), len(uchunk))
                encoded.write(chunk)
                maxsize = max(maxsize, 8+len(chunk))
            encoded = encoded.getvalue()
            pack(f, "4sI", b"IDAT", 28+len(encoded))
            pack(f, "IHH", 8, 0, n)
            pack(f, "I", 0x80000001)
            pack(f, "IIII", nchunk, len(encoded), maxsize, len(pix))
            f.write(encoded)
        pack(f, "4sI", b"IEND", 0)
        length = f.tell()
        f.seek(lenpos)
        pack(f, "I", length)

argp = argparse.ArgumentParser()
argp.add_argument("files", metavar="file", nargs="+", type=Path)

def __main__(files: list[Path]):
    for f in files:
        if f.suffix.lower() == ".itp":
            f2 = f.with_suffix(".dds")
            print(f"Converting {f} ⇒ {f2}")
            write_dds(f2, read_itp(f))
        elif f.suffix.lower() == ".dds":
            f2 = f.with_suffix(".itp")
            print(f"Converting {f} ⇒ {f2}")
            write_itp(f2, read_dds(f))
        else:
            print(f"Cannot convert {f}: only .dds and .itp are allowed")

if __name__ == "__main__":
    __main__(**argp.parse_args().__dict__)
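
# Example usage (a sketch; "itp_convert.py" stands in for whatever name
# you saved this gist under):
#
#   python itp_convert.py texture.itp other.dds
#
# converts texture.itp to texture.dds and other.dds to other.itp. The
# helpers can also be imported:
#
#   from itp_convert import read_itp, write_dds
#   write_dds("texture.dds", read_itp("texture.itp"))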