Skip to content

Instantly share code, notes, and snippets.

@bbbradsmith
Last active May 25, 2021 12:16
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save bbbradsmith/49c8b79cba350ccf248f351de1b8aa1c to your computer and use it in GitHub Desktop.
Utilities for extracting data files from Elemental Gimmick Gear (Dreamcast) MDT, FNT, NJA, BIN, etc.
# eggstractor.py
# FNT file image extractor
# MDT packfile/map file extractor
# for E.G.G. Elemental Gimmick Gear (Dreamcast)
#
# Brad Smith, 2020
# http://rainwarrior.ca
#
# Dump result archive: https://mega.nz/#F!B74zmaTJ!LQcdMdeOji_cKyfxwoYDbg
#
# Before using this tool you must extract the Elemental Gimmick Gear
# files from your GD-ROM image.
# GDItools: https://sourceforge.net/projects/dcisotools/
# FNT and MDT file format notes at end of file
import os
import pathlib
import PIL.Image
import struct
import math
import wave
INDIR = "../dump/data/" # folder containing FNT/MDT/BIN files
# output folders
OUTFNT = "fnt"    # standalone FNT image sheets
OUTMAP = "map"    # individual map chunk dumps
OUTMAPL = "mapl"  # composited map layer renders
OUTMISC = "misc"  # everything else (embedded FNTs, PCM audio, raw textures)
OUTDIRS = [ OUTFNT, OUTMAP, OUTMAPL, OUTMISC ]
ALL = ""  # sentinel value: dump every matching file
CLEAR = (255,0,255,255)  # opaque magenta, background fill for image sheets
TRANSPARENT = (255,0,255,0)  # transparent magenta, fill for map overlays
WIDTH = 530  # maximum sheet width in pixels when tiling images
GROUP_BY_SIZE = True # true for a more compact but out-of-order presentation
DUMP_FNT = ALL  # ALL, a single filename, or None to skip
DUMP_MAP = ALL  # ALL, a single filename, or None to skip
LOG_TILES = False  # print per-tile debug info while dumping FNTs
# MDT files that aren't a map file
MDT_EFFMSPR = "EFFMSPR.MDT"
MDT_PCM = "PCM.MDT"
MDT_SEQ = "SEQ.MDT"
MDT_SNDSYS = "SNDSYS.MDT"
UNMAP_MDT = { MDT_EFFMSPR, MDT_PCM, MDT_SEQ, MDT_SNDSYS }
# miscellaneous FNT files embedded in .BIN (filename, byte offset)
BIN_FNT = [
    ("1ST_READ.BIN", 0x35264 - 0xC),
    ("1ST_READ.BIN", 0x3B5CC - 0xC),
    ("ACT5.BIN", 0x00FDC - 0xC),
    ("ACT6.BIN", 0x01488 - 0xC),
    ("ACT7.BIN", 0x017B4 - 0xC),
    #ENDING.BIN contains something that "looks" vaguely image-like, couldn't figure it out
    ("OVER.BIN", 0x009B4 - 0xC),
    ]
# miscellaneous raw textures embedded in .BIN (filename, byte offset, square dimension)
BIN_IMG = [
    ("1ST_READ.BIN", 0x970EC, 128)
    ]
#
# utility
#
def strhex(bs):
    """Return bytes as space-separated uppercase hex, e.g. b'\\x01\\xAB' -> '01 AB'.

    Returns "" for empty input.
    """
    # join replaces the original quadratic string += loop
    return " ".join("%02X" % b for b in bs)
def twiddle(x,y):
    """Interleave the bits of x and y (Morton/Z-order).

    y's bits occupy the even bit positions and x's bits the odd
    positions, starting from the least significant bit. This is the
    Dreamcast "twiddled" texture addressing order.
    """
    out = 0
    shift = 0
    while x > 0 or y > 0:
        out |= ((y & 1) << shift) | ((x & 1) << (shift + 1))
        y >>= 1
        x >>= 1
        shift += 2
    return out
def image_grid(match_dim, images, max_width=None):
    """Paste images onto a single grid sheet with 1-pixel CLEAR gutters.

    match_dim: only images with exactly this (w,h) are included; None
    includes everything. Cell size is the largest included image.
    max_width: if given, the column count is derived from this pixel
    width instead of aiming for a square grid.
    """
    if match_dim is None:
        chosen = list(images)
    else:
        chosen = [im for im in images if im.size == match_dim]
    cell_w = max((im.size[0] for im in chosen), default=0)
    cell_h = max((im.size[1] for im in chosen), default=0)
    # roughly square layout unless a pixel width was requested
    cols = math.ceil(math.sqrt(len(chosen)))
    if max_width is not None:
        cols = math.floor(max_width / (cell_w + 1))
    cols = max(1, min(len(chosen), cols))
    rows = math.ceil(len(chosen) / cols)
    sheet = PIL.Image.new("RGBA", (1 + (cell_w + 1) * cols, 1 + (cell_h + 1) * rows), CLEAR)
    for idx, tile in enumerate(chosen):
        cx = idx % cols
        cy = idx // cols
        sheet.paste(tile, (1 + (cell_w + 1) * cx, 1 + (cell_h + 1) * cy))
    return sheet
def image_grids(images, max_width=None, group_size=False):
    """Build an image sheet, optionally grouping images by size.

    With group_size False this is a plain ungrouped image_grid call.
    Otherwise one grid row-block is built per distinct image size
    (largest first) and the blocks are stacked vertically.
    Note: grouped grids use the module-level WIDTH, not max_width
    (preserved from the original behaviour).
    """
    if not group_size:
        return image_grid(None, images, None)
    sheet = PIL.Image.new("RGBA", (0, 0), CLEAR)
    for dim in sorted({im.size for im in images}, reverse=True):
        block = image_grid(dim, images, WIDTH)
        grown = PIL.Image.new("RGBA",
            (max(sheet.size[0], block.size[0]), sheet.size[1] + block.size[1]), CLEAR)
        grown.paste(sheet, (0, 0))
        grown.paste(block, (0, sheet.size[1]))
        sheet = grown
    return sheet
def rl(data, offset):
    """Read a little-endian unsigned 32-bit value from data at offset."""
    (value,) = struct.unpack("<L", data[offset:offset+4])
    return value
DATAPALETTE_CLEAR = 0b01101101 # magenta index
def datapalette():
    """Build a flat 256-entry RGB palette (768 ints) for "P" mode images.

    The 3:3:2 colour bits are taken bit-reversed from the index so that
    adjacent index values differ strongly in colour, then the low bits
    of each channel are filled from the high bits for brightness.
    """
    flat = []
    for index in range(256):
        bit = [(index >> n) & 1 for n in range(8)]
        # 3:3:2 bit reversed: index bits 0..7 -> channel MSBs first
        r = (bit[0] << 7) | (bit[3] << 6) | (bit[6] << 5)
        g = (bit[1] << 7) | (bit[4] << 6) | (bit[7] << 5)
        b = (bit[2] << 7) | (bit[5] << 6)
        # replicate high bits downward for full-range brightness
        r |= r >> 3
        r |= r >> 6
        g |= g >> 3
        g |= g >> 6
        b |= b >> 2
        b |= b >> 4
        flat.extend((r, g, b))
    return flat
def grey(data, offset):
    """Expand the byte at offset into an opaque greyscale RGBA tuple."""
    v = data[offset]
    return (v, v, v, 255)
def argb1555(data, offset):
    """Decode a little-endian ARGB1555 pixel at offset to an RGBA tuple.

    Returns None when fewer than 2 bytes remain at offset.
    Each 5-bit channel is scaled to 0..255; the 1-bit alpha becomes
    0 or 255.
    """
    raw = data[offset:offset+2]
    if len(raw) != 2:
        return None
    p = struct.unpack("<H", raw)[0]
    b5 = (p >> 0) & 31
    g5 = (p >> 5) & 31
    r5 = (p >> 10) & 31
    a = 255 * ((p >> 15) & 1)
    return (int(r5 * 255 / 31), int(g5 * 255 / 31), int(b5 * 255 / 31), a)
#
# FNT file dumping
#
def dump_icon(filename,fnt):
    """Dump a VMU save-icon FNT (ICON0/1/2.FNT) to filename + '.PNG'.

    Layout: 16-entry ARGB4444 palette (32 bytes) followed by three
    32x32 4-bit-indexed bitmaps. The three frames are pasted side by
    side on one sheet.
    """
    palette = []
    for i in range(16): # ARGB 4444 palette
        p = struct.unpack("<H",fnt[i*2:i*2+2])[0]
        pa = int(((p >> 12) & 15) * 255 / 15)
        pr = int(((p >> 8) & 15) * 255 / 15)
        pg = int(((p >> 4) & 15) * 255 / 15)
        pb = int(((p >> 0) & 15) * 255 / 15)
        palette.append((pr,pg,pb,pa))
    images = []
    for i in range(3):
        img = PIL.Image.new("RGBA",(32,32),CLEAR)
        img_loc = (32+i*32*32//2)  # skip 32-byte palette, 512 bytes per frame
        for y in range(32):
            for x in range(16):  # two pixels per byte, high nibble first
                p0 = fnt[img_loc+(y*32//2)+x] >> 4
                p1 = fnt[img_loc+(y*32//2)+x] & 0xF
                img.putpixel(((x*2)+0,y),palette[p0])
                img.putpixel(((x*2)+1,y),palette[p1])
        images.append(img)
    img = image_grid(None,images,32*3+4)
    img.save(filename+".PNG")
def dump_fnt(filename,fnt,mdt_map=False,warn_unused=True):
    """Dump all image tiles from an FNT blob to filename + '.PNG'.

    fnt: the raw FNT file contents (see the format notes at the end of
    this file for the header and section layout).
    mdt_map: if True, additionally compose the tiles into a positioned
    overlay image using the D/E/F sections and return its filename;
    otherwise returns None.
    warn_unused: print a warning when trailing bytes follow the last image.
    """
    # read header
    (loc_e, loc_f, loc_img, fmt, loc_d, loc_c, count) = \
        struct.unpack("<LLLLLLL",fnt[0:28])
    bsize = -1
    if fmt == 5: # fmt 20 counts bytes, fmt 5 counts 32-bit words
        loc_img *= 4
        loc_c *= 4
        loc_d *= 4
        loc_e *= 4
        loc_f *= 4
        bsize = 44
    elif fmt == 20:
        bsize = 36
    calc_count = (loc_c-0x1C)//bsize # entry count in section B?
    print("%-32s: fmt %2d, img $%06X, count %3d" % \
        (filename, fmt, loc_img, count))
    #print("C $%X, D $%X, E $%X, F $%X, IMG $%X\n" % (loc_c, loc_d, loc_e, loc_f, loc_img))
    if fmt == 0xFBF3FAD2:
        # These are save game icons, I think this hacky fmt comparison only
        # passes on all 3 because they share a palette. I'm sure the game
        # does not use their header to identify their contents.
        # ICON0.FNT
        # ICON1.FNT
        # ICON2.FNT
        dump_icon(filename,fnt)
        return
    if (calc_count != count):
        print("> calc_count mismatch: %d" % (calc_count))
    if filename.endswith("NOWLOAD.FNT"): # this file contains 10 images, strangely
        count = 10
    images = []
    last_data = loc_img  # tracks end of previous image to detect unused gaps
    for i in range(count):
        # read this tile's B-section descriptor (layout differs per fmt)
        if fmt == 5:
            desc_offset = 0x1C + (44 * i)
            if filename.endswith("NOWLOAD.FNT"):
                desc_offset = 0x1C
            img_offset = rl(fnt,desc_offset+(0*4)) + loc_img
            img_type = rl(fnt,desc_offset+(3*4))
            img_w = rl(fnt,desc_offset+(4*4))
            img_h = rl(fnt,desc_offset+(5*4))
            img_size = rl(fnt,desc_offset+(6*4))
            nzset = {0,3,4,5,6} # mark bytes that expect nonzero
        elif fmt == 20:
            desc_offset = 0x1C + (36 * i)
            img_offset = rl(fnt,desc_offset+(0*4)) + loc_img
            img_type = rl(fnt,desc_offset+(1*4))
            img_w = rl(fnt,desc_offset+(4*4))
            img_h = rl(fnt,desc_offset+(5*4))
            img_size = -1 # size not stored
            nzset = {0,1,4,5}
        # sanity check the stored size against the type's expected size
        expected_size = -1
        if img_type == 0x100:
            expected_size = img_w * img_h * 2
        elif img_type == 0x300:
            # 256 4-colour "palette" blocks + half-scale image
            expected_size = (256 * 4 * 2) + ((img_w * img_h) / 4)
        else:
            print("Unknown image type? %X" % (img_type))
        if fmt == 20:
            img_size = expected_size
        if img_size != expected_size:
            print("Image size unexpected? %dx%d (type %X) = %d != %d?" % (img_w,img_h,img_type,expected_size,img_size))
        if LOG_TILES:
            print("%3d: $%06X %4dx%4d %d bytes" % (i,img_offset,img_w,img_h,img_size))
        if img_w != img_h:
            print("Nonsquare image! %dx%d?" % (img_w, img_h))
            assert(False)
        for j in range(bsize//4): # check rest of B section entries for expected zeroes
            if j in nzset:
                continue
            v = rl(fnt,desc_offset+(j*4))
            if v != 0:
                print("Tile entry word %d is not 0? %5d / $%8X" % (j,v,v))
                # E142.FNT trips this, not sure what the meaning is though
        if filename.endswith("NOWLOAD.FNT"): # this has 10 images, but I could not find how the file actually references
            img_size += 2048 # there seems to be 2k of extra data (mostly 0s) between images
            img_offset += (i * img_size)
        img = PIL.Image.new("RGBA",(img_w,img_h),CLEAR)
        #if img_type == 0x100:
        if img_type != 0x300:
            # raw ARGB1555 pixels in twiddled order
            for y in range(img_h):
                for x in range(img_w):
                    po = twiddle(x,y) * 2
                    pp = argb1555(fnt,img_offset+po)
                    if pp != None:
                        img.putpixel((x,y),pp)
        elif img_type == 0x300:
            # byte-indexed 2x2 blocks
            dpix = img_offset + (256 * 4 * 2)
            for y in range(img_w//2):
                for x in range(img_h//2):
                    twxy = twiddle(x,y)
                    if (dpix+twxy) < len(fnt):
                        bi = fnt[dpix+twxy]
                        p0 = argb1555(fnt,img_offset+(bi*8)+0)
                        p1 = argb1555(fnt,img_offset+(bi*8)+2)
                        p2 = argb1555(fnt,img_offset+(bi*8)+4)
                        p3 = argb1555(fnt,img_offset+(bi*8)+6)
                        if p0 != None:
                            img.putpixel((x*2+0,y*2+0),p0)
                        if p1 != None:
                            img.putpixel((x*2+0,y*2+1),p1)
                        if p2 != None:
                            img.putpixel((x*2+1,y*2+0),p2)
                        if p3 != None:
                            img.putpixel((x*2+1,y*2+1),p3)
        images.append(img)
        if (img_offset - last_data) > 0:
            print("%d bytes of unused data before image?" % (img_offset - last_data))
        last_data = img_offset + img_size
    if len(fnt) > last_data and warn_unused:
        print("%d extra bytes at end of file?" % (len(fnt)-last_data))
    img = image_grids(images, WIDTH, GROUP_BY_SIZE)
    img.save(filename+".PNG")
    # debug dump of all data
    #print("C $%X, D $%X, E $%X, F $%X, IMG $%X\n" % (loc_c, loc_d, loc_e, loc_f, loc_img))
    #for l in range(0x1C,loc_c,bsize): # section B
    #    print("B %3d: %s" % ((l-0x1C)//bsize,strhex(fnt[l:l+bsize])))
    #csize = (loc_d - loc_c) // count
    #for l in range(loc_c,loc_d,csize): # section C
    #    print("C %3d: %s" % ((l-loc_c)//csize,strhex(fnt[l:l+csize])))
    #for l in range(loc_d,loc_e,20):
    #    print("D %3d: %s" % ((l-loc_d)//20,strhex(fnt[l:l+20])))
    #for l in range(loc_e,loc_f,32):
    #    print("E RAW $%08X %s" % (l,strhex(fnt[l:l+32])))
    #for l in range(loc_e,loc_f,2):
    #    o = struct.unpack("<H",fnt[l:l+2])[0]
    #    if o == 0:
    #        break
    #    o = (o * 2) + loc_e
    #    print("E %2d $%04X %s" % ((l-loc_e)//2,o,strhex(fnt[o:o+16])))
    #for l in range(loc_f,loc_img,8):
    #    print("F %3d: %s" % ((l-loc_f)//8,strhex(fnt[l:l+8])))
    if not mdt_map:
        return None
    # compose a positioned overlay from the D/E/F sections (map FNTs only)
    loc_groups = loc_d + (count * 20)
    group_count = rl(fnt,loc_groups)
    bmin = (0,0)
    bmax = (0,0)
    ftiles = []  # (tile index, x, y) placements
    for i in range(group_count):
        # D section stores X/Y render location for group
        loc_gxy = loc_groups + 4 + (i*4)
        (gx,gy) = struct.unpack("<HH",fnt[loc_gxy:loc_gxy+4])
        # E section contains pointer to a group descriptor with an index to F
        loc_eg = loc_e + (i*2)
        off_eg = struct.unpack("<H",fnt[loc_eg:loc_eg+2])[0]
        loc_fi = loc_e + (off_eg * 2) + 4
        fi = struct.unpack("<H",fnt[loc_fi:loc_fi+2])[0]
        # F section contains a list of tiles
        for l in range(loc_f+(fi*8),loc_img,8):
            (flags,ti,fx,fy) = struct.unpack("<HHhh",fnt[l:l+8])
            (tix,tiy) = images[ti].size
            tx0 = fx + gx - (tix//2) # centred on tile
            ty0 = fy + gy - (tiy//2)
            tx1 = tx0 + tix
            ty1 = ty0 + tiy
            ftiles.append((ti,tx0,ty0))
            bmin = (min(bmin[0],tx0),min(bmin[1],ty0))
            bmax = (max(bmax[0],tx1),max(bmax[1],ty1))
            if (flags & 0x8000):  # high flag bit marks the group's last tile
                break
    bmin = (0,0) # reset origin, tiles might have cut off empty areas to the top/left
    img = PIL.Image.new("RGBA",(bmax[0]-bmin[0],bmax[1]-bmin[1]),TRANSPARENT)
    for i in range(len(ftiles)):
        (ti,x,y) = ftiles[i]
        x -= bmin[0]
        y -= bmin[1]
        it = images[ti]
        img.paste(it,(x,y),mask=it)
    filemap = filename+".MAP.PNG"
    img.save(filemap)
    return filemap
def dump_fnt_load(filename):
    """Load one FNT file from INDIR and dump its image sheet into OUTFNT.

    Fix: the original left the file handle unclosed; use a context manager.
    """
    with open(os.path.join(INDIR,filename),"rb") as f:
        fnt = f.read()
    dump_fnt(os.path.join(OUTFNT,filename),fnt)
#
# MDT map data and environment graphics
#
def dump_mdt_map0(filename,mdt,offset,chunk_size):
    """Render a MAP0/MAP1 chunk from an MDT to a PNG; return its filename.

    MAP0 tiles are 16-bit ARGB1555, MAP1 tiles are 8-bit (rendered as
    greyscale here). Tiles are 32x32 with twiddled pixel order, indexed
    by a w x h grid of 16-bit tile numbers.
    """
    # unpack header
    fcc = mdt[offset:offset+4].decode("ASCII")
    assert(fcc == "MAP0" or fcc == "MAP1")
    map_type = mdt[offset+3] - ord('0')  # 0 or 1 from the fourcc's last char
    maph = struct.unpack("<LHHHHLLLL",mdt[offset+4:offset+32])
    print("%s %8X %4X %4X %4X %4X %8X %8X %8X %8X" % tuple([fcc]+list(maph)))
    w = maph[1]//32  # grid dimensions in 32x32 tiles
    h = maph[2]//32
    grid_offset = maph[5]
    tile_offset = maph[6]
    unknown_offset = maph[7]
    tiles_size = maph[8]
    # render grid
    img = PIL.Image.new("RGBA",(w*32,h*32),CLEAR)
    pixel_size = 2  # bytes per pixel: MAP0=2, MAP1=1
    if map_type == 1:
        pixel_size = 1
    for y in range(h):
        for x in range(w):
            ig = x + (y*w)
            igr = offset + grid_offset + (ig * 2)
            pg = struct.unpack("<H",mdt[igr:igr+2])[0] # tile index from grid
            tile = offset + tile_offset + (pg * pixel_size * 32 * 32)
            for ty in range(32):
                for tx in range(32):
                    ipr = (twiddle(tx,ty) * pixel_size) + tile
                    if map_type == 0:
                        p = argb1555(mdt,ipr)
                    else:
                        p = grey(mdt,ipr)
                    img.putpixel(((x*32)+tx,(y*32)+ty),p)
    fileout = "%s.%08X.%s.PNG" % (filename,offset,fcc)
    #print(fileout)
    img.save(fileout)
    return fileout
def dump_mdt_fnt(filename, mdt, offset, chunk_size):
    """Dump an FNT chunk embedded in an MDT as a map overlay.

    Returns the overlay PNG filename produced by dump_fnt (mdt_map mode).
    """
    chunk_name = "%s.%08X.FNT" % (filename, offset)
    return dump_fnt(chunk_name, mdt[offset:offset+chunk_size], True)
def dump_mdt_atr3(filename,mdt,offset,chunk_size):
    """Render an ATR3 collision/attribute chunk as a palettized PNG.

    Each grid cell has 4 attribute bytes; the four byte-planes are laid
    out as a 2x2 arrangement of w x h panels (1-pixel borders), using
    the high-contrast datapalette so byte values are distinguishable.
    """
    assert(mdt[offset:offset+4] == "ATR3".encode("ASCII"))
    (w,h,a0,a1,a2,a3) = struct.unpack("<HHHHHH",mdt[offset+4:offset+16])
    print("ATR3 %08X: %3d x %3d (%02X %02X %02X %02X)" % (offset,w,h,a0,a1,a2,a3))
    assert(a0 == 0x04)  # header values constant across all observed files
    assert(a1 == 0x08)
    img = PIL.Image.new("P",(2*w+3,2*h+3),DATAPALETTE_CLEAR)
    img.putpalette(datapalette())
    for y in range(h):
        for x in range(w):
            i = ((x + (y*w)) * 4) + offset + 8
            p = mdt[i:i+4]
            # one panel per attribute byte: top-left, top-right, bottom-left, bottom-right
            img.putpixel((1+0+x,1+0+y),p[0])
            img.putpixel((2+w+x,1+0+y),p[1])
            img.putpixel((1+0+x,2+h+y),p[2])
            img.putpixel((2+w+x,2+h+y),p[3])
    fileout = "%s.%08X.ATR3.PNG" % (filename,offset)
    #print(fileout)
    img.save(fileout)
def dump_mdt_maps(filename):
    """Dump every chunk of a map MDT file from INDIR.

    Walks the 12-byte index entries at the start of the file. Type 1
    (MAP0/MAP1) and type 2 (embedded FNT) chunks are dumped and queued
    as layers; a type 0 (ATR3) chunk ends a map, triggering a composite
    of the queued layers into OUTMAPL.
    """
    print(filename)
    mdt = open(os.path.join(INDIR,filename),"rb").read()
    pos = 0
    layers = []  # (layer index, dumped PNG filename) pending composition
    while ((pos+12) <= len(mdt)):
        chunk = pos//12
        filepart = os.path.join(OUTMAP,"%s.%03d" % (filename,chunk))
        (index, mode, offset, chunk_size) = struct.unpack("<HHLL",mdt[pos:pos+12])
        if index == 0 and mode == 0 and offset == 0 and chunk_size == 0:
            break  # all-zero entry terminates the index
        #print("%3d: %4X, %4X, %8X, %8X" % (pos//12, index, mode, offset, chunk_size))
        mode &= 0x7FFF  # high bit has unknown meaning; ignore it
        if mode == 0: # ATR3 collision data
            dump_mdt_atr3(filepart,mdt,offset,chunk_size)
            # automatically render layers
            img = None
            for (i,l) in sorted(layers):  # paste in increasing layer-index order
                if l == None:
                    continue
                imgadd = PIL.Image.open(l)
                if img == None:
                    img = imgadd
                else:
                    img.paste(imgadd,(0,0),mask=imgadd)
            if img != None:
                filelayers = os.path.join(OUTMAPL,"%s.%03d.LAYERS.PNG" % (filename,chunk))
                img.save(filelayers)
                print(filelayers)
            layers = []
        elif mode == 1: # MAP0 or MAP1
            f = dump_mdt_map0(filepart+(".%02d"%index),mdt,offset,chunk_size)
            layers.append((index,f))
        elif mode == 2: # embedded FNT, transparent overlay
            f = dump_mdt_fnt(filepart+(".%02d"%index),mdt,offset,chunk_size)
            layers.append((index,f))
        else:
            print("Unknown data type? %4X - %4X %4X %8X %8X" % (mode,index,mode,offset,chunk_size))
            assert(False) # unknown data type
        pos += 12
#
# miscellany
#
def dump_mdt_effmspr(filename):
    """Dump every FNT chunk from EFFMSPR.MDT into OUTMISC.

    The index table is a flat list of 32-bit offsets; each consecutive
    pair delimits one FNT chunk, and a zero next-offset ends the table.
    """
    print(filename)
    mdt = open(os.path.join(INDIR,filename),"rb").read()
    pos = 0
    while ((pos+4) <= len(mdt)):
        chunk = pos//4
        filepart = os.path.join(OUTMISC,"%s.%03d" % (filename,chunk))
        # reads this offset and the next one to compute the chunk size
        (offset, next_offset) = struct.unpack("<LL",mdt[pos:pos+8])
        chunk_size = next_offset - offset
        if next_offset == 0:
            break
        #print("%3d: %8X, %8X (%8X)" % (chunk, offset, next_offset, chunk_size))
        dump_fnt(filepart,mdt[offset:offset+chunk_size])
        if (next_offset == 0):
            break  # NOTE(review): unreachable — same condition already broke above
        pos += 4
# Yamaha ADPCM decode tables (see the ymz280b reference linked in the notes)
ADPCM_DIFF = [ 1, 3, 5, 7, 9, 11, 13, 15 ]
ADPCM_STEP = [ 0x0E6, 0x0E6, 0x0E6, 0x0E6, 0x133, 0x199, 0x200, 0x266 ]
def dump_mdt_pcm(filename):
    """Decode every voice chunk of PCM.MDT to 22050Hz mono 16-bit WAVs.

    Each 12-byte index entry gives (mode, offset, size); the chunk data
    is 4-bit Yamaha ADPCM, two nibbles per byte, low nibble first.
    """
    print(filename)
    mdt = open(os.path.join(INDIR,filename),"rb").read()
    pos = 0
    while ((pos+12) <= len(mdt)):
        chunk = pos//12
        filepart = os.path.join(OUTMISC,"%s.%03d" % (filename,chunk))
        (mode, offset, chunk_size) = struct.unpack("<LLL",mdt[pos:pos+12])
        #print("%3d: %8X, %8X %8X" % (chunk, mode, offset, chunk_size))
        if offset >= len(mdt):
            break  # table ends with an offset past the file
        filewave = filepart + ".WAV"
        f = wave.open(filewave,"wb")
        f.setnchannels(1)
        f.setsampwidth(2)
        f.setframerate(22050)
        nibs = []
        for i in range(offset,offset+chunk_size):
            nibs.append(mdt[i] & 0x0F)
            nibs.append(mdt[i] >> 4)
        w = bytearray()
        v = 0  # current sample value
        vstep = 0x7F  # adaptive step size
        for n in nibs:
            # low 3 nibble bits select the magnitude, bit 3 is the sign
            s = (vstep * ADPCM_DIFF[n&7]) // 8
            if n & 8: # python rounds signed integers oddly, so we must separate sign here
                s = -s
            v = v + s
            v = max(-32768,min(32767,v))
            vstep = (vstep * ADPCM_STEP[n&7]) // 256
            vstep = max(0x7F,min(0x6000,vstep))
            w += struct.pack("<h",v)
        f.writeframes(w)
        f.close()
        #print(filewave)
        pos += 12
def dump_fnt_bin(filename,offset):
    """Dump an FNT image set embedded at a fixed offset in a .BIN file.

    Trailing-data warnings are suppressed since the FNT's true end
    within the .BIN is unknown.
    Fix: the original left the file handle unclosed; use a context manager.
    """
    print("%s $%08X" % (filename,offset))
    with open(os.path.join(INDIR,filename),"rb") as f:
        b = f.read()
    dump_fnt(os.path.join(OUTMISC,"%s.%08X.FNT" % (filename,offset)), b[offset:len(b)], warn_unused=False)
def dump_img_bin(filename,offset,dim):
    """Dump a raw square ARGB1555 twiddled texture embedded in a .BIN file.

    dim: the square texture dimension in pixels; the output PNG gets a
    1-pixel CLEAR border.
    Fix: the original left the file handle unclosed; use a context manager.
    """
    print("%s $%08X" % (filename,offset))
    with open(os.path.join(INDIR,filename),"rb") as f:
        b = f.read()
    img = PIL.Image.new("RGBA",(dim+2,dim+2),CLEAR)
    for y in range(dim):
        for x in range(dim):
            twxy = twiddle(x,y) * 2
            p = argb1555(b,offset+twxy)
            img.putpixel((x+1,y+1),p)
    img.save(os.path.join(OUTMISC,"%s.%08X.PNG" % (filename,offset)))
#
# dump everything
#
# ensure the output folders exist
for d in OUTDIRS:
    pathlib.Path(d).mkdir(parents=True, exist_ok=True)
# dump standalone FNT files (DUMP_FNT: ALL, a single filename, or None to skip)
if DUMP_FNT == ALL:
    for f in os.listdir(INDIR):
        if (f.upper().endswith(".FNT")):
            dump_fnt_load(f)
elif DUMP_FNT != None:
    dump_fnt_load(DUMP_FNT)
# dump map MDT files, skipping the known non-map MDTs
if DUMP_MAP == ALL:
    for f in os.listdir(INDIR):
        if f.upper() in UNMAP_MDT:
            continue
        if (f.upper().endswith(".MDT")):
            dump_mdt_maps(f)
elif DUMP_MAP != None:
    dump_mdt_maps(DUMP_MAP)
# dump the special-format MDT archives
if MDT_EFFMSPR != None:
    dump_mdt_effmspr(MDT_EFFMSPR)
if MDT_PCM != None:
    dump_mdt_pcm(MDT_PCM)
# dump FNTs and raw textures embedded at fixed offsets in .BIN files
for (f,offset) in BIN_FNT:
    dump_fnt_bin(f,offset)
for (f,offset,dim) in BIN_IMG:
    dump_img_bin(f,offset,dim)
# FNT file format notes:
#
# There is a 28 byte header containing 7 32-bit values:
# - $00: section E location
# - $04: section F location
# - $08: image data location
# - $0C: format
# - $10: section D location
# - $14: section C location
# - $18: image count (IC)
#
# If format is 5, the location values should be multiplied by 4 to find the
# actual location within the file. If format is 20, they should be used as-is.
# No other format values were found (except the ICON#.FNT files, see below).
#
# After the header, section B begins at $1C, and contains a list of entries
# describing each image tile found in the file. There are IC entries.
#
# If the format is 5, each B entry is 44 bytes (11 32-bit words):
# - $00: byte offset to start of tile from the image data location
# - $0C: type, either $100 (raw) or $300 (indexed 2x2 blocks)
# - $10: image width
# - $14: image height (always equal to width, always power of 2)
# - $18: image data size
#
# If the format is 20, each B entry is 36 bytes (9 32-bit words):
# - $00: byte offset to tile data
# - $04: type
# - $10: width
# - $14: height
#
# These two entry types are very similar, except image size is
# not stored in format 20's entry, and the type is stored in a different location.
#
# The rest of the bytes in the B section entries are 0, except for one entry
# in E142.FNT which contains a value of unknown function.
#
# See below for image data format.
#
# Section C begins immediately after section B. Contains IC x 12 or 8 byte entries.
# - $00: 32-bit byte offset to B section entry from start of file
# - $04: always 00 00 80 40
# - $08: index from 0 to IC-1
#
# Section D begins immediately after section C.
# 1. Contains IC x 20 byte entries.
# - $10: index to one of the image tiles.
# For sprite FNT files:
# There might be more than IC entries here. This might be a list of animations?
# For FNT files embedded in a map MDT:
# 2. Immediately following 1, contains a list of group X/Y offsets for map overlays.
# Begins with a 32-bit group count.
# This is followed with that count list of 16-bit X/Y pairs.
# (Apply this to a group specified in E, followed by the tiles specified in F.)
#
# Section E contains a list of tile groups:
# 1. The first several 16-bit values are a offset to group data within section E.
# The offset is an index to 16-bit values, so must be multiplied by 2 for a byte offset.
# A value of $0000 terminates the list of offsets.
# 2. At each offset within the section is a 16-byte group descriptor:
# - $04: start tile index
#
# Section F contains 8 byte tile entries for the groups specified in section E.
# - $00: 16-bit flags, high bit set indicates last tile in group
# - $02: 16-bit tile index
# - $04: 16-bit signed X offset
# - $06: 16-bit signed Y offset
#
#
# Image data type $100:
#
# Pixels are stored in ARGB 1555 format (little endian, B in the 5 lowest bits, etc.)
#
# Pixels are stored in twiddled order. To find the data for an X,Y pixel, interleave
# the bits of the X,Y coordinate:
# Image X bits: X7 X6 X5 X4 X3 X2 X1 X0
# Image Y bits: Y7 Y6 Y5 Y4 Y3 Y2 Y1 Y0
# Twiddled offset: X7 Y7 X6 Y6 X5 Y5 X4 Y4 X3 Y3 X2 Y2 X1 Y1 X0 Y0
#
# Note that the twiddled offset above should be multiplied by 2 bytes to find the offset
# to the 16-bit pixel data.
#
#
# Image data type $300:
#
# This begins with a 256 entry "palette" of 2x2 blocks. Each block is 4 ARGB 1555
# colours for pixels (0,0) (0,1) (1,0) (1,1) of the block.
#
# Following the 2048 bytes of block palette is an indexed image of one byte per index.
# This indexed image is halved in each dimension from the total image size,
# and each entry references a 2x2 block of pixels from the palette.
# The indexed entries are stored in twiddled order (see above).
#
#
# One image (NOWLOAD.FNT) contains 10 images, rather than the
# listed number. I do not know whether this is hard-coded within the
# game or can be derived from the file itself somehow.
#
# Additionally there are 3 special files:
# - ICON0.FNT (looks like the save game icon stored on the VMU)
# - ICON1.FNT
# - ICON2.FNT
#
# This is a different, simple format:
# - 16 palette entries (ARGB 4444)
# - 3 x 32x32 bitmap (4-bit index)
# MDT file format:
#
# These are archive formats containing various chunks of data.
# Some of them have a different index table format at the start of the file.
#
#
# A maps-containing MDT file begins with an array of 12-byte entries:
# - $00: 16-bit layer index
# - $02: 16-bit type (high bit has a different meaning, might mark last chunk in group?)
# - $04: 32-bit byte offset to chunk
# - $08: 32-bit chunk size
#
# Map files have 3 kinds of chunks:
# 0. Type 0 is labelled ATR3, containing a collision/layer grid.
# 1. Type 1 is labelled MAP0 or MAP1.
# These represent a grid of 32x32 tiles.
# MAP0 uses 16-bit ARGB 1555 pixels.
# MAP1 uses 8-bit pixels (greyscale?)
# The MAP1 maps seem to be used for water or cloud surface effects, and though they
# seem to be greyscale data, are probably given a specific colour tint when rendered?
# 2. Type 2 is an embedded FNT file.
# The tiles in the FNT file are given some positional information to render sprite
# overlays on top of the tiled map layers.
#
# Generally a series of layers appear bottom to top, before finally an ATR3 chunk appears.
# Assuming these should be rendered in increasing order of index?
#
#
# EFFMSPR.MDT (Extra sprites)
# The index table is just a list of 32-bit offsets.
# The last index points to the end of the file.
# Data found at each offset can be treated as a FNT file.
#
# PCM.MDT (Grandmother / Mother / Noman voice sounds)
# 12-byte index table:
# - $00: always 000590
# - $04: offset
# - $08: chunk size
# Ends with an offset pointing to the end of file.
# Each chunk contains 4-bit Yamaha ADPCM data at 22050Hz.
# ADPCM reference: https://github.com/mamedev/mame/blob/master/src/devices/sound/ymz280b.cpp
#
# SNDSYS.MDT (Sound effects?)
# The index table is just a short list of 32-bit offsets:
# - $00: 32-bit offset to sound driver code chunk?
# - $04: 32-bit offset to a data chunk
# - $08: 32-bit offset to end of file (end of chunk table).
# File seems to contain various things including 16-bit PCM and ADPCM samples?
#
# SEQ.MDT (Music?)
# The index table has 20-byte entries:
# - $04: offset to chunk
# - $10: size of chunk
# File seems to contain 16-bit PCM and ADPCM samples?
# Font sets can be found at:
# 1ST_READ.BIN at $249C4: 1bpp 8x8 font set
# 1ST_READ.BIN at $251C3: 1bpp 8x12 font set
# 1ST_READ.BIN at $261A8: 1bpp 12x12 font set
# 1ST_READ.BIN at $46710: 1bpp 8x7 font set
# These are not dumped by this tool, but can be easily viewed with Binxelview:
# https://github.com/bbbradsmith/binxelview
# END
# eggstractor_nja.py
# NJA 3D model extractor
# for E.G.G. Elemental Gimmick Gear (Dreamcast)
#
# Brad Smith, 2020
# http://rainwarrior.ca
#
# Dump result archive: https://mega.nz/#F!B74zmaTJ!LQcdMdeOji_cKyfxwoYDbg
#
# It converts a collection of .NJA files that describe 3D models into
# the common OBJ/MTL format that many modern programs can import.
#
# To use this tool prepare the following:
#
# 1. Extract the Elemental Gimmick Gear files from your GD-ROM image.
# GDItools: https://sourceforge.net/projects/dcisotools/
#
# 2. Convert all the .PVR files in data/9MODEL/ into .PNG
# PVR Viewer: https://www.romhacking.net/utilities/1458/
#
# 3. Python 3 is required.
#
#
# Further information
#
# This utility is only good enough to extract all the NJA files within E.G.G.,
# and will intentionally throw an assert if it encounters unexpected data,
# so that I could be sure I handled everything I could.
#
# It might work with other NJA files out there, if you can find any, but
# this code might require some additional work.
#
# The NJA seems to be part of a suite of file formats belonging to a Ninja
# family of tools. These particular files bore the following comment:
# NJA 0.88alpha1 NinjaAsciiDataMix (SI)
#
# Related links:
#
# Ninja-Lib
# Documents NJ NJTL NJCM formats used in Phantasy Star Online:
# https://github.com/bogglez/Ninja-Lib/blob/master/documentation/NJ_Format.md
#
# ExMLDNet
# Converter for Ninja file types used in Skies of Arcadia:
# http://kryslin.silvite.com/exmldnet.html
# http://kryslin.silvite.com/FAQ.html
#
import os
import math
import pathlib
import PIL.Image
OUTDIR = "nja"  # output folder for converted OBJ/MTL files
# contains data/9MODEL/*.NJA
NJA_FOLDER = "../dump/data/9MODEL"
# contains data/9MODEL/*.PVR extracted as *.PVR.PNG
TEXTURE_FOLDER = "../PVRViewer/data/9MODEL"
#
# simple and rough parser that turns the NJA file into a usable tree structure
#
# Compare the text file to the output of parse_tree_str for help understanding
# its output.
#
class Node(list): # node of NJA parse tree
    """A node in the rough NJA parse tree.

    NOTE(review): subclasses list, but children are stored in the
    .children attribute rather than in the list itself — the list base
    appears unused by this parser.
    """
    def __init__(self, text_, parent_):
        self.text = text_      # token text for this node
        self.id = None         # set later when the grammar matches an <id> token
        self.parent = parent_  # parent Node (the root's parent is the source filename string)
        self.children = []     # ordered child Nodes
# Grammar entry:
# group start token : (
# [ tokens to match in order ],
# <id> replaces .id of node
# <leaf> becomes a child leaf of node
# otherwise is a matched and discarded string
# ending token,
# { leaf groups },
# optional: True prevents auto sibling-leafing
# common match prefix: capture an id token, then expect the "START" keyword
NJA_IDS = ["<id>","START"]
NJA_GRAMMAR = (
    [], None, {
        "TEXTURE_START" : ( [], "TEXTURE_END", {
            "TEXTURENAME" : ( NJA_IDS, "END", {} ),
            "TEXTURELIST" : ( NJA_IDS, "END", {} ) } ),
        "MATERIAL" : ( NJA_IDS, "END", {
            "MATSTART" : ( [], "MATEND", {} ) } ),
        "OBJECT_START" : ( [], "OBJECT_END", {
            "MATERIAL" : ( NJA_IDS, "END", {
                "MATSTART" : ( [], "MATEND", {} ) } ),
            "POLYGON" : ( NJA_IDS, "END", {}, True ),
            "VERTUV" : ( NJA_IDS, "END", {} ),
            "MESHSET" : ( NJA_IDS, "END", {
                "MESHSTART" : ( [], "MESHEND", {} ) } ),
            "POINT" : ( NJA_IDS, "END", {} ),
            "NORMAL" : ( NJA_IDS, "END", {} ),
            "MODEL" : ( NJA_IDS, "END", {
                "Center" : (["<leaf>","<leaf>","<leaf>"], None, {}, True) } ),
            "OBJECT" : ( NJA_IDS, "END", {} ) } ),
        "DEFAULT_START" : ( [], "DEFAULT_END", {
            "#ifndef" : ( ["<id>"], "#endif", {
                "#define" : ( ["<id>","<leaf>"], None, {} ) } ),
            "#define" : ( ["<id>","<leaf>"], None, {} ) } ) } )
# grammar used for "( ... )," vector groups started by a bare "(" token
NJA_GRAMMAR_VECTOR = ( [], "),", {}, True )
def parse_tree_str(node, level=0):
    """Render a parse (sub)tree as indented text for debugging.

    Each line is "<level> . . <text> (<id>)", with one ". " per depth
    level and the id shown only when set.
    """
    line = ("%d " % level) + (". " * level) + node.text
    if node.id:
        line += " (" + node.id + ")"
    parts = [line + "\n"]
    for child in node.children:
        parts.append(parse_tree_str(child, level + 1))
    return "".join(parts)
def parse_nja(filename, base_grammar = NJA_GRAMMAR, vector_grammar = NJA_GRAMMAR_VECTOR):
    """Tokenize and parse an NJA file into a Node tree.

    filename: name without the .NJA extension, loaded from NJA_FOLDER.
    base_grammar/vector_grammar: grammar tuples as documented above
    (prefix tokens, end token, nested group dict, optional no-auto-sibling
    flag). Returns the root Node. Asserts on any unexpected input.
    """
    print('load_nja_parse("'+filename+'")')
    lines = open(os.path.join(NJA_FOLDER,filename + ".NJA")).readlines()
    grammar_stack = []       # saved grammars for enclosing groups
    grammar = base_grammar   # grammar tuple for the current group
    comment = False          # inside a /* */ comment
    root = Node("<root>",filename)
    node = root              # current tree node being filled
    match = 0                # progress through the current grammar's prefix list
    auto_sibling = 0         # 1 when the next plain leaf pairs with the previous one
    ln = 0                   # current source line (1-based after increment)
    tokens = []              # tokens remaining from the current line
    while len(tokens) > 0 or ln < len(lines):
        # find next token:
        # basically just splits at all whitespace and comments,
        # and additional at '(' and '),'.
        # Comments are required to have a space following/preceding
        # '/*' and '*/', though I suspect the original language
        # definition doesn't have that restriction.
        if len(tokens) < 1:
            tokens = lines[ln].split()
            ln += 1
            continue
        t = tokens[0]
        # extra token split at (
        op = t.find('(')
        if (op < 1):
            tokens = tokens[1:]
        else:
            tnew = t[0:op]
            tkeep = ["("]
            if len(t) > (op+1):
                tkeep.append(t[op+1:])
            tokens = tkeep + tokens[1:]
            t = tnew
        # extra token split at )
        op = t.find('),')
        if op >= 1:
            tokens = [t[op:]] + tokens
            t = t[0:op]
        # deal with comments
        if comment:
            if t == "*/":
                comment = False
            continue
        if t == "/*":
            comment = True
            continue
        # debug node
        #print("%d > %s (%d)" % (len(grammar_stack), node.text, len(node.children)))
        # debug tokenizer
        #print("%5d: %s" % (ln,t))
        # remove comma from leaf text for convenience
        tl = t.rstrip(",")
        # matching prefix list
        if match < len(grammar[0]):
            tm = grammar[0][match]
            match += 1
            if tm == "<id>":
                node.id = t
            elif tm == "<leaf>":
                node.children.append(Node(tl,node))
            elif tm != t:
                print("%s expected, %s got at line %d" % (tm,t,ln))
                assert(False)
            # a prefix-only group (no end token) closes as soon as it completes
            if match >= len(grammar[0]) and grammar[1] == None:
                if len(grammar_stack) < 1 or node.parent == None:
                    print("Top level escaped at line %d?" % ln)
                    assert(False)
                grammar = grammar_stack.pop()
                match = len(grammar[0])
                node = node.parent
                auto_sibling = 0
            continue
        # close group if end token is reached
        if t == grammar[1]:
            if len(grammar_stack) < 1 or node.parent == None:
                print("Top level escaped at line %d?" % ln)
                assert(False)
            grammar = grammar_stack.pop()
            match = len(grammar[0])
            node = node.parent
            auto_sibling = 0
            continue
        # start subgroup for a matched token
        if t in grammar[2]:
            grammar_stack.append(grammar)
            grammar = grammar[2][t]
            node = Node(t,node)
            node.parent.children.append(node)
            auto_sibling = 0
            match = 0
            continue
        # start vector subgroup at (
        if t == "(":
            grammar_stack.append(grammar)
            grammar = vector_grammar
            if len(node.children) < 1:
                # NOTE(review): comma instead of % — prints the format string
                # and ln separately; only affects this error message
                print("No token preceding ( at line %d?", ln)
                assert(False)
            node = node.children[len(node.children)-1]
            auto_sibling = 0
            match = 0
            continue
        # non-vector leaf nodes get paired with preceding sibling
        if auto_sibling == 1 and (len(grammar) < 4 or not grammar[3]):
            if (len(node.children)<1):
                # NOTE(review): same comma-instead-of-% message quirk as above
                print("No sibling found for single leaf at line %d?", ln)
                assert(len(node.children)>0)
            sibling = node.children[len(node.children)-1]
            sibling.children.append(Node(tl,sibling))
            auto_sibling = 0
            continue
        # otherwise just make a simple leaf node
        node.children.append(Node(tl,node))
        auto_sibling = 1
    if len(grammar_stack) > 0:
        print("Top level not reached at end?")
        assert(False)
    return root
#
# unpacks the parsed tree into usable data
#
def find_node(node, name):
    """Return the first child of node whose .text equals name.

    Raises AssertionError when no child matches.
    """
    for child in node.children:
        if child.text == name:
            return child
    assert(False)
def find_text(node, name):
    """Return the text of the single child of the named child node.

    Asserts that the named child has exactly one child.
    """
    found = find_node(node, name)
    assert(len(found.children) == 1)
    return found.children[0].text
def find_int(node, name):
    """Parse the named child's value text as an integer (base auto-detected)."""
    text = find_text(node, name)
    return int(text, 0)
def find_float(node, name):
    """Parse the named child's value text as a float, dropping a trailing 'F'."""
    text = find_text(node, name)
    return float(text.rstrip('F'))
def find_define(node, id_):
    """Look up a #define value by id among node's children.

    Recurses into a matching #ifndef wrapper. Returns the defined
    value text, or None when the id is not found.
    """
    for child in node.children:
        if child.id != id_:
            continue
        if child.text == "#ifndef":
            return find_define(child, id_)
        if child.text == "#define":
            assert(len(child.children) == 1)
            return child.children[0].text
    return None
def unpack_vector_int(node):
    """Return node's children parsed as a tuple of ints (base auto-detected)."""
    return tuple(int(child.text, 0) for child in node.children)
def unpack_vector_float(node):
    """Return node's children parsed as a tuple of floats ('F' suffix dropped)."""
    return tuple(float(child.text.rstrip('F')) for child in node.children)
def unpack_vectors_int(node, name):
    """Return a list of int tuples, one per child of node.

    Asserts that every child node is named name.
    """
    result = []
    for child in node.children:
        assert(child.text == name)
        result.append(unpack_vector_int(child))
    return result
def unpack_vectors_float(node, name):
    """Return a list of float tuples, one per child of node.

    Asserts that every child node is named name.
    """
    result = []
    for child in node.children:
        assert(child.text == name)
        result.append(unpack_vector_float(child))
    return result
def find_vector_int(node, name):
    """Find the named child of node and unpack it as an int tuple."""
    return unpack_vector_int(find_node(node, name))
def find_vector_float(node, name):
    """Find the named child of node and unpack it as a float tuple."""
    return unpack_vector_float(find_node(node, name))
def vector_str(v):
    """Format a sequence as "[e0,e1,...]" ("[]" when empty).

    Fixes original bug: the loop appended str(v) (the whole sequence)
    for every element instead of str(e).
    """
    return "[" + ",".join(str(e) for e in v) + "]"
class NJAObject():
    """Aggregate of all parsed sections of one NJA file.

    Each attribute is a dict of parsed section entries keyed by the
    section node id (filled in elsewhere after parsing).
    """
    def __init__(self):
        self.material = {}  # MATERIAL sections
        self.polygon = {}   # POLYGON sections
        self.vertuv = {}    # VERTUV sections
        self.meshset = {}   # MESHSET sections
        self.point = {}     # POINT sections
        self.normal = {}    # NORMAL sections
        self.model = {}     # MODEL sections
        self.object = {}    # OBJECT sections
        self.id = None      # id of this object
    def __repr__(self):
        return "NJAObject:"+str(self.id)+"\n"+\
            str(self.material)+",\n"+\
            str(self.polygon)+"\n"+\
            str(self.vertuv)+"\n"+\
            str(self.meshset)+"\n"+\
            str(self.point)+"\n"+\
            str(self.normal)+"\n"+\
            str(self.model)+"\n"+\
            str(self.object)
class NJAMaterial():
    """One MATSTART entry: colors, specular exponent, texture attributes."""
    def __init__(self):
        self.diffuse = None    # int tuple; [0] used as opacity, [1:4] as RGB by export_material
        self.specular = None   # int tuple, same layout as diffuse
        self.exponent = None   # specular exponent (float)
        self.attrtexid = None  # int tuple; [1] indexes the texture list
        self.attrflags = None  # raw attribute bit flags (int)
    def __repr__(self):
        return ("(diffuse:{}, specular:{}, exponent:{:f}, "
                "attrtexid:{}, attrflags:{:08X})").format(
                    self.diffuse, self.specular, self.exponent,
                    self.attrtexid, self.attrflags)
class NJAMesh():
    """One MESHSTART entry referencing polygon/UV blocks by name."""
    def __init__(self):
        self.typematid = None   # function unknown
        self.meshnum = None     # declared mesh count
        self.meshes = None      # name of the POLYGON block
        self.polyattrs = None   # function unknown
        self.polynormal = None
        self.vertcolor = None   # none existed in E.G.G.
        self.vertuv = None      # name of the VERTUV block, or "NULL"
    def __repr__(self):
        return ("(typematid:{}, meshnum:{:d}, meshes:{}, polyattrs:{}, "
                "polynormal:{}, vertcolor:{}, vertuv:{})").format(
                    self.typematid, self.meshnum, self.meshes,
                    self.polyattrs, self.polynormal, self.vertcolor,
                    self.vertuv)
class NJAModel():
    """One MODEL entry tying together point/normal/meshset/material blocks."""
    def __init__(self):
        self.points = None      # name of the POINT block
        self.normal = None      # name of the NORMAL block
        self.pointnum = None
        self.meshset = None     # name of the MESHSET block
        self.materials = None   # name of the MATERIAL block
        self.meshsetnum = None
        self.matnum = None
        self.center = None      # not used by this exporter, probably just a culling hint
        self.radius = None      # (same)
    def __repr__(self):
        return ("(points:{}, normal:{}, pointnum:{:d}, meshset:{}, "
                "materials:{}, meshsetnum:{:d}, matnum:{:d}, center:{}, "
                "radius:{:f})").format(
                    self.points, self.normal, self.pointnum, self.meshset,
                    self.materials, self.meshsetnum, self.matnum,
                    self.center, self.radius)
class NJAObjectSingleton():
    """One OBJECT entry: transform plus child/sibling links (names or "NULL")."""
    def __init__(self):
        self.evalflags = None  # function unknown
        self.model = None      # model name, or "NULL"
        self.oposition = None  # (x, y, z) translation
        self.oangle = None     # (x, y, z) rotation, degrees
        self.oscale = None     # (x, y, z) scale
        self.child = None      # child object name, or "NULL"
        self.sibling = None    # sibling object name, or "NULL"
    def __repr__(self):
        return ("(evalflags:{:X}, model:{}, oposition:{}, oangle:{}, "
                "oscale:{}, child:{}, sibling:{})").format(
                    self.evalflags, self.model, self.oposition,
                    self.oangle, self.oscale, self.child, self.sibling)
def unpack_nja_texture_start(ts):
    """Unpack a TEXTURE_START section.

    Returns (tn, tl):
      tn: TEXTURENAME id -> list of texture name strings (quotes stripped)
      tl: TEXTURELIST id -> (referenced TEXTURENAME id, declared count)
    """
    tn = {}
    tl = {}
    for node in ts.children:
        if node.text == "TEXTURENAME":
            texns = []
            for texn in node.children:
                # each entry is a single-child TEXN node holding a quoted name
                assert(len(texn.children)==1)
                assert(texn.text == "TEXN")
                texns.append(texn.children[0].text.lstrip('"').rstrip('"'))
            # ids must be unique within the file
            assert(node.id not in tn)
            tn[node.id] = texns
        elif node.text == "TEXTURELIST":
            ref = find_text(node,"TextureList")
            num = find_int(node,"TextureNum")
            assert(len(node.children)==2)
            assert(node.id not in tl)
            tl[node.id] = (ref,num)
        else:
            # no other block kinds expected inside TEXTURE_START
            assert(False)
    return (tn,tl)
def unpack_nja_object_material(node):
    """Unpack a MATERIAL block: { node.id : [NJAMaterial, ...] }."""
    mats = []
    for entry in node.children:
        assert(entry.text == "MATSTART")
        mat = NJAMaterial()
        mat.diffuse = find_vector_int(entry, "Diffuse")
        mat.specular = find_vector_int(entry, "Specular")
        mat.exponent = find_float(entry, "Exponent")
        mat.attrtexid = find_vector_int(entry, "AttrTexId")
        mat.attrflags = find_int(entry, "AttrFlags")
        assert(len(entry.children) == 5)  # exactly the five fields above
        mats.append(mat)
    return { node.id : mats }
def unpack_nja_object_polygon(node):
    """Unpack a POLYGON block into triangle strips.

    Returns { node.id : [(header, strip), ...] } where header is the
    two-int "Strip" vector and strip is a flat list of vertex indices
    (header[1] of them).
    """
    strips = []
    i = 0
    # children alternate: one "Strip" header node, then header[1] index nodes
    while i < len(node.children):
        c = node.children[i]
        assert(c.text == "Strip")
        header = unpack_vector_int(c)
        assert(len(header)==2)
        # header[0] function is unknown, seemed to be 0 or 0x8000
        count = header[1]
        strip = []
        for j in range(count):
            strip.append(int(node.children[i+1+j].text,0))
        i += count + 1  # advance past the header and its index nodes
        strips.append((header,strip))
    return { node.id: strips }
def unpack_nja_object_vertuv(node):
    """Unpack a VERTUV block: { node.id : [(u, v), ...] int tuples }."""
    uvs = unpack_vectors_int(node, "UV")
    return { node.id : uvs }
def unpack_nja_object_meshset(node):
    """Unpack a MESHSET block: { node.id : [NJAMesh, ...] }."""
    meshes = []
    for entry in node.children:
        assert(entry.text == "MESHSTART")
        mesh = NJAMesh()
        mesh.typematid = find_vector_int(entry, "TypeMatId")
        mesh.meshnum = find_int(entry, "MeshNum")
        mesh.meshes = find_text(entry, "Meshes")
        mesh.polyattrs = find_text(entry, "PolyAttrs")
        mesh.polynormal = find_text(entry, "PolyNormal")
        mesh.vertcolor = find_text(entry, "VertColor")
        mesh.vertuv = find_text(entry, "VertUV")
        assert(len(entry.children) == 7)  # exactly the seven fields above
        meshes.append(mesh)
    return { node.id : meshes }
def unpack_nja_object_point(node):
    """Unpack a POINT block: { node.id : [(x, y, z), ...] float tuples }."""
    verts = unpack_vectors_float(node, "VERT")
    return { node.id : verts }
def unpack_nja_object_normal(node):
    """Unpack a NORMAL block: { node.id : [(x, y, z), ...] float tuples }."""
    norms = unpack_vectors_float(node, "NORM")
    return { node.id : norms }
def unpack_nja_object_model(node):
    """Unpack a MODEL block: { node.id : NJAModel }."""
    mdl = NJAModel()
    mdl.points = find_text(node, "Points")
    mdl.normal = find_text(node, "Normal")
    mdl.pointnum = find_int(node, "PointNum")
    mdl.meshset = find_text(node, "Meshset")
    mdl.materials = find_text(node, "Materials")
    mdl.meshsetnum = find_int(node, "MeshsetNum")
    mdl.matnum = find_int(node, "MatNum")
    mdl.center = find_vector_float(node, "Center")
    mdl.radius = find_float(node, "Radius")
    assert(len(node.children) == 9)  # exactly the nine fields above
    return { node.id : mdl }
def unpack_nja_object_object(node):
    """Unpack an OBJECT block: { node.id : NJAObjectSingleton }."""
    entry = NJAObjectSingleton()
    entry.evalflags = find_int(node, "EvalFlags")
    entry.model = find_text(node, "Model")
    entry.oposition = find_vector_float(node, "OPosition")
    entry.oangle = find_vector_float(node, "OAngle")
    entry.oscale = find_vector_float(node, "OScale")
    entry.child = find_text(node, "Child")
    entry.sibling = find_text(node, "Sibling")
    assert(len(node.children) == 7)  # exactly the seven fields above
    return { node.id : entry }
def unpack_nja_object(node):
    """Unpack one OBJECT_START section into an NJAObject.

    Each child block is dispatched to its unpacker and merged into the
    NJAObject attribute of the same (lower-cased) name.  The single
    OBJECT entry is then collapsed: o.object becomes the
    NJAObjectSingleton itself and o.id its key.  Returns { id : o }.
    """
    o = NJAObject()
    unpackers = {
        "MATERIAL" : unpack_nja_object_material,
        "POLYGON" : unpack_nja_object_polygon,
        "VERTUV" : unpack_nja_object_vertuv,
        "MESHSET" : unpack_nja_object_meshset,
        "POINT" : unpack_nja_object_point,
        "NORMAL" : unpack_nja_object_normal,
        "MODEL" : unpack_nja_object_model,
        "OBJECT" : unpack_nja_object_object }
    for c in node.children:
        if c.text in unpackers:
            # NJAObject attribute names mirror the block names, lower-cased
            attr = c.text.lower()
            d = unpackers[c.text](c)
            # merged block ids must not collide
            assert(len(set(getattr(o,attr)).intersection(set(d)))==0)
            getattr(o,attr).update(d)
        else:
            assert(False)
    # exactly one OBJECT per section; at most one of each other block kind
    assert(len(o.object) == 1)
    assert(len(o.model) <= 1)
    assert(len(o.meshset) <= 1)
    assert(len(o.material) <= 1)
    k = list(o.object.keys())[0]
    o.object = o.object[k]
    o.id = k
    return { k : o }
def unpack_nja(nt):
    """Unpack a parsed NJA tree into its usable parts.

    Returns a 6-tuple:
      (njaobjects, texturename, texturelist,
       default_object, default_texlist, globmaterials)
    default_object / default_texlist are the #define'd names from the
    DEFAULT_START section, or None when that section is absent.
    """
    texturename = {}
    texturelist = {}
    njaobjects = {}
    globmaterials = {}
    default_object = None
    # BUGFIX: was misspelled "default_textlist", which caused a NameError
    # at the return statement whenever no DEFAULT_START section existed.
    default_texlist = None
    for node in nt.children:
        if node.text == "TEXTURE_START":
            (tn,tl) = unpack_nja_texture_start(node)
            # ids must be unique across sections
            assert(len(set(texturename).intersection(set(tn)))==0)
            assert(len(set(texturelist).intersection(set(tl)))==0)
            texturename.update(tn)
            texturelist.update(tl)
        elif node.text == "OBJECT_START":
            o = unpack_nja_object(node)
            assert(len(set(njaobjects).intersection(set(o)))==0)
            njaobjects.update(o)
        elif node.text == "DEFAULT_START":
            default_object = find_define(node,"DEFAULT_OBJECT_NAME")
            default_texlist = find_define(node,"DEFAULT_TEXLIST_NAME")
        elif node.text == "MATERIAL":
            # global materials declared outside any object
            d = unpack_nja_object_material(node)
            assert(len(set(globmaterials).intersection(set(d)))==0)
            globmaterials.update(d)
        else:
            assert(False)
    # NOTE: texture names could be resolved to loaded textures here if needed.
    return (njaobjects, texturename, texturelist, default_object, default_texlist, globmaterials)
#
# access to the unpacked NJA
#
def nja_get_object(nja, name):
    """Look up an object by base name (the array suffix "[]" is appended)."""
    key = name + "[]"
    return nja[0][key]
def nja_get_default_object(nja):
    """Fetch the object named by the NJA's default-object define (nja[3])."""
    default_name = nja[3]
    return nja_get_object(nja, default_name)
def nja_get_default_textures(nja):
    """Return the texture-name list referenced by the default TEXTURELIST.

    Returns None when the NJA declared no default texlist.  Asserts the
    list length matches the declared TextureNum count.
    """
    default_texlist = nja[4]
    if default_texlist is None:  # idiom fix: compare to None with 'is'
        return None
    (ref, count) = nja[2][default_texlist]
    texturename = nja[1][ref + "[]"]
    assert(len(texturename) == count)
    return texturename
def nja_gather_object_tree(n, o, level=0):
    """Flatten the child/sibling hierarchy into [(id, depth), ...] in walk order."""
    result = [(o.id, level)]
    child = o.object.child
    sib = o.object.sibling
    if child != "NULL":
        result.extend(nja_gather_object_tree(n, nja_get_object(n, child), level + 1))
    if sib != "NULL":
        result.extend(nja_gather_object_tree(n, nja_get_object(n, sib), level))
    return result
def nja_object_tree(n, o, level=0):
    """Render the object hierarchy as an indented multi-line string.

    BUGFIX: the recursion previously called an undefined name
    "object_tree", raising NameError on any object with a child or
    sibling; it now recurses into nja_object_tree itself.
    """
    obj = o.object
    s = ("%d " % level) + (". " * level)
    s += o.id + "\n"
    if obj.child != "NULL":
        s += nja_object_tree(n, n[0][obj.child+"[]"], level+1)
    if obj.sibling != "NULL":
        s += nja_object_tree(n, n[0][obj.sibling+"[]"], level)
    return s
def nja_stats(n):
    """Human-readable summary: counts, the default-object walk, and orphans."""
    lines = []
    lines.append("object: %d (%s)" % (len(n[0]), n[3]))
    lines.append("texturename: %d" % len(n[1]))
    lines.append("texturelist: %d (%s)" % (len(n[2]), n[4]))
    walk = nja_gather_object_tree(n, nja_get_default_object(n))
    for (name, level) in walk:
        lines.append(("%d " % level) + (". " * level) + name)
    # anything not reached from the default object is an orphan
    reached = {name for (name, level) in walk}
    for key in n[0]:
        if key not in reached:
            lines.append("orphan: " + n[0][key].id)
    return "\n".join(lines) + "\n"
#
# simple 3D vertex and transform
#
class V3:
    """Simple 3D vector with x/y/z components."""
    def __init__(self,x,y,z):
        self.x = x
        self.y = y
        self.z = z
    def norm(self):
        """Return a unit-length copy; the zero vector stays zero.

        BUGFIX: the original returned V3(x/m, x/m, y/m) — the wrong
        components — so every "normalized" vector was garbage.
        """
        m2 = (self.x*self.x)+(self.y*self.y)+(self.z*self.z)
        if (m2 == 0):
            return V3(0,0,0)
        m = (m2 ** 0.5)
        return V3(self.x/m, self.y/m, self.z/m)
class T3:
    """Minimal 4x4 homogeneous transform; self.m is a tuple of row tuples.

    Factory methods are static (call as T3.rotx(...) etc.); FIX: they
    previously lacked @staticmethod, so calling them on an instance
    would have passed the instance as the angle/offset argument.
    Multiplication composes transforms (T3 * T3) or applies one to a
    vertex (T3 * V3).
    """
    def __init__(self):
        # identity
        self.m = ((1,0,0,0),(0,1,0,0),(0,0,1,0),(0,0,0,1))
    @staticmethod
    def rotx(degree):
        """Rotation about the X axis by *degree* degrees."""
        t = T3()
        s = math.sin(degree * math.pi / 180)
        c = math.cos(degree * math.pi / 180)
        t.m = ((1,0,0,0),(0,c,-s,0),(0,s,c,0),(0,0,0,1))
        return t
    @staticmethod
    def roty(degree):
        """Rotation about the Y axis by *degree* degrees."""
        t = T3()
        s = math.sin(degree * math.pi / 180)
        c = math.cos(degree * math.pi / 180)
        t.m = ((c,0,s,0),(0,1,0,0),(-s,0,c,0),(0,0,0,1))
        return t
    @staticmethod
    def rotz(degree):
        """Rotation about the Z axis by *degree* degrees."""
        t = T3()
        s = math.sin(degree * math.pi / 180)
        c = math.cos(degree * math.pi / 180)
        t.m = ((c,-s,0,0),(s,c,0,0),(0,0,1,0),(0,0,0,1))
        return t
    @staticmethod
    def translate(x,y,z):
        """Translation by (x, y, z)."""
        t = T3()
        t.m = ((1,0,0,x),(0,1,0,y),(0,0,1,z),(0,0,0,1))
        return t
    @staticmethod
    def scale(x,y,z):
        """Non-uniform scale by (x, y, z)."""
        t = T3()
        t.m = ((x,0,0,0),(0,y,0,0),(0,0,z,0),(0,0,0,1))
        return t
    def __mul__(self,t):
        """T3 * T3 -> composed T3; T3 * V3 -> transformed V3."""
        if type(t) is T3:
            to = T3()
            rows = []
            for r in range(4):
                row = []
                for c in range(4):
                    a = 0
                    for i in range(4):
                        a += self.m[r][i] * t.m[i][c]
                    row.append(a)
                rows.append(tuple(row))
            to.m = tuple(rows)
            return to
        elif type(t) is V3:
            v = [t.x, t.y, t.z, 1]
            o = [0, 0, 0, 0]
            for r in range(4):
                a = 0
                for i in range(4):
                    a += self.m[r][i] * v[i]
                o[r] = a
            return V3(o[0], o[1], o[2])
        # FIX: previously fell through and silently returned None for
        # unsupported operand types; NotImplemented raises a clear TypeError.
        return NotImplemented
#
# NJA conversion to OBJ file format
#
def object_transform(obj):
    """Full local transform for an object node: translate, then rotX/Y/Z, then scale."""
    o = obj.object
    m = T3.translate(o.oposition[0], o.oposition[1], o.oposition[2])
    m = m * T3.rotx(o.oangle[0])
    m = m * T3.roty(o.oangle[1])
    m = m * T3.rotz(o.oangle[2])
    return m * T3.scale(o.oscale[0], o.oscale[1], o.oscale[2])
def object_normal_transform(obj):
    """Transform for normals: like object_transform but without the translation."""
    o = obj.object
    m = T3.rotx(o.oangle[0])
    m = m * T3.roty(o.oangle[1])
    m = m * T3.rotz(o.oangle[2])
    return m * T3.scale(o.oscale[0], o.oscale[1], o.oscale[2])
def collect_object(nja, o, parent_transform=None, parent_normal_transform=None):
    """Recursively collect render data for *o* and its children/siblings.

    Returns a list of (id, material, point, normal, vertuv, meshlist)
    tuples, one per object with a model.  Points are transformed into
    world space; normals are rotated/scaled and re-normalized; UVs are
    rescaled from 0-255 to 0-1 with V flipped.

    FIX: defaults are now None sentinels (a single shared T3() default
    instance was created once at def time); BUGFIX: the normal loop
    previously built its vector from the stale point variable 'p' and
    normalized the untransformed vector, discarding the transform.
    """
    if parent_transform is None:
        parent_transform = T3()
    if parent_normal_transform is None:
        parent_normal_transform = T3()
    obj = []
    transform = parent_transform * object_transform(o)
    normal_transform = parent_normal_transform * object_normal_transform(o)
    # collect all render data for this object
    if o.object.model != "NULL":
        material = {}
        point = {}
        normal = {}
        vertuv = {}
        meshlist = []
        # materials: flatten each list into "name__i" entries
        for k in o.material:
            ms = o.material[k]
            for i in range(len(ms)):
                name = k.rstrip("[]")+("__%d" % i)
                material[name] = ms[i]
        # points: transform into world space
        for k in o.point:
            tps = []
            for p in o.point[k]:
                assert(len(p) == 3)
                tps.append(transform * V3(p[0], p[1], p[2]))
            point[k.rstrip("[]")] = tps
        # normals: rotate/scale (no translation), then re-normalize
        for k in o.normal:
            tns = []
            for nrm in o.normal[k]:
                assert(len(nrm) == 3)
                # BUGFIX (see docstring): use the normal's own components
                # and normalize the *transformed* vector
                ntv = normal_transform * V3(nrm[0], nrm[1], nrm[2])
                tns.append(ntv.norm())
            normal[k.rstrip("[]")] = tns
        # UVs: 0-255 integer coords -> 0-1 floats, V flipped for OBJ
        for k in o.vertuv:
            tus = []
            for u in o.vertuv[k]:
                assert(len(u) == 2)
                tus.append((u[0]/256, 1-(u[1]/256)))
            vertuv[k.rstrip("[]")] = tus
        # meshes: one entry per meshset, referencing the data blocks above
        for kmodel in o.model:
            model = o.model[kmodel]
            meshset = o.meshset[model.meshset+"[]"]
            for imesh in range(model.meshsetnum):
                mesh = meshset[imesh]
                polygon = o.polygon[mesh.meshes+"[]"]
                meshlist.append((mesh.meshes,
                                 model.materials+"__%d" % mesh.typematid[1],
                                 model.points,
                                 model.normal,
                                 mesh.vertuv,
                                 polygon))
        obj = [(o.id,material,point,normal,vertuv,meshlist)]
    else:
        assert(len(o.model)==0)
    # add children and siblings (siblings keep the parent's transforms)
    if o.object.child != "NULL":
        obj += collect_object(nja, nja_get_object(nja, o.object.child), transform, normal_transform)
    if o.object.sibling != "NULL":
        obj += collect_object(nja, nja_get_object(nja, o.object.sibling), parent_transform, parent_normal_transform)
    return obj
def export_material(m, name, textures):
    """Format one MTL material entry for NJAMaterial *m* under *name*.

    Specular is emitted commented-out and forced to 0 (matte) for better
    behaviour in the Windows 3D viewer.  When *textures* is provided, a
    map_Kd line referencing the texture at m.attrtexid[1] is included.
    """
    lines = []
    lines.append("newmtl %s" % name)
    lines.append("Ka %f %f %f" % (0, 0, 0))
    lines.append("Kd %f %f %f" % (m.diffuse[1]/255, m.diffuse[2]/255, m.diffuse[3]/255))
    lines.append("#Ks %f %f %f" % (m.specular[1]/255, m.specular[2]/255, m.specular[3]/255))
    lines.append("Ks 0 0 0 # forcing matte material for better behaviour in Windows 3D viewer")
    lines.append("d %f" % (m.diffuse[0]/255))
    lines.append("Ns %f" % (m.exponent))
    lines.append("illum %d" % (0))
    if textures is not None:
        lines.append("map_Kd %s.PVR.PNG" % (textures[m.attrtexid[1]].upper()))
    lines.append("")
    return "\n".join(lines) + "\n"
def export_collected_obj(nja,coll):
    """Convert collect_object() output into Wavefront (mtl, obj) text.

    *coll* is the list of (id, material, point, normal, vertuv, meshlist)
    tuples from collect_object().  Global materials (nja[5]) and each
    object's materials go into the MTL; vertices/normals/UVs and
    triangle-strip faces go into the OBJ.
    """
    # generate MTL material definitions
    mtl = ""
    textures = nja_get_default_textures(nja)
    mtl += "# Global materials\n"
    mtl += "\n"
    for k in nja[5]:
        for i in range(len(nja[5][k])):
            mtl += export_material(nja[5][k][i],k + ("__%d" % i),textures)
    for o in coll:
        mtl += "# %s\n" % (o[0])
        mtl += "\n"
        material = o[1]
        for k in material:
            mtl += export_material(material[k],k,textures)
    mtl += "# END\n"
    # generate OBJ
    obj = ""
    # running totals give each named block its base index in the global
    # v/vn/vt streams (OBJ face indices are global and 1-based)
    countp = 0
    countn = 0
    countu = 0
    idxp = {}
    idxn = {}
    idxu = {}
    for o in coll:
        obj += "# OBJECT %s\n" % (o[0])
        obj += "\n"
        for kp in o[2]:
            ps = o[2][kp]
            idxp[kp] = countp
            countp += len(ps)
            obj += "# POINT %s\n" % (kp)
            for p in ps:
                obj += "v %f %f %f\n" % (p.x, p.y, p.z)
            obj += "\n"
        for kn in o[3]:
            ns = o[3][kn]
            idxn[kn] = countn
            countn += len(ns)
            obj += "# NORMAL %s\n" % (kn)
            for n in ns:
                obj += "vn %f %f %f\n" % (n.x, n.y, n.z)
            obj += "\n"
        for ku in o[4]:
            us = o[4][ku]
            idxu[ku] = countu
            countu += len(us)
            obj += "# VERTUV %s\n" % (ku)
            for u in us:
                obj += "vt %f %f\n" % u
            obj += "\n"
        for m in o[5]:
            # meshes with vertuv "NULL" have no texture coordinates
            hasuv = m[4] != "NULL"
            obj += "# MESHSET %s\n" % (m[0])
            obj += "g %s\n" % (m[0])
            obj += "usemtl %s\n" % (m[1])
            offu = 0
            offp = idxp[m[2]]
            offn = idxn[m[3]]
            if hasuv:
                offu = idxu[m[4]]
            obj += "# +%d %s\n" % (offp, m[2])
            obj += "# +%d %s\n" % (offn, m[3])
            obj += "# +%d %s\n" % (offu, m[4])
            offs = 0
            for (header,strip) in m[5]:
                assert(len(strip)==header[1])
                obj += "# strip(0x%04X, %d) +%d\n" % (header[0],header[1],offs)
                # triangle strip: each new index forms a triangle with the
                # previous two, alternating winding (flip) each step
                flip = False
                for i in range(2,len(strip)):
                    tri = strip[i-2:i+1]
                    tuv = list(range(i-2,i+1))
                    if (flip):
                        tri.reverse()
                        tuv.reverse()
                    flip = not flip
                    obj += "f"
                    for j in range(3):
                        # OBJ f syntax: vertex/uv/normal, 1-based
                        obj += " %d/" % (offp + tri[j] + 1)
                        if hasuv:
                            obj += "%d" % (offu + offs + tuv[j] + 1)
                        obj += "/%d" % (offn + tri[j] + 1)
                    obj += "\n"
                offs += len(strip)
                obj += "\n"
    obj += "# END\n"
    return (mtl,obj)
#
# converts every NJA in the data/9MODEL folder
#
# Batch conversion driver.
# NOTE(review): OUTDIR, NJA_FOLDER, TEXTURE_FOLDER and parse_nja() appear to
# be defined earlier in this file (outside this excerpt) — confirm.
pathlib.Path(OUTDIR).mkdir(parents=True, exist_ok=True)
for f in os.listdir(NJA_FOLDER):
    if not f.upper().endswith(".NJA"):
        continue
    f = f[0:len(f)-4]  # strip the ".NJA" extension
    nt = parse_nja(f)
    n = unpack_nja(nt)
    print(nja_stats(n))
    # Note: orphan objects could potentially exist,
    # not referenced by the default object hierarchy,
    # but no cases of this existed within E.G.G.
    coll = collect_object(n, nja_get_default_object(n))
    (mtl,obj) = export_collected_obj(n,coll)
    # prepend the mtllib directive linking the OBJ to its MTL
    obj = "mtllib " + f + ".mtl\n\n" + obj
    open(os.path.join(OUTDIR,f+".mtl"),"wt").write(mtl)
    open(os.path.join(OUTDIR,f+".obj"),"wt").write(obj)
for f in os.listdir(TEXTURE_FOLDER):
    # copy textures and automatically crop to square (in case mipmaps are contained)
    if not f.upper().endswith(".PNG"):
        continue
    print("Texture: "+f)
    infile = os.path.join(TEXTURE_FOLDER,f)
    outfile = os.path.join(OUTDIR,f)
    img = PIL.Image.open(infile)
    # keep only the top width-by-width region of the image
    img = img.crop((0,0,img.size[0],img.size[0]))
    img.save(outfile)
# END
# Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment