yaml-ify map metadata & iQue pm_map (#1145)

* map (draft)

* put into mapfs.yaml and the other versions' configs

* ique map finished

* remove the type
AltoXorg 2024-01-11 19:41:11 +08:00 committed by GitHub
parent 82b09bd69e
commit b2cf91c8ca
8 changed files with 2218 additions and 93 deletions

View File

@@ -301,7 +301,7 @@ def write_ninja_rules(
ninja.rule(
"pack_title_data",
description="pack_title_data $out",
command=f"$python {BUILD_TOOLS}/mapfs/pack_title_data.py $out $in",
command=f"$python {BUILD_TOOLS}/mapfs/pack_title_data.py $version $out $in",
)
ninja.rule("map_header", command=f"$python {BUILD_TOOLS}/mapfs/map_header.py $in > $out")
@@ -947,6 +947,9 @@ class Configure:
"img_flags": "",
},
)
elif path.suffixes[-2:] == [".raw", ".dat"]:
compress = False
bin_path = path
elif name == "title_data":
compress = True

View File

@@ -16,11 +16,13 @@ def get_version_date(version):
return "Map Ver.00/07/05 19:13"
elif version == "pal":
return "Map Ver.01/03/23 16:30"
elif version == "ique":
return "Map Ver.04/05/18 13:41"
else:
return "Map Ver.??/??/?? ??:??"
def build_mapfs(out_bin, assets, version):
def build_mapfs(out_bin, assets, version, pre_write_assets):
# every TOC entry's name field has data after the null terminator made up from all the previous name fields.
# we probably don't have to do this for the game to read the data properly (it doesn't read past the null terminator
# of `string`), but the original devs' equivalent of this script had this bug so we need to replicate it to match.
@@ -41,6 +43,9 @@ def build_mapfs(out_bin, assets, version):
decompressed_size = decompressed.stat().st_size
size = next_multiple(compressed.stat().st_size, 2) if compressed.exists() else decompressed_size
if version == "ique" and decompressed.stem == "title_data":
size = compressed.stat().st_size
# print(f"{name} {offset:08X} {size:08X} {decompressed_size:08X}")
# write all previously-written names; required to match
@@ -52,10 +57,18 @@ def build_mapfs(out_bin, assets, version):
f.seek(toc_entry_pos + 0x10)
f.write(struct.pack(">III", offset, size, decompressed_size))
# initial data to be overwritten back, provided by .raw.dat files
pre_write_bytes = b""
if pre_write_assets.get(decompressed.stem):
with open(pre_write_assets[decompressed.stem], "rb") as pwf:
pre_write_bytes = pwf.read()
f.seek(0x20 + next_data_pos)
f.write(pre_write_bytes)
# write data.
f.seek(0x20 + next_data_pos)
f.write(compressed.read_bytes() if compressed.exists() else decompressed.read_bytes())
next_data_pos += size
next_data_pos += max(len(pre_write_bytes), size)
asset_idx += 1
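A note on the max() above (not part of the diff): the .raw.dat blob is written into the slot first and the real asset is then written over its start, so the next asset has to begin after whichever of the two is larger. A toy check of that arithmetic, with made-up sizes:

pre_write_len = 0x3F80   # hypothetical length of a title_data.raw.dat blob
size = 0x2E50            # hypothetical size of the rebuilt asset written on top
next_data_pos = 0x1000
next_data_pos += max(pre_write_len, size)
assert next_data_pos == 0x1000 + 0x3F80  # the larger pre-write region stays reserved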
@@ -77,9 +90,16 @@ if __name__ == "__main__":
out = argv.pop(0)
assets = []
pre_write_assets = {}
# pairs
for i in range(0, len(argv), 2):
assets.append((Path(argv[i]), Path(argv[i + 1])))
for path in argv:
path = Path(path)
if path.suffixes[-2:] == [".raw", ".dat"]:
pre_write_assets[path.with_suffix("").stem] = path
else:
assets.append(path)
build_mapfs(out, assets, version)
# turn them into pairs
assets = list(zip(assets[::2], assets[1::2]))
build_mapfs(out, assets, version, pre_write_assets)
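For clarity on the new calling convention (a sketch, not part of the diff; the file names are hypothetical): *.raw.dat paths are pulled out into pre_write_assets keyed by the bare asset name, and the remaining paths are grouped back into the pairs build_mapfs iterates over.

from pathlib import Path

argv = [
    "kmr_bg.bin.Yay0", "kmr_bg.bin",
    "title_data.bin.Yay0", "title_data.bin",
    "title_data.raw.dat",
]

pre_write_assets = {}
assets = []
for path in map(Path, argv):
    if path.suffixes[-2:] == [".raw", ".dat"]:
        # "title_data.raw.dat" -> key "title_data"
        pre_write_assets[path.with_suffix("").stem] = path
    else:
        assets.append(path)

assets = list(zip(assets[::2], assets[1::2]))  # adjacent paths become pairs again
assert pre_write_assets["title_data"] == Path("title_data.raw.dat")
assert len(assets) == 2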

View File

@@ -4,51 +4,37 @@ from sys import argv
if __name__ == "__main__":
argv.pop(0) # python3
version = argv.pop(0)
out = argv.pop(0)
imgs = argv
if len(argv) > 4:
out, img1, img2, img3, img2_pal = argv
imgs_bytes = []
for img in imgs:
with open(img, "rb") as f:
imgs_bytes.append(f.read())
if version == "jp":
# copyright, copyright pal, press start, logo
write_order = (1, 3, 2, 0)
elif version == "ique":
# press start, copyright, logo
write_order = (2, 1, 0)
else:
out, img1, img2, img3 = argv
img2_pal = None
with open(img1, "rb") as f:
img1 = f.read()
with open(img2, "rb") as f:
img2 = f.read()
with open(img3, "rb") as f:
img3 = f.read()
if img2_pal:
with open(img2_pal, "rb") as f:
img2_pal = f.read()
# copyright, press start, logo
write_order = (1, 2, 0)
with open(out, "wb") as f:
f.seek(0x10)
pos2 = f.tell()
f.write(img2)
imgs_pos = [0] * len(imgs)
for i in write_order:
imgs_pos[i] = f.tell()
f.write(imgs_bytes[i])
if img2_pal:
pos2_pal = f.tell()
f.write(img2_pal)
else:
pos2_pal = None
pos3 = f.tell()
f.write(img3)
pos1 = f.tell()
f.write(img1)
if img2_pal:
if version == "jp":
# jp padding?
f.write(b"\x00" * 0x10)
f.seek(0)
f.write(pos1.to_bytes(4, byteorder="big"))
f.write(pos2.to_bytes(4, byteorder="big"))
f.write(pos3.to_bytes(4, byteorder="big"))
if pos2_pal:
f.write(pos2_pal.to_bytes(4, byteorder="big"))
for pos in imgs_pos:
f.write(pos.to_bytes(4, byteorder="big"))
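The output layout is a small pointer table at the start of the file (one big-endian u32 per input image, in input order) followed by the image data, which starts at 0x10 and is laid out in the version-specific write_order. A hedged read-back sketch (read_title_offsets is not an existing helper):

import struct

def read_title_offsets(data: bytes, n_imgs: int) -> list[int]:
    # n_imgs is 3 for the us/pal/ique image sets, 4 for jp (extra copyright palette)
    return list(struct.unpack(f">{n_imgs}I", data[: 4 * n_imgs]))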

View File

@@ -1006,7 +1006,8 @@
- gv__tex
- kmr_bg
- nok_bg
- sbk_bg
- name: sbk_bg
pal_count: 2 # sbk_bg has an alternative palette
- sbk3_bg
- iwa_bg
- hos_bg
@@ -1022,7 +1023,11 @@
- sam_bg
- kpa_bg
- title_bg
- title_data
- name: title_data
textures:
- [0x10, ia8, copyright, 144, 32]
- [0x1210, ia8, press_start, 128, 32]
- [0x2210, rgba32, logotype, 200, 112]
- party_kurio
- party_kameki
- party_pinki
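Each textures row above is a flat [offset, format, name, width, height] list, and entries that need no extra metadata stay plain strings. Assuming the splat extension's yaml_loader is PyYAML, the title_data entry deserializes like this (a standalone check, not project code):

import yaml

entry = yaml.safe_load("""
- name: title_data
  textures:
  - [0x10, ia8, copyright, 144, 32]
  - [0x1210, ia8, press_start, 128, 32]
  - [0x2210, rgba32, logotype, 200, 112]
""")[0]

offset, fmt, outname, width, height = entry["textures"][0]
assert (offset, fmt, outname, width, height) == (0x10, "ia8", "copyright", 144, 32)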

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,5 +1,6 @@
from math import ceil
import os, sys
import struct
from pathlib import Path
import crunch64
@@ -86,8 +87,22 @@ class N64SegPm_map_data(N64Segment):
yaml=yaml,
)
with open(script_dir / "map_data.yaml") as f:
self.files = yaml_loader.load(f.read(), Loader=yaml_loader.SafeLoader)
if "ver/ique" in str(options.opts.target_path):
cfg_name = "mapfs_ique.yaml"
elif "ver/jp" in str(options.opts.target_path):
cfg_name = "mapfs_jp.yaml"
else:
cfg_name = "mapfs.yaml"
self.files = {}
with open(script_dir / cfg_name) as f:
mapfs_cfg = yaml_loader.load(f.read(), Loader=yaml_loader.SafeLoader)
for file in mapfs_cfg:
if isinstance(file, dict):
self.files[file["name"]] = file.copy()
else:
name = file
self.files[name] = {"name": name}
def split(self, rom_bytes):
assert isinstance(self.rom_start, int)
@@ -112,14 +127,16 @@ class N64SegPm_map_data(N64Segment):
is_compressed = size != decompressed_size
if name == "end_data":
break
assert self.files.get(name) is not None
if offset == 0:
path = None
else:
path = fs_dir / add_file_ext(name)
if name == "end_data":
break
bytes_start = self.rom_start + 0x20 + offset
bytes = rom_bytes[bytes_start : bytes_start + size]
@@ -133,49 +150,59 @@ class N64SegPm_map_data(N64Segment):
w = png.Writer(150, 105, palette=parse_palette(bytes[:0x200]))
w.write_array(f, bytes[0x200:])
elif name == "title_data":
if "ver/us" in str(options.opts.target_path) or "ver/pal" in str(options.opts.target_path):
w = 200
h = 112
img = n64img.image.RGBA32(data=bytes[0x2210 : 0x2210 + w * h * 4], width=w, height=h)
img.write(fs_dir / "title/logotype.png")
textures = self.files[name]["textures"]
for tex in textures:
pos = tex[0]
imgtype = tex[1]
outname = tex[2]
w = 144
h = 32
img = n64img.image.IA8(data=bytes[0x10 : 0x10 + w * h], width=w, height=h)
img.write(fs_dir / "title/copyright.png")
if imgtype == "pal":
continue
w = 128
h = 32
img = n64img.image.IA8(data=bytes[0x1210 : 0x1210 + w * h], width=w, height=h)
img.write(fs_dir / "title/press_start.png")
else:
w = 272
h = 88
img = n64img.image.RGBA32(data=bytes[0x1830 : 0x1830 + w * h * 4], width=w, height=h)
img.write(fs_dir / "title/logotype.png")
w = tex[3]
h = tex[4]
w = 128
h = 32
img = n64img.image.CI4(data=bytes[0x10 : 0x10 + (w * h // 2)], width=w, height=h)
img.palette = parse_palette(bytes[0x810:0x830])
img.write(fs_dir / "title/copyright.png")
if imgtype == "ia4":
img = n64img.image.IA4(data=bytes[pos : pos + w * h // 2], width=w, height=h)
elif imgtype == "ia8":
img = n64img.image.IA8(data=bytes[pos : pos + w * h], width=w, height=h)
elif imgtype == "ia16":
img = n64img.image.IA16(data=bytes[pos : pos + w * h * 2], width=w, height=h)
elif imgtype == "rgba16":
img = n64img.image.RGBA16(data=bytes[pos : pos + w * h * 2], width=w, height=h)
elif imgtype == "rgba32":
img = n64img.image.RGBA32(data=bytes[pos : pos + w * h * 4], width=w, height=h)
elif imgtype in ("ci4", "ci8"):
palette = next(filter(lambda x: x[1] == "pal" and x[2] == outname, textures))
pal_pos = palette[0]
if imgtype == "ci4":
img = n64img.image.CI4(data=bytes[pos : pos + w * h // 2], width=w, height=h)
img.palette = parse_palette(bytes[pal_pos : pal_pos + 0x20])
elif imgtype == "ci8":
img = n64img.image.CI8(data=bytes[pos : pos + w * h], width=w, height=h)
img.palette = parse_palette(bytes[pal_pos : pal_pos + 0x200])
else:
raise Exception(f"Invalid image type {imgtype}")
img.write(fs_dir / "title" / f"{outname}.png")
w = 128
h = 32
img = n64img.image.IA8(data=bytes[0x830 : 0x830 + w * h], width=w, height=h)
img.write(fs_dir / "title/press_start.png")
elif name.endswith("_bg"):
for i in range(self.files[name].get("pal_count", 1)):
header_offset = i * 0x10
raster_offset, palette_offset, draw_pos, width, height = struct.unpack(
">IIIHH", bytes[header_offset : header_offset + 0x10]
)
def write_bg_png(bytes, path, header_offset=0):
header = bytes[header_offset : header_offset + 0x10]
raster_offset -= 0x80200000
palette_offset -= 0x80200000
assert draw_pos == 0x000C0014
raster_offset = int.from_bytes(header[0:4], byteorder="big") - 0x80200000
palette_offset = int.from_bytes(header[4:8], byteorder="big") - 0x80200000
assert int.from_bytes(header[8:12], byteorder="big") == 0x000C0014 # draw pos
width = int.from_bytes(header[12:14], byteorder="big")
height = int.from_bytes(header[14:16], byteorder="big")
outname = name
if i >= 1:
outname += f".{i}"
with open(path, "wb") as f:
with open(fs_dir / "bg" / f"{outname}.png", "wb") as f:
# CI-8
w = png.Writer(
width,
@@ -184,11 +211,6 @@ class N64SegPm_map_data(N64Segment):
)
w.write_array(f, bytes[raster_offset:])
write_bg_png(bytes, fs_dir / "bg" / f"{name}.png")
# sbk_bg has an alternative palette
if name == "sbk_bg":
write_bg_png(bytes, fs_dir / "bg" / f"{name}.alt.png", header_offset=0x10)
elif name.endswith("_tex"):
TexArchive.extract(bytes, fs_dir / "tex" / name)
else:
@@ -196,6 +218,10 @@ class N64SegPm_map_data(N64Segment):
with open(path, "wb") as f:
f.write(bytes)
if self.files[name].get("dump_raw", False):
with open(fs_dir / f"{name}.raw.dat", "wb") as f:
f.write(rom_bytes[bytes_start : bytes_start + self.files[name]["dump_raw_size"]])
asset_idx += 1
def get_linker_entries(self):
@@ -203,10 +229,16 @@ class N64SegPm_map_data(N64Segment):
fs_dir = options.opts.asset_path / self.dir / self.name
src_paths = []
for name, file in self.files.items():
src_paths.append(fs_dir / add_file_ext(name, linker=True))
if file.get("dump_raw", False):
src_paths.append(fs_dir / f"{name}.raw.dat")
return [
LinkerEntry(
self,
[fs_dir / add_file_ext(name, linker=True) for name in self.files],
src_paths,
fs_dir.with_suffix(".dat"),
".data",
".data",
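The format dispatch in the title_data branch could also be expressed as a lookup table; the sketch below is only illustrative (ci4/ci8 palette attachment omitted), not how the extension is actually written:

import n64img.image

# format name -> (image class, bytes-per-pixel numerator, denominator)
IMG_FORMATS = {
    "ia4":    (n64img.image.IA4,    1, 2),
    "ia8":    (n64img.image.IA8,    1, 1),
    "ia16":   (n64img.image.IA16,   2, 1),
    "rgba16": (n64img.image.RGBA16, 2, 1),
    "rgba32": (n64img.image.RGBA32, 4, 1),
    "ci4":    (n64img.image.CI4,    1, 2),
    "ci8":    (n64img.image.CI8,    1, 1),
}

def decode_texture(data: bytes, pos: int, fmt: str, w: int, h: int):
    cls, num, den = IMG_FORMATS[fmt]
    return cls(data=data[pos : pos + w * h * num // den], width=w, height=h)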

View File

@@ -14655,7 +14655,7 @@ segments:
- { start: 0x1943000, align: 8, type: pm_sprites, name: sprites }
- [0x1B82208, bin] # still zero fill
- [0x1B83000, bin, msg] # pm_msg (todo)
- [0x1E40000, bin, mapfs] # pm_map_data (todo)
- [0x1E40000, pm_map_data, mapfs]
- { type: bin, start: 0x27FEE1E, subalign: 2 } # zero fill
- [0x27FFFC0, bin] # ?
- [0x2800000]