match fs.bin

This commit is contained in:
Alex Bates 2020-10-18 15:04:49 +01:00
parent 236aa9b718
commit 59bc5dfa42
No known key found for this signature in database
GPG Key ID: 5E11C2DB78877706

View File

@@ -9,60 +9,79 @@ import shutil
# Absolute path of the directory containing this script.
# NOTE(review): not referenced in the visible span — presumably used further down the file; confirm.
tools_dir = Path(__file__).parent.absolute()
# NOTE(review): this module-level load of the hard-coded "assets/fs.json" is the
# pre-change version from the diff; the post-change code loads "<src_dir>.json"
# inside build_mapfs() instead (see below).
with open("assets/fs.json", "r") as f:
config = json.loads(f.read())
def next_multiple(pos, multiple):
    """Round ``pos`` up to the next multiple of ``multiple``.

    Returns ``pos`` unchanged when it is already aligned.

    The original expression ``pos + pos % multiple`` only rounds up
    correctly when ``multiple == 2`` (the only way it is called in this
    file, at the ``size`` alignment in build_mapfs); for any other
    multiple it over- or under-shoots (e.g. pos=3, multiple=4 gave 6).
    ``(-pos) % multiple`` is the exact distance to the next boundary for
    any positive ``multiple``, and is identical to the old expression
    for ``multiple == 2``, so the packed output is unchanged.
    """
    return pos + (-pos) % multiple
# NOTE(review): this span is a unified diff rendered without +/- markers and with
# indentation stripped — pre-change and post-change lines of build_mapfs are
# interleaved (e.g. the duplicate next_data_pos assignments, offset vs
# toc_entry_pos, the dict-style print vs the f-string print, and two competing
# TOC-write sequences). It is not valid Python as shown; comments below tag
# which lines appear to belong to which version. Recover the real file from the
# repository before editing.
def build_mapfs(src_dir, build_dir, out_bin):
# Per-call config load: "<src_dir>.json" lists the assets to pack.
with open(src_dir + ".json", "r") as f:
config = json.loads(f.read())
# every TOC entry's name field has data after the null terminator made up from all the previous name fields.
# we probably don't have to do this for the game to read the data properly (it doesn't read past the null terminator
# of `string`), but the original devs' equivalent to build_assets_fs.py had this bug so we need to replicate it to match.
written_names = []
with open(out_bin, "wb") as f:
# Archive header: the title string occupies the space before the 0x20-byte TOC start.
f.write(config["title"].encode("ascii"))
# Data begins after the TOC: one 0x1C-byte entry per asset plus one terminator entry.
# NOTE(review): pre-change line — included the 0x20 header in next_data_pos:
next_data_pos = 0x20 + (len(config["assets"]) + 1) * 0x1C
# NOTE(review): post-change line — next_data_pos is now relative to the 0x20 header,
# which is re-added at each seek/offset write below:
next_data_pos = (len(config["assets"]) + 1) * 0x1C
asset_idx = 0
for asset in config["assets"]:
# NOTE(review): pre-change name for the TOC slot position:
offset = 0x20 + asset_idx * 0x1C
# NOTE(review): post-change name for the same value:
toc_entry_pos = 0x20 + asset_idx * 0x1C
src_path = Path(src_dir, asset["path"])
build_path = Path(build_dir, asset["path"])
# NOTE(review): pre-change TOC-entry fields:
name = asset["name"]
src_size = src_path.stat().st_size
compressed_size = build_path.stat().st_size + 1
# data for TOC entry
# NOTE(review): post-change TOC-entry fields (explicit null terminator,
# 2-byte-aligned size, header-relative offset):
name = asset["name"] + "\0"
offset = next_data_pos
size = next_multiple(build_path.stat().st_size, 2)
decompressed_size = src_path.stat().st_size
# NOTE(review): pre-change debug print:
print({
"name": name,
"offset": (next_data_pos + 0x20),
"size": compressed_size,
"decompressed_size": src_size,
})
# NOTE(review): post-change debug print:
print(f"{name} {offset:08X} {size:08X} {decompressed_size:08X}")
# write TOC row
# NOTE(review): pre-change TOC write (name at +0x00, offset/size/decompressed at +0x10):
f.seek(offset)
f.write(name.encode('ascii'))
f.seek(offset + 0x10)
f.write((next_data_pos - 0x20).to_bytes(4, byteorder="big"))
f.write(compressed_size.to_bytes(4, byteorder="big"))
f.write(src_size.to_bytes(4, byteorder="big"))
written_names.append(name)
# write all previously-written names; required to match
# Re-writing every prior name at this slot reproduces the original tool's
# past-the-terminator garbage (see the comment at the top of this function).
for prev_name in written_names:
f.seek(toc_entry_pos)
f.write(prev_name.encode('ascii'))
# write data
# NOTE(review): pre-change data seek (next_data_pos already included the header):
f.seek(next_data_pos)
# write TOC entry.
# NOTE(review): post-change TOC write — offset (header-relative), aligned size,
# decompressed size, all big-endian u32 at entry +0x10:
f.seek(toc_entry_pos + 0x10)
f.write(offset.to_bytes(4, byteorder="big"))
f.write(size.to_bytes(4, byteorder="big"))
f.write(decompressed_size.to_bytes(4, byteorder="big"))
# write data.
f.seek(0x20 + next_data_pos)
f.write(build_path.read_bytes())
# NOTE(review): pre-change advance:
next_data_pos += compressed_size
# NOTE(review): post-change advance (keeps every payload 2-byte aligned):
next_data_pos += size
asset_idx += 1
# NOTE(review): pre-change terminator entry:
f.seek(0x20 + asset_idx * 0x1C)
f.write(b"end_data")
# end_data
# NOTE(review): post-change terminator entry, with the same replicated
# name-garbage behaviour as the per-asset entries above:
toc_entry_pos = 0x20 + asset_idx * 0x1C
written_names.append("end_data\0")
for prev_name in written_names:
f.seek(toc_entry_pos)
f.write(prev_name.encode('ascii'))
# Unknown constant at terminator +0x18; kept verbatim to match the shipped data.
f.seek(toc_entry_pos + 0x18)
f.write((0x903F0000).to_bytes(4, byteorder="big")) # TODO: figure out purpose
def build_file(src_dir, out_dir, filename):
with open(src_dir + ".json", "r") as f:
config = json.loads(f.read())
asset = None
for a in config["assets"]:
if (a["path"] == filename):
asset = a
if not asset:
print("asset not configured in json file")
print("asset not configured in {}.json".format(src_dir))
exit(1)
src_path = Path(src_dir, filename)