2022-05-05 16:08:16 +02:00
|
|
|
#! /usr/bin/env python3
|
2021-01-15 02:26:06 +01:00
|
|
|
|
|
|
|
import argparse
|
2022-10-04 16:09:23 +02:00
|
|
|
import hashlib
|
|
|
|
import pickle
|
|
|
|
from typing import Any, Dict, List, Optional, Set, Union
|
2022-10-15 00:21:50 +02:00
|
|
|
import importlib
|
2022-10-04 16:09:23 +02:00
|
|
|
|
2022-06-12 17:33:32 +02:00
|
|
|
import rabbitizer
|
2022-10-04 16:09:23 +02:00
|
|
|
import spimdisasm
|
2022-06-12 17:33:32 +02:00
|
|
|
import tqdm
|
2021-01-15 02:26:06 +01:00
|
|
|
import yaml
|
2022-10-04 16:09:23 +02:00
|
|
|
from colorama import Fore, Style
|
2022-06-12 17:33:32 +02:00
|
|
|
from intervaltree import Interval, IntervalTree
|
|
|
|
|
2022-10-04 16:09:23 +02:00
|
|
|
from segtypes.linker_entry import LinkerWriter, to_cname
|
|
|
|
from segtypes.segment import Segment
|
|
|
|
from util import compiler, log, options, palettes, symbols
|
|
|
|
|
|
|
|
# Version of splat itself, reported in the banner and in version checks.
VERSION = "0.12.2"

# Minimum supported spimdisasm version.
# This value should be kept in sync with the version listed on requirements.txt
SPIMDISASM_MIN = (1, 5, 6)
|
2021-09-22 13:09:51 +02:00
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
# Command-line interface; parsed in the __main__ guard at the bottom of the file.
parser = argparse.ArgumentParser(
    description="Split a rom given a rom, a config, and output directory"
)
# One or more yaml configs; later files are merged on top of earlier ones.
parser.add_argument("config", help="path to a compatible config .yaml file", nargs="+")
# Which processing modes to run (e.g. "code", "img", "ld"); "all" enables everything.
parser.add_argument("--modes", nargs="+", default="all")
parser.add_argument("--verbose", action="store_true", help="Enable debug logging")
parser.add_argument(
    "--use-cache", action="store_true", help="Only split changed segments in config"
)
|
2021-01-15 02:26:06 +01:00
|
|
|
|
2021-04-13 09:47:52 +02:00
|
|
|
# Module-level state shared between main() and the helper functions below.

# Writer for the generated linker script; assigned in main() when "ld" mode is active.
linker_writer: LinkerWriter

# Merged configuration loaded from the yaml file(s) given on the command line.
config: Dict[str, Any]

# Interval trees mapping rom/ram address ranges to their Segment.
# Populated by initialize_segments() and queried by assign_symbols_to_segments().
segment_roms: IntervalTree = IntervalTree()
segment_rams: IntervalTree = IntervalTree()
|
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
|
2021-01-15 02:26:06 +01:00
|
|
|
def fmt_size(size):
    """Render a byte count as a short human-readable string (B / KB / MB).

    Uses decimal (power-of-ten) units and integer division, matching the
    rough statistics output this feeds into.
    """
    if size > 1_000_000:
        return f"{size // 1_000_000} MB"
    if size > 1000:
        return f"{size // 1000} KB"
    return f"{size} B"
|
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
|
2021-04-13 09:47:52 +02:00
|
|
|
def initialize_segments(config_segments: Union[dict, list]) -> List[Segment]:
    """Build Segment objects from the yaml segment list.

    Also (re)populates the module-level ``segment_roms`` / ``segment_rams``
    interval trees used later to map addresses back to segments, and resolves
    ``follows_vram`` references between segments.

    Raises a fatal log.error on duplicate segment names or on a
    ``follows_vram`` reference to a nonexistent segment.
    """
    global segment_roms
    global segment_rams

    # Reset the trees so repeated calls (multiple configs) start clean.
    segment_roms = IntervalTree()
    segment_rams = IntervalTree()

    segments_by_name: Dict[str, Segment] = {}
    ret = []

    for i, seg_yaml in enumerate(config_segments):
        # end marker: a bare one-element list only terminates the previous segment
        if isinstance(seg_yaml, list) and len(seg_yaml) == 1:
            continue

        seg_type = Segment.parse_segment_type(seg_yaml)

        segment_class = Segment.get_class_for_type(seg_type)

        this_start = Segment.parse_segment_start(seg_yaml)
        # NOTE: relies on the config having a trailing end marker; without one
        # the last segment would raise IndexError here.
        next_start = Segment.parse_segment_start(config_segments[i + 1])

        segment: Segment = Segment.from_yaml(
            segment_class, seg_yaml, this_start, next_start
        )

        if segment.require_unique_name:
            if segment.name in segments_by_name:
                log.error(f"segment name '{segment.name}' is not unique")

            segments_by_name[segment.name] = segment

        ret.append(segment)
        # Only index segments with a concrete, non-empty rom range.
        if (
            isinstance(segment.rom_start, int)
            and isinstance(segment.rom_end, int)
            and segment.rom_start != segment.rom_end
        ):
            segment_roms.addi(segment.rom_start, segment.rom_end, segment)
        # Likewise for the vram range.
        if (
            isinstance(segment.vram_start, int)
            and isinstance(segment.vram_end, int)
            and segment.vram_start != segment.vram_end
        ):
            segment_rams.addi(segment.vram_start, segment.vram_end, segment)

    # Second pass: link follows_vram references now that all names are known.
    for segment in ret:
        if segment.follows_vram:
            if segment.follows_vram not in segments_by_name:
                log.error(
                    f"segment '{segment.name}' follows_vram segment '{segment.follows_vram}' does not exist"
                )
            segment.follows_vram_segment = segments_by_name[segment.follows_vram]

    return ret
|
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
|
2022-06-12 17:33:32 +02:00
|
|
|
def assign_symbols_to_segments():
    """Attach each not-yet-assigned symbol to the segment(s) covering its address.

    Symbols with a rom address must match exactly one segment's rom range;
    symbols without one are matched by vram and may land in several
    non-exclusive segments.
    """
    for sym in symbols.all_symbols:
        # Already assigned to a segment; nothing to do.
        if sym.segment:
            continue

        if not sym.rom:
            # No rom address: fall back to vram lookup. Several overlay
            # segments can cover the same vram, so add the symbol to every
            # candidate that does not claim exclusive ram.
            ram_matches: Set[Interval] = segment_rams[sym.vram_start]
            for interval in ram_matches:
                candidate: Segment = interval.data
                if not candidate.get_exclusive_ram_id():
                    candidate.add_symbol(sym)
            continue

        rom_matches = segment_roms[sym.rom]
        if len(rom_matches) == 0:
            log.error("no segment rom overlaps symbol", sym)
        elif len(rom_matches) > 1:
            log.error("multiple segments rom overlap symbol", sym)
        else:
            only: Interval = rom_matches.pop()
            owner: Segment = only.data
            owner.add_symbol(sym)
|
2021-01-15 02:26:06 +01:00
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
|
2021-04-13 09:47:52 +02:00
|
|
|
def do_statistics(seg_sizes, rom_bytes, seg_split, seg_cached):
    """Print a per-segment-type summary of how much of the rom was split.

    :param seg_sizes: segment type -> total byte size ("unk" collects unnamed bins)
    :param rom_bytes: the full rom image (only its length is used)
    :param seg_split: segment type -> number of segments split this run
    :param seg_cached: segment type -> number of cache hits this run
    """
    unk_size = seg_sizes.get("unk", 0)
    rest_size = 0
    total_size = len(rom_bytes)

    # Guard against an empty rom so the ratio computations below cannot
    # divide by zero.
    if total_size == 0:
        log.write("Split 0 B of an empty rom")
        return

    for typ in seg_sizes:
        if typ != "unk":
            rest_size += seg_sizes[typ]

    known_ratio = rest_size / total_size
    unk_ratio = unk_size / total_size

    log.write(f"Split {fmt_size(rest_size)} ({known_ratio:.2%}) in defined segments")
    for typ in seg_sizes:
        if typ != "unk":
            tmp_size = seg_sizes[typ]
            tmp_ratio = tmp_size / total_size
            log.write(
                f"{typ:>20}: {fmt_size(tmp_size):>8} ({tmp_ratio:.2%}) {Fore.GREEN}{seg_split[typ]} split{Style.RESET_ALL}, {Style.DIM}{seg_cached[typ]} cached"
            )
    log.write(
        f"{'unknown':>20}: {fmt_size(unk_size):>8} ({unk_ratio:.2%}) from unknown bin files"
    )
|
|
|
|
|
2021-04-13 09:47:52 +02:00
|
|
|
|
2021-10-26 05:26:38 +02:00
|
|
|
def merge_configs(main_config, additional_config):
    """Merge additional_config into main_config (in place) and return it.

    Merge rules are simple. For each key in additional_config:
      - key missing from main_config: copied over
      - both values are lists: appended
      - both values are dicts: merged recursively
      - anything else (string/number): the additional value replaces the main one
      - mismatched types: fatal error
    """
    for key, incoming in additional_config.items():
        if key not in main_config:
            main_config[key] = incoming
            continue

        if type(main_config[key]) != type(incoming):
            log.error(f"Type for key {key} in configs does not match")
        elif type(main_config[key]) == list:
            # keys exist and match, and are lists: append
            main_config[key] += incoming
        elif type(main_config[key]) == dict:
            # need to merge sub areas
            main_config[key] = merge_configs(main_config[key], incoming)
        else:
            # not a list or dictionary, must be a number or string, overwrite
            main_config[key] = incoming

    return main_config
|
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
|
2022-06-12 17:33:32 +02:00
|
|
|
def configure_disassembler():
    """Apply splat's options to the spimdisasm and rabbitizer global config.

    Order matters here: compiler-specific settings near the bottom may
    override the generic defaults set at the top (e.g. SN64 re-disables
    SYMBOL_FINDER_FILTERED_ADDRESSES_AS_HILO and asm comments).
    """
    # Configure spimdisasm
    spimdisasm.common.GlobalConfig.PRODUCE_SYMBOLS_PLUS_OFFSET = True
    spimdisasm.common.GlobalConfig.TRUST_USER_FUNCTIONS = True
    spimdisasm.common.GlobalConfig.TRUST_JAL_FUNCTIONS = True
    spimdisasm.common.GlobalConfig.GLABEL_ASM_COUNT = False

    # Width of the rom-offset column in asm comments; 0 disables padding.
    if options.opts.rom_address_padding:
        spimdisasm.common.GlobalConfig.ASM_COMMENT_OFFSET_WIDTH = 6
    else:
        spimdisasm.common.GlobalConfig.ASM_COMMENT_OFFSET_WIDTH = 0

    # spimdisasm is not performing any analyzis on non-text sections so enabling this options is pointless
    spimdisasm.common.GlobalConfig.AUTOGENERATED_NAMES_BASED_ON_SECTION_TYPE = False
    spimdisasm.common.GlobalConfig.AUTOGENERATED_NAMES_BASED_ON_DATA_TYPE = False

    spimdisasm.common.GlobalConfig.SYMBOL_FINDER_FILTERED_ADDRESSES_AS_HILO = False

    # Register-naming preferences for rabbitizer's disassembly output.
    rabbitizer.config.regNames_userFpcCsr = False
    rabbitizer.config.regNames_vr4300Cop0NamedRegisters = False

    # rabbitizer adds one space after the mnemonic, hence the -1.
    rabbitizer.config.misc_opcodeLJust = options.opts.mnemonic_ljust - 1

    rabbitizer.config.regNames_gprAbiNames = rabbitizer.Abi.fromStr(
        options.opts.mips_abi_gpr
    )
    rabbitizer.config.regNames_fprAbiNames = rabbitizer.Abi.fromStr(
        options.opts.mips_abi_float_regs
    )

    if options.opts.endianness == "big":
        spimdisasm.common.GlobalConfig.ENDIAN = spimdisasm.common.InputEndian.BIG
    else:
        spimdisasm.common.GlobalConfig.ENDIAN = spimdisasm.common.InputEndian.LITTLE

    # Emit raw move instructions instead of the `move` pseudo-instruction.
    rabbitizer.config.pseudos_pseudoMove = False

    # Compiler-specific tweaks; these may override settings made above.
    selectedCompiler = options.opts.compiler
    if selectedCompiler == compiler.SN64:
        rabbitizer.config.regNames_namedRegisters = False
        rabbitizer.config.toolchainTweaks_sn64DivFix = True
        rabbitizer.config.toolchainTweaks_treatJAsUnconditionalBranch = True
        spimdisasm.common.GlobalConfig.ASM_COMMENT = False
        spimdisasm.common.GlobalConfig.SYMBOL_FINDER_FILTERED_ADDRESSES_AS_HILO = False
        spimdisasm.common.GlobalConfig.COMPILER = spimdisasm.common.Compiler.SN64
    elif selectedCompiler == compiler.GCC:
        rabbitizer.config.toolchainTweaks_treatJAsUnconditionalBranch = True
        spimdisasm.common.GlobalConfig.COMPILER = spimdisasm.common.Compiler.GCC
    elif selectedCompiler == compiler.IDO:
        spimdisasm.common.GlobalConfig.COMPILER = spimdisasm.common.Compiler.IDO

    spimdisasm.common.GlobalConfig.GP_VALUE = options.opts.gp

    # Label macros used in the emitted assembly.
    spimdisasm.common.GlobalConfig.ASM_TEXT_LABEL = options.opts.asm_function_macro
    spimdisasm.common.GlobalConfig.ASM_DATA_LABEL = options.opts.asm_data_macro
    spimdisasm.common.GlobalConfig.ASM_TEXT_END_LABEL = options.opts.asm_end_label

    # Plain .globl labels need .ent/label-style function markers as well.
    if spimdisasm.common.GlobalConfig.ASM_TEXT_LABEL == ".globl":
        spimdisasm.common.GlobalConfig.ASM_TEXT_ENT_LABEL = ".ent"
        spimdisasm.common.GlobalConfig.ASM_TEXT_FUNC_AS_LABEL = True

    if spimdisasm.common.GlobalConfig.ASM_DATA_LABEL == ".globl":
        spimdisasm.common.GlobalConfig.ASM_DATA_SYM_AS_LABEL = True

    spimdisasm.common.GlobalConfig.LINE_ENDS = options.opts.c_newline
|
2022-06-12 17:33:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
def brief_seg_name(seg: Segment, limit: int, ellipsis="…") -> str:
    """Return the segment's (stripped) name, truncated to *limit* characters.

    When truncation happens, trailing whitespace is removed from the cut
    and *ellipsis* is appended; used to keep progress-bar labels short.
    """
    name = seg.name.strip()
    if len(name) <= limit:
        return name
    return name[:limit].strip() + ellipsis
|
|
|
|
|
|
|
|
|
2022-09-28 22:52:12 +02:00
|
|
|
def main(config_path, modes, verbose, use_cache=True):
    """Run the full splat pipeline: load config, scan, split, and emit outputs.

    config_path is a list of yaml paths (merged in order); modes selects the
    active processing modes; use_cache skips segments whose cache entry is
    unchanged. NOTE(review): the default here is True while the CLI flag
    --use-cache defaults to False — callers other than __main__ get caching.
    """
    global config

    # Refuse to run against an older spimdisasm than we were built for.
    if spimdisasm.__version_info__ < SPIMDISASM_MIN:
        log.error(
            f"splat {VERSION} requires as minimum spimdisasm {SPIMDISASM_MIN}, but the installed version is {spimdisasm.__version_info__}"
        )

    log.write(f"splat {VERSION} (powered by spimdisasm {spimdisasm.__version__})")

    # Load config; later files are merged on top of earlier ones.
    config = {}
    for entry in config_path:
        with open(entry) as f:
            additional_config = yaml.load(f.read(), Loader=yaml.SafeLoader)
        config = merge_configs(config, additional_config)

    options.initialize(config, config_path, modes, verbose)

    # Read the entire rom into memory.
    with options.opts.target_path.open("rb") as f2:
        rom_bytes = f2.read()

    # Optional integrity check against the config's expected sha1.
    if "sha1" in config:
        sha1 = hashlib.sha1(rom_bytes).hexdigest()
        e_sha1 = config["sha1"].lower()
        if e_sha1 != sha1:
            log.error(f"sha1 mismatch: expected {e_sha1}, was {sha1}")

    # Create main output dir
    options.opts.base_path.mkdir(parents=True, exist_ok=True)

    processed_segments: List[Segment] = []

    # Per-segment-type statistics, fed to do_statistics() at the end.
    seg_sizes: Dict[str, int] = {}
    seg_split: Dict[str, int] = {}
    seg_cached: Dict[str, int] = {}

    # Load cache (best effort: any failure simply starts with an empty cache)
    if use_cache:
        try:
            with options.opts.cache_path.open("rb") as f3:
                cache = pickle.load(f3)

            if verbose:
                log.write(f"Loaded cache ({len(cache.keys())} items)")
        except Exception:
            cache = {}
    else:
        cache = {}

    # invalidate entire cache if options change
    if use_cache and cache.get("__options__") != config.get("options"):
        if verbose:
            log.write("Options changed, invalidating cache")

        cache = {
            "__options__": config.get("options"),
        }

    configure_disassembler()

    # Platform-specific init hook (platforms.<name>.init).
    platform_module = importlib.import_module(f"platforms.{options.opts.platform}")
    platform_init = getattr(platform_module, "init")
    platform_init(rom_bytes)

    # Initialize segments
    all_segments = initialize_segments(config["segments"])

    # Load and process symbols
    symbols.initialize(all_segments)

    # Assign symbols to segments
    assign_symbols_to_segments()

    if options.opts.is_mode_active("code"):
        symbols.initialize_spim_context(all_segments)

    # Resolve raster/palette siblings
    if options.opts.is_mode_active("img"):
        palettes.initialize(all_segments)

    # Scan pass: let each segment analyze its rom slice before splitting.
    scan_bar = tqdm.tqdm(all_segments, total=len(all_segments))
    for segment in scan_bar:
        assert isinstance(segment, Segment)
        scan_bar.set_description(f"Scanning {brief_seg_name(segment, 20)}")
        typ = segment.type
        # Default-named bins are counted as "unknown" data in the stats.
        if segment.type == "bin" and segment.is_name_default():
            typ = "unk"

        if typ not in seg_sizes:
            seg_sizes[typ] = 0
            seg_split[typ] = 0
            seg_cached[typ] = 0
        seg_sizes[typ] += 0 if segment.size is None else segment.size

        if segment.should_scan():
            # Check cache but don't write anything
            if use_cache:
                if segment.cache() == cache.get(segment.unique_id()):
                    continue

            segment.did_run = True
            segment.scan(rom_bytes)

            processed_segments.append(segment)

            seg_split[typ] += 1

    symbols.mark_c_funcs_as_defined()

    # Split pass: write each segment's output files.
    split_bar = tqdm.tqdm(
        all_segments,
        total=len(all_segments),
    )
    for segment in split_bar:
        split_bar.set_description(f"Splitting {brief_seg_name(segment, 20)}")

        if use_cache:
            cached = segment.cache()

            if cached == cache.get(segment.unique_id()):
                # Cache hit
                if segment.type not in seg_cached:
                    seg_cached[segment.type] = 0
                seg_cached[segment.type] += 1
                continue
            else:
                # Cache miss; split
                cache[segment.unique_id()] = cached

        if segment.should_split():
            # Segments may override their input with a standalone file.
            segment_bytes = rom_bytes
            if segment.file_path:
                with open(segment.file_path, "rb") as segment_input_file:
                    segment_bytes = segment_input_file.read()
            segment.split(segment_bytes)

    # Emit the linker script and symbol header.
    if options.opts.is_mode_active("ld"):
        global linker_writer
        linker_writer = LinkerWriter()
        linker_bar = tqdm.tqdm(
            all_segments,
            total=len(all_segments),
        )
        for i, segment in enumerate(linker_bar):
            linker_bar.set_description(f"Linker script {brief_seg_name(segment, 20)}")
            # Each segment needs to know its successor to compute boundaries.
            next_segment: Optional[Segment] = None
            if i < len(all_segments) - 1:
                next_segment = all_segments[i + 1]
            linker_writer.add(segment, next_segment)
        linker_writer.save_linker_script()
        linker_writer.save_symbol_header()

        # write elf_sections.txt - this only lists the generated sections in the elf, not subsections
        # that the elf combines into one section
        if options.opts.elf_section_list_path:
            section_list = ""
            for segment in all_segments:
                section_list += "." + to_cname(segment.name) + "\n"
            with open(options.opts.elf_section_list_path, "w", newline="\n") as f:
                f.write(section_list)

    # Write undefined_funcs_auto.txt
    if options.opts.create_undefined_funcs_auto:
        to_write = [
            s
            for s in symbols.all_symbols
            if s.referenced and not s.defined and not s.dead and s.type == "func"
        ]
        to_write.sort(key=lambda x: x.vram_start)

        with open(options.opts.undefined_funcs_auto_path, "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # write undefined_syms_auto.txt
    if options.opts.create_undefined_syms_auto:
        to_write = [
            s
            for s in symbols.all_symbols
            if s.referenced
            and not s.defined
            and not s.dead
            and s.type not in {"func", "label", "jtbl_label"}
        ]
        to_write.sort(key=lambda x: x.vram_start)

        with open(options.opts.undefined_syms_auto_path, "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # print warnings during split
    for segment in all_segments:
        if len(segment.warnings) > 0:
            log.write(
                f"{Style.DIM}0x{segment.rom_start:06X}{Style.RESET_ALL} {segment.type} {Style.BRIGHT}{segment.name}{Style.RESET_ALL}:"
            )

            for warn in segment.warnings:
                log.write("warning: " + warn, status="warn")

            log.write("")  # empty line

    # Statistics
    do_statistics(seg_sizes, rom_bytes, seg_split, seg_cached)

    # Save cache
    if cache != {} and use_cache:
        if verbose:
            log.write("Writing cache")
        with open(options.opts.cache_path, "wb") as f4:
            pickle.dump(cache, f4)

    # Debug dump of every known symbol plus the spimdisasm context.
    if options.opts.dump_symbols:
        from pathlib import Path

        splat_hidden_folder = Path(".splat/")
        splat_hidden_folder.mkdir(exist_ok=True)

        with open(splat_hidden_folder / "splat_symbols.csv", "w") as f:
            f.write(
                "vram_start,given_name,name,type,given_size,size,rom,defined,user_declared,referenced,dead,extract\n"
            )
            for s in sorted(symbols.all_symbols, key=lambda x: x.vram_start):
                f.write(f"{s.vram_start:X},{s.given_name},{s.name},{s.type},")
                if s.given_size is not None:
                    f.write(f"0x{s.given_size:X},")
                else:
                    f.write("None,")
                f.write(f"{s.size},")
                if s.rom is not None:
                    f.write(f"0x{s.rom:X},")
                else:
                    f.write("None,")
                f.write(
                    f"{s.defined},{s.user_declared},{s.referenced},{s.dead},{s.extract}\n"
                )

        symbols.spim_context.saveContextToFile(splat_hidden_folder / "spim_context.csv")
|
|
|
|
|
2022-05-05 16:08:16 +02:00
|
|
|
|
2021-01-15 02:26:06 +01:00
|
|
|
if __name__ == "__main__":
    args = parser.parse_args()
    # args.use_cache comes from the --use-cache flag (False unless given).
    main(args.config, args.modes, args.verbose, args.use_cache)
|