Add Python linter to github actions (#1100)

* Add Python linter to github actions

* wip

* Add back splat_ext

* Format files

* C++ -> C

* format 2 files

* split workflow into separate file, line length 120, fix excludes

* -l 120 in ci

* update black locally and apply formatting changes

* pyproject.toml

---------

Co-authored-by: Ethan Roseman <ethteck@gmail.com>
lshamis 2023-07-29 10:03:17 -07:00 committed by GitHub
parent a69ae38bfe
commit ae66312d8c
75 changed files with 2033 additions and 1588 deletions

View File

@@ -4,7 +4,7 @@ on:
jobs:
cpp_lint:
name: Format and lint
name: C format and lint
runs-on: ubuntu-latest
steps:
- name: Checkout

24 .github/workflows/python.yaml vendored Normal file
View File

@@ -0,0 +1,24 @@
name: Python Formatting & Linting
on:
pull_request:
jobs:
py_format:
name: black formatting
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install black
- name: Run black
run: |
black . --check

View File

@@ -12,9 +12,9 @@
},
"includePath": [
"${workspaceFolder}/include",
"${workspaceFolder}/ver/us/build/include",
"${workspaceFolder}/ver/pal/build/include",
"${workspaceFolder}/src",
"${workspaceFolder}/assets/us"
"${workspaceFolder}/assets/pal"
],
"defines": [
"F3DEX_GBI_2",

18 .vscode/settings.json vendored
View File

@@ -25,7 +25,9 @@
"docs/doxygen": true,
"expected": true,
"ver/jp/expected": true,
"ver/us/expected": true
"ver/us/expected": true,
"ver/pal/expected": true,
"ver/ique/expected": true
},
"python.autoComplete.extraPaths": [
"./tools"
@@ -47,6 +49,7 @@
"*.h": "c",
},
"C_Cpp.autoAddFileAssociations": false,
"C_Cpp.default.cStandard": "c89",
"files.exclude": {
"**/.git": true,
"**/.splat_cache": true,
@@ -56,7 +59,14 @@
"**/*.i": true,
"docs/doxygen": true
},
"C_Cpp.default.cStandard": "c89",
"python.linting.mypyEnabled": true,
"python.linting.enabled": true,
"[python]": {
"editor.formatOnType": true,
"editor.wordBasedSuggestions": false,
"editor.formatOnSave": true,
"editor.formatOnSaveMode": "modifications",
"editor.defaultFormatter": "ms-python.black-formatter",
},
"black-formatter.args": [
"-l 120"
],
}

View File

@@ -5,33 +5,36 @@ import re
import sys
from pathlib import Path
def strip_c_comments(text):
def replacer(match):
s = match.group(0)
if s.startswith('/'):
if s.startswith("/"):
return " "
else:
return s
pattern = re.compile(
r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
re.DOTALL | re.MULTILINE
re.DOTALL | re.MULTILINE,
)
return re.sub(pattern, replacer, text)
c_func_pattern = re.compile(
r"^(static\s+)?[^\s]+\s+([^\s(]+)\(([^;)]*)\)[^;]+{",
re.MULTILINE
)
c_func_pattern = re.compile(r"^(static\s+)?[^\s]+\s+([^\s(]+)\(([^;)]*)\)[^;]+{", re.MULTILINE)
def funcs_in_c(text):
return (match.group(2) for match in c_func_pattern.finditer(text))
asm_func_pattern = re.compile(
r"INCLUDE_ASM\([^,]+, [^,]+, ([^,)]+)",
re.MULTILINE
)
asm_func_pattern = re.compile(r"INCLUDE_ASM\([^,]+, [^,]+, ([^,)]+)", re.MULTILINE)
def include_asms_in_c(text):
return (match.group(1) for match in asm_func_pattern.finditer(text))
def stuff(version):
DIR = os.path.dirname(__file__)
NONMATCHINGS_DIR = Path(os.path.join(DIR, "ver", version, "asm", "nonmatchings"))
@@ -76,6 +79,7 @@ def stuff(version):
if not os.listdir(folder[0]):
os.removedirs(folder[0])
stuff("jp")
stuff("us")
stuff("pal")

View File

@@ -13,9 +13,7 @@ sys.path.append("tools")
from old.update_evts import parse_symbol_addrs
from tools.disasm_script import ScriptDisassembler, get_constants
parser = argparse.ArgumentParser(
description="Diff EVT macros."
)
parser = argparse.ArgumentParser(description="Diff EVT macros.")
parser.add_argument(
"start",
@@ -26,21 +24,13 @@ parser.add_argument(
"-w",
"--watch",
action="store_true",
help="Watch for file changes and update the diff automatically."
help="Watch for file changes and update the diff automatically.",
)
parser.add_argument(
"-m",
"--make",
action="store_true",
help="Run ninja automatically."
)
parser.add_argument("-m", "--make", action="store_true", help="Run ninja automatically.")
parser.add_argument("-o", action="store_true", help="Ignored for compatibility with diff.py.")
parser.add_argument(
"-o",
action="store_true",
help="Ignored for compatibility with diff.py."
)
class EvtDisplay(Display):
def __init__(self, start):
@@ -106,11 +96,13 @@ class EvtDisplay(Display):
refresh_key = (current, target)
return (output, refresh_key)
class FakeConfig():
class FakeConfig:
def __init__(self, args):
self.make = args.make
self.source_extensions = ["c", "h"]
def run_ninja():
return subprocess.run(
["ninja", "ver/current/build/papermario.z64"],
@@ -118,6 +110,7 @@ def run_ninja():
stdout=subprocess.PIPE,
)
def main():
args = parser.parse_args()
get_constants()
@@ -153,8 +146,7 @@ def main():
ret = run_ninja()
if ret.returncode != 0:
display.update(
ret.stderr.decode("utf-8-sig", "replace")
or ret.stdout.decode("utf-8-sig", "replace"),
ret.stderr.decode("utf-8-sig", "replace") or ret.stdout.decode("utf-8-sig", "replace"),
error=True,
)
continue
@@ -164,5 +156,6 @@
else:
display.run_sync()
if __name__ == "__main__":
main()

View File

@@ -22,7 +22,7 @@ parser.add_argument(
action="store",
default=False,
const="prompt",
help="run diff.py on the result with the provided arguments"
help="run diff.py on the result with the provided arguments",
)
parser.add_argument(
"-m", "--make", help="run ninja before finding difference(s)", action="store_true"
@@ -101,7 +101,9 @@ def search_rom_address(target_addr):
continue
if rom > target_addr:
return f"{prev_sym} (RAM 0x{prev_ram:X}, ROM 0x{prev_rom:X}, {prev_file})"
return (
f"{prev_sym} (RAM 0x{prev_ram:X}, ROM 0x{prev_rom:X}, {prev_file})"
)
prev_ram = ram
prev_rom = rom
@@ -214,9 +216,7 @@ if diffs == 0:
if len(found_instr_diff) > 0:
for i in found_instr_diff:
print(f"Instruction difference at ROM addr 0x{i:X}, {search_rom_address(i)}")
print(
f"Bytes: {hexbytes(mybin[i : i + 4])} vs {hexbytes(basebin[i : i + 4])}"
)
print(f"Bytes: {hexbytes(mybin[i : i + 4])} vs {hexbytes(basebin[i : i + 4])}")
print()
definite_shift = diffs > shift_cap

View File

@@ -25,11 +25,7 @@ def load_latest_progress(version):
version = Path("ver/current").resolve().parts[-1]
csv = (
urlopen(f"https://papermar.io/reports/progress_{version}.csv")
.read()
.decode("utf-8")
)
csv = urlopen(f"https://papermar.io/reports/progress_{version}.csv").read().decode("utf-8")
latest = csv.split("\n")[-2]
(
@@ -56,14 +52,10 @@ def load_latest_progress(version):
def get_func_info():
try:
result = subprocess.run(
["mips-linux-gnu-objdump", "-x", elf_path], stdout=subprocess.PIPE
)
result = subprocess.run(["mips-linux-gnu-objdump", "-x", elf_path], stdout=subprocess.PIPE)
nm_lines = result.stdout.decode().split("\n")
except:
print(
f"Error: Could not run objdump on {elf_path} - make sure that the project is built"
)
print(f"Error: Could not run objdump on {elf_path} - make sure that the project is built")
sys.exit(1)
sizes = {}
@@ -135,19 +127,13 @@ def do_section_progress(
section_vram_end,
):
funcs = get_funcs_in_vram_range(vrams, section_vram_start, section_vram_end)
matching_size, nonmatching_size = get_funcs_sizes(
sizes, matchings, nonmatchings, restrict_to=funcs
)
matching_size, nonmatching_size = get_funcs_sizes(sizes, matchings, nonmatchings, restrict_to=funcs)
section_total_size = matching_size + nonmatching_size
progress_ratio = (matching_size / section_total_size) * 100
matching_ratio = (matching_size / total_size) * 100
total_ratio = (section_total_size / total_size) * 100
print(
f"\t{section_name}: {matching_size} matching bytes / {section_total_size} total ({progress_ratio:.2f}%)"
)
print(
f"\t\t(matched {matching_ratio:.2f}% of {total_ratio:.2f}% total rom for {section_name})"
)
print(f"\t{section_name}: {matching_size} matching bytes / {section_total_size} total ({progress_ratio:.2f}%)")
print(f"\t\t(matched {matching_ratio:.2f}% of {total_ratio:.2f}% total rom for {section_name})")
def main(args):
@@ -163,9 +149,7 @@ def main(args):
nonmatching_funcs = get_nonmatching_funcs()
matching_funcs = all_funcs - nonmatching_funcs
matching_size, nonmatching_size = get_funcs_sizes(
sizes, matching_funcs, nonmatching_funcs
)
matching_size, nonmatching_size = get_funcs_sizes(sizes, matching_funcs, nonmatching_funcs)
if len(all_funcs) == 0:
funcs_matching_ratio = 0.0
@@ -238,19 +222,9 @@ def main(args):
print(f"Warning: category/total size mismatch on version {args.version}!\n")
print("Matching size: " + str(matching_size))
print("Nonmatching size: " + str(nonmatching_size))
print(
"Sum: "
+ str(matching_size + nonmatching_size)
+ " (should be "
+ str(total_size)
+ ")"
)
print(
f"{len(matching_funcs)} matched functions / {len(all_funcs)} total ({funcs_matching_ratio:.2f}%)"
)
print(
f"{matching_size} matching bytes / {total_size} total ({matching_ratio:.2f}%)"
)
print("Sum: " + str(matching_size + nonmatching_size) + " (should be " + str(total_size) + ")")
print(f"{len(matching_funcs)} matched functions / {len(all_funcs)} total ({funcs_matching_ratio:.2f}%)")
print(f"{matching_size} matching bytes / {total_size} total ({matching_ratio:.2f}%)")
do_section_progress(
"effects",

4 pyproject.toml Normal file
View File

@@ -0,0 +1,4 @@
[tool.black]
line-length = 120
exclude = 'tools/splat/'
extend-exclude = 'diff.py'
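
Black reads this [tool.black] table automatically from the repository root, so the CI invocation (black . --check) and a plain local run of black agree on the 120-column limit and the splat/diff.py exclusions; --check only reports, exiting non-zero when a file would be reformatted instead of rewriting it. A minimal sketch of the same mode through black's Python API (black.Mode and black.format_str are the library's real entry points; the sample source string is invented for illustration):

import black

# Format one snippet with the project's 120-column mode, as pyproject.toml configures it.
src = 'x = { "a":1}\n'
print(black.format_str(src, mode=black.Mode(line_length=120)), end="")
# prints: x = {"a": 1}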

View File

@@ -10,7 +10,7 @@ from enum import IntEnum
script_dir = os.path.dirname(os.path.realpath(__file__))
asm_dir = script_dir + "/../ver/current/asm/nonmatchings"
modes = [ "min", "max", "avg", "total", "size" ]
modes = ["min", "max", "avg", "total", "size"]
sizes = {}
@@ -47,18 +47,53 @@ def do_dir(root, dir):
avg = 0 if len(files) == 0 else total / len(files)
sizes[root + "/" + dir] = ((min, max, total, avg, len(files)))
sizes[root + "/" + dir] = (min, max, total, avg, len(files))
parser = argparse.ArgumentParser(description="A tool to receive information about the number of non-matching .s files "
+"per .c file, or the size of .s files, measured by their number of instructions. "
+"Option -p is used by default if no option is specified.")
parser = argparse.ArgumentParser(
description="A tool to receive information about the number of non-matching .s files "
+ "per .c file, or the size of .s files, measured by their number of instructions. "
+ "Option -p is used by default if no option is specified."
)
group = parser.add_mutually_exclusive_group()
group.add_argument("-f", "--files", help="Default. Print the number of non-matching .s files per .c file, ordered by size.", action='store_true', required=False)
group.add_argument("-a", "--alphabetical", help="Print the size of .s files, ordered by name.", action='store_true', required=False)
group.add_argument("-s", "--size", help="Print the size of .s files, ordered by size.", action='store_true', required=False)
parser.add_argument("-l", "--limit", help="Only print the .c --files that are greater than or equal to the value.", type=int, default=0, required=False)
parser.add_argument("-m", "--mode", help="Switches between output modes for --files. Allowed values are: {min, max, avg, total, size}.", choices=modes, default="size", metavar='', required=False)
group.add_argument(
"-f",
"--files",
help="Default. Print the number of non-matching .s files per .c file, ordered by size.",
action="store_true",
required=False,
)
group.add_argument(
"-a",
"--alphabetical",
help="Print the size of .s files, ordered by name.",
action="store_true",
required=False,
)
group.add_argument(
"-s",
"--size",
help="Print the size of .s files, ordered by size.",
action="store_true",
required=False,
)
parser.add_argument(
"-l",
"--limit",
help="Only print the .c --files that are greater than or equal to the value.",
type=int,
default=0,
required=False,
)
parser.add_argument(
"-m",
"--mode",
help="Switches between output modes for --files. Allowed values are: {min, max, avg, total, size}.",
choices=modes,
default="size",
metavar="",
required=False,
)
args = parser.parse_args()

View File

@@ -12,6 +12,6 @@ if __name__ == "__main__":
with open(infile, "rb") as i:
for char in i.read():
f.write(f'0x{char:02X}, ')
f.write(f"0x{char:02X}, ")
f.write(f"}};\n")

View File

@@ -4,10 +4,7 @@ import os
from pathlib import Path
from typing import Tuple
ASSETS_DIR = (
Path(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
/ "assets"
)
ASSETS_DIR = Path(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) / "assets"
@lru_cache(maxsize=None)

View File

@@ -28,9 +28,7 @@ PIGMENT_REQ_VERSION = "0.3.0"
def exec_shell(command: List[str]) -> str:
ret = subprocess.run(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
ret = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
return ret.stdout
@@ -50,9 +48,7 @@ def write_ninja_rules(
if use_ccache:
ccache = "ccache "
try:
subprocess.call(
["ccache"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
)
subprocess.call(["ccache"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
except FileNotFoundError:
ccache = ""
@@ -134,9 +130,7 @@ def write_ninja_rules(
command="sha1sum -c $in && touch $out" if DO_SHA1_CHECK else "touch $out",
)
ninja.rule(
"cpp", description="cpp $in", command=f"{cpp} $in {extra_cppflags} -P -o $out"
)
ninja.rule("cpp", description="cpp $in", command=f"{cpp} $in {extra_cppflags} -P -o $out")
ninja.rule(
"cc",
@@ -287,9 +281,7 @@ def write_ninja_rules(
command=f"$python {BUILD_TOOLS}/mapfs/pack_title_data.py $out $in",
)
ninja.rule(
"map_header", command=f"$python {BUILD_TOOLS}/mapfs/map_header.py $in > $out"
)
ninja.rule("map_header", command=f"$python {BUILD_TOOLS}/mapfs/map_header.py $in > $out")
ninja.rule("charset", command=f"$python {BUILD_TOOLS}/pm_charset.py $out $in")
@@ -303,25 +295,17 @@ def write_ninja_rules(
command=f"$python {BUILD_TOOLS}/sprite/sprite_shading_profiles.py $in $out $header_path",
)
ninja.rule(
"imgfx_data", command=f"$python {BUILD_TOOLS}/imgfx/imgfx_data.py $in $out"
)
ninja.rule("imgfx_data", command=f"$python {BUILD_TOOLS}/imgfx/imgfx_data.py $in $out")
ninja.rule("shape", command=f"$python {BUILD_TOOLS}/mapfs/shape.py $in $out")
ninja.rule(
"effect_data", command=f"$python {BUILD_TOOLS}/effects.py $in_yaml $out_dir"
)
ninja.rule("effect_data", command=f"$python {BUILD_TOOLS}/effects.py $in_yaml $out_dir")
ninja.rule("pm_sbn", command=f"$python {BUILD_TOOLS}/audio/sbn.py $out $in")
with Path("tools/permuter_settings.toml").open("w") as f:
f.write(
f"compiler_command = \"{cc} {CPPFLAGS.replace('$version', 'pal')} {cflags} -DPERMUTER -fforce-addr\"\n"
)
f.write(
f'assembler_command = "{cross}as -EB -march=vr4300 -mtune=vr4300 -Iinclude"\n'
)
f.write(f"compiler_command = \"{cc} {CPPFLAGS.replace('$version', 'pal')} {cflags} -DPERMUTER -fforce-addr\"\n")
f.write(f'assembler_command = "{cross}as -EB -march=vr4300 -mtune=vr4300 -Iinclude"\n')
f.write(f'compiler_type = "gcc"\n')
f.write(
"""
@@ -512,11 +496,7 @@ class Configure:
for object_path in object_paths:
if object_path.suffixes[-1] == ".o":
built_objects.add(str(object_path))
elif (
object_path.suffixes[-1] == ".h"
or task == "bin_inc_c"
or task == "pal_inc_c"
):
elif object_path.suffixes[-1] == ".h" or task == "bin_inc_c" or task == "pal_inc_c":
generated_headers.append(str(object_path))
# don't rebuild objects if we've already seen all of them
@@ -580,15 +560,13 @@ class Configure:
if isinstance(seg, segtypes.n64.header.N64SegHeader):
build(entry.object_path, entry.src_paths, "as")
elif isinstance(seg, segtypes.common.asm.CommonSegAsm) or (
isinstance(seg, segtypes.common.data.CommonSegData)
and not seg.type[0] == "."
isinstance(seg, segtypes.common.data.CommonSegData) and not seg.type[0] == "."
):
build(entry.object_path, entry.src_paths, "as")
elif seg.type in ["pm_effect_loads", "pm_effect_shims"]:
build(entry.object_path, entry.src_paths, "as")
elif isinstance(seg, segtypes.common.c.CommonSegC) or (
isinstance(seg, segtypes.common.data.CommonSegData)
and seg.type[0] == "."
isinstance(seg, segtypes.common.data.CommonSegData) and seg.type[0] == "."
):
cflags = None
if isinstance(seg.yaml, dict):
@@ -619,16 +597,12 @@ class Configure:
task = "cc_272"
cflags = cflags.replace("gcc_272", "")
encoding = (
"CP932" # similar to SHIFT-JIS, but includes backslash and tilde
)
encoding = "CP932" # similar to SHIFT-JIS, but includes backslash and tilde
if version == "ique":
encoding = "EUC-JP"
# Dead cod
if isinstance(seg.parent.yaml, dict) and seg.parent.yaml.get(
"dead_code", False
):
if isinstance(seg.parent.yaml, dict) and seg.parent.yaml.get("dead_code", False):
obj_path = str(entry.object_path)
init_obj_path = Path(obj_path + ".dead")
build(
@@ -677,9 +651,7 @@ class Configure:
src_paths = [seg.out_path().relative_to(ROOT)]
inc_dir = self.build_path() / "include" / seg.dir
bin_path = (
self.build_path() / seg.dir / (seg.name + ".png.bin")
)
bin_path = self.build_path() / seg.dir / (seg.name + ".png.bin")
build(
bin_path,
@@ -691,9 +663,7 @@ class Configure:
},
)
assert seg.vram_start is not None, (
"img with vram_start unset: " + seg.name
)
assert seg.vram_start is not None, "img with vram_start unset: " + seg.name
c_sym = seg.create_symbol(
addr=seg.vram_start,
@@ -720,9 +690,7 @@ class Configure:
elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
src_paths = [seg.out_path().relative_to(ROOT)]
inc_dir = self.build_path() / "include" / seg.dir
bin_path = (
self.build_path() / seg.dir / (seg.name + ".pal.bin")
)
bin_path = self.build_path() / seg.dir / (seg.name + ".pal.bin")
build(
bin_path,
@@ -833,9 +801,7 @@ class Configure:
)
# Sprites .bin
sprite_player_header_path = str(
self.build_path() / "include/sprite/player.h"
)
sprite_player_header_path = str(self.build_path() / "include/sprite/player.h")
build(
entry.object_path.with_suffix(".bin"),
@@ -843,9 +809,7 @@ class Configure:
"sprites",
variables={
"header_out": sprite_player_header_path,
"build_dir": str(
self.build_path() / "assets" / self.version / "sprite"
),
"build_dir": str(self.build_path() / "assets" / self.version / "sprite"),
"asset_stack": ",".join(self.asset_stack),
},
implicit_outputs=[sprite_player_header_path],
@@ -859,9 +823,7 @@ class Configure:
msg_bins = []
for section_idx, msg_path in enumerate(entry.src_paths):
bin_path = (
entry.object_path.with_suffix("") / f"{section_idx:02X}.bin"
)
bin_path = entry.object_path.with_suffix("") / f"{section_idx:02X}.bin"
msg_bins.append(bin_path)
build(bin_path, [msg_path], "msg")
@@ -1005,16 +967,12 @@ class Configure:
)
elif name.endswith("_shape_built"):
base_name = name[:-6]
raw_bin_path = self.resolve_asset_path(
f"assets/x/mapfs/geom/{base_name}.bin"
)
raw_bin_path = self.resolve_asset_path(f"assets/x/mapfs/geom/{base_name}.bin")
bin_path = bin_path.parent / "geom" / (base_name + ".bin")
if c_maps:
# raw bin -> c -> o -> elf -> objcopy -> final bin file
c_file_path = (
bin_path.parent / "geom" / base_name
).with_suffix(".c")
c_file_path = (bin_path.parent / "geom" / base_name).with_suffix(".c")
o_path = bin_path.parent / "geom" / (base_name + ".o")
elf_path = bin_path.parent / "geom" / (base_name + ".elf")
@@ -1056,12 +1014,7 @@ class Configure:
rasters = []
for src_path in entry.src_paths:
out_path = (
self.build_path()
/ seg.dir
/ seg.name
/ (src_path.stem + ".bin")
)
out_path = self.build_path() / seg.dir / seg.name / (src_path.stem + ".bin")
build(
out_path,
[src_path],
@@ -1079,13 +1032,7 @@ class Configure:
palettes = []
for src_path in entry.src_paths:
out_path = (
self.build_path()
/ seg.dir
/ seg.name
/ "palette"
/ (src_path.stem + ".bin")
)
out_path = self.build_path() / seg.dir / seg.name / "palette" / (src_path.stem + ".bin")
build(
out_path,
[src_path],
@@ -1100,9 +1047,7 @@ class Configure:
build(entry.object_path.with_suffix(""), palettes, "charset_palettes")
build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
elif seg.type == "pm_sprite_shading_profiles":
header_path = str(
self.build_path() / "include/sprite/sprite_shading_profiles.h"
)
header_path = str(self.build_path() / "include/sprite/sprite_shading_profiles.h")
build(
entry.object_path.with_suffix(""),
entry.src_paths,
@@ -1115,14 +1060,12 @@ class Configure:
build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
elif seg.type == "pm_sbn":
sbn_path = entry.object_path.with_suffix("")
build(sbn_path, entry.src_paths, "pm_sbn") # could have non-yaml inputs be implicit
build(sbn_path, entry.src_paths, "pm_sbn") # could have non-yaml inputs be implicit
build(entry.object_path, [sbn_path], "bin")
elif seg.type == "linker" or seg.type == "linker_offset":
pass
elif seg.type == "pm_imgfx_data":
c_file_path = (
Path(f"assets/{self.version}") / "imgfx" / (seg.name + ".c")
)
c_file_path = Path(f"assets/{self.version}") / "imgfx" / (seg.name + ".c")
build(c_file_path, entry.src_paths, "imgfx_data")
build(
@@ -1136,9 +1079,7 @@ class Configure:
},
)
else:
raise Exception(
f"don't know how to build {seg.__class__.__name__} '{seg.name}'"
)
raise Exception(f"don't know how to build {seg.__class__.__name__} '{seg.name}'")
# Run undefined_syms through cpp
ninja.build(
@@ -1216,20 +1157,14 @@ if __name__ == "__main__":
action="store_true",
help="Delete assets and previously-built files",
)
parser.add_argument(
"--splat", default="tools/splat", help="Path to splat tool to use"
)
parser.add_argument(
"--split-code", action="store_true", help="Re-split code segments to asm files"
)
parser.add_argument("--splat", default="tools/splat", help="Path to splat tool to use")
parser.add_argument("--split-code", action="store_true", help="Re-split code segments to asm files")
parser.add_argument(
"--no-split-assets",
action="store_true",
help="Don't split assets from the baserom(s)",
)
parser.add_argument(
"-d", "--debug", action="store_true", help="Generate debugging information"
)
parser.add_argument("-d", "--debug", action="store_true", help="Generate debugging information")
parser.add_argument(
"-n",
"--non-matching",
@@ -1272,12 +1207,8 @@
pass
if args.cpp is None:
print("error: system C preprocessor is not GNU!")
print(
"This is a known issue on macOS - only clang's cpp is installed by default."
)
print(
"Use 'brew' to obtain GNU cpp, then run this script again with the --cpp option, e.g."
)
print("This is a known issue on macOS - only clang's cpp is installed by default.")
print("Use 'brew' to obtain GNU cpp, then run this script again with the --cpp option, e.g.")
print(f" ./configure --cpp {gcc_cpps[0]}")
exit(1)
@@ -1285,15 +1216,11 @@
version = exec_shell([PIGMENT, "--version"]).split(" ")[1].strip()
if version < PIGMENT_REQ_VERSION:
print(
f"error: {PIGMENT} version {PIGMENT_REQ_VERSION} or newer is required, system version is {version}\n"
)
print(f"error: {PIGMENT} version {PIGMENT_REQ_VERSION} or newer is required, system version is {version}\n")
exit(1)
except FileNotFoundError:
print(f"error: {PIGMENT} is not installed\n")
print(
"To build and install it, obtain cargo:\n\tcurl https://sh.rustup.rs -sSf | sh"
)
print("To build and install it, obtain cargo:\n\tcurl https://sh.rustup.rs -sSf | sh")
print(f"and then run:\n\tcargo install {PIGMENT}")
exit(1)
@@ -1382,12 +1309,8 @@
# include tools/splat_ext in the python path
sys.path.append(str((ROOT / "tools/splat_ext").resolve()))
configure.split(
not args.no_split_assets, args.split_code, args.shift, args.debug
)
configure.write_ninja(
ninja, skip_files, non_matching, args.modern_gcc, args.c_maps
)
configure.split(not args.no_split_assets, args.split_code, args.shift, args.debug)
configure.write_ninja(ninja, skip_files, non_matching, args.modern_gcc, args.c_maps)
all_rom_oks.append(str(configure.rom_ok_path()))

View File

@@ -9,9 +9,7 @@ from splat_ext.pm_effect_loads import effects_from_yaml
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Builds effect table, function declarations, macros, and enum"
)
parser = argparse.ArgumentParser(description="Builds effect table, function declarations, macros, and enum")
parser.add_argument("in_yaml")
parser.add_argument("out_dir", type=Path)
args = parser.parse_args()
@@ -31,9 +29,7 @@
effect_enum_text += f" {enum_name} = 0x{i:02X},\n"
if not effect.empty:
effect_table_text += (
f" FX_ENTRY({effect.name}, effect_gfx_{effect.gfx}),\n"
)
effect_table_text += f" FX_ENTRY({effect.name}, effect_gfx_{effect.gfx}),\n"
fx_decls_text += effect.get_macro_call("fx_" + effect.name) + ";\n"
main_decls_text += effect.get_macro_call(effect.name + "_main") + ";\n"
macro_defs += effect.get_macro_def() + "\n"

View File

@@ -1,23 +1,23 @@
#!/usr/bin/env python3
import sys, os
#Under normal compilation we rely on splat to use a discard option in the ldscript
#to not include sections in the elf then just output all sections, however under debug we want
#to have debug sections.
#In debugging mode splat is told to output a list of sections it is custom creating, which are
#all of the sections we export to the z64 file with an objcopy. The below chunk of code is
#responsible for adding -j to each of the names and outputting a file for objcopy to use
#so we can still generate a elf file with all the extra debugging sections and still output
# Under normal compilation we rely on splat to use a discard option in the ldscript
# to not include sections in the elf then just output all sections, however under debug we want
# to have debug sections.
# In debugging mode splat is told to output a list of sections it is custom creating, which are
# all of the sections we export to the z64 file with an objcopy. The below chunk of code is
# responsible for adding -j to each of the names and outputting a file for objcopy to use
# so we can still generate a elf file with all the extra debugging sections and still output
# the required sections to the .z64 without outputting everything.
if __name__ == "__main__":
infile, outfile = sys.argv[1:]
infile, outfile = sys.argv[1:]
#generate output based on input
file_data = open(infile,"r").read().split("\n")
if len(file_data[-1]) == 0:
file_data.pop()
# generate output based on input
file_data = open(infile, "r").read().split("\n")
if len(file_data[-1]) == 0:
file_data.pop()
outdata = "-j " + " -j ".join(file_data)
with open(outfile, "w") as f:
f.write(outdata)
outdata = "-j " + " -j ".join(file_data)
with open(outfile, "w") as f:
f.write(outdata)
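
The script above only assembles objcopy arguments: -j (--only-section) tells objcopy to copy just the named section, so joining every entry of splat's section list with " -j " lets the build keep a full debug ELF while the .z64 still receives only the required sections. A worked illustration of the transformation, with invented section names:

# Hypothetical input file contents: ".text\n.data\n.rodata\n"
file_data = [".text", ".data", ".rodata"]
outdata = "-j " + " -j ".join(file_data)
print(outdata)  # prints: -j .text -j .data -j .rodata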

View File

@@ -221,12 +221,8 @@ class Converter:
# header (struct BackgroundHeader)
for i, palette in enumerate(palettes):
out_bytes += (baseaddr + palettes_len + headers_len).to_bytes(
4, byteorder="big"
) # raster offset
out_bytes += (baseaddr + headers_len + 0x200 * i).to_bytes(
4, byteorder="big"
) # palette offset
out_bytes += (baseaddr + palettes_len + headers_len).to_bytes(4, byteorder="big") # raster offset
out_bytes += (baseaddr + headers_len + 0x200 * i).to_bytes(4, byteorder="big") # palette offset
out_bytes += (12).to_bytes(2, byteorder="big") # startX
out_bytes += (20).to_bytes(2, byteorder="big") # startY
out_bytes += (out_width).to_bytes(2, byteorder="big") # width
@@ -263,8 +259,6 @@ if __name__ == "__main__":
flip_x = "--flip-x" in argv
flip_y = "--flip-y" in argv
(out_bytes, out_width, out_height) = Converter(
mode, infile, flip_x, flip_y
).convert()
(out_bytes, out_width, out_height) = Converter(mode, infile, flip_x, flip_y).convert()
with open(argv[3], "wb") as f:
f.write(out_bytes)

View File

@@ -6,6 +6,7 @@ import json
from pathlib import Path
from typing import Any, List
@dataclass
class Vertex:
idx: int
@@ -20,7 +21,16 @@
a: int
def toJSON(self):
return " { \"pos\": [" + f"{self.x}, {self.y}, {self.z}" + "], \"uv\": [" + f"{self.u}, {self.v}" + "], \"rgba\": [" + f"{self.r}, {self.g}, {self.b}, {self.a}" + "] }"
return (
' { "pos": ['
+ f"{self.x}, {self.y}, {self.z}"
+ '], "uv": ['
+ f"{self.u}, {self.v}"
+ '], "rgba": ['
+ f"{self.r}, {self.g}, {self.b}, {self.a}"
+ "] }"
)
@dataclass
class Triangle:
@@ -31,6 +41,7 @@ class Triangle:
def toJSON(self):
return f" [{self.i}, {self.j}, {self.k}]"
@dataclass
class Anim:
name: str
@@ -46,13 +57,15 @@ class Anim:
triangles: List[Triangle]
def toJSON(self):
framestr = ",\n".join([" [\n" + ",\n".join([v.toJSON() for v in frame]) + "\n ]" for i, frame in enumerate(self.frames)])
framestr = ",\n".join(
[" [\n" + ",\n".join([v.toJSON() for v in frame]) + "\n ]" for i, frame in enumerate(self.frames)]
)
trianglestr = ",\n".join([t.toJSON() for t in self.triangles])
ret = "{\n"
ret += " \"flags\": " + str(self.flags) + ",\n"
ret += " \"frames\": [\n" + framestr + "],\n"
ret += " \"triangles\": [\n" + trianglestr + "]\n"
ret += ' "flags": ' + str(self.flags) + ",\n"
ret += ' "frames": [\n' + framestr + "],\n"
ret += ' "triangles": [\n' + trianglestr + "]\n"
ret += "}"
return ret
@@ -60,7 +73,10 @@ class Anim:
@staticmethod
def fromJSON(name: str, data: Any) -> "Anim":
flags = data["flags"]
frames = [[Vertex(idx, *vtx["pos"], *vtx["uv"], *vtx["rgba"]) for idx, vtx in enumerate(frame)] for frame in data["frames"]]
frames = [
[Vertex(idx, *vtx["pos"], *vtx["uv"], *vtx["rgba"]) for idx, vtx in enumerate(frame)]
for frame in data["frames"]
]
triangles = [Triangle(*t) for t in data["triangles"]]
return Anim(
@@ -73,15 +89,16 @@ class Anim:
keyframes=len(frames),
flags=flags,
frames=frames,
triangles=triangles
triangles=triangles,
)
def build(inputs: List[Path], output: Path):
with open(output, "w") as f:
f.write("/* NOTE: This file is autogenerated, do not edit */\n\n")
f.write("#include \"PR/gbi.h\"\n")
f.write("#include \"macros.h\"\n")
f.write("#include \"imgfx.h\"\n\n")
f.write('#include "PR/gbi.h"\n')
f.write('#include "macros.h"\n')
f.write('#include "imgfx.h"\n\n')
for input in inputs:
with open(input, "r") as fin:
@@ -101,7 +118,9 @@ def build(inputs: List[Path], output: Path):
for frame in anim.frames:
f.write(" {\n")
for vtx in frame:
f.write(f" {{ {{{vtx.x}, {vtx.y}, {vtx.z}}}, {{{vtx.u}, {vtx.v}}}, {{{vtx.r}, {vtx.g}, {vtx.b}}}, {vtx.a} }},\n")
f.write(
f" {{ {{{vtx.x}, {vtx.y}, {vtx.z}}}, {{{vtx.u}, {vtx.v}}}, {{{vtx.r}, {vtx.g}, {vtx.b}}}, {vtx.a} }},\n"
)
f.write(" },\n")
f.write("};\n\n")
@@ -134,7 +153,9 @@ def build(inputs: List[Path], output: Path):
# We need a new chunk
if max_t1 >= 32 and not just_chunked:
chunk_text = f" gsSPVertex((u8*){vtx_name} + 0xC * {sub_num}, {min(32, max_t1 + 1)}, 0),\n" + chunk_text
chunk_text = (
f" gsSPVertex((u8*){vtx_name} + 0xC * {sub_num}, {min(32, max_t1 + 1)}, 0),\n" + chunk_text
)
just_chunked = True
f.write(chunk_text)
chunk_text = ""
@@ -155,7 +176,9 @@
old_max_t = max(max_t1, max_t2)
# Dump final chunk
chunk_text = f" gsSPVertex((u8*){vtx_name} + 0xC * {sub_num}, {max(max_t1, max_t2) + 1}, 0),\n" + chunk_text
chunk_text = (
f" gsSPVertex((u8*){vtx_name} + 0xC * {sub_num}, {max(max_t1, max_t2) + 1}, 0),\n" + chunk_text
)
f.write(chunk_text)
f.write(" gsSPEndDisplayList(),\n")
f.write("};\n\n")

View File

@@ -20,7 +20,7 @@ if __name__ == "__main__":
mode = sys.argv[2]
syms_to_max = {
"entity_data_vram_end" : [
"entity_data_vram_end": [
"entity_default_VRAM_END",
"entity_jan_iwa_VRAM_END",
"entity_sbk_omo_VRAM_END",
@@ -44,7 +44,7 @@
"world_action_use_spinning_flower_VRAM_END",
"world_action_use_tweester_VRAM_END",
"world_action_sneaky_parasol_VRAM_END",
]
],
}
addrs: Dict[str, List[int]] = {}
@@ -80,7 +80,9 @@ if __name__ == "__main__":
out_addrs = {sym: max(addrs[sym]) for sym in addrs}
out_addrs["entity_data_vram_end"] = out_addrs["entity_data_vram_end"] + out_addrs["world_action_vram_end"] - HARDCODED_ADDR
out_addrs["entity_data_vram_end"] = (
out_addrs["entity_data_vram_end"] + out_addrs["world_action_vram_end"] - HARDCODED_ADDR
)
out = ""
for sym in out_addrs:

View File

@@ -4,9 +4,11 @@ from sys import argv
from pathlib import Path
import struct
def next_multiple(pos, multiple):
return pos + pos % multiple
def get_version_date(version):
if version == "us":
return "Map Ver.00/11/07 15:36"
@@ -17,6 +19,7 @@ def get_version_date(version):
else:
return "Map Ver.??/??/?? ??:??"
def build_mapfs(out_bin, assets, version):
# every TOC entry's name field has data after the null terminator made up from all the previous name fields.
# we probably don't have to do this for the game to read the data properly (it doesn't read past the null terminator
@@ -38,12 +41,12 @@ def build_mapfs(out_bin, assets, version):
decompressed_size = decompressed.stat().st_size
size = next_multiple(compressed.stat().st_size, 2) if compressed.exists() else decompressed_size
#print(f"{name} {offset:08X} {size:08X} {decompressed_size:08X}")
# print(f"{name} {offset:08X} {size:08X} {decompressed_size:08X}")
# write all previously-written names; required to match
lastname = name + lastname[len(name):]
lastname = name + lastname[len(name) :]
f.seek(toc_entry_pos)
f.write(lastname.encode('ascii'))
f.write(lastname.encode("ascii"))
# write TOC entry.
f.seek(toc_entry_pos + 0x10)
@@ -61,14 +64,15 @@ def build_mapfs(out_bin, assets, version):
last_name_entry = "end_data\0"
f.seek(toc_entry_pos)
lastname = last_name_entry + lastname[len(last_name_entry):]
f.write(lastname.encode('ascii'))
lastname = last_name_entry + lastname[len(last_name_entry) :]
f.write(lastname.encode("ascii"))
f.seek(toc_entry_pos + 0x18)
f.write((0x903F0000).to_bytes(4, byteorder="big")) # TODO: figure out purpose
f.write((0x903F0000).to_bytes(4, byteorder="big")) # TODO: figure out purpose
if __name__ == "__main__":
argv.pop(0) # python3
argv.pop(0) # python3
version = argv.pop(0)
out = argv.pop(0)
@@ -76,6 +80,6 @@ if __name__ == "__main__":
# pairs
for i in range(0, len(argv), 2):
assets.append((Path(argv[i]), Path(argv[i+1])))
assets.append((Path(argv[i]), Path(argv[i + 1])))
build_mapfs(out, assets, version)
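
The name-overlap trick described in the comment at the top of build_mapfs is easier to see in isolation: each TOC name is written over the tail of the previous one, so the bytes after a short name's null terminator are leftovers from earlier, longer names. A sketch of the same lastname update, with invented asset names:

lastname = ""
for name in ["kmr_00_shape\0", "tik_01\0"]:
    lastname = name + lastname[len(name) :]
    print(repr(lastname))
# 'kmr_00_shape\x00'
# 'tik_01\x00shape\x00'  <- 'shape' is residue of the previous, longer name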

View File

@@ -4,17 +4,19 @@ from sys import argv, stderr
from os import path
from xml.dom.minidom import parse
def eprint(*args, **kwargs):
print(*args, file=stderr, **kwargs)
if __name__ == "__main__":
_, xml_path = argv
xml = parse(xml_path)
map_name = path.basename(xml_path)[:-4]
print("#include \"common.h\"")
print("#include \"map.h\"")
print('#include "common.h"')
print('#include "map.h"')
print("")
print("#ifndef NAMESPACE")
print(f"#define NAMESPACE {map_name}")

View File

@@ -3,7 +3,7 @@
from sys import argv
if __name__ == "__main__":
argv.pop(0) # python3
argv.pop(0) # python3
if len(argv) > 4:
out, img1, img2, img3, img2_pal = argv

View File

@@ -90,27 +90,17 @@ class HeaderSegment(Segment):
# note: do not push model root yet
shape.root_node = NodeSegment(self.ptr_root_node, "Node")
shape.vtx_table = shape.push(
VertexTableSegment(self.ptr_vtx_table, "VertexTable")
)
shape.model_names = shape.push(
StringListSegment(self.ptr_model_names, "ModelNames")
)
shape.collider_names = shape.push(
StringListSegment(self.ptr_collider_names, "ColliderNames")
)
shape.zone_names = shape.push(