Custom metabuild system
#!/usr/bin/env python3
# Story time:
# I tend to use cmake for projects these days, but I'm not exactly happy with its support
# for cross-compiling. Sure, it's flexible enough that you can do almost anything, but anything
# advanced (like compiling host tools or building multiple platforms simultaneously)
# requires janky usage of external projects.
# So I set aside some time to explore other build systems to find one that better supported
# my needs.
# I spent some time exploring meson; it has first-party understanding of compiling host tools,
# but it limits you to just three machines (build, host and target).
# autotools and pure make were probably the smartest options, but they don't support
# outputting compile_commands.json.
#
# So in a moment of frustration, I wrote this meta-build system, which is simply a thin
# wrapper around ninja, with cmake-like semantics.
# It's somewhat optimized for compactness at the expense of future proofing, with the
# expectation that it will live in this project and be extended whenever this project's
# needs change.
#
# Probably not the smartest idea to do a custom buildsystem, but shit happened.
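#
# For orientation: a build file consumed by this script (a per-directory
# make.py, executed via add_directory below) might look roughly like the
# following sketch. The machine, flags and target names are hypothetical,
# not part of this gist:
#
#   arm = Machine('3ds', f'{devkitarm}/bin/arm-none-eabi-')
#   arm.flags += ['-march=armv6k', '-mtune=mpcore']
#   add_machine('3ds', arm, default_target=True)
#
#   add_library('core', ['core.cpp'], include_dirs=['include'])
#   add_executable('codegen', ['codegen.cpp'], machine='host')
#   add_executable('app', ['main.cpp'], depends=['core'])
#
# Everything defined at module scope below (Machine, add_library, ...) is
# injected into each build file's globals; see dsl_globals at the bottom.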
import sys
import os

# begin editable config
# The build files aren't allowed to import environment variables, so we import them here.
# Anything in the global scope gets exposed to the build files (except modules and anything
# beginning with an underscore)
devkitpro = os.environ['DEVKITPRO']
devkitarm = os.environ['DEVKITARM']
ds_ip = "10.9.9.106"
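# Build files can read these names directly, and can publish new ones with
# export(); e.g. a make.py could (hypothetically) do:
#   print(f'deploying to {ds_ip}')
#   export('app_version', '0.1.0')  # visible to build files run after this one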
_ENTRY_DIR = 'source'
_BUILDDIR = 'build'
############# end of config ##############

_SCRIPTNAME = os.path.basename(__file__)

class Machine(object):
    def __init__(self, name, toolchain):
        self.name = name
        self.toolchain = toolchain
        self.flags = []
        self.ldflags = []
        self.include_dirs = []
        # TODO: how to handle clang and other compilers?
        self.cc = f'{toolchain}gcc'
        self.cxx = f'{toolchain}g++'
        self.ld = f'{toolchain}ld'
        self.ar = f'{toolchain}ar'
        self.strip = f'{toolchain}strip'
        self.objcopy = f'{toolchain}objcopy'
        self.objdump = f'{toolchain}objdump'
        self._used = False

_machines = {  # some default machines
    'host': Machine('host', ''),
    'target': Machine('target', ''),
}
_generate_deps = [_SCRIPTNAME]
_targets = {}
_outputs = []
def _make_dag():
    class Node():
        def __init__(self, name, machine):
            self.name = name
            self.machine = machine
            self.children = []
            self.parent = None
            self.sources = None
            self.visited = False
            self.dep_target = ''
            self.generic = False
            self.dir = ''
            self.obj_postfix = '.o'

        def add_child(self, child, inheritance):
            self.children.append((inheritance, child))
            child.parent = self

    root = Node('root', None)
    root.type = 'root'
    all_node = Node('all', None)
    all_node.type = 'phony'
    root.add_child(all_node, 'public')
    depends = {}

    def add_target(name, parent, machine, inheritance):
        node = Node(name, machine)
        node.data = target = _targets[name]
        parent.add_child(node, inheritance)
        target['_visited'] = True
        if 'machine' in target:
            assert machine is None
            machine = target['machine']
            _machines[machine]._used = True

        def add_depends(dep, inheritance):
            for (m, n), d in depends.items():
                if (m == 'generic' or machine is None or m == machine) and n == dep:
                    node.add_child(d, inheritance)
                    return
            # allow depending directly on files
            if dep not in _targets and os.path.exists(dep):
                _targets[dep] = {'type': 'file'}
            add_target(dep, node, machine, inheritance)

        node.type = target['type']
        node.machine = machine
        node.sources = target.get('sources', [])
        node.source_dir = target.get('source_dir', '')
        node.dir = target.get('build_dir', '')
        node.flags = target.get('flags', [])
        node.ldflags = target.get('ldflags', [])
        node.include_dirs = target.get('include_dirs', [])
        if node.type == 'custom':
            node.args = target['args']
            if target['output']:
                node.name = target['output']
            node.command = target['command']
        if 'depends' in target:
            for dep in target['depends']:
                add_depends(dep, 'public')
        if 'private_depends' in target:
            for dep in target['private_depends']:
                add_depends(dep, 'private')
        if 'interface_depends' in target:
            for dep in target['interface_depends']:
                add_depends(dep, 'interface')
        # Nodes can be generic if they aren't specific to a machine and all their children are generic
        if node.type not in ['executable', 'library'] and all(c.generic for _, c in node.children):
            node.generic = True
            machine = 'generic'
        # We want to be able to re-use the same library on multiple machines, so dependencies are
        # tagged with the machine and instanced multiple times if necessary
        depends[(machine, name)] = node
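        # e.g. if a 'host' tool and a '3ds' executable both depend on 'core',
        # the keys ('host', 'core') and ('3ds', 'core') each get their own
        # node, so core is compiled once per machine with the right toolchain
        # (machine names here are illustrative).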
    for output_name in _outputs:
        add_target(output_name, all_node, None, 'none')
    for name, target in _targets.items():
        if '_visited' not in target and target['type'] == 'custom':
            add_target(name, root, None, 'none')
    return root

# preserves insertion order (guaranteed on python 3.7+)
def uniq(lst):
    return list(dict.fromkeys(lst))
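# e.g. uniq(['-O2', '-g', '-O2']) == ['-O2', '-g']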
def _process_dag(node):
    if node.visited:
        return []
    node.visited = True
    targets = [node]
    node.dep_target = node.name
    if node.machine and _machines[node.machine].name != 'target':
        node.obj_postfix = '.' + node.machine + node.obj_postfix
        if node.type in ['library']:
            node.dep_target += f".{node.machine}"
    if node.type == 'library':
        node.dep_target = os.path.join(node.dir, node.dep_target) + '.a'
    if node.type not in ['executable', 'library', 'interface']:
        for (inheritance, child) in node.children:
            targets += _process_dag(child)
        node.export_flags = ([], [], [])
        return targets
    # used list() to force copies
    export_flags, export_ldflags, export_include_dirs = list(node.flags), list(node.ldflags), list(node.include_dirs)
    for (inheritance, child) in node.children:
        targets += _process_dag(child)
        flags, ldflags, include_dirs = child.export_flags
        if inheritance in ['public', 'private']:
            node.flags += flags
            node.ldflags += ldflags
            node.include_dirs += include_dirs
        if inheritance in ['public', 'interface']:
            export_flags += flags
            export_ldflags += ldflags
            export_include_dirs += include_dirs
    node.flags = uniq(node.flags)
    node.ldflags = uniq(node.ldflags)
    node.include_dirs = uniq(node.include_dirs)
    node.export_flags = (uniq(export_flags), uniq(export_ldflags), uniq(export_include_dirs))
    return targets
def _generate():
    # uniq depends on ordered dicts (3.7+), and the target dicts are merged
    # with the `|` operator, which needs 3.9+
    version = sys.version_info
    assert version >= (3, 9), "Python 3.9 or newer required"
    # collect all info by adding the entry source dir
    os.chdir(os.path.dirname(__file__))
    add_directory(_ENTRY_DIR)
    # build the dag
    dag = _make_dag()
    # propagate flags
    targets = _process_dag(dag)
    os.makedirs(_BUILDDIR, exist_ok=True)
    with open(f"{_BUILDDIR}/new_build.ninja", "w") as f:
        import deps.ninja_syntax as ninja_syntax
        ninja = ninja_syntax.Writer(f)
        ninja.comment(f"This file is autogenerated by {_SCRIPTNAME}, DO NOT EDIT")
        ninja.variable("ninja_required_version", "1.9.0")
        ninja.newline()
        ninja.comment("Rules to regenerate build.ninja when needed")
        ninja.rule(name='REGENERATE_BUILD',
                   command=f'{sys.executable} {os.path.join("..", _SCRIPTNAME)} --regenerate',
                   description="Regenerating build files.", generator=1)
        ninja.build('build.ninja', 'REGENERATE_BUILD',
                    [os.path.join('../', x) for x in _generate_deps], pool='console')
        ninja.newline()
        ninja.comment("compiler/linker rules")
        for machine in _machines.values():
            if not machine._used:
                continue  # don't emit unused machines
            # the per-target loop below emits {machine}_CC builds for .c
            # sources, so a C rule is needed alongside the C++ one
            ninja.rule(name=f'{machine.name}_CC',
                       command=f'{machine.cc} {" ".join(machine.flags)} $ARGS -MD -MF $out.d -c $in -o $out',
                       deps='gcc', depfile='$out.d', description=f'[{machine.name}] Compiling $in')
            ninja.rule(name=f'{machine.name}_CXX',
                       command=f'{machine.cxx} {" ".join(machine.flags)} $ARGS -MD -MF $out.d -c $in -o $out',
                       deps='gcc', depfile='$out.d', description=f'[{machine.name}] Compiling $in')
            ninja.rule(name=f'{machine.name}_STATIC_LINKER',
                       command=f'{machine.ar} csrDT {" ".join(machine.ldflags)} $LD_ARGS $out $in',
                       description=f'[{machine.name}] Statically linking $out')
            ninja.rule(name=f'{machine.name}_LD',
                       command=f'{machine.cxx} -o $out $in {" ".join(machine.ldflags)} $LD_ARGS',
                       description=f"[{machine.name}] Linking $out")
        for target in targets:
            if target.type in ['file', 'interface', 'root']:
                continue
            ninja.newline()
            ninja.comment(f"Target {target.name}")
            dir = target.dir
            depends = [node.dep_target for _, node in target.children if node.type != 'phony']
            if target.type in ['executable', 'library']:
                machine = _machines[target.machine].name
                args = ' '.join(list(target.flags) + [f'-I{x}' for x in target.include_dirs])
                objs = []
                for source in target.sources:
                    out = os.path.join(dir, source) + target.obj_postfix
                    full_source = os.path.join(target.source_dir, source)
                    if source.endswith('.c'):
                        compiler = f'{machine}_CC'
                    elif source.endswith('.cpp'):
                        compiler = f'{machine}_CXX'
                    else:
                        raise Exception(f'Unknown source file type: {source}')
                    ninja.build(out, compiler, full_source, variables={'ARGS': args})
                    objs.append(out)
                ld_args = ' '.join(target.ldflags)
                if target.type == 'library':
                    ninja.build(f'{target.dep_target}', f'{machine}_STATIC_LINKER',
                                objs, variables={'LD_ARGS': ld_args})
                elif target.type == 'executable':
                    static_libs = [node.dep_target for _, node in target.children if node.type == 'library']
                    ld_args = f'-Wl,--as-needed -Wl,--no-undefined -Wl,--start-group {" ".join(static_libs)} -Wl,--end-group {ld_args}'
                    ninja.build(f'{target.dep_target}', f'{machine}_LD',
                                objs, static_libs, variables={'LD_ARGS': ld_args})
            elif target.type == 'custom':
                # an empty description falls back to showing the command itself
                description = target.data.get('description') or f'[Custom Command] {target.command}'
                ninja.rule(target.name, f"{target.command} {' '.join(target.args)}", description=description)
                vars = {escape(c.name): c.dep_target for _, c in target.children if c.type != 'file'}
                ninja.build(target.dep_target, target.name, depends,
                            variables=vars, pool=target.data['pool'])
            else:
                ninja.build(f'{target.name}', 'phony', [], depends)
        ninja.default('all')
    # this does two things: 1) it validates the ninja syntax, 2) it generates compile_commands.json
    import subprocess
    ret = subprocess.run(f"ninja -C {_BUILDDIR} -f new_build.ninja -t compdb > {_BUILDDIR}/compile_commands.json", shell=True)
    if ret.returncode != 0:
        print(f"\n{_SCRIPTNAME}: build.ninja failed")
        exit(ret.returncode)
    # Now that we know we have a valid ninja file, we can replace the old build.ninja
    os.replace(f"{_BUILDDIR}/new_build.ninja", f"{_BUILDDIR}/build.ninja")
_current_dir = ''

def add_directory(subdir):
    global _current_dir
    script_name = os.path.join(_current_dir, subdir, 'make.py')
    if not os.path.exists(script_name):
        raise Exception(f'{script_name} not found')
    _generate_deps.append(script_name)
    with open(script_name) as script:
        ast = compile(script.read(), script_name, 'exec')
    old_dir = _current_dir
    _current_dir = os.path.join(_current_dir, subdir)
    lcls = {}
    exec(ast, dsl_globals, lcls)
    _current_dir = old_dir
def add_executable(name, sources, machine='target', **kwargs):
    assert name not in _targets  # Perhaps not ideal to enforce target name uniqueness, but we do
    _targets[name] = {
        'type': 'executable',
        'sources': sources,
        'machine': machine,
        'source_dir': os.path.join('..', _current_dir),
        'build_dir': os.path.join(*_current_dir.split(os.sep)[1:]),
    } | kwargs
    _outputs.append(name)

def add_target_to_all(name):
    _outputs.append(name)

def add_library(name, sources, **kwargs):
    assert name not in _targets
    _targets[name] = {
        'type': 'library',
        'sources': sources,
        'libtype': 'static',
        'source_dir': os.path.join('..', _current_dir),
        'build_dir': os.path.join(*_current_dir.split(os.sep)[1:]),
    } | kwargs

def add_machine(name, machine, default_target=False):
    _machines[name] = machine
    if default_target:
        _machines['target'] = machine

def add_interface(name, **kwargs):
    assert name not in _targets
    _targets[name] = {
        'type': 'interface',
    } | kwargs

def add_external_command(name, command):
    assert name not in _targets
    _targets[name] = {
        'type': 'external',
        'command': abs_path(command),
    }

# args and depends are sequences of strings; tuples avoid the shared
# mutable-default pitfall
def custom_command(name, output, command, args=(), depends=(), machine='target', description='',
                   pool=None):
    assert name not in _targets
    _targets[name] = {
        'type': 'custom',
        'command': command,
        'args': args,
        'private_depends': depends,
        'output': output,
        'source_dir': os.path.abspath(_current_dir),
        'description': description,
        'pool': pool,
    }

def abs_path(path):
    if os.path.isabs(path):
        return path
    if path == '.':
        return os.path.join('..', _current_dir)
    return os.path.join('..', _current_dir, path)

def escape(name):
    return ''.join([c if c not in './-' else '_' for c in name])
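# e.g. escape('build/foo-bar.elf') == 'build_foo_bar_elf'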
def export(name, value):
    dsl_globals[name] = value

dsl_globals = {k: v for (k, v) in dict(globals()).items()
               if not (k.startswith('_') or isinstance(v, type(sys)))}
dsl_globals['__builtins__'] = {}  # essentially sandbox the build scripts
dsl_globals['print'] = print  # but let them print
if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == '--regenerate':
        # a regenerate has been explicitly requested
        _generate()
        sys.exit(0)  # ninja will automatically pick up the new build.ninja
    # make sure build.ninja exists
    if not os.path.exists(f"{_BUILDDIR}/build.ninja"):
        _generate()
    # and run ninja. If a rebuild is needed, ninja will call this script with --regenerate
    os.execlp("ninja", "ninja", "-C", _BUILDDIR, *sys.argv[1:])
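# Typical flow: running ./make.py generates build/build.ninja on first use and
# then execs ninja; after that, edits to this script or any make.py trigger
# the REGENERATE_BUILD rule, which re-runs this script with --regenerate.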