Merge pull request #1680 from Faless/build/profile_strip_json

[Bindings] Build profile now strips methods and skips files

commit 7d3870bc87
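In short: build-profile handling moves out of binding_generator.py into a new build_profile.py, and the profile is now applied by trimming the extension_api JSON itself — excluded classes are dropped, methods that reference excluded classes are stripped, and their source files are skipped. A minimal sketch of the resulting flow, based on the functions this PR introduces (file paths are illustrative, not prescribed by the PR):

# Sketch of the new generation flow (run from a godot-cpp checkout; paths are illustrative).
from build_profile import generate_trimmed_api
from binding_generator import _get_file_list, _generate_bindings

# Trim the API dump according to a profile of enabled/disabled classes.
api = generate_trimmed_api("gdextension/extension_api.json", "test/build_profile.json")

# The same trimmed dict drives both the emitted file list and the actual generation.
files = _get_file_list(api, "self_test", headers=True, sources=True)
_generate_bindings(api, use_template_get_node=False, bits="64", precision="single", output_dir="self_test")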
@@ -32,3 +32,7 @@ jobs:
        uses: pre-commit/action@v3.0.1
        with:
          extra_args: --verbose --hook-stage manual --files ${{ env.CHANGED_FILES }}

      - name: Check generated files consistency
        run:
          python misc/scripts/check_get_file_list.py
@@ -197,13 +197,16 @@ def generate_virtuals(target):
        f.write(txt)


def get_file_list(api_filepath, output_dir, headers=False, sources=False, profile_filepath=""):
def get_file_list(api_filepath, output_dir, headers=False, sources=False):
    api = {}
    files = []
    with open(api_filepath, encoding="utf-8") as api_file:
        api = json.load(api_file)

    build_profile = parse_build_profile(profile_filepath, api)
    return _get_file_list(api, output_dir, headers, sources)


def _get_file_list(api, output_dir, headers=False, sources=False):
    files = []

    core_gen_folder = Path(output_dir) / "gen" / "include" / "godot_cpp" / "core"
    include_gen_folder = Path(output_dir) / "gen" / "include" / "godot_cpp"
@@ -235,7 +238,7 @@ def get_file_list(api_filepath, output_dir, headers=False, sources=False, profil
        source_filename = source_gen_folder / "classes" / (camel_to_snake(engine_class["name"]) + ".cpp")
        if headers:
            files.append(str(header_filename.as_posix()))
        if sources and is_class_included(engine_class["name"], build_profile):
        if sources:
            files.append(str(source_filename.as_posix()))

    for native_struct in api["native_structures"]:
@@ -267,128 +270,19 @@ get_file_list(api_filepath, output_dir, headers=False, sources=False, profil
    return files


def print_file_list(api_filepath, output_dir, headers=False, sources=False, profile_filepath=""):
    print(*get_file_list(api_filepath, output_dir, headers, sources, profile_filepath), sep=";", end=None)


def parse_build_profile(profile_filepath, api):
    if profile_filepath == "":
        return {}
    print("Using feature build profile: " + profile_filepath)

    with open(profile_filepath, encoding="utf-8") as profile_file:
        profile = json.load(profile_file)

    api_dict = {}
    parents = {}
    children = {}
    for engine_class in api["classes"]:
        api_dict[engine_class["name"]] = engine_class
        parent = engine_class.get("inherits", "")
        child = engine_class["name"]
        parents[child] = parent
        if parent == "":
            continue
        children[parent] = children.get(parent, [])
        children[parent].append(child)

    # Parse methods dependencies
    deps = {}
    reverse_deps = {}
    for name, engine_class in api_dict.items():
        ref_cls = set()
        for method in engine_class.get("methods", []):
            rtype = method.get("return_value", {}).get("type", "")
            args = [a["type"] for a in method.get("arguments", [])]
            if rtype in api_dict:
                ref_cls.add(rtype)
            elif is_enum(rtype) and get_enum_class(rtype) in api_dict:
                ref_cls.add(get_enum_class(rtype))
            for arg in args:
                if arg in api_dict:
                    ref_cls.add(arg)
                elif is_enum(arg) and get_enum_class(arg) in api_dict:
                    ref_cls.add(get_enum_class(arg))
        deps[engine_class["name"]] = set(filter(lambda x: x != name, ref_cls))
        for acls in ref_cls:
            if acls == name:
                continue
            reverse_deps[acls] = reverse_deps.get(acls, set())
            reverse_deps[acls].add(name)

    included = []
    front = list(profile.get("enabled_classes", []))
    if front:
        # These must always be included
        front.append("WorkerThreadPool")
        front.append("ClassDB")
        front.append("ClassDBSingleton")
    while front:
        cls = front.pop()
        if cls in included:
            continue
        included.append(cls)
        parent = parents.get(cls, "")
        if parent:
            front.append(parent)
        for rcls in deps.get(cls, set()):
            if rcls in included or rcls in front:
                continue
            front.append(rcls)

    excluded = []
    front = list(profile.get("disabled_classes", []))
    while front:
        cls = front.pop()
        if cls in excluded:
            continue
        excluded.append(cls)
        front += children.get(cls, [])
        for rcls in reverse_deps.get(cls, set()):
            if rcls in excluded or rcls in front:
                continue
            front.append(rcls)

    if included and excluded:
        print(
            "WARNING: Cannot specify both 'enabled_classes' and 'disabled_classes' in build profile. 'disabled_classes' will be ignored."
        )

    return {
        "enabled_classes": included,
        "disabled_classes": excluded,
    }


def scons_emit_files(target, source, env):
    profile_filepath = env.get("build_profile", "")
    if profile_filepath and not Path(profile_filepath).is_absolute():
        profile_filepath = str((Path(env.Dir("#").abspath) / profile_filepath).as_posix())

    files = [env.File(f) for f in get_file_list(str(source[0]), target[0].abspath, True, True, profile_filepath)]
    env.Clean(target, files)
    env["godot_cpp_gen_dir"] = target[0].abspath
    return files, source


def scons_generate_bindings(target, source, env):
    generate_bindings(
        str(source[0]),
        env["generate_template_get_node"],
        "32" if "32" in env["arch"] else "64",
        env["precision"],
        env["godot_cpp_gen_dir"],
    )
    return None
def print_file_list(api_filepath, output_dir, headers=False, sources=False):
    print(*get_file_list(api_filepath, output_dir, headers, sources), sep=";", end=None)


def generate_bindings(api_filepath, use_template_get_node, bits="64", precision="single", output_dir="."):
    api = None

    target_dir = Path(output_dir) / "gen"

    api = {}
    with open(api_filepath, encoding="utf-8") as api_file:
        api = json.load(api_file)
    _generate_bindings(api, use_template_get_node, bits, precision, output_dir)


def _generate_bindings(api, use_template_get_node, bits="64", precision="single", output_dir="."):
    target_dir = Path(output_dir) / "gen"

    shutil.rmtree(target_dir, ignore_errors=True)
    target_dir.mkdir(parents=True)
@@ -2766,20 +2660,6 @@ def is_refcounted(type_name):
    return type_name in engine_classes and engine_classes[type_name]


def is_class_included(class_name, build_profile):
    """
    Check if an engine class should be included.
    This removes classes according to a build profile of enabled or disabled classes.
    """
    included = build_profile.get("enabled_classes", [])
    excluded = build_profile.get("disabled_classes", [])
    if included:
        return class_name in included
    if excluded:
        return class_name not in excluded
    return True


def is_included(type_name, current_type):
    """
    Check if a builtin type should be included.
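The helper moves unchanged into the new build_profile.py added below, alongside a new is_method_included. Its allow/deny semantics on a profile dict, shown with hand-picked class names (examples only, not taken from the PR):

from build_profile import is_class_included

profile = {"enabled_classes": ["Node", "Object"]}
print(is_class_included("Node", profile))             # True: explicitly enabled
print(is_class_included("RenderingServer", profile))  # False: a non-empty enabled_classes acts as an allow-list
print(is_class_included("RenderingServer", {}))       # True: an empty profile keeps every class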
@@ -0,0 +1,183 @@
import json
import sys


def parse_build_profile(profile_filepath, api):
    if profile_filepath == "":
        return {}

    with open(profile_filepath, encoding="utf-8") as profile_file:
        profile = json.load(profile_file)

    api_dict = {}
    parents = {}
    children = {}
    for engine_class in api["classes"]:
        api_dict[engine_class["name"]] = engine_class
        parent = engine_class.get("inherits", "")
        child = engine_class["name"]
        parents[child] = parent
        if parent == "":
            continue
        children[parent] = children.get(parent, [])
        children[parent].append(child)

    included = []
    front = list(profile.get("enabled_classes", []))
    if front:
        # These must always be included
        front.append("WorkerThreadPool")
        front.append("ClassDB")
        front.append("ClassDBSingleton")
        # In src/classes/low_level.cpp
        front.append("FileAccess")
        front.append("Image")
        front.append("XMLParser")
        # In include/godot_cpp/templates/thread_work_pool.hpp
        front.append("Semaphore")
    while front:
        cls = front.pop()
        if cls in included:
            continue
        included.append(cls)
        parent = parents.get(cls, "")
        if parent:
            front.append(parent)

    excluded = []
    front = list(profile.get("disabled_classes", []))
    while front:
        cls = front.pop()
        if cls in excluded:
            continue
        excluded.append(cls)
        front += children.get(cls, [])

    if included and excluded:
        print(
            "WARNING: Cannot specify both 'enabled_classes' and 'disabled_classes' in build profile. 'disabled_classes' will be ignored."
        )

    return {
        "enabled_classes": included,
        "disabled_classes": excluded,
    }


def generate_trimmed_api(source_api_filepath, profile_filepath):
    with open(source_api_filepath, encoding="utf-8") as api_file:
        api = json.load(api_file)

    if profile_filepath == "":
        return api

    build_profile = parse_build_profile(profile_filepath, api)

    engine_classes = {}
    for class_api in api["classes"]:
        engine_classes[class_api["name"]] = class_api["is_refcounted"]
    for native_struct in api["native_structures"]:
        if native_struct["name"] == "ObjectID":
            continue
        engine_classes[native_struct["name"]] = False

    classes = []
    for class_api in api["classes"]:
        if not is_class_included(class_api["name"], build_profile):
            continue
        if "methods" in class_api:
            methods = []
            for method in class_api["methods"]:
                if not is_method_included(method, build_profile, engine_classes):
                    continue
                methods.append(method)
            class_api["methods"] = methods
        classes.append(class_api)
    api["classes"] = classes

    return api


def is_class_included(class_name, build_profile):
    """
    Check if an engine class should be included.
    This removes classes according to a build profile of enabled or disabled classes.
    """
    included = build_profile.get("enabled_classes", [])
    excluded = build_profile.get("disabled_classes", [])
    if included:
        return class_name in included
    if excluded:
        return class_name not in excluded
    return True


def is_method_included(method, build_profile, engine_classes):
    """
    Check if an engine class method should be included.
    This removes methods according to a build profile of enabled or disabled classes.
    """
    included = build_profile.get("enabled_classes", [])
    excluded = build_profile.get("disabled_classes", [])
    ref_cls = set()
    rtype = get_base_type(method.get("return_value", {}).get("type", ""))
    args = [get_base_type(a["type"]) for a in method.get("arguments", [])]
    if rtype in engine_classes:
        ref_cls.add(rtype)
    elif is_enum(rtype) and get_enum_class(rtype) in engine_classes:
        ref_cls.add(get_enum_class(rtype))
    for arg in args:
        if arg in engine_classes:
            ref_cls.add(arg)
        elif is_enum(arg) and get_enum_class(arg) in engine_classes:
            ref_cls.add(get_enum_class(arg))
    for acls in ref_cls:
        if len(included) > 0 and acls not in included:
            return False
        elif len(excluded) > 0 and acls in excluded:
            return False
    return True


def is_enum(type_name):
    return type_name.startswith("enum::") or type_name.startswith("bitfield::")


def get_enum_class(enum_name: str):
    if "." in enum_name:
        if is_bitfield(enum_name):
            return enum_name.replace("bitfield::", "").split(".")[0]
        else:
            return enum_name.replace("enum::", "").split(".")[0]
    else:
        return "GlobalConstants"


def get_base_type(type_name):
    if type_name.startswith("const "):
        type_name = type_name[6:]
    if type_name.endswith("*"):
        type_name = type_name[:-1]
    if type_name.startswith("typedarray::"):
        type_name = type_name.replace("typedarray::", "")
    return type_name


def is_bitfield(type_name):
    return type_name.startswith("bitfield::")


if __name__ == "__main__":
    if len(sys.argv) < 3 or len(sys.argv) > 4:
        print("Usage: %s BUILD_PROFILE INPUT_JSON [OUTPUT_JSON]" % (sys.argv[0]))
        sys.exit(1)
    profile = sys.argv[1]
    infile = sys.argv[2]
    outfile = sys.argv[3] if len(sys.argv) > 3 else ""
    api = generate_trimmed_api(infile, profile)

    if outfile:
        with open(outfile, "w", encoding="utf-8") as f:
            json.dump(api, f)
    else:
        json.dump(api, sys.stdout)
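Per the Usage line above, the trimming can also be done offline, for example: python build_profile.py test/build_profile.json gdextension/extension_api.json extension_api.trimmed.json (the output filename is illustrative). The same thing from Python:

# Offline trimming, mirroring the __main__ block above; paths are illustrative.
import json

from build_profile import generate_trimmed_api

api = generate_trimmed_api("gdextension/extension_api.json", "test/build_profile.json")
with open("extension_api.trimmed.json", "w", encoding="utf-8") as f:
    json.dump(api, f)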
@@ -32,7 +32,6 @@
#define GODOT_VARIANT_INTERNAL_HPP

#include <gdextension_interface.h>
#include <godot_cpp/classes/gpu_particles3d.hpp>
#include <godot_cpp/variant/variant.hpp>

namespace godot {
@@ -6,26 +6,40 @@ from pathlib import Path

sys.path.insert(1, os.path.join(os.path.dirname(__file__), "..", ".."))

from binding_generator import generate_bindings, get_file_list
from binding_generator import _generate_bindings, _get_file_list
from build_profile import generate_trimmed_api

api_filepath = "gdextension/extension_api.json"
bits = "64"
precision = "single"
output_dir = "self_test"

generate_bindings(api_filepath, use_template_get_node=False, bits=bits, precision=precision, output_dir=output_dir)
flist = get_file_list(api_filepath, output_dir, headers=True, sources=True)

p = Path(output_dir) / "gen"
allfiles = [str(f.as_posix()) for f in p.glob("**/*.*")]
missing = list(filter((lambda f: f not in flist), allfiles))
extras = list(filter((lambda f: f not in allfiles), flist))
if len(missing) > 0 or len(extras) > 0:
    print("Error!")
    for f in missing:
        print("MISSING: " + str(f))
    for f in extras:
        print("EXTRA: " + str(f))
    sys.exit(1)
else:
    print("OK!")
def test(profile_filepath=""):
    api = generate_trimmed_api(api_filepath, profile_filepath)
    _generate_bindings(
        api,
        use_template_get_node=False,
        bits=bits,
        precision=precision,
        output_dir=output_dir,
    )
    flist = _get_file_list(api, output_dir, headers=True, sources=True)

    p = Path(output_dir) / "gen"
    allfiles = [str(f.as_posix()) for f in p.glob("**/*.*")]
    missing = list(filter((lambda f: f not in flist), allfiles))
    extras = list(filter((lambda f: f not in allfiles), flist))
    if len(missing) > 0 or len(extras) > 0:
        print("Error!")
        for f in missing:
            print("MISSING: " + str(f))
        for f in extras:
            print("EXTRA: " + str(f))
        sys.exit(1)
    else:
        print("OK!")


test()
test("test/build_profile.json")
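The updated check now exercises both a full build and the test profile; it can be run by hand from the root of a godot-cpp checkout and exits non-zero when the emitted file list and the files generated under self_test/ diverge:

# Manual invocation of the consistency check; assumes the current working directory is the godot-cpp checkout root.
import subprocess

subprocess.run(["python", "misc/scripts/check_get_file_list.py"], check=True)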
@@ -1,9 +1,13 @@
{
    "enabled_classes": [
        "Control",
        "InputEventKey",
        "Label",
        "MultiplayerAPI",
        "MultiplayerPeer",
        "OS",
        "TileMap",
        "InputEventKey"
        "TileSet",
        "Viewport"
    ]
}
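Note that enabling a class pulls in more than the literal list: parse_build_profile walks each enabled class up to its ancestors and force-enables the classes godot-cpp itself depends on (WorkerThreadPool, ClassDB, ClassDBSingleton, FileAccess, Image, XMLParser, Semaphore). One way to inspect the expanded set (run from a godot-cpp checkout; the exact output depends on the extension_api.json in use):

import json

from build_profile import parse_build_profile

with open("gdextension/extension_api.json", encoding="utf-8") as f:
    api = json.load(f)

profile = parse_build_profile("test/build_profile.json", api)
# Expect the classes listed above plus their ancestors (e.g. Node and Object for Control)
# and the force-enabled classes.
print(sorted(profile["enabled_classes"]))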
@@ -10,7 +10,8 @@ from SCons.Tool import Tool
from SCons.Variables import BoolVariable, EnumVariable, PathVariable
from SCons.Variables.BoolVariable import _text2bool

from binding_generator import scons_emit_files, scons_generate_bindings
from binding_generator import _generate_bindings, _get_file_list, get_file_list
from build_profile import generate_trimmed_api


def add_sources(sources, dir, extension):
@@ -129,6 +130,37 @@ def no_verbose(env):
    env.Append(GENCOMSTR=[generated_file_message])


def scons_emit_files(target, source, env):
    profile_filepath = env.get("build_profile", "")
    if profile_filepath:
        profile_filepath = normalize_path(profile_filepath, env)

    # Always clean all files
    env.Clean(target, [env.File(f) for f in get_file_list(str(source[0]), target[0].abspath, True, True)])

    api = generate_trimmed_api(str(source[0]), profile_filepath)
    files = [env.File(f) for f in _get_file_list(api, target[0].abspath, True, True)]
    env["godot_cpp_gen_dir"] = target[0].abspath
    return files, source


def scons_generate_bindings(target, source, env):
    profile_filepath = env.get("build_profile", "")
    if profile_filepath:
        profile_filepath = normalize_path(profile_filepath, env)

    api = generate_trimmed_api(str(source[0]), profile_filepath)

    _generate_bindings(
        api,
        env["generate_template_get_node"],
        "32" if "32" in env["arch"] else "64",
        env["precision"],
        env["godot_cpp_gen_dir"],
    )
    return None


platforms = ["linux", "macos", "windows", "android", "ios", "web"]

# CPU architecture options.