Merge pull request #1529 from dsnopek/4.1-cherrypicks-12

Cherry-picks for the godot-cpp 4.1 branch - 12th batch

commit a4f9d22ac5
@@ -0,0 +1,16 @@
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 4
+indent_style = tab
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[{*.py,SConstruct}]
+indent_style = space
+
+[*.{yml,yaml}]
+indent_size = 2
+indent_style = space
@@ -1,6 +1,2 @@
-*.c eol=lf
-*.cpp eol=lf
-*.gd eol=lf
-*.tscn eol=lf
-*.cfg eol=lf
-*.godot eol=lf
+# Normalize EOL for all files that Git considers text files
+* text=auto eol=lf
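A side note on the attributes change above: `* text=auto eol=lf` only takes effect when files are checked in or checked out again, so an existing clone is usually renormalized once by hand. A minimal sketch using standard Git commands (shown only as a hint, not as part of this patch):

    git add --renormalize .
    git commit -m "Normalize line endings"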
@@ -54,7 +54,7 @@ jobs:
             cache-name: windows-x86_64-mingw

           - name: 🍎 macOS (universal)
-            os: macos-11
+            os: macos-latest
             platform: macos
             artifact-name: godot-cpp-macos-universal-release
             artifact-path: bin/libgodot-cpp.macos.template_release.universal.a

@@ -72,7 +72,7 @@ jobs:
             cache-name: android-arm64

           - name: 🍏 iOS (arm64)
-            os: macos-11
+            os: macos-latest
             platform: ios
             artifact-name: godot-cpp-ios-arm64-release
             artifact-path: bin/libgodot-cpp.ios.template_release.arm64.a
@@ -7,48 +7,27 @@ concurrency:

 jobs:
   static-checks:
-    name: Format (clang-format, black format, file format)
-    runs-on: ubuntu-20.04
+    name: Format (clang-format, ruff format, file format)
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 2

-      # Azure repositories are not reliable, we need to prevent Azure giving us packages.
-      - name: Make apt sources.list use the default Ubuntu repositories
-        run: |
-          sudo rm -f /etc/apt/sources.list.d/*
-          sudo cp -f misc/ci/sources.list /etc/apt/sources.list
-          wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
-          sudo apt-add-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-15 main"
-          sudo apt-get update
-
-      - name: Install dependencies
-        run: |
-          sudo apt-get install -qq dos2unix recode clang-format-15 libxml2-utils python3-pip moreutils
-          sudo update-alternatives --remove-all clang-format || true
-          sudo update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-15 100
-          sudo pip3 install black==22.3.0 pygments pytest==7.1.2 mypy==0.971
-
-      - name: File formatting checks (file_format.sh)
-        run: |
-          bash ./misc/scripts/file_format.sh
-
-      - name: Header guards formatting checks (header_guards.sh)
-        run: |
-          bash ./misc/scripts/header_guards.sh
-
-      - name: Python style checks via black (black_format.sh)
-        run: |
-          bash ./misc/scripts/black_format.sh
-
-      - name: Python scripts static analysis (mypy_check.sh)
-        run: |
-          bash ./misc/scripts/mypy_check.sh
-
-      - name: Bindings generation checks (ensures get_file_list returns all generated files)
-        run: |
-          python ./misc/scripts/check_get_file_list.py
-
-      - name: Style checks via clang-format (clang_format.sh)
-        run: |
-          bash ./misc/scripts/clang_format.sh
+      - name: Get changed files
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          if [ "${{ github.event_name }}" == "pull_request" ]; then
+            files=$(git diff-tree --no-commit-id --name-only -r HEAD^1..HEAD 2> /dev/null || true)
+          elif [ "${{ github.event_name }}" == "push" -a "${{ github.event.forced }}" == "false" -a "${{ github.event.created }}" == "false" ]; then
+            files=$(git diff-tree --no-commit-id --name-only -r ${{ github.event.before }}..${{ github.event.after }} 2> /dev/null || true)
+          fi
+          files=$(echo "$files" | grep -v 'thirdparty' | xargs -I {} sh -c 'echo "\"./{}\""' | tr '\n' ' ')
+          echo "CHANGED_FILES=$files" >> $GITHUB_ENV
+
+      - name: Style checks via pre-commit
+        uses: pre-commit/action@v3.0.1
+        with:
+          extra_args: --verbose --hook-stage manual --files ${{ env.CHANGED_FILES }}
@@ -8,7 +8,7 @@
 include/gen
 src/gen

-# Build configuarion.
+# Build configuration.
 /custom.py

 # Misc
@@ -0,0 +1,64 @@
+default_language_version:
+  python: python3
+
+exclude: |
+  (?x)^(
+    gdextension/extension_api\.json|
+    gdextension/gdextension_interface\.h
+  )$
+
+repos:
+  - repo: https://github.com/pre-commit/mirrors-clang-format
+    rev: v17.0.6
+    hooks:
+      - id: clang-format
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.4
+    hooks:
+      - id: ruff
+        args: [--fix]
+      - id: ruff-format
+
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v0.971
+    hooks:
+      - id: mypy
+        files: \.py$
+        types_or: [text]
+
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.3.0
+    hooks:
+      - id: codespell
+        additional_dependencies: [tomli]
+
+  - repo: local
+    hooks:
+      - id: copyright-headers
+        name: copyright-headers
+        language: python
+        entry: python misc/scripts/copyright_headers.py
+        files: \.(c|h)pp$
+        exclude: ^test/
+
+      - id: header-guards
+        name: header-guards
+        language: python
+        entry: python misc/scripts/header_guards.py
+        files: \.hpp$
+        exclude: ^test/
+
+      - id: file-format
+        name: file-format
+        language: python
+        entry: python misc/scripts/file_format.py
+        types_or: [text]
+
+      - id: check-get-file-list
+        name: check-get-file-list
+        language: python
+        entry: python misc/scripts/check_get_file_list.py
+        pass_filenames: false
+        always_run: true
+        stages: [manual]
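For contributors, the configuration added above is consumed by the standard pre-commit tool, so the CI checks can be reproduced locally. A minimal sketch, assuming pre-commit is installed from PyPI:

    pip install pre-commit
    pre-commit run --all-files
    # Hooks marked `stages: [manual]` (such as check-get-file-list) only run when asked for,
    # which is what the workflow above does via `--hook-stage manual`:
    pre-commit run --all-files --hook-stage manual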
@@ -3,7 +3,7 @@
 #
 # godot-cpp cmake arguments
 # GODOT_GDEXTENSION_DIR: Path to the directory containing GDExtension interface header and API JSON file
-# GODOT_CPP_SYSTEM_HEADERS Mark the header files as SYSTEM. This may be useful to supress warnings in projects including this one.
+# GODOT_CPP_SYSTEM_HEADERS Mark the header files as SYSTEM. This may be useful to suppress warnings in projects including this one.
 # GODOT_CPP_WARNING_AS_ERROR Treat any warnings as errors
 # GODOT_CUSTOM_API_FILE: Path to a custom GDExtension API JSON file (takes precedence over `gdextension_dir`)
 # FLOAT_PRECISION: Floating-point precision level ("single", "double")
@@ -1,18 +1,13 @@
 #!/usr/bin/env python

 import os
-import platform
-import sys
-import subprocess
-from binding_generator import scons_generate_bindings, scons_emit_files
-

 EnsureSConsVersion(4, 0)


 try:
     Import("env")
-except:
+except Exception:
     # Default tools with no platform defaults to gnu toolchain.
     # We apply platform specific toolchains via our custom tools.
     env = Environment(tools=["default"], PLATFORM="")

@@ -23,7 +18,7 @@ env.PrependENVPath("PATH", os.getenv("PATH"))
 customs = ["custom.py"]
 try:
     customs += Import("customs")
-except:
+except Exception:
     pass
 profile = ARGUMENTS.get("profile", "")
 if profile:
@@ -70,12 +70,14 @@ def generate_wrappers(target):
         f.write(txt)


-def get_file_list(api_filepath, output_dir, headers=False, sources=False):
+def get_file_list(api_filepath, output_dir, headers=False, sources=False, profile_filepath=""):
     api = {}
     files = []
     with open(api_filepath, encoding="utf-8") as api_file:
         api = json.load(api_file)

+    build_profile = parse_build_profile(profile_filepath, api)
+
     core_gen_folder = Path(output_dir) / "gen" / "include" / "godot_cpp" / "core"
     include_gen_folder = Path(output_dir) / "gen" / "include" / "godot_cpp"
     source_gen_folder = Path(output_dir) / "gen" / "src"

@@ -105,7 +107,7 @@ def get_file_list(api_filepath, output_dir, headers=False, sources=False):
         source_filename = source_gen_folder / "classes" / (camel_to_snake(engine_class["name"]) + ".cpp")
         if headers:
             files.append(str(header_filename.as_posix()))
-        if sources:
+        if sources and is_class_included(engine_class["name"], build_profile):
             files.append(str(source_filename.as_posix()))

     for native_struct in api["native_structures"]:

@@ -134,12 +136,105 @@ def get_file_list(api_filepath, output_dir, headers=False, sources=False):
     return files


-def print_file_list(api_filepath, output_dir, headers=False, sources=False):
-    print(*get_file_list(api_filepath, output_dir, headers, sources), sep=";", end=None)
+def print_file_list(api_filepath, output_dir, headers=False, sources=False, profile_filepath=""):
+    print(*get_file_list(api_filepath, output_dir, headers, sources, profile_filepath), sep=";", end=None)
+
+
+def parse_build_profile(profile_filepath, api):
+    if profile_filepath == "":
+        return {}
+    print("Using feature build profile: " + profile_filepath)
+
+    with open(profile_filepath, encoding="utf-8") as profile_file:
+        profile = json.load(profile_file)
+
+    api_dict = {}
+    parents = {}
+    children = {}
+    for engine_class in api["classes"]:
+        api_dict[engine_class["name"]] = engine_class
+        parent = engine_class.get("inherits", "")
+        child = engine_class["name"]
+        parents[child] = parent
+        if parent == "":
+            continue
+        children[parent] = children.get(parent, [])
+        children[parent].append(child)
+
+    # Parse methods dependencies
+    deps = {}
+    reverse_deps = {}
+    for name, engine_class in api_dict.items():
+        ref_cls = set()
+        for method in engine_class.get("methods", []):
+            rtype = method.get("return_value", {}).get("type", "")
+            args = [a["type"] for a in method.get("arguments", [])]
+            if rtype in api_dict:
+                ref_cls.add(rtype)
+            elif is_enum(rtype) and get_enum_class(rtype) in api_dict:
+                ref_cls.add(get_enum_class(rtype))
+            for arg in args:
+                if arg in api_dict:
+                    ref_cls.add(arg)
+                elif is_enum(arg) and get_enum_class(arg) in api_dict:
+                    ref_cls.add(get_enum_class(arg))
+        deps[engine_class["name"]] = set(filter(lambda x: x != name, ref_cls))
+        for acls in ref_cls:
+            if acls == name:
+                continue
+            reverse_deps[acls] = reverse_deps.get(acls, set())
+            reverse_deps[acls].add(name)
+
+    included = []
+    front = list(profile.get("enabled_classes", []))
+    if front:
+        # These must always be included
+        front.append("WorkerThreadPool")
+        front.append("ClassDB")
+        front.append("ClassDBSingleton")
+    while front:
+        cls = front.pop()
+        if cls in included:
+            continue
+        included.append(cls)
+        parent = parents.get(cls, "")
+        if parent:
+            front.append(parent)
+        for rcls in deps.get(cls, set()):
+            if rcls in included or rcls in front:
+                continue
+            front.append(rcls)
+
+    excluded = []
+    front = list(profile.get("disabled_classes", []))
+    while front:
+        cls = front.pop()
+        if cls in excluded:
+            continue
+        excluded.append(cls)
+        front += children.get(cls, [])
+        for rcls in reverse_deps.get(cls, set()):
+            if rcls in excluded or rcls in front:
+                continue
+            front.append(rcls)
+
+    if included and excluded:
+        print(
+            "WARNING: Cannot specify both 'enabled_classes' and 'disabled_classes' in build profile. 'disabled_classes' will be ignored."
+        )
+
+    return {
+        "enabled_classes": included,
+        "disabled_classes": excluded,
+    }


 def scons_emit_files(target, source, env):
-    files = [env.File(f) for f in get_file_list(str(source[0]), target[0].abspath, True, True)]
+    profile_filepath = env.get("build_profile", "")
+    if profile_filepath and not Path(profile_filepath).is_absolute():
+        profile_filepath = str((Path(env.Dir("#").abspath) / profile_filepath).as_posix())
+
+    files = [env.File(f) for f in get_file_list(str(source[0]), target[0].abspath, True, True, profile_filepath)]
     env.Clean(target, files)
     env["godot_cpp_gen_dir"] = target[0].abspath
     return files, source
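To sketch how the new profile plumbing above is meant to be used (the file name and class list here are purely illustrative): a build profile is a small JSON document carrying either an "enabled_classes" or a "disabled_classes" array, e.g.

    {
        "enabled_classes": ["Node3D", "Sprite2D", "Label"]
    }

parse_build_profile() expands that seed list with parent classes and method dependencies, and get_file_list() then drops the generated sources for classes that are not included. On the SCons side the path is read from the build_profile construction variable in scons_emit_files(), so an invocation along the lines of `scons build_profile=build_profile.json` (assuming the surrounding build scripts forward that argument into the environment) enables it.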
@@ -489,14 +584,14 @@ def generate_builtin_class_header(builtin_api, size, used_classes, fully_used_cl
         result.append(method_signature)

     # Move constructor.
-    result.append(f"\t{class_name}({class_name} &&other);")
+    result.append(f"\t{class_name}({class_name} &&p_other);")

     # Special cases.
     if class_name == "String" or class_name == "StringName" or class_name == "NodePath":
-        result.append(f"\t{class_name}(const char *from);")
-        result.append(f"\t{class_name}(const wchar_t *from);")
-        result.append(f"\t{class_name}(const char16_t *from);")
-        result.append(f"\t{class_name}(const char32_t *from);")
+        result.append(f"\t{class_name}(const char *p_from);")
+        result.append(f"\t{class_name}(const wchar_t *p_from);")
+        result.append(f"\t{class_name}(const char16_t *p_from);")
+        result.append(f"\t{class_name}(const char32_t *p_from);")

     if "constants" in builtin_api:
         axis_constants_count = 0
@@ -524,14 +619,16 @@ def generate_builtin_class_header(builtin_api, size, used_classes, fully_used_cl

         vararg = method["is_vararg"]
         if vararg:
-            result.append("\ttemplate<typename... Args>")
+            result.append("\ttemplate <typename... Args>")

         method_signature = "\t"
         if "is_static" in method and method["is_static"]:
             method_signature += "static "

         if "return_type" in method:
-            method_signature += f'{correct_type(method["return_type"])} '
+            method_signature += f'{correct_type(method["return_type"])}'
+            if not method_signature.endswith("*"):
+                method_signature += " "
         else:
             method_signature += "void "

@@ -554,10 +651,10 @@ def generate_builtin_class_header(builtin_api, size, used_classes, fully_used_cl

     # Special cases.
     if class_name == "String":
-        result.append("\tstatic String utf8(const char *from, int64_t len = -1);")
-        result.append("\tvoid parse_utf8(const char *from, int64_t len = -1);")
-        result.append("\tstatic String utf16(const char16_t *from, int64_t len = -1);")
-        result.append("\tvoid parse_utf16(const char16_t *from, int64_t len = -1);")
+        result.append("\tstatic String utf8(const char *p_from, int64_t p_len = -1);")
+        result.append("\tvoid parse_utf8(const char *p_from, int64_t p_len = -1);")
+        result.append("\tstatic String utf16(const char16_t *p_from, int64_t p_len = -1);")
+        result.append("\tvoid parse_utf16(const char16_t *p_from, int64_t p_len = -1);")
         result.append("\tCharString utf8() const;")
         result.append("\tCharString ascii() const;")
         result.append("\tChar16String utf16() const;")
@@ -577,7 +674,7 @@ def generate_builtin_class_header(builtin_api, size, used_classes, fully_used_cl
         if operator["name"] not in ["in", "xor"]:
             if "right_type" in operator:
                 result.append(
-                    f'\t{correct_type(operator["return_type"])} operator{operator["name"]}({type_for_parameter(operator["right_type"])}other) const;'
+                    f'\t{correct_type(operator["return_type"])} operator{operator["name"]}({type_for_parameter(operator["right_type"])}p_other) const;'
                 )
             else:
                 result.append(
@@ -586,10 +683,10 @@ def generate_builtin_class_header(builtin_api, size, used_classes, fully_used_cl

     # Copy assignment.
     if copy_constructor_index >= 0:
-        result.append(f"\t{class_name} &operator=(const {class_name} &other);")
+        result.append(f"\t{class_name} &operator=(const {class_name} &p_other);")

     # Move assignment.
-    result.append(f"\t{class_name} &operator=({class_name} &&other);")
+    result.append(f"\t{class_name} &operator=({class_name} &&p_other);")

     # Special cases.
     if class_name == "String":
@@ -623,8 +720,8 @@ def generate_builtin_class_header(builtin_api, size, used_classes, fully_used_cl

     if class_name == "Array":
         result.append("\ttemplate <typename... Args>")
-        result.append("\tstatic Array make(Args... args) {")
-        result.append("\t\treturn helpers::append_all(Array(), args...);")
+        result.append("\tstatic Array make(Args... p_args) {")
+        result.append("\t\treturn helpers::append_all(Array(), p_args...);")
         result.append("\t}")

     if is_packed_array(class_name):
@@ -919,13 +1016,13 @@ def generate_builtin_class_source(builtin_api, size, used_classes, fully_used_cl
     result.append("")

     # Move constructor.
-    result.append(f"{class_name}::{class_name}({class_name} &&other) {{")
+    result.append(f"{class_name}::{class_name}({class_name} &&p_other) {{")
     if needs_copy_instead_of_move(class_name) and copy_constructor_index >= 0:
         result.append(
-            f"\tinternal::_call_builtin_constructor(_method_bindings.constructor_{copy_constructor_index}, &opaque, &other);"
+            f"\tinternal::_call_builtin_constructor(_method_bindings.constructor_{copy_constructor_index}, &opaque, &p_other);"
         )
     else:
-        result.append("\tstd::swap(opaque, other.opaque);")
+        result.append("\tstd::swap(opaque, p_other.opaque);")
     result.append("}")
     result.append("")

@@ -946,7 +1043,7 @@ def generate_builtin_class_source(builtin_api, size, used_classes, fully_used_cl
             continue

         method_signature = make_signature(class_name, method, for_builtin=True)
-        result.append(method_signature + "{")
+        result.append(method_signature + " {")

         method_call = "\t"
         is_ref = False
@@ -1016,7 +1113,7 @@ def generate_builtin_class_source(builtin_api, size, used_classes, fully_used_cl
         if operator["name"] not in ["in", "xor"]:
             if "right_type" in operator:
                 result.append(
-                    f'{correct_type(operator["return_type"])} {class_name}::operator{operator["name"]}({type_for_parameter(operator["right_type"])}other) const {{'
+                    f'{correct_type(operator["return_type"])} {class_name}::operator{operator["name"]}({type_for_parameter(operator["right_type"])}p_other) const {{'
                 )
                 (encode, arg_name) = get_encoded_arg("other", operator["right_type"], None)
                 result += encode
@@ -1036,7 +1133,7 @@ def generate_builtin_class_source(builtin_api, size, used_classes, fully_used_cl

     # Copy assignment.
     if copy_constructor_index >= 0:
-        result.append(f"{class_name} &{class_name}::operator=(const {class_name} &other) {{")
+        result.append(f"{class_name} &{class_name}::operator=(const {class_name} &p_other) {{")
         if builtin_api["has_destructor"]:
             result.append("\t_method_bindings.destructor(&opaque);")
         (encode, arg_name) = get_encoded_arg(
@@ -1053,13 +1150,13 @@ def generate_builtin_class_source(builtin_api, size, used_classes, fully_used_cl
     result.append("")

     # Move assignment.
-    result.append(f"{class_name} &{class_name}::operator=({class_name} &&other) {{")
+    result.append(f"{class_name} &{class_name}::operator=({class_name} &&p_other) {{")
     if needs_copy_instead_of_move(class_name) and copy_constructor_index >= 0:
         result.append(
-            f"\tinternal::_call_builtin_constructor(_method_bindings.constructor_{copy_constructor_index}, &opaque, &other);"
+            f"\tinternal::_call_builtin_constructor(_method_bindings.constructor_{copy_constructor_index}, &opaque, &p_other);"
         )
     else:
-        result.append("\tstd::swap(opaque, other.opaque);")
+        result.append("\tstd::swap(opaque, p_other.opaque);")
     result.append("\treturn *this;")
     result.append("}")

@@ -1231,7 +1328,7 @@ def generate_engine_classes_bindings(api, output_dir, use_template_get_node):
             for field in expanded_format.split(";"):
                 field_type = field.strip().split(" ")[0].split("::")[0]
                 if field_type != "" and not is_included_type(field_type) and not is_pod_type(field_type):
-                    if not field_type in used_classes:
+                    if field_type not in used_classes:
                         used_classes.append(field_type)

     result.append("")
@@ -1354,8 +1451,6 @@ def generate_engine_class_header(class_api, used_classes, fully_used_classes, us
         vararg = "is_vararg" in method and method["is_vararg"]

         method_signature = "\t"
-        if vararg:
-            method_signature += "private: "
         method_signature += make_signature(
             class_name, method, for_header=True, use_template_get_node=use_template_get_node
         )
@@ -1425,16 +1520,16 @@ def generate_engine_class_header(class_api, used_classes, fully_used_classes, us
     if class_name == "Object":
         result.append("")

-        result.append("\ttemplate<typename T>")
+        result.append("\ttemplate <typename T>")
         result.append("\tstatic T *cast_to(Object *p_object);")

-        result.append("\ttemplate<typename T>")
+        result.append("\ttemplate <typename T>")
         result.append("\tstatic const T *cast_to(const Object *p_object);")

         result.append("\tvirtual ~Object() = default;")

     elif use_template_get_node and class_name == "Node":
-        result.append("\ttemplate<typename T>")
+        result.append("\ttemplate <typename T>")
         result.append(
             "\tT *get_node(const NodePath &p_path) const { return Object::cast_to<T>(get_node_internal(p_path)); }"
         )
@@ -1476,7 +1571,7 @@ def generate_engine_class_header(class_api, used_classes, fully_used_classes, us

         vararg = "is_vararg" in method and method["is_vararg"]
         if vararg:
-            method_signature = "\ttemplate<typename... Args> static "
+            method_signature = "\ttemplate <typename... Args> static "
         else:
             method_signature = "\tstatic "

@@ -1490,7 +1585,9 @@ def generate_engine_class_header(class_api, used_classes, fully_used_classes, us
             False,
         )
         if return_type is not None:
-            method_signature += return_type + " "
+            method_signature += return_type
+            if not method_signature.endswith("*"):
+                method_signature += " "
         else:
             method_signature += "void "

@@ -1514,9 +1611,9 @@ def generate_engine_class_header(class_api, used_classes, fully_used_classes, us
             if "alias_for" in class_api and return_type.startswith(class_api["alias_for"] + "::"):
                 method_body += f"({return_type})"
             method_body += f'ClassDBSingleton::get_singleton()->{method["name"]}('
-            method_body += ", ".join(map(lambda x: escape_identifier(x["name"]), method_arguments))
+            method_body += ", ".join(map(lambda x: escape_argument(x["name"]), method_arguments))
             if vararg:
-                method_body += ", args..."
+                method_body += ", p_args..."
             method_body += "); \\"

             result.append(method_body)
@@ -1665,7 +1762,7 @@ def generate_engine_class_source(class_api, used_classes, fully_used_classes, us
     else: # vararg.
         result.append("\tGDExtensionCallError error;")
         result.append("\tVariant ret;")
-        method_call += "internal::gdextension_interface_object_method_bind_call(_gde_method_bind, _owner, reinterpret_cast<GDExtensionConstVariantPtr *>(args), arg_count, &ret, &error"
+        method_call += "internal::gdextension_interface_object_method_bind_call(_gde_method_bind, _owner, reinterpret_cast<GDExtensionConstVariantPtr *>(p_args), p_arg_count, &ret, &error"

     if is_ref:
         method_call += ")" # Close Ref<> constructor.
@@ -1809,7 +1906,7 @@ def generate_global_constant_binds(api, output_dir):
             header.append(f'VARIANT_ENUM_CAST({enum_def["name"]});')

     # Variant::Type is not a global enum, but only one line, it is worth to place in this file instead of creating new file.
-    header.append(f"VARIANT_ENUM_CAST(godot::Variant::Type);")
+    header.append("VARIANT_ENUM_CAST(godot::Variant::Type);")

     header.append("")

@@ -1928,7 +2025,7 @@ def generate_utility_functions(api, output_dir):
            source.append(f'\t{get_gdextension_type(correct_type(function["return_type"]))} ret;')
        else:
            source.append("\tVariant ret;")
-        function_call += "_gde_function(&ret, reinterpret_cast<GDExtensionConstVariantPtr *>(args), arg_count"
+        function_call += "_gde_function(&ret, reinterpret_cast<GDExtensionConstVariantPtr *>(p_args), p_arg_count"

        function_call += ");"
        source.append(function_call)
@@ -1959,9 +2056,9 @@ def make_function_parameters(parameters, include_default=False, for_builtin=Fals

     for index, par in enumerate(parameters):
         parameter = type_for_parameter(par["type"], par["meta"] if "meta" in par else None)
-        parameter_name = escape_identifier(par["name"])
+        parameter_name = escape_argument(par["name"])
         if len(parameter_name) == 0:
-            parameter_name = "arg_" + str(index + 1)
+            parameter_name = "p_arg_" + str(index + 1)
         parameter += parameter_name

         if include_default and "default_value" in par and (not for_builtin or par["type"] != "Variant"):
@@ -1975,7 +2072,7 @@ def make_function_parameters(parameters, include_default=False, for_builtin=Fals
         signature.append(parameter)

     if is_vararg:
-        signature.append("const Args&... args")
+        signature.append("const Args&... p_args")

     return ", ".join(signature)

@@ -2006,7 +2103,7 @@ def get_include_path(type_name):
 def get_encoded_arg(arg_name, type_name, type_meta):
     result = []

-    name = escape_identifier(arg_name)
+    name = escape_argument(arg_name)
     arg_type = correct_type(type_name)
     if is_pod_type(arg_type):
         result.append(f"\t{get_gdextension_type(arg_type)} {name}_encoded;")
@@ -2072,7 +2169,7 @@ def make_signature(
     if not is_vararg:
         function_signature += make_function_parameters(arguments, for_header, for_builtin, is_vararg)
     else:
-        function_signature += "const Variant **args, GDExtensionInt arg_count"
+        function_signature += "const Variant **p_args, GDExtensionInt p_arg_count"

     function_signature += ")"

@@ -2087,7 +2184,7 @@ def make_signature(
 def make_varargs_template(function_data, static=False):
     result = []

-    function_signature = "\tpublic: template<typename... Args> "
+    function_signature = "\tpublic: template <typename... Args> "

     if static:
         function_signature += "static "
@@ -2131,12 +2228,12 @@ def make_varargs_template(function_data, static=False):
     args_array = f"\t\tstd::array<Variant, {len(method_arguments)} + sizeof...(Args)> variant_args {{ "
     for argument in method_arguments:
         if argument["type"] == "Variant":
-            args_array += escape_identifier(argument["name"])
+            args_array += escape_argument(argument["name"])
         else:
-            args_array += f'Variant({escape_identifier(argument["name"])})'
+            args_array += f'Variant({escape_argument(argument["name"])})'
         args_array += ", "

-    args_array += "Variant(args)... };"
+    args_array += "Variant(p_args)... };"
     result.append(args_array)
     result.append(f"\t\tstd::array<const Variant *, {len(method_arguments)} + sizeof...(Args)> call_args;")
     result.append("\t\tfor(size_t i = 0; i < variant_args.size(); i++) {")
@@ -2227,7 +2324,7 @@ def is_packed_array(type_name):

 def needs_copy_instead_of_move(type_name):
     """
-    Those are types which need initialised data or we'll get warning spam so need a copy instead of move.
+    Those are types which need initialized data or we'll get warning spam so need a copy instead of move.
     """
     return type_name in [
         "Dictionary",
@@ -2290,6 +2387,20 @@ def is_refcounted(type_name):
     return type_name in engine_classes and engine_classes[type_name]


+def is_class_included(class_name, build_profile):
+    """
+    Check if an engine class should be included.
+    This removes classes according to a build profile of enabled or disabled classes.
+    """
+    included = build_profile.get("enabled_classes", [])
+    excluded = build_profile.get("disabled_classes", [])
+    if included:
+        return class_name in included
+    if excluded:
+        return class_name not in excluded
+    return True
+
+
 def is_included(type_name, current_type):
     """
     Check if a builtin type should be included.
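A quick sketch of how the new helper behaves (the profile contents are illustrative only):

    is_class_included("Node", {"enabled_classes": ["Node", "Object"]})    # True
    is_class_included("RenderingServer", {"enabled_classes": ["Node"]})   # False
    is_class_included("RenderingServer", {"disabled_classes": ["Node"]})  # True
    is_class_included("AnyClass", {})                                     # True: an empty profile includes everything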
@@ -2321,7 +2432,7 @@ def correct_default_value(value, type_name):
     if value == "":
         return f"{type_name}()"
     if value.startswith("Array["):
-        return f"{{}}"
+        return "{}"
     if value.startswith("&"):
         return value[1::]
     if value.startswith("^"):
@@ -2337,7 +2448,7 @@ def correct_typed_array(type_name):

 def correct_type(type_name, meta=None, use_alias=True):
     type_conversion = {"float": "double", "int": "int64_t", "Nil": "Variant"}
-    if meta != None:
+    if meta is not None:
         if "int" in meta:
             return f"{meta}_t"
         elif meta in type_conversion:
@@ -2367,7 +2478,7 @@ def correct_type(type_name, meta=None, use_alias=True):
         return f"Ref<{type_name}>"
     if type_name == "Object" or is_engine_class(type_name):
         return f"{type_name} *"
-    if type_name.endswith("*"):
+    if type_name.endswith("*") and not type_name.endswith("**") and not type_name.endswith(" *"):
         return f"{type_name[:-1]} *"
     return type_name

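As an illustration of the tightened pointer handling above (the type names are examples only): a bare trailing "*" is still rewritten with a separating space, while spellings that already carry a space or a double pointer are now left untouched instead of being mangled:

    correct_type("AudioFrame*")   # -> "AudioFrame *"
    correct_type("AudioFrame *")  # unchanged
    correct_type("uint8_t **")    # unchanged (previously would have become "uint8_t * *")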
@@ -2416,6 +2527,12 @@ def escape_identifier(id):
     return id


+def escape_argument(id):
+    if id.startswith("p_") or id.startswith("r_"):
+        return id
+    return "p_" + id
+
+
 def get_operator_id_name(op):
     op_id_map = {
         "==": "equal",
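A brief sketch of the new helper (example argument names only):

    escape_argument("from")    # -> "p_from"
    escape_argument("p_size")  # -> "p_size" (already prefixed)
    escape_argument("r_ret")   # -> "r_ret" (out-parameters keep their r_ prefix)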
|
@ -2442,7 +2559,6 @@ def get_operator_id_name(op):
|
||||||
"or": "or",
|
"or": "or",
|
||||||
"xor": "xor",
|
"xor": "xor",
|
||||||
"not": "not",
|
"not": "not",
|
||||||
"and": "and",
|
|
||||||
"in": "in",
|
"in": "in",
|
||||||
}
|
}
|
||||||
return op_id_map[op]
|
return op_id_map[op]
|
||||||
|
|
|
@@ -132,7 +132,7 @@ public:
		std::vector<GDExtensionClassMethodArgumentMetadata> vec;
		// First element is return value
		vec.reserve(argument_count + 1);
-		for (int i = 0; i < argument_count; i++) {
+		for (int i = 0; i < argument_count + 1; i++) {
			vec.push_back(get_argument_metadata(i - 1));
		}
		return vec;
@@ -31,10 +31,10 @@
 #ifndef GODOT_LOCAL_VECTOR_HPP
 #define GODOT_LOCAL_VECTOR_HPP

-#include "godot_cpp/core/error_macros.hpp"
-#include "godot_cpp/core/memory.hpp"
-#include "godot_cpp/templates/sort_array.hpp"
-#include "godot_cpp/templates/vector.hpp"
+#include <godot_cpp/core/error_macros.hpp>
+#include <godot_cpp/core/memory.hpp>
+#include <godot_cpp/templates/sort_array.hpp>
+#include <godot_cpp/templates/vector.hpp>

 #include <initializer_list>
 #include <type_traits>
@@ -28,6 +28,9 @@
 /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
 /**************************************************************************/

+#ifndef GODOT_ARRAY_HELPERS_HPP
+#define GODOT_ARRAY_HELPERS_HPP
+
 namespace godot {
 namespace helpers {
 template <typename T, typename ValueT>

@@ -48,3 +51,5 @@ T append_all(T appendable) {
 }
 } // namespace helpers
 } // namespace godot
+
+#endif // GODOT_ARRAY_HELPERS_HPP
@@ -28,6 +28,9 @@
 /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
 /**************************************************************************/

+#ifndef GODOT_COLOR_NAMES_INC_HPP
+#define GODOT_COLOR_NAMES_INC_HPP
+
 namespace godot {

 // Names from https://en.wikipedia.org/wiki/X11_color_names

@@ -189,3 +192,5 @@ static NamedColor named_colors[] = {
 };

 } // namespace godot
+
+#endif // GODOT_COLOR_NAMES_INC_HPP
@@ -146,7 +146,7 @@ struct _NO_DISCARD_ Rect2 {
		return size.x > 0.0f && size.y > 0.0f;
	}

-	// Returns the instersection between two Rect2s or an empty Rect2 if there is no intersection
+	// Returns the intersection between two Rect2s or an empty Rect2 if there is no intersection
	inline Rect2 intersection(const Rect2 &p_rect) const {
		Rect2 new_rect = p_rect;

@@ -89,7 +89,7 @@ struct _NO_DISCARD_ Rect2i {
		return size.x > 0 && size.y > 0;
	}

-	// Returns the instersection between two Rect2is or an empty Rect2i if there is no intersection
+	// Returns the intersection between two Rect2is or an empty Rect2i if there is no intersection
	inline Rect2i intersection(const Rect2i &p_rect) const {
		Rect2i new_rect = p_rect;

@@ -44,10 +44,15 @@ public:
		_ref(p_array);
	}
	_FORCE_INLINE_ TypedArray(const Variant &p_variant) :
-			Array(p_variant.operator Array(), Variant::OBJECT, T::get_class_static(), Variant()) {
+			TypedArray(Array(p_variant)) {
	}
-	_FORCE_INLINE_ TypedArray(const Array &p_array) :
-			Array(p_array, Variant::OBJECT, T::get_class_static(), Variant()) {
+	_FORCE_INLINE_ TypedArray(const Array &p_array) {
+		set_typed(Variant::OBJECT, T::get_class_static(), Variant());
+		if (is_same_typed(p_array)) {
+			_ref(p_array);
+		} else {
+			assign(p_array);
+		}
	}
	_FORCE_INLINE_ TypedArray() {
		set_typed(Variant::OBJECT, T::get_class_static(), Variant());

@@ -65,10 +70,15 @@ public:
		_ref(p_array); \
	} \
	_FORCE_INLINE_ TypedArray(const Variant &p_variant) : \
-			Array(p_variant.operator Array(), m_variant_type, StringName(), Variant()) { \
+			TypedArray(Array(p_variant)) { \
	} \
-	_FORCE_INLINE_ TypedArray(const Array &p_array) : \
-			Array(p_array, m_variant_type, StringName(), Variant()) { \
+	_FORCE_INLINE_ TypedArray(const Array &p_array) { \
+		set_typed(m_variant_type, StringName(), Variant()); \
+		if (is_same_typed(p_array)) { \
+			_ref(p_array); \
+		} else { \
+			assign(p_array); \
+		} \
	} \
	_FORCE_INLINE_ TypedArray() { \
		set_typed(m_variant_type, StringName(), Variant()); \
@@ -1,37 +0,0 @@
-# Git hooks for Godot Engine
-
-This folder contains Git hooks meant to be installed locally by Godot Engine
-contributors to make sure they comply with our requirements.
-
-## List of hooks
-
-- Pre-commit hook for `clang-format`: Applies `clang-format` to the staged
-  files before accepting a commit; blocks the commit and generates a patch if
-  the style is not respected.
-  You may need to edit the file if your `clang-format` binary is not in the
-  `PATH`, or if you want to enable colored output with `pygmentize`.
-- Pre-commit hook for `black`: Applies `black` to the staged Python files
-  before accepting a commit.
-- Pre-commit hook for `make_rst`: Checks the class reference syntax using
-  `make_rst.py`.
-
-## Installation
-
-Copy all the files from this folder into your `.git/hooks` folder, and make
-sure the hooks and helper scripts are executable.
-
-#### Linux/MacOS
-
-The hooks rely on bash scripts and tools which should be in the system `PATH`,
-so they should work out of the box on Linux/macOS.
-
-#### Windows
-
-##### clang-format
-- Download LLVM for Windows (version 13 or later) from
-  <https://releases.llvm.org/download.html>
-- Make sure LLVM is added to the `PATH` during installation
-
-##### black
-- Python installation: make sure Python is added to the `PATH`
-- Install `black` - in any console: `pip3 install black`
@@ -1,48 +0,0 @@
-#!/bin/sh
-
-# Provide the canonicalize filename (physical filename with out any symlinks)
-# like the GNU version readlink with the -f option regardless of the version of
-# readlink (GNU or BSD).
-
-# This file is part of a set of unofficial pre-commit hooks available
-# at github.
-# Link: https://github.com/githubbrowser/Pre-commit-hooks
-# Contact: David Martin, david.martin.mailbox@googlemail.com
-
-###########################################################
-# There should be no need to change anything below this line.
-
-# Canonicalize by recursively following every symlink in every component of the
-# specified filename. This should reproduce the results of the GNU version of
-# readlink with the -f option.
-#
-# Reference: https://stackoverflow.com/questions/1055671/how-can-i-get-the-behavior-of-gnus-readlink-f-on-a-mac
-canonicalize_filename () {
-    local target_file="$1"
-    local physical_directory=""
-    local result=""
-
-    # Need to restore the working directory after work.
-    local working_dir="`pwd`"
-
-    cd -- "$(dirname -- "$target_file")"
-    target_file="$(basename -- "$target_file")"
-
-    # Iterate down a (possible) chain of symlinks
-    while [ -L "$target_file" ]
-    do
-        target_file="$(readlink -- "$target_file")"
-        cd -- "$(dirname -- "$target_file")"
-        target_file="$(basename -- "$target_file")"
-    done
-
-    # Compute the canonicalized name by finding the physical path
-    # for the directory we're in and appending the target file.
-    physical_directory="`pwd -P`"
-    result="$physical_directory/$target_file"
-
-    # restore the working directory after work.
-    cd -- "$working_dir"
-
-    echo "$result"
-}
@@ -1,50 +0,0 @@
-#!/bin/sh
-# Git pre-commit hook that runs multiple hooks specified in $HOOKS.
-# Make sure this script is executable. Bypass hooks with git commit --no-verify.
-
-# This file is part of a set of unofficial pre-commit hooks available
-# at github.
-# Link: https://github.com/githubbrowser/Pre-commit-hooks
-# Contact: David Martin, david.martin.mailbox@googlemail.com
-
-
-###########################################################
-# CONFIGURATION:
-# pre-commit hooks to be executed. They should be in the same .git/hooks/ folder
-# as this script. Hooks should return 0 if successful and nonzero to cancel the
-# commit. They are executed in the order in which they are listed.
-#HOOKS="pre-commit-compile pre-commit-uncrustify"
-HOOKS="pre-commit-clang-format pre-commit-black"
-###########################################################
-# There should be no need to change anything below this line.
-
-. "$(dirname -- "$0")/canonicalize_filename.sh"
-
-# exit on error
-set -e
-
-# Absolute path to this script, e.g. /home/user/bin/foo.sh
-SCRIPT="$(canonicalize_filename "$0")"
-
-# Absolute path this script is in, thus /home/user/bin
-SCRIPTPATH="$(dirname -- "$SCRIPT")"
-
-
-for hook in $HOOKS
-do
-    echo "Running hook: $hook"
-    # run hook if it exists
-    # if it returns with nonzero exit with 1 and thus abort the commit
-    if [ -f "$SCRIPTPATH/$hook" ]; then
-        "$SCRIPTPATH/$hook"
-        if [ $? != 0 ]; then
-            exit 1
-        fi
-    else
-        echo "Error: file $hook not found."
-        echo "Aborting commit. Make sure the hook is in $SCRIPTPATH and executable."
-        echo "You can disable it by removing it from the list in $SCRIPT."
-        echo "You can skip all pre-commit hooks with --no-verify (not recommended)."
-        exit 1
-    fi
-done
@@ -1,202 +0,0 @@
-#!/usr/bin/env bash
-
-# git pre-commit hook that runs a black stylecheck.
-# Based on pre-commit-clang-format.
-
-##################################################################
-# SETTINGS
-# Set path to black binary.
-BLACK=`which black 2>/dev/null`
-BLACK_OPTIONS="-l 120"
-
-# Remove any older patches from previous commits. Set to true or false.
-DELETE_OLD_PATCHES=false
-
-# File types to parse.
-FILE_NAMES="SConstruct SCsub"
-FILE_EXTS=".py"
-
-# Use pygmentize instead of cat to parse diff with highlighting.
-# Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac)
-PYGMENTIZE=`which pygmentize 2>/dev/null`
-if [ ! -z "$PYGMENTIZE" ]; then
-    READER="pygmentize -l diff"
-else
-    READER=cat
-fi
-
-# Path to zenity
-ZENITY=`which zenity 2>/dev/null`
-
-# Path to xmessage
-XMSG=`which xmessage 2>/dev/null`
-
-# Path to powershell (Windows only)
-PWSH=`which powershell 2>/dev/null`
-
-##################################################################
-# There should be no need to change anything below this line.
-
-. "$(dirname -- "$0")/canonicalize_filename.sh"
-
-# exit on error
-set -e
-
-# check whether the given file matches any of the set extensions
-matches_name_or_extension() {
-    local filename=$(basename "$1")
-    local extension=".${filename##*.}"
-
-    for name in $FILE_NAMES; do [[ "$name" == "$filename" ]] && return 0; done
-    for ext in $FILE_EXTS; do [[ "$ext" == "$extension" ]] && return 0; done
-
-    return 1
-}
-
-# necessary check for initial commit
-if git rev-parse --verify HEAD >/dev/null 2>&1 ; then
-    against=HEAD
-else
-    # Initial commit: diff against an empty tree object
-    against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
-fi
-
-if [ ! -x "$BLACK" ] ; then
-    if [ ! -t 1 ] ; then
-        if [ -x "$ZENITY" ] ; then
-            $ZENITY --error --title="Error" --text="Error: black executable not found."
-            exit 1
-        elif [ -x "$XMSG" ] ; then
-            $XMSG -center -title "Error" "Error: black executable not found."
-            exit 1
-        elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then
-            winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")"
-            $PWSH -noprofile -executionpolicy bypass -file "$winmessage" -center -title "Error" --text "Error: black executable not found."
-            exit 1
-        fi
-    fi
-    printf "Error: black executable not found.\n"
-    printf "Set the correct path in $(canonicalize_filename "$0").\n"
-    exit 1
-fi
-
-# create a random filename to store our generated patch
-prefix="pre-commit-black"
-suffix="$(date +%s)"
-patch="/tmp/$prefix-$suffix.patch"
-
-# clean up any older black patches
-$DELETE_OLD_PATCHES && rm -f /tmp/$prefix*.patch
-
-# create one patch containing all changes to the files
-git diff-index --cached --diff-filter=ACMR --name-only $against -- | while read file;
-do
-    # ignore thirdparty files
-    if grep -q "thirdparty" <<< $file; then
-        continue;
-    fi
-
-    # ignore file if not one of the names or extensions we handle
-    if ! matches_name_or_extension "$file"; then
-        continue;
-    fi
-
-    # format our file with black, create a patch with diff and append it to our $patch
-    # The sed call is necessary to transform the patch from
-    # --- $file timestamp
-    # +++ $file timestamp
-    # to both lines working on the same file and having a/ and b/ prefix.
-    # Else it can not be applied with 'git apply'.
-    "$BLACK" "$BLACK_OPTIONS" --diff "$file" | \
-        sed -e "1s|--- |--- a/|" -e "2s|+++ |+++ b/|" >> "$patch"
-done
-
-# if no patch has been generated all is ok, clean up the file stub and exit
-if [ ! -s "$patch" ] ; then
-    printf "Files in this commit comply with the black formatter rules.\n"
-    rm -f "$patch"
-    exit 0
-fi
-
-# a patch has been created, notify the user and exit
-printf "\nThe following differences were found between the code to commit "
-printf "and the black formatter rules:\n\n"
-
-if [ -t 1 ] ; then
-    $READER "$patch"
-    printf "\n"
-    # Allows us to read user input below, assigns stdin to keyboard
-    exec < /dev/tty
-    terminal="1"
-else
-    cat "$patch"
-    printf "\n"
-    # Allows non zero zenity/powershell output
-    set +e
-    terminal="0"
-fi
-
-while true; do
-    if [ $terminal = "0" ] ; then
-        if [ -x "$ZENITY" ] ; then
-            choice=$($ZENITY --text-info --filename="$patch" --width=800 --height=600 --title="Do you want to apply that patch?" --ok-label="Apply" --cancel-label="Do not apply" --extra-button="Apply and stage")
-            if [ "$?" = "0" ] ; then
-                yn="Y"
-            else
-                if [ "$choice" = "Apply and stage" ] ; then
-                    yn="S"
-                else
-                    yn="N"
-                fi
-            fi
-        elif [ -x "$XMSG" ] ; then
-            $XMSG -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?"
-            choice=$?
-            if [ "$choice" = "100" ] ; then
-                yn="Y"
-            elif [ "$choice" = "200" ] ; then
-                yn="S"
-            else
-                yn="N"
-            fi
-        elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then
-            winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")"
-            $PWSH -noprofile -executionpolicy bypass -file "$winmessage" -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?"
-            choice=$?
-            if [ "$choice" = "100" ] ; then
-                yn="Y"
-            elif [ "$choice" = "200" ] ; then
-                yn="S"
-            else
-                yn="N"
-            fi
-        else
-            printf "Error: zenity, xmessage, or powershell executable not found.\n"
-            exit 1
-        fi
-    else
-        read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn
-    fi
-    case $yn in
-        [Yy] ) git apply $patch;
-               printf "The patch was applied. You can now stage the changes and commit again.\n\n";
-               break
-               ;;
-        [Nn] ) printf "\nYou can apply these changes with:\n git apply $patch\n";
-               printf "(may need to be called from the root directory of your repository)\n";
-               printf "Aborting commit. Apply changes and commit again or skip checking with";
|
|
||||||
printf " --no-verify (not recommended).\n\n";
|
|
||||||
break
|
|
||||||
;;
|
|
||||||
[Ss] ) git apply $patch;
|
|
||||||
git diff-index --cached --diff-filter=ACMR --name-only $against -- | while read file;
|
|
||||||
do git add $file;
|
|
||||||
done
|
|
||||||
printf "The patch was applied and the changed files staged. You can now commit.\n\n";
|
|
||||||
break
|
|
||||||
;;
|
|
||||||
* ) echo "Please answer yes or no."
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
exit 1 # we don't commit in any case
|
|
|
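For clarity, the header rewrite that the sed call in the hook above performs can be expressed as a small standalone Python sketch. This is purely illustrative and not part of the hook: it takes the first two header lines of a `black --diff` output and prefixes the paths with a/ and b/ so that `git apply` accepts the resulting patch.

    # Minimal sketch (hypothetical helper): rewrite the "---"/"+++" headers of a
    # unified diff produced by `black --diff file.py` so `git apply` accepts it.
    def add_git_prefixes(diff_text: str) -> str:
        lines = diff_text.splitlines(keepends=True)
        # Only the first two lines carry the file headers, mirroring the sed call.
        for i, line in enumerate(lines[:2]):
            if line.startswith("--- "):
                lines[i] = "--- a/" + line[len("--- "):]
            elif line.startswith("+++ "):
                lines[i] = "+++ b/" + line[len("+++ "):]
        return "".join(lines)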
@ -1,242 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# git pre-commit hook that runs a clang-format stylecheck.
|
|
||||||
# Features:
|
|
||||||
# - abort commit when commit does not comply with the style guidelines
|
|
||||||
# - create a patch of the proposed style changes
|
|
||||||
# Modifications for clang-format by rene.milk@wwu.de
|
|
||||||
|
|
||||||
# This file is part of a set of unofficial pre-commit hooks available
|
|
||||||
# at github.
|
|
||||||
# Link: https://github.com/githubbrowser/Pre-commit-hooks
|
|
||||||
# Contact: David Martin, david.martin.mailbox@googlemail.com
|
|
||||||
|
|
||||||
# Some quality of life modifications made for Godot Engine.
|
|
||||||
|
|
||||||
##################################################################
|
|
||||||
# SETTINGS
|
|
||||||
# Set path to clang-format binary.
|
|
||||||
CLANG_FORMAT=`which clang-format 2>/dev/null`
|
|
||||||
|
|
||||||
# Remove any older patches from previous commits. Set to true or false.
|
|
||||||
DELETE_OLD_PATCHES=false
|
|
||||||
|
|
||||||
# Only parse files with the extensions in FILE_EXTS. Set to true or false.
|
|
||||||
# If false every changed file in the commit will be parsed with clang-format.
|
|
||||||
# If true only files matching one of the extensions are parsed with clang-format.
|
|
||||||
PARSE_EXTS=true
|
|
||||||
|
|
||||||
# File types to parse. Only effective when PARSE_EXTS is true.
|
|
||||||
FILE_EXTS=".c .h .cpp .hpp .cc .hh .cxx .m .mm .inc .java .glsl"
|
|
||||||
|
|
||||||
# Use pygmentize instead of cat to parse diff with highlighting.
|
|
||||||
# Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac)
|
|
||||||
PYGMENTIZE=`which pygmentize 2>/dev/null`
|
|
||||||
if [ ! -z "$PYGMENTIZE" ]; then
|
|
||||||
READER="pygmentize -l diff"
|
|
||||||
else
|
|
||||||
READER=cat
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Path to zenity
|
|
||||||
ZENITY=`which zenity 2>/dev/null`
|
|
||||||
|
|
||||||
# Path to xmessage
|
|
||||||
XMSG=`which xmessage 2>/dev/null`
|
|
||||||
|
|
||||||
# Path to powershell (Windows only)
|
|
||||||
PWSH=`which powershell 2>/dev/null`
|
|
||||||
|
|
||||||
##################################################################
|
|
||||||
# There should be no need to change anything below this line.
|
|
||||||
|
|
||||||
. "$(dirname -- "$0")/canonicalize_filename.sh"
|
|
||||||
|
|
||||||
# exit on error
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# check whether the given file matches any of the set extensions
|
|
||||||
matches_extension() {
|
|
||||||
local filename=$(basename "$1")
|
|
||||||
local extension=".${filename##*.}"
|
|
||||||
local ext
|
|
||||||
|
|
||||||
for ext in $FILE_EXTS; do [[ "$ext" == "$extension" ]] && return 0; done
|
|
||||||
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
# necessary check for initial commit
|
|
||||||
if git rev-parse --verify HEAD >/dev/null 2>&1 ; then
|
|
||||||
against=HEAD
|
|
||||||
else
|
|
||||||
# Initial commit: diff against an empty tree object
|
|
||||||
against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
|
|
||||||
fi
|
|
||||||
|
|
||||||
# To get consistent formatting, we recommend contributors to use the same
|
|
||||||
# clang-format version as CI.
|
|
||||||
RECOMMENDED_CLANG_FORMAT_MAJOR_MIN="12"
|
|
||||||
RECOMMENDED_CLANG_FORMAT_MAJOR_MAX="13"
|
|
||||||
|
|
||||||
if [ ! -x "$CLANG_FORMAT" ] ; then
|
|
||||||
message="Error: clang-format executable not found. Please install clang-format $RECOMMENDED_CLANG_FORMAT_MAJOR_MAX."
|
|
||||||
|
|
||||||
if [ ! -t 1 ] ; then
|
|
||||||
if [ -x "$ZENITY" ] ; then
|
|
||||||
$ZENITY --error --title="Error" --text="$message"
|
|
||||||
exit 1
|
|
||||||
elif [ -x "$XMSG" ] ; then
|
|
||||||
$XMSG -center -title "Error" "$message"
|
|
||||||
exit 1
|
|
||||||
elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then
|
|
||||||
winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")"
|
|
||||||
$PWSH -noprofile -executionpolicy bypass -file "$winmessage" -center -title "Error" --text "$message"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
printf "$message\n"
|
|
||||||
printf "Set the correct path in $(canonicalize_filename "$0").\n"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# The returned string can be inconsistent depending on where clang-format comes from.
|
|
||||||
# Example output strings reported by `clang-format --version`:
|
|
||||||
# - Ubuntu: "Ubuntu clang-format version 11.0.0-2"
|
|
||||||
# - Fedora: "clang-format version 11.0.0 (Fedora 11.0.0-2.fc33)"
|
|
||||||
CLANG_FORMAT_VERSION="$(clang-format --version | sed "s/[^0-9\.]*\([0-9\.]*\).*/\1/")"
|
|
||||||
CLANG_FORMAT_MAJOR="$(echo "$CLANG_FORMAT_VERSION" | cut -d. -f1)"
|
|
||||||
|
|
||||||
if [[ "$CLANG_FORMAT_MAJOR" -lt "$RECOMMENDED_CLANG_FORMAT_MAJOR_MIN" || "$CLANG_FORMAT_MAJOR" -gt "$RECOMMENDED_CLANG_FORMAT_MAJOR_MAX" ]]; then
|
|
||||||
echo "Warning: Your clang-format binary is the wrong version ($CLANG_FORMAT_VERSION, expected between $RECOMMENDED_CLANG_FORMAT_MAJOR_MIN and $RECOMMENDED_CLANG_FORMAT_MAJOR_MAX)."
|
|
||||||
echo " Consider upgrading or downgrading clang-format as formatting may not be applied correctly."
|
|
||||||
fi
|
|
||||||
|
|
||||||
# create a random filename to store our generated patch
|
|
||||||
prefix="pre-commit-clang-format"
|
|
||||||
suffix="$(date +%s)"
|
|
||||||
patch="/tmp/$prefix-$suffix.patch"
|
|
||||||
|
|
||||||
# clean up any older clang-format patches
|
|
||||||
$DELETE_OLD_PATCHES && rm -f /tmp/$prefix*.patch
|
|
||||||
|
|
||||||
# create one patch containing all changes to the files
|
|
||||||
git diff-index --cached --diff-filter=ACMR --name-only $against -- | while read file;
|
|
||||||
do
|
|
||||||
# ignore thirdparty files
|
|
||||||
if grep -q "thirdparty" <<< $file; then
|
|
||||||
continue;
|
|
||||||
fi
|
|
||||||
if grep -q "platform/android/java/lib/src/com" <<< $file; then
|
|
||||||
continue;
|
|
||||||
fi
|
|
||||||
if grep -q "\-so_wrap." <<< $file; then
|
|
||||||
continue;
|
|
||||||
fi
|
|
||||||
|
|
||||||
# ignore file if we do check for file extensions and the file
|
|
||||||
# does not match any of the extensions specified in $FILE_EXTS
|
|
||||||
if $PARSE_EXTS && ! matches_extension "$file"; then
|
|
||||||
continue;
|
|
||||||
fi
|
|
||||||
|
|
||||||
# clang-format our sourcefile, create a patch with diff and append it to our $patch
|
|
||||||
# The sed call is necessary to transform the patch from
|
|
||||||
# --- $file timestamp
|
|
||||||
# +++ - timestamp
|
|
||||||
# to both lines working on the same file and having a/ and b/ prefix.
|
|
||||||
# Else it can not be applied with 'git apply'.
|
|
||||||
"$CLANG_FORMAT" -style=file "$file" --Wno-error=unknown | \
|
|
||||||
diff -u "$file" - | \
|
|
||||||
sed -e "1s|--- |--- a/|" -e "2s|+++ -|+++ b/$file|" >> "$patch"
|
|
||||||
done
|
|
||||||
|
|
||||||
# if no patch has been generated all is ok, clean up the file stub and exit
|
|
||||||
if [ ! -s "$patch" ] ; then
|
|
||||||
printf "Files in this commit comply with the clang-format rules.\n"
|
|
||||||
rm -f "$patch"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# a patch has been created, notify the user and exit
|
|
||||||
printf "\nThe following differences were found between the code to commit "
|
|
||||||
printf "and the clang-format rules:\n\n"
|
|
||||||
|
|
||||||
if [ -t 1 ] ; then
|
|
||||||
$READER "$patch"
|
|
||||||
printf "\n"
|
|
||||||
# Allows us to read user input below, assigns stdin to keyboard
|
|
||||||
exec < /dev/tty
|
|
||||||
terminal="1"
|
|
||||||
else
|
|
||||||
cat "$patch"
|
|
||||||
printf "\n"
|
|
||||||
# Allows non zero zenity/powershell output
|
|
||||||
set +e
|
|
||||||
terminal="0"
|
|
||||||
fi
|
|
||||||
|
|
||||||
while true; do
|
|
||||||
if [ $terminal = "0" ] ; then
|
|
||||||
if [ -x "$ZENITY" ] ; then
|
|
||||||
choice=$($ZENITY --text-info --filename="$patch" --width=800 --height=600 --title="Do you want to apply that patch?" --ok-label="Apply" --cancel-label="Do not apply" --extra-button="Apply and stage")
|
|
||||||
if [ "$?" = "0" ] ; then
|
|
||||||
yn="Y"
|
|
||||||
else
|
|
||||||
if [ "$choice" = "Apply and stage" ] ; then
|
|
||||||
yn="S"
|
|
||||||
else
|
|
||||||
yn="N"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
elif [ -x "$XMSG" ] ; then
|
|
||||||
$XMSG -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?"
|
|
||||||
choice=$?
|
|
||||||
if [ "$choice" = "100" ] ; then
|
|
||||||
yn="Y"
|
|
||||||
elif [ "$choice" = "200" ] ; then
|
|
||||||
yn="S"
|
|
||||||
else
|
|
||||||
yn="N"
|
|
||||||
fi
|
|
||||||
elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then
|
|
||||||
winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")"
|
|
||||||
$PWSH -noprofile -executionpolicy bypass -file "$winmessage" -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?"
|
|
||||||
choice=$?
|
|
||||||
if [ "$choice" = "100" ] ; then
|
|
||||||
yn="Y"
|
|
||||||
elif [ "$choice" = "200" ] ; then
|
|
||||||
yn="S"
|
|
||||||
else
|
|
||||||
yn="N"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
printf "Error: zenity, xmessage, or powershell executable not found.\n"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn
|
|
||||||
fi
|
|
||||||
case $yn in
|
|
||||||
[Yy] ) git apply $patch;
|
|
||||||
printf "The patch was applied. You can now stage the changes and commit again.\n\n";
|
|
||||||
break
|
|
||||||
;;
|
|
||||||
[Nn] ) printf "\nYou can apply these changes with:\n git apply $patch\n";
|
|
||||||
printf "(may need to be called from the root directory of your repository)\n";
|
|
||||||
printf "Aborting commit. Apply changes and commit again or skip checking with";
|
|
||||||
printf " --no-verify (not recommended).\n\n";
|
|
||||||
break
|
|
||||||
;;
|
|
||||||
[Ss] ) git apply $patch;
|
|
||||||
git diff-index --cached --diff-filter=ACMR --name-only $against -- | while read file;
|
|
||||||
do git add $file;
|
|
||||||
done
|
|
||||||
printf "The patch was applied and the changed files staged. You can now commit.\n\n";
|
|
||||||
break
|
|
||||||
;;
|
|
||||||
* ) echo "Please answer yes or no."
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
exit 1 # we don't commit in any case
|
|
|
@ -1,103 +0,0 @@
|
||||||
Param (
|
|
||||||
[string]$file = "",
|
|
||||||
[string]$text = "",
|
|
||||||
[string]$buttons = "OK:0",
|
|
||||||
[string]$default = "",
|
|
||||||
[switch]$nearmouse = $false,
|
|
||||||
[switch]$center = $false,
|
|
||||||
[string]$geometry = "",
|
|
||||||
[int32]$timeout = 0,
|
|
||||||
[string]$title = "Message"
|
|
||||||
)
|
|
||||||
Add-Type -assembly System.Windows.Forms
|
|
||||||
|
|
||||||
$global:Result = 0
|
|
||||||
|
|
||||||
$main_form = New-Object System.Windows.Forms.Form
|
|
||||||
$main_form.Text = $title
|
|
||||||
|
|
||||||
$geometry_data = $geometry.Split("+")
|
|
||||||
if ($geometry_data.Length -ge 1) {
|
|
||||||
$size_data = $geometry_data[0].Split("x")
|
|
||||||
if ($size_data.Length -eq 2) {
|
|
||||||
$main_form.Width = $size_data[0]
|
|
||||||
$main_form.Height = $size_data[1]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if ($geometry_data.Length -eq 3) {
|
|
||||||
$main_form.StartPosition = [System.Windows.Forms.FormStartPosition]::Manual
|
|
||||||
$main_form.Location = New-Object System.Drawing.Point($geometry_data[1], $geometry_data[2])
|
|
||||||
}
|
|
||||||
if ($nearmouse) {
|
|
||||||
$main_form.StartPosition = [System.Windows.Forms.FormStartPosition]::Manual
|
|
||||||
$main_form.Location = System.Windows.Forms.Cursor.Position
|
|
||||||
}
|
|
||||||
if ($center) {
|
|
||||||
$main_form.StartPosition = [System.Windows.Forms.FormStartPosition]::CenterScreen
|
|
||||||
}
|
|
||||||
|
|
||||||
$main_form.SuspendLayout()
|
|
||||||
|
|
||||||
$button_panel = New-Object System.Windows.Forms.FlowLayoutPanel
|
|
||||||
$button_panel.SuspendLayout()
|
|
||||||
$button_panel.FlowDirection = [System.Windows.Forms.FlowDirection]::RightToLeft
|
|
||||||
$button_panel.Dock = [System.Windows.Forms.DockStyle]::Bottom
|
|
||||||
$button_panel.Autosize = $true
|
|
||||||
|
|
||||||
if ($file -ne "") {
|
|
||||||
$text = [IO.File]::ReadAllText($file).replace("`n", "`r`n")
|
|
||||||
}
|
|
||||||
|
|
||||||
if ($text -ne "") {
|
|
||||||
$text_box = New-Object System.Windows.Forms.TextBox
|
|
||||||
$text_box.Multiline = $true
|
|
||||||
$text_box.ReadOnly = $true
|
|
||||||
$text_box.Autosize = $true
|
|
||||||
$text_box.Text = $text
|
|
||||||
$text_box.Select(0,0)
|
|
||||||
$text_box.Dock = [System.Windows.Forms.DockStyle]::Fill
|
|
||||||
$main_form.Controls.Add($text_box)
|
|
||||||
}
|
|
||||||
|
|
||||||
$buttons_array = $buttons.Split(",")
|
|
||||||
foreach ($button in $buttons_array) {
|
|
||||||
$button_data = $button.Split(":")
|
|
||||||
$button_ctl = New-Object System.Windows.Forms.Button
|
|
||||||
if ($button_data.Length -eq 2) {
|
|
||||||
$button_ctl.Tag = $button_data[1]
|
|
||||||
} else {
|
|
||||||
$button_ctl.Tag = 100 + $buttons_array.IndexOf($button)
|
|
||||||
}
|
|
||||||
if ($default -eq $button_data[0]) {
|
|
||||||
$main_form.AcceptButton = $button_ctl
|
|
||||||
}
|
|
||||||
$button_ctl.Autosize = $true
|
|
||||||
$button_ctl.Text = $button_data[0]
|
|
||||||
$button_ctl.Add_Click(
|
|
||||||
{
|
|
||||||
Param($sender)
|
|
||||||
$global:Result = $sender.Tag
|
|
||||||
$main_form.Close()
|
|
||||||
}
|
|
||||||
)
|
|
||||||
$button_panel.Controls.Add($button_ctl)
|
|
||||||
}
|
|
||||||
$main_form.Controls.Add($button_panel)
|
|
||||||
|
|
||||||
$button_panel.ResumeLayout($false)
|
|
||||||
$main_form.ResumeLayout($false)
|
|
||||||
|
|
||||||
if ($timeout -gt 0) {
|
|
||||||
$timer = New-Object System.Windows.Forms.Timer
|
|
||||||
$timer.Add_Tick(
|
|
||||||
{
|
|
||||||
$global:Result = 0
|
|
||||||
$main_form.Close()
|
|
||||||
}
|
|
||||||
)
|
|
||||||
$timer.Interval = $timeout
|
|
||||||
$timer.Start()
|
|
||||||
}
|
|
||||||
$dlg_res = $main_form.ShowDialog()
|
|
||||||
|
|
||||||
[Environment]::Exit($global:Result)
|
|
|
@@ -1,25 +0,0 @@
#!/usr/bin/env bash

# This script runs black on all Python files in the repo.

set -uo pipefail

# Apply black.
echo -e "Formatting Python files..."
PY_FILES=$(git ls-files -- '*SConstruct' '*SCsub' '*.py' ':!:.git/*' ':!:thirdparty/*')
black -l 120 $PY_FILES

diff=$(git diff --color)

# If no patch has been generated all is OK, clean up, and exit.
if [ -z "$diff" ] ; then
    printf "Files in this commit comply with the black style rules.\n"
    exit 0
fi

# A patch has been created, notify the user, clean up, and exit.
printf "\n*** The following differences were found between the code "
printf "and the formatting rules:\n\n"
echo "$diff"
printf "\n*** Aborting, please fix your commit(s) with 'git commit --amend' or 'git rebase -i <hash>'\n"
exit 1
@@ -9,8 +9,8 @@ if len(sys.argv) < 2:
 fname = sys.argv[1]
 
-fileread = open(fname.strip(), "r")
-file_contents = fileread.read()
+with open(fname.strip(), "r", encoding="utf-8") as fileread:
+    file_contents = fileread.read()
 
 # If find "ERROR: AddressSanitizer:", then happens invalid read or write
 # This is critical bug, so we need to fix this as fast as possible
@@ -25,6 +25,8 @@ if (
     file_contents.find("Program crashed with signal") != -1
     or file_contents.find("Dumping the backtrace") != -1
     or file_contents.find("Segmentation fault (core dumped)") != -1
+    or file_contents.find("Aborted (core dumped)") != -1
+    or file_contents.find("terminate called without an active exception") != -1
 ):
     print("FATAL ERROR: Godot has been crashed.")
     sys.exit(52)
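The two new markers extend the same substring search used for the existing ones. As an illustration only (not the script itself), the same checks could be written as a loop over a list of crash signatures:

    # Illustrative sketch: the crash detection above as a data-driven check.
    CRASH_MARKERS = [
        "Program crashed with signal",
        "Dumping the backtrace",
        "Segmentation fault (core dumped)",
        "Aborted (core dumped)",
        "terminate called without an active exception",
    ]

    def log_indicates_crash(file_contents: str) -> bool:
        # True if any known crash signature appears anywhere in the CI log.
        return any(marker in file_contents for marker in CRASH_MARKERS)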
@@ -1,12 +1,12 @@
 #!/usr/bin/env python
 
-import os, sys
+import os
+import sys
 from pathlib import Path
 
 sys.path.insert(1, os.path.join(os.path.dirname(__file__), "..", ".."))
 
-from binding_generator import get_file_list, generate_bindings
+from binding_generator import generate_bindings, get_file_list
 
 api_filepath = "gdextension/extension_api.json"
 bits = "64"
@@ -1,38 +0,0 @@
#!/usr/bin/env bash

# This script runs clang-format and fixes copyright headers on all relevant files in the repo.
# This is the primary script responsible for fixing style violations.

set -uo pipefail

# Loops through all code files tracked by Git.
git ls-files -- '*.c' '*.h' '*.cpp' '*.hpp' '*.cc' '*.hh' '*.cxx' '*.m' '*.mm' '*.inc' |
while read -r f; do
    # Run clang-format.
    clang-format --Wno-error=unknown -i "$f"

    # Fix copyright headers, but not all files get them.
    if [[ "$f" == *"inc" ]]; then
        continue
    elif [[ "$f" == *"glsl" ]]; then
        continue
    elif [[ "$f" == "test/"* ]]; then
        continue
    fi

    python misc/scripts/copyright_headers.py "$f"
done

diff=$(git diff --color)

# If no patch has been generated all is OK, clean up, and exit.
if [ -z "$diff" ] ; then
    printf "Files in this commit comply with the clang-tidy style rules.\n"
    exit 0
fi

# A patch has been created, notify the user, clean up, and exit.
printf "\n*** The following changes have been made to comply with the formatting rules:\n\n"
echo "$diff"
printf "\n*** Please fix your commit(s) with 'git commit --amend' or 'git rebase -i <hash>'\n"
exit 1
@@ -1,5 +0,0 @@
#!/bin/sh
SKIP_LIST="./thirdparty,*.gen.*,*.po,*.pot,package-lock.json,./core/string/locales.h,./DONORS.md,./misc/scripts/codespell.sh"
IGNORE_LIST="ba,childs,curvelinear,expct,fave,findn,gird,inout,lod,nd,numer,ois,ro,statics,te,varn"

codespell -w -q 3 -S "${SKIP_LIST}" -L "${IGNORE_LIST}"
@ -1,6 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
header = """\
|
header = """\
|
||||||
|
@ -35,61 +36,61 @@ header = """\
|
||||||
/**************************************************************************/
|
/**************************************************************************/
|
||||||
"""
|
"""
|
||||||
|
|
||||||
fname = sys.argv[1]
|
if len(sys.argv) < 2:
|
||||||
|
print("Invalid usage of copyright_headers.py, it should be called with a path to one or multiple files.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
# Handle replacing $filename with actual filename and keep alignment
|
for f in sys.argv[1:]:
|
||||||
fsingle = fname.strip()
|
fname = f
|
||||||
if fsingle.find("/") != -1:
|
|
||||||
fsingle = fsingle[fsingle.rfind("/") + 1 :]
|
|
||||||
rep_fl = "$filename"
|
|
||||||
rep_fi = fsingle
|
|
||||||
len_fl = len(rep_fl)
|
|
||||||
len_fi = len(rep_fi)
|
|
||||||
# Pad with spaces to keep alignment
|
|
||||||
if len_fi < len_fl:
|
|
||||||
for x in range(len_fl - len_fi):
|
|
||||||
rep_fi += " "
|
|
||||||
elif len_fl < len_fi:
|
|
||||||
for x in range(len_fi - len_fl):
|
|
||||||
rep_fl += " "
|
|
||||||
if header.find(rep_fl) != -1:
|
|
||||||
text = header.replace(rep_fl, rep_fi)
|
|
||||||
else:
|
|
||||||
text = header.replace("$filename", fsingle)
|
|
||||||
text += "\n"
|
|
||||||
|
|
||||||
# We now have the proper header, so we want to ignore the one in the original file
|
# Handle replacing $filename with actual filename and keep alignment
|
||||||
# and potentially empty lines and badly formatted lines, while keeping comments that
|
fsingle = os.path.basename(fname.strip())
|
||||||
# come after the header, and then keep everything non-header unchanged.
|
rep_fl = "$filename"
|
||||||
# To do so, we skip empty lines that may be at the top in a first pass.
|
rep_fi = fsingle
|
||||||
# In a second pass, we skip all consecutive comment lines starting with "/*",
|
len_fl = len(rep_fl)
|
||||||
# then we can append the rest (step 2).
|
len_fi = len(rep_fi)
|
||||||
|
# Pad with spaces to keep alignment
|
||||||
|
if len_fi < len_fl:
|
||||||
|
for x in range(len_fl - len_fi):
|
||||||
|
rep_fi += " "
|
||||||
|
elif len_fl < len_fi:
|
||||||
|
for x in range(len_fi - len_fl):
|
||||||
|
rep_fl += " "
|
||||||
|
if header.find(rep_fl) != -1:
|
||||||
|
text = header.replace(rep_fl, rep_fi)
|
||||||
|
else:
|
||||||
|
text = header.replace("$filename", fsingle)
|
||||||
|
text += "\n"
|
||||||
|
|
||||||
fileread = open(fname.strip(), "r")
|
# We now have the proper header, so we want to ignore the one in the original file
|
||||||
line = fileread.readline()
|
# and potentially empty lines and badly formatted lines, while keeping comments that
|
||||||
header_done = False
|
# come after the header, and then keep everything non-header unchanged.
|
||||||
|
# To do so, we skip empty lines that may be at the top in a first pass.
|
||||||
|
# In a second pass, we skip all consecutive comment lines starting with "/*",
|
||||||
|
# then we can append the rest (step 2).
|
||||||
|
|
||||||
while line.strip() == "": # Skip empty lines at the top
|
with open(fname.strip(), "r", encoding="utf-8") as fileread:
|
||||||
line = fileread.readline()
|
line = fileread.readline()
|
||||||
|
header_done = False
|
||||||
|
|
||||||
if line.find("/**********") == -1: # Godot header starts this way
|
while line.strip() == "" and line != "": # Skip empty lines at the top
|
||||||
# Maybe starting with a non-Godot comment, abort header magic
|
line = fileread.readline()
|
||||||
header_done = True
|
|
||||||
|
|
||||||
while not header_done: # Handle header now
|
if line.find("/**********") == -1: # Godot header starts this way
|
||||||
if line.find("/*") != 0: # No more starting with a comment
|
# Maybe starting with a non-Godot comment, abort header magic
|
||||||
header_done = True
|
header_done = True
|
||||||
if line.strip() != "":
|
|
||||||
|
while not header_done: # Handle header now
|
||||||
|
if line.find("/*") != 0: # No more starting with a comment
|
||||||
|
header_done = True
|
||||||
|
if line.strip() != "":
|
||||||
|
text += line
|
||||||
|
line = fileread.readline()
|
||||||
|
|
||||||
|
while line != "": # Dump everything until EOF
|
||||||
text += line
|
text += line
|
||||||
line = fileread.readline()
|
line = fileread.readline()
|
||||||
|
|
||||||
while line != "": # Dump everything until EOF
|
# Write
|
||||||
text += line
|
with open(fname.strip(), "w", encoding="utf-8", newline="\n") as filewrite:
|
||||||
line = fileread.readline()
|
filewrite.write(text)
|
||||||
|
|
||||||
fileread.close()
|
|
||||||
|
|
||||||
# Write
|
|
||||||
filewrite = open(fname.strip(), "w")
|
|
||||||
filewrite.write(text)
|
|
||||||
filewrite.close()
|
|
||||||
|
|
|
@@ -0,0 +1,46 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys

if len(sys.argv) < 2:
    print("Invalid usage of file_format.py, it should be called with a path to one or multiple files.")
    sys.exit(1)

BOM = b"\xef\xbb\xbf"

changed = []
invalid = []

for file in sys.argv[1:]:
    try:
        with open(file, "rt", encoding="utf-8") as f:
            original = f.read()
    except UnicodeDecodeError:
        invalid.append(file)
        continue

    if original == "":
        continue

    revamp = "\n".join([line.rstrip("\n\r\t ") for line in original.splitlines(True)]).rstrip("\n") + "\n"

    new_raw = revamp.encode(encoding="utf-8")
    if new_raw.startswith(BOM):
        new_raw = new_raw[len(BOM) :]

    with open(file, "rb") as f:
        old_raw = f.read()

    if old_raw != new_raw:
        changed.append(file)
        with open(file, "wb") as f:
            f.write(new_raw)

if changed:
    for file in changed:
        print(f"FIXED: {file}")
if invalid:
    for file in invalid:
        print(f"REQUIRES MANUAL CHANGES: {file}")
    sys.exit(1)
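The normalization in the `revamp` expression does three things: it strips trailing whitespace from every line, collapses trailing blank lines into a single final newline, and (together with the `BOM` check) drops a UTF-8 byte-order mark. A small illustration of the expected behaviour on a throwaway input string, assuming nothing beyond what the script itself does:

    # Illustration only: what file_format.py's normalization does to a sample string.
    sample = "\ufefffoo \t\nbar\r\n\n\n"
    revamp = "\n".join([line.rstrip("\n\r\t ") for line in sample.splitlines(True)]).rstrip("\n") + "\n"
    new_raw = revamp.encode("utf-8")
    BOM = b"\xef\xbb\xbf"
    if new_raw.startswith(BOM):
        new_raw = new_raw[len(BOM):]
    # Trailing whitespace, CRLF endings, extra blank lines, and the BOM are all gone.
    assert new_raw == b"foo\nbar\n"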
@@ -1,41 +0,0 @@
#!/usr/bin/env bash

# This script ensures proper POSIX text file formatting and a few other things.
# This is supplementary to clang_format.sh and black_format.sh, but should be
# run before them.

# We need dos2unix and recode.
if [ ! -x "$(command -v dos2unix)" -o ! -x "$(command -v recode)" ]; then
    printf "Install 'dos2unix' and 'recode' to use this script.\n"
fi

set -uo pipefail
IFS=$'\n\t'

# Loops through all text files tracked by Git.
git grep -zIl '' |
while IFS= read -rd '' f; do
    # Ensure that files are UTF-8 formatted.
    recode UTF-8 "$f" 2> /dev/null
    # Ensure that files have LF line endings and do not contain a BOM.
    dos2unix "$f" 2> /dev/null
    # Remove trailing space characters and ensures that files end
    # with newline characters. -l option handles newlines conveniently.
    perl -i -ple 's/\s*$//g' "$f"
done

diff=$(git diff --color)

# If no patch has been generated all is OK, clean up, and exit.
if [ -z "$diff" ] ; then
    printf "Files in this commit comply with the formatting rules.\n"
    rm -f patch.patch
    exit 0
fi

# A patch has been created, notify the user, clean up, and exit.
printf "\n*** The following differences were found between the code "
printf "and the formatting rules:\n\n"
echo "$diff"
printf "\n*** Aborting, please fix your commit(s) with 'git commit --amend' or 'git rebase -i <hash>'\n"
exit 1
@ -0,0 +1,127 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
if len(sys.argv) < 2:
|
||||||
|
print("Invalid usage of header_guards.py, it should be called with a path to one or multiple files.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
HEADER_CHECK_OFFSET = 30
|
||||||
|
HEADER_BEGIN_OFFSET = 31
|
||||||
|
HEADER_END_OFFSET = -1
|
||||||
|
|
||||||
|
changed = []
|
||||||
|
invalid = []
|
||||||
|
|
||||||
|
for file in sys.argv[1:]:
|
||||||
|
with open(file, "rt", encoding="utf-8", newline="\n") as f:
|
||||||
|
lines = f.readlines()
|
||||||
|
|
||||||
|
if len(lines) <= HEADER_BEGIN_OFFSET:
|
||||||
|
continue # Most likely a dummy file.
|
||||||
|
|
||||||
|
if lines[HEADER_CHECK_OFFSET].startswith("#import"):
|
||||||
|
continue # Early catch obj-c file.
|
||||||
|
|
||||||
|
name = f"GODOT_{Path(file).name}".upper().replace(".", "_").replace("-", "_").replace(" ", "_")
|
||||||
|
|
||||||
|
HEADER_CHECK = f"#ifndef {name}\n"
|
||||||
|
HEADER_BEGIN = f"#define {name}\n"
|
||||||
|
HEADER_END = f"#endif // {name}\n"
|
||||||
|
|
||||||
|
if (
|
||||||
|
lines[HEADER_CHECK_OFFSET] == HEADER_CHECK
|
||||||
|
and lines[HEADER_BEGIN_OFFSET] == HEADER_BEGIN
|
||||||
|
and lines[HEADER_END_OFFSET] == HEADER_END
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Guards might exist but with the wrong names.
|
||||||
|
if (
|
||||||
|
lines[HEADER_CHECK_OFFSET].startswith("#ifndef")
|
||||||
|
and lines[HEADER_BEGIN_OFFSET].startswith("#define")
|
||||||
|
and lines[HEADER_END_OFFSET].startswith("#endif")
|
||||||
|
):
|
||||||
|
lines[HEADER_CHECK_OFFSET] = HEADER_CHECK
|
||||||
|
lines[HEADER_BEGIN_OFFSET] = HEADER_BEGIN
|
||||||
|
lines[HEADER_END_OFFSET] = HEADER_END
|
||||||
|
with open(file, "wt", encoding="utf-8", newline="\n") as f:
|
||||||
|
f.writelines(lines)
|
||||||
|
changed.append(file)
|
||||||
|
continue
|
||||||
|
|
||||||
|
header_check = -1
|
||||||
|
header_begin = -1
|
||||||
|
header_end = -1
|
||||||
|
pragma_once = -1
|
||||||
|
objc = False
|
||||||
|
|
||||||
|
for idx, line in enumerate(lines):
|
||||||
|
if not line.startswith("#"):
|
||||||
|
continue
|
||||||
|
elif line.startswith("#ifndef") and header_check == -1:
|
||||||
|
header_check = idx
|
||||||
|
elif line.startswith("#define") and header_begin == -1:
|
||||||
|
header_begin = idx
|
||||||
|
elif line.startswith("#endif") and header_end == -1:
|
||||||
|
header_end = idx
|
||||||
|
elif line.startswith("#pragma once"):
|
||||||
|
pragma_once = idx
|
||||||
|
break
|
||||||
|
elif line.startswith("#import"):
|
||||||
|
objc = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if objc:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if pragma_once != -1:
|
||||||
|
lines.pop(pragma_once)
|
||||||
|
lines.insert(HEADER_CHECK_OFFSET, HEADER_CHECK)
|
||||||
|
lines.insert(HEADER_BEGIN_OFFSET, HEADER_BEGIN)
|
||||||
|
lines.append("\n")
|
||||||
|
lines.append(HEADER_END)
|
||||||
|
with open(file, "wt", encoding="utf-8", newline="\n") as f:
|
||||||
|
f.writelines(lines)
|
||||||
|
changed.append(file)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if header_check == -1 and header_begin == -1 and header_end == -1:
|
||||||
|
# Guards simply didn't exist
|
||||||
|
lines.insert(HEADER_CHECK_OFFSET, HEADER_CHECK)
|
||||||
|
lines.insert(HEADER_BEGIN_OFFSET, HEADER_BEGIN)
|
||||||
|
lines.append("\n")
|
||||||
|
lines.append(HEADER_END)
|
||||||
|
with open(file, "wt", encoding="utf-8", newline="\n") as f:
|
||||||
|
f.writelines(lines)
|
||||||
|
changed.append(file)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if header_check != -1 and header_begin != -1 and header_end != -1:
|
||||||
|
# All prepends "found", see if we can salvage this.
|
||||||
|
if header_check == header_begin - 1 and header_begin < header_end:
|
||||||
|
lines.pop(header_check)
|
||||||
|
lines.pop(header_begin - 1)
|
||||||
|
lines.pop(header_end - 2)
|
||||||
|
if lines[header_end - 3] == "\n":
|
||||||
|
lines.pop(header_end - 3)
|
||||||
|
lines.insert(HEADER_CHECK_OFFSET, HEADER_CHECK)
|
||||||
|
lines.insert(HEADER_BEGIN_OFFSET, HEADER_BEGIN)
|
||||||
|
lines.append("\n")
|
||||||
|
lines.append(HEADER_END)
|
||||||
|
with open(file, "wt", encoding="utf-8", newline="\n") as f:
|
||||||
|
f.writelines(lines)
|
||||||
|
changed.append(file)
|
||||||
|
continue
|
||||||
|
|
||||||
|
invalid.append(file)
|
||||||
|
|
||||||
|
if changed:
|
||||||
|
for file in changed:
|
||||||
|
print(f"FIXED: {file}")
|
||||||
|
if invalid:
|
||||||
|
for file in invalid:
|
||||||
|
print(f"REQUIRES MANUAL CHANGES: {file}")
|
||||||
|
sys.exit(1)
|
|
@ -1,60 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
if [ ! -f "SConstruct" ]; then
|
|
||||||
echo "Warning: This script is intended to be run from the root of the Godot repository."
|
|
||||||
echo "Some of the paths checks may not work as intended from a different folder."
|
|
||||||
fi
|
|
||||||
|
|
||||||
files_invalid_guard=""
|
|
||||||
|
|
||||||
for file in $(find . -name "*.hpp" -print); do
|
|
||||||
# Skip generated files.
|
|
||||||
if [[ "$file" == "./gen/"* || "$file" == "./include/gen/"* ]]; then continue; fi
|
|
||||||
# Skip the test project.
|
|
||||||
if [[ "$file" == "./test/"* ]]; then continue; fi
|
|
||||||
|
|
||||||
bname=$(basename $file .hpp)
|
|
||||||
|
|
||||||
# NOTE: The "GODOT_CPP_" prefix is already used by the generated
|
|
||||||
# bindings, so we can't use that. We'll use "GODOT_" instead.
|
|
||||||
prefix="GODOT_"
|
|
||||||
|
|
||||||
# ^^ is bash builtin for UPPERCASE.
|
|
||||||
guard="${prefix}${bname^^}_HPP"
|
|
||||||
|
|
||||||
# Replaces guards to use computed name.
|
|
||||||
# We also add some \n to make sure there's a proper separation.
|
|
||||||
sed -i $file -e "0,/ifndef/s/#ifndef.*/\n#ifndef $guard/"
|
|
||||||
sed -i $file -e "0,/define/s/#define.*/#define $guard\n/"
|
|
||||||
sed -i $file -e "$ s/#endif.*/\n#endif \/\/ $guard/"
|
|
||||||
# Removes redundant \n added before, if they weren't needed.
|
|
||||||
sed -i $file -e "/^$/N;/^\n$/D"
|
|
||||||
|
|
||||||
# Check that first ifndef (should be header guard) is at the expected position.
|
|
||||||
# If not it can mean we have some code before the guard that should be after.
|
|
||||||
# "31" is the expected line with the copyright header.
|
|
||||||
first_ifndef=$(grep -n -m 1 "ifndef" $file | sed 's/\([0-9]*\).*/\1/')
|
|
||||||
if [[ "$first_ifndef" != "31" ]]; then
|
|
||||||
files_invalid_guard+="$file\n"
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ ! -z "$files_invalid_guard" ]]; then
|
|
||||||
echo -e "The following files were found to have potentially invalid header guard:\n"
|
|
||||||
echo -e "$files_invalid_guard"
|
|
||||||
fi
|
|
||||||
|
|
||||||
diff=$(git diff --color)
|
|
||||||
|
|
||||||
# If no diff has been generated all is OK, clean up, and exit.
|
|
||||||
if [ -z "$diff" ] ; then
|
|
||||||
printf "Files in this commit comply with the header guards formatting rules.\n"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# A diff has been created, notify the user, clean up, and exit.
|
|
||||||
printf "\n*** The following differences were found between the code "
|
|
||||||
printf "and the header guards formatting rules:\n\n"
|
|
||||||
echo "$diff"
|
|
||||||
printf "\n*** Aborting, please fix your commit(s) with 'git commit --amend' or 'git rebase -i <hash>'\n"
|
|
||||||
exit 1
|
|
|
@@ -1,11 +0,0 @@
[mypy]
ignore_missing_imports = true
disallow_any_generics = True
pretty = True
show_column_numbers = True
warn_redundant_casts = True
warn_return_any = True
warn_unreachable = True

namespace_packages = True
explicit_package_bases = True
@@ -1,6 +0,0 @@
#!/usr/bin/env bash

set -uo pipefail

echo -e "Python: mypy static analysis..."
mypy --config-file=./misc/scripts/mypy.ini .
@@ -0,0 +1,58 @@
[tool.mypy]
disallow_any_generics = true
explicit_package_bases = true
ignore_missing_imports = true
namespace_packages = true
no_implicit_optional = true
pretty = true
scripts_are_modules = true
show_column_numbers = true
warn_redundant_casts = true
warn_return_any = true
warn_unreachable = true

[tool.ruff]
extend-include = ["SConstruct"]
line-length = 120
target-version = "py37"

[tool.ruff.lint]
extend-select = [
    "I",  # isort
]

[tool.ruff.lint.per-file-ignores]
"SConstruct" = [
    "F821",  # Undefined name
]

[tool.codespell]
enable-colors = ""
write-changes = ""
check-hidden = ""
quiet-level = 3
builtin = "clear,rare,en-GB_to_en-US"
ignore-words-list = """\
breaked,
cancelled,
checkin,
curvelinear,
doubleclick,
expct,
findn,
gird,
hel,
inout,
labelin,
lod,
mis,
nd,
numer,
ot,
outin,
requestor,
te,
textin,
thirdparty,
vai
"""
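The mypy options previously kept in mypy.ini now live under [tool.mypy], and ruff and codespell read their settings from the same file. As a rough, illustrative sanity check that the file parses as these tools will see it (assumes Python 3.11+ for tomllib and that pyproject.toml is in the working directory):

    # Illustrative check: load pyproject.toml and print the settings the linters pick up.
    import tomllib  # Python 3.11+

    with open("pyproject.toml", "rb") as f:
        config = tomllib.load(f)

    print(config["tool"]["ruff"]["line-length"])            # 120
    print(config["tool"]["ruff"]["lint"]["extend-select"])  # ["I"]
    print(config["tool"]["mypy"]["warn_unreachable"])       # True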
@@ -36,7 +36,7 @@ namespace godot {
 
 void *Memory::alloc_static(size_t p_bytes, bool p_pad_align) {
 #ifdef DEBUG_ENABLED
-	bool prepad = false; // Alredy pre paded in the engine.
+	bool prepad = false; // Already pre paded in the engine.
 #else
 	bool prepad = p_pad_align;
 #endif
@@ -63,7 +63,7 @@ void *Memory::realloc_static(void *p_memory, size_t p_bytes, bool p_pad_align) {
 	uint8_t *mem = (uint8_t *)p_memory;
 
 #ifdef DEBUG_ENABLED
-	bool prepad = false; // Alredy pre paded in the engine.
+	bool prepad = false; // Already pre paded in the engine.
 #else
 	bool prepad = p_pad_align;
 #endif
@@ -82,7 +82,7 @@ void Memory::free_static(void *p_ptr, bool p_pad_align) {
 	uint8_t *mem = (uint8_t *)p_ptr;
 
 #ifdef DEBUG_ENABLED
-	bool prepad = false; // Alredy pre paded in the engine.
+	bool prepad = false; // Already pre paded in the engine.
 #else
 	bool prepad = p_pad_align;
 #endif
@@ -816,7 +816,7 @@ void Basis::get_axis_angle(Vector3 &r_axis, real_t &r_angle) const {
 		return;
 	}
 	// As we have reached here there are no singularities so we can handle normally.
-	double s = Math::sqrt((rows[2][1] - rows[1][2]) * (rows[2][1] - rows[1][2]) + (rows[0][2] - rows[2][0]) * (rows[0][2] - rows[2][0]) + (rows[1][0] - rows[0][1]) * (rows[1][0] - rows[0][1])); // Used to normalise.
+	double s = Math::sqrt((rows[2][1] - rows[1][2]) * (rows[2][1] - rows[1][2]) + (rows[0][2] - rows[2][0]) * (rows[0][2] - rows[2][0]) + (rows[1][0] - rows[0][1]) * (rows[1][0] - rows[0][1])); // Used to normalize.
 
 	if (Math::abs(s) < CMP_EPSILON) {
 		// Prevent divide by zero, should not happen if matrix is orthogonal and should be caught by singularity test above.
@@ -192,7 +192,7 @@ void Color::set_hsv(float p_h, float p_s, float p_v, float p_alpha) {
 	a = p_alpha;
 
 	if (p_s == 0.0f) {
-		// Achromatic (grey)
+		// Achromatic (gray)
 		r = g = b = p_v;
 		return;
 	}
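For context on the line touched in Basis::get_axis_angle: away from the singular cases, the rotation axis is recovered from the antisymmetric part of the matrix, and `s` is the length of that vector, used to normalize the axis. A hedged Python sketch of the same formula (illustrative only, not the engine code):

    # Sketch of the axis extraction used in the non-singular branch above.
    import math

    def axis_from_rotation_matrix(rows):
        # rows is a 3x3 rotation matrix given as a list of three rows.
        x = rows[2][1] - rows[1][2]
        y = rows[0][2] - rows[2][0]
        z = rows[1][0] - rows[0][1]
        s = math.sqrt(x * x + y * y + z * z)  # Used to normalize.
        if abs(s) < 1e-6:
            raise ValueError("Singular case; handled by the earlier branch in the engine.")
        return (x / s, y / s, z / s)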
@@ -1,6 +1,4 @@
 #!/usr/bin/env python
-import os
-import sys
 
 env = SConscript("../SConstruct")
 
@@ -0,0 +1,9 @@
{
    "enabled_classes": [
        "Control",
        "Label",
        "OS",
        "TileMap",
        "InputEventKey"
    ]
}
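The new build_profile SCons option (added in tools/godotcpp.py further down in this diff) points at a JSON file like the one above; as the name suggests, the binding generator can use the classes listed under "enabled_classes" to limit what gets generated and compiled. A minimal, illustrative sketch of reading such a profile, with the file name being an assumption:

    # Sketch: read a feature build profile and report which classes are enabled.
    import json

    with open("build_profile.json", "r", encoding="utf-8") as f:  # hypothetical path
        profile = json.load(f)

    enabled = set(profile.get("enabled_classes", []))
    print("Control" in enabled)  # True for the profile shown above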
@@ -86,8 +86,8 @@ func _ready():
 	assert_equal(example.test_str_utility(), "Hello, World! The answer is 42")
 
 	# Test converting string to char* and doing comparison.
-	assert_equal(example.test_string_is_fourty_two("blah"), false)
-	assert_equal(example.test_string_is_fourty_two("fourty two"), true)
+	assert_equal(example.test_string_is_forty_two("blah"), false)
+	assert_equal(example.test_string_is_forty_two("forty two"), true)
 
 	# PackedArray iterators
 	assert_equal(example.test_vector_ops(), 105)
@@ -146,7 +146,7 @@ void Example::_bind_methods() {
 	ClassDB::bind_method(D_METHOD("test_node_argument"), &Example::test_node_argument);
 	ClassDB::bind_method(D_METHOD("test_string_ops"), &Example::test_string_ops);
 	ClassDB::bind_method(D_METHOD("test_str_utility"), &Example::test_str_utility);
-	ClassDB::bind_method(D_METHOD("test_string_is_fourty_two"), &Example::test_string_is_fourty_two);
+	ClassDB::bind_method(D_METHOD("test_string_is_forty_two"), &Example::test_string_is_forty_two);
 	ClassDB::bind_method(D_METHOD("test_vector_ops"), &Example::test_vector_ops);
 	ClassDB::bind_method(D_METHOD("test_vector_init_list"), &Example::test_vector_init_list);
 
@@ -278,7 +278,7 @@ ExampleRef *Example::return_extended_ref() const {
 }
 
 Ref<ExampleRef> Example::extended_ref_checks(Ref<ExampleRef> p_ref) const {
-	// This is therefor the prefered way of instancing and returning a refcounted object:
+	// This is therefore the preferred way of instancing and returning a refcounted object:
 	Ref<ExampleRef> ref;
 	ref.instantiate();
 	return ref;
@@ -323,8 +323,8 @@ String Example::test_str_utility() const {
 	return UtilityFunctions::str("Hello, ", "World", "! The answer is ", 42);
 }
 
-bool Example::test_string_is_fourty_two(const String &p_string) const {
-	return strcmp(p_string.utf8().ptr(), "fourty two") == 0;
+bool Example::test_string_is_forty_two(const String &p_string) const {
+	return strcmp(p_string.utf8().ptr(), "forty two") == 0;
 }
 
 int Example::test_vector_ops() const {
@@ -125,7 +125,7 @@ public:
 	Example *test_node_argument(Example *p_node) const;
 	String test_string_ops() const;
 	String test_str_utility() const;
-	bool test_string_is_fourty_two(const String &p_str) const;
+	bool test_string_is_forty_two(const String &p_str) const;
 	int test_vector_ops() const;
 	int test_vector_init_list() const;
 
@@ -6,23 +6,24 @@
 #ifndef TESTS_H
 #define TESTS_H
 
-#include "godot_cpp/templates/cowdata.hpp"
-#include "godot_cpp/templates/hash_map.hpp"
-#include "godot_cpp/templates/hash_set.hpp"
-#include "godot_cpp/templates/hashfuncs.hpp"
-#include "godot_cpp/templates/list.hpp"
-#include "godot_cpp/templates/pair.hpp"
-#include "godot_cpp/templates/rb_map.hpp"
-#include "godot_cpp/templates/rb_set.hpp"
-#include "godot_cpp/templates/rid_owner.hpp"
-#include "godot_cpp/templates/safe_refcount.hpp"
-#include "godot_cpp/templates/search_array.hpp"
-#include "godot_cpp/templates/self_list.hpp"
-#include "godot_cpp/templates/sort_array.hpp"
-#include "godot_cpp/templates/spin_lock.hpp"
-#include "godot_cpp/templates/thread_work_pool.hpp"
-#include "godot_cpp/templates/vector.hpp"
-#include "godot_cpp/templates/vmap.hpp"
-#include "godot_cpp/templates/vset.hpp"
+#include <godot_cpp/templates/cowdata.hpp>
+#include <godot_cpp/templates/hash_map.hpp>
+#include <godot_cpp/templates/hash_set.hpp>
+#include <godot_cpp/templates/hashfuncs.hpp>
+#include <godot_cpp/templates/list.hpp>
+#include <godot_cpp/templates/local_vector.hpp>
+#include <godot_cpp/templates/pair.hpp>
+#include <godot_cpp/templates/rb_map.hpp>
+#include <godot_cpp/templates/rb_set.hpp>
+#include <godot_cpp/templates/rid_owner.hpp>
+#include <godot_cpp/templates/safe_refcount.hpp>
+#include <godot_cpp/templates/search_array.hpp>
+#include <godot_cpp/templates/self_list.hpp>
+#include <godot_cpp/templates/sort_array.hpp>
+#include <godot_cpp/templates/spin_lock.hpp>
+#include <godot_cpp/templates/thread_work_pool.hpp>
+#include <godot_cpp/templates/vector.hpp>
+#include <godot_cpp/templates/vmap.hpp>
+#include <godot_cpp/templates/vset.hpp>
 
 #endif // TESTS_H
@@ -1,8 +1,8 @@
 import os
 import sys
-import my_spawn
 
 import common_compiler_flags
-from SCons.Script import ARGUMENTS
+import my_spawn
+from SCons.Script import ARGUMENTS
 
 
 def options(opts):
@@ -1,6 +1,5 @@
 import os
 import subprocess
-import sys
 
 
 def using_clang(env):
@@ -1,15 +1,16 @@
-import os, sys, platform
+import os
+import platform
+import sys
 
-from SCons.Variables import EnumVariable, PathVariable, BoolVariable
-from SCons.Variables.BoolVariable import _text2bool
-from SCons.Tool import Tool
 from SCons.Action import Action
 from SCons.Builder import Builder
 from SCons.Errors import UserError
 from SCons.Script import ARGUMENTS
+from SCons.Tool import Tool
+from SCons.Variables import BoolVariable, EnumVariable, PathVariable
+from SCons.Variables.BoolVariable import _text2bool
 
-from binding_generator import scons_generate_bindings, scons_emit_files
+from binding_generator import scons_emit_files, scons_generate_bindings
 
 
 def add_sources(sources, dir, extension):
@@ -283,6 +284,15 @@ def options(opts, env):
         )
     )
 
+    opts.Add(
+        PathVariable(
+            "build_profile",
+            "Path to a file containing a feature build profile",
+            default=env.get("build_profile", None),
+            validator=validate_file,
+        )
+    )
+
     opts.Add(
         BoolVariable(
             "disable_exceptions",
@@ -1,8 +1,9 @@
 import os
-import sys
 import subprocess
+import sys
 
 import common_compiler_flags
-from SCons.Variables import *
+from SCons.Variables import BoolVariable
 
 if sys.version_info < (3,):
@@ -1,6 +1,6 @@
 import common_compiler_flags
-from SCons.Variables import *
 from SCons.Tool import clang, clangxx
+from SCons.Variables import BoolVariable
 
 
 def options(opts):
@@ -1,5 +1,6 @@
 import os
 import sys
 
 import common_compiler_flags
 
+
@@ -32,7 +32,6 @@ def configure(env):
     return rv
 
-
 def mySpawn(sh, escape, cmd, args, env):
     newargs = " ".join(args[1:])
     cmdline = cmd + " " + newargs
 
@@ -1,4 +1,3 @@
-import os
 import common_compiler_flags
 from SCons.Util import WhereIs
 
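The import churn across these tool scripts is what ruff's isort rule ("I" in the pyproject.toml above) enforces: one import per line, groups separated by blank lines (standard library, then third-party, then local modules), and each group alphabetized. A hedged before/after illustration of that ordering (running it requires SCons to be installed):

    # Before (style the diffs above move away from):
    # import os, sys, platform
    # from SCons.Variables import EnumVariable, PathVariable, BoolVariable

    # After (the grouping and ordering ruff's isort rule settles on):
    import os
    import platform
    import sys

    from SCons.Variables import BoolVariable, EnumVariable, PathVariable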
@ -1,14 +1,81 @@
|
||||||
import sys
|
import sys
|
||||||
import my_spawn
|
|
||||||
import common_compiler_flags
|
import common_compiler_flags
|
||||||
from SCons.Tool import msvc, mingw
|
import my_spawn
|
||||||
from SCons.Variables import *
|
from SCons.Tool import mingw, msvc
|
||||||
|
from SCons.Variables import BoolVariable
|
||||||
|
|
||||||
|
|
||||||
|
def silence_msvc(env):
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
# Ensure we have a location to write captured output to, in case of false positives.
|
||||||
|
capture_path = os.path.join(os.path.dirname(__file__), "..", "msvc_capture.log")
|
||||||
|
with open(capture_path, "wt", encoding="utf-8"):
|
||||||
|
pass
|
||||||
|
|
||||||
|
old_spawn = env["SPAWN"]
|
||||||
|
re_redirect_stream = re.compile(r"^[12]?>")
|
||||||
|
re_cl_capture = re.compile(r"^.+\.(c|cc|cpp|cxx|c[+]{2})$", re.IGNORECASE)
|
||||||
|
re_link_capture = re.compile(r'\s{3}\S.+\s(?:"[^"]+.lib"|\S+.lib)\s.+\s(?:"[^"]+.exp"|\S+.exp)')
|
||||||
|
|
||||||
|
def spawn_capture(sh, escape, cmd, args, env):
|
||||||
|
# We only care about cl/link, process everything else as normal.
|
||||||
|
if args[0] not in ["cl", "link"]:
|
||||||
|
return old_spawn(sh, escape, cmd, args, env)
|
||||||
|
|
||||||
|
# Process as normal if the user is manually rerouting output.
|
||||||
|
for arg in args:
|
||||||
|
if re_redirect_stream.match(arg):
|
||||||
|
return old_spawn(sh, escape, cmd, args, env)
|
||||||
|
|
||||||
|
tmp_stdout, tmp_stdout_name = tempfile.mkstemp()
|
||||||
|
os.close(tmp_stdout)
|
||||||
|
args.append(f">{tmp_stdout_name}")
|
||||||
|
ret = old_spawn(sh, escape, cmd, args, env)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(tmp_stdout_name, "r", encoding=sys.stdout.encoding, errors="replace") as tmp_stdout:
|
||||||
|
lines = tmp_stdout.read().splitlines()
|
||||||
|
os.remove(tmp_stdout_name)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Early process no lines (OSError)
|
||||||
|
if not lines:
|
||||||
|
return ret
|
||||||
|
|
||||||
|
is_cl = args[0] == "cl"
|
||||||
|
content = ""
|
||||||
|
caught = False
|
||||||
|
for line in lines:
|
||||||
|
# These conditions are far from all-encompassing, but are specialized
|
||||||
|
# for what can be reasonably expected to show up in the repository.
|
||||||
|
if not caught and (is_cl and re_cl_capture.match(line)) or (not is_cl and re_link_capture.match(line)):
|
||||||
|
caught = True
|
||||||
|
try:
|
||||||
|
with open(capture_path, "a", encoding=sys.stdout.encoding) as log:
|
||||||
|
log.write(line + "\n")
|
||||||
|
except OSError:
|
||||||
|
print(f'WARNING: Failed to log captured line: "{line}".')
|
||||||
|
continue
|
||||||
|
content += line + "\n"
|
||||||
|
# Content remaining assumed to be an error/warning.
|
||||||
|
if content:
|
||||||
|
sys.stderr.write(content)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
env["SPAWN"] = spawn_capture
|
||||||
|
|
||||||
|
|
||||||
def options(opts):
|
def options(opts):
|
||||||
opts.Add(BoolVariable("use_mingw", "Use the MinGW compiler instead of MSVC - only effective on Windows", False))
|
opts.Add(BoolVariable("use_mingw", "Use the MinGW compiler instead of MSVC - only effective on Windows", False))
|
||||||
opts.Add(BoolVariable("use_clang_cl", "Use the clang driver instead of MSVC - only effective on Windows", False))
|
opts.Add(BoolVariable("use_clang_cl", "Use the clang driver instead of MSVC - only effective on Windows", False))
|
||||||
opts.Add(BoolVariable("use_static_cpp", "Link MinGW/MSVC C++ runtime libraries statically", True))
|
opts.Add(BoolVariable("use_static_cpp", "Link MinGW/MSVC C++ runtime libraries statically", True))
|
||||||
|
opts.Add(BoolVariable("silence_msvc", "Silence MSVC's cl/link stdout bloat, redirecting errors to stderr.", True))
|
||||||
|
|
||||||
|
|
||||||
def exists(env):
|
def exists(env):
|
||||||
|
@ -16,7 +83,6 @@ def exists(env):
|
||||||
|
|
||||||
|
|
||||||
def generate(env):
|
def generate(env):
|
||||||
base = None
|
|
||||||
if not env["use_mingw"] and msvc.exists(env):
|
if not env["use_mingw"] and msvc.exists(env):
|
||||||
if env["arch"] == "x86_64":
|
if env["arch"] == "x86_64":
|
||||||
env["TARGET_ARCH"] = "amd64"
|
env["TARGET_ARCH"] = "amd64"
|
||||||
|
@ -42,6 +108,9 @@ def generate(env):
|
||||||
else:
|
else:
|
||||||
env.Append(CCFLAGS=["/MD"])
|
env.Append(CCFLAGS=["/MD"])
|
||||||
|
|
||||||
|
if env["silence_msvc"] and not env.GetOption("clean"):
|
||||||
|
silence_msvc(env)
|
||||||
|
|
||||||
elif sys.platform == "win32" or sys.platform == "msys":
|
elif sys.platform == "win32" or sys.platform == "msys":
|
||||||
env["use_mingw"] = True
|
env["use_mingw"] = True
|
||||||
mingw.generate(env)
|
mingw.generate(env)
|
||||||
|
|