Merge pull request #89452 from Riteo/name-a-better-duo
SCons: Enable the experimental Ninja backend and minimize timestamp changes to generated code
akien-mga committed Apr 4, 2024
2 parents a28be93 + 55558fb commit 7fa97f3
Showing 8 changed files with 120 additions and 72 deletions.
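The "minimize timestamp changes to generated code" part of the title comes down to one pattern, applied to every generator touched below: build the new contents in memory, compare them with what is already on disk, and skip the write when nothing changed, so a generated file's mtime only moves on a real change and timestamp-based backends such as Ninja (or SCons' own timestamp deciders) do not rebuild everything that includes it. A minimal standalone sketch of that idea (illustrative only; the helper the commit actually adds is write_file_if_needed in methods.py further down):

import os


def write_if_changed(path, contents):
    # Illustrative stand-in for methods.write_file_if_needed.
    if os.path.isfile(path):
        with open(path, "r", encoding="utf-8", newline="\n") as f:
            if f.read() == contents:
                return False  # identical contents: leave the file and its mtime alone
    with open(path, "w", encoding="utf-8", newline="\n") as f:
        f.write(contents)
    return True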
4 changes: 4 additions & 0 deletions .gitignore
@@ -35,6 +35,10 @@ bin
 compile_commands.json
 platform/windows/godot_res.res

+# Ninja build files
+build.ninja
+.ninja
+
 # Generated by Godot binary
 .import/
 /gdextension_interface.h
23 changes: 20 additions & 3 deletions SConstruct
@@ -203,6 +203,7 @@ opts.Add(BoolVariable("custom_modules_recursive", "Detect custom modules recursi
 opts.Add(BoolVariable("dev_mode", "Alias for dev options: verbose=yes warnings=extra werror=yes tests=yes", False))
 opts.Add(BoolVariable("tests", "Build the unit tests", False))
 opts.Add(BoolVariable("fast_unsafe", "Enable unsafe options for faster rebuilds", False))
+opts.Add(BoolVariable("ninja", "Use the ninja backend for faster rebuilds", False))
 opts.Add(BoolVariable("compiledb", "Generate compilation DB (`compile_commands.json`) for external tools", False))
 opts.Add(BoolVariable("verbose", "Enable verbose output for the compilation", False))
 opts.Add(BoolVariable("progress", "Show a progress indicator during compilation", True))
@@ -956,7 +957,8 @@ if selected_platform in platform_list:
         env.vs_incs = []
         env.vs_srcs = []

-    # CompileDB
+    # CompileDB and Ninja are only available with certain SCons versions which
+    # not everybody might have yet, so we have to check.
     from SCons import __version__ as scons_raw_version

     scons_ver = env._get_major_minor_revision(scons_raw_version)
@@ -968,6 +970,20 @@ if selected_platform in platform_list:
         env.Tool("compilation_db")
         env.Alias("compiledb", env.CompilationDatabase())

+    if env["ninja"]:
+        if scons_ver < (4, 2, 0):
+            print("The `ninja=yes` option requires SCons 4.2 or later, but your version is %s." % scons_raw_version)
+            Exit(255)
+
+        SetOption("experimental", "ninja")
+
+        # By setting this we allow the user to run ninja by themselves with all
+        # the flags they need, as apparently automatically running from scons
+        # is way slower.
+        SetOption("disable_execute_ninja", True)
+
+        env.Tool("ninja")
+
     # Threads
     if env["threads"]:
         env.Append(CPPDEFINES=["THREADS_ENABLED"])
@@ -1041,9 +1057,10 @@ atexit.register(print_elapsed_time)


 def purge_flaky_files():
+    paths_to_keep = ["ninja.build"]
     for build_failure in GetBuildFailures():
-        path = build_failure.node.abspath
-        if os.path.isfile(path):
+        path = build_failure.node.path
+        if os.path.isfile(path) and path not in paths_to_keep:
             os.remove(path)


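For context, the gating above is plain SCons API, so the same backend can be tried in a stripped-down project. The sketch below is illustrative only: it is not Godot's build script, it assumes SCons 4.2+ with its bundled experimental ninja tool, and hello.c is a placeholder target. As the comment in the hunk explains, disable_execute_ninja makes SCons stop after emitting build.ninja so the user can run ninja themselves with whatever flags they need:

# SConstruct -- minimal illustrative sketch, not Godot's build script.
import SCons

env = Environment()

# Same version gate as above: the experimental ninja tool ships with SCons 4.2+.
scons_ver = tuple(int(v) for v in SCons.__version__.split(".")[:3])
if scons_ver < (4, 2, 0):
    print("The ninja backend requires SCons 4.2 or later, found %s." % SCons.__version__)
    Exit(255)

SetOption("experimental", "ninja")
# Only emit build.ninja; running ninja afterwards is left to the user.
SetOption("disable_execute_ninja", True)
env.Tool("ninja")

env.Program("hello", ["hello.c"])  # placeholder target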
9 changes: 6 additions & 3 deletions core/SCsub
@@ -3,6 +3,7 @@
 Import("env")

 import core_builders
+import methods

 env.core_sources = []

@@ -35,10 +36,12 @@ if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ:
         )
         Exit(255)

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("script_encryption_key.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write('#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n")
+    script_encryption_key_contents = (
+        '#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n"
+    )

+    methods.write_file_if_needed("script_encryption_key.gen.cpp", script_encryption_key_contents)

 # Add required thirdparty code.

28 changes: 15 additions & 13 deletions editor/SCsub
@@ -7,19 +7,24 @@ env.editor_sources = []
 import os
 import glob
 import editor_builders
+import methods


 def _make_doc_data_class_path(to_path):
-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n") as g:
-        g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
-        g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
+    file_path = os.path.join(to_path, "doc_data_class_path.gen.h")

-        g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
-        for c in sorted(env.doc_class_path):
-            g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
-        g.write("\t{nullptr, nullptr}\n")
-        g.write("};\n")
+    class_path_data = ""
+    class_path_data += "static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n"
+    class_path_data += "struct _DocDataClassPath { const char* name; const char* path; };\n"
+    class_path_data += (
+        "static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n"
+    )
+    for c in sorted(env.doc_class_path):
+        class_path_data += '\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n'
+    class_path_data += "\t{nullptr, nullptr}\n"
+    class_path_data += "};\n"
+
+    methods.write_file_if_needed(file_path, class_path_data)


 if env.editor_build:
@@ -38,10 +43,7 @@ if env.editor_build:
         reg_exporters += "\tregister_" + e + "_exporter_types();\n"
     reg_exporters += "}\n"

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("register_exporters.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write(reg_exporters_inc)
-        f.write(reg_exporters)
+    methods.write_file_if_needed("register_exporters.gen.cpp", reg_exporters_inc + reg_exporters)

     # Core API documentation.
     docs = []
60 changes: 34 additions & 26 deletions methods.py
@@ -228,14 +228,22 @@ def get_version_info(module_version_string="", silent=False):
     return version_info


+def write_file_if_needed(path, string):
+    try:
+        with open(path, "r", encoding="utf-8", newline="\n") as f:
+            if f.read() == string:
+                return
+    except FileNotFoundError:
+        pass
+
+    with open(path, "w", encoding="utf-8", newline="\n") as f:
+        f.write(string)
+
+
 def generate_version_header(module_version_string=""):
     version_info = get_version_info(module_version_string)

-    # NOTE: It is safe to generate these files here, since this is still executed serially.
-
-    with open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n") as f:
-        f.write(
-            """\
+    version_info_header = """\
 /* THIS FILE IS GENERATED DO NOT EDIT */
 #ifndef VERSION_GENERATED_GEN_H
 #define VERSION_GENERATED_GEN_H
@@ -252,21 +260,20 @@ def generate_version_header(module_version_string=""):
 #define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
 #endif // VERSION_GENERATED_GEN_H
 """.format(
-                **version_info
-            )
-        )
+        **version_info
+    )

-    with open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n") as fhash:
-        fhash.write(
-            """\
+    version_hash_data = """\
 /* THIS FILE IS GENERATED DO NOT EDIT */
 #include "core/version.h"
 const char *const VERSION_HASH = "{git_hash}";
 const uint64_t VERSION_TIMESTAMP = {git_timestamp};
 """.format(
-                **version_info
-            )
-        )
+        **version_info
+    )
+
+    write_file_if_needed("core/version_generated.gen.h", version_info_header)
+    write_file_if_needed("core/version_hash.gen.cpp", version_hash_data)


 def parse_cg_file(fname, uniforms, sizes, conditionals):
@@ -385,15 +392,18 @@ def is_module(path):


 def write_disabled_classes(class_list):
-    with open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n") as f:
-        f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
-        f.write("#define DISABLED_CLASSES_GEN_H\n\n")
-        for c in class_list:
-            cs = c.strip()
-            if cs != "":
-                f.write("#define ClassDB_Disable_" + cs + " 1\n")
-        f.write("\n#endif\n")
+    file_contents = ""
+
+    file_contents += "/* THIS FILE IS GENERATED DO NOT EDIT */\n"
+    file_contents += "#ifndef DISABLED_CLASSES_GEN_H\n"
+    file_contents += "#define DISABLED_CLASSES_GEN_H\n\n"
+    for c in class_list:
+        cs = c.strip()
+        if cs != "":
+            file_contents += "#define ClassDB_Disable_" + cs + " 1\n"
+    file_contents += "\n#endif\n"
+
+    write_file_if_needed("core/disabled_classes.gen.h", file_contents)


 def write_modules(modules):
@@ -435,9 +445,7 @@ def write_modules(modules):
         uninitialize_cpp,
     )

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("modules/register_module_types.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write(modules_cpp)
+    write_file_if_needed("modules/register_module_types.gen.cpp", modules_cpp)


 def convert_custom_modules_path(path):
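A quick way to see what the new helper buys, as a hedged usage sketch (run from the Godot source root so the methods module patched above imports cleanly; example.gen.h is a hypothetical file, and any equivalent compare-before-write helper would behave the same): regenerating identical content leaves the file's mtime untouched, so nothing that depends on it gets rebuilt, while a real content change still rewrites it.

import os

import methods  # the module patched above

path = "example.gen.h"  # hypothetical generated file
methods.write_file_if_needed(path, "#define EXAMPLE 1\n")
first_mtime = os.path.getmtime(path)

# Same contents again: the helper reads, compares and returns early,
# so the file and its mtime are left untouched.
methods.write_file_if_needed(path, "#define EXAMPLE 1\n")
assert os.path.getmtime(path) == first_mtime

# Different contents: the file is rewritten and the mtime moves.
methods.write_file_if_needed(path, "#define EXAMPLE 2\n")
assert os.path.getmtime(path) >= first_mtime

os.remove(path)  # clean up the illustration file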
9 changes: 3 additions & 6 deletions platform/SCsub
@@ -1,5 +1,7 @@
 #!/usr/bin/env python

+import methods
+
 Import("env")

 env.platform_sources = []
@@ -18,12 +20,7 @@ reg_apis_inc += "\n"
 reg_apis += "}\n\n"
 unreg_apis += "}\n"

-# NOTE: It is safe to generate this file here, since this is still execute serially
-with open("register_platform_apis.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-    f.write(reg_apis_inc)
-    f.write(reg_apis)
-    f.write(unreg_apis)
-
+methods.write_file_if_needed("register_platform_apis.gen.cpp", reg_apis_inc + reg_apis + unreg_apis)
 env.add_source_files(env.platform_sources, "register_platform_apis.gen.cpp")

 lib = env.add_library("platform", env.platform_sources)
6 changes: 3 additions & 3 deletions platform_methods.py
@@ -5,6 +5,7 @@
 import uuid
 import functools
 import subprocess
+import methods

 # NOTE: The multiprocessing module is not compatible with SCons due to conflict on cPickle

@@ -65,10 +66,9 @@ def generate_export_icons(platform_path, platform_name):

         svg_str += '";\n'

-        # NOTE: It is safe to generate this file here, since this is still executed serially.
         wf = export_path + "/" + name + "_svg.gen.h"
-        with open(wf, "w", encoding="utf-8", newline="\n") as svgw:
-            svgw.write(svg_str)
+
+        methods.write_file_if_needed(wf, svg_str)


 def get_build_version(short):
53 changes: 35 additions & 18 deletions scu_builders.py
@@ -13,18 +13,19 @@
 _max_includes_per_scu = 1024


-def clear_out_existing_files(output_folder, extension):
+def clear_out_stale_files(output_folder, extension, fresh_files):
     output_folder = os.path.abspath(output_folder)
-    # print("clear_out_existing_files from folder: " + output_folder)
+    # print("clear_out_stale_files from folder: " + output_folder)

     if not os.path.isdir(output_folder):
         # folder does not exist or has not been created yet,
         # no files to clearout. (this is not an error)
         return

     for file in glob.glob(output_folder + "/*." + extension):
-        # print("removed pre-existing file: " + file)
-        os.remove(file)
+        if not file in fresh_files:
+            # print("removed stale file: " + file)
+            os.remove(file)


 def folder_not_found(folder):
@@ -87,11 +88,16 @@ def write_output_file(file_count, include_list, start_line, end_line, output_fol

     short_filename = output_filename_prefix + num_string + ".gen." + extension
     output_filename = output_folder + "/" + short_filename
-    if _verbose:
-        print("SCU: Generating: %s" % short_filename)

     output_path = Path(output_filename)
-    output_path.write_text(file_text, encoding="utf8")
+
+    if not output_path.exists() or output_path.read_text() != file_text:
+        if _verbose:
+            print("SCU: Generating: %s" % short_filename)
+        output_path.write_text(file_text, encoding="utf8")
+    elif _verbose:
+        print("SCU: Generation not needed for: " + short_filename)

     return output_filename


 def write_exception_output_file(file_count, exception_string, output_folder, output_filename_prefix, extension):
@@ -109,11 +115,16 @@ def write_exception_output_file(file_count, exception_string, output_folder, out
     short_filename = output_filename_prefix + "_exception" + num_string + ".gen." + extension
     output_filename = output_folder + "/" + short_filename

-    if _verbose:
-        print("SCU: Generating: " + short_filename)
-
     output_path = Path(output_filename)
-    output_path.write_text(file_text, encoding="utf8")
+
+    if not output_path.exists() or output_path.read_text() != file_text:
+        if _verbose:
+            print("SCU: Generating: " + short_filename)
+        output_path.write_text(file_text, encoding="utf8")
+    elif _verbose:
+        print("SCU: Generation not needed for: " + short_filename)

     return output_filename


 def find_section_name(sub_folder):
@@ -214,10 +225,7 @@ def process_folder(folders, sought_exceptions=[], includes_per_scu=0, extension=
     output_folder = abs_main_folder + "/scu/"
     output_filename_prefix = "scu_" + out_filename

-    # Clear out any existing files (usually we will be overwriting,
-    # but we want to remove any that are pre-existing that will not be
-    # overwritten, so as to not compile anything stale)
-    clear_out_existing_files(output_folder, extension)
+    fresh_files = set()

     for file_count in range(0, num_output_files):
         end_line = start_line + lines_per_file
@@ -226,19 +234,28 @@ def process_folder(folders, sought_exceptions=[], includes_per_scu=0, extension=
         if file_count == (num_output_files - 1):
             end_line = len(found_includes)

-        write_output_file(
+        fresh_file = write_output_file(
             file_count, found_includes, start_line, end_line, output_folder, output_filename_prefix, extension
         )

+        fresh_files.add(fresh_file)
+
         start_line = end_line

     # Write the exceptions each in their own scu gen file,
     # so they can effectively compile in "old style / normal build".
     for exception_count in range(len(found_exceptions)):
-        write_exception_output_file(
+        fresh_file = write_exception_output_file(
             exception_count, found_exceptions[exception_count], output_folder, output_filename_prefix, extension
         )

+        fresh_files.add(fresh_file)
+
+    # Clear out any stale file (usually we will be overwriting if necessary,
+    # but we want to remove any that are pre-existing that will not be
+    # overwritten, so as to not compile anything stale).
+    clear_out_stale_files(output_folder, extension, fresh_files)


 def generate_scu_files(max_includes_per_scu):
     print("=============================")
