diff --git a/.bazelrc b/.bazelrc index 042b6f89779..cb3a238fc06 100644 --- a/.bazelrc +++ b/.bazelrc @@ -39,6 +39,8 @@ common --remote_download_regex=.*\.(dwo|h|cpp)$ common:macos --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1 +common:windows --features=-compiler_param_file + # Restrict local actions to 20% to prevent linking from hitting OOM issues, but give the # freedom to remote execution to schedule more compilation actions. common --local_resources=cpu=HOST_CPUS*.2 @@ -326,7 +328,7 @@ common:clang-tidy --build_tag_filters=-third_party,-mongo-tidy-tests common:clang-tidy --//bazel/config:compiler_type=clang common:clang-tidy --aspects @bazel_clang_tidy//clang_tidy:clang_tidy.bzl%clang_tidy_aspect common:clang-tidy --output_groups=report -common:clang-tidy --@bazel_clang_tidy//:clang_tidy_config=//:clang_tidy_config +common:clang-tidy --@bazel_clang_tidy//:clang_tidy_config=//:clang_tidy_config_strict common:clang-tidy --@bazel_clang_tidy//:clang_tidy_executable=//:clang_tidy common:clang-tidy --@bazel_clang_tidy//:clang_tidy_additional_deps=//:toolchain_files common:clang-tidy --@bazel_clang_tidy//:clang_tidy_plugin_deps=//src/mongo/tools/mongo_tidy_checks:mongo_tidy_checks diff --git a/.gitignore b/.gitignore index 1443035caf0..58ce0281c38 100644 --- a/.gitignore +++ b/.gitignore @@ -289,6 +289,7 @@ bazel/coverity/analysis/BUILD.bazel .bazel_include_info.json .bazel_header_list_cache .bazel_real +.mongo_checks_module_path MODULE.bazel MODULE.bazel.lock # generated configs for external fixture suites diff --git a/BUILD.bazel b/BUILD.bazel index 7edb5ba12bc..87dfe4f35e6 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -1,7 +1,7 @@ -load("@hedron_compile_commands//:refresh_compile_commands.bzl", "refresh_compile_commands") load("@npm//:defs.bzl", "npm_link_all_packages") load("//bazel/install_rules:install_rules.bzl", "mongo_install") load("//bazel/toolchains:mongo_toolchain.bzl", "setup_mongo_toolchain_aliases") +load("//bazel/config:render_template.bzl", "render_template") package( default_visibility = ["//visibility:public"], @@ -27,32 +27,40 @@ alias( setup_mongo_toolchain_aliases() -# This rule makes it possible to set the clang-tidy configuration setting: -genrule( +render_template( name = "clang_tidy_config", srcs = [ ".clang-tidy.in", + "//buildscripts:clang_tidy_config_gen.py", ], - outs = [".clang-tidy"], - - # Note: we use sed to substitute the known good value `build/compiledb/mongo`; testing - # has confirmed that this is the correct value. - # We also use it to append a line to opt-in to warnings as errors. 
- cmd = """ - cp $(location .clang-tidy.in) $@ && \ - sed -i 's|@MONGO_BUILD_DIR@|$(RULEDIR)/src/mongo|g ; s|@MONGO_BRACKET_BUILD_DIR@|$(RULEDIR)/src/mongo|g ; $$aWarningsAsErrors: "*"' $@ - """, - visibility = ["//visibility:public"], + cmd = [ + "$(location //buildscripts:clang_tidy_config_gen.py)", + "--input=$(location .clang-tidy.in)", + "--output=$(location .clang-tidy)", + ], + output = ".clang-tidy", ) -refresh_compile_commands( +render_template( + name = "clang_tidy_config_strict", + srcs = [ + ".clang-tidy.in", + "//buildscripts:clang_tidy_config_gen.py", + ], + cmd = [ + "$(location //buildscripts:clang_tidy_config_gen.py)", + "--input=$(location .clang-tidy.in)", + "--output=$(location .clang-tidy.strict)", + "--warnings-as-errors", + ], + output = ".clang-tidy.strict", +) + +genrule( name = "compiledb", - exclude_external_sources = True, - exclude_headers = "all", # not using "all" adds headers as sources to compile_commands.json which is never what we want - tags = ["compiledb"], - targets = { - "//src/...": "", - }, + srcs = ["compile_commands.json"], + outs = ["compile_commands_done"], + cmd = "echo noop > $(location :compile_commands_done)", ) mongo_install( diff --git a/SConstruct b/SConstruct index 2513cc604de..85771c5a3d4 100644 --- a/SConstruct +++ b/SConstruct @@ -6437,13 +6437,6 @@ def injectModule(env, module, **kwargs): env.AddMethod(injectModule, "InjectModule") -if get_option("ninja") == "disabled": - compileCommands = env.CompilationDatabase("compile_commands.json") - # Initialize generated-sources Alias as a placeholder so that it can be used as a - # dependency for compileCommands. This Alias will be properly updated in other SConscripts. - env.Depends(compileCommands, env.Alias("generated-sources")) - compileDb = env.Alias("compiledb", compileCommands) - msvc_version = "" if "MSVC_VERSION" in env and env["MSVC_VERSION"]: msvc_version = "--version " + env["MSVC_VERSION"] + " " @@ -6452,7 +6445,7 @@ if "MSVC_VERSION" in env and env["MSVC_VERSION"]: if get_option("ninja") == "disabled": vcxprojFile = env.Command( "mongodb.vcxproj", - compileCommands, + "compiledb", r"$PYTHON buildscripts\make_vcxproj.py " + msvc_version + "mongodb", ) vcxproj = env.Alias("vcxproj", vcxprojFile) diff --git a/WORKSPACE.bazel b/WORKSPACE.bazel index 6a30b363b0b..10a8dd4ac64 100644 --- a/WORKSPACE.bazel +++ b/WORKSPACE.bazel @@ -96,34 +96,6 @@ load("@rules_coverity//coverity:repositories.bzl", "rules_coverity_toolchains") rules_coverity_toolchains() -# Hedron's Compile Commands Extractor for Bazel -# https://github.com/hedronvision/bazel-compile-commands-extractor -http_archive( - # TODO SERVER-91979 - # the compile commands extractor is tied to bazel version and toolchain resolution - # so this might need to be update when bazel version is updated. 
- name = "hedron_compile_commands", - sha256 = "d699c5e3a82b811d7051e833b1808358256a9c52285a43e6a3abb76c809ab03f", - strip_prefix = "bazel-compile-commands-extractor-33658bab23a4858b513d767480b43d3d8fb6a3d1", - url = "https://github.com/hedronvision/bazel-compile-commands-extractor/archive/33658bab23a4858b513d767480b43d3d8fb6a3d1.tar.gz", -) - -load("@hedron_compile_commands//:workspace_setup.bzl", "hedron_compile_commands_setup") - -hedron_compile_commands_setup() - -load("@hedron_compile_commands//:workspace_setup_transitive.bzl", "hedron_compile_commands_setup_transitive") - -hedron_compile_commands_setup_transitive() - -load("@hedron_compile_commands//:workspace_setup_transitive_transitive.bzl", "hedron_compile_commands_setup_transitive_transitive") - -hedron_compile_commands_setup_transitive_transitive() - -load("@hedron_compile_commands//:workspace_setup_transitive_transitive_transitive.bzl", "hedron_compile_commands_setup_transitive_transitive_transitive") - -hedron_compile_commands_setup_transitive_transitive_transitive() - http_archive( name = "platforms", sha256 = "8150406605389ececb6da07cbcb509d5637a3ab9a24bc69b1101531367d89d74", diff --git a/bazel/wrapper_hook/compiledb.py b/bazel/wrapper_hook/compiledb.py new file mode 100644 index 00000000000..f2bea0b84b3 --- /dev/null +++ b/bazel/wrapper_hook/compiledb.py @@ -0,0 +1,200 @@ +import errno +import fileinput +import json +import os +import pathlib +import platform +import shutil +import subprocess +import sys + +REPO_ROOT = pathlib.Path(__file__).parent.parent.parent +sys.path.append(str(REPO_ROOT)) + + +def run_pty_command(cmd): + stdout = None + try: + import pty + + parent_fd, child_fd = pty.openpty() # provide tty + stdout = "" + + proc = subprocess.Popen(cmd, stdout=child_fd, stdin=child_fd) + os.close(child_fd) + while True: + try: + data = os.read(parent_fd, 512) + except OSError as e: + if e.errno != errno.EIO: + raise + break # EIO means EOF on some systems + else: + if not data: # EOF + break + stdout += data.decode() + except ModuleNotFoundError: + proc = subprocess.run( + cmd, + stdout=subprocess.PIPE, + ) + stdout = proc.stdout.decode() + return stdout + + +def generate_compiledb(bazel_bin, persistent_compdb): + if persistent_compdb: + info_proc = subprocess.run( + [bazel_bin, "info", "output_base"], capture_output=True, text=True + ) + project_hash = pathlib.Path(info_proc.stdout.strip()).name + output_base = pathlib.Path(info_proc.stdout.strip() + "_bazel_compiledb") + tmp_dir = os.environ["Temp"] if platform.system() == "Windows" else "/tmp" + symlink_prefix = pathlib.Path(tmp_dir) / f"{project_hash}_compiledb-" + query_cmd = ( + [bazel_bin] + + ([f"--output_base={output_base}"] if persistent_compdb else []) + + ["aquery"] + + ([f"--symlink_prefix={symlink_prefix}"] if persistent_compdb else []) + + [ + "--config=dbg", + "--remote_executor=", + "--remote_cache=", + "--bes_backend=", + "--bes_results_url=", + "--noinclude_artifacts", + 'mnemonic("CppCompile|LinkCompile", //src/...)', + "--output=jsonproto", + ] + ) + + first_time = "" + if persistent_compdb and not output_base.exists(): + first_time = " (the first time takes longer)" + + print(f"Generating compiledb command lines via aquery{first_time}...") + stdout = run_pty_command(query_cmd) + data = json.loads(stdout) + + output_json = [] + repo_root_resolved = str(REPO_ROOT.resolve()) + + for action in data["actions"]: + input_file = None + output_file = None + prev_arg = None + for arg in reversed(action["arguments"]): + if not input_file: + if arg == "-c" 
or arg == "/c": + input_file = prev_arg + elif arg.startswith("/c"): + input_file = arg[2:] + if not output_file: + if arg == "-o" or arg == "/Fo": + output_file = prev_arg + elif arg.startswith("/Fo"): + output_file = arg[3:] + if input_file and output_file: + break + prev_arg = arg + + if not input_file: + raise Exception( + f"failed to parse '-c' or '/c' from command line:{os.linesep}{' '.join(action['arguments'])}" + ) + + if not output_file: + raise Exception( + f"failed to parse '-o' or '/Fo' from command line:{os.linesep}{' '.join(action['arguments'])}" + ) + + if persistent_compdb: + output_json.append( + { + "file": input_file.replace("bazel-out", f"{symlink_prefix}out"), + "arguments": [ + arg.replace("bazel-out", f"{symlink_prefix}out").replace( + "external/", f"{symlink_prefix}out/../../../external/" + ) + for arg in action["arguments"] + ], + "directory": repo_root_resolved, + "output": output_file.replace("bazel-out", f"{symlink_prefix}out"), + } + ) + else: + output_json.append( + { + "file": input_file, + "arguments": action["arguments"], + "directory": repo_root_resolved, + "output": output_file, + } + ) + + json_str = json.dumps(output_json, indent=4) + compile_commands_json = REPO_ROOT / "compile_commands.json" + need_rewrite = True + if compile_commands_json.exists(): + with open(compile_commands_json, "r") as f: + need_rewrite = json_str != f.read() + + if need_rewrite: + with open(compile_commands_json, "w") as f: + f.write(json_str) + + if not persistent_compdb: + external_link = REPO_ROOT / "external" + if external_link.exists(): + os.unlink(external_link) + os.symlink( + pathlib.Path(os.readlink(REPO_ROOT / "bazel-out")).parent.parent.parent / "external", + external_link, + target_is_directory=True, + ) + + print("Generating sources for compiledb...") + gen_source_cmd = ( + [bazel_bin] + + ([f"--output_base={output_base}"] if persistent_compdb else []) + + ["build"] + + ([f"--symlink_prefix={symlink_prefix}"] if persistent_compdb else []) + + [ + "--config=dbg", + f"--build_tag_filters=gen_source{',mongo-tidy-checks' if platform.system() != 'Windows' else ''}", + "//src/...", + ] + + (["//:clang_tidy_config"] if platform.system() != "Windows" else []) + + (["//:clang_tidy_config_strict"] if platform.system() != "Windows" else []) + ) + run_pty_command(gen_source_cmd) + + if platform.system() != "Windows": + clang_tidy_file = pathlib.Path(REPO_ROOT) / ".clang-tidy" + + if persistent_compdb: + configs = [ + pathlib.Path(f"{symlink_prefix}bin") / config + for config in [".clang-tidy.strict", ".clang-tidy"] + ] + for config in configs: + os.chmod(config, 0o744) + with fileinput.FileInput(config, inplace=True) as file: + for line in file: + print(line.replace("bazel-out/", f"{symlink_prefix}out/"), end="") + shutil.copyfile(configs[1], clang_tidy_file) + with open(".mongo_checks_module_path", "w") as f: + f.write( + os.path.join( + f"{symlink_prefix}bin", + "src", + "mongo", + "tools", + "mongo_tidy_checks", + "libmongo_tidy_checks.so", + ) + ) + else: + shutil.copyfile(pathlib.Path("bazel-bin") / ".clang-tidy", clang_tidy_file) + + print("compiledb target done, finishing any other targets...") diff --git a/bazel/wrapper_hook/engflow_check.py b/bazel/wrapper_hook/engflow_check.py new file mode 100644 index 00000000000..b8287ebf60c --- /dev/null +++ b/bazel/wrapper_hook/engflow_check.py @@ -0,0 +1,25 @@ +import os +import pathlib +import sys +import time + +REPO_ROOT = str(pathlib.Path(__file__).parent.parent.parent) +sys.path.append(REPO_ROOT) + +from 
bazel.wrapper_hook.wrapper_debug import wrapper_debug + + +def engflow_auth(args): + start = time.time() + from buildscripts.engflow_auth import setup_auth + + args_str = " ".join(args) + if ( + "--config=local" not in args_str + and "--config=public-release" not in args_str + and "--config local" not in args_str + and "--config public-release" not in args_str + ): + if os.environ.get("CI") is None: + setup_auth(verbose=False) + wrapper_debug(f"engflow auth time: {time.time() - start}") diff --git a/bazel/wrapper_hook/install_modules.py b/bazel/wrapper_hook/install_modules.py new file mode 100644 index 00000000000..f9e8552fcfe --- /dev/null +++ b/bazel/wrapper_hook/install_modules.py @@ -0,0 +1,98 @@ +import hashlib +import os +import pathlib +import platform +import shutil +import subprocess +import sys +import tempfile + +REPO_ROOT = pathlib.Path(__file__).parent.parent.parent +sys.path.append(str(REPO_ROOT)) + +from bazel.wrapper_hook.wrapper_debug import wrapper_debug + + +def get_deps_dirs(deps): + tmp_dir = pathlib.Path(os.environ["Temp"] if platform.system() == "Windows" else "/tmp") + bazel_bin = REPO_ROOT / "bazel-bin" + for dep in deps: + try: + for out_dir in [ + REPO_ROOT / "bazel-out", + tmp_dir / "compiledb-out", + ]: + for child in os.listdir(out_dir): + yield f"{out_dir}/{child}/bin/external/poetry/{dep}", dep + except OSError: + pass + yield f"{bazel_bin}/external/poetry/{dep}", dep + + +def search_for_modules(deps, deps_installed, lockfile_changed=False): + deps_not_found = deps.copy() + wrapper_debug(f"deps_installed: {deps_installed}") + for target_dir, dep in get_deps_dirs(deps): + wrapper_debug(f"checking for {dep} in target_dir: {target_dir}") + if dep in deps_installed: + continue + + if not pathlib.Path(target_dir).exists(): + continue + + if not lockfile_changed: + for entry in os.listdir(target_dir): + if entry.endswith(".dist-info"): + wrapper_debug(f"found: {target_dir}") + deps_installed.append(dep) + deps_not_found.remove(dep) + sys.path.append(target_dir) + break + else: + os.chmod(target_dir, 0o777) + for root, dirs, files in os.walk(target_dir): + for somedir in dirs: + os.chmod(pathlib.Path(root) / somedir, 0o777) + for file in files: + os.chmod(pathlib.Path(root) / file, 0o777) + shutil.rmtree(target_dir) + wrapper_debug(f"deps_not_found: {deps_not_found}") + return deps_not_found + + +def install_modules(bazel): + need_to_install = False + pwd_hash = hashlib.md5(str(REPO_ROOT).encode()).hexdigest() + lockfile_hash_file = pathlib.Path(tempfile.gettempdir()) / f"{pwd_hash}_lockfile_hash" + with open(REPO_ROOT / "poetry.lock", "rb") as f: + current_hash = hashlib.md5(f.read()).hexdigest() + + old_hash = None + if lockfile_hash_file.exists(): + with open(lockfile_hash_file) as f: + old_hash = f.read() + + if old_hash != current_hash: + with open(lockfile_hash_file, "w") as f: + f.write(current_hash) + + deps = ["retry"] + deps_installed = [] + deps_needed = search_for_modules( + deps, deps_installed, lockfile_changed=old_hash != current_hash + ) + + if deps_needed: + need_to_install = True + + if old_hash != current_hash: + need_to_install = True + deps_needed = deps + + if need_to_install: + subprocess.run( + [bazel, "build", "--config=local"] + ["@poetry//:install_" + dep for dep in deps_needed] + ) + deps_missing = search_for_modules(deps_needed, deps_installed) + if deps_missing: + raise Exception(f"Failed to install python deps {deps_missing}") diff --git a/bazel/wrapper_hook.py b/bazel/wrapper_hook/plus_interface.py similarity index 55% rename from 
bazel/wrapper_hook.py rename to bazel/wrapper_hook/plus_interface.py index f25425204b6..f09f9921b69 100644 --- a/bazel/wrapper_hook.py +++ b/bazel/wrapper_hook/plus_interface.py @@ -1,31 +1,16 @@ -import hashlib import os +import pathlib import platform import shutil import subprocess import sys -import tempfile import time -REPO_ROOT = os.path.join(os.path.dirname(__file__), os.path.pardir) -sys.path.append(REPO_ROOT) +REPO_ROOT = pathlib.Path(__file__).parent.parent.parent +sys.path.append(str(REPO_ROOT)) - -# This script should be careful not to disrupt automatic mechanism which -# may be expecting certain stdout, always print to stderr. -sys.stdout = sys.stderr - -if ( - os.environ.get("MONGO_BAZEL_WRAPPER_DEBUG") == "1" - and os.environ.get("MONGO_AUTOCOMPLETE_QUERY") != "1" -): - - def wrapper_debug(x): - print("[WRAPPER_HOOK_DEBUG]: " + x, file=sys.stderr) -else: - - def wrapper_debug(x): - pass +from bazel.wrapper_hook.compiledb import generate_compiledb +from bazel.wrapper_hook.wrapper_debug import wrapper_debug class BinAndSourceIncompatible(Exception): @@ -36,99 +21,15 @@ class DuplicateSourceNames(Exception): pass -wrapper_debug(f"wrapper hook script is using {sys.executable}") - - -def get_deps_dirs(deps): - bazel_out_dir = os.path.join(REPO_ROOT, "bazel-out") - bazel_bin = os.path.join(REPO_ROOT, "bazel-bin") - for dep in deps: - try: - for child in os.listdir(bazel_out_dir): - yield f"{bazel_out_dir}/{child}/bin/external/poetry/{dep}", dep - except OSError: - pass - yield f"{bazel_bin}/external/poetry/{dep}", dep - - -def search_for_modules(deps, deps_installed, lockfile_changed=False): - deps_not_found = deps.copy() - wrapper_debug(f"deps_installed: {deps_installed}") - for target_dir, dep in get_deps_dirs(deps): - wrapper_debug(f"checking for {dep} in target_dir: {target_dir}") - if dep in deps_installed: - continue - - if not os.path.exists(target_dir): - continue - - if not lockfile_changed: - for entry in os.listdir(target_dir): - if entry.endswith(".dist-info"): - wrapper_debug(f"found: {target_dir}") - deps_installed.append(dep) - deps_not_found.remove(dep) - sys.path.append(target_dir) - break - else: - os.chmod(target_dir, 0o777) - for root, dirs, files in os.walk(target_dir): - for somedir in dirs: - os.chmod(os.path.join(root, somedir), 0o777) - for file in files: - os.chmod(os.path.join(root, file), 0o777) - shutil.rmtree(target_dir) - wrapper_debug(f"deps_not_found: {deps_not_found}") - return deps_not_found - - -def install_modules(bazel): - need_to_install = False - pwd_hash = hashlib.md5(os.path.abspath(REPO_ROOT).encode()).hexdigest() - lockfile_hash_file = os.path.join(tempfile.gettempdir(), f"{pwd_hash}_lockfile_hash") - with open(os.path.join(REPO_ROOT, "poetry.lock"), "rb") as f: - current_hash = hashlib.md5(f.read()).hexdigest() - - old_hash = None - if os.path.exists(lockfile_hash_file): - with open(lockfile_hash_file) as f: - old_hash = f.read() - - if old_hash != current_hash: - with open(lockfile_hash_file, "w") as f: - f.write(current_hash) - - deps = ["retry"] - deps_installed = [] - deps_needed = search_for_modules( - deps, deps_installed, lockfile_changed=old_hash != current_hash - ) - - if deps_needed: - need_to_install = True - - if old_hash != current_hash: - need_to_install = True - deps_needed = deps - - if need_to_install: - subprocess.run( - [bazel, "build", "--config=local"] + ["@poetry//:install_" + dep for dep in deps_needed] - ) - deps_missing = search_for_modules(deps_needed, deps_installed) - if deps_missing: - raise 
Exception(f"Failed to install python deps {deps_missing}") - - def get_buildozer_output(autocomplete_query): from buildscripts.install_bazel import install_bazel buildozer_name = "buildozer" if not platform.system() == "Windows" else "buildozer.exe" buildozer = shutil.which(buildozer_name) if not buildozer: - buildozer = os.path.expanduser(f"~/.local/bin/{buildozer_name}") + buildozer = str(pathlib.Path(f"~/.local/bin/{buildozer_name}").expanduser()) if not os.path.exists(buildozer): - bazel_bin_dir = os.path.expanduser("~/.local/bin") + bazel_bin_dir = str(pathlib.Path("~/.local/bin").expanduser()) if not os.path.exists(bazel_bin_dir): os.makedirs(bazel_bin_dir) install_bazel(bazel_bin_dir) @@ -149,55 +50,58 @@ def get_buildozer_output(autocomplete_query): return p.stdout -def engflow_auth(args): - start = time.time() - from buildscripts.engflow_auth import setup_auth - - args_str = " ".join(args) - if ( - "--config=local" not in args_str - and "--config=public-release" not in args_str - and "--config local" not in args_str - and "--config public-release" not in args_str - ): - if os.environ.get("CI") is None: - setup_auth(verbose=False) - wrapper_debug(f"engflow auth time: {time.time() - start}") - - def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_buildozer_output): start = time.time() plus_autocomplete_query = False - if autocomplete_query: - str_args = " ".join(args) - if "'//:*'" in str_args or "':*'" in str_args or "//:all" in str_args or ":all" in str_args: - plus_autocomplete_query = True - plus_starts = ("+", ":+", "//:+") skip_plus_interface = True - for arg in args: - if arg.startswith(plus_starts): - skip_plus_interface = False - - if skip_plus_interface and not autocomplete_query: - return args[1:] - + compiledb_target = False + persistent_compdb = True + compiledb_targets = ["//:compiledb", ":compiledb", "compiledb"] sources_to_bin = {} select_sources = {} current_select = None in_select = False c_exts = (".c", ".cc", ".cpp") + replacements = {} + fileNameFilter = [] + bin_targets = [] + source_targets = {} + + if autocomplete_query: + str_args = " ".join(args) + if "'//:*'" in str_args or "':*'" in str_args or "//:all" in str_args or ":all" in str_args: + plus_autocomplete_query = True + + if os.environ.get("CI") is not None: + persistent_compdb = False + + for arg in args: + if arg in compiledb_targets: + compiledb_target = True + if arg == "--intree_compdb": + replacements[arg] = [] + persistent_compdb = False + skip_plus_interface = False + if arg.startswith(plus_starts): + skip_plus_interface = False + + if compiledb_target: + generate_compiledb(args[0], persistent_compdb) + + if skip_plus_interface and not autocomplete_query: + return args[1:] def add_source_test(source_file, bin_file, sources_to_bin): - src_key = os.path.splitext( - os.path.basename(source_file.replace("//", "").replace(":", "/")) - )[0] + src_key = pathlib.Path( + pathlib.Path(source_file.replace("//", "").replace(":", "/")).name + ).stem if src_key in sources_to_bin: raise DuplicateSourceNames( f"Two test files with the same name:\n {bin_file}->{src_key}\n {sources_to_bin[src_key]}->{src_key}" ) - if src_key == os.path.basename(bin_file.replace("//", "").replace(":", "/")): + if src_key == pathlib.Path(bin_file.replace("//", "").replace(":", "/")).name: src_key = f"{src_key}-{src_key}" sources_to_bin[src_key] = bin_file @@ -225,9 +129,10 @@ def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_bui if plus_autocomplete_query: autocomplete_target = 
["//:+" + test for test in sources_to_bin.keys()] autocomplete_target += [ - "//:+" + os.path.basename(test.replace("//", "").replace(":", "/")) + "//:+" + pathlib.Path(test.replace("//", "").replace(":", "/")).name for test in set(sources_to_bin.values()) ] + autocomplete_target += ["//:compiledb"] with open("/tmp/mongo_autocomplete_plus_targets", "w") as f: f.write(" ".join(autocomplete_target)) elif autocomplete_query: @@ -237,11 +142,6 @@ def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_bui if autocomplete_query or plus_autocomplete_query: return args[1:] - replacements = {} - fileNameFilter = [] - bin_targets = [] - source_targets = {} - for arg in args[1:]: if arg.startswith(plus_starts): test_name = arg[arg.find("+") + 1 :] @@ -250,7 +150,7 @@ def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_bui if not real_target: for bin_target in set(sources_to_bin.values()): if ( - os.path.basename(bin_target.replace("//", "").replace(":", "/")) + pathlib.Path(bin_target.replace("//", "").replace(":", "/")).name == test_name ): bin_targets.append(bin_target) @@ -286,7 +186,7 @@ def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_bui + "Conflicting binary targets:\n " + "\n ".join( [ - os.path.basename(bin_target.replace("//", "").replace(":", "/")) + pathlib.Path(bin_target.replace("//", "").replace(":", "/")).name for bin_target in bin_targets ] ) @@ -315,20 +215,3 @@ def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_bui wrapper_debug(f"plus interface time: {time.time() - start}") return new_args - - -def main(): - install_modules(sys.argv[1]) - - engflow_auth(sys.argv) - - args = test_runner_interface( - sys.argv[1:], autocomplete_query=os.environ.get("MONGO_AUTOCOMPLETE_QUERY") == "1" - ) - os.chmod(os.environ.get("MONGO_BAZEL_WRAPPER_ARGS"), 0o644) - with open(os.environ.get("MONGO_BAZEL_WRAPPER_ARGS"), "w") as f: - f.write(" ".join(args)) - - -if __name__ == "__main__": - main() diff --git a/bazel/wrapper_hook/wrapper_debug.py b/bazel/wrapper_hook/wrapper_debug.py new file mode 100644 index 00000000000..fc50b4648bc --- /dev/null +++ b/bazel/wrapper_hook/wrapper_debug.py @@ -0,0 +1,14 @@ +import os +import sys + +if ( + os.environ.get("MONGO_BAZEL_WRAPPER_DEBUG") == "1" + and os.environ.get("MONGO_AUTOCOMPLETE_QUERY") != "1" +): + + def wrapper_debug(x): + print("[WRAPPER_HOOK_DEBUG]: " + x, file=sys.stderr) +else: + + def wrapper_debug(x): + pass diff --git a/bazel/wrapper_hook/wrapper_hook.py b/bazel/wrapper_hook/wrapper_hook.py new file mode 100644 index 00000000000..e89c2b9f557 --- /dev/null +++ b/bazel/wrapper_hook/wrapper_hook.py @@ -0,0 +1,34 @@ +import os +import pathlib +import sys + +REPO_ROOT = pathlib.Path(__file__).parent.parent.parent +sys.path.append(str(REPO_ROOT)) + +# This script should be careful not to disrupt automatic mechanism which +# may be expecting certain stdout, always print to stderr. 
+sys.stdout = sys.stderr + +from bazel.wrapper_hook.engflow_check import engflow_auth +from bazel.wrapper_hook.install_modules import install_modules +from bazel.wrapper_hook.plus_interface import test_runner_interface +from bazel.wrapper_hook.wrapper_debug import wrapper_debug + +wrapper_debug(f"wrapper hook script is using {sys.executable}") + + +def main(): + install_modules(sys.argv[1]) + + engflow_auth(sys.argv) + + args = test_runner_interface( + sys.argv[1:], autocomplete_query=os.environ.get("MONGO_AUTOCOMPLETE_QUERY") == "1" + ) + os.chmod(os.environ.get("MONGO_BAZEL_WRAPPER_ARGS"), 0o644) + with open(os.environ.get("MONGO_BAZEL_WRAPPER_ARGS"), "w") as f: + f.write(" ".join(args)) + + +if __name__ == "__main__": + main() diff --git a/buildscripts/BUILD.bazel b/buildscripts/BUILD.bazel index 497c15b7fab..b3e2d46f105 100644 --- a/buildscripts/BUILD.bazel +++ b/buildscripts/BUILD.bazel @@ -1,5 +1,10 @@ load("@poetry//:dependencies.bzl", "dependency") +exports_files([ + "cheetah_source_generator.py", + "clang_tidy_config_gen.py", +]) + py_binary( name = "codeowners", srcs = ["codeowners_generate.py"], @@ -134,8 +139,6 @@ py_binary( ], ) -exports_files(["cheetah_source_generator.py"]) - sh_binary( name = "shellscripts_linters", srcs = ["shellscripts-linters.sh"], diff --git a/buildscripts/clang_tidy.py b/buildscripts/clang_tidy.py index 783f118b8f5..35ee44a0bbd 100755 --- a/buildscripts/clang_tidy.py +++ b/buildscripts/clang_tidy.py @@ -21,6 +21,19 @@ import yaml from clang_tidy_vscode import CHECKS_SO from simple_report import make_report, put_report, try_combine_reports +checks_so = "" +for module in CHECKS_SO: + if os.path.exists(module): + checks_so = module + break + + +config_file = "" +for config in ["/tmp/compiledb-bin/.clang-tidy.strict", "bazel-bin/.clang-tidy.strict"]: + if os.path.exists(config): + config_file = config + break + def _clang_tidy_executor( clang_tidy_filename: Path, @@ -344,7 +357,7 @@ def main(): "-m", "--check-module", type=str, - default=CHECKS_SO, + default=checks_so, help="Path to load the custom mongo checks module.", ) parser.add_argument( @@ -355,7 +368,7 @@ def main(): ) # TODO: Is there someway to get this without hardcoding this much parser.add_argument("-y", "--clang-tidy-toolchain", type=str, default="v4") - parser.add_argument("-f", "--clang-tidy-cfg", type=str, default=".clang-tidy") + parser.add_argument("-f", "--clang-tidy-cfg", type=str, default=config_file) args = parser.parse_args() if args.only_process_fixes: diff --git a/buildscripts/clang_tidy_config_gen.py b/buildscripts/clang_tidy_config_gen.py new file mode 100644 index 00000000000..cb26fed86dc --- /dev/null +++ b/buildscripts/clang_tidy_config_gen.py @@ -0,0 +1,21 @@ +import argparse +import os + +parser = argparse.ArgumentParser() +parser.add_argument("--input", required=True, type=str) +parser.add_argument("--warnings-as-errors", action="store_true") +parser.add_argument("--output", required=True, type=str) +args = parser.parse_args() + +rule_dir = os.path.dirname(args.output) + +with open(args.input) as f: + content = f.read() + +content = content.replace("@MONGO_BUILD_DIR@", f"{rule_dir}/src/mongo") +content = content.replace("@MONGO_BRACKET_BUILD_DIR@", f"{rule_dir}/src/mongo") +if args.warnings_as_errors: + content += 'WarningsAsErrors: "*"\n' + +with open(args.output, "w") as f: + f.write(content) diff --git a/buildscripts/clang_tidy_vscode.py b/buildscripts/clang_tidy_vscode.py index 7b9aeb53f97..fcab914a2d3 100755 --- a/buildscripts/clang_tidy_vscode.py +++ 
b/buildscripts/clang_tidy_vscode.py @@ -28,13 +28,21 @@ import os import subprocess import sys -CHECKS_SO = "build/install/lib/libmongo_tidy_checks.so" +CHECKS_SO = [ + "build/install/lib/libmongo_tidy_checks.so", +] + +if os.path.exists(".mongo_checks_module_path"): + with open(".mongo_checks_module_path") as f: + CHECKS_SO = [f.read().strip()] + CHECKS_SO def main(): clang_tidy_args = ["/opt/mongodbtoolchain/v4/bin/clang-tidy"] - if os.path.isfile(CHECKS_SO): - clang_tidy_args += [f"-load={CHECKS_SO}"] + for check_lib in CHECKS_SO: + if os.path.isfile(check_lib): + clang_tidy_args += [f"-load={check_lib}"] + break # Filter out non src/mongo files for clang tidy checks files_to_check = [] diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py index 9d838888f73..bd5e66b4d20 100644 --- a/buildscripts/make_vcxproj.py +++ b/buildscripts/make_vcxproj.py @@ -17,7 +17,6 @@ import argparse import io import json import os -import re import uuid import xml.etree.ElementTree as ET @@ -238,17 +237,25 @@ class ProjFileGenerator(object): # Replace build commands _replace_vcxproj(self.vcxproj_file_name, self.existing_build_commands) - def parse_line(self, line): + def parse_args(self, args): """Parse a build line.""" - cl_exe_end = line.lower().find('cl.exe" ') - cl_len = len('cl.exe" ') - if cl_exe_end: - self.__parse_cl_line(line[cl_exe_end + cl_len :]) + self.__parse_cl_line(args[1:]) - def __parse_cl_line(self, line): + def __parse_cl_line(self, args): """Parse a compiler line.""" # Get the file we are compilong - file_name = re.search(r"/c ([\w\\.-]+)", line).group(1) + file_name = None + prev_arg = None + for arg in reversed(args): + if not file_name: + if arg == "/c": + file_name = prev_arg + elif arg.startswith("/c"): + file_name = arg[2:] + + if file_name: + break + prev_arg = arg # Skip files made by scons for configure testing if "sconf_temp" in file_name: @@ -257,8 +264,6 @@ class ProjFileGenerator(object): if file_name not in self.files: self.files.add(file_name) - args = line.split(" ") - file_defines = set() for arg in get_defines(args): if arg not in self.common_defines: @@ -402,8 +407,8 @@ def main(): commands = json.loads(contents) for command in commands: - command_str = command["command"] - projfile.parse_line(command_str) + command_args = command["arguments"] + projfile.parse_args(command_args) main() diff --git a/buildscripts/tests/test_bazel_plus_test_interface.py b/buildscripts/tests/test_bazel_plus_test_interface.py index 202ec2135a9..2906e4f2292 100644 --- a/buildscripts/tests/test_bazel_plus_test_interface.py +++ b/buildscripts/tests/test_bazel_plus_test_interface.py @@ -3,7 +3,11 @@ import unittest sys.path.append(".") -from bazel.wrapper_hook import BinAndSourceIncompatible, DuplicateSourceNames, test_runner_interface +from bazel.wrapper_hook.plus_interface import ( + BinAndSourceIncompatible, + DuplicateSourceNames, + test_runner_interface, +) class Tests(unittest.TestCase): diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py index 6239fc84f17..812b5fd7a52 100644 --- a/site_scons/site_tools/compilation_db.py +++ b/site_scons/site_tools/compilation_db.py @@ -199,10 +199,9 @@ def WriteCompilationDb(target, source, env): else: bazel_compdb = ["--bazel-compdb", "compile_commands.json"] env.RunBazelCommand( - [env["SCONS2BAZEL_TARGETS"].bazel_executable, "run"] - + env["BAZEL_FLAGS_STR"] - + ["//:compiledb", "--"] + [env["SCONS2BAZEL_TARGETS"].bazel_executable, "build"] + env["BAZEL_FLAGS_STR"] + + ["//:compiledb"] ) 
subprocess.run( diff --git a/site_scons/site_tools/integrate_bazel.py b/site_scons/site_tools/integrate_bazel.py index 2356245829e..b412d9721d6 100644 --- a/site_scons/site_tools/integrate_bazel.py +++ b/site_scons/site_tools/integrate_bazel.py @@ -858,10 +858,9 @@ def generate_bazel_info_for_ninja(env: SCons.Environment.Environment) -> None: # that bazel will need to construct the correct command line for any given targets ninja_bazel_build_json = { "bazel_cmd": Globals.bazel_base_build_command, - "compiledb_cmd": [Globals.bazel_executable, "run"] + "compiledb_cmd": [Globals.bazel_executable, "build"] + env["BAZEL_FLAGS_STR"] - + ["//:compiledb", "--"] - + env["BAZEL_FLAGS_STR"], + + ["//:compiledb"], "defaults": [str(t) for t in SCons.Script.DEFAULT_TARGETS], "targets": Globals.scons2bazel_targets, "CC": env.get("CC", ""), @@ -1743,6 +1742,17 @@ def generate(env: SCons.Environment.Environment) -> None: "bazel_target": target, "bazel_output": bazel_output_file.replace("\\", "/"), } + compiledb_nodes = env.ThinTarget( + target=env.Alias("compiledb"), + source="compile_commands.json", + NINJA_GENSOURCE_INDEPENDENT=True, + ) + env.NoCache(compiledb_nodes) + + Globals.scons2bazel_targets["compiledb"] = { + "bazel_target": "//:compiledb", + "bazel_output": "compile_commands.json", + } globals = Globals() env["SCONS2BAZEL_TARGETS"] = globals diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py index 0463aeb087f..ccce7a6632b 100644 --- a/site_scons/site_tools/ninja.py +++ b/site_scons/site_tools/ninja.py @@ -824,22 +824,6 @@ class NinjaState: template_builders = [] - # If we ever change the name/s of the rules that include - # compile commands (i.e. something like CC) we will need to - # update this build to reflect that complete list. - compile_commands = "compile_commands_ninja.json" - compdb_expand = "-x " if self.env.get("NINJA_COMPDB_EXPAND") else "" - adjust_script_out = self.env.File("#site_scons/site_tools/compdb_adjust.py").path - self.builds[compile_commands] = { - "rule": "CMD_PRECIOUS", - "outputs": ["compile_commands.json", "compdb_always_rebuild"], - "pool": "console", - "implicit": [ninja_file, "bazel_run_first"], - "variables": { - "cmd": f"ninja -f {ninja_file} -t compdb {compdb_expand}COMPDB_CC COMPDB_CXX > {compile_commands} && " - + f"{sys.executable} {adjust_script_out} --ninja --input-compdb {compile_commands} --output-compdb compile_commands.json --bazel-compdb compile_commands.json" - }, - } self.builds["compiledb"] = { "rule": "phony", "outputs": ["compiledb"], diff --git a/tools/bazel b/tools/bazel index 365bb08b7d2..d13e784b7e9 100755 --- a/tools/bazel +++ b/tools/bazel @@ -8,7 +8,6 @@ REPO_ROOT=$(dirname $(dirname $(realpath "$0"))) bazel_real="$BAZEL_REAL" -echo $BAZEL_REAL > "$REPO_ROOT/.bazel_real" bazelrc_xcode_lines=() @@ -111,15 +110,28 @@ for cert in ${cert_locs[@]}; do done cur_dir=$(basename $REPO_ROOT) -python="$REPO_ROOT/bazel-$cur_dir/external/py_${os}_${ARCH}/dist/bin/python3" +bazel_python="$REPO_ROOT/bazel-$cur_dir/external/py_${os}_${ARCH}/dist/bin/python3" +compdb_python="/tmp/compiledb-$cur_dir/external/py_${os}_${ARCH}/dist/bin/python3" +python=$bazel_python +if [ ! -f $python ]; then + python=$compdb_python +fi if [ ! -f $python ]; then >&2 echo "python prereq missing, using bazel to install python..." >&2 $bazel_real build --config=local @py_${os}_${ARCH}//:all if [[ $? != 0 ]]; then - >&2 echo "wrapper script failed to install python! falling back to normal bazel call..." - exec "$bazel_real" $@ + if [[ ! 
-z "$CI" ]] || [[ $MONGO_BAZEL_WRAPPER_FALLBACK == 1 ]]; then + >&2 echo "wrapper script failed to install python! falling back to normal bazel call..." + exec "$bazel_real" $@ + else + exit $? + fi fi fi +python=$bazel_python +if [ ! -f $python ]; then + python=$compdb_python +fi autocomplete_query=0 # bash autocomplete detection @@ -136,10 +148,14 @@ MONGO_BAZEL_WRAPPER_ARGS=$(mktemp) MONGO_BAZEL_WRAPPER_ARGS=$MONGO_BAZEL_WRAPPER_ARGS \ MONGO_AUTOCOMPLETE_QUERY=$autocomplete_query \ -$python $REPO_ROOT/bazel/wrapper_hook.py $bazel_real "$@" +$python $REPO_ROOT/bazel/wrapper_hook/wrapper_hook.py $bazel_real "$@" if [[ $? != 0 ]]; then - >&2 echo "wrapper script failed! falling back to normal bazel call..." - exec "$bazel_real" $@ + if [[ ! -z "$CI" ]] || [[ $MONGO_BAZEL_WRAPPER_FALLBACK == 1 ]]; then + >&2 echo "wrapper script failed! falling back to normal bazel call..." + exec "$bazel_real" $@ + else + exit $? + fi fi new_args=$(<$MONGO_BAZEL_WRAPPER_ARGS) diff --git a/tools/bazel.bat b/tools/bazel.bat index 5eb6bb1c82a..c02a4883bb6 100644 --- a/tools/bazel.bat +++ b/tools/bazel.bat @@ -7,23 +7,34 @@ set REPO_ROOT=%~dp0.. for %%I in (%REPO_ROOT%) do set cur_dir=%%~nxI -set python="%REPO_ROOT%\bazel-%cur_dir%\external\py_windows_x86_64\dist\python.exe" - +set bazel_python="%REPO_ROOT%\bazel-%cur_dir%\external\py_windows_x86_64\dist\python.exe" +set compdb_python="%Temp%\compiledb-%cur_dir%\external\py_windows_x86_64\dist\python.exe" +set python=%bazel_python% +if not exist "%python%" ( + set python=%compdb_python% +) if not exist "%python%" ( echo python prereq missing, using bazel to install python... 1>&2 "%BAZEL_REAL%" build --config=local @py_windows_x86_64//:all 1>&2 + if %ERRORLEVEL% NEQ 0 ( + if "%CI%"=="" if "%MONGO_BAZEL_WRAPPER_FALLBACK%"=="" exit %ERRORLEVEL% echo wrapper script failed to install python! falling back to normal bazel call... "%BAZEL_REAL%" %* exit %ERRORLEVEL% ) ) +set python=%bazel_python% +if not exist "%python%" ( + set python=%compdb_python% +) SET STARTTIME=%TIME% set "MONGO_BAZEL_WRAPPER_ARGS=%tmp%\bat~%RANDOM%.tmp" echo "" > %MONGO_BAZEL_WRAPPER_ARGS% -%python% %REPO_ROOT%/bazel/wrapper_hook.py "%BAZEL_REAL%" %* +%python% %REPO_ROOT%/bazel/wrapper_hook/wrapper_hook.py "%BAZEL_REAL%" %* if %ERRORLEVEL% NEQ 0 ( + if "%CI%"=="" if "%MONGO_BAZEL_WRAPPER_FALLBACK%"=="" exit %ERRORLEVEL% echo wrapper script failed! falling back to normal bazel call... "%BAZEL_REAL%" %* exit %ERRORLEVEL%