mirror of https://github.com/mongodb/mongo
SERVER-99251 switch plus interface to dynamic generation (#31070)
GitOrigin-RevId: a647d892e79bcd4cd521fc078ecea2fed6c7ad10
This commit is contained in:
parent
ebdd998e8e
commit
7c363f9d88
8443
BUILD.bazel
8443
BUILD.bazel
File diff suppressed because it is too large
Load Diff
|
|
@ -347,47 +347,3 @@ def mongo_install(
|
|||
testonly = testonly,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def mongo_unittest_install(
        name,
        srcs,
        deps = [],
        target_compatible_with = [],
        **kwargs):
    """Installs a unit test binary and registers "+"-prefixed sh_test runners.

    Args:
        name: install target name; names of the form "<bin>_test-<file>" denote
            a single source file within the "<bin>_test" binary.
        srcs: sources/binaries forwarded to mongo_install.
        deps: deps forwarded to mongo_install.
        target_compatible_with: platform constraints forwarded to mongo_install.
        **kwargs: any other mongo_install arguments.
    """
    mongo_install(
        name,
        srcs,
        # BUG FIX: previously passed literal empty lists here, silently
        # discarding caller-supplied deps/target_compatible_with.
        deps = deps,
        target_compatible_with = target_compatible_with,
        testonly = True,
        **kwargs
    )
    if "_test-" in name:
        # "<bin>_test-<file>" names run one source file's tests via the
        # unittest framework's -fileNameFilter flag.
        test_bin = name.split("_test-")[0] + "_test"
        test_file = name.split("_test-")[1]
        if test_bin != test_file:
            test_name = "+" + test_file
        else:
            # File name equals binary name: keep the full install name to
            # avoid a target-name collision with the whole-binary runner.
            test_name = "+" + name
        native.sh_test(
            name = test_name,
            srcs = ["install-" + test_bin],
            args = ["-fileNameFilter", test_file],
            testonly = True,
            exec_properties = {
                # Installed test launchers must run on the local machine.
                "no-remote": "1",
            },
            env = SANITIZER_ENV,
            data = SANITIZER_DATA,
        )
    else:
        # Whole-binary runner: executes every test in the installed binary.
        native.sh_test(
            name = "+" + name,
            srcs = ["install-" + name],
            testonly = True,
            exec_properties = {
                "no-remote": "1",
            },
            env = SANITIZER_ENV,
            data = SANITIZER_DATA,
        )
|
||||
|
|
|
|||
|
|
@ -126,8 +126,9 @@ def _py_download(ctx):
|
|||
usercustomize_file,
|
||||
"""
|
||||
import sys
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
import os
|
||||
import tempfile
|
||||
sys.pycache_prefix = os.path.join(tempfile.gettempdir(), "bazel_pycache")
|
||||
""",
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,238 @@
|
|||
import os
import shutil
import subprocess
import sys
import time

# Allow repo-root imports (buildscripts.*) when invoked from the repo root.
sys.path.append(".")

from buildscripts.engflow_auth import setup_auth
from buildscripts.install_bazel import install_bazel

# do not print to stdout as that is used for arg modifications
orig_stdout = sys.stdout
sys.stdout = sys.stderr

# Debug logging is opt-in and suppressed during shell-autocomplete queries,
# which must stay quiet.
if (
    os.environ.get("MONGO_BAZEL_WRAPPER_DEBUG") == "1"
    and os.environ.get("MONGO_AUTOCOMPLETE_QUERY") != "1"
):

    def wrapper_debug(x):
        # Write to stderr: stdout is reserved for the rewritten argument list.
        print("[WRAPPER_HOOK_DEBUG]: " + x, file=sys.stderr)
else:

    def wrapper_debug(x):
        # Debugging disabled: no-op.
        pass
|
||||
|
||||
|
||||
class BinAndSourceIncompatible(Exception):
    """Raised when "+" source-file targets are mixed with other test binaries."""

    pass
|
||||
|
||||
|
||||
class DuplicateSourceNames(Exception):
    """Raised when two test source files share the same basename."""

    pass
|
||||
|
||||
|
||||
wrapper_debug(f"wrapper hook script is using {sys.executable}")
|
||||
|
||||
|
||||
def get_buildozer_output(autocomplete_query):
    """Return buildozer's "print label srcs" output for every
    mongo_cc_unit_test target under //src/....

    Looks for buildozer on PATH, falling back to ~/.local/bin and installing
    the tooling there if it is missing. A failed query terminates the process,
    except during autocomplete queries, where output is returned as-is so
    completion stays non-fatal.
    """
    buildozer_path = shutil.which("buildozer")
    if not buildozer_path:
        local_bin = os.path.expanduser("~/.local/bin")
        buildozer_path = os.path.join(local_bin, "buildozer")
        if not os.path.exists(buildozer_path):
            # install_bazel also provisions buildozer into this directory.
            if not os.path.exists(local_bin):
                os.makedirs(local_bin)
            install_bazel(local_bin)

    proc = subprocess.run(
        [buildozer_path, "print label srcs", "//src/...:%mongo_cc_unit_test"],
        capture_output=True,
        text=True,
    )

    if proc.returncode != 0 and not autocomplete_query:
        print("buildozer test target query failed:")
        print(proc.args)
        print(proc.stdout)
        print(proc.stderr)
        sys.exit(1)

    return proc.stdout
|
||||
|
||||
|
||||
def engflow_auth(args):
    """Run EngFlow auth setup unless the command line opts out of remote
    execution or we are running in CI.

    Local-only and public-release configs never reach the EngFlow cluster, so
    authentication is skipped for them.
    """
    t0 = time.time()
    joined = " ".join(args)
    local_markers = (
        "--config=local",
        "--config=public-release",
        "--config local",
        "--config public-release",
    )
    if not any(marker in joined for marker in local_markers):
        # CI manages its own credentials; only authenticate interactively.
        if os.environ.get("CI") is None:
            setup_auth(verbose=False)
    wrapper_debug(f"engflow auth time: {time.time() - t0}")
|
||||
|
||||
|
||||
def test_runner_interface(args, autocomplete_query, get_buildozer_output=get_buildozer_output):
    """Rewrite "+"-prefixed test targets on a bazel command line.

    args: the wrapper's full argv; args[0] is the hook itself and is dropped
        from the returned list.
    autocomplete_query: True when servicing shell completion; candidate "+"
        targets are written to /tmp/mongo_autocomplete_plus_targets and the
        arguments are passed through unchanged.
    get_buildozer_output: injectable (for tests) provider of buildozer's
        "print label srcs" output for all mongo_cc_unit_test targets.

    Returns the argument list with each "+name" replaced by its real bazel
    target, plus -fileNameFilter --test_arg entries for source-file targets.

    Raises:
        DuplicateSourceNames: two test source files share a basename.
        BinAndSourceIncompatible: source-file targets mixed with other
            whole-binary targets (a single filter can't serve both).
    """
    start = time.time()

    plus_autocomplete_query = False
    if autocomplete_query:
        str_args = " ".join(args)
        # Only expand the "+" candidate list when completing a wildcard-ish target.
        if "'//:*'" in str_args or "':*'" in str_args or "//:all" in str_args or ":all" in str_args:
            plus_autocomplete_query = True

    plus_starts = ("+", ":+", "//:+")
    skip_plus_interface = True
    for arg in args:
        if arg.startswith(plus_starts):
            skip_plus_interface = False

    # Fast path: no "+" targets and not autocompleting -- nothing to rewrite.
    if skip_plus_interface and not autocomplete_query:
        return args[1:]

    sources_to_bin = {}
    select_sources = {}
    current_select = None
    in_select = False
    c_exts = (".c", ".cc", ".cpp")

    def add_source_test(source_file, bin_file, sources_to_bin):
        # Key by source basename without extension; that is what users type.
        src_key = os.path.splitext(
            os.path.basename(source_file.replace("//", "").replace(":", "/"))
        )[0]
        if src_key in sources_to_bin:
            raise DuplicateSourceNames(
                f"Two test files with the same name:\n {bin_file}->{src_key}\n {sources_to_bin[src_key]}->{src_key}"
            )
        # A source named like its binary would collide with the whole-binary
        # target; disambiguate as "<name>-<name>".
        if src_key == os.path.basename(bin_file.replace("//", "").replace(":", "/")):
            src_key = f"{src_key}-{src_key}"
        sources_to_bin[src_key] = bin_file

    # this naively gets all possible source file targets
    for line in get_buildozer_output(autocomplete_query).splitlines():
        # non select case
        if line.startswith("//") and line.endswith("]"):
            in_select = False
            current_select = None
            tokens = line.split("[")
            binfile = tokens[0].strip()
            srcs = tokens[1][:-1].split(" ")
            for src in srcs:
                if src.endswith(c_exts):
                    add_source_test(src, binfile, sources_to_bin)
        else:
            # select()/multi-line case: first line names the target, quoted
            # tokens on subsequent lines are the source files.
            if not in_select:
                current_select = line.split(" ")[0]
                select_sources[current_select] = []
                in_select = True
            for token in line.split('"'):
                if token.strip().endswith(c_exts):
                    add_source_test(token.strip(), current_select, sources_to_bin)

    if plus_autocomplete_query:
        # Offer both per-source and per-binary "+" completions.
        autocomplete_target = ["//:+" + test for test in sources_to_bin.keys()]
        autocomplete_target += [
            "//:+" + os.path.basename(test.replace("//", "").replace(":", "/"))
            for test in set(sources_to_bin.values())
        ]
        with open("/tmp/mongo_autocomplete_plus_targets", "w") as f:
            f.write(" ".join(autocomplete_target))
    elif autocomplete_query:
        with open("/tmp/mongo_autocomplete_plus_targets", "w") as f:
            f.write("")

    # Autocomplete never rewrites the command line.
    if autocomplete_query or plus_autocomplete_query:
        return args[1:]

    replacements = {}
    fileNameFilter = []
    bin_targets = []
    source_targets = {}

    for arg in args[1:]:
        if arg.startswith(plus_starts):
            # Strip everything up to and including the first "+".
            test_name = arg[arg.find("+") + 1 :]
            real_target = sources_to_bin.get(test_name)

            if not real_target:
                # Not a source name; try matching a test binary's basename.
                for bin_target in set(sources_to_bin.values()):
                    if (
                        os.path.basename(bin_target.replace("//", "").replace(":", "/"))
                        == test_name
                    ):
                        bin_targets.append(bin_target)
                        real_target = bin_target
                # May map to [None] when nothing matched; handled below.
                replacements[arg] = [real_target]
            else:
                # defer source targets to see if we can skip redundant tests
                source_targets[test_name] = [arg, real_target]

    source_targets_without_bin_targets = []
    bins_from_source_added = []
    for test_name, values in source_targets.items():
        arg, real_target = values

        if real_target not in bin_targets:
            # Add each binary once even if several of its sources were named.
            if real_target not in bins_from_source_added:
                replacements[arg] = [real_target]
                bins_from_source_added.append(real_target)
            else:
                replacements[arg] = []
            if test_name not in fileNameFilter:
                fileNameFilter += [test_name]
                source_targets_without_bin_targets.append(test_name)
        else:
            # Whole binary already requested: the source target is redundant.
            replacements[arg] = []

    if bin_targets and source_targets_without_bin_targets:
        # -fileNameFilter applies to every test binary in the invocation, so a
        # filter for one binary would wrongly restrict the others.
        raise BinAndSourceIncompatible(
            "Cannot mix source file test targets with different test binary targets.\n"
            + "Conflicting source targets:\n "
            + "\n ".join(source_targets_without_bin_targets)
            + "\n"
            + "Conflicting binary targets:\n "
            + "\n ".join(
                [
                    os.path.basename(bin_target.replace("//", "").replace(":", "/"))
                    for bin_target in bin_targets
                ]
            )
        )

    new_args = []
    replaced_already = []
    for arg in args[1:]:
        replaced = False
        for k, v in replacements.items():
            if v and v[0] is None:
                # Unresolved "+" arg (e.g. a feature flag like "+foo"):
                # leave it untouched for bazel to interpret.
                pass
            elif arg == k:
                if k not in replaced_already:
                    new_args.extend(v)
                    replaced_already.append(k)
                replaced = True
                break
        if not replaced:
            new_args.append(arg)

    if fileNameFilter:
        new_args.append("--test_arg=-fileNameFilter")
        new_args.append(f"--test_arg={'|'.join(fileNameFilter)}")

    wrapper_debug(f"plus interface time: {time.time() - start}")

    return new_args
|
||||
|
||||
|
||||
# Configure EngFlow auth before bazel runs (skipped for local-only configs
# and in CI -- see engflow_auth).
engflow_auth(sys.argv)

# Rewrite "+" test targets into real bazel targets / --test_arg filters.
args = test_runner_interface(
    sys.argv, autocomplete_query=os.environ.get("MONGO_AUTOCOMPLETE_QUERY") == "1"
)

# The calling wrapper consumes stdout as the replacement argument list.
print(" ".join(args), file=orig_stdout)
|
||||
|
|
@ -43,7 +43,7 @@ def find_all_failed(bin_path: str) -> list[str]:
|
|||
def lint_all(bin_path: str, generate_report: bool):
|
||||
files = find_all_failed(bin_path)
|
||||
result = lint(bin_path, files, generate_report)
|
||||
validate_bazel_groups(generate_report=generate_report, fix=False, quick=False)
|
||||
validate_bazel_groups(generate_report=generate_report, fix=False)
|
||||
return result
|
||||
|
||||
|
||||
|
|
@ -51,12 +51,7 @@ def fix_all(bin_path: str):
|
|||
files = find_all_failed(bin_path)
|
||||
fix(bin_path, files)
|
||||
print("Checking unittest rules...")
|
||||
validate_bazel_groups(generate_report=False, fix=True, quick=True)
|
||||
|
||||
|
||||
def fix_unittests(bin_path: str):
|
||||
print("Checking unittest rules (thorough)...")
|
||||
validate_bazel_groups(generate_report=False, fix=True, quick=False)
|
||||
validate_bazel_groups(generate_report=False, fix=True)
|
||||
|
||||
|
||||
def lint(bin_path: str, files: list[str], generate_report: bool):
|
||||
|
|
@ -99,7 +94,6 @@ def lint(bin_path: str, files: list[str], generate_report: bool):
|
|||
def fix(bin_path: str, files: list[str]):
    """Run the lint binary in fix mode over each file in *files*.

    Raises CalledProcessError if the fixer exits non-zero for any file.
    """
    for path in files:
        command = [bin_path, "--mode=fix", path]
        subprocess.run(command, check=True)

    print("Done fixing files")
|
||||
|
||||
|
||||
|
|
@ -131,11 +125,6 @@ def main():
|
|||
fix_all_parser = sub.add_parser("fix-all", help="Fix all files")
|
||||
fix_all_parser.set_defaults(subcommand="fix-all")
|
||||
|
||||
fix_all_parser = sub.add_parser(
|
||||
"fix-unittests", help="Fix all unittests without taking any short-cuts"
|
||||
)
|
||||
fix_all_parser.set_defaults(subcommand="fix-unittests")
|
||||
|
||||
lint_parser = sub.add_parser("lint", help="Lint specified list of files")
|
||||
lint_parser.add_argument("files", nargs="+")
|
||||
lint_parser.set_defaults(subcommand="lint")
|
||||
|
|
@ -167,8 +156,6 @@ def main():
|
|||
lint(binary_path, args.files, args.generate_report)
|
||||
elif subcommand == "fix":
|
||||
fix(binary_path, args.files)
|
||||
elif subcommand == "fix-unittests":
|
||||
fix_unittests(binary_path)
|
||||
else:
|
||||
# we purposefully do not use sub.choices.keys() so it does not print as a dict_keys object
|
||||
choices = [key for key in sub.choices]
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ def get_release_tag():
|
|||
return tag + f"_{os_sys}_{arch}"
|
||||
|
||||
|
||||
def install() -> str:
|
||||
def install(verbose: bool) -> str:
|
||||
binary_directory = os.path.expanduser("~/.local/bin")
|
||||
os.makedirs(binary_directory, exist_ok=True)
|
||||
binary_filename = "engflow_auth"
|
||||
|
|
@ -52,6 +52,7 @@ def install() -> str:
|
|||
if "windows" in tag:
|
||||
binary_path += ".exe"
|
||||
if os.path.exists(binary_path):
|
||||
if verbose:
|
||||
print(f"{binary_filename} already exists at {binary_path}, skipping download")
|
||||
else:
|
||||
url = GH_URL_PREFIX + tag
|
||||
|
|
@ -63,10 +64,11 @@ def install() -> str:
|
|||
return binary_path
|
||||
|
||||
|
||||
def update_bazelrc(binary_path: str):
|
||||
def update_bazelrc(binary_path: str, verbose: bool):
|
||||
norm_path = os.path.normpath(binary_path).replace("\\", "/")
|
||||
lines = []
|
||||
bazelrc_path = f"{os.path.expanduser('~')}/.bazelrc"
|
||||
if verbose:
|
||||
print(f"Updating {bazelrc_path}")
|
||||
if os.path.exists(bazelrc_path):
|
||||
with open(bazelrc_path, "r") as bazelrc:
|
||||
|
|
@ -80,7 +82,7 @@ def update_bazelrc(binary_path: str):
|
|||
bazelrc.writelines(lines)
|
||||
|
||||
|
||||
def authenticate(binary_path: str):
|
||||
def authenticate(binary_path: str, verbose: bool) -> bool:
|
||||
need_login = False
|
||||
p = subprocess.run(f"{binary_path} export {CLUSTER}", shell=True, capture_output=True)
|
||||
if p.returncode != 0:
|
||||
|
|
@ -90,8 +92,9 @@ def authenticate(binary_path: str):
|
|||
if datetime.now() > datetime.fromisoformat(expiry_iso):
|
||||
need_login = True
|
||||
if not need_login:
|
||||
if verbose:
|
||||
print("Already authenticated. Skipping authentication.")
|
||||
return
|
||||
return True
|
||||
|
||||
p = subprocess.Popen(
|
||||
f"{binary_path} login -store=file {CLUSTER}",
|
||||
|
|
@ -112,7 +115,7 @@ def authenticate(binary_path: str):
|
|||
if not login_url:
|
||||
print("CLI had unexpected output.")
|
||||
p.kill()
|
||||
return
|
||||
return False
|
||||
|
||||
print(
|
||||
f"On any device with a browser, login via the following link to complete EngFlow authentication:\n{login_url}"
|
||||
|
|
@ -126,12 +129,22 @@ def authenticate(binary_path: str):
|
|||
"Timed out waiting for login attempt. Failed to authenticate with EngFlow. Builds will be run locally..."
|
||||
)
|
||||
p.kill()
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def setup_auth(verbose: bool = True) -> bool:
    """Install the engflow_auth binary, authenticate, and wire it into bazelrc.

    Returns True on success; False when authentication fails, in which case
    the bazelrc update is skipped.
    """
    path = install(verbose)
    authenticated = authenticate(path, verbose)
    if not authenticated:
        return False
    update_bazelrc(path, verbose)
    return True
|
||||
|
||||
|
||||
def main():
|
||||
path = install()
|
||||
authenticate(path)
|
||||
update_bazelrc(path)
|
||||
return 0 if setup_auth() else 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
|
|
@ -20,6 +20,35 @@ _S3_HASH_MAPPING = {
|
|||
"https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-windows-amd64.exe": "d04555245a99dfb628e33da24e2b9198beb8f46d7e7661c313eb045f6a59f5e4",
|
||||
}
|
||||
|
||||
BUILDOZER_RELEASE_URL = "https://github.com/bazelbuild/buildtools/releases/download/v7.3.1/"
|
||||
|
||||
|
||||
def determine_platform():
    """Map platform.system() onto the release-artifact OS token.

    Returns one of "darwin", "windows", or "linux"; raises RuntimeError on
    any other system.
    """
    system_to_token = {
        "Darwin": "darwin",
        "Windows": "windows",
        "Linux": "linux",
    }
    token = system_to_token.get(platform.system())
    if token is None:
        raise RuntimeError("Platform cannot be inferred.")
    return token
|
||||
|
||||
|
||||
def determine_architecture():
    """Map platform.machine() onto the release-artifact CPU token.

    Returns "amd64" or "arm64"; raises RuntimeError for unsupported machines.
    """
    machine = platform.machine()
    arch_table = (
        (("AMD64", "x86_64"), "amd64"),
        (("arm", "arm64", "aarch64"), "arm64"),
    )
    for aliases, token in arch_table:
        if machine in aliases:
            return token
    raise RuntimeError(f"Detected architecture is not supported: {machine}")
|
||||
|
||||
|
||||
def _download_path_with_retry(*args, **kwargs):
|
||||
for i in range(5):
|
||||
|
|
@ -55,7 +84,24 @@ def _verify_s3_hash(s3_path: str, local_path: str) -> None:
|
|||
)
|
||||
|
||||
|
||||
def install_buildozer(download_location: str = "./"):
    """Download the buildozer release binary for the current platform.

    Args:
        download_location: directory to place the binary in.

    Returns:
        Path to the downloaded buildozer binary.

    Raises:
        RuntimeError: on windows/arm64, for which no release is published.
    """
    operating_system = determine_platform()
    architecture = determine_architecture()
    if operating_system == "windows" and architecture == "arm64":
        # BUG FIX: message previously said "buildifier"; this downloads buildozer.
        raise RuntimeError("There are no published arm windows releases for buildozer.")

    extension = ".exe" if operating_system == "windows" else ""
    binary_name = f"buildozer-{operating_system}-{architecture}{extension}"
    url = f"{BUILDOZER_RELEASE_URL}{binary_name}"

    file_location = os.path.join(download_location, f"buildozer{extension}")
    urllib.request.urlretrieve(url, file_location)
    # Owner read/write/execute: the wrapper invokes this binary directly.
    os.chmod(file_location, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    return file_location
|
||||
|
||||
|
||||
def install_bazel(binary_directory: str) -> str:
|
||||
install_buildozer(binary_directory)
|
||||
normalized_arch = (
|
||||
platform.machine().lower().replace("aarch64", "arm64").replace("x86_64", "amd64")
|
||||
)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,200 @@
|
|||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.append(".")
|
||||
|
||||
from bazel.wrapper_hook import BinAndSourceIncompatible, DuplicateSourceNames, test_runner_interface
|
||||
|
||||
|
||||
class Tests(unittest.TestCase):
    """Unit tests for the bazel wrapper-hook "+" test-target interface.

    Each test injects a fake get_buildozer_output so no real buildozer run
    is needed.
    """

    def test_single_source_file(self):
        # One "+source" target resolves to its owning binary plus a filter.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "test", "+source1"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == ["test", "//some:test", "--test_arg=-fileNameFilter", "--test_arg=source1"]

    def test_double_source_file(self):
        # Two sources of the same binary: one target, a combined "|" filter.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "test", "+source1", "+source2"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == [
            "test",
            "//some:test",
            "--test_arg=-fileNameFilter",
            "--test_arg=source1|source2",
        ]

    def test_duplicate_source_file(self):
        # Repeating the same "+source" arg must not duplicate target or filter.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "test", "+source1", "+source1"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == ["test", "//some:test", "--test_arg=-fileNameFilter", "--test_arg=source1"]

    def test_no_plus_targets(self):
        # Without "+" targets the args pass through untouched (argv[0] dropped).
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "test", "source1", "source1"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == ["test", "source1", "source1"]

    def test_plus_option(self):
        # A "+"-prefixed value that is not a known test ("+some_feature")
        # must be left alone for bazel to interpret.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = [
            "wrapper_hook",
            "test",
            "+source1",
            "+source2",
            "//some:other_target",
            "--features",
            "+some_feature",
        ]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == [
            "test",
            "//some:test",
            "//some:other_target",
            "--features",
            "+some_feature",
            "--test_arg=-fileNameFilter",
            "--test_arg=source1|source2",
        ]

    def test_single_bin_file(self):
        # "+<binary name>" runs the whole binary: no filter emitted.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "test", "+test"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == ["test", "//some:test"]

    def test_double_bin_file(self):
        # Multiple whole-binary targets are all expanded.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]\n//some:test2 [source3.cpp source4.cpp]"

        args = ["wrapper_hook", "test", "+test", "+test2"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == ["test", "//some:test", "//some:test2"]

    def test_bin_source_redundant_mix(self):
        # A source target inside an already-requested binary is dropped.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "test", "+test", "+source2"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == ["test", "//some:test"]

    def test_bin_source_mix(self):
        # Mixing a source filter with a *different* binary is an error.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]\n//some:test2 [source3.cpp source4.cpp]"

        args = ["wrapper_hook", "test", "+test", "+source3"]

        with self.assertRaises(BinAndSourceIncompatible):
            test_runner_interface(args, False, buildozer_output)

    def test_duplicate_source_names(self):
        # Two binaries owning same-named sources must be rejected.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]\n//some:test2 [source1.cpp source4.cpp]"

        args = ["wrapper_hook", "test", "+test", "+source3"]

        with self.assertRaises(DuplicateSourceNames):
            test_runner_interface(args, False, buildozer_output)

    def test_autocomplete(self):
        # Autocomplete mode passes args through (writes /tmp candidates file).
        if "linux" not in sys.platform:
            self.skipTest("Skipping because not linux")

        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp]"

        args = ["wrapper_hook", "query", "some_autocomplete_query", "+wrench", "+source1"]

        result = test_runner_interface(args, True, buildozer_output)

        assert result == ["query", "some_autocomplete_query", "+wrench", "+source1"]

    def test_select_statement(self):
        # Sources listed inside select() branches are parsed as well.
        def buildozer_output(autocomplete_query):
            return """//some/select:test [
    "source1.cpp",
] + select({
    "//some:config": [
        "source2.cpp",
    ],
    "//some:other_config": [
        "source3.cpp",
    ],
}) + [
    "source4.cpp",
    "source5.cpp",
]"""

        args = ["wrapper_hook", "+source1", "+source2", "+source3", "+source4"]

        result = test_runner_interface(args, False, buildozer_output)
        assert result == [
            "//some/select:test",
            "--test_arg=-fileNameFilter",
            "--test_arg=source1|source2|source3|source4",
        ]

    def test_c_extensions(self):
        # Only .c/.cc/.cpp count as test sources; "+source2" (.h) stays as-is.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.c source2.h source3.cpp source4.cc]"

        args = ["wrapper_hook", "test", "+source1", "+source2", "+source3", "+source4"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == [
            "test",
            "//some:test",
            "+source2",
            "--test_arg=-fileNameFilter",
            "--test_arg=source1|source3|source4",
        ]

    def test_prefixes(self):
        # "+", ":+", and "//:+" prefixes are all accepted and normalized.
        def buildozer_output(autocomplete_query):
            return "//some:test [source1.cpp source2.cpp source3.cpp s+ource4.cpp]"

        args = ["wrapper_hook", "test", "//:+source1", ":+source2", "+source3"]

        result = test_runner_interface(args, False, buildozer_output)

        assert result == [
            "test",
            "//some:test",
            "--test_arg=-fileNameFilter",
            "--test_arg=source1|source2|source3",
        ]
|
||||
|
||||
|
||||
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
||||
|
|
@ -116,16 +116,11 @@ def find_group(unittest_paths):
|
|||
return json.dumps(group_to_path, indent=4)
|
||||
|
||||
|
||||
def validate_bazel_groups(generate_report, fix, quick):
|
||||
def validate_bazel_groups(generate_report, fix):
|
||||
buildozer = download_buildozer()
|
||||
|
||||
bazel_bin = install_bazel(".")
|
||||
|
||||
if quick:
|
||||
print(
|
||||
"Checking unittests in quick mode, you may consider running 'fix-unittests' for a thorough (longer) check."
|
||||
)
|
||||
|
||||
query_opts = [
|
||||
"--implicit_deps=False",
|
||||
"--tool_deps=False",
|
||||
|
|
@ -135,36 +130,6 @@ def validate_bazel_groups(generate_report, fix, quick):
|
|||
"--bes_backend=",
|
||||
"--bes_results_url=",
|
||||
]
|
||||
try:
|
||||
start = time.time()
|
||||
sys.stdout.write("Query all unittest runner rules... ")
|
||||
sys.stdout.flush()
|
||||
query_proc = subprocess.run(
|
||||
[
|
||||
bazel_bin,
|
||||
"cquery",
|
||||
'kind("mongo_install_rule", //:all-targets)',
|
||||
"--output",
|
||||
"build",
|
||||
]
|
||||
+ query_opts,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
sys.stdout.write("{:0.2f}s\n".format(time.time() - start))
|
||||
installed_tests = query_proc.stdout.splitlines()
|
||||
installed_test_names = set()
|
||||
for i in range(2, len(installed_tests)):
|
||||
if 'generator_function = "mongo_unittest_install' in installed_tests[i]:
|
||||
test_name = installed_tests[i - 1].split('"')[1]
|
||||
installed_test_names.add(test_name)
|
||||
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print("BAZEL ERROR:")
|
||||
print(exc.stdout)
|
||||
print(exc.stderr)
|
||||
sys.exit(exc.returncode)
|
||||
|
||||
try:
|
||||
start = time.time()
|
||||
|
|
@ -189,86 +154,7 @@ def validate_bazel_groups(generate_report, fix, quick):
|
|||
print(exc.stderr)
|
||||
sys.exit(exc.returncode)
|
||||
|
||||
buildozer_delete_cmds = []
|
||||
buildozer_add_cmds = []
|
||||
buildozer_update_cmds = []
|
||||
nodiff_build_files = set()
|
||||
diff_build_files = set()
|
||||
|
||||
for test in bazel_unittests:
|
||||
test_name = test.split(":")[1]
|
||||
if test_name not in installed_test_names:
|
||||
buildozer_add_cmds += [f"new mongo_unittest_install {test_name}"]
|
||||
buildozer_update_cmds += [[f"add srcs {test}", f"//:{test_name}"]]
|
||||
else:
|
||||
installed_test_names.remove(test_name)
|
||||
|
||||
if quick:
|
||||
build_file = (
|
||||
os.path.dirname(test.replace("//", "./").replace(":", "/")) + "/BUILD.bazel"
|
||||
)
|
||||
if build_file not in diff_build_files and build_file not in nodiff_build_files:
|
||||
cmd = [
|
||||
"git",
|
||||
"diff",
|
||||
"master",
|
||||
os.path.dirname(test.replace("//", "./").replace(":", "/")) + "/BUILD.bazel",
|
||||
]
|
||||
stdout = subprocess.run(cmd, capture_output=True, text=True).stdout
|
||||
|
||||
if build_file in nodiff_build_files or not stdout:
|
||||
nodiff_build_files.add(build_file)
|
||||
associated_files = []
|
||||
# print(installed_test_names)
|
||||
for name in installed_test_names:
|
||||
if name.startswith(test_name + "-"):
|
||||
associated_files.append(name)
|
||||
for name in associated_files:
|
||||
installed_test_names.remove(name)
|
||||
continue
|
||||
else:
|
||||
diff_build_files.add(build_file)
|
||||
|
||||
try:
|
||||
start = time.time()
|
||||
sys.stdout.write(f"Query tests in {test}... ")
|
||||
sys.stdout.flush()
|
||||
query_proc = subprocess.run(
|
||||
[
|
||||
bazel_bin,
|
||||
"query",
|
||||
f"labels(srcs, {test}_with_debug)",
|
||||
]
|
||||
+ query_opts,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
sys.stdout.write("{:0.2f}s\n".format(time.time() - start))
|
||||
sources = query_proc.stdout.splitlines()
|
||||
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print("BAZEL ERROR:")
|
||||
print(exc.stdout)
|
||||
print(exc.stderr)
|
||||
sys.exit(exc.returncode)
|
||||
|
||||
for source in sources:
|
||||
if source.endswith(".cpp"):
|
||||
source_base = source.split(":")
|
||||
if len(source_base) > 1:
|
||||
source_base = source_base[1]
|
||||
else:
|
||||
source_base = source_base[0]
|
||||
source_base = source_base.replace(".cpp", "")
|
||||
if f"{test_name}-{source_base}" not in installed_test_names:
|
||||
buildozer_add_cmds += [f"new mongo_unittest_install {test_name}-{source_base}"]
|
||||
buildozer_update_cmds += [[f"add srcs {test}", f"//:{test_name}-{source_base}"]]
|
||||
else:
|
||||
installed_test_names.remove(f"{test_name}-{source_base}")
|
||||
|
||||
for existing_test in installed_test_names:
|
||||
buildozer_delete_cmds += ["delete", existing_test]
|
||||
|
||||
groups = json.loads(find_group(bazel_unittests))
|
||||
failures = []
|
||||
|
|
@ -323,15 +209,9 @@ def validate_bazel_groups(generate_report, fix, quick):
|
|||
]
|
||||
|
||||
if fix:
|
||||
if buildozer_delete_cmds:
|
||||
subprocess.run([buildozer] + buildozer_delete_cmds)
|
||||
subprocess.run([buildozer] + buildozer_add_cmds + ["//:__pkg__"])
|
||||
for cmd in buildozer_update_cmds:
|
||||
subprocess.run([buildozer] + cmd)
|
||||
|
||||
if buildozer_delete_cmds or buildozer_add_cmds:
|
||||
failures.append(["unittest install rules", "Some install rules are incorrect"])
|
||||
|
||||
if failures:
|
||||
for failure in failures:
|
||||
if generate_report:
|
||||
|
|
@ -346,7 +226,7 @@ def main():
|
|||
parser.add_argument("--generate-report", default=False, action="store_true")
|
||||
parser.add_argument("--fix", default=False, action="store_true")
|
||||
args = parser.parse_args()
|
||||
validate_bazel_groups(args.generate_report, args.fix, quick=False)
|
||||
validate_bazel_groups(args.generate_report, args.fix)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
|
|
@ -1,232 +0,0 @@
|
|||
cxx_library(
|
||||
name='zstd',
|
||||
header_namespace='',
|
||||
exported_headers=['zstd.h'],
|
||||
visibility=['PUBLIC'],
|
||||
deps=[
|
||||
':common',
|
||||
':compress',
|
||||
':decompress',
|
||||
':deprecated',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='compress',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('compress', 'zstd*.h'),
|
||||
]),
|
||||
srcs=glob(['compress/zstd*.c', 'compress/hist.c']),
|
||||
deps=[':common'],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='decompress',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
headers=subdir_glob([
|
||||
('decompress', '*_impl.h'),
|
||||
]),
|
||||
srcs=glob(['decompress/zstd*.c']),
|
||||
deps=[
|
||||
':common',
|
||||
':legacy',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='deprecated',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('deprecated', '*.h'),
|
||||
]),
|
||||
srcs=glob(['deprecated/*.c']),
|
||||
deps=[':common'],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='legacy',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('legacy', '*.h'),
|
||||
]),
|
||||
srcs=glob(['legacy/*.c']),
|
||||
deps=[':common'],
|
||||
exported_preprocessor_flags=[
|
||||
'-DZSTD_LEGACY_SUPPORT=4',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='zdict',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=['zdict.h'],
|
||||
headers=subdir_glob([
|
||||
('dictBuilder', 'divsufsort.h'),
|
||||
('dictBuilder', 'cover.h'),
|
||||
]),
|
||||
srcs=glob(['dictBuilder/*.c']),
|
||||
deps=[':common'],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='compiler',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'compiler.h'),
|
||||
]),
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='cpu',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'cpu.h'),
|
||||
]),
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='bitstream',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'bitstream.h'),
|
||||
]),
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='entropy',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'fse.h'),
|
||||
('common', 'huf.h'),
|
||||
]),
|
||||
srcs=[
|
||||
'common/entropy_common.c',
|
||||
'common/fse_decompress.c',
|
||||
'compress/fse_compress.c',
|
||||
'compress/huf_compress.c',
|
||||
'decompress/huf_decompress.c',
|
||||
],
|
||||
deps=[
|
||||
':debug',
|
||||
':bitstream',
|
||||
':compiler',
|
||||
':errors',
|
||||
':mem',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='errors',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=[
|
||||
'zstd_errors.h',
|
||||
'common/error_private.h',
|
||||
]
|
||||
srcs=['common/error_private.c'],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='mem',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'mem.h'),
|
||||
]),
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='pool',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'pool.h'),
|
||||
]),
|
||||
srcs=['common/pool.c'],
|
||||
deps=[
|
||||
':threading',
|
||||
':zstd_common',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='threading',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'threading.h'),
|
||||
]),
|
||||
srcs=['common/threading.c'],
|
||||
exported_preprocessor_flags=[
|
||||
'-DZSTD_MULTITHREAD',
|
||||
],
|
||||
exported_linker_flags=[
|
||||
'-pthread',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='xxhash',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'xxhash.h'),
|
||||
]),
|
||||
srcs=['common/xxhash.c'],
|
||||
exported_preprocessor_flags=[
|
||||
'-DXXH_NAMESPACE=ZSTD_',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='zstd_common',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('', 'zstd.h'),
|
||||
('common', 'zstd_internal.h'),
|
||||
]),
|
||||
srcs=['common/zstd_common.c'],
|
||||
deps=[
|
||||
':compiler',
|
||||
':errors',
|
||||
':mem',
|
||||
],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='debug',
|
||||
header_namespace='',
|
||||
visibility=['PUBLIC'],
|
||||
exported_headers=subdir_glob([
|
||||
('common', 'debug.h'),
|
||||
]),
|
||||
srcs=['common/debug.c'],
|
||||
)
|
||||
|
||||
cxx_library(
|
||||
name='common',
|
||||
deps=[
|
||||
':debug',
|
||||
':bitstream',
|
||||
':compiler',
|
||||
':cpu',
|
||||
':entropy',
|
||||
':errors',
|
||||
':mem',
|
||||
':pool',
|
||||
':threading',
|
||||
':xxhash',
|
||||
':zstd_common',
|
||||
]
|
||||
)
|
||||
90
tools/bazel
90
tools/bazel
|
|
@ -71,4 +71,92 @@ printf '%s\n' "${bazelrc_xcode_lines[@]}" > .bazelrc.xcode
|
|||
|
||||
echo "common --//bazel/config:running_through_bazelisk" > .bazelrc.bazelisk
|
||||
|
||||
exec "$bazel_real" "$@"
|
||||
if [[ $MONGO_BAZEL_WRAPPER_DEBUG == 1 ]]; then
|
||||
wrapper_start_time="$(date -u +%s.%N)"
|
||||
fi
|
||||
|
||||
cur_dir="${PWD##*/}"
|
||||
|
||||
if [[ "$OSTYPE" == "linux"* ]]; then
|
||||
os="linux"
|
||||
elif [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
os="macos"
|
||||
else
|
||||
echo "Unsupported OS $OSTYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ARCH=$(uname -m)
|
||||
if [[ "$ARCH" == "arm64" || "$ARCH" == "aarch64" ]]; then
|
||||
ARCH="arm64"
|
||||
elif [[ "$ARCH" == "ppc64le" || "$ARCH" == "ppc64" || "$ARCH" == "ppc" || "$ARCH" == "ppcle" ]]; then
|
||||
ARCH="ppc64le"
|
||||
elif [[ "$ARCH" == "s390x" || "$ARCH" == "s390" ]]; then
|
||||
ARCH="s390x"
|
||||
else
|
||||
ARCH="x86_64"
|
||||
fi
|
||||
|
||||
python="bazel-$cur_dir/external/py_${os}_${ARCH}/dist/bin/python3"
|
||||
|
||||
wrapper_deps=("retry")
|
||||
declare -a wrapper_deps_to_install=()
|
||||
declare -a python_path=()
|
||||
for dep in ${wrapper_deps[@]}; do
|
||||
python_path+=("bazel-bin/external/poetry/$dep")
|
||||
if [ ! -d bazel-bin/external/poetry/$dep ] || [ ! -d bazel-$cur_dir/external/py_${os}_${ARCH}/dist/bin ] ; then
|
||||
wrapper_deps_to_install+=(@poetry//:install_$dep)
|
||||
fi
|
||||
done
|
||||
|
||||
declare -a cert_locs=()
|
||||
cert_locs+=("/etc/ssl/certs/ca-certificates.crt") # Debian/Ubuntu/Gentoo etc.
|
||||
cert_locs+=("/etc/pki/tls/certs/ca-bundle.crt") # Fedora/RHEL 6
|
||||
cert_locs+=("/etc/ssl/ca-bundle.pem") # OpenSUSE
|
||||
cert_locs+=("/etc/pki/tls/cacert.pem") # OpenELEC
|
||||
cert_locs+=("/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem") # CentOS/RHEL 7
|
||||
cert_locs+=("/etc/ssl/cert.pem") # Alpine Linux
|
||||
|
||||
for cert in ${cert_locs[@]}; do
|
||||
if [ -f $cert ]; then
|
||||
export SSL_CERT_DI=R$(dirname $cert)
|
||||
export SSL_CERT_FILE=$cert
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#wrapper_deps_to_install[@]} != 0 ]]; then
|
||||
>&2 echo "python prereq missing, using bazel to install python..."
|
||||
>&2 $bazel_real build --config=local $(IFS= ; echo "${wrapper_deps_to_install[*]}")
|
||||
fi
|
||||
|
||||
autocomplete_query=0
|
||||
# bash autocomplete detection
|
||||
if [[ $* =~ ^.*--output_base=/tmp/.*-completion-$USER.*$ ]]; then
|
||||
autocomplete_query=1
|
||||
fi
|
||||
# zsh autocomplete detection
|
||||
if [[ $* == *"--noblock_for_lock query kind(\".*_test\", //:all)"* ]] || [[ $* == *"--noblock_for_lock query kind(\".*_test\", :all)"* ]]; then
|
||||
autocomplete_query=1
|
||||
fi
|
||||
|
||||
rm -f /tmp/mongo_autocomplete_plus_targets
|
||||
python="bazel-$cur_dir/external/py_${os}_${ARCH}/dist/bin/python3"
|
||||
new_args=$(MONGO_AUTOCOMPLETE_QUERY=$autocomplete_query PYTHONPATH=$(IFS=: ; echo "${python_path[*]}") $python bazel/wrapper_hook.py "$@")
|
||||
|
||||
if [[ $MONGO_BAZEL_WRAPPER_DEBUG == 1 ]] && [[ $autocomplete_query == 0 ]]; then
|
||||
wrapper_end_time="$(date -u +%s.%N)"
|
||||
runtime=$(bc <<< "$wrapper_end_time - $wrapper_start_time")
|
||||
runtime=$(printf "%0.3f" $runtime)
|
||||
echo "[WRAPPER_HOOK_DEBUG]: wrapper hook script input args: $@"
|
||||
echo "[WRAPPER_HOOK_DEBUG]: wrapper hook script new args: $new_args"
|
||||
echo "[WRAPPER_HOOK_DEBUG]: wrapper hook script took $runtime seconds"
|
||||
fi
|
||||
|
||||
if [[ $autocomplete_query == 1 ]]; then
|
||||
plus_targets=$(</tmp/mongo_autocomplete_plus_targets)
|
||||
query_output=$("$bazel_real" $new_args)
|
||||
echo $query_output $plus_targets | tr " " "\n"
|
||||
else
|
||||
exec "$bazel_real" $new_args
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -1,3 +1,68 @@
|
|||
@echo off
|
||||
setlocal EnableDelayedExpansion
|
||||
|
||||
echo common --//bazel/config:running_through_bazelisk > .bazelrc.bazelisk
|
||||
"%BAZEL_REAL%" %*
|
||||
|
||||
for %%I in (.) do set cur_dir=%%~nxI
|
||||
|
||||
set python="bazel-%cur_dir%\external\py_windows_x86_64\dist\python.exe"
|
||||
|
||||
set "wrapper_deps="
|
||||
set "wrapper_deps_to_install="
|
||||
set "PYTHONPATH="
|
||||
|
||||
set "wrapper_deps=!wrapper_deps! retry"
|
||||
REM set "wrapper_deps=!wrapper_deps! example_append"
|
||||
set len=0
|
||||
(for %%d in (!wrapper_deps!) do (
|
||||
set "PYTHONPATH=%PYTHONPATH%;bazel-bin/external/poetry/%%d"
|
||||
if not exist "bazel-bin\external\poetry/%%d" (
|
||||
set /a len+=1
|
||||
set "wrapper_deps_to_install=!wrapper_deps_to_install! @poetry//:install_%%d"
|
||||
) else (
|
||||
if not exist "%python%" (
|
||||
set /a len+=1
|
||||
set "wrapper_deps_to_install=!wrapper_deps_to_install! @poetry//:install_%%d"
|
||||
)
|
||||
)
|
||||
))
|
||||
|
||||
if %len% gtr 0 (
|
||||
echo python prereq missing, using bazel to install python... 1>&2
|
||||
"%BAZEL_REAL%" build %wrapper_deps_to_install% 1>&2
|
||||
)
|
||||
SET STARTTIME=%TIME%
|
||||
|
||||
set "uniqueFileName=%tmp%\bat~%RANDOM%.tmp"
|
||||
%python% bazel/wrapper_hook.py %* > %uniqueFileName%
|
||||
for /f "Tokens=* Delims=" %%x in ( %uniqueFileName% ) do set "new_args=!new_args!%%x"
|
||||
del %uniqueFileName%
|
||||
|
||||
REM Final Calculations
|
||||
SET ENDTIME=%TIME%
|
||||
FOR /F "tokens=1-4 delims=:.," %%a IN ("%STARTTIME%") DO (
|
||||
SET /A "start=(((%%a*60)+1%%b %% 100)*60+1%%c %% 100)*100+1%%d %% 100"
|
||||
)
|
||||
|
||||
FOR /F "tokens=1-4 delims=:.," %%a IN ("%ENDTIME%") DO (
|
||||
SET /A "end=(((%%a*60)+1%%b %% 100)*60+1%%c %% 100)*100+1%%d %% 100"
|
||||
)
|
||||
|
||||
REM Calculate the elapsed time by subtracting values
|
||||
SET /A elapsed=end-start
|
||||
|
||||
REM Format the results for output
|
||||
SET /A hh=elapsed/(60*60*100), rest=elapsed%%(60*60*100), mm=rest/(60*100), rest%%=60*100, ss=rest/100, cc=rest%%100
|
||||
IF %hh% lss 10 SET hh=0%hh%
|
||||
IF %mm% lss 10 SET mm=0%mm%
|
||||
IF %ss% lss 10 SET ss=0%ss%
|
||||
IF %cc% lss 10 SET cc=0%cc%
|
||||
SET DURATION=%mm%m and %ss%.%cc%s
|
||||
|
||||
if "%MONGO_BAZEL_WRAPPER_DEBUG%"=="1" (
|
||||
ECHO [WRAPPER_HOOK_DEBUG]: wrapper hook script input args: %*
|
||||
ECHO [WRAPPER_HOOK_DEBUG]: wrapper hook script new args: !new_args!
|
||||
ECHO [WRAPPER_HOOK_DEBUG]: wrapper hook script took %DURATION%
|
||||
)
|
||||
|
||||
"%BAZEL_REAL%" !new_args!
|
||||
|
|
|
|||
Loading…
Reference in New Issue