mirror of https://github.com/mongodb/mongo
SERVER-111295 [v8.2] Set python as formatter in format_multirun (#41680)
GitOrigin-RevId: e47fadfd8bfc0835d7d121df0ce2073d43f2e898
This commit is contained in:
parent 3c713fd09f
commit eb9af3a1da
@@ -6,6 +6,7 @@
 external rules-lint-ignored=true
 **/*.tpl.h rules-lint-ignored=true
 **/*.tpl.cpp rules-lint-ignored=true
+rpm/*.spec rules-lint-ignored=true
 src/mongo/bson/column/bson_column_compressed_data.inl rules-lint-ignored=true
 *.idl linguist-language=yaml
@@ -17,11 +17,15 @@ bazel_cache = os.path.expanduser(args.bazel_cache)
 # the cc_library and cc_binaries in our build. There is not a good way from
 # within the build to get all those targets, so we will generate the list via query
 # https://sig-product-docs.synopsys.com/bundle/coverity-docs/page/coverity-analysis/topics/building_with_bazel.html#build_with_bazel
-cmd = [
+cmd = (
+    [
     bazel_executable,
     bazel_cache,
     "aquery",
-] + bazel_cmd_args + [args.bazel_query]
+    ]
+    + bazel_cmd_args
+    + [args.bazel_query]
+)
 print(f"Running command: {cmd}")
 proc = subprocess.run(
     cmd,
@@ -33,9 +37,7 @@ proc = subprocess.run(
 print(proc.stderr)
 
 targets = set()
-with open('coverity_targets.list', 'w') as f:
+with open("coverity_targets.list", "w") as f:
     for line in proc.stdout.splitlines():
         if line.startswith(" Target: "):
             f.write(line.split()[-1] + "\n")
-
-
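The cmd rewrite above is formatting-only: ruff's parenthesized concatenation builds exactly the same argument list. A minimal sketch, with stand-in values for the variables in the hunk, confirming the equivalence:

```python
# Both spellings from the hunk build the identical list; the values here are
# stand-ins for the script's real variables.
bazel_executable = "bazel"
bazel_cache = "--output_user_root=/tmp/bazel-cache"
bazel_cmd_args = ["--config=local"]
bazel_query = "deps(//src/...)"

old_cmd = [bazel_executable, bazel_cache, "aquery"] + bazel_cmd_args + [bazel_query]
new_cmd = (
    [bazel_executable, bazel_cache, "aquery"]
    + bazel_cmd_args
    + [bazel_query]
)
assert old_cmd == new_cmd
```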
@@ -35,6 +35,7 @@ format_multirun(
     graphql = "//:prettier",
     html = "//:prettier",
     markdown = "//:prettier",
+    python = "@aspect_rules_lint//format:ruff",
     shell = "@shfmt//:shfmt",
     sql = "//:prettier",
     starlark = "@buildifier_prebuilt//:buildifier",
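This hunk is the substantive change of the commit: format_multirun (from aspect_rules_lint) gains a python formatter pointing at the rule set's bundled ruff target, and the rest of the diff is the one-time reformat that follows. As a hedged sketch, the effect is roughly equivalent to running ruff's formatter over the tree; the snippet below assumes a ruff binary on PATH, whereas the real target is resolved hermetically through @aspect_rules_lint//format:ruff:

```python
# Hedged sketch: approximately what the new python formatter entry does,
# namely run `ruff format` over the given files. Assumes `ruff` is on PATH;
# the actual Bazel target wires in ruff via @aspect_rules_lint.
import subprocess
import sys


def format_python(paths: list[str]) -> int:
    proc = subprocess.run(["ruff", "format", *paths], capture_output=True, text=True)
    if proc.returncode != 0:
        print(proc.stderr, file=sys.stderr)
    return proc.returncode


if __name__ == "__main__":
    sys.exit(format_python(sys.argv[1:]))
```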
@@ -45,7 +45,6 @@ def run_pty_command(cmd):
 
 
 def generate_compiledb(bazel_bin, persistent_compdb, enterprise):
-
     # compiledb ignores command line args so just make a version rc file in anycase
     write_mongo_variables_bazelrc([])
     if persistent_compdb:
@@ -71,7 +71,7 @@ def write_workstation_bazelrc(args):
 
     filtered_args = args[1:]
     if "--" in filtered_args:
-        filtered_args = filtered_args[:filtered_args.index("--")] + ["--", "(REDACTED)"]
+        filtered_args = filtered_args[: filtered_args.index("--")] + ["--", "(REDACTED)"]
 
     developer_build = os.environ.get("CI") is None
     filtered_command_line = " ".join(filtered_args)
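The only change here is ruff's preferred spacing for slices whose bound is a non-trivial expression; behavior is identical. For reference, a small self-contained demo of the redaction logic being reformatted:

```python
# Demo of the unchanged redaction logic: everything after "--" is replaced
# by a single "(REDACTED)" marker before the command line is logged.
args = ["bazel", "build", "--config=dbg", "--", "secret_target", "--token=abc"]
filtered_args = args[1:]
if "--" in filtered_args:
    filtered_args = filtered_args[: filtered_args.index("--")] + ["--", "(REDACTED)"]
print(" ".join(filtered_args))  # build --config=dbg -- (REDACTED)
```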
@@ -13,6 +13,7 @@ ARCH_NORMALIZE_MAP = {
     "s390x": "s390x",
 }
 
+
 def get_mongo_arch(args):
     arch = platform.machine().lower()
     if arch in ARCH_NORMALIZE_MAP:
@@ -20,14 +21,16 @@ def get_mongo_arch(args):
     else:
         return arch
 
+
 def get_mongo_version(args):
     proc = subprocess.run(["git", "describe", "--abbrev=0"], capture_output=True, text=True)
     return proc.stdout.strip()[1:]
 
+
 def write_mongo_variables_bazelrc(args):
     mongo_version = get_mongo_version(args)
     mongo_arch = get_mongo_arch(args)
 
     repo_root = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent
     version_file = os.path.join(repo_root, ".bazelrc.mongo_variables")
     existing_hash = ""
@@ -42,4 +45,4 @@ common --define=MONGO_VERSION={mongo_version}
     current_hash = hashlib.md5(bazelrc_contents.encode()).hexdigest()
     if existing_hash != current_hash:
         with open(version_file, "w", encoding="utf-8") as f:
             f.write(bazelrc_contents)
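These hunks only add PEP 8 blank lines, but the surrounding code shows a useful write-if-changed pattern: hash the generated .bazelrc.mongo_variables contents and rewrite the file only when the hash differs, so Bazel is not forced to re-analyze on every run. A self-contained sketch of that pattern, with a hypothetical path and contents:

```python
# Write-if-changed sketch based on the pattern in this hunk: md5 the new
# contents, compare with what is on disk, and skip the write when equal.
# The path and contents below are hypothetical examples.
import hashlib
import os


def write_if_changed(path: str, contents: str) -> bool:
    existing_hash = ""
    if os.path.exists(path):
        with open(path, encoding="utf-8") as f:
            existing_hash = hashlib.md5(f.read().encode()).hexdigest()
    current_hash = hashlib.md5(contents.encode()).hexdigest()
    if existing_hash != current_hash:
        with open(path, "w", encoding="utf-8") as f:
            f.write(contents)
        return True
    return False


write_if_changed("/tmp/.bazelrc.mongo_variables", "common --define=MONGO_VERSION=8.2.0\n")
```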
@@ -105,6 +105,7 @@ def validate_help(exe_path):
         print(f"Error while calling help for {exe_path}: {e}")
         sys.exit(1)
 
+
 # Make sure we have a proper git version in the windows release
 def validate_version(exe_path):
     try:
@@ -124,6 +125,7 @@ def validate_version(exe_path):
         print(f"Error while calling version for {exe_path}: {e}")
         sys.exit(1)
 
+
 def main():
     if len(sys.argv) != 2:
         print("Usage: python msi_validation.py <path_to_msi>")
@@ -533,12 +533,16 @@ def get_edition_alias(edition_name: str) -> str:
         return "org"
     return edition_name
 
+
 def validate_top_level_directory(tar_name: str):
     command = f"tar -tf {tar_name} | head -n 1 | awk -F/ '{{print $1}}'"
     proc = subprocess.run(command, capture_output=True, shell=True, text=True)
     top_level_directory = proc.stdout.strip()
     if all(os_arch not in top_level_directory for os_arch in VALID_TAR_DIRECTORY_ARCHITECTURES):
-        raise Exception(f"Found an unexpected os-arch pairing as the top level directory. Top level directory: {top_level_directory}")
+        raise Exception(
+            f"Found an unexpected os-arch pairing as the top level directory. Top level directory: {top_level_directory}"
+        )
 
     arches: Set[str] = set()
     oses: Set[str] = set()
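The reflowed raise wraps a check that shells out to tar | head | awk for the archive's top-level directory. Purely as an illustrative alternative, and not what the script does, the same value can be read without a shell using the standard tarfile module:

```python
# Illustrative alternative to the shell pipeline in the hunk: read the first
# tar member and take its first path component. The script itself keeps the
# tar | head | awk pipeline.
import tarfile


def top_level_directory(tar_name: str) -> str:
    with tarfile.open(tar_name) as tar:
        first = tar.next()  # first member only; avoids scanning the archive
        return first.name.split("/")[0] if first else ""
```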
@@ -50,6 +50,7 @@ DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon", "amazon2", "amazon202
 
 unexpected_lts_release_series = ("8.2",)
 
+
 def get_suffix(version, stable_name: str, unstable_name: str) -> str:
     parts = version.split(".")
 
@@ -59,11 +60,12 @@ def get_suffix(version, stable_name: str, unstable_name: str) -> str:
     series = f"{major}.{minor}"
 
     if major >= 5:
-        is_stable_version = (minor == 0 or series in unexpected_lts_release_series)
+        is_stable_version = minor == 0 or series in unexpected_lts_release_series
         return stable_name if is_stable_version else unstable_name
     else:
         return stable_name if minor % 2 == 0 else unstable_name
 
+
 class Spec(object):
     """Spec class."""
 
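The is_stable_version change just drops redundant parentheses; the suffix logic is untouched. It reads: for 5.0+ versions, an x.0 release is stable, as is any series explicitly listed in unexpected_lts_release_series (here 8.2), while older versions fall back to the even/odd minor convention. A runnable sketch of exactly that decision, with the major/minor parsing condensed from the surrounding function:

```python
# Condensed, runnable version of the decision this hunk reformats.
unexpected_lts_release_series = ("8.2",)


def get_suffix(version: str, stable_name: str, unstable_name: str) -> str:
    major, minor = (int(p) for p in version.split(".")[:2])
    series = f"{major}.{minor}"
    if major >= 5:
        is_stable_version = minor == 0 or series in unexpected_lts_release_series
        return stable_name if is_stable_version else unstable_name
    else:
        return stable_name if minor % 2 == 0 else unstable_name


assert get_suffix("8.0.0", "-org", "-org-unstable") == "-org"
assert get_suffix("8.1.0", "-org", "-org-unstable") == "-org-unstable"
assert get_suffix("8.2.0", "-org", "-org-unstable") == "-org"  # the LTS exception
```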
@@ -23,6 +23,7 @@ def read_sha_file(filename):
         content = f.read()
     return content.strip().split()[0]
 
+
 def _fetch_remote_sha256_hash(s3_path: str):
     downloaded = False
     result = None
@@ -41,7 +42,7 @@ def _fetch_remote_sha256_hash(s3_path: str):
 
     if downloaded:
         result = read_sha_file(tempfile_name)
 
     if tempfile_name and os.path.exists(tempfile_name):
         os.unlink(tempfile_name)
 
@@ -63,13 +64,14 @@ def _verify_s3_hash(s3_path: str, local_path: str, expected_hash: str) -> None:
             f"Hash mismatch for {s3_path}, expected {expected_hash} but got {hash_string}"
         )
 
+
 def validate_file(s3_path, output_path, remote_sha_allowed):
     hexdigest = S3_SHA256_HASHES.get(s3_path)
     if hexdigest:
         print(f"Validating against hard coded sha256: {hexdigest}")
         _verify_s3_hash(s3_path, output_path, hexdigest)
         return True
 
     if not remote_sha_allowed:
         raise ValueError(f"No SHA256 hash available for {s3_path}")
 
@@ -82,13 +84,13 @@ def validate_file(s3_path, output_path, remote_sha_allowed):
         print(f"Validating against remote sha256 {hexdigest}\n({s3_path}.sha256)")
     else:
         print(f"Failed to download remote sha256 at {s3_path}.sha256)")
 
     if hexdigest:
         _verify_s3_hash(s3_path, output_path, hexdigest)
         return True
     else:
         raise ValueError(f"No SHA256 hash available for {s3_path}")
 
 
 def _download_and_verify(s3_path, output_path, remote_sha_allowed):
     for i in range(5):
@@ -98,7 +100,7 @@ def _download_and_verify(s3_path, output_path, remote_sha_allowed):
             download_from_s3_with_boto(s3_path, output_path)
         except Exception:
             download_from_s3_with_requests(s3_path, output_path)
 
         validate_file(s3_path, output_path, remote_sha_allowed)
         break
 
@@ -155,8 +157,6 @@ def download_s3_binary(
 
 
 if __name__ == "__main__":
-
-
     parser = argparse.ArgumentParser(description="Download and verify S3 binary.")
     parser.add_argument("s3_path", help="S3 URL to download from")
     parser.add_argument("local_path", nargs="?", help="Optional output file path")
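The hunks in this file are blank-line-only, but they frame the download-and-verify flow: fetch the artifact (boto, falling back to requests), read the first token of the .sha256 sidecar, compare it against a locally computed digest, and retry up to five times. A compact sketch of the verification step, with hypothetical file paths:

```python
# Compact sketch of the verify step implemented by read_sha_file and
# _verify_s3_hash in this file: compare the sidecar's first token with a
# locally computed sha256. Paths are hypothetical.
import hashlib


def read_sha_file(filename: str) -> str:
    with open(filename) as f:
        return f.read().strip().split()[0]


def sha256_of(path: str) -> str:
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(65536), b""):
            sha256.update(block)
    return sha256.hexdigest()


def verify(local_path: str, sha_path: str) -> None:
    expected, actual = read_sha_file(sha_path), sha256_of(local_path)
    if expected != actual:
        raise ValueError(f"Hash mismatch, expected {expected} but got {actual}")
```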
@@ -12,6 +12,7 @@ def compute_sha256(file_path: str) -> str:
             sha256.update(block)
     return sha256.hexdigest()
 
+
 def write_sha256_file(file_path: str, hash_value: str):
     sha256_path = file_path + ".sha256"
     file_name = os.path.basename(file_path)
@@ -19,6 +20,7 @@ def write_sha256_file(file_path: str, hash_value: str):
         f.write(f"{hash_value} {file_name}\n")
     print(f"Wrote SHA-256 to {sha256_path}")
 
+
 def main():
     if len(sys.argv) != 2:
         print("Usage: sha256sum.py <file>")
@@ -32,5 +34,6 @@ def main():
     hash_value = compute_sha256(file_path)
     write_sha256_file(file_path, hash_value)
 
+
 if __name__ == "__main__":
     main()
@@ -60,21 +60,9 @@ class TestPackager(TestCase):
         want: str
 
         cases = [
-            Case(
-                name="Old unstable",
-                version="4.3.0",
-                want="-org-unstable"
-            ),
-            Case(
-                name="Old stable 4.2",
-                version="4.2.0",
-                want="-org"
-            ),
-            Case(
-                name="Old stable 4.4",
-                version="4.4.0",
-                want="-org"
-            ),
+            Case(name="Old unstable", version="4.3.0", want="-org-unstable"),
+            Case(name="Old stable 4.2", version="4.2.0", want="-org"),
+            Case(name="Old stable 4.4", version="4.4.0", want="-org"),
             Case(
                 name="New stable standard",
                 version="8.0.0",
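Why ruff collapses the first three Case(...) calls but leaves "New stable standard" expanded: like black, ruff honors the magic trailing comma. The collapsed calls had no comma after their last argument and fit on one line; a call whose argument list ends with a trailing comma keeps the multi-line form. Minimal illustration, where Case is a stand-in dataclass:

```python
# The magic trailing comma behind this hunk. `Case` is a stand-in here.
from dataclasses import dataclass


@dataclass
class Case:
    name: str
    version: str
    want: str


# No trailing comma after the last argument: ruff joins this onto one line.
old_stable = Case(name="Old stable 4.2", version="4.2.0", want="-org")

# Trailing comma after the last argument: ruff keeps the expanded form.
new_stable = Case(
    name="New stable standard",
    version="8.0.0",
    want="-org",
)
```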
@@ -13,8 +13,8 @@ def url_exists(url, timeout=5):
     except requests.RequestException:
         return False
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Download and verify S3 binary.")
     parser.add_argument("s3_path", help="S3 URL to download from")
     parser.add_argument("local_path", nargs="?", help="Optional output file path")
@@ -23,4 +23,4 @@ if __name__ == "__main__":
 
     if url_exists(args.s3_path):
         if not download_s3_binary(args.s3_path, args.local_path, True):
             sys.exit(1)
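These last hunks show only context around url_exists; its body is not part of the diff. A hypothetical reconstruction consistent with the visible except clause, for readers following along (the real implementation may differ):

```python
# Hypothetical reconstruction of url_exists, consistent with the visible
# `except requests.RequestException` clause; the actual body is not shown
# in this diff and may differ.
import requests


def url_exists(url: str, timeout: int = 5) -> bool:
    try:
        return requests.head(url, timeout=timeout).ok
    except requests.RequestException:
        return False
```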