SERVER-106408: Begin using configuration dict instead of namespace (#38308)

GitOrigin-RevId: a018c70c02ed1249f823f199a8556771e34c0fe2
Jeff Zambory 2025-07-10 12:04:10 -04:00 committed by MongoDB Bot
parent d986880be8
commit 84be72a158
16 changed files with 220 additions and 172 deletions
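The diffs below all apply the same mechanical change: resmoke options stop travelling as an argparse.Namespace and travel as a plain dict instead, so attribute access becomes key access. A minimal sketch of the before/after idiom (the option name here is illustrative, not taken from this commit):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--installDir", dest="install_dir")

    # Before: a Namespace is passed around and read with attribute access.
    namespace = parser.parse_args(["--installDir", "/opt/mongo"])
    assert namespace.install_dir == "/opt/mongo"

    # After: the Namespace is converted once with vars() and read with dict keys.
    options = vars(namespace)
    assert options["install_dir"] == "/opt/mongo"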

View File

@@ -194,14 +194,12 @@ def main():
     arg_parser.add_argument("api_version", metavar="API_VERSION", help="API Version to check")
     args = arg_parser.parse_args()
 
-    class FakeArgs:
-        """Fake argparse.Namespace-like class to pass arguments to _update_config_vars."""
-
-        def __init__(self):
-            self.INSTALL_DIR = args.install_dir
-            self.command = ""
-
-    configure_resmoke._update_config_vars(arg_parser, FakeArgs())
+    fake_args = {
+        "INSTALL_DIR": args.install_dir,
+        "command": "",
+    }
+
+    configure_resmoke._update_config_vars(arg_parser, fake_args)
     configure_resmoke._set_logging_config()
 
     # Configure Fixture logging.

View File

@@ -37,9 +37,12 @@ BASE_16_TO_INT = 16
 COLLECTOR_ENDPOINT = "otel-collector.prod.corp.mongodb.com:443"
 BAZEL_GENERATED_OFF_FEATURE_FLAGS = "bazel/resmoke/off_feature_flags.txt"
 BAZEL_GENERATED_UNRELEASED_IFR_FEATURE_FLAGS = "bazel/resmoke/unreleased_ifr_feature_flags.txt"
+EVERGREEN_EXPANSIONS_FILE = "../expansions.yml"
 
 
-def validate_and_update_config(parser, args, should_configure_otel=True):
+def validate_and_update_config(
+    parser: argparse.ArgumentParser, args: dict, should_configure_otel: bool = True
+):
     """Validate inputs and update config module."""
     _validate_options(parser, args)
@@ -49,39 +52,44 @@ def validate_and_update_config(parser, args, should_configure_otel=True):
     _set_logging_config()
 
 
-def _validate_options(parser, args):
+def process_feature_flag_file(path: str) -> list[str]:
+    with open(path) as fd:
+        return fd.read().split()
+
+
+def _validate_options(parser: argparse.ArgumentParser, args: dict):
     """Do preliminary validation on the options and error on any invalid options."""
     if "shell_port" not in args or "shell_conn_string" not in args:
         return
 
-    if args.shell_port is not None and args.shell_conn_string is not None:
+    if args["shell_port"] is not None and args["shell_conn_string"] is not None:
         parser.error("Cannot specify both `shellPort` and `shellConnString`")
 
-    if args.executor_file:
+    if args["executor_file"]:
         parser.error(
             "--executor is superseded by --suites; specify --suites={} {} to run the"
             " test(s) under those suite configuration(s)".format(
-                args.executor_file, " ".join(args.test_files)
+                args["executor_file"], " ".join(args["test_files"])
             )
         )
 
     # The "test_files" positional argument logically overlaps with `--replayFile`. Disallow using both.
-    if args.test_files and args.replay_file:
+    if args["test_files"] and args["replay_file"]:
         parser.error(
             "Cannot use --replayFile with additional test files listed on the command line invocation."
         )
 
-    for f in args.test_files or []:
+    for f in args["test_files"] or []:
         # args.test_files can be a "replay" command or a list of tests files, if it's neither raise an error.
         if not f.startswith("@") and not Path(f).exists():
             parser.error(f"Test file {f} does not exist.")
 
-    if args.shell_seed and (not args.test_files or len(args.test_files) != 1):
+    if args["shell_seed"] and (not args["test_files"] or len(args["test_files"]) != 1):
         parser.error("The --shellSeed argument must be used with only one test.")
 
-    if args.additional_feature_flags_file and not os.path.isfile(
-        args.additional_feature_flags_file
+    if args["additional_feature_flags_file"] and not os.path.isfile(
+        args["additional_feature_flags_file"]
     ):
         parser.error("The specified additional feature flags file does not exist.")
@@ -105,13 +113,12 @@ def _validate_options(parser, args):
         return errors
 
-    config = vars(args)
-    mongod_set_param_errors = get_set_param_errors(config.get("mongod_set_parameters") or [])
-    mongos_set_param_errors = get_set_param_errors(config.get("mongos_set_parameters") or [])
+    mongod_set_param_errors = get_set_param_errors(args.get("mongod_set_parameters") or [])
+    mongos_set_param_errors = get_set_param_errors(args.get("mongos_set_parameters") or [])
     mongocryptd_set_param_errors = get_set_param_errors(
-        config.get("mongocryptd_set_parameters") or []
+        args.get("mongocryptd_set_parameters") or []
     )
-    mongo_set_param_errors = get_set_param_errors(config.get("mongo_set_parameters") or [])
+    mongo_set_param_errors = get_set_param_errors(args.get("mongo_set_parameters") or [])
     error_msgs = {}
     if mongod_set_param_errors:
         error_msgs["mongodSetParameters"] = mongod_set_param_errors
@@ -124,13 +131,13 @@ def _validate_options(parser, args):
     if error_msgs:
         parser.error(str(error_msgs))
 
-    if (args.shard_count is not None) ^ (args.shard_index is not None):
+    if (args["shard_count"] is not None) ^ (args["shard_index"] is not None):
         parser.error("Must specify both or neither of --shardCount and --shardIndex")
 
-    if (args.shard_count is not None) and (args.shard_index is not None) and args.jobs:
+    if (args["shard_count"] is not None) and (args["shard_index"] is not None) and args["jobs"]:
         parser.error("Cannot specify --shardCount and --shardIndex in combination with --jobs.")
 
 
-def _validate_config(parser):
+def _validate_config(parser: argparse.ArgumentParser):
     from buildscripts.resmokelib.config_fuzzer_limits import config_fuzzer_params
 
     """Do validation on the config settings and config fuzzer limits."""
@@ -174,7 +181,7 @@ def _validate_config(parser):
         _validate_params_spec(parser, config_fuzzer_params[param_type])
 
 
-def _validate_params_spec(parser, spec):
+def _validate_params_spec(parser: argparse.ArgumentParser, spec: dict):
     valid_fuzz_at_vals = {"startup", "runtime"}
     for key, value in spec.items():
         if "fuzz_at" not in value:
@@ -291,21 +298,21 @@ def _set_up_tracing(
     return success
 
 
-def _update_config_vars(parser, values, should_configure_otel=True):
+def _update_config_vars(
+    parser: argparse.ArgumentParser, values: dict, should_configure_otel: bool = True
+):
     """Update the variables of the config module."""
     config = _config.DEFAULTS.copy()
 
-    # Override `config` with values from command line arguments.
-    cmdline_vars = vars(values)
-    for cmdline_key in cmdline_vars:
+    for cmdline_key in values:
         if cmdline_key not in _config.DEFAULTS:
             # Ignore options that don't map to values in config.py
             continue
-        if cmdline_vars[cmdline_key] is not None:
-            config[cmdline_key] = cmdline_vars[cmdline_key]
+        if values[cmdline_key] is not None:
+            config[cmdline_key] = values[cmdline_key]
 
-    if values.command == "run" and os.path.isfile("resmoke.ini"):
+    if values["command"] == "run" and os.path.isfile("resmoke.ini"):
         err = textwrap.dedent("""\
 Support for resmoke.ini has been removed. You must delete
 resmoke.ini and rerun your build to run resmoke. If only one testable
@@ -326,10 +333,6 @@ be invoked as either:
 - buildscripts/resmoke.py --installDir {shlex.quote(user_config["install_dir"])}""")
         raise RuntimeError(err)
 
-    def process_feature_flag_file(path):
-        with open(path) as fd:
-            return fd.read().split()
-
     def set_up_feature_flags():
         # These logging messages start with # becuase the output of this file must produce
         # valid yaml. This comments out these print statements when the output is parsed.
@@ -406,7 +409,7 @@ flags in common: {common_set}
     _config.DISABLED_FEATURE_FLAGS = []
     default_disabled_feature_flags = []
     off_feature_flags = []
-    if values.command == "run":
+    if values["command"] == "run":
         (
             _config.ENABLED_FEATURE_FLAGS,
             _config.DISABLED_FEATURE_FLAGS,
@@ -513,7 +516,7 @@ flags in common: {common_set}
     _config.RELEASES_FILE = releases_file
 
     _config.INSTALL_DIR = config.pop("install_dir")
-    if values.command == "run" and _config.INSTALL_DIR is None:
+    if values["command"] == "run" and _config.INSTALL_DIR is None:
         bazel_bin_path = os.path.abspath("bazel-bin/install/bin")
         if os.path.exists(bazel_bin_path):
             _config.INSTALL_DIR = bazel_bin_path
@@ -744,11 +747,11 @@ flags in common: {common_set}
         "evergreen.revision": _config.EVERGREEN_REVISION,
         "evergreen.patch_build": _config.EVERGREEN_PATCH_BUILD,
         "resmoke.cmd.verbatim": " ".join(sys.argv),
-        "resmoke.cmd": values.command,
+        "resmoke.cmd": values["command"],
         "machine.os": sys.platform,
     }
 
-    for arg, value in vars(values).items():
+    for arg, value in values.items():
         if arg != "command" and value is not None:
             extra_context[f"resmoke.cmd.params.{arg}"] = value
@@ -979,24 +982,26 @@ def add_otel_args(parser: argparse.ArgumentParser):
     )
 
 
-def detect_evergreen_config(
-    parsed_args: argparse.Namespace, expansions_file: str = "../expansions.yml"
-):
-    if not os.path.exists(expansions_file):
+def detect_evergreen_config(parsed_args: dict):
+    if not os.path.exists(EVERGREEN_EXPANSIONS_FILE):
         return
 
-    expansions = read_config_file(expansions_file)
-    parsed_args.build_id = expansions.get("build_id", None)
-    parsed_args.distro_id = expansions.get("distro_id", None)
-    parsed_args.execution_number = expansions.get("execution", None)
-    parsed_args.project_name = expansions.get("project", None)
-    parsed_args.git_revision = expansions.get("revision", None)
-    parsed_args.revision_order_id = expansions.get("revision_order_id", None)
-    parsed_args.task_id = expansions.get("task_id", None)
-    parsed_args.task_name = expansions.get("task_name", None)
-    parsed_args.variant_name = expansions.get("build_variant", None)
-    parsed_args.version_id = expansions.get("version_id", None)
-    parsed_args.work_dir = expansions.get("workdir", None)
-    parsed_args.evg_project_config_path = expansions.get("evergreen_config_file_path", None)
-    parsed_args.requester = expansions.get("requester", None)
+    expansions = read_config_file(EVERGREEN_EXPANSIONS_FILE)
+    parsed_args.update(
+        {
+            "build_id": expansions.get("build_id", None),
+            "distro_id": expansions.get("distro_id", None),
+            "execution_number": expansions.get("execution", None),
+            "project_name": expansions.get("project", None),
+            "git_revision": expansions.get("revision", None),
+            "revision_order_id": expansions.get("revision_order_id", None),
+            "task_id": expansions.get("task_id", None),
+            "task_name": expansions.get("task_name", None),
+            "variant_name": expansions.get("build_variant", None),
+            "version_id": expansions.get("version_id", None),
+            "work_dir": expansions.get("workdir", None),
+            "evg_project_config_path": expansions.get("evergreen_config_file_path", None),
+            "requester": expansions.get("requester", None),
+        }
+    )

View File

@@ -1,5 +1,6 @@
 """Subcommands for test discovery."""
 
+import argparse
 from typing import List, Optional
 
 import yaml
@@ -123,7 +124,12 @@ class DiscoveryPlugin(PluginInterface):
         parser.add_argument("--suite", metavar="SUITE", help="Suite to run against.")
 
     def parse(
-        self, subcommand, parser, parsed_args, should_configure_otel=True, **kwargs
+        self,
+        subcommand: str,
+        parser: argparse.ArgumentParser,
+        parsed_args: dict,
+        should_configure_otel: bool = True,
+        **kwargs,
     ) -> Optional[Subcommand]:
         """
         Resolve command-line options to a Subcommand or None.
@@ -136,8 +142,8 @@ class DiscoveryPlugin(PluginInterface):
         """
         if subcommand == TEST_DISCOVERY_SUBCOMMAND:
             configure_resmoke.validate_and_update_config(parser, parsed_args, should_configure_otel)
-            return TestDiscoverySubcommand(parsed_args.suite)
+            return TestDiscoverySubcommand(parsed_args["suite"])
         if subcommand == SUITECONFIG_SUBCOMMAND:
             configure_resmoke.validate_and_update_config(parser, parsed_args, should_configure_otel)
-            return SuiteConfigSubcommand(parsed_args.suite)
+            return SuiteConfigSubcommand(parsed_args["suite"])
         return None

View File

@@ -1,5 +1,6 @@
 """Generate mongod.conf and mongos.conf using config fuzzer."""
 
+import argparse
 import os.path
 import shutil
@@ -157,7 +158,14 @@ class GenerateFuzzConfigPlugin(PluginInterface):
             help="Disables the fuzzing that sometimes enables the encrypted storage engine.",
         )
 
-    def parse(self, subcommand, parser, parsed_args, should_configure_otel=True, **kwargs):
+    def parse(
+        self,
+        subcommand: str,
+        parser: argparse.ArgumentParser,
+        parsed_args: dict,
+        should_configure_otel: bool = True,
+        **kwargs,
+    ):
         """
         Return the GenerateFuzzConfig subcommand for execution.
@@ -171,11 +179,11 @@ class GenerateFuzzConfigPlugin(PluginInterface):
         if subcommand != _COMMAND:
             return None
 
-        config.DISABLE_ENCRYPTION_FUZZING = parsed_args.disable_encryption_fuzzing
+        config.DISABLE_ENCRYPTION_FUZZING = parsed_args["disable_encryption_fuzzing"]
         return GenerateFuzzConfig(
-            parsed_args.template,
-            parsed_args.output,
-            parsed_args.fuzz_mongod_configs,
-            parsed_args.fuzz_mongos_configs,
-            parsed_args.config_fuzz_seed,
+            parsed_args["template"],
+            parsed_args["output"],
+            parsed_args["fuzz_mongod_configs"],
+            parsed_args["fuzz_mongos_configs"],
+            parsed_args["config_fuzz_seed"],
         )

View File

@@ -18,20 +18,20 @@ TRACER = trace.get_tracer("resmoke")
 class CoreAnalyzer(Subcommand):
-    def __init__(self, options: argparse.Namespace, logger: logging.Logger = None):
+    def __init__(self, options: dict, logger: logging.Logger = None):
         self.options = options
-        self.task_id = options.failed_task_id
-        self.execution = options.execution
-        self.gdb_index_cache = options.gdb_index_cache
+        self.task_id = options["failed_task_id"]
+        self.execution = options["execution"]
+        self.gdb_index_cache = options["gdb_index_cache"]
         self.root_logger = self.setup_logging(logger)
 
         self.extra_otel_options = {}
-        for option in options.otel_extra_data:
+        for option in options["otel_extra_data"]:
             key, val = option.split("=")
             self.extra_otel_options[key] = val
 
     @TRACER.start_as_current_span("core_analyzer.execute")
     def execute(self):
-        base_dir = self.options.working_dir
+        base_dir = self.options["working_dir"]
         current_span = get_default_current_span(
             {"failed_task_id": self.task_id} | self.extra_otel_options
         )
@@ -74,18 +74,18 @@ class CoreAnalyzer(Subcommand):
             core_dump_dir = os.path.join(base_dir, "core-dumps")
             install_dir = os.path.join(base_dir, "install")
         else:  # if a task id was not specified, look for input files on the current machine
-            install_dir = self.options.install_dir or os.path.join(
+            install_dir = self.options["install_dir"] or os.path.join(
                 os.path.curdir, "build", "install"
             )
-            core_dump_dir = self.options.core_dir or os.path.curdir
-            multiversion_dir = self.options.multiversion_dir or os.path.curdir
+            core_dump_dir = self.options["core_dir"] or os.path.curdir
+            multiversion_dir = self.options["multiversion_dir"] or os.path.curdir
 
         analysis_dir = os.path.join(base_dir, "analysis")
         report = dumpers.dbg.analyze_cores(
             core_dump_dir, install_dir, analysis_dir, multiversion_dir, self.gdb_index_cache
         )
 
-        if self.options.generate_report:
+        if self.options["generate_report"]:
             with open("report.json", "w") as file:
                 json.dump(report, file)
@@ -107,7 +107,7 @@ class CoreAnalyzerPlugin(PluginInterface):
         self,
         subcommand: str,
         parser: argparse.ArgumentParser,
-        parsed_args: argparse.Namespace,
+        parsed_args: dict,
         should_configure_otel=True,
         **kwargs,
     ) -> Optional[Subcommand]:

View File

@@ -10,6 +10,7 @@ A prototype hang analyzer for Evergreen integration to help investigate test tim
 Supports Linux, MacOS X, and Windows.
 """
 
+import argparse
 import getpass
 import logging
 import os
@@ -33,7 +34,7 @@ from buildscripts.resmokelib.symbolizer import Symbolizer
 class HangAnalyzer(Subcommand):
     """Main class for the hang analyzer subcommand."""
 
-    def __init__(self, options, task_id=None, logger=None, **_kwargs):
+    def __init__(self, options: dict, task_id=None, logger=None, **_kwargs):
         """
         Configure processe lists based on options.
@@ -74,7 +75,7 @@ class HangAnalyzer(Subcommand):
             processes = process_list.get_processes(
                 self.process_ids,
                 self.interesting_processes,
-                self.options.process_match,
+                self.options["process_match"],
                 self.root_logger,
             )
             process.teardown_processes(self.root_logger, processes, dump_pids={})
@@ -89,12 +90,12 @@ class HangAnalyzer(Subcommand):
         self._log_system_info()
 
-        dumpers = dumper.get_dumpers(self.root_logger, self.options.debugger_output)
+        dumpers = dumper.get_dumpers(self.root_logger, self.options["debugger_output"])
 
         processes = process_list.get_processes(
             self.process_ids,
             self.interesting_processes,
-            self.options.process_match,
+            self.options["process_match"],
             self.root_logger,
         )
@@ -120,7 +121,7 @@ class HangAnalyzer(Subcommand):
         dump_pids = {}
 
         # Dump core files of all processes, except python & java.
-        if self.options.dump_core:
+        if self.options["dump_core"]:
             take_core_processes = [
                 pinfo for pinfo in processes if not re.match("^(java|python)", pinfo.name)
             ]
@@ -210,7 +211,7 @@ class HangAnalyzer(Subcommand):
             for pid in pinfo.pidv:
                 try:
                     dumpers.jstack.dump_info(
-                        self.root_logger, self.options.debugger_output, pinfo.name, pid
+                        self.root_logger, self.options["debugger_output"], pinfo.name, pid
                     )
                 except Exception as err:
                     self.root_logger.info("Error encountered when invoking debugger %s", err)
@@ -230,7 +231,7 @@ class HangAnalyzer(Subcommand):
         self.root_logger.info("Done analyzing all processes for hangs")
 
         # Kill and abort processes if "-k" was specified.
-        if self.options.kill_processes:
+        if self.options["kill_processes"]:
             process.teardown_processes(self.root_logger, processes, dump_pids)
         else:
             # Resuming all suspended processes.
@@ -246,19 +247,19 @@ class HangAnalyzer(Subcommand):
         )
 
     def _configure_processes(self):
-        if self.options.debugger_output is None:
-            self.options.debugger_output = ["stdout"]
+        if self.options["debugger_output"] is None:
+            self.options["debugger_output"] = ["stdout"]
 
         # add != "" check to avoid empty process_ids
-        if self.options.process_ids is not None and self.options.process_ids != "":
+        if self.options["process_ids"] is not None and self.options["process_ids"] != "":
             # self.process_ids is an int list of PIDs
-            self.process_ids = [int(pid) for pid in self.options.process_ids.split(",")]
+            self.process_ids = [int(pid) for pid in self.options["process_ids"].split(",")]
 
-        if self.options.process_names is not None:
-            self.interesting_processes = self.options.process_names.split(",")
+        if self.options["process_names"] is not None:
+            self.interesting_processes = self.options["process_names"].split(",")
 
-        if self.options.go_process_names is not None:
-            self.go_processes = self.options.go_process_names.split(",")
+        if self.options["go_process_names"] is not None:
+            self.go_processes = self.options["go_process_names"].split(",")
             self.interesting_processes += self.go_processes
 
     def _setup_logging(self, logger):
@@ -301,16 +302,23 @@ class HangAnalyzer(Subcommand):
     def _check_enough_free_space(self):
         usage_percent = psutil.disk_usage(".").percent
         self.root_logger.info("Current disk usage percent: %s", usage_percent)
-        return usage_percent < self.options.max_disk_usage_percent
+        return usage_percent < self.options["max_disk_usage_percent"]
 
 
 class HangAnalyzerPlugin(PluginInterface):
     """Integration-point for hang-analyzer."""
 
-    def parse(self, subcommand, parser, parsed_args, should_configure_otel=True, **kwargs):
+    def parse(
+        self,
+        subcommand: str,
+        parser: argparse.ArgumentParser,
+        parsed_args: dict,
+        should_configure_otel: bool = True,
+        **kwargs,
+    ):
         """Parse command-line options."""
         if subcommand == "hang-analyzer":
-            return HangAnalyzer(parsed_args, task_id=parsed_args.task_id, **kwargs)
+            return HangAnalyzer(parsed_args, task_id=parsed_args["task_id"], **kwargs)
         return None
 
     def add_subcommand(self, subparsers):

View File

@@ -41,8 +41,8 @@ class MultiversionConfig(BaseModel):
 class MultiversionConfigSubcommand(Subcommand):
     """Subcommand for discovering multiversion configuration."""
 
-    def __init__(self, options: argparse.Namespace) -> None:
-        self.config_file_output = options.config_file_output
+    def __init__(self, options: dict) -> None:
+        self.config_file_output = options["config_file_output"]
 
     def execute(self):
         """Execute the subcommand."""
@@ -100,7 +100,7 @@ class MultiversionPlugin(PluginInterface):
         self,
         subcommand: str,
         parser: argparse.ArgumentParser,
-        parsed_args: argparse.Namespace,
+        parsed_args: dict,
         should_configure_otel=True,
         **kwargs,
     ) -> Optional[Subcommand]:

View File

@@ -49,20 +49,20 @@ def get_parser(usage=None):
     return parser
 
 
-def parse(sys_args, usage=None):
+def parse(sys_args, usage=None) -> tuple[argparse.ArgumentParser, dict]:
     """Parse the CLI args."""
 
     parser = get_parser(usage=usage)
     parsed_args = parser.parse_args(sys_args)
 
-    return parser, parsed_args
+    return parser, vars(parsed_args)
 
 
 def parse_command_line(sys_args, usage=None, should_configure_otel=True, **kwargs):
     """Parse the command line arguments passed to resmoke.py and return the subcommand object to execute."""
     parser, parsed_args = parse(sys_args, usage)
 
-    subcommand = parsed_args.command
+    subcommand = parsed_args["command"]
 
     for plugin in _PLUGINS:
         subcommand_obj = plugin.parse(
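The conversion happens once at this parser boundary: parse() still builds an argparse.Namespace internally but hands callers vars(parsed_args), so every plugin's parse() receives a plain dict. A minimal sketch of that hand-off (the flags and suite name are illustrative, not taken from this commit):

    import argparse

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="command")
    subparsers.add_parser("run").add_argument("--suites", dest="suite_files")

    namespace = parser.parse_args(["run", "--suites", "my_suite1"])
    parsed_args = vars(namespace)  # {"command": "run", "suite_files": "my_suite1"}
    assert parsed_args["command"] == "run"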

View File

@@ -1,6 +1,7 @@
 """Interface for creating a resmoke plugin."""
 
 import abc
+import argparse
 
 
 class Subcommand(object):
@@ -22,7 +23,14 @@ class PluginInterface(abc.ABC):
         """
         raise NotImplementedError()
 
-    def parse(self, subcommand, parser, parsed_args, should_configure_otel=True, **kwargs):
+    def parse(
+        self,
+        subcommand: str,
+        parser: argparse.ArgumentParser,
+        parsed_args: dict,
+        should_configure_otel: bool = True,
+        **kwargs,
+    ):
         """
         Resolve command-line options to a Subcommand or None.

View File

@@ -35,7 +35,7 @@ class Powercycle(Subcommand):
     SAVE_DIAG = 3
     REMOTE_HANG_ANALYZER = 4
 
-    def __init__(self, parser_actions, options):
+    def __init__(self, parser_actions, options: dict):
         """Initialize."""
         self.parser_actions = parser_actions
         self.options = options
@@ -47,7 +47,7 @@ class Powercycle(Subcommand):
             self.HOST_SETUP: self._exec_powercycle_host_setup,
             self.SAVE_DIAG: self._exec_powercycle_save_diagnostics,
             self.REMOTE_HANG_ANALYZER: self._exec_powercycle_hang_analyzer,
-        }[self.options.run_option]()
+        }[self.options["run_option"]]()
 
     def _exec_powercycle_main(self):
         powercycle.main(self.parser_actions, self.options)

View File

@@ -777,7 +777,7 @@ class LocalToRemoteOperations(object):
         return self.remote_op.access_info()
 
 
-def remote_handler(options, task_config, root_dir):
+def remote_handler(options: dict, task_config, root_dir):
     """Remote operations handler executes all remote operations on the remote host.
 
     These operations are invoked on the remote host's copy of this script.
@@ -786,13 +786,13 @@ def remote_handler(options, task_config, root_dir):
     # Set 'root_dir' to absolute path.
     root_dir = abs_path(root_dir)
 
-    if not options.remote_operations:
+    if not options["remote_operations"]:
         raise ValueError("No remote operation specified.")
 
     print_uptime()
-    LOGGER.info("Operations to perform %s", options.remote_operations)
-    host = options.host if options.host else "localhost"
-    host_port = "{}:{}".format(host, options.port)
+    LOGGER.info("Operations to perform %s", options["remote_operations"])
+    host = options["host"] if options["host"] else "localhost"
+    host_port = "{}:{}".format(host, options["port"])
 
     mongod_options = task_config.mongod_options
     if task_config.repl_set:
@@ -807,14 +807,16 @@ def remote_handler(options, task_config, root_dir):
         bin_dir=bin_dir,
         db_path=db_path,
         log_path=log_path,
-        port=options.port,
+        port=options["port"],
         options=mongod_options,
     )
-    mongo_client_opts = get_mongo_client_args(host=host, port=options.port, task_config=task_config)
+    mongo_client_opts = get_mongo_client_args(
+        host=host, port=options["port"], task_config=task_config
+    )
 
     # Perform the sequence of operations specified. If any operation fails then return immediately.
-    for operation in options.remote_operations:
+    for operation in options["remote_operations"]:
         ret = 0
 
         def noop():
@@ -851,7 +853,7 @@ def remote_handler(options, task_config, root_dir):
             return ret
 
         def install_mongod():
-            ret, output = mongod.install(root_dir, options.tarball_url)
+            ret, output = mongod.install(root_dir, options["tarball_url"])
             LOGGER.info(output)
 
             # Create mongod's dbpath, if it does not exist.
@@ -882,9 +884,11 @@ def remote_handler(options, task_config, root_dir):
             ret, output = mongod.start()
             LOGGER.info(output)
             if ret:
-                LOGGER.error("Failed to start mongod on port %d: %s", options.port, output)
+                LOGGER.error("Failed to start mongod on port %d: %s", options["port"], output)
                 return ret
-            LOGGER.info("Started mongod running on port %d pid %s", options.port, mongod.get_pids())
+            LOGGER.info(
+                "Started mongod running on port %d pid %s", options["port"], mongod.get_pids()
+            )
             mongo = pymongo.MongoClient(**mongo_client_opts)
             # Limit retries to a reasonable value
             for _ in range(100):
@@ -915,7 +919,7 @@ def remote_handler(options, task_config, root_dir):
             return wait_for_mongod_shutdown(mongod)
 
         def rsync_data():
-            rsync_dir, new_rsync_dir = options.rsync_dest
+            rsync_dir, new_rsync_dir = options["rsync_dest"]
             ret, output = rsync(
                 powercycle_constants.DB_PATH, rsync_dir, powercycle_constants.RSYNC_EXCLUDE_FILES
             )
@@ -1365,7 +1369,7 @@ def get_remote_python():
     return remote_python
 
 
-def main(parser_actions, options):
+def main(parser_actions, options: dict):
     """Execute Main program."""
 
     global REPORT_JSON
@@ -1380,15 +1384,15 @@ def main(parser_actions, options):
     logging.basicConfig(
         format="%(asctime)s %(levelname)s %(message)s",
         level=logging.ERROR,
-        filename=options.log_file,
+        filename=options["log_file"],
     )
-    logging.getLogger(__name__).setLevel(options.log_level.upper())
+    logging.getLogger(__name__).setLevel(options["log_level"].upper())
     logging.Formatter.converter = time.gmtime
 
     LOGGER.info("powercycle invocation: %s", " ".join(sys.argv))
 
-    task_name = re.sub(r"(_[0-9]+)(_[\w-]+)?$", "", options.task_name)
-    task_config = powercycle_config.get_task_config(task_name, options.remote_operation)
+    task_name = re.sub(r"(_[0-9]+)(_[\w-]+)?$", "", options["task_name"])
+    task_config = powercycle_config.get_task_config(task_name, options["remote_operation"])
 
     LOGGER.info("powercycle task config: %s", task_config)
@@ -1402,7 +1406,7 @@ def main(parser_actions, options):
 
     # Invoke remote_handler if remote_operation is specified.
    # The remote commands are program args.
-    if options.remote_operation:
+    if options["remote_operation"]:
         ret = remote_handler(options, task_config, root_dir)
         # Exit here since the local operations are performed after this.
         local_exit(ret)
@@ -1491,7 +1495,7 @@ def main(parser_actions, options):
 
     # The remote mongod host comes from the ssh_user_host,
     # which may be specified as user@host.
-    ssh_user_host = options.ssh_user_host
+    ssh_user_host = options["ssh_user_host"]
     _, ssh_host = get_user_host(ssh_user_host)
     mongod_host = ssh_host
@@ -1499,7 +1503,7 @@ def main(parser_actions, options):
     # the first occurrence for each parameter, so we have the default connection options follow the
     # user-specified --sshConnection options.
     ssh_connection_options = (
-        f"{options.ssh_connection_options if options.ssh_connection_options else ''}"
+        f"{options['ssh_connection_options'] if options['ssh_connection_options'] else ''}"
         f" {powercycle_constants.DEFAULT_SSH_CONNECTION_OPTIONS}"
     )
     # For remote operations requiring sudo, force pseudo-tty allocation,
@@ -1513,15 +1517,14 @@ def main(parser_actions, options):
         ssh_connection_options=ssh_connection_options,
         ssh_options=ssh_options,
         use_shell=True,
-        access_retry_count=options.ssh_access_retry_count,
+        access_retry_count=options["ssh_access_retry_count"],
     )
     verify_remote_access(local_ops)
 
     # Pass client_args to the remote script invocation.
     client_args = "powercycle run"
-    options_dict = vars(options)
     for action in parser_actions:
-        option_value = options_dict.get(action.dest, None)
+        option_value = options.get(action.dest, None)
         if option_value != action.default:
             # The boolean options do not require the option_value.
             if isinstance(option_value, bool):
@@ -1777,7 +1780,7 @@ def main(parser_actions, options):
             ssh_connection_options=ssh_connection_options,
             ssh_options=ssh_options,
             use_shell=True,
-            access_retry_count=options.ssh_access_retry_count,
+            access_retry_count=options["ssh_access_retry_count"],
         )
         verify_remote_access(local_ops)
         ret, output = call_remote_operation(

View File

@@ -2357,7 +2357,7 @@ def to_local_args(input_args: Optional[List[str]] = None):
 
     (parser, parsed_args) = main_parser.parse(input_args)
 
-    if parsed_args.command != "run":
+    if parsed_args["command"] != "run":
         raise TypeError(
             f"to_local_args can only be called for the 'run' subcommand. Instead was called on '{parsed_args.command}'"
         )
@@ -2365,9 +2365,9 @@ def to_local_args(input_args: Optional[List[str]] = None):
     # If --originSuite was specified, then we replace the value of --suites with it. This is done to
     # avoid needing to have engineers learn about the test suites generated by the
     # evergreen_generate_resmoke_tasks.py script.
-    origin_suite = getattr(parsed_args, "origin_suite", None)
+    origin_suite = parsed_args.get("origin_suite", None)
     if origin_suite is not None:
-        setattr(parsed_args, "suite_files", origin_suite)
+        parsed_args["suite_files"] = origin_suite
 
     # The top-level parser has one subparser that contains all subcommand parsers.
     command_subparser = [action for action in parser._actions if action.dest == "command"][0]
@@ -2398,7 +2398,7 @@ def to_local_args(input_args: Optional[List[str]] = None):
         arg_dests_visited = set()
         for action in group._group_actions:
             arg_dest = action.dest
-            arg_value = getattr(parsed_args, arg_dest, None)
+            arg_value = parsed_args.get(arg_dest, None)
 
             # Some arguments, such as --shuffle and --shuffleMode, update the same dest variable.
             # To not print out multiple arguments that will update the same dest, we will skip once
@@ -2410,7 +2410,7 @@ def to_local_args(input_args: Optional[List[str]] = None):
 
             # If the arg doesn't exist in the parsed namespace, skip.
             # This is mainly for "--help".
-            if not hasattr(parsed_args, arg_dest):
+            if arg_dest not in parsed_args:
                 continue
             # Skip any evergreen centric args.
             elif group.title in [
@@ -2428,7 +2428,7 @@ def to_local_args(input_args: Optional[List[str]] = None):
             arg_name = action.option_strings[-1]
 
             # If an option has the same value as the default, we don't need to specify it.
-            if getattr(parsed_args, arg_dest, None) == action.default:
+            if parsed_args.get(arg_dest, None) == action.default:
                 continue
             # These are arguments that take no value.
             elif action.nargs == 0:
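The run.py hunks above follow a uniform mapping from the dynamic Namespace helpers to their dict equivalents. A small reference sketch with illustrative values (not taken from this commit):

    parsed_args = {"suite_files": None, "origin_suite": "my_suite1"}

    # getattr(parsed_args, key, default)  ->  parsed_args.get(key, default)
    origin_suite = parsed_args.get("origin_suite", None)

    # setattr(parsed_args, key, value)    ->  parsed_args[key] = value
    if origin_suite is not None:
        parsed_args["suite_files"] = origin_suite

    # hasattr(parsed_args, key)           ->  key in parsed_args
    assert "suite_files" in parsed_args and parsed_args["suite_files"] == "my_suite1"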

View File

@@ -514,42 +514,46 @@ class _DownloadOptions(object):
 class SetupMultiversionPlugin(PluginInterface):
     """Integration point for setup-multiversion-mongodb."""
 
-    def parse(self, subcommand, parser, parsed_args, **kwargs):
+    def parse(
+        self,
+        subcommand: str,
+        parser: argparse.ArgumentParser,
+        parsed_args: dict,
+        should_configure_otel: bool = True,
+        **kwargs,
+    ):
         """Parse command-line options."""
         if subcommand != SUBCOMMAND:
             return None
 
-        # Shorthand for brevity.
-        args = parsed_args
-
         download_options = _DownloadOptions(
-            db=args.download_binaries,
-            ds=args.download_symbols,
-            da=args.download_artifacts,
-            dv=args.download_python_venv,
+            db=parsed_args["download_binaries"],
+            ds=parsed_args["download_symbols"],
+            da=parsed_args["download_artifacts"],
+            dv=parsed_args["download_python_venv"],
         )
 
-        if args.use_existing_releases_file:
+        if parsed_args["use_existing_releases_file"]:
             multiversionsetupconstants.USE_EXISTING_RELEASES_FILE = True
 
         return SetupMultiversion(
-            install_dir=args.install_dir,
-            link_dir=args.link_dir,
-            mv_platform=args.platform,
-            edition=args.edition,
-            architecture=args.architecture,
-            use_latest=args.use_latest,
-            versions=args.versions,
-            install_last_lts=args.install_last_lts,
-            variant=args.variant,
-            install_last_continuous=args.install_last_continuous,
+            install_dir=parsed_args["install_dir"],
+            link_dir=parsed_args["link_dir"],
+            mv_platform=parsed_args["platform"],
+            edition=parsed_args["edition"],
+            architecture=parsed_args["architecture"],
+            use_latest=parsed_args["use_latest"],
+            versions=parsed_args["versions"],
+            install_last_lts=parsed_args["install_last_lts"],
+            variant=parsed_args["variant"],
+            install_last_continuous=parsed_args["install_last_continuous"],
             download_options=download_options,
-            evergreen_config=args.evergreen_config,
-            github_oauth_token=args.github_oauth_token,
-            ignore_failed_push=(not args.require_push),
-            evg_versions_file=args.evg_versions_file,
-            debug=args.debug,
-            logger=SetupMultiversion.setup_logger(parsed_args.debug),
+            evergreen_config=parsed_args["evergreen_config"],
+            github_oauth_token=parsed_args["github_oauth_token"],
+            ignore_failed_push=(not parsed_args["require_push"]),
+            evg_versions_file=parsed_args["evg_versions_file"],
+            debug=parsed_args["debug"],
+            logger=SetupMultiversion.setup_logger(parsed_args["debug"]),
         )
 
     @classmethod

View File

@@ -1,5 +1,6 @@
 """Wrapper around mongosym to download everything required."""
 
+import argparse
 import logging
 import os
 import shutil
@@ -290,7 +291,14 @@ class SymbolizerPlugin(PluginInterface):
         )
         mongosymb.make_argument_parser(group)
 
-    def parse(self, subcommand, parser, parsed_args, **kwargs):
+    def parse(
+        self,
+        subcommand: str,
+        parser: argparse.ArgumentParser,
+        parsed_args: dict,
+        should_configure_otel: bool = True,
+        **kwargs,
+    ):
         """
         Return Symbolizer if command is one we recognize.
@@ -304,14 +312,14 @@ class SymbolizerPlugin(PluginInterface):
         if subcommand != _COMMAND:
             return None
 
-        task_id = parsed_args.task_id
-        binary_name = parsed_args.binary_name
-        download_symbols_only = parsed_args.download_symbols_only
+        task_id = parsed_args["task_id"]
+        binary_name = parsed_args["binary_name"]
+        download_symbols_only = parsed_args["download_symbols_only"]
 
         return Symbolizer(
             task_id,
             download_symbols_only=download_symbols_only,
             bin_name=binary_name,
             all_args=parsed_args,
-            logger=Symbolizer.setup_logger(parsed_args.debug),
+            logger=Symbolizer.setup_logger(parsed_args["debug"]),
         )

View File

@@ -366,7 +366,7 @@ class TestParseArgs(unittest.TestCase):
         )
 
         self.assertEqual(
-            args.test_files,
+            args["test_files"],
             [
                 "test_file1.js",
                 "test_file2.js",
@@ -374,7 +374,7 @@ class TestParseArgs(unittest.TestCase):
             ],
         )
         # suites get split up when config.py gets populated
-        self.assertEqual(args.suite_files, "my_suite1,my_suite2")
+        self.assertEqual(args["suite_files"], "my_suite1,my_suite2")
 
     def test_files_in_the_middle(self):
         _, args = parse(
@@ -389,14 +389,14 @@ class TestParseArgs(unittest.TestCase):
         )
 
         self.assertEqual(
-            args.test_files,
+            args["test_files"],
             [
                 "test_file1.js",
                 "test_file2.js",
                 "test_file3.js",
             ],
         )
-        self.assertEqual(args.suite_files, "my_suite1")
+        self.assertEqual(args["suite_files"], "my_suite1")
 
 
 class TestParseCommandLine(unittest.TestCase):

View File

@@ -30,7 +30,7 @@ def get_config_value(attrib, cmd_line_options, config_file_data, required=False,
     return default
 
 
-def read_config_file(config_file):
+def read_config_file(config_file) -> dict:
     """
     Read the yaml config file specified.