mirror of https://github.com/mongodb/mongo
SERVER-106601 Open yaml files with utf8 for windows (#37659)
GitOrigin-RevId: e37e02b7ced06618cc951630fbcd07455d7c00f0
parent b7150c9eca
commit bb84a8c1a7
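Background for the change: open() without an explicit encoding falls back to locale.getpreferredencoding(False), which on Windows is typically a legacy ANSI code page such as cp1252 rather than UTF-8. The repository's YAML files are UTF-8, so any non-ASCII character could raise UnicodeDecodeError (or be silently mis-decoded) when these scripts run on Windows. This commit pins every YAML open() call touched below to encoding="utf8". A minimal, self-contained sketch of the failure mode and the fix (illustrative only, not code from the patch; the file name is made up):

# Illustrative sketch only (not from the patch): why the explicit encoding matters.
import locale
import yaml

# The implicit default used by open(); on Windows this is usually an ANSI code page such as "cp1252".
print(locale.getpreferredencoding(False))

# Create a small UTF-8 YAML file containing a non-ASCII character.
with open("demo_owners.yml", "w", encoding="utf8") as fh:
    fh.write("owner: Ji\u0159\u00ed\n")

# Locale-dependent read: on Windows this can raise UnicodeDecodeError or silently mis-decode.
# contents = yaml.safe_load(open("demo_owners.yml"))

# Explicit UTF-8 read: identical behaviour on every platform; this is the pattern applied throughout the commit.
with open("demo_owners.yml", encoding="utf8") as fh:
    contents = yaml.safe_load(fh)
print(contents)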
@@ -63,7 +63,7 @@ def process_owners_file(output_lines: list[str], node: FileNode) -> None:
     print(f"parsing: {owners_file_path}")
     output_lines.append(f"# The following patterns are parsed from {owners_file_path}")

-    with open(owners_file_path, "r") as file:
+    with open(owners_file_path, "r", encoding="utf8") as file:
         contents = yaml.safe_load(file)
         assert "version" in contents, f"Version not found in {owners_file_path}"
         assert contents["version"] in parsers, f"Unsupported version in {owners_file_path}"

@@ -279,7 +279,7 @@ def get_allowed_unowned_files() -> Set[str]:

     unowned_files = set()

-    with open(allowed_unowned_file_path, "r") as file:
+    with open(allowed_unowned_file_path, "r", encoding="utf8") as file:
         contents = yaml.safe_load(file)

     try:

@@ -83,7 +83,7 @@ class OwnersParserV1:
         if not os.path.exists(parsed_path):
             raise RuntimeError(f"Could not find alias file {path}")

-        with open(parsed_path, "r") as file:
+        with open(parsed_path, "r", encoding="utf8") as file:
            contents = yaml.safe_load(file)
            assert "version" in contents, f"Version not found in {path}"
            assert "aliases" in contents, f"Alias not found in {path}"
@@ -14,7 +14,7 @@ def get_expansions(expansions_file: str) -> Dict[str, any]:
     if not os.path.exists(expansions_file):
         raise RuntimeError(f"Expansions file not found at {expansions_file}")

-    with open(expansions_file, "r") as file:
+    with open(expansions_file, "r", encoding="utf8") as file:
         return yaml.safe_load(file)
@@ -73,7 +73,7 @@ def parse_evergreen_file(path, evergreen_binary="evergreen"):
     if evergreen_binary:
         # Call 'evergreen evaluate path' to pre-process the project configuration file.
         cmd = [evergreen_binary, "evaluate", path]
-        result = subprocess.run(cmd, capture_output=True, text=True)
+        result = subprocess.run(cmd, capture_output=True, encoding="utf8", text=True)
         if result.returncode:
             raise RuntimeError(
                 "Unable to evaluate {}.\nSTDOUT:{}\nSTDERR:{}".format(

@@ -82,7 +82,7 @@ def parse_evergreen_file(path, evergreen_binary="evergreen"):
             )
         config = yaml.safe_load(result.stdout)
     else:
-        with open(path, "r") as fstream:
+        with open(path, "r", encoding="utf8") as fstream:
             config = yaml.safe_load(fstream)

     return EvergreenProjectConfig(config)
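A note on the subprocess change above: with capture_output=True and text=True, subprocess.run() decodes the child's stdout and stderr with the locale's preferred encoding, so the evergreen evaluate output was exposed to the same Windows code-page problem as the open() calls. Passing encoding="utf8" pins that decoding; once an encoding is given, text mode is implied, so the remaining text=True is redundant but harmless. A minimal sketch of the pattern (illustrative only; it spawns a throwaway Python child process rather than the evergreen CLI):

# Illustrative sketch only (not from the patch): decode a child process's output as UTF-8.
import subprocess
import sys

# Run a tiny child process that prints a non-ASCII character as UTF-8.
result = subprocess.run(
    [sys.executable, "-X", "utf8", "-c", "print('caf\u00e9')"],
    capture_output=True,
    encoding="utf8",  # implies text mode; stdout/stderr become str decoded as UTF-8
)
print(result.stdout)  # "café" on every platform, independent of the locale code page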
@@ -20,7 +20,7 @@ except ModuleNotFoundError:


 def _load_defaults(defaults_file: str) -> dict:
-    with open(defaults_file) as fh:
+    with open(defaults_file, encoding="utf8") as fh:
         defaults = yaml.safe_load(fh)
         if not isinstance(defaults, dict):
             _error(

@@ -67,7 +67,7 @@ def _load_defaults(defaults_file: str) -> dict:


 def _load_expansions(expansions_file) -> dict:
-    with open(expansions_file) as fh:
+    with open(expansions_file, encoding="utf8") as fh:
         expansions = yaml.safe_load(fh)

     if not isinstance(expansions, dict):
@@ -99,7 +99,7 @@ class TimeoutOverrides(BaseModel):
     @classmethod
     def from_yaml_file(cls, file_path: Path) -> "TimeoutOverrides":
         """Read the timeout overrides from the given file."""
-        with open(file_path) as file_handler:
+        with open(file_path, encoding="utf8") as file_handler:
             return cls(**yaml.safe_load(file_handler))

     def _lookup_override(self, build_variant: str, task_name: str) -> Optional[TimeoutOverride]:

@@ -173,7 +173,7 @@ def output_timeout(
     output["timeout_secs"] = math.ceil(idle_timeout.total_seconds())

     if output_file:
-        with open(output_file, "w") as outfile:
+        with open(output_file, "w", encoding="utf8") as outfile:
             yaml.dump(output, stream=outfile, default_flow_style=False)

     yaml.dump(output, stream=sys.stdout, default_flow_style=False)
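The same default applies when writing: open(..., "w") without an encoding also uses the locale's preferred encoding, so dumping YAML that contains text outside the active Windows code page (for instance with yaml.dump(..., allow_unicode=True)) could raise UnicodeEncodeError. Hence the "w" opens in this commit are pinned to UTF-8 as well. A minimal sketch with a hypothetical path and value (not code from the patch):

# Illustrative sketch only (hypothetical path and data, not from the patch).
import yaml

data = {"owner": "Ji\u0159\u00ed"}  # contains a character that cp1252 cannot encode

with open("timeouts_out.yml", "w", encoding="utf8") as outfile:
    yaml.dump(data, stream=outfile, default_flow_style=False, allow_unicode=True)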
@@ -87,7 +87,9 @@ def get_all_feature_flags_turned_off_by_default(idl_dirs: List[str] = None):
         if not binder.is_feature_flag_enabled_by_default(flag)
     ]

-    with open("buildscripts/resmokeconfig/fully_disabled_feature_flags.yml") as fully_disabled_ffs:
+    with open(
+        "buildscripts/resmokeconfig/fully_disabled_feature_flags.yml", encoding="utf8"
+    ) as fully_disabled_ffs:
         force_disabled_flags = yaml.safe_load(fully_disabled_ffs)

     return list(set(all_default_false_flags) - set(force_disabled_flags))

@@ -62,7 +62,7 @@ def load_rules_file() -> dict:
     if not os.path.exists(abs_filename):
         raise ValueError(f"Rules file {abs_filename} not found")

-    with open(abs_filename) as file:
+    with open(abs_filename, encoding="utf8") as file:
         return yaml.safe_load(file)

@@ -79,7 +79,7 @@ class TestJSONSchema(testcase.IDLTestcase):
         return os.path.join(self._base_dir, "buildscripts", "idl")

     def load_yaml_file(self, file_path):
-        with open(file_path, "r") as f:
+        with open(file_path, "r", encoding="utf8") as f:
             return yaml.safe_load(f)

     def validate_yaml_file(self, file_path, schema):
@@ -53,7 +53,7 @@ class CodeLockdownConfig(BaseModel):
         :param file_path: Path to file.
         :return: Config object.
         """
-        with open(file_path) as file_handler:
+        with open(file_path, encoding="utf8") as file_handler:
             return cls(**yaml.safe_load(file_handler))

     def get_all_group_names(self) -> List[str]:

@@ -438,7 +438,7 @@ flags in common: {common_set}
         )
     if not _config.INCLUDE_FULLY_DISABLED_FEATURE_TESTS:
         with open(
-            "buildscripts/resmokeconfig/fully_disabled_feature_flags.yml"
+            "buildscripts/resmokeconfig/fully_disabled_feature_flags.yml", encoding="utf8"
         ) as fully_disabled_ffs:
             # the ENABLED_FEATURE_FLAGS list already excludes the fully disabled features flags
             # This keeps any feature flags enabled that were manually turned on from being excluded
@@ -124,7 +124,7 @@ class MongoVersion(BaseModel):
         :param yaml_file: Path to yaml file.
         :return: MongoVersion read from file.
         """
-        mongo_version_yml_file = open(yaml_file, "r")
+        mongo_version_yml_file = open(yaml_file, "r", encoding="utf8")
         return cls(**yaml.safe_load(mongo_version_yml_file))

     def get_version(self) -> Version:

@@ -164,7 +164,7 @@ class MongoReleases(BaseModel):
         :return: MongoReleases read from file.
         """

-        with open(yaml_file, "r") as mongo_releases_file:
+        with open(yaml_file, "r", encoding="utf8") as mongo_releases_file:
             yaml_contents = mongo_releases_file.read()
             safe_load_result = yaml.safe_load(yaml_contents)
         try:
@@ -1317,10 +1317,10 @@ def new_resmoke_config(config_file, new_config_file, test_data, eval_str=""):
             "config": {"shell_options": {"eval": eval_str, "global_vars": {"TestData": test_data}}}
         }
     }
-    with open(config_file, "r") as yaml_stream:
+    with open(config_file, "r", encoding="utf8") as yaml_stream:
         config = yaml.safe_load(yaml_stream)
     config.update(new_config)
-    with open(new_config_file, "w") as yaml_stream:
+    with open(new_config_file, "w", encoding="utf8") as yaml_stream:
         yaml.safe_dump(config, yaml_stream)
@@ -43,7 +43,7 @@ def get_task_config(task_name, is_remote):
     else:
         config_location = powercycle.abs_path(POWERCYCLE_TASKS_CONFIG)

-    with open(config_location) as file_handle:
+    with open(config_location, encoding="utf8") as file_handle:
         raw_yaml = yaml.safe_load(file_handle)
     tasks_raw_yaml = raw_yaml.get("tasks", [])

@@ -124,7 +124,7 @@ class SetupMultiversion(Subcommand):
         self.github_oauth_token = (
            github_oauth_token.replace("token ", "") if github_oauth_token else None
         )
-        with open(config.SETUP_MULTIVERSION_CONFIG) as file_handle:
+        with open(config.SETUP_MULTIVERSION_CONFIG, encoding="utf8") as file_handle:
             raw_yaml = yaml.safe_load(file_handle)
             self.config = config.SetupMultiversionConfig(raw_yaml)
@@ -329,7 +329,7 @@ class MatrixSuiteConfig(SuiteConfigInterface):
         new_text = cls.generate_matrix_suite_text(suite_name)
         new_yaml = yaml.safe_load(new_text)

-        with open(generated_path, "r") as file:
+        with open(generated_path, "r", encoding="utf8") as file:
             old_text = file.read()
         old_yaml = yaml.safe_load(old_text)
         if new_yaml != old_yaml:

@@ -164,7 +164,7 @@ class GenerateAndCheckPerfResults(interface.Hook):
         self.create_time = datetime.datetime.now()

         try:
-            with open(THRESHOLD_LOCATION) as fh:
+            with open(THRESHOLD_LOCATION, encoding="utf8") as fh:
                 self.performance_thresholds = yaml.safe_load(fh)["tests"]
         except Exception:
             self.logger.exception(
@@ -42,7 +42,7 @@ class TagsConfig(object):
         See TagsConfig.__init__() for the keyword arguments that can be specified.
         """

-        with open(filename, "r") as fstream:
+        with open(filename, "r", encoding="utf8") as fstream:
             raw = yaml.safe_load(fstream)

         return cls(raw, **kwargs)

@@ -56,7 +56,7 @@ def is_string_list(lst):
 def load_yaml_file(filename):
     """Attempt to read 'filename' as YAML."""
     try:
-        with open(filename, "r") as fp:
+        with open(filename, "r", encoding="utf8") as fp:
             return yaml.safe_load(fp)
     except yaml.YAMLError as err:
         raise ValueError("File '%s' contained invalid YAML: %s" % (filename, err))
@@ -403,7 +403,7 @@ class TestSetParameters(_ResmokeSelftest):
     def generate_suite(self, suite_output_path, template_file):
         """Read the template file, substitute the `outputLocation` and rewrite to the file `suite_output_path`."""

-        with open(os.path.normpath(template_file), "r") as template_suite_fd:
+        with open(os.path.normpath(template_file), "r", encoding="utf8") as template_suite_fd:
             suite = yaml.safe_load(template_suite_fd)

         try:

@@ -568,7 +568,7 @@ class TestDiscovery(_ResmokeSelftest):
         )

         with open(
-            "buildscripts/resmokeconfig/fully_disabled_feature_flags.yml"
+            "buildscripts/resmokeconfig/fully_disabled_feature_flags.yml", encoding="utf8"
         ) as fully_disabled_ffs:
             self.assertIn(
                 "featureFlagFryer",
@@ -832,7 +832,7 @@ class TestMultiversionConfig(unittest.TestCase):
             ],
             check=True,
         )
-        with open(file_name, "r") as file:
+        with open(file_name, "r", encoding="utf8") as file:
             file_contents = file.read()

         try:

@@ -42,7 +42,7 @@ class TestSuiteGeneration(unittest.TestCase):
         tested_suite = "test_matrix_suite"
         generated_suite_path = self.matrix_suite_config.get_generated_suite_path(tested_suite)
         self.matrix_suite_config.generate_matrix_suite_file(tested_suite)
-        with open(generated_suite_path, "r+") as file:
+        with open(generated_suite_path, "r+", encoding="utf8") as file:
             gen_yaml = yaml.safe_load(file)
             gen_yaml["abc"] = "def"
             file.seek(0)
@@ -16,7 +16,7 @@ def process_owners(cur_dir: str) -> Tuple[Dict[re.Pattern, List[str]], bool]:
     if not os.path.exists(owners_file_path):
         return process_owners(os.path.dirname(cur_dir))

-    with open(owners_file_path, "r") as f:
+    with open(owners_file_path, "r", encoding="utf8") as f:
         contents = yaml.safe_load(f)

     assert "version" in contents, f"Version not found in {owners_file_path}"

@@ -50,7 +50,9 @@ def process_owners(cur_dir: str) -> Tuple[Dict[re.Pattern, List[str]], bool]:

 class Owners:
     def __init__(self):
-        self.co_jira_map = yaml.safe_load(open("buildscripts/util/co_jira_map.yml", "r"))
+        self.co_jira_map = yaml.safe_load(
+            open("buildscripts/util/co_jira_map.yml", "r", encoding="utf8")
+        )

     def get_codeowners(self, file_path: str) -> List[str]:
         cur_dir = os.path.dirname(file_path)
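One aside on the hunk above: before and after the change, the co_jira_map file handle is passed straight into yaml.safe_load() without a with block, so it is only closed whenever the file object happens to be garbage-collected. A possible alternative, shown here purely as a sketch rather than what the patch does, closes the handle deterministically:

# Sketch of a possible alternative (not what the patch does): close the handle deterministically.
import yaml

class Owners:
    def __init__(self):
        with open("buildscripts/util/co_jira_map.yml", "r", encoding="utf8") as fh:
            self.co_jira_map = yaml.safe_load(fh)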
@@ -16,7 +16,7 @@ def get_expansions() -> dict:
     if not os.path.exists(expansions_file):
         return None

-    with open(expansions_file, "r") as file:
+    with open(expansions_file, "r", encoding="utf8") as file:
         return yaml.safe_load(file)

@@ -70,5 +70,5 @@ def read_yaml_file(path: str) -> Dict[str, Any]:
     :param path: Path to file to read.
     :return: Contents of given file.
     """
-    with open(path) as file_handle:
+    with open(path, encoding="utf8") as file_handle:
         return yaml.safe_load(file_handle)
@@ -7,7 +7,7 @@ if __name__ == "__main__":
     owners_paths = glob.glob("**/OWNERS.yml", recursive=True)
     print(len(owners_paths))
     for path in owners_paths:
-        with open(path, "r") as owner_file:
+        with open(path, "r", encoding="utf8") as owner_file:
             contents = yaml.safe_load(owner_file)
             if "filters" not in contents:
                 continue

@@ -15,5 +15,5 @@ if __name__ == "__main__":
                 assert "approvers" in file_filter
                 approvers.update(set(file_filter["approvers"]))

    f = open("co_jira_map.yml", "w+")
+    f = open("co_jira_map.yml", "w+", encoding="utf8")
     yaml.dump({approver: "jira_team" for approver in approvers}, f)
@@ -39,7 +39,7 @@ def read_config_file(config_file):
     """
     config_file_data = {}
     if config_file:
-        with open(config_file) as file_handle:
+        with open(config_file, encoding="utf8") as file_handle:
             config_file_data = yaml.safe_load(file_handle)

     return config_file_data

@@ -75,7 +75,7 @@ def read_variable_from_yml(filename, variable_name):
     :param variable_name: Variable to read from file.
     :return: Value of variable or None.
     """
-    with open(filename, "r") as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         nodes = yaml.safe_load(fh)

     variables = nodes["variables"]
@@ -8,7 +8,7 @@ import yaml

 def get_yaml_value(yaml_file, yaml_key):
     """Return string value for 'yaml_key' from 'yaml_file'."""
-    with open(yaml_file, "r") as ystream:
+    with open(yaml_file, "r", encoding="utf8") as ystream:
         yaml_dict = yaml.safe_load(ystream)
     return str(yaml_dict.get(yaml_key, ""))
@@ -8,12 +8,12 @@ parser.add_argument("--variant_name")
 parser.add_argument("--task_name")
 args = parser.parse_args()

-with open("etc/evergreen_yml_components/configuration.yml") as f:
+with open("etc/evergreen_yml_components/configuration.yml", encoding="utf8") as f:
     yml = yaml.safe_load(f)
     default_timeout = yml["exec_timeout_secs"]

 override_timeout = None
-with open("etc/evergreen_timeouts.yml") as f:
+with open("etc/evergreen_timeouts.yml", encoding="utf8") as f:
     yml = yaml.safe_load(f)
     if args.variant_name in yml["overrides"]:
         for task in yml["overrides"][args.variant_name]:

@@ -21,7 +21,7 @@ with open("etc/evergreen_timeouts.yml") as f:
             override_timeout = task["exec_timeout"] * 60
             break

-with open("override_task_timeout.yml", "w") as f:
+with open("override_task_timeout.yml", "w", encoding="utf8") as f:
     if override_timeout:
         print(
             f"Overriding timeout for {args.variant_name}:{args.task_name} of {override_timeout} seconds."
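A hedged aside, not part of the commit: any remaining open() call that still relies on the locale encoding can be surfaced with Python's opt-in EncodingWarning (available since Python 3.10, PEP 597) by running the interpreter with -X warn_default_encoding or setting PYTHONWARNDEFAULTENCODING=1; PEP 686 is slated to make UTF-8 the default text encoding in Python 3.15, which removes the problem at the root. A small self-contained sketch:

# Illustrative sketch only: detecting locale-dependent open() calls.
# Run as:  python -X warn_default_encoding this_script.py
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "demo.yml")
with open(path, "w", encoding="utf8") as fh:  # explicit encoding: no warning
    fh.write("key: value\n")

open(path).close()                   # emits an EncodingWarning under that flag
open(path, encoding="utf8").close()  # no warning: encoding is explicit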