SERVER-99522 Delete dead py linters (#31760)

GitOrigin-RevId: 3aedfa559dda734d4d89fefe0fe1154a5a3cf04d
Juan Gu 2025-02-06 08:51:47 -08:00 committed by MongoDB Bot
parent 92b128fcbe
commit 7209d62b77
165 changed files with 209 additions and 476 deletions

View File

@ -25,11 +25,6 @@
},
"files.insertFinalNewline": true,
"js/ts.implicitProjectConfig.target": "ES2020",
"pylint.path": [
"${interpreter}",
"-m",
"pylint"
],
"python.autoComplete.extraPaths": [
"/opt/mongodbtoolchain/v4/share/gcc-11.3.0/python",
"src/third_party/scons-3.1.2/scons-local-3.1.2"
@ -85,7 +80,6 @@
"xaver.clang-format",
"cs128.cs128-clang-tidy",
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.mypy-type-checker",
"esbenp.prettier-vscode",
"redhat.vscode-yaml",

View File

@ -1,4 +1,3 @@
# pylint: disable=g-bad-file-header
# Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");

View File

@ -24,7 +24,6 @@ from structlog.stdlib import LoggerFactory
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
import buildscripts.resmokelib.parser
from buildscripts.ciconfig.evergreen import (
EvergreenProjectConfig,
@ -40,8 +39,6 @@ from buildscripts.patch_builds.change_data import (
from buildscripts.resmokelib.suitesconfig import create_test_membership_map, get_suite, get_suites
from buildscripts.resmokelib.utils import default_if_none, globstar
# pylint: enable=wrong-import-position
structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.getLogger(__name__)
EXTERNAL_LOGGERS = {
@ -799,10 +796,12 @@ def run(
"""
_configure_logging(verbose)
repeat_config = RepeatConfig(repeat_tests_secs=repeat_tests_secs,
repeat_tests_min=repeat_tests_min,
repeat_tests_max=repeat_tests_max,
repeat_tests_num=repeat_tests_num) # yapf: disable
repeat_config = RepeatConfig(
repeat_tests_secs=repeat_tests_secs,
repeat_tests_min=repeat_tests_min,
repeat_tests_max=repeat_tests_max,
repeat_tests_num=repeat_tests_num,
)
repos = [Repo(x) for x in DEFAULT_REPO_LOCATIONS if os.path.isdir(x)]
evg_conf = parse_evergreen_file(evg_project_file)

View File

@ -8,8 +8,6 @@
5. Supports validating and updating a set of files to the right coding style.
"""
# pylint: disable=wrong-import-position
import difflib
import glob
import logging

View File

@ -14,7 +14,7 @@ from bson.json_util import dumps
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from buildscripts.resmokelib import utils # pylint: disable=wrong-import-position
from buildscripts.resmokelib import utils
def main():

View File

@ -11,8 +11,8 @@ from optparse import OptionParser
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from buildscripts.resmokelib import utils # pylint: disable=wrong-import-position
from buildscripts.resmokelib.testing import report # pylint: disable=wrong-import-position
from buildscripts.resmokelib import utils
from buildscripts.resmokelib.testing import report
def read_json_file(json_file):

View File

@ -62,7 +62,6 @@ def calibrate_node(
if node_config.filter_function is not None:
abt_node_df = node_config.filter_function(abt_node_df)
# pylint: disable=invalid-name
if node_config.variables_override is None:
variables = ["n_processed"]
else:

View File

@ -58,7 +58,6 @@ class CostModelParameters:
class LinearModel:
"""Calibrated Linear Model and its metrics."""
# pylint: disable=invalid-name
intercept: float
coef: list[float]
mse: float # Mean Squared Error
@ -67,7 +66,6 @@ class LinearModel:
corrcoef: any # Correlation Coefficients
# pylint: disable=invalid-name
def estimate(
fit, X: np.ndarray, y: np.ndarray, test_size: float, trace: bool = False
) -> LinearModel:

View File

@ -212,7 +212,6 @@ def print_explain(calibration_df: pd.DataFrame, abt_df: pd.DataFrame, row_index:
def calibrate(abt_node_df: pd.DataFrame, variables: list[str] = None):
"""Calibrate the ABT node given in abd_node_df with the given model input variables."""
# pylint: disable=invalid-name
if variables is None:
variables = ["n_processed"]
y = abt_node_df["execution_time"]

View File

@ -18,7 +18,6 @@ import requests
# register parent directory in sys.path, so 'buildscripts' is detected no matter where the script is called from
sys.path.append(str(pathlib.Path(os.path.join(os.getcwd(), __file__)).parent.parent))
# pylint: disable=wrong-import-position
from buildscripts.build_system_options import PathOptions
from buildscripts.resmokelib.setup_multiversion.setup_multiversion import (
SetupMultiversion,

View File

@ -30,7 +30,7 @@ codes = [] # type: ignore
# Each AssertLocation identifies the C++ source location of an assertion
AssertLocation = namedtuple("AssertLocation", ["sourceFile", "byteOffset", "lines", "code"])
list_files = False # pylint: disable=invalid-name
list_files = False
_CODE_PATTERNS = [
re.compile(p + r"\s*(?P<code>\d+)", re.MULTILINE)
@ -336,7 +336,7 @@ def main():
if extra:
parser.error(f"Unrecognized arguments: {' '.join(extra)}")
global list_files # pylint: disable=global-statement,invalid-name
global list_files
list_files = options.list_files
(_, errors, seen) = read_error_codes()

View File

@ -33,12 +33,9 @@ import structlog
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
# pylint: disable=wrong-import-position
from buildscripts.linter import git, parallel
from buildscripts.linter.filediff import gather_changed_files_for_lint
# pylint: enable=wrong-import-position
##############################################################################
#
# Constants for ESLint

View File

@ -14,13 +14,10 @@ from evergreen.api import EvergreenApi, RetryingEvergreenApi
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
from buildscripts.util.cmdutils import enable_logging
from buildscripts.util.fileops import read_yaml_file
from buildscripts.util.taskname import remove_gen_suffix
# pylint: enable=wrong-import-position
LOGGER = structlog.getLogger(__name__)
EVG_CONFIG_FILE = "./.evergreen.yml"
@ -141,4 +138,4 @@ def main(expansion_file: str, evergreen_config: str, verbose: bool) -> None:
if __name__ == "__main__":
main() # pylint: disable=no-value-for-parameter
main()

View File

@ -102,9 +102,9 @@ def _main(expansions_file: str, defaults_file: str):
for key, value in expansions.items():
print(f"{_clean_key(key)}={quote(value)}; ", end="")
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
_error(ex)
if __name__ == "__main__":
_main() # pylint: disable=no-value-for-parameter
_main()

View File

@ -11,7 +11,7 @@ import sys
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from buildscripts.ciconfig import evergreen # pylint: disable=wrong-import-position
from buildscripts.ciconfig import evergreen
DEFAULT_EVERGREEN_FILE = "etc/evergreen.yml"

View File

@ -14,7 +14,6 @@ import sys
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
from buildscripts.resmokelib import selector
from buildscripts.resmokelib.multiversionconstants import REQUIRES_FCV_TAG_LATEST
from buildscripts.resmokelib.utils import jscomment

View File

@ -95,7 +95,7 @@ def load_libstdcxx_printers(progspace):
stdcxx_printer_toolchain_paths[progspace] = detect_toolchain(progspace)
try:
sys.path.insert(0, stdcxx_printer_toolchain_paths[progspace])
global stdlib_printers # pylint: disable=invalid-name,global-variable-not-assigned
global stdlib_printers
from libstdcxx.v6 import printers as stdlib_printers
from libstdcxx.v6 import register_libstdcxx_printers
@ -184,7 +184,7 @@ def lookup_type(gdb_type_str: str) -> gdb.Type:
case or at least it doesn't search all global blocks, sometimes it is required
to get the global block based off the current frame.
"""
global MAIN_GLOBAL_BLOCK # pylint: disable=global-statement
global MAIN_GLOBAL_BLOCK
exceptions = []
try:
@ -383,7 +383,7 @@ class DumpGlobalServiceContext(gdb.Command):
"""Initialize DumpGlobalServiceContext."""
RegisterMongoCommand.register(self, "mongodb-service-context", gdb.COMMAND_DATA)
def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
def invoke(self, arg, _from_tty):
"""Invoke GDB command to print the Global Service Context."""
gdb.execute("print *('mongo::(anonymous namespace)::globalServiceContext')")
@ -575,7 +575,7 @@ class MongoDBDumpLocks(gdb.Command):
"""Initialize MongoDBDumpLocks."""
RegisterMongoCommand.register(self, "mongodb-dump-locks", gdb.COMMAND_DATA)
def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
def invoke(self, arg, _from_tty):
"""Invoke MongoDBDumpLocks."""
print("Running Hang Analyzer Supplement - MongoDBDumpLocks")
@ -723,7 +723,7 @@ class MongoDBDumpStorageEngineInfo(gdb.Command):
"""Initialize MongoDBDumpStorageEngineInfo."""
RegisterMongoCommand.register(self, "mongodb-dump-storage-engine-info", gdb.COMMAND_DATA)
def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
def invoke(self, arg, _from_tty):
"""Invoke MongoDBDumpStorageEngineInfo."""
print("Running Hang Analyzer Supplement - MongoDBDumpStorageEngineInfo")
@ -760,7 +760,7 @@ class BtIfActive(gdb.Command):
"""Initialize BtIfActive."""
RegisterMongoCommand.register(self, "mongodb-bt-if-active", gdb.COMMAND_DATA)
def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
def invoke(self, arg, _from_tty):
"""Invoke GDB to print stack trace."""
try:
idle_location = gdb.parse_and_eval("mongo::for_debuggers::idleThreadLocation")
@ -881,7 +881,7 @@ class MongoDBJavaScriptStack(gdb.Command):
"""Initialize MongoDBJavaScriptStack."""
RegisterMongoCommand.register(self, "mongodb-javascript-stack", gdb.COMMAND_STATUS)
def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
def invoke(self, arg, _from_tty):
"""Invoke GDB to dump JS stacks."""
print("Running Print JavaScript Stack Supplement")
@ -992,7 +992,7 @@ class MongoDBPPrintBsonAtPointer(gdb.Command):
print("Usage: mongodb-pprint-bson <ptr> <optional length>")
return
ptr = eval(args[0]) # pylint: disable=eval-used
ptr = eval(args[0])
size = 20 * 1024
if len(args) >= 2:
size = int(args[1])
@ -1016,7 +1016,7 @@ class MongoDBHelp(gdb.Command):
"""Initialize MongoDBHelp."""
gdb.Command.__init__(self, "mongodb-help", gdb.COMMAND_SUPPORT)
def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
def invoke(self, arg, _from_tty):
"""Register the mongo print commands."""
RegisterMongoCommand.print_commands()

View File

@ -14,7 +14,7 @@ ROOT_PATH = str(Path(os.path.abspath(__file__)).parent.parent.parent)
if ROOT_PATH not in sys.path:
sys.path.insert(0, ROOT_PATH)
from src.third_party.immer.dist.tools.gdb_pretty_printers.printers import (
ListIter as ImmerListIter, # pylint: disable=wrong-import-position
ListIter as ImmerListIter,
)
if not gdb:
@ -907,7 +907,7 @@ class WtUpdateToBsonPrinter(object):
memory = gdb.selected_inferior().read_memory(self.ptr, self.size).tobytes()
bsonobj = None
try:
bsonobj = next(bson.decode_iter(memory)) # pylint: disable=stop-iteration-return
bsonobj = next(bson.decode_iter(memory))
except bson.errors.InvalidBSON:
return

View File

@ -60,7 +60,7 @@ class LoadDebugFile(gdb.Command):
try:
section_map = parse_sections()
load_sym_file_at_addrs(dbg_file, section_map)
except Exception as err: # pylint: disable=broad-except
except Exception as err:
print(err)
@ -247,7 +247,7 @@ class LoadDistTest(gdb.Command):
if (idx + 1) % 50 == 0 or len(dwarf_files) == idx + 1:
print(f"{idx+1}/{len(dwarf_files)} symbol files loaded")
except Exception as err: # pylint: disable=broad-except
except Exception as err:
print(err)
if self._is_udb:

View File

@ -19,11 +19,8 @@ import click
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
from buildscripts.util.fileops import read_yaml_file
# pylint: enable=wrong-import-position
assert sys.version_info >= (3, 7)
@ -397,7 +394,7 @@ class GoldenTestApp(object):
def main():
"""Execute main."""
try:
cli() # pylint: disable=no-value-for-parameter
cli()
except AppError as err:
print(err)
sys.exit(1)

View File

@ -42,7 +42,6 @@ from pymongo import MongoClient
# Permit imports from "buildscripts".
sys.path.append(os.path.normpath(os.path.join(os.path.abspath(__file__), "../../..")))
# pylint: disable=wrong-import-position
from idl import syntax
from buildscripts.idl.lib import list_idls, parse_idl
@ -51,8 +50,6 @@ from buildscripts.resmokelib.logging import loggers
from buildscripts.resmokelib.testing.fixtures import interface
from buildscripts.resmokelib.testing.fixtures.fixturelib import FixtureLib
# pylint: enable=wrong-import-position
LOGGER_NAME = "check-idl-definitions"
LOGGER = logging.getLogger(LOGGER_NAME)
@ -202,10 +199,9 @@ def main():
"""Fake argparse.Namespace-like class to pass arguments to _update_config_vars."""
def __init__(self):
self.INSTALL_DIR = args.install_dir # pylint: disable=invalid-name
self.INSTALL_DIR = args.install_dir
self.command = ""
# pylint: disable=protected-access
configure_resmoke._update_config_vars(arg_parser, FakeArgs())
configure_resmoke._set_logging_config()

View File

@ -40,15 +40,12 @@ from packaging.version import Version
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
# pylint: disable=wrong-import-position
from buildscripts.resmokelib.multiversionconstants import (
LAST_CONTINUOUS_FCV,
LAST_LTS_FCV,
LATEST_FCV,
)
# pylint: enable=wrong-import-position
LOGGER_NAME = "checkout-idl"
LOGGER = logging.getLogger(LOGGER_NAME)

View File

@ -40,7 +40,6 @@ import yaml
# Permit imports from "buildscripts".
sys.path.append(os.path.normpath(os.path.join(os.path.abspath(__file__), "../../..")))
# pylint: disable=wrong-import-position
from buildscripts.idl import lib
from buildscripts.idl.idl import parser

View File

@ -37,7 +37,6 @@ from typing import List
# Permit imports from "buildscripts".
sys.path.append(os.path.normpath(os.path.join(os.path.abspath(__file__), "../../..")))
# pylint: disable=wrong-import-position
from buildscripts.idl import lib
from buildscripts.idl.idl import parser

View File

@ -146,7 +146,6 @@ class Struct(common.SourceLocation):
self.generic_list_type = None # type: Optional[GenericListType]
# Determines whether or not this IDL struct can be a component of a query shape. See WRITING-13831.
self.query_shape_component = False # type: bool
# pylint: disable=invalid-name
self.unsafe_dangerous_disable_extra_field_duplicate_checks = None # type: bool
# Indicates if the struct is a view type or not. If a struct is a view type, then the
@ -206,7 +205,6 @@ class Validator(common.SourceLocation):
# type: (str, int, int) -> None
"""Construct a Validator."""
# Don't lint gt/lt as bad attribute names.
# pylint: disable=C0103
self.gt = None # type: Expression
self.lt = None # type: Expression
self.gte = None # type: Expression

View File

@ -559,7 +559,7 @@ class ParserContext(object):
% (name),
)
def add_not_custom_scalar_serialization_not_supported_error( # pylint: disable=invalid-name
def add_not_custom_scalar_serialization_not_supported_error(
self, location, ast_type, ast_parent, bson_type_name
):
# type: (common.SourceLocation, str, str, str) -> None
@ -859,7 +859,7 @@ class ParserContext(object):
("Command '%s' cannot have the same name as a field.") % (command_name),
)
def add_bad_field_non_const_getter_in_immutable_struct_error( # pylint: disable=invalid-name
def add_bad_field_non_const_getter_in_immutable_struct_error(
self, location, struct_name, field_name
):
# type: (common.SourceLocation, str, str) -> None

View File

@ -644,8 +644,6 @@ def _parse_struct(ctxt, spec, name, node):
},
)
# PyLint has difficulty with some iterables: https://github.com/PyCQA/pylint/issues/3105
# pylint: disable=not-an-iterable
if (
struct.generate_comparison_operators
and struct.fields

View File

@ -97,8 +97,6 @@ class ArgumentInfo(object):
class MethodInfo(object):
"""Class that encapslates information about a method and how to declare, define, and call it."""
# pylint: disable=too-many-instance-attributes
def __init__(
self,
class_name,
@ -111,7 +109,6 @@ class MethodInfo(object):
desc_for_comment=None,
):
# type: (str, str, List[str], str, bool, bool, bool, Optional[str]) -> None
# pylint: disable=too-many-arguments
"""Create a MethodInfo instance."""
self.class_name = class_name
self.method_name = method_name

View File

@ -453,7 +453,6 @@ class Validator(common.SourceLocation):
# type: (str, int, int) -> None
"""Construct a Validator."""
# Don't lint gt/lt as bad attribute names.
# pylint: disable=C0103
self.gt = None # type: Expression
self.lt = None # type: Expression
self.gte = None # type: Expression
@ -577,7 +576,6 @@ class Struct(common.SourceLocation):
self.is_command_reply = False # type: bool
self.is_catalog_ctxt = False # type: bool
self.is_generic_cmd_list = None # type: Optional[str]
# pylint: disable=invalid-name
self.unsafe_dangerous_disable_extra_field_duplicate_checks = None # type: bool
# Command only property
@ -917,7 +915,6 @@ class FeatureFlag(common.SourceLocation):
self.cpp_varname = None # type: str
self.default = None # type: Expression
self.version = None # type: str
# pylint: disable=C0103
self.shouldBeFCVGated = None # type: Expression
super(FeatureFlag, self).__init__(file_name, line, column)

View File

@ -1751,7 +1751,6 @@ def check_security_access_checks(
new_idl_file_path: str,
) -> None:
"""Check the compatibility between security access checks of the old and new command."""
# pylint:disable=too-many-nested-blocks
cmd_name = cmd.command_name
if old_access_checks is not None and new_access_checks is not None:
old_access_check_type = old_access_checks.get_access_check_type()

View File

@ -428,7 +428,6 @@ class IDLCompatibilityContext(object):
type_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""Add an error about the new and old command or parameter type validators not being equal."""
if is_command_parameter:
self._add_error(
@ -536,7 +535,6 @@ class IDLCompatibilityContext(object):
type_name: str,
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add a new added required parameter or command type field error.
@ -630,7 +628,6 @@ class IDLCompatibilityContext(object):
type_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add a stable required parameter or command type field error.
@ -764,7 +761,6 @@ class IDLCompatibilityContext(object):
field_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add an error about the new command or parameter type not being a variant type.
@ -804,7 +800,6 @@ class IDLCompatibilityContext(object):
field_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add an error about the new variant types not being a superset.
@ -846,7 +841,6 @@ class IDLCompatibilityContext(object):
field_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add an error about the new chained types not being a superset.
@ -1217,7 +1211,6 @@ class IDLCompatibilityContext(object):
field_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add an error about the old command or param type bson serialization type being 'any'.
@ -1256,7 +1249,6 @@ class IDLCompatibilityContext(object):
field_name: Optional[str],
is_command_parameter: bool,
) -> None:
# pylint: disable=invalid-name
"""
Add an error about the new command or param type bson serialization type being 'any'.
@ -1642,7 +1634,6 @@ class IDLCompatibilityContext(object):
def add_new_param_or_command_type_field_requires_stability_error(
self, command_name: str, field_name: str, file: str, is_command_parameter: bool
) -> None:
# pylint: disable=invalid-name
"""Add an error that a new param or command type field requires the 'stability' field."""
if is_command_parameter:
self._add_error(

View File

@ -3563,7 +3563,6 @@ class TestBinder(testcase.IDLTestcase):
idl.errors.ERROR_ID_CANNOT_DECLARE_SHAPE_LITERAL,
)
# pylint: disable=invalid-name
def test_struct_unsafe_dangerous_disable_extra_field_duplicate_checks_negative(self):
# type: () -> None
"""Negative struct tests for unsafe_dangerous_disable_extra_field_duplicate_checks."""

View File

@ -35,8 +35,8 @@ from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import idl_check_compatibility # noqa: E402 pylint: disable=wrong-import-position
import idl_compatibility_errors # noqa: E402 pylint: disable=wrong-import-position
import idl_check_compatibility
import idl_compatibility_errors
class TestIDLCompatibilityChecker(unittest.TestCase):
@ -93,7 +93,6 @@ class TestIDLCompatibilityChecker(unittest.TestCase):
["src"],
)
# pylint: disable=invalid-name
def test_newly_added_commands_should_fail(self):
"""Tests that incompatible newly added commands should fail."""
dir_path = path.dirname(path.realpath(__file__))
@ -179,7 +178,6 @@ class TestIDLCompatibilityChecker(unittest.TestCase):
"newCommandTypeStructFieldBsonSerializationTypeAny",
)
# pylint: disable=invalid-name
def test_should_fail(self):
"""Tests that incompatible old and new IDL commands should fail."""
dir_path = path.dirname(path.realpath(__file__))

View File

@ -58,7 +58,6 @@ class DictionaryImportResolver(idl.parser.ImportResolverBase):
def resolve(self, base_file, imported_file_name):
# type: (str, str) -> str
"""Return the complete path to an imported file name."""
# pylint: disable=unused-argument
if imported_file_name not in self._import_dict:
return None

View File

@ -2131,7 +2131,6 @@ class TestParser(testcase.IDLTestcase):
idl.errors.ERROR_ID_EMPTY_ACCESS_CHECK,
)
# pylint: disable=invalid-name
def test_struct_unsafe_dangerous_disable_extra_field_duplicate_checks_negative(self):
# Test commands and unsafe_dangerous_disable_extra_field_duplicate_checks are disallowed
self.assert_parse_fail(

View File

@ -65,14 +65,13 @@ class NothingImportResolver(idl.parser.ImportResolverBase):
class IDLTestcase(unittest.TestCase):
"""IDL Test case base class."""
# pylint: disable=inconsistent-return-statements
def _parse(self, doc_str, resolver):
# type: (str, idl.parser.ImportResolverBase) -> idl.syntax.IDLParsedSpec
"""Parse a document and throw a unittest failure if it fails to parse as a valid YAML document."""
try:
return idl.parser.parse(doc_str, "unknown", resolver)
except: # pylint: disable=bare-except
except:
self.fail("Failed to parse document:\n%s" % (doc_str))
def _assert_parse(self, doc_str, parsed_doc):

View File

@ -417,7 +417,7 @@ def get_clang_includes() -> List[str]:
The query reliably gets the include dirs that would be used in normal compiles. We cache and reuse the result
so the subprocess only runs once.
"""
global CLANG_INCLUDES # pylint: disable=global-statement
global CLANG_INCLUDES
if CLANG_INCLUDES is None:
clang_includes = subprocess.getoutput(
f"{TOOLCHAIN_DIR}/clang++ -Wp,-v -x c++ - -fsyntax-only < /dev/null 2>&1 | sed -e '/^#include <...>/,/^End of search/{{ //!b }};d'"
@ -904,7 +904,7 @@ def run_iwyu(cmd_entry: CompileCommand) -> Tuple[ResultType, CompileCommand]:
def main() -> None:
"""Main function."""
global IWYU_ANALYSIS_STATE, SHUTDOWN_FLAG # pylint: disable=global-statement
global IWYU_ANALYSIS_STATE, SHUTDOWN_FLAG
atexit.register(write_iwyu_data)
with concurrent.futures.ThreadPoolExecutor(

View File

@ -178,7 +178,7 @@ def _try_find_log_file(store: Optional[str], test_name) -> str:
with open(os.path.join(store, test_name, "jepsen.log")) as fh:
return fh.read()
except Exception: # pylint: disable=broad-except
except Exception:
return ""
@ -312,4 +312,4 @@ def main(
if __name__ == "__main__":
main() # pylint: disable=no-value-for-parameter
main()

View File

@ -1,8 +1,6 @@
#!/usr/bin/env python3
"""Check files in git diff to ensure they are within a given size limit."""
# pylint: disable=wrong-import-position
import argparse
import fnmatch
import logging

View File

@ -10,7 +10,6 @@ from typing import List, Tuple
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
# pylint: disable=wrong-import-position
from buildscripts.linter.filediff import gather_changed_files_with_lines
LEGACY_TYPES = [

View File

@ -116,7 +116,6 @@ class BackendServer:
version = ""
git_hash = ""
# pylint: disable=c-extension-no-member
for _, element in etree.iterparse(
str(graph_file), tag="{http://graphml.graphdrawing.org/xmlns}data"
):

View File

@ -73,9 +73,6 @@ def get_class_that_defined_method(meth):
return getattr(meth, "__objclass__", None) # handle special descriptor objects
# newer pylints contain the fix: https://github.com/PyCQA/pylint/pull/2926/commits/35e1c61026eab90af504806ef9da6241b096e659
# signature-mutators=buildscripts.libdeps.graph_analyzer.schema_check
# pylint: disable=no-value-for-parameter
def parametrized(dec):
"""Allow parameters passed to the decorator."""

View File

@ -38,10 +38,6 @@ except ImportError:
pass
# We need to disable invalid name here because it break backwards compatibility with
# our graph schemas. Possibly we could use lower case conversion process to maintain
# backwards compatibility and make pylint happy.
# pylint: disable=invalid-name
class CountTypes(Enum):
"""Enums for the different types of counts to perform on a graph."""

View File

@ -10,13 +10,10 @@ py_library(
"mongolint.py",
"mypy.py",
"parallel.py",
"pydocstyle.py",
"pylint.py",
"pyrightlinter.py",
"ruffchecker.py",
"ruffformatter.py",
"runner.py",
"yapf.py",
],
visibility = ["//visibility:public"],
deps = [

View File

@ -41,7 +41,7 @@ class LinterBase(object, metaclass=ABCMeta):
"""
Check if we need to diff the output of this linter with the original file.
This applies to tools like clang-format and yapf which do not have a notion of linting. We
This applies to tools like clang-format which do not have a notion of linting. We
introduce the idea of linting by formatting a file with the tool to standard out and
comparing it to the original.
"""

View File

@ -11,7 +11,6 @@ from git import Repo
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
# pylint: disable=wrong-import-position
from buildscripts.linter import git
from buildscripts.patch_builds.change_data import (
RevisionMap,
@ -20,8 +19,6 @@ from buildscripts.patch_builds.change_data import (
generate_revision_map,
)
# pylint: enable=wrong-import-position
LOGGER = structlog.get_logger(__name__)
MONGO_REVISION_ENV_VAR = "REVISION"

View File

@ -277,7 +277,7 @@ def main():
print('File "{}" failed with {} errors.'.format(args.file, error_count))
return 1
return 0
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
print('Exception while checking file "{}": {}'.format(args.file, ex))
return 2

View File

@ -1,22 +0,0 @@
"""PyDocStyle linter support module."""
from . import base
class PyDocstyleLinter(base.LinterBase):
"""PyDocStyle linter."""
def __init__(self):
# type: () -> None
"""Create a pydocstyle linter."""
super(PyDocstyleLinter, self).__init__("pydocstyle", "6.1.1")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
"""Get the command to run a linter version check."""
return ["--version"]
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
return [file_name]

View File

@ -1,22 +0,0 @@
"""PyLint linter support module."""
from . import base
class PyLintLinter(base.LinterBase):
"""Pylint linter."""
def __init__(self):
# type: () -> None
"""Create a pylint linter."""
super(PyLintLinter, self).__init__("pylint", "2.7.2")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
"""Get the command to run a linter version check."""
return ["--version"]
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
return ["--output-format=msvs", "--reports=n", file_name]

View File

@ -1,32 +0,0 @@
"""YAPF linter support module."""
from . import base
class YapfLinter(base.LinterBase):
"""Yapf linter."""
def __init__(self):
# type: () -> None
"""Create a yapf linter."""
super(YapfLinter, self).__init__("yapf", "0.26.0")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
"""Get the command to run a linter version check."""
return ["--version"]
def needs_file_diff(self):
# type: () -> bool
"""See comment in base class."""
return True
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
return [file_name]
def get_fix_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter fix."""
return ["-i", file_name]

View File

@ -27,7 +27,7 @@ def __lldb_init_module(debugger, *_args):
#######################
def PrintGlobalServiceContext(debugger, *_args): # pylint: disable=invalid-name
def PrintGlobalServiceContext(debugger, *_args):
"""Provide the mongodb-service-context command.
Emulates the same convenience command available in GDB
@ -36,12 +36,12 @@ def PrintGlobalServiceContext(debugger, *_args): # pylint: disable=invalid-name
debugger.HandleCommand("print *globalServiceContext")
def MongoDBDumpLocks(debugger, *_args): # pylint: disable=invalid-name
def MongoDBDumpLocks(debugger, *_args):
"""Dump locks in the mongod process."""
debugger.HandleCommand("call mongo::dumpLockManager()")
def BreakpointOnAssert(debugger, command, _exec_ctx, _result, _internal_dict): # pylint: disable=invalid-name
def BreakpointOnAssert(debugger, command, _exec_ctx, _result, _internal_dict):
"""Set a breakpoint on MongoDB uassert that throws the specified error code."""
arg_strs = shlex.split(command)
@ -56,7 +56,7 @@ def BreakpointOnAssert(debugger, command, _exec_ctx, _result, _internal_dict):
)
def MongoDBFindBreakpoint(debugger, _command, exec_ctx, _result, _internal_dict): # pylint: disable=invalid-name
def MongoDBFindBreakpoint(debugger, _command, exec_ctx, _result, _internal_dict):
"""Find the thread that triggered a breakpoint from 'debugger.cpp'."""
process = exec_ctx.process
@ -80,7 +80,7 @@ def MongoDBFindBreakpoint(debugger, _command, exec_ctx, _result, _internal_dict)
debugger.HandleCommand("thread select %d" % (thread_num))
def DumpGSC(_debugger, _command, exec_ctx, _result, _internal_dict): # pylint: disable=invalid-name
def DumpGSC(_debugger, _command, exec_ctx, _result, _internal_dict):
"""Dump the global service context as a hash table."""
gsc_list = exec_ctx.target.FindGlobalVariables("globalServiceContext", 1)

View File

@ -85,7 +85,7 @@ def __lldb_init_module(debugger, *_args):
#############################
def StatusPrinter(valobj, *_args): # pylint: disable=invalid-name
def StatusPrinter(valobj, *_args):
"""Pretty-Prints MongoDB Status objects."""
err = valobj.GetChildMemberWithName("_error")
px = err.GetChildMemberWithName("px")
@ -96,7 +96,7 @@ def StatusPrinter(valobj, *_args): # pylint: disable=invalid-name
return "Status({}, {})".format(code, reason)
def StatusWithPrinter(valobj, *_args): # pylint: disable=invalid-name
def StatusWithPrinter(valobj, *_args):
"""Extend the StatusPrinter to print the value of With for a StatusWith."""
status = valobj.GetChildMemberWithName("_status")
code = (
@ -111,7 +111,7 @@ def StatusWithPrinter(valobj, *_args): # pylint: disable=invalid-name
return rep.replace("Status", "StatusWith", 1)
def StringDataPrinter(valobj, *_args): # pylint: disable=invalid-name
def StringDataPrinter(valobj, *_args):
"""Print StringData value."""
ptr = valobj.GetChildMemberWithName("_data").GetValueAsUnsigned()
if ptr == 0:
@ -130,7 +130,7 @@ def read_memory_as_hex(process, address, size):
return "0x" + ba.hex(" ", 1)
def ConstDataRangePrinter(valobj, *_args): # pylint: disable=invalid-name
def ConstDataRangePrinter(valobj, *_args):
"""Pretty-Prints MongoDB Status objects."""
begin_value = valobj.GetChildMemberWithName("_begin")
begin = begin_value.GetValueAsUnsigned()
@ -152,7 +152,7 @@ def ConstDataRangePrinter(valobj, *_args): # pylint: disable=invalid-name
return "size=%d,v=%s" % (size, value)
def BSONObjPrinter(valobj, *_args): # pylint: disable=invalid-name
def BSONObjPrinter(valobj, *_args):
"""Print a BSONObj in a JSON format."""
ptr = valobj.GetChildMemberWithName("_objdata").GetValueAsUnsigned()
@ -176,7 +176,7 @@ def BSONObjPrinter(valobj, *_args): # pylint: disable=invalid-name
return obj
def BSONElementPrinter(valobj, *_args): # pylint: disable=invalid-name
def BSONElementPrinter(valobj, *_args):
"""Print a BSONElement in a JSON format."""
ptr = valobj.GetChildMemberWithName("data").GetValueAsUnsigned()
size = valobj.GetChildMemberWithName("totalSize").GetValueAsUnsigned()
@ -187,12 +187,12 @@ def BSONElementPrinter(valobj, *_args): # pylint: disable=invalid-name
mem = bytes(memoryview(valobj.GetProcess().ReadMemory(ptr, size, lldb.SBError())))
# Call an internal bson method to directly convert a BSON element to a string
el_tuple = bson._element_to_dict(mem, memoryview(mem), 0, len(mem), DEFAULT_CODEC_OPTIONS) # pylint: disable=protected-access
el_tuple = bson._element_to_dict(mem, memoryview(mem), 0, len(mem), DEFAULT_CODEC_OPTIONS)
return '"%s": %s' % (el_tuple[0], el_tuple[1])
def Date_tPrinter(valobj, *_args): # pylint: disable=invalid-name
def Date_tPrinter(valobj, *_args):
"""Print a Date_t in a string format."""
millis = valobj.GetChildMemberWithName("millis").GetValueAsUnsigned()
@ -207,7 +207,7 @@ def Date_tPrinter(valobj, *_args): # pylint: disable=invalid-name
return dt.isoformat()
def UUIDPrinter(valobj, *_args): # pylint: disable=invalid-name
def UUIDPrinter(valobj, *_args):
"""Print the UUID's hex string value."""
char_array = valobj.GetChildMemberWithName("_uuid").GetChildAtIndex(0)
raw_bytes = [x.GetValueAsUnsigned() for x in char_array]
@ -215,7 +215,7 @@ def UUIDPrinter(valobj, *_args): # pylint: disable=invalid-name
return str(uuid.UUID("".join(uuid_hex_bytes)))
def Decimal128Printer(valobj, *_args): # pylint: disable=invalid-name
def Decimal128Printer(valobj, *_args):
"""Print the Decimal128's string value."""
value = valobj.GetChildMemberWithName("_value")
low64 = value.GetChildMemberWithName("low64").GetValueAsUnsigned()
@ -325,7 +325,7 @@ def optional_sb_value_to_string(sb_value):
return desc
def OptionalSummaryPrinter(valobj, *_args): # pylint: disable=invalid-name
def OptionalSummaryPrinter(valobj, *_args):
"""Pretty-Prints boost::optional objects."""
# This is displayed in vscode variables windows
# The input is from OptionalPrinter
@ -388,7 +388,7 @@ class AbslHashSetPrinter:
self.data_type = resolve_type_to_base(
self.valobj.GetChildMemberWithName("slots_").GetType()
).GetPointerType()
except: # pylint: disable=bare-except
except:
print("Exception: " + str(sys.exc_info()))

View File

@ -60,7 +60,7 @@ def delete_directory(directory):
"""Recursively deletes a directory and its contents."""
try:
shutil.rmtree(directory)
except Exception: # pylint: disable=broad-except
except Exception:
pass
@ -184,7 +184,7 @@ def parse_options(args):
xform.replace(os.path.altsep or os.path.sep, os.path.sep).split("=", 1)
for xform in opts.transformations
]
except Exception as err: # pylint: disable=broad-except
except Exception as err:
parser.error(err)
return opts

View File

@ -29,7 +29,7 @@ __all__ = (
"discover_module_directories",
"configure_modules",
"register_module_test",
) # pylint: disable=undefined-all-variable
)
import imp
import inspect

View File

@ -36,12 +36,12 @@ from tenacity import Retrying, retry_if_result, stop_after_delay, wait_fixed
sys.path.append(str(Path(os.getcwd(), __file__).parent.parent))
from buildscripts.build_system_options import PathOptions # pylint: disable=wrong-import-position
from buildscripts.build_system_options import PathOptions
from buildscripts.util.oauth import (
Configs,
get_client_cred_oauth_credentials,
get_oauth_credentials,
) # pylint: disable=wrong-import-position
)
SYMBOLIZER_PATH_ENV = "MONGOSYMB_SYMBOLIZER_PATH"
# since older versions may have issues with symbolizing, we are setting the toolchain version to v4
@ -98,7 +98,7 @@ class S3BuildidDbgFileResolver(DbgFileResolver):
if not os.path.exists(build_id_path):
try:
self._get_from_s3(build_id)
except Exception: # noqa pylint: disable=broad-except
except Exception:
ex = sys.exc_info()[0]
sys.stderr.write(
"Failed to find debug symbols for {} in s3: {}\n".format(build_id, ex)
@ -399,7 +399,7 @@ class PathResolver(DbgFileResolver):
else:
data = response.json().get("data", {})
path, binary_name = data.get("debug_symbols_url"), data.get("file_name")
except Exception as err: # noqa pylint: disable=broad-except
except Exception as err:
sys.stderr.write(
f"Error occurred while trying to get response from server "
f"for buildId({build_id}): {err}\n"
@ -421,7 +421,7 @@ class PathResolver(DbgFileResolver):
else:
print("Downloaded, now unpacking...")
path = self.unpack(dl_path)
except Exception as err: # noqa pylint: disable=broad-except
except Exception as err:
sys.stderr.write(f"Failed to download & unpack file: {err}\n")
# we may have '<name>.debug', '<name>.so' or just executable binary file which may not have file 'extension'.
# if file has extension, it is good. if not, we should append .debug, because those without extension are
@ -630,7 +630,7 @@ def preprocess_frames_with_retries(
return retrying(preprocess_frames, dbg_path_resolver, trace_doc, input_format)
def classic_output(frames, outfile, **kwargs): # pylint: disable=unused-argument
def classic_output(frames, outfile, **kwargs):
"""Provide classic output."""
for frame in frames:
symbinfo = frame.get("symbinfo")

View File

@ -1,8 +1,6 @@
# This script needs to be compatible with older versions of python since it runs on older versions of OSs when testing packaging
# For example ubuntu 1604 uses python3.5
# pylint: disable=redefined-outer-name,invalid-name,subprocess-run-check
import grp
import json
import logging

View File

@ -40,7 +40,7 @@ import git
sys.path.append(os.getcwd())
import packager # pylint: disable=wrong-import-position
import packager
# The MongoDB names for the architectures we support.
ARCH_CHOICES = ["x86_64", "ppc64le", "s390x", "arm64", "aarch64"]

View File

@ -8,7 +8,6 @@ import sys
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
from buildscripts.powercycle_setup import cli
cli.main(sys.argv)

View File

@ -13,11 +13,8 @@ mongo_dir = os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__f
if __name__ == "__main__" and __package__ is None:
sys.path.append(mongo_dir)
# pylint: disable=wrong-import-position
from buildscripts.linter import ruffchecker, ruffformatter, runner
# pylint: enable=wrong-import-position
# List of supported linters
_LINTERS = [
ruffchecker.RuffChecker(),

View File

@ -16,12 +16,9 @@ MONGO_DIR = os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__f
if __name__ == "__main__" and __package__ is None:
sys.path.append(MONGO_DIR)
# pylint: disable=wrong-import-position
from buildscripts.linter import pyrightlinter, runner
from buildscripts.linter.filediff import gather_changed_files_for_lint
# pylint: enable=wrong-import-position
def is_interesting_file(filename: str) -> bool:
"""Return true if this file should be checked."""

View File

@ -13,9 +13,9 @@ if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
from buildscripts.linter import (
git, # pylint: disable=wrong-import-position
mongolint, # pylint: disable=wrong-import-position
parallel, # pylint: disable=wrong-import-position
git,
mongolint,
parallel,
)
FILES_RE = re.compile("\\.(h|cpp)$")
@ -62,7 +62,6 @@ def lint(file_names: List[str]) -> None:
def lint_all(file_names: List[str]) -> None:
# pylint: disable=unused-argument
"""Lint files command entry point based on working tree."""
all_file_names = git.get_files_to_check_working_tree(is_interesting_file)

View File

@ -8,7 +8,6 @@ import sys
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
from buildscripts.resmokelib import cli

View File

@ -126,4 +126,4 @@ def main(resmoke_report_file: str, project_id: str, build_variant: str, task_nam
if __name__ == "__main__":
main() # pylint: disable=no-value-for-parameter
main()

View File

@ -77,8 +77,6 @@ if sys.platform == "win32":
class Process(object):
"""Wrapper around subprocess.Popen class."""
# pylint: disable=protected-access
def __init__(self, logger, args, env=None, env_vars=None, cwd=None):
"""Initialize the process with the specified logger, arguments, and environment."""

View File

@ -48,7 +48,6 @@ def get_path_env_var(env_vars):
def get_binary_version(executable):
"""Return the string for the binary version of the given executable."""
# pylint: disable=wrong-import-position
from buildscripts.resmokelib.multiversionconstants import LATEST_FCV
split_executable = os.path.basename(executable).split("-")

View File

@ -9,7 +9,6 @@ from typing import Optional
mongo_path = pathlib.Path(__file__).parents[3]
sys.path.append(mongo_path)
# pylint: disable=wrong-import-position
from buildscripts.resmokelib.hang_analyzer.gen_hang_analyzer_tasks import (
GENERATED_TASK_PREFIX,
RANDOM_STRING_LENGTH,

View File

@ -143,7 +143,7 @@ class WindowsDumper(Dumper):
cdb = spawn.find_executable(debugger)
if cdb is not None:
return cdb
from win32com.shell import shell, shellcon # pylint: disable=import-outside-toplevel
from win32com.shell import shell, shellcon
# Cygwin via sshd does not expose the normal environment variables
# Use the shell api to get the variable instead

View File

@ -19,7 +19,6 @@ from buildscripts.resmokelib.hang_analyzer import dumper
mongo_path = pathlib.Path(__file__).parents[3]
sys.path.append(mongo_path)
# pylint: disable=wrong-import-position
from buildscripts.resmokelib.utils import evergreen_conn
from buildscripts.util.fileops import write_file
from buildscripts.util.read_config import read_config_file

View File

@ -177,7 +177,7 @@ class HangAnalyzer(Subcommand):
except dumper.DumpError as err:
self.root_logger.error(err.message)
dump_pids = {**err.dump_pids, **dump_pids}
except Exception as err: # pylint: disable=broad-except
except Exception as err:
self.root_logger.info(
"Error encountered when invoking debugger %s", err
)
@ -201,7 +201,7 @@ class HangAnalyzer(Subcommand):
for pinfo in [pinfo for pinfo in processes if not re.match("^(java|python)", pinfo.name)]:
try:
dumpers.dbg.dump_info(pinfo, take_dump=False)
except Exception as err: # pylint: disable=broad-except
except Exception as err:
self.root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())
@ -212,7 +212,7 @@ class HangAnalyzer(Subcommand):
dumpers.jstack.dump_info(
self.root_logger, self.options.debugger_output, pinfo.name, pid
)
except Exception as err: # pylint: disable=broad-except
except Exception as err:
self.root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())

View File

@ -15,7 +15,7 @@ _FLUSH_THREAD: "_FlushThread" = None
def start_thread():
"""Start the flush thread."""
global _FLUSH_THREAD # pylint: disable=global-statement
global _FLUSH_THREAD
with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is not None:
raise ValueError("FlushThread has already been started")

View File

@ -74,11 +74,11 @@ def configure_loggers():
"""Configure the loggers and setup redirects."""
_setup_redirects()
global ROOT_TESTS_LOGGER # pylint: disable=global-statement
global ROOT_TESTS_LOGGER
ROOT_TESTS_LOGGER = new_root_logger(TESTS_LOGGER_NAME)
global ROOT_FIXTURE_LOGGER # pylint: disable=global-statement
global ROOT_FIXTURE_LOGGER
ROOT_FIXTURE_LOGGER = new_root_logger(FIXTURE_LOGGER_NAME)
global ROOT_EXECUTOR_LOGGER # pylint: disable=global-statement
global ROOT_EXECUTOR_LOGGER
ROOT_EXECUTOR_LOGGER = new_root_logger(EXECUTOR_LOGGER_NAME)
_write_evergreen_log_spec()

View File

@ -183,15 +183,15 @@ class MongoReleases(BaseModel):
def get_fcv_versions(self) -> List[Version]:
"""Get the Version representation of all fcv versions."""
return [Version(fcv) for fcv in self.feature_compatibility_versions] # pylint: disable=not-an-iterable
return [Version(fcv) for fcv in self.feature_compatibility_versions]
def get_lts_versions(self) -> List[Version]:
"""Get the Version representation of the lts versions."""
return [Version(lts) for lts in self.long_term_support_releases] # pylint: disable=not-an-iterable
return [Version(lts) for lts in self.long_term_support_releases]
def get_eol_versions(self) -> List[Version]:
"""Get the Version representation of the EOL versions."""
return [Version(eol) for eol in self.eol_versions] # pylint: disable=not-an-iterable
return [Version(eol) for eol in self.eol_versions]
class MultiversionService:

View File

@ -244,7 +244,7 @@ MongoDB Powercycle Tests. To run a powercycle test locally, use the following st
parser.add_argument("remote_operations", nargs="*", help=argparse.SUPPRESS)
self.parser_actions = parser._actions[1:-1] # pylint: disable=protected-access
self.parser_actions = parser._actions[1:-1]
def parse(self, subcommand, parser, parsed_args, should_configure_otel=True, **kwargs):
"""Parse command-line options."""

View File

@ -18,7 +18,6 @@ from buildscripts.resmokelib.powercycle.lib.remote_operations import RemoteOpera
LOGGER = logging.getLogger(__name__)
# pylint: disable=abstract-method
class PowercycleCommand(Subcommand):
"""Base class for remote operations to set up powercycle."""

View File

@ -138,7 +138,6 @@ class RemoteOperations(object):
"""
return message.startswith("ssh:")
# pylint: disable=inconsistent-return-statements
def operation(
self, operation_type, operation_param, operation_dir=None, retry=False, retry_count=5
):

View File

@ -36,7 +36,6 @@ if _IS_WINDOWS:
_try_import("win32serviceutil")
# pylint: disable=undefined-variable,unused-variable
class WindowsService(object):
"""Windows service control class."""
@ -184,7 +183,6 @@ class WindowsService(object):
return self.pids
# pylint: enable=undefined-variable,unused-variable
class PosixService(object):
"""Service control on POSIX systems.
@ -222,7 +220,7 @@ class PosixService(object):
self.pids = proc.get_pids()
return ret, output
def stop(self, timeout): # pylint: disable=unused-argument
def stop(self, timeout):
"""Crash the posix process process. Empty "pids" to signal to `status` the process was terminated. Returns (code, output) tuple."""
proc = ProcessControl(name=self.bin_name)
proc.kill()

View File

@ -115,7 +115,7 @@ def exit_handler():
with open(REPORT_JSON_FILE, "w") as jstream:
json.dump(REPORT_JSON, jstream)
LOGGER.debug("Exit handler: report file contents %s", REPORT_JSON)
except: # pylint: disable=bare-except
except:
pass
if EXIT_YML_FILE:
@ -124,21 +124,21 @@ def exit_handler():
with open(EXIT_YML_FILE, "w") as yaml_stream:
yaml.safe_dump(EXIT_YML, yaml_stream)
LOGGER.debug("Exit handler: report file contents %s", EXIT_YML)
except: # pylint: disable=bare-except
except:
pass
LOGGER.debug("Exit handler: Killing processes")
try:
Processes.kill_all()
LOGGER.debug("Exit handler: Killing processes finished")
except: # pylint: disable=bare-except
except:
pass
LOGGER.debug("Exit handler: Cleaning up temporary files")
try:
NamedTempFile.delete_all()
LOGGER.debug("Exit handler: Cleaning up temporary files finished")
except: # pylint: disable=bare-except
except:
pass
@ -192,7 +192,7 @@ def register_signal_handler(handler):
signal.signal(signal_num, handler)
def dump_stacks_and_exit(signum, frame): # pylint: disable=unused-argument
def dump_stacks_and_exit(signum, frame):
"""Provide a handler that will dump the stacks of all threads."""
LOGGER.info("Dumping stacks!")
@ -282,9 +282,7 @@ def abs_path(path):
def symlink_dir(source_dir, dest_dir):
"""Symlink the 'dest_dir' to 'source_dir'."""
if _IS_WINDOWS:
win32file.CreateSymbolicLink( # pylint: disable=undefined-variable
dest_dir, source_dir, win32file.SYMBOLIC_LINK_FLAG_DIRECTORY
) # pylint: disable=undefined-variable
win32file.CreateSymbolicLink(dest_dir, source_dir, win32file.SYMBOLIC_LINK_FLAG_DIRECTORY)
else:
os.symlink(source_dir, dest_dir)
@ -480,14 +478,12 @@ def chmod_w_file(chmod_file):
# The os package cannot set the directory to '+w', so we use win32security.
# See https://stackoverflow.com/
# questions/12168110/setting-folder-permissions-in-windows-using-python
# pylint: disable=undefined-variable,unused-variable
user, domain, sec_type = win32security.LookupAccountName("", "Everyone")
file_sd = win32security.GetFileSecurity(chmod_file, win32security.DACL_SECURITY_INFORMATION)
dacl = file_sd.GetSecurityDescriptorDacl()
dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_WRITE, user)
file_sd.SetSecurityDescriptorDacl(1, dacl, 0)
win32security.SetFileSecurity(chmod_file, win32security.DACL_SECURITY_INFORMATION, file_sd)
# pylint: enable=undefined-variable,unused-variable
else:
os.chmod(chmod_file, os.stat(chmod_file) | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
@ -1372,13 +1368,11 @@ def get_remote_python():
def main(parser_actions, options):
"""Execute Main program."""
# pylint: disable=global-statement
global REPORT_JSON
global REPORT_JSON_FILE
global REPORT_JSON_SUCCESS
global EXIT_YML_FILE
global EXIT_YML
# pylint: enable=global-statement
atexit.register(exit_handler)
register_signal_handler(dump_stacks_and_exit)

View File

@ -148,7 +148,7 @@ class TestRunner(Subcommand):
):
tag_docs[tag_name] = doc
if suite_name in config.SUITE_FILES: # pylint: disable=unsupported-membership-test
if suite_name in config.SUITE_FILES:
out_tag_names.append(tag_name)
if config.SUITE_FILES == [config.DEFAULTS["suite_files"]]:
@ -872,7 +872,7 @@ class TestRunner(Subcommand):
}
)
return True
except: # pylint: disable=bare-except
except:
self._exec_logger.exception(
"Encountered an error when running %ss of suite %s.",
suite.test_kind,
@ -910,7 +910,6 @@ class TestRunner(Subcommand):
)
random.shuffle(suite.tests)
# pylint: disable=inconsistent-return-statements
def _get_suites(self) -> List[Suite]:
"""Return the list of suites for this resmoke invocation."""
try:
@ -2284,11 +2283,7 @@ def to_local_args(input_args: Optional[List[str]] = None):
setattr(parsed_args, "suite_files", origin_suite)
# The top-level parser has one subparser that contains all subcommand parsers.
command_subparser = [
action
for action in parser._actions # pylint: disable=protected-access
if action.dest == "command"
][0]
command_subparser = [action for action in parser._actions if action.dest == "command"][0]
run_parser = command_subparser.choices.get("run")
@ -2312,9 +2307,9 @@ def to_local_args(input_args: Optional[List[str]] = None):
return f"'{option_name}={option_value}'"
# Trim the argument namespace of any args we don't want to return.
for group in run_parser._action_groups: # pylint: disable=protected-access
for group in run_parser._action_groups:
arg_dests_visited = set()
for action in group._group_actions: # pylint: disable=protected-access
for action in group._group_actions:
arg_dest = action.dest
arg_value = getattr(parsed_args, arg_dest, None)
@ -2351,7 +2346,7 @@ def to_local_args(input_args: Optional[List[str]] = None):
# These are arguments that take no value.
elif action.nargs == 0:
other_local_args.append(arg_name)
elif isinstance(action, argparse._AppendAction): # pylint: disable=protected-access
elif isinstance(action, argparse._AppendAction):
args = [format_option(arg_name, elem) for elem in arg_value]
other_local_args.extend(args)
else:

View File

@ -586,7 +586,7 @@ class _Selector(object):
return tests, excluded
@staticmethod
def get_tags(test_file): # pylint: disable=unused-argument
def get_tags(test_file):
"""Retrieve the tags associated with the give test file."""
return []

View File

@ -461,7 +461,7 @@ class SetupMultiversion(Subcommand):
try:
try_download(url)
except Exception as err: # pylint: disable=broad-except
except Exception as err:
self.logger.warning(
"Setting up tarball failed with error, retrying once... error=%s", err
)

View File

@ -22,7 +22,7 @@ if _IS_WINDOWS:
def register(logger, suites, start_time):
"""Register an event object to wait for signal, or a signal handler for SIGUSR1."""
def _handle_sigusr1(signum, frame): # pylint: disable=unused-argument
def _handle_sigusr1(signum, frame):
"""Signal handler for SIGUSR1.
The handler will dump the stacks of all threads and write out the report file and
@ -110,7 +110,7 @@ def _dump_stacks(logger, header_msg):
sb = []
sb.append(header_msg)
frames = sys._current_frames() # pylint: disable=protected-access
frames = sys._current_frames()
sb.append("Total threads: %d" % (len(frames)))
sb.append("")

View File

@ -31,7 +31,7 @@ _NAMED_SUITES = None
def get_named_suites() -> List[SuiteName]:
"""Return a list of the suites names."""
global _NAMED_SUITES # pylint: disable=global-statement
global _NAMED_SUITES
if _NAMED_SUITES is None:
# Skip "with_*server" and "no_server" because they do not define any test files to run.

View File

@ -15,11 +15,11 @@ from buildscripts.util.expansions import get_expansion
def build_images(suite_name, fixture_instance):
"""Build images needed to run the resmoke suite against docker containers."""
image_builder = DockerComposeImageBuilder(suite_name, fixture_instance)
if "config" in config.DOCKER_COMPOSE_BUILD_IMAGES: # pylint: disable=unsupported-membership-test
if "config" in config.DOCKER_COMPOSE_BUILD_IMAGES:
image_builder.build_config_image()
if "mongo-binaries" in config.DOCKER_COMPOSE_BUILD_IMAGES: # pylint: disable=unsupported-membership-test
if "mongo-binaries" in config.DOCKER_COMPOSE_BUILD_IMAGES:
image_builder.build_mongo_binaries_image()
if "workload" in config.DOCKER_COMPOSE_BUILD_IMAGES: # pylint: disable=unsupported-membership-test
if "workload" in config.DOCKER_COMPOSE_BUILD_IMAGES:
image_builder.build_workload_image()
if config.DOCKER_COMPOSE_BUILD_IMAGES:
repro_command = f"""

View File

@ -73,7 +73,7 @@ def make_dummy_fixture(suite_name):
return make_fixture(fixture_class, fixture_logger, job_num=0, **fixture_config)
class FixtureBuilder(ABC, metaclass=registry.make_registry_metaclass(_BUILDERS, type(ABC))): # pylint: disable=invalid-metaclass
class FixtureBuilder(ABC, metaclass=registry.make_registry_metaclass(_BUILDERS, type(ABC))):
"""
ABC for fixture builders.

View File

@ -149,7 +149,6 @@ class _FixtureConfig(object):
LAST_LTS_MONGOS_BINARY,
)
# pylint: disable=invalid-name
self.MONGOD_EXECUTABLE = config.MONGOD_EXECUTABLE
self.DEFAULT_MONGOD_EXECUTABLE = config.DEFAULT_MONGOD_EXECUTABLE
self.MONGOD_SET_PARAMETERS = config.MONGOD_SET_PARAMETERS

View File

@ -37,7 +37,7 @@ _VERSIONS = {} # type: ignore
# interface.py and fixturelib API establishes forward-compatibility of fixture files.
# If the informal API becomes heavily used and needs forward-compatibility,
# consider adding it to the formal API.
class APIVersion(object, metaclass=registry.make_registry_metaclass(_VERSIONS)): # pylint: disable=invalid-metaclass
class APIVersion(object, metaclass=registry.make_registry_metaclass(_VERSIONS)):
"""Class storing fixture API version info."""
REGISTERED_NAME = "APIVersion"
@ -74,7 +74,7 @@ class TeardownMode(Enum):
ABORT = 6
class Fixture(object, metaclass=registry.make_registry_metaclass(_FIXTURES)): # pylint: disable=invalid-metaclass
class Fixture(object, metaclass=registry.make_registry_metaclass(_FIXTURES)):
"""Base class for all fixtures."""
# Error response codes copied from mongo/base/error_codes.yml.

View File

@ -364,7 +364,6 @@ class ReplicaSetFixture(interface.ReplFixture, interface._DockerComposeInterface
# These error codes may be transient, and so we retry the reconfig with a
# (potentially) higher config version. We should not receive these codes
# indefinitely.
# pylint: disable=too-many-boolean-expressions
if err.code not in [
ReplicaSetFixture._NEW_REPLICA_SET_CONFIGURATION_INCOMPATIBLE,
ReplicaSetFixture._CURRENT_CONFIG_NOT_COMMITTED_YET,
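The retry-on-transient-codes behaviour this comment describes can be sketched, in simplified form, as the loop below; the error-code values and helper name are illustrative, not the fixture's real constants:

    import time

    import pymongo
    import pymongo.errors

    TRANSIENT_RECONFIG_CODES = {103, 109}  # placeholder values for the transient codes listed above


    def reconfig_with_retries(client: pymongo.MongoClient, members: list, attempts: int = 10) -> None:
        """Retry replSetReconfig while the server keeps returning a transient error code."""
        for _ in range(attempts):
            config = client.admin.command({"replSetGetConfig": 1})["config"]
            config["version"] += 1  # bump the version so each attempt submits a newer config
            config["members"] = members
            try:
                client.admin.command({"replSetReconfig": config})
                return
            except pymongo.errors.OperationFailure as err:
                if err.code not in TRANSIENT_RECONFIG_CODES:
                    raise
                time.sleep(0.5)
        raise RuntimeError("replica set reconfig did not succeed after retries")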

View File

@ -5,7 +5,7 @@ import signal
from buildscripts.resmokelib.testing.fixtures import interface
class YesFixture(interface.Fixture): # pylint: disable=abstract-method
class YesFixture(interface.Fixture):
"""Fixture which spawns several 'yes' executables to generate lots of log messages."""
def __init__(self, logger, job_num, fixturelib, num_instances=1, message_length=100):

View File

@ -23,7 +23,7 @@ TRACER = trace.get_tracer("resmoke")
class HookTestArchival(object):
"""Archive hooks and tests to S3."""
def __init__(self, suite: Suite, hooks, archive_instance, archive_config): # pylint: disable=unused-argument
def __init__(self, suite: Suite, hooks, archive_instance, archive_config):
"""Initialize HookTestArchival."""
self.archive_instance = archive_instance
archive_config = utils.default_if_none(archive_config, {})

View File

@ -271,7 +271,7 @@ class _AddRemoveShardThread(threading.Thread):
if self.__lifecycle.poll_for_idle_request():
self.__lifecycle.send_idle_acknowledgement()
except Exception: # pylint: disable=W0703
except Exception:
# Proactively log the exception when it happens so it will be
# flushed immediately.
self.logger.exception("Add/Remove Shard Thread threw exception")

View File

@ -45,7 +45,7 @@ class _BackgroundJob(threading.Thread):
try:
self._hook_test_case.run_dynamic_test(self._test_report)
except: # pylint: disable=bare-except
except:
self.exc_info = sys.exc_info()
finally:
with self._lock:

View File

@ -39,7 +39,7 @@ class BGJob(threading.Thread):
if interrupted:
self._hook.logger.info("interrupted")
break
except Exception as err: # pylint: disable=broad-except
except Exception as err:
self._hook.logger.error("Background thread caught exception: %s.", err)
self.err = err
self.__is_alive = False

View File

@ -10,12 +10,10 @@ class CheckReplChangeCollectionConsistency(jsfile.PerClusterDataConsistencyHook)
IS_BACKGROUND = False
def __init__( # pylint: disable=super-init-not-called
self, hook_logger, fixture, shell_options=None
):
def __init__(self, hook_logger, fixture, shell_options=None):
"""Initialize CheckReplChangeCollectionConsistency."""
description = "Check change_collection(s) of all replica set members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_change_collection.js")
jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called
jsfile.JSHook.__init__(
self, hook_logger, fixture, js_filename, description, shell_options=shell_options
)

View File

@ -82,7 +82,7 @@ class _ChangeStreamsThread(threading.Thread):
while stream.alive and not self._stop_iterating.is_set():
try:
change = stream.try_next()
except Exception as err: # pylint: disable=broad-except
except Exception as err:
self.logger.error(
"Failed to get the next change from the change stream: %s", err
)

View File

@ -33,7 +33,7 @@ class CleanEveryN(interface.Hook):
)
n = 1
self.n = n # pylint: disable=invalid-name
self.n = n
self.tests_run = 0
self.shell_options = shell_options
self.skip_database_deletion = skip_database_deletion

View File

@ -232,7 +232,7 @@ class _InitialSyncThread(threading.Thread):
)
self.__lifecycle.wait_for_action_interval(wait_secs)
except Exception as err: # pylint: disable=broad-except
except Exception as err:
msg = "Syncer Thread threw exception: {}".format(err)
self.logger.exception(msg)
self._is_idle_evt.set()
@ -343,7 +343,6 @@ class _InitialSyncThread(threading.Thread):
# These error codes may be transient, and so we retry the reconfig with a
# (potentially) higher config version. We should not receive these codes
# indefinitely.
# pylint: disable=too-many-boolean-expressions
if err.code not in (
self._NEW_REPLICA_SET_CONFIGURATION_INCOMPATIBLE,
self._CURRENT_CONFIG_NOT_COMMITTED_YET,

View File

@ -14,12 +14,10 @@ class CheckReplDBHash(jsfile.PerClusterDataConsistencyHook):
IS_BACKGROUND = False
def __init__( # pylint: disable=super-init-not-called
self, hook_logger, fixture, shell_options=None
):
def __init__(self, hook_logger, fixture, shell_options=None):
"""Initialize CheckReplDBHash."""
description = "Check dbhashes of all replica set or master/slave members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_dbhash.js")
jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called
jsfile.JSHook.__init__(
self, hook_logger, fixture, js_filename, description, shell_options=shell_options
)

View File

@ -297,7 +297,7 @@ class _SetParameterThread(threading.Thread):
now = time.time()
wait_secs = max(0, self._setparameter_interval_secs - (now - self._last_exec))
self.__lifecycle.wait_for_action_interval(wait_secs)
except Exception: # pylint: disable=W0703
except Exception:
# Proactively log the exception when it happens so it will be
# flushed immediately.
self.logger.exception("SetParameter thread threw exception")

View File

@ -125,7 +125,7 @@ class _FuzzStressThread(threading.Thread):
now = time.time()
wait_secs = max(0, self._interval_secs - (now - self._last_exec))
self.__lifecycle.wait_for_action_interval(wait_secs)
except Exception: # pylint: disable=W0703
except Exception:
# Proactively log the exception when it happens so it will be
# flushed immediately.
self.logger.exception("Stress fuzzing thread threw exception")

View File

@ -43,7 +43,7 @@ class BackgroundInitialSync(interface.Hook):
description = "Background Initial Sync"
interface.Hook.__init__(self, hook_logger, fixture, description)
self.n = n # pylint: disable=invalid-name
self.n = n
self.tests_run = 0
self.random_restarts = 0
self._shell_options = shell_options
@ -203,7 +203,7 @@ class IntermediateInitialSync(interface.Hook):
description = "Intermediate Initial Sync"
interface.Hook.__init__(self, hook_logger, fixture, description)
self.n = n # pylint: disable=invalid-name
self.n = n
self.tests_run = 0
def _should_run_after_test(self):

Some files were not shown because too many files have changed in this diff Show More