SERVER-54861: Update pylint to 2.7.2

David Bradford authored on 2021-03-01 12:35:13 -05:00, committed by Evergreen Agent
parent fae0542f9f
commit a22cbaba06
24 changed files with 32 additions and 33 deletions

View File

@@ -31,7 +31,7 @@ variable-rgx=[a-z_][a-z0-9_]{1,50}$
 # R0801 - duplicate-code - See PM-1380
 # E0611 - no-name-in-module
-disable=bad-continuation,fixme,import-error,line-too-long,no-member,locally-disabled,no-else-return,redefined-variable-type,too-few-public-methods,unused-import,useless-object-inheritance,deprecated-module,unnecessary-pass,duplicate-code,no-else-raise,deprecated-method,exec-used,no-name-in-module
+disable=bad-continuation,fixme,import-error,line-too-long,no-member,locally-disabled,no-else-return,redefined-variable-type,too-few-public-methods,unused-import,useless-object-inheritance,deprecated-module,unnecessary-pass,duplicate-code,no-else-raise,deprecated-method,exec-used,no-name-in-module,raise-missing-from,unnecessary-comprehension,super-with-arguments,consider-using-sys-exit,import-outside-toplevel,no-else-continue,no-else-break

 [IMPORTS]
 known-third-party=boto3,botocore,psutil,yaml,xmlrunner
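The names appended to the disable list are messages the newer pylint reports across the codebase; disabling them globally avoids a sweeping cleanup in the same commit. A minimal illustration of what two of them (super-with-arguments and raise-missing-from) flag; the class and function names below are hypothetical, not repository code:

    class Base:
        pass

    class Child(Base):
        def __init__(self):
            # super-with-arguments: on Python 3 a plain super().__init__() is enough.
            super(Child, self).__init__()

    def read_config(path):
        try:
            with open(path, encoding="utf-8") as fh:
                return fh.read()
        except OSError as err:
            # raise-missing-from: pylint wants "raise RuntimeError(...) from err"
            # so the original exception is chained explicitly.
            raise RuntimeError("cannot read config at " + path)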

View File

@@ -927,7 +927,7 @@ Black Duck manually. After the update to the third-party library is committed,
 version information for this component at {BLACKDUCK_PROJECT_URL}. Click on the down arrow on the
 far right of the component, choose edit and specify the new version."""
     else:
-        component_explanation = f"""This commponent was automatically detected by Black Duck. Black Duck should automatically detect
+        component_explanation = """This commponent was automatically detected by Black Duck. Black Duck should automatically detect
 the new version after the library is updated and the daily scanner task runs again."""

     mgr.write_report(
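This hunk, and several later ones (the packager, remote operations, powercycle, and test files), drops an f prefix from strings that contain no placeholders; newer pylint reports these as f-string-without-interpolation (W1309). A tiny illustration with hypothetical values:

    name = "pylint"
    print(f"linting with {name}")  # placeholder present: the f-prefix does real work
    print(f"Result of command:")   # no placeholder: W1309, a plain string literal suffices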

View File

@@ -192,10 +192,8 @@ class ClangFormat(object):
                     self.path = os.path.join(ospath, program)
                     if os.path.exists(self.path) and self._validate_version():
                         break
-                    else:
-                        self.path = None
-                        continue
-                    break
+                    self.path = None
+                    continue
                 else:
                     continue
                 break
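The rewritten block keeps the same search behaviour while dropping the else branch and the unreachable trailing break inside the inner loop. Because the surrounding for/else idiom is easy to misread, here is a small self-contained sketch of the pattern; the function and variable names are hypothetical, not the repository's:

    import os

    def find_first_existing(directories, names):
        """Return the first existing directory/name combination, or None."""
        for directory in directories:
            for name in names:
                candidate = os.path.join(directory, name)
                if os.path.exists(candidate):
                    break        # found a match: leave the inner loop
            else:
                continue         # inner loop never broke: try the next directory
            break                # inner loop broke: stop the outer search too
        else:
            return None          # every directory was exhausted without a match
        return candidate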

View File

@@ -59,7 +59,7 @@ def parse_source_files(callback, src_root):
         with open(source_file, 'r', encoding='utf-8') as fh:
             text = fh.read()
-            if not any([zz in text for zz in quick]):
+            if not any(zz in text for zz in quick):
                 continue

             matchiters = [p.finditer(text) for p in patterns]
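Here, and again in is_task_tagged and the libdeps analyzer below, any([...]) or all([...]) over a list comprehension becomes a bare generator expression; pylint 2.7 reports the list form as use-a-generator, since the generator can short-circuit without building the whole list. A small illustration with hypothetical values:

    words = ["assert", "fassert", "uassert", "massert"]
    text = "uassertStatusOK(status);"

    # Flagged by use-a-generator: the comprehension builds the whole list of booleans first.
    found_list = any([word in text for word in words])

    # Preferred: the generator form stops at the first match.
    found_gen = any(word in text for word in words)

    assert found_list == found_gen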

View File

@@ -93,7 +93,6 @@ def get_backports_required_last_lts_hash(task_path_suffix: str):
     """Parse the last-lts shell binary to get the commit hash."""
     last_lts_shell_exec = os.path.join(task_path_suffix, LAST_LTS_MONGO_BINARY)
     shell_version = check_output([last_lts_shell_exec, "--version"]).decode('utf-8')
-    last_lts_commit_hash = ""
     for line in shell_version.splitlines():
         if "gitVersion" in line:
             version_line = line.split(':')[1]

View File

@@ -101,8 +101,8 @@ def is_task_tagged(task, tags, filters):
     :param filters: List of tags that should not belong to the task.
     :return: True if task matches the query.
     """
-    if all([tag in task.tags for tag in tags]):
-        if not filters or not any([tag in task.tags for tag in filters]):
+    if all(tag in task.tags for tag in tags):
+        if not filters or not any(tag in task.tags for tag in filters):
             return True
     return False

View File

@@ -42,7 +42,7 @@ from pymongo import MongoClient
 # Permit imports from "buildscripts".
 sys.path.append(os.path.normpath(os.path.join(os.path.abspath(__file__), '../../..')))

-# pylint: disable=wrong-import-position
+# pylint: disable=wrong-import-position,wrong-import-order
 from buildscripts.resmokelib import configure_resmoke
 from buildscripts.resmokelib.logging import loggers
 from buildscripts.resmokelib.testing.fixtures import interface

View File

@@ -1055,7 +1055,7 @@ def _validate_enum_int(ctxt, idl_enum):
     min_value = min(int_values_set)
     max_value = max(int_values_set)

-    valid_int = {x for x in range(min_value, max_value + 1)}
+    valid_int = set(range(min_value, max_value + 1))
     if valid_int != int_values_set:
         ctxt.add_enum_non_continuous_range_error(idl_enum, idl_enum.name)
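The comprehension only re-emitted each element of range(), a pattern pylint reports as unnecessary-comprehension; passing the iterable straight to set() is equivalent. For example, with hypothetical bounds:

    min_value, max_value = 3, 6

    # Flagged: the comprehension adds nothing over the iterable itself.
    valid_a = {x for x in range(min_value, max_value + 1)}

    # Equivalent and clearer.
    valid_b = set(range(min_value, max_value + 1))

    assert valid_a == valid_b == {3, 4, 5, 6}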

View File

@@ -682,13 +682,13 @@ class ParserContext(object):
                     " struct '%s' is marked as immutable.") % (field_name, struct_name, struct_name))

     def add_useless_variant_error(self, location):
-        # type: (common.SourceLocation,) -> None
+        # type: (common.SourceLocation) -> None
         """Add an error about a variant with 0 or 1 variant types."""
         self._add_error(location, ERROR_ID_USELESS_VARIANT,
                         ("Cannot declare a variant with only 0 or 1 variant types"))

     def add_variant_comparison_error(self, location):
-        # type: (common.SourceLocation,) -> None
+        # type: (common.SourceLocation) -> None
         """Add an error about a struct with generate_comparison_operators and a variant field."""
         self._add_error(location, ERROR_ID_VARIANT_COMPARISON,
                         ("generate_comparison_operators is not supported with variant types"))

View File

@@ -1745,7 +1745,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
                     self._writer.write_line(
                         '%s object(localNS);' % (common.title_case(struct.cpp_name)))
                 else:
-                    assert "Missing case"
+                    assert False, "Missing case"
             else:
                 self._writer.write_line('%s object;' % common.title_case(struct.cpp_name))
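This change is a genuine behaviour fix: an assert on a non-empty string literal can never fail, so the old line silently passed instead of flagging the missing case. A standalone demonstration, not repository code:

    def dispatch(kind):
        if kind == "struct":
            return "handled struct"
        if kind == "command":
            return "handled command"
        # assert "Missing case" would always pass here and fall through, returning None;
        # asserting False actually surfaces the programming error.
        assert False, "Missing case: " + kind

    print(dispatch("struct"))     # handled struct
    # dispatch("enum")            # AssertionError: Missing case: enum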

View File

@@ -1446,7 +1446,7 @@ class TestParser(testcase.IDLTestcase):
         # type: () -> None
         """Negative unstable-field test cases."""
         self.assert_parse_fail(
-            textwrap.dedent(f"""
+            textwrap.dedent("""
         commands:
             foo:
                 description: foo
@@ -1464,7 +1464,7 @@ class TestParser(testcase.IDLTestcase):
         # type: () -> None
         """Positive same command_name with different api_version test cases."""
         self.assert_parse(
-            textwrap.dedent(f"""
+            textwrap.dedent("""
         commands:
             foo:
                 description: foo
@@ -1692,7 +1692,7 @@ class TestParser(testcase.IDLTestcase):
         # The 'command_name' and 'command_alias' fields cannot have same value.
         self.assert_parse_fail(
-            textwrap.dedent(f"""
+            textwrap.dedent("""
         commands:
             foo:
                 description: foo

View File

@@ -64,6 +64,7 @@ class NothingImportResolver(idl.parser.ImportResolverBase):
 class IDLTestcase(unittest.TestCase):
     """IDL Test case base class."""

+    # pylint: disable=inconsistent-return-statements
     def _parse(self, doc_str, resolver):
         # type: (str, idl.parser.ImportResolverBase) -> idl.syntax.IDLParsedSpec
         """Parse a document and throw a unittest failure if it fails to parse as a valid YAML document."""

View File

@@ -399,10 +399,9 @@ class ExcludeDependencies(Analyzer):
         valid_depender_nodes = []
         for depender_node in set(self.graph[self.nodes[0]]):
-            if all([
-                    bool(excludes_node not in set(self.graph.rgraph[depender_node]))
-                    for excludes_node in self.nodes[1:]
-            ]):
+            if all(
+                    bool(excludes_node not in set(self.graph.rgraph[depender_node]))
+                    for excludes_node in self.nodes[1:]):
                 valid_depender_nodes.append(depender_node)
         return valid_depender_nodes
@@ -590,7 +589,7 @@ class GaJsonPrinter(GaPrinter):
     def print(self):
         """Print the result data."""
-        import json
+        import json  # pylint: disable=import-outside-toplevel

         results = self.libdeps_graph_analysis.get_results()
         print(json.dumps(self.serialize(results)))
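Besides the global import-outside-toplevel entry in the disable list, a few deliberate in-function imports (json here, win32com.shell and shutil below) also get an inline disable, which presumably keeps the intent visible at the call site. A small sketch of the pattern; the function and data are hypothetical:

    def dump_results_as_json(results):
        # A deliberately deferred import: pylint would report C0415
        # (import-outside-toplevel), so the choice is recorded with an inline disable.
        import json  # pylint: disable=import-outside-toplevel
        return json.dumps(results)

    print(dump_results_as_json({"nodes": 3, "edges": 2}))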

View File

@@ -31,6 +31,8 @@ from enum import Enum, auto
 import networkx

+# pylint: disable=invalid-name
+
 class CountTypes(Enum):
     """Enums for the different types of counts to perform on a graph."""

View File

@@ -13,7 +13,7 @@ class PyLintLinter(base.LinterBase):
     def __init__(self):
         # type: () -> None
         """Create a pylint linter."""
-        super(PyLintLinter, self).__init__("pylint", "2.3.1")
+        super(PyLintLinter, self).__init__("pylint", "2.7.2")

     def get_lint_version_cmd_args(self):
         # type: () -> List[str]

View File

@@ -806,7 +806,7 @@ def make_rpm(distro, build_os, arch, spec, srcdir): # pylint: disable=too-many-
         "-D",
         f"dist .{distro.release_dist(build_os)}",
         "-D",
-        f"_use_internal_dependency_generator 0",
+        "_use_internal_dependency_generator 0",
         "-D",
         f"dynamic_version {spec.pversion(distro)}",
         "-D",

View File

@@ -110,7 +110,7 @@ class WindowsDumper(Dumper):
         cdb = spawn.find_executable(debugger)
         if cdb is not None:
             return cdb
-        from win32com.shell import shell, shellcon
+        from win32com.shell import shell, shellcon  # pylint: disable=import-outside-toplevel

         # Cygwin via sshd does not expose the normal environment variables
         # Use the shell api to get the variable instead

View File

@@ -31,7 +31,7 @@ def extract_debug_symbols(root_logger):
     def _extract_tar(path, root_logger):
-        import shutil
+        import shutil  # pylint: disable=import-outside-toplevel

         # The file name is always .tgz but it's "secretly" a zip file on Windows :(
         compressed_format = 'zip' if sys.platform == "win32" else 'gztar'
         shutil.unpack_archive(path, format=compressed_format)

View File

@@ -76,7 +76,7 @@ class RemoteOperations(object): # pylint: disable=too-many-instance-attributes
                                    shell=self.use_shell)
         buff_stdout, _ = process.communicate()
         buff = buff_stdout.decode("utf-8", "replace")
-        print(f"Result of command:")
+        print("Result of command:")
         print(textwrap.indent(buff, "[result body] "))
         return process.poll(), buff

View File

@@ -481,8 +481,7 @@ def install_tarball(tarball, root_dir):
         fi ;
         done ;
         popd ;
-        """.format(  # pylint: disable=bad-continuation
-            tarball=tarball, tmp_dir=tmp_dir, root_dir=root_dir)
+        """.format(tarball=tarball, tmp_dir=tmp_dir, root_dir=root_dir)
         ret, output = execute_cmd(cmds, use_file=True)
         shutil.rmtree(tmp_dir)
     else:

View File

@@ -25,9 +25,9 @@ class SetUpEC2Instance(PowercycleCommand):
         remote_dir = powercycle_constants.REMOTE_DIR
         db_path = powercycle_constants.DB_PATH

-        set_permission_stmt = f"chmod -R 777"
+        set_permission_stmt = "chmod -R 777"
         if self.is_windows():
-            set_permission_stmt = f"setfacl -s user::rwx,group::rwx,other::rwx"
+            set_permission_stmt = "setfacl -s user::rwx,group::rwx,other::rwx"
         cmds = f"{self.sudo} mkdir -p {remote_dir}; {self.sudo} chown -R {user_group} {remote_dir}; {set_permission_stmt} {remote_dir}; ls -ld {remote_dir}"
         cmds = f"{cmds}; {self.sudo} mkdir -p {db_path}; {self.sudo} chown -R {user_group} {db_path}; {set_permission_stmt} {db_path}; ls -ld {db_path}"

View File

@@ -269,6 +269,7 @@ class TestRunner(Subcommand): # pylint: disable=too-many-instance-attributes
                 suite.test_kind, suite.get_display_name(), config.RANDOM_SEED)
             random.shuffle(suite.tests)

+    # pylint: disable=inconsistent-return-statements
     def _get_suites(self):
         """Return the list of suites for this resmoke invocation."""
         try:

View File

@@ -181,7 +181,7 @@ class TestRepeatConfig(unittest.TestCase):
         repeat_config = under_test.RepeatConfig(repeat_tests_num=5)
         repeat_options = repeat_config.generate_resmoke_options()

-        self.assertEqual(repeat_options.strip(), f"--repeatSuites=5")
+        self.assertEqual(repeat_options.strip(), "--repeatSuites=5")

     def test_get_resmoke_repeat_options_secs(self):
         repeat_config = under_test.RepeatConfig(repeat_tests_secs=5)

View File

@@ -3,7 +3,7 @@
 GitPython ~= 3.1.7
 mypy ~= 0.800; python_version > "3.5"
 pydocstyle == 2.1.1
-pylint == 2.3.1
+pylint == 2.7.2
 structlog ~= 19.2.0
 typing
 yamllint == 1.15.0