SERVER-42240: Fix which build burn_in_tags looks at and clean up

This commit is contained in:
David Bradford 2019-07-16 16:34:29 -04:00
parent 0627df0025
commit 2fb05c59a4
18 changed files with 352 additions and 596 deletions

View File

@ -7,11 +7,10 @@ import os
from collections import namedtuple
from evergreen.api import RetryingEvergreenApi
from git import Repo
from shrub.config import Configuration
from shrub.variant import TaskSpec
from shrub.variant import Variant
from evergreen.api import RetryingEvergreenApi
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
@ -120,7 +119,7 @@ def _generate_evg_buildvariant(shrub_config, buildvariant, run_buildvariant,
new_variant.modules(modules)
def _generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data, buildvariant_map):
def _generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data, buildvariant_map, repo):
"""
Generate burn_in_tests tasks for a given shrub config and group of buildvariants.
@ -128,10 +127,11 @@ def _generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data, build
:param shrub_config: Shrub config object that the build variants will be built upon.
:param expansions_file_data: Config data file to use.
:param buildvariant_map: Map of base buildvariants to their generated buildvariant.
:param repo: Git repository.
"""
for buildvariant, run_buildvariant in buildvariant_map.items():
config_options = _get_config_options(expansions_file_data, buildvariant, run_buildvariant)
tests_by_task = create_tests_by_task(config_options, evergreen_api)
tests_by_task = create_tests_by_task(config_options, repo)
if tests_by_task:
_generate_evg_buildvariant(shrub_config, buildvariant, run_buildvariant,
expansions_file_data["build_variant"])
@ -152,7 +152,7 @@ def _write_to_file(shrub_config):
file_handle.write(shrub_config.to_json())
def main(evergreen_api):
def main(evergreen_api, repo):
"""Execute Main program."""
parser = argparse.ArgumentParser(description=main.__doc__)
@ -163,9 +163,9 @@ def main(evergreen_api):
shrub_config = Configuration()
buildvariant_map = _create_evg_buildvariant_map(expansions_file_data)
_generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data, buildvariant_map)
_generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data, buildvariant_map, repo)
_write_to_file(shrub_config)
if __name__ == '__main__':
main(RetryingEvergreenApi.get_api(config_file=EVG_CONFIG_FILE))
main(RetryingEvergreenApi.get_api(config_file=EVG_CONFIG_FILE), Repo("."))

View File

@ -1,35 +1,92 @@
#!/usr/bin/env python3
"""Bypass compile and fetch binaries for burn_in_tags."""
import logging
import sys
from urllib.parse import urlparse
from buildscripts.bypass_compile_and_fetch_binaries import (
find_suitable_build_id, generate_bypass_expansions, parse_args, read_evg_config,
requests_get_json, write_out_bypass_compile_expansions)
import click
from evergreen.api import RetryingEvergreenApi
import structlog
from structlog.stdlib import LoggerFactory
from buildscripts.bypass_compile_and_fetch_binaries import generate_bypass_expansions, \
write_out_bypass_compile_expansions
structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.get_logger(__name__)
EVG_CONFIG_FILE = ".evergreen.yml"
def main(): # pylint: disable=too-many-locals,too-many-statements
"""Execute Main program."""
def _retrieve_used_build_id(build):
"""
Determine what build_id should be used for downloading artifacts.
args = parse_args()
evg_config = read_evg_config()
if evg_config is None:
print("Could not find ~/.evergreen.yml config file. Default compile bypass to false.")
return
If bypass_compile was used by the main compile task, then our expansions should use the
same references.
api_server = "{url.scheme}://{url.netloc}".format(
url=urlparse(evg_config.get("api_server_host")))
revision_url = f"{api_server}/rest/v1/projects/{args.project}/revisions/{args.revision}"
revisions = requests_get_json(revision_url)
build_id = find_suitable_build_id(revisions["builds"], args)
if not build_id:
print("Could not find build id for revision {args.revision} on project {args.project}."
" Default compile bypass to false.")
return
:param build: Evergreen build containing the compile task to use.
:return: build_id that should be used for expansions.
"""
log = LOGGER.bind(build_id=build.id)
tasks = build.get_tasks()
possible_compile_tasks = {task for task in tasks if task.display_name == "compile"}
if len(possible_compile_tasks) != 1:
log.warning("Could not find 'compile' task")
raise ValueError(f"Compile task not found in {build.id}")
expansions = generate_bypass_expansions(args.project, args.buildVariant, args.revision,
build_id)
write_out_bypass_compile_expansions(args.outFile, **expansions)
compile_task = possible_compile_tasks.pop()
# Use the 'Binaries' artifact to determine which build_id to use.
binary_artifacts = [
artifact for artifact in compile_task.artifacts if artifact.name == "Binaries"
]
for artifact in binary_artifacts:
log.info("Checking artifact for build_id", url=artifact.url)
build_id = artifact.url.split("/")[-1].split(".")[0]
prefix = "mongo-"
return build_id[len(prefix):]
log.warning("Count not determine build_id")
raise ValueError(f"Could not determine build id for bypass compile in {build.id}")
@click.command()
@click.option("--project", required=True, help="The evergreen project.")
@click.option("--build-variant", required=True, help="Build variant where compile is running.")
@click.option("--revision", required=True, help="Base revision of the build.")
@click.option("--out-file", required=True, help="File to write macros expansions to.")
@click.option("--version-id", required=True, help="Evergreen version id of the current build.")
def main(project, build_variant, revision, out_file, version_id):
"""
Create a file with expansions that can be used to bypass compile.
This is used for dynamically generated build variants that run against a base build variant's compile artifacts.
\f
:param project: The evergreen project.
:param build_variant: The build variant whose artifacts we want to use.
:param revision: The base revision being run against.
:param out_file: File to write expansions to.
:param version_id: Evergreen version id being run against.
"""
logging.basicConfig(
format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
level=logging.DEBUG,
stream=sys.stdout,
)
evg_api = RetryingEvergreenApi.get_api(config_file=EVG_CONFIG_FILE)
version = evg_api.version_by_id(version_id)
build_id = _retrieve_used_build_id(version.build_by_variant(build_variant))
LOGGER.info("Creating expansions files", project=project, build_variant=build_variant,
revision=revision, build_id=build_id)
expansions = generate_bypass_expansions(project, build_variant, revision, build_id)
write_out_bypass_compile_expansions(out_file, **expansions)
if __name__ == "__main__":
main()
main() # pylint: disable=no-value-for-parameter
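A minimal sketch of exercising the new click entry point from a test, using click's CliRunner instead of a shell. The option values below are placeholders, and the Evergreen API calls would need to be mocked for the invocation to succeed offline:
from click.testing import CliRunner

from buildscripts.burn_in_tags_bypass_compile_and_fetch_binaries import main

runner = CliRunner()
result = runner.invoke(main, [
    "--project", "mongodb-mongo-master",             # placeholder project
    "--build-variant", "enterprise-rhel-62-64-bit",  # placeholder variant
    "--revision", "abc123",
    "--out-file", "bypass_compile_expansions.yml",
    "--version-id", "mongodb_mongo_master_abc123",
])
print(result.exit_code)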

View File

@ -14,8 +14,11 @@ import logging
from math import ceil
import yaml
from git import Repo
import requests
import structlog
from structlog.stdlib import LoggerFactory
import yaml
from shrub.config import Configuration
from shrub.command import CommandDefinition
@ -31,17 +34,23 @@ if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# pylint: disable=wrong-import-position
from buildscripts import git
from buildscripts.patch_builds.change_data import find_changed_files
from buildscripts import resmokelib
from buildscripts.ciconfig import evergreen
from buildscripts.util import teststats
# pylint: enable=wrong-import-position
LOGGER = logging.getLogger(__name__)
structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.getLogger(__name__)
EXTERNAL_LOGGERS = {
"evergreen",
"git",
"urllib3",
}
AVG_TEST_RUNTIME_ANALYSIS_DAYS = 14
AVG_TEST_TIME_MULTIPLIER = 3
CONFIG_FILE = "../src/.evergreen.yml"
CONFIG_FILE = ".evergreen.yml"
REPEAT_SUITES = 2
EVERGREEN_FILE = "etc/evergreen.yml"
MAX_TASKS_TO_CREATE = 1000
@ -181,31 +190,24 @@ def validate_options(parser, options):
check_variant(options.run_buildvariant, parser)
def find_last_activated_task(revisions, variant, project, evg_api):
def _is_file_a_test_file(file_path):
"""
Search the given list of revisions for the first build that was activated in evergreen.
Check if the given path points to a test file.
:param revisions: List of revisions to search.
:param variant: Build variant to query for.
:param project: Project being run against.
:param evg_api: Evergreen api.
:return: First revision from list that has been activated.
:param file_path: Path to the file to check.
:return: True if the path points to a test file.
"""
prefix = project.replace("-", "_")
# Check that the file exists because it may have been moved or deleted in the patch.
if os.path.splitext(file_path)[1] != ".js" or not os.path.isfile(file_path):
return False
for githash in revisions:
version_id = f"{prefix}_{githash}"
version = evg_api.version_by_id(version_id)
if "jstests" not in file_path:
return False
build = version.build_by_variant(variant)
if build.activated:
return githash
return None
return True
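Illustrative behaviour of the helper above (assuming the paths exist on disk, since it also checks os.path.isfile): only .js files whose path contains "jstests" are treated as tests.
_is_file_a_test_file("jstests/core/example.js")   # True, if the file exists
_is_file_a_test_file("jstests/core/example.cpp")  # False: not a .js file
_is_file_a_test_file("src/mongo/example.js")      # False: no "jstests" in path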
def find_changed_tests( # pylint: disable=too-many-locals,too-many-arguments
branch_name, base_commit, max_revisions, buildvariant, project, check_evergreen, evg_api):
def find_changed_tests(repo: Repo):
"""
Find the changed tests.
@ -213,66 +215,12 @@ def find_changed_tests( # pylint: disable=too-many-locals,too-many-arguments
TODO: This should be expanded to search for enterprise modules.
The returned file paths are in normalized form (see os.path.normpath(path)).
:param branch_name: Branch being run against.
:param base_commit: Commit changes are made on top of.
:param max_revisions: Max number of revisions to search through.
:param buildvariant: Build variant burn is being run on.
:param project: Project that is being run on.
:param check_evergreen: Should evergreen be checked for an activated build.
:param evg_api: Evergreen api.
:returns: List of changed tests.
:returns: Set of changed tests.
"""
changed_tests = []
repo = git.Repository(".")
if base_commit is None:
base_commit = repo.get_merge_base([branch_name + "@{upstream}", "HEAD"])
if check_evergreen:
# We're going to check up to 200 commits in Evergreen for the last scheduled one.
# The current commit will be activated in Evergreen; we use --skip to start at the
# previous commit when trying to find the most recent preceding commit that has been
# activated.
revs_to_check = repo.git_rev_list([base_commit, "--max-count=200", "--skip=1"]).splitlines()
last_activated = find_last_activated_task(revs_to_check, buildvariant, project, evg_api)
if last_activated is None:
# When the current commit is the first time 'buildvariant' has run, there won't be a
# commit among 'revs_to_check' that's been activated in Evergreen. We handle this by
# only considering tests changed in the current commit.
last_activated = "HEAD"
print("Comparing current branch against", last_activated)
revisions = repo.git_rev_list([base_commit + "..." + last_activated]).splitlines()
base_commit = last_activated
else:
revisions = repo.git_rev_list([base_commit + "...HEAD"]).splitlines()
revision_count = len(revisions)
if revision_count > max_revisions:
print(("There are too many revisions included ({}). This is likely because your base"
" branch is not {}. You can allow us to review more than {} revisions by using"
" the --maxRevisions option.".format(revision_count, branch_name, max_revisions)))
return changed_tests
changed_files = repo.git_diff(["--name-only", base_commit]).splitlines()
# New files ("untracked" in git terminology) won't show up in the git diff results.
untracked_files = repo.git_status(["--porcelain"]).splitlines()
# The lines with untracked files start with '?? '.
for line in untracked_files:
if line.startswith("?"):
(_, line) = line.split(" ", 1)
changed_files.append(line)
for line in changed_files:
line = line.rstrip()
# Check that the file exists because it may have been moved or deleted in the patch.
if os.path.splitext(line)[1] != ".js" or not os.path.isfile(line):
continue
if "jstests" in line:
path = os.path.normpath(line)
changed_tests.append(path)
changed_files = find_changed_files(repo)
LOGGER.debug("Found changed files", files=changed_files)
changed_tests = {os.path.normpath(path) for path in changed_files if _is_file_a_test_file(path)}
LOGGER.debug("Found changed tests", files=changed_tests)
return changed_tests
@ -526,7 +474,7 @@ def _generate_timeouts(options, commands, test, task_avg_test_runtime_stats):
avg_test_runtime = _parse_avg_test_runtime(test, task_avg_test_runtime_stats)
if avg_test_runtime:
cmd_timeout = CmdTimeoutUpdate()
LOGGER.debug("Avg test runtime for test %s is: %s", test, avg_test_runtime)
LOGGER.debug("Avg test runtime", test=test, runtime=avg_test_runtime)
timeout = _calculate_timeout(avg_test_runtime)
cmd_timeout.timeout(timeout)
@ -555,7 +503,7 @@ def _get_task_runtime_history(evg_api, project, task, variant):
tasks=[task], variants=[variant], group_by="test",
group_num_days=AVG_TEST_RUNTIME_ANALYSIS_DAYS)
test_runtimes = teststats.TestStats(data).get_tests_runtimes()
LOGGER.debug("Test_runtime data parsed from Evergreen history: %s", test_runtimes)
LOGGER.debug("Test_runtime data parsed from Evergreen history", runtimes=test_runtimes)
return test_runtimes
except requests.HTTPError as err:
if err.response.status_code == requests.codes.SERVICE_UNAVAILABLE:
@ -605,20 +553,18 @@ def create_generate_tasks_config(evg_api, evg_config, options, tests_by_task, in
return evg_config
def create_tests_by_task(options, evg_api):
def create_tests_by_task(options, repo):
"""
Create a list of tests by task.
:param options: Options.
:param evg_api: Evergreen api.
:param repo: Git repo being tracked.
:return: Tests by task
"""
# Parse the Evergreen project configuration file.
evergreen_conf = evergreen.parse_evergreen_file(EVERGREEN_FILE)
changed_tests = find_changed_tests(options.branch, options.base_commit, options.max_revisions,
options.buildvariant, options.project,
options.check_evergreen, evg_api)
changed_tests = find_changed_tests(repo)
exclude_suites, exclude_tasks, exclude_tests = find_excludes(SELECTOR_FILE)
changed_tests = filter_tests(changed_tests, exclude_tests)
@ -644,7 +590,8 @@ def create_generate_tasks_file(evg_api, options, tests_by_task):
json_config = evg_config.to_map()
tasks_to_create = len(json_config.get('tasks', []))
if tasks_to_create > MAX_TASKS_TO_CREATE:
LOGGER.warning("Attempting to create more tasks than max(%d), aborting", tasks_to_create)
LOGGER.warning("Attempting to create more tasks than max, aborting", tasks=tasks_to_create,
max=MAX_TASKS_TO_CREATE)
sys.exit(1)
_write_json_file(json_config, options.generate_tasks_file)
@ -676,17 +623,22 @@ def run_tests(no_exec, tests_by_task, resmoke_cmd, report_file):
_write_json_file(test_results, report_file)
def main(evg_api):
"""Execute Main program."""
def configure_logging():
"""Configure logging for the application."""
logging.basicConfig(
format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
level=logging.DEBUG,
stream=sys.stdout,
)
for log_name in EXTERNAL_LOGGERS:
logging.getLogger(log_name).setLevel(logging.WARNING)
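A minimal sketch (not part of the diff) of the structlog pattern this commit standardizes on: configure stdlib logging once, route structlog through it with LoggerFactory, and pass context as keyword arguments instead of %-style placeholders.
import logging
import sys

import structlog
from structlog.stdlib import LoggerFactory

structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.get_logger(__name__)

logging.basicConfig(format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
                    level=logging.DEBUG, stream=sys.stdout)

LOGGER.debug("Found changed files", files={"jstests/core/example.js"})
# bind() attaches context to every message logged through the returned logger.
log = LOGGER.bind(build_id="mongodb_build_123")  # placeholder build id
log.warning("Could not find 'compile' task")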
def main(evg_api):
"""Execute Main program."""
configure_logging()
options, args = parse_command_line()
resmoke_cmd = _set_resmoke_cmd(options, args)
# Load the dict of tests to run.
@ -700,7 +652,8 @@ def main(evg_api):
# Run the executor finder.
else:
tests_by_task = create_tests_by_task(options, evg_api)
repo = Repo(".")
tests_by_task = create_tests_by_task(options, repo)
if options.test_list_outfile:
_write_json_file(tests_by_task, options.test_list_outfile)

View File

@ -3,6 +3,7 @@
import argparse
import json
import logging
import os
import re
import sys
@ -19,7 +20,10 @@ except ImportError:
from urllib.parse import urlparse # type: ignore
# pylint: enable=ungrouped-imports
from git.repo import Repo
import requests
import structlog
from structlog.stdlib import LoggerFactory
import yaml
# Get relative imports to work when the package is not installed on the PYTHONPATH.
@ -28,9 +32,11 @@ if __name__ == "__main__" and __package__ is None:
# pylint: disable=wrong-import-position
from buildscripts.ciconfig.evergreen import parse_evergreen_file
from buildscripts.git import Repository
# pylint: enable=wrong-import-position
structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.get_logger(__name__)
_IS_WINDOWS = (sys.platform == "win32" or sys.platform == "cygwin")
# If changes are only from files in the bypass_files list or the bypass_directories list, then
@ -104,7 +110,7 @@ def requests_get_json(url):
try:
return response.json()
except ValueError:
print("Invalid JSON object returned with response: {}".format(response.text))
LOGGER.warning("Invalid JSON object returned with response", response=response.text)
raise
@ -124,15 +130,16 @@ def read_evg_config():
def write_out_bypass_compile_expansions(patch_file, **expansions):
"""Write out the macro expansions to given file."""
with open(patch_file, "w") as out_file:
print("Saving compile bypass expansions to {0}: ({1})".format(patch_file, expansions))
LOGGER.info("Saving compile bypass expansions", patch_file=patch_file,
expansions=expansions)
yaml.safe_dump(expansions, out_file, default_flow_style=False)
def write_out_artifacts(json_file, artifacts):
"""Write out the JSON file with URLs of artifacts to given file."""
with open(json_file, "w") as out_file:
print("Generating artifacts.json from pre-existing artifacts {0}".format(
json.dumps(artifacts, indent=4)))
LOGGER.info("Generating artifacts.json from pre-existing artifacts", json=json.dumps(
artifacts, indent=4))
json.dump(artifacts, out_file)
@ -182,8 +189,8 @@ def _get_original_etc_evergreen(path):
:param path: path to etc/evergreen.
:return: An EvergreenProjectConfig for the previous etc/evergreen file.
"""
repo = Repository(".")
previous_contents = repo.git_show([f"HEAD:{path}"])
repo = Repo(".")
previous_contents = repo.git.show([f"HEAD:{path}"])
with TemporaryDirectory() as tmpdir:
file_path = os.path.join(tmpdir, "evergreen.yml")
with open(file_path, "w") as fp:
@ -260,20 +267,18 @@ def should_bypass_compile(args):
if os.path.isdir(filename):
continue
log = LOGGER.bind(filename=filename)
if _file_in_group(filename, BYPASS_BLACKLIST):
print("Compile bypass disabled after detecting {} as being modified because"
" it is a file known to affect compilation.".format(filename))
log.warning("Compile bypass disabled due to blacklisted file")
return False
if not _file_in_group(filename, BYPASS_WHITELIST):
print("Compile bypass disabled after detecting {} as being modified because"
" it isn't a file known to not affect compilation.".format(filename))
log.warning("Compile bypass disabled due to non-whitelisted file")
return False
if filename in BYPASS_EXTRA_CHECKS_REQUIRED:
if not _check_file_for_bypass(filename, args.buildVariant):
print("Compile bypass disabled after detecting {} as being modified because"
" the changes could affect compilation.".format(filename))
log.warning("Compile bypass disabled due to extra checks for file.")
return False
return True
@ -335,12 +340,18 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
determine to bypass compile do we write out the macro expansions.
"""
args = parse_args()
logging.basicConfig(
format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
level=logging.DEBUG,
stream=sys.stdout,
)
# Determine if we should bypass compile based on modified patch files.
if should_bypass_compile(args):
evg_config = read_evg_config()
if evg_config is None:
print("Could not find ~/.evergreen.yml config file. Default compile bypass to false.")
LOGGER.warning(
"Could not find ~/.evergreen.yml config file. Default compile bypass to false.")
return
api_server = "{url.scheme}://{url.netloc}".format(
@ -350,8 +361,8 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
revisions = requests_get_json(revision_url)
build_id = find_suitable_build_id(revisions["builds"], args)
if not build_id:
print("Could not find build id for revision {} on project {}."
" Default compile bypass to false.".format(args.revision, args.project))
LOGGER.warning("Could not find build id. Default compile bypass to false.",
revision=args.revision, project=args.project)
return
# Generate the compile task id.
@ -361,23 +372,25 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
# Get info on compile task of base commit.
task = requests_get_json(task_url)
if task is None or task["status"] != "success":
print("Could not retrieve artifacts because the compile task {} for base commit"
" was not available. Default compile bypass to false.".format(compile_task_id))
LOGGER.warning(
"Could not retrieve artifacts because the compile task for base commit"
" was not available. Default compile bypass to false.", task_id=compile_task_id)
return
# Get the compile task artifacts from REST API
print("Fetching pre-existing artifacts from compile task {}".format(compile_task_id))
LOGGER.info("Fetching pre-existing artifacts from compile task", task_id=compile_task_id)
artifacts = []
for artifact in task["files"]:
filename = os.path.basename(artifact["url"])
if filename.startswith(build_id):
print("Retrieving archive {}".format(filename))
LOGGER.info("Retrieving archive", filename=filename)
# This is the artifacts.tgz as referenced in evergreen.yml.
try:
urllib.request.urlretrieve(artifact["url"], filename)
except urllib.error.ContentTooShortError:
print("The artifact {} could not be completely downloaded. Default"
" compile bypass to false.".format(filename))
LOGGER.warning(
"The artifact could not be completely downloaded. Default"
" compile bypass to false.", filename=filename)
return
# Need to extract certain files from the pre-existing artifacts.tgz.
@ -395,24 +408,25 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
tarinfo for tarinfo in tar.getmembers()
if tarinfo.name.startswith("repo/") or tarinfo.name in extract_files
]
print("Extracting the following files from {0}...\n{1}".format(
filename, "\n".join(tarinfo.name for tarinfo in subdir)))
LOGGER.info("Extracting the files...", filename=filename,
files="\n".join(tarinfo.name for tarinfo in subdir))
tar.extractall(members=subdir)
elif filename.startswith("mongo-src"):
print("Retrieving mongo source {}".format(filename))
LOGGER.info("Retrieving mongo source", filename=filename)
# This is the distsrc.[tgz|zip] as referenced in evergreen.yml.
try:
urllib.request.urlretrieve(artifact["url"], filename)
except urllib.error.ContentTooShortError:
print("The artifact {} could not be completely downloaded. Default"
" compile bypass to false.".format(filename))
LOGGER.warning(
"The artifact could not be completely downloaded. Default"
" compile bypass to false.", filename=filename)
return
extension = os.path.splitext(filename)[1]
distsrc_filename = "distsrc{}".format(extension)
print("Renaming {} to {}".format(filename, distsrc_filename))
LOGGER.info("Renaming", filename=filename, rename=distsrc_filename)
os.rename(filename, distsrc_filename)
else:
print("Linking base artifact {} to this patch build".format(filename))
LOGGER.info("Linking base artifact to this patch build", filename=filename)
# For other artifacts we just add their URLs to the JSON file to upload.
files = {
"name": artifact["name"],

View File

@ -5,7 +5,7 @@ import os
import re
from typing import Any, Callable, List, Tuple
from buildscripts import git as _git
from buildscripts.linter import git_base as _git
from buildscripts import moduleconfig
from buildscripts.resmokelib.utils import globstar

View File

@ -1,8 +1,6 @@
"""Module to run git commands on a repository."""
import logging
import os
import sys
import subprocess
LOGGER = logging.getLogger(__name__)

View File

@ -0,0 +1 @@
"""Patch build module."""

View File

@ -0,0 +1,51 @@
"""Tools for detecting changes in a commit."""
from typing import Any, Set
from git import Repo, DiffIndex
import structlog
from structlog.stdlib import LoggerFactory
structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.get_logger(__name__)
def _paths_for_iter(diff, iter_type):
return {change.a_path for change in diff.iter_change_type(iter_type)}
def _modified_files_for_diff(diff: DiffIndex, log: Any) -> Set:
modified_files = _paths_for_iter(diff, 'M')
log.debug("modified files", files=modified_files)
added_files = _paths_for_iter(diff, 'A')
log.debug("added files", files=added_files)
renamed_files = _paths_for_iter(diff, 'R')
log.debug("renamed files", files=renamed_files)
# We don't care about deleted files, but log them just in case.
deleted_files = _paths_for_iter(diff, 'D')
log.debug("deleted files", files=deleted_files)
return modified_files.union(added_files).union(renamed_files)
def find_changed_files(repo: Repo) -> Set[str]:
"""
Find files that have been added or modified in the repository since the last commit.
:param repo: Git repository.
:return: Set of changed files.
"""
diff = repo.index.diff(None)
work_tree_files = _modified_files_for_diff(diff, LOGGER.bind(diff="working tree diff"))
commit = repo.index
diff = commit.diff(repo.head.commit)
index_files = _modified_files_for_diff(diff, LOGGER.bind(diff="index diff"))
untracked_files = set(repo.untracked_files)
LOGGER.info("untracked files", files=untracked_files, diff="untracked diff")
return work_tree_files.union(index_files).union(untracked_files)
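A hypothetical usage sketch for the new module: collect the union of working-tree changes, staged (index) changes, and untracked files for the current checkout.
from git import Repo

from buildscripts.patch_builds.change_data import find_changed_files

for path in sorted(find_changed_files(Repo("."))):
    print(path)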

View File

@ -102,8 +102,9 @@ class TestGenerateEvgTasks(unittest.TestCase):
} # yapf: disable
shrub_config = Configuration()
evergreen_api = Mock()
repo = Mock()
burn_in_tags._generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data,
buildvariant_map)
buildvariant_map, repo)
self.assertEqual(shrub_config.to_map(), {})
@ -127,11 +128,12 @@ class TestGenerateEvgTasks(unittest.TestCase):
} # yapf: disable
shrub_config = Configuration()
evergreen_api = Mock()
repo = Mock()
evergreen_api.test_stats_by_project.return_value = [
Mock(test_file="dir/test2.js", avg_duration_pass=10)
]
burn_in_tags._generate_evg_tasks(evergreen_api, shrub_config, expansions_file_data,
buildvariant_map)
buildvariant_map, repo)
generated_config = shrub_config.to_map()
self.assertEqual(len(generated_config["buildvariants"]), 2)

View File

@ -0,0 +1,36 @@
"""Unit tests for burn_in_tags_bypass_compile_and_fetch_binaries."""
import unittest
from unittest.mock import MagicMock
import buildscripts.burn_in_tags_bypass_compile_and_fetch_binaries as under_test
# pylint: disable=missing-docstring,invalid-name,unused-argument,no-self-use,protected-access
class TestRetrieveUsedBuildId(unittest.TestCase):
def test_build_with_no_compile_throws_exception(self):
build_mock = MagicMock()
with self.assertRaises(ValueError):
under_test._retrieve_used_build_id(build_mock)
def test_compile_with_no_binaries_artifact_throws_exception(self):
build_mock = MagicMock()
compile_task = MagicMock(display_name="compile")
build_mock.get_tasks.return_value = [compile_task]
with self.assertRaises(ValueError):
under_test._retrieve_used_build_id(build_mock)
def test_build_id_from_compile_binaries_is_used(self):
build_id = "this_is_the_build_id"
url = f"http://s3.amazon.com/mciuploads/mongodb/build_var//binaries/mongo-{build_id}.tgz"
build_mock = MagicMock()
compile_task = MagicMock(display_name="compile")
build_mock.get_tasks.return_value = [MagicMock(), compile_task, MagicMock()]
artifact_mock = MagicMock(url=url)
artifact_mock.name = "Binaries"
compile_task.artifacts = [MagicMock(), artifact_mock, MagicMock()]
self.assertEqual(build_id, under_test._retrieve_used_build_id(build_mock))

View File

@ -51,6 +51,13 @@ RUN_TESTS_MULTIVERSION_COMMAND = {
"vars": {"resmoke_args": "--shellWriteMode=commands", "task_path_suffix": MULTIVERSION_PATH}
}
NS = "buildscripts.burn_in_tests"
def ns(relative_name): # pylint: disable=invalid-name
"""Return a full name from a name relative to the test module"s name space."""
return NS + "." + relative_name
def tasks_mock( #pylint: disable=too-many-arguments
tasks, generate_resmoke_tasks_command=None, get_vars_task_name=None, run_tests_command=None,
@ -440,8 +447,8 @@ class TestGenerateTimeouts(unittest.TestCase):
self.assertEqual(len(shrub_commands), 1)
command_definition = shrub_commands[0]
self.assertEqual(command_definition.to_map()['params']['exec_timeout_secs'], 1531)
self.assertEqual(command_definition.to_map()['params']['timeout_secs'], 1366)
self.assertEqual(command_definition.to_map()["params"]["exec_timeout_secs"], 1531)
self.assertEqual(command_definition.to_map()["params"]["timeout_secs"], 1366)
def test__generate_timeouts_no_results(self):
shrub_commands = []
@ -799,7 +806,7 @@ class TestCreateGenerateTasksFile(unittest.TestCase):
gen_tasks_config_mock.return_value = evg_config
exit_mock.side_effect = ValueError('exiting')
exit_mock.side_effect = ValueError("exiting")
with self.assertRaises(ValueError):
burn_in.create_generate_tasks_file(evg_api, options, tests_by_task)
@ -893,50 +900,6 @@ class RunTests(unittest.TestCase):
burn_in.run_tests(no_exec, TESTS_BY_TASK, resmoke_cmd, None)
class FindLastActivated(unittest.TestCase):
def test_find_last_activated_task_first_rev(self):
rev_list = ["rev1", "rev2", "rev3"]
variant = "build_variant_0"
branch = "master"
evg_api = MagicMock()
revision = burn_in.find_last_activated_task(rev_list, variant, branch, evg_api)
self.assertEqual(revision, rev_list[0])
def test_find_last_activated_task_last_rev(self):
rev_list = ["rev1", "rev2", "rev3"]
variant = "build_variant_0"
branch = "master"
evg_api = MagicMock()
evg_api.version_by_id.return_value.build_by_variant.side_effect = [
MagicMock(activated=False),
MagicMock(activated=False),
MagicMock(activated=True),
]
revision = burn_in.find_last_activated_task(rev_list, variant, branch, evg_api)
self.assertEqual(revision, rev_list[2])
def test_find_last_activated_task_no_rev(self):
rev_list = ["rev1", "rev2", "rev3"]
variant = "build_variant_0"
branch = "master"
evg_api = MagicMock()
evg_api.version_by_id.return_value.build_by_variant.return_value.activated = False
revision = burn_in.find_last_activated_task(rev_list, variant, branch, evg_api)
self.assertIsNone(revision)
def test_find_last_activated_norevisions(self):
rev_list = []
variant = "build_variant_0"
branch = "master"
evg_api = MagicMock()
revision = burn_in.find_last_activated_task(rev_list, variant, branch, evg_api)
self.assertIsNone(revision)
MEMBERS_MAP = {
"test1.js": ["suite1", "suite2"], "test2.js": ["suite1", "suite3"], "test3.js": [],
"test4.js": ["suite1", "suite2", "suite3"], "test5.js": ["suite2"]
@ -1104,228 +1067,63 @@ class CreateTaskList(unittest.TestCase):
burn_in.create_task_list(EVERGREEN_CONF, variant, suite_list, [])
class FindChangedTests(unittest.TestCase):
class TestFindChangedTests(unittest.TestCase):
@patch(ns("find_changed_files"))
def test_nothing_found(self, changed_files_mock):
repo_mock = MagicMock()
changed_files_mock.return_value = set()
NUM_COMMITS = 10
MOD_FILES = [os.path.normpath("jstests/test1.js"), os.path.normpath("jstests/test2.js")]
REV_DIFF = dict(zip([str(x) for x in range(NUM_COMMITS)],
[MOD_FILES] * NUM_COMMITS)) #type: ignore
NO_REV_DIFF = dict(
zip([str(x) for x in range(NUM_COMMITS)], [None for _ in range(NUM_COMMITS)]))
self.assertEqual(0, len(burn_in.find_changed_tests(repo_mock)))
UNTRACKED_FILES = [
os.path.normpath("jstests/untracked1.js"),
os.path.normpath("jstests/untracked2.js")
@patch(ns("find_changed_files"))
@patch(ns("os.path.isfile"))
def test_non_js_files_filtered(self, is_file_mock, changed_files_mock):
repo_mock = MagicMock()
file_list = [
os.path.join("jstests", "test1.js"),
os.path.join("jstests", "test1.cpp"),
os.path.join("jstests", "test2.js"),
]
changed_files_mock.return_value = set(file_list)
is_file_mock.return_value = True
@staticmethod
def _copy_rev_diff(rev_diff):
"""Use this method instead of copy.deepcopy().
found_tests = burn_in.find_changed_tests(repo_mock)
Note - it was discovered during testing that after using copy.deepcopy() that
updating one key would update all of them, i.e.,
rev_diff = {"1": ["abc"], 2": ["abc"]}
copy_rev_diff = copy.deepcopy(rev_diff)
copy_rev_diff["2"] += "xyz"
print(rev_diff)
Result: {"1": ["abc"], 2": ["abc"]}
print(copy_rev_diff)
Result: {"1": ["abc", "xyz"], 2": ["abc", "xyz"]}
At this point no identifiable issue could be found related to this problem.
"""
copy_rev_diff = {}
for key in rev_diff:
copy_rev_diff[key] = []
for file_name in rev_diff[key]:
copy_rev_diff[key].append(file_name)
return copy_rev_diff
self.assertIn(file_list[0], found_tests)
self.assertIn(file_list[2], found_tests)
self.assertNotIn(file_list[1], found_tests)
@staticmethod
def _get_rev_list(range1, range2):
return [str(num) for num in range(range1, range2 + 1)]
@patch(ns("find_changed_files"))
@patch(ns("os.path.isfile"))
def test_missing_files_filtered(self, is_file_mock, changed_files_mock):
repo_mock = MagicMock()
file_list = [
os.path.join("jstests", "test1.js"),
os.path.join("jstests", "test2.js"),
os.path.join("jstests", "test3.js"),
]
changed_files_mock.return_value = set(file_list)
is_file_mock.return_value = False
def _mock_git_repository(self, directory):
return MockGitRepository(directory, FindChangedTests._get_rev_list(self.rev1, self.rev2),
self.rev_diff, self.untracked_files)
found_tests = burn_in.find_changed_tests(repo_mock)
def _test_find_changed_tests( #pylint: disable=too-many-arguments
self, commit, max_revisions, variant, check_evg, rev1, rev2, rev_diff, untracked_files,
last_activated_task=None):
branch = "master"
project = "project"
# pylint: disable=attribute-defined-outside-init
self.rev1 = rev1
self.rev2 = rev2
self.rev_diff = rev_diff
self.untracked_files = untracked_files
self.expected_changed_tests = []
if commit is None and rev_diff:
self.expected_changed_tests += rev_diff[str(self.NUM_COMMITS - 1)]
elif rev_diff.get(commit, []):
self.expected_changed_tests += rev_diff.get(commit, [])
self.expected_changed_tests += untracked_files
# pylint: enable=attribute-defined-outside-init
evg_api = MagicMock()
with patch(GIT + ".Repository", self._mock_git_repository),\
patch("os.path.isfile", return_value=True),\
patch(BURN_IN + ".find_last_activated_task", return_value=last_activated_task):
return burn_in.find_changed_tests(branch, commit, max_revisions, variant, project,
check_evg, evg_api)
self.assertEqual(0, len(found_tests))
def test_find_changed_tests(self):
commit = "3"
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.expected_changed_tests)
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.expected_changed_tests)
@patch(ns("find_changed_files"))
@patch(ns("os.path.isfile"))
def test_non_jstests_files_filtered(self, is_file_mock, changed_files_mock):
repo_mock = MagicMock()
file_list = [
os.path.join("jstests", "test1.js"),
os.path.join("other", "test2.js"),
os.path.join("jstests", "test3.js"),
]
changed_files_mock.return_value = set(file_list)
is_file_mock.return_value = True
def test_find_changed_tests_no_changes(self):
commit = "3"
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3,
self.NO_REV_DIFF, [])
self.assertEqual(changed_tests, [])
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3,
self.NO_REV_DIFF, [], "1")
self.assertEqual(changed_tests, [])
found_tests = burn_in.find_changed_tests(repo_mock)
def test_find_changed_tests_check_evergreen(self):
commit = "1"
rev_diff = self._copy_rev_diff(self.REV_DIFF)
rev_diff["2"] += [os.path.normpath("jstests/test.js")]
expected_changed_tests = self.REV_DIFF[commit] + self.UNTRACKED_FILES
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3, rev_diff,
self.UNTRACKED_FILES)
self.assertEqual(changed_tests, expected_changed_tests)
rev_diff = self._copy_rev_diff(self.REV_DIFF)
rev_diff["3"] += [os.path.normpath("jstests/test.js")]
expected_changed_tests = rev_diff["3"] + self.UNTRACKED_FILES
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3, rev_diff,
self.UNTRACKED_FILES, "1")
self.assertEqual(changed_tests, expected_changed_tests)
def test_find_changed_tests_no_diff(self):
commit = "3"
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3,
self.NO_REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.UNTRACKED_FILES)
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3,
self.NO_REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.UNTRACKED_FILES)
def test_find_changed_tests_no_untracked(self):
commit = "3"
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3,
self.REV_DIFF, [])
self.assertEqual(changed_tests, self.REV_DIFF[commit])
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3,
self.REV_DIFF, [])
self.assertEqual(changed_tests, self.REV_DIFF[commit])
def test_find_changed_tests_no_base_commit(self):
changed_tests = self._test_find_changed_tests(None, 5, "myvariant", False, 0, 3,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.expected_changed_tests)
changed_tests = self._test_find_changed_tests(None, 5, "myvariant", True, 0, 3,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.expected_changed_tests)
def test_find_changed_tests_non_js(self):
commit = "3"
rev_diff = self._copy_rev_diff(self.REV_DIFF)
rev_diff[commit] += [os.path.normpath("jstests/test.yml")]
untracked_files = self.UNTRACKED_FILES + [os.path.normpath("jstests/untracked.yml")]
expected_changed_tests = self.REV_DIFF[commit] + self.UNTRACKED_FILES
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3, rev_diff,
untracked_files)
self.assertEqual(changed_tests, expected_changed_tests)
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3, rev_diff,
untracked_files)
self.assertEqual(changed_tests, expected_changed_tests)
def test_find_changed_tests_not_in_jstests(self):
commit = "3"
rev_diff = self._copy_rev_diff(self.REV_DIFF)
rev_diff[commit] += [os.path.normpath("other/test.js")]
untracked_files = self.UNTRACKED_FILES + [os.path.normpath("other/untracked.js")]
expected_changed_tests = self.REV_DIFF[commit] + self.UNTRACKED_FILES
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 3, rev_diff,
untracked_files)
self.assertEqual(changed_tests, expected_changed_tests)
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 3, rev_diff,
untracked_files)
self.assertEqual(changed_tests, expected_changed_tests)
def test_find_changed_tests_no_revisions(self):
commit = "3"
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 0,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.expected_changed_tests)
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 0,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, self.expected_changed_tests)
def test_find_changed_tests_too_many_revisions(self):
commit = "3"
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", False, 0, 9,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, [])
changed_tests = self._test_find_changed_tests(commit, 5, "myvariant", True, 0, 9,
self.REV_DIFF, self.UNTRACKED_FILES)
self.assertEqual(changed_tests, [])
class MockGitRepository(object):
def __init__(self, _, rev_list, rev_diff, untracked_files):
self.rev_list = rev_list
self.rev_diff = rev_diff
self.untracked_files = untracked_files
def _get_revs(self, rev_range):
revs = rev_range.split("...")
if not revs:
return revs
elif len(revs) == 1:
revs.append("HEAD")
if revs[1] == "HEAD" and self.rev_list:
revs[1] = self.rev_list[-1]
return revs
def __get_rev_range(self, rev_range):
commits = []
if len(self.rev_list) < 2:
return commits
revs = self._get_revs(rev_range)
latest_commit_found = False
for commit in self.rev_list:
latest_commit_found = latest_commit_found or revs[0] == commit
if revs[1] == commit:
break
if latest_commit_found:
commits.append(commit)
return commits
def get_merge_base(self, _):
return self.rev_list[-1]
def git_rev_list(self, args):
return "\n".join(self.__get_rev_range(args[0])[::-1])
def git_diff(self, args):
revs = self._get_revs(args[1])
if revs:
diff_list = self.rev_diff.get(revs[-1], [])
if diff_list:
return "\n".join(diff_list)
return ""
def git_status(self, args):
revs = self._get_revs(args[0])
modified_files = [""]
if revs:
diff_list = self.rev_diff.get(revs[-1], [])
if diff_list:
modified_files = [" M {}".format(untracked) for untracked in diff_list]
untracked_files = ["?? {}".format(untracked) for untracked in self.untracked_files]
return "\n".join(modified_files + untracked_files)
self.assertIn(file_list[0], found_tests)
self.assertIn(file_list[2], found_tests)
self.assertNotIn(file_list[1], found_tests)
self.assertEqual(2, len(found_tests))

View File

@ -1,96 +0,0 @@
"""Unit tests for the buildscripts.git module."""
import subprocess
import unittest
import buildscripts.git as _git
# pylint: disable=missing-docstring,protected-access
class TestRepository(unittest.TestCase):
def setUp(self):
self.subprocess = MockSubprocess()
_git.subprocess = self.subprocess
def tearDown(self):
_git.subprocess = subprocess
def test_base_git_methods(self):
params = ["param1", "param2", "param3"]
repo = _git.Repository("/tmp")
self._check_gito_command(repo.git_add, "add", params)
self._check_gito_command(repo.git_commit, "commit", params)
self._check_gito_command(repo.git_diff, "diff", params)
self._check_gito_command(repo.git_log, "log", params)
self._check_gito_command(repo.git_push, "push", params)
self._check_gito_command(repo.git_fetch, "fetch", params)
self._check_gito_command(repo.git_ls_files, "ls-files", params)
self._check_gito_command(repo.git_rev_parse, "rev-parse", params)
self._check_gito_command(repo.git_rm, "rm", params)
self._check_gito_command(repo.git_show, "show", params)
self._check_gito_command(repo.git_status, "status", params)
def test_base_gito_methods_errors(self):
params = ["param1", "param2", "param3"]
repo = _git.Repository("/tmp")
self._check_gito_command_error(repo.git_add, "add", params)
self._check_gito_command_error(repo.git_commit, "commit", params)
self._check_gito_command_error(repo.git_diff, "diff", params)
self._check_gito_command_error(repo.git_log, "log", params)
self._check_gito_command_error(repo.git_push, "push", params)
self._check_gito_command_error(repo.git_fetch, "fetch", params)
self._check_gito_command_error(repo.git_ls_files, "ls-files", params)
self._check_gito_command_error(repo.git_rev_parse, "rev-parse", params)
self._check_gito_command_error(repo.git_rm, "rm", params)
self._check_gito_command_error(repo.git_show, "show", params)
self._check_gito_command_error(repo.git_status, "status", params)
def _check_gito_command(self, method, command, params):
# Initialize subprocess mock.
self.subprocess.call_output_args = None # pylint: disable=attribute-defined-outside-init
self.subprocess.call_output = str(method).encode("utf-8")
self.subprocess.call_returncode = 0
# Call method.
value = method(params)
# Check.
args = self.subprocess.call_args
given_args = [command] + params
self.assertEqual("git", args[0])
self.assertEqual(given_args, args[-len(given_args):])
self.assertEqual(str(method), value)
def _check_gito_command_error(self, method, command, params):
self.subprocess.call_args = None
self.subprocess.call_output = None
self.subprocess.call_returncode = 1
with self.assertRaises(_git.GitException):
method(params)
args = self.subprocess.call_args
given_args = [command] + params
self.assertEqual("git", args[0])
self.assertEqual(given_args, args[-len(given_args):])
class MockSubprocess(object):
PIPE = subprocess.PIPE
CalledProcessError = subprocess.CalledProcessError
def __init__(self):
self.call_args = None
self.call_returncode = 0
self.call_output = b""
def Popen(self, args, **kwargs): # pylint: disable=invalid-name,unused-argument
self.call_args = args
return MockProcess(self.call_returncode, self.call_output)
class MockProcess(object):
def __init__(self, returncode, output):
self.returncode = returncode
self._output = output
def communicate(self):
return self._output, b""

View File

@ -254,8 +254,8 @@ variables:
# The python virtual environment is installed in ${workdir}, which is created in
# "set up virtualenv".
- func: "set up virtualenv"
- func: "configure evergreen api credentials"
# NOTE: To disable the compile bypass feature, comment out the next line.
#
- func: "bypass compile and fetch binaries"
- func: "update bypass expansions"
- func: "get buildnumber"
@ -1159,11 +1159,10 @@ functions:
# Evergreen executable is in $HOME, so add that to the path.
PATH=$PATH:$HOME $python buildscripts/burn_in_tags_bypass_compile_and_fetch_binaries.py \
--project ${project} \
--buildVariant ${burn_in_bypass} \
--build-variant ${burn_in_bypass} \
--revision ${revision} \
--patchFile patch_files.txt \
--outFile bypass_compile_expansions.yml \
--jsonArtifact artifacts.json
--out-file bypass_compile_expansions.yml \
--version-id ${version_id}
fi
# For patch builds determine if we can bypass compile.
@ -1269,7 +1268,6 @@ functions:
script: |
# exit immediately if virtualenv is not found
set -o errexit
set -o verbose
virtualenv_loc=$(which ${virtualenv|virtualenv})
@ -1501,7 +1499,7 @@ functions:
"generate burn in tags":
- command: expansions.write
params:
file: expansions.yml
file: src/expansions.yml
- *configure_evergreen_api_credentials
- command: shell.exec
params:
@ -1509,9 +1507,9 @@ functions:
shell: bash
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
PATH=$PATH:$HOME $python buildscripts/burn_in_tags.py --expansion-file ../expansions.yml
PATH=$PATH:$HOME $python buildscripts/burn_in_tags.py --expansion-file expansions.yml
- command: archive.targz_pack
params:
target: burn_in_tags_gen.tgz
@ -1548,7 +1546,6 @@ functions:
working_dir: src
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
$python buildscripts/evergreen_generate_resmoke_tasks.py --expansion-file expansions.yml --verbose
@ -4817,88 +4814,27 @@ tasks:
${activate_virtualenv}
find buildscripts etc jstests -name '*.y*ml' -exec yamllint -c etc/yamllint_config.yml {} +
### This task is deprecated, but left in here in case of need to run burn_in_tests
### instead of the generated task:
### - Rename burn_in_tests_gen task to burn_in_tests_gen_UNUSED
### - Rename burn_in_tests_gen to burn_in_tests on the build variants
- <<: *task_template
name: burn_in_tests
depends_on:
- name: compile
commands:
- command: manifest.load
- func: "git get project"
# The repository is cloned in a directory distinct from src for the modified test detection
# because the extraction of the artifacts performed in the 'do setup' causes
# 'git diff --name-only' to see all tests as modified on Windows (git 1.9.5). See SERVER-30634.
vars:
git_project_directory: burn_in_tests_clonedir
- func: "do setup"
- command: shell.exec
params:
working_dir: burn_in_tests_clonedir
shell: bash
script: |
set -o errexit
set -o verbose
# If this is a scheduled build, we check for changes against the last scheduled commit.
if [ "${is_patch}" != "true" ]; then
burn_in_args="--checkEvergreen"
fi
pushd ../src
${activate_virtualenv}
popd
# Capture a list of new and modified tests.
build_variant=${build_variant}
if [ -n "${burn_in_tests_build_variant|}" ]; then
build_variant=${burn_in_tests_build_variant|}
fi
# Evergreen executable is in $HOME.
PATH=$PATH:$HOME $python buildscripts/burn_in_tests.py --branch=${branch_name} --buildVariant=$build_variant --testListOutfile=jstests/new_tests.json --noExec $burn_in_args
# Copy the results to the src dir.
cp jstests/new_tests.json ../src/jstests/new_tests.json
- func: "do multiversion setup"
- func: "run tests"
vars:
task_path_suffix: /data/multiversion:$HOME
resmoke_wrapper: $python buildscripts/burn_in_tests.py --testListFile=jstests/new_tests.json
resmoke_args: --repeatSuites=2
###
- name: burn_in_tests_gen
commands:
- command: manifest.load
- func: "git get project"
# The repository is cloned in a directory distinct from src for the modified test detection
# because the extraction of the artifacts performed in the 'do setup' causes
# 'git diff --name-only' to see all tests as modified on Windows (git 1.9.5). See SERVER-30634.
vars:
git_project_directory: burn_in_tests_clonedir
- func: "set task expansion macros"
- func: "set up virtualenv"
vars:
pip_dir: ${workdir}/burn_in_tests_clonedir/etc/pip
- command: shell.exec
params:
working_dir: burn_in_tests_clonedir
shell: bash
script: |
set -o errexit
set -o verbose
mkdir ../src
- func: "configure evergreen api credentials"
- command: shell.exec
params:
working_dir: burn_in_tests_clonedir
working_dir: src
shell: bash
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
# If this is a scheduled build, we check for changes against the last scheduled commit.
if [ "${is_patch}" != "true" ]; then
burn_in_args="--checkEvergreen"
fi
# Capture a list of new and modified tests. The expansion macro burn_in_tests_build_variant
# is used for finding the associated tasks from a different build variant than the
# burn_in_tests_gen task executes on.
@ -4906,10 +4842,11 @@ tasks:
if [ -n "${burn_in_tests_build_variant|}" ]; then
build_variant_opts="--buildVariant=${burn_in_tests_build_variant} --runBuildVariant=${build_variant}"
fi
# Increase the burn_in repetition from 2 to 1000 executions or 10 minutes
burn_in_args="$burn_in_args --repeatTestsMin=2 --repeatTestsMax=1000 --repeatTestsSecs=600"
# Evergreen executable is in $HOME.
PATH=$PATH:$HOME $python buildscripts/burn_in_tests.py --branch=${branch_name} --project=${project} $build_variant_opts --distro=${distro_id} --generateTasksFile=../src/burn_in_tests_gen.json --noExec $burn_in_args
PATH=$PATH:$HOME $python buildscripts/burn_in_tests.py --branch=${branch_name} --project=${project} $build_variant_opts --distro=${distro_id} --generateTasksFile=burn_in_tests_gen.json --noExec $burn_in_args
- command: archive.targz_pack
params:
target: src/burn_in_tests_gen.tgz

View File

@ -1,5 +1,5 @@
# Core (we need these for most buildscripts)
psutil
pymongo >= 3.0, != 3.6.0 # See PYTHON-1434, SERVER-34820
PyYAML >= 3.0.0
requests >= 2.0.0
pymongo >= 3.0, != 3.6.0 # See PYTHON-1434, SERVER-34820
psutil

View File

@ -1 +1,4 @@
click ~= 7.0
GitPython ~= 2.1.11
psutil
structlog ~= 19.1.0

View File

@ -1,5 +1,5 @@
PyKMIP == 0.4.0 # It's now 0.8.0. We're far enough back to have API conflicts.
evergreen.py == 0.3.2
evergreen.py == 0.3.9
jinja2
mock
shrub.py == 0.2.3

View File

@ -5,3 +5,4 @@
-r components/lint.req
-r components/resmoke.req
-r components/external_auth.req
-r components/evergreen.req

View File

@ -4,6 +4,7 @@
-r components/core.req
-r components/compile.req
-r components/evergreen.req
-r components/lint.req
-r components/mypy.req
-r components/resmoke.req