diff --git a/.bazelrc b/.bazelrc index 5d174d87265..2a4e78edcb8 100644 --- a/.bazelrc +++ b/.bazelrc @@ -405,10 +405,7 @@ common:mod-scanner --aspects //modules_poc:mod_scanner.bzl%mod_scanner_aspect common:mod-scanner --remote_download_regex=.*\.mod_scanner_decls.json$ # if you don't have access to the remote execution cluster above, use the local config -# described below. -# pass local config to SCons like: -# > buildscripts/scons.py BAZEL_FLAGS=--config=local -# or if invoking bazel directly pass "--config=local" on the bazel command line +# by passing "--config=local" on the bazel command line --config=local common:local --remote_executor= common:local --remote_cache= diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 47883c7d7db..c0673bb2205 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,16 +6,14 @@ # The following patterns are parsed from ./OWNERS.yml OWNERS.yml @10gen/server-root-ownership @svc-auto-approve-bot -.bazelignore @10gen/devprod-build @svc-auto-approve-bot -.bazelrc @10gen/devprod-build @svc-auto-approve-bot -.bazelversion @10gen/devprod-build @svc-auto-approve-bot +.bazel* @10gen/devprod-build @svc-auto-approve-bot .clang-format @10gen/server-programmability @svc-auto-approve-bot .clang-tidy.in @10gen/server-programmability @svc-auto-approve-bot .gitignore @10gen/devprod-build @svc-auto-approve-bot .mypy.ini @10gen/devprod-build @10gen/devprod-correctness @svc-auto-approve-bot .prettierignore @10gen/devprod-correctness @svc-auto-approve-bot .prettierrc @10gen/devprod-correctness @svc-auto-approve-bot -/BUILD.bazel @10gen/devprod-build @svc-auto-approve-bot +BUILD.bazel @10gen/devprod-build @svc-auto-approve-bot copy.bara.sky @IamXander @smcclure15 @svc-auto-approve-bot copy.bara.staging.sky @10gen/devprod-correctness @svc-auto-approve-bot eslint.config.mjs @10gen/devprod-correctness @svc-auto-approve-bot @@ -25,7 +23,7 @@ pnpm-lock.yaml @10gen/devprod-correctness @svc-auto-approve-bot poetry.lock @10gen/devprod-build 
@10gen/devprod-correctness @svc-auto-approve-bot pyproject.toml @10gen/devprod-build @10gen/devprod-correctness @svc-auto-approve-bot sbom.json @10gen/server-security @svc-auto-approve-bot -SConstruct @10gen/devprod-build @svc-auto-approve-bot +MODULE.bazel* @10gen/devprod-build @svc-auto-approve-bot WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot # The following patterns are parsed from ./bazel/OWNERS.yml @@ -163,9 +161,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot /buildscripts/tracing_profiler/**/* @10gen/query @svc-auto-approve-bot # The following patterns are parsed from ./docs/OWNERS.yml -/docs/**/bazel.md @10gen/devprod-build @svc-auto-approve-bot -/docs/**/build_system_reference.md @10gen/devprod-build @svc-auto-approve-bot -/docs/**/build_system.md @10gen/devprod-build @svc-auto-approve-bot /docs/**/building.md @10gen/devprod-build @svc-auto-approve-bot /docs/**/poetry_execution.md @10gen/devprod-correctness @svc-auto-approve-bot /docs/**/linting.md @10gen/devprod-build @svc-auto-approve-bot @@ -1232,9 +1227,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot # The following patterns are parsed from ./jstests/with_mongot/OWNERS.yml /jstests/with_mongot/**/* @10gen/query-integration-search @svc-auto-approve-bot -# The following patterns are parsed from ./site_scons/OWNERS.yml -/site_scons/**/* @10gen/devprod-build @svc-auto-approve-bot - # The following patterns are parsed from ./src/mongo/OWNERS.yml /src/mongo/**/config.h.in @10gen/server-programmability @svc-auto-approve-bot @@ -1606,8 +1598,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot # The following patterns are parsed from ./src/mongo/db/modules/enterprise/OWNERS.yml /src/mongo/db/modules/enterprise/BUILD.bazel @10gen/devprod-build @svc-auto-approve-bot -/src/mongo/db/modules/enterprise/build.py @10gen/devprod-build @svc-auto-approve-bot -/src/mongo/db/modules/enterprise/SConscript @10gen/devprod-build @svc-auto-approve-bot 
/src/mongo/db/modules/enterprise/.gitignore @10gen/devprod-build @svc-auto-approve-bot /src/mongo/db/modules/enterprise/.git-blame-ignore-revs @10gen/devprod-build @svc-auto-approve-bot /src/mongo/db/modules/enterprise/.clang-format @10gen/server-programmability @svc-auto-approve-bot @@ -2620,7 +2610,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot /src/third_party/**/SafeInt @10gen/server-programmability @svc-auto-approve-bot /src/third_party/**/sasl @10gen/server-security @svc-auto-approve-bot /src/third_party/**/schemastore.org @10gen/query-optimization @svc-auto-approve-bot -/src/third_party/**/scons* @10gen/devprod-build @svc-auto-approve-bot /src/third_party/**/snappy @10gen/server-networking-and-observability @svc-auto-approve-bot /src/third_party/**/tcmalloc @10gen/server-workload-scheduling @svc-auto-approve-bot /src/third_party/**/timelib @10gen/query-execution @svc-auto-approve-bot @@ -2631,7 +2620,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot /src/third_party/**/yaml-cpp @10gen/server-security @svc-auto-approve-bot /src/third_party/**/zlib @10gen/server-networking-and-observability @svc-auto-approve-bot /src/third_party/**/zstandard @10gen/server-networking-and-observability @svc-auto-approve-bot -/src/third_party/**/SConscript @10gen/devprod-build @svc-auto-approve-bot /src/third_party/**/*.bazel @10gen/devprod-build @svc-auto-approve-bot # The following patterns are parsed from ./src/third_party/libmongocrypt/OWNERS.yml diff --git a/.vscode_defaults/linux-virtual-workstation.code-workspace b/.vscode_defaults/linux-virtual-workstation.code-workspace index b1fc4b70784..d2c16aea85d 100644 --- a/.vscode_defaults/linux-virtual-workstation.code-workspace +++ b/.vscode_defaults/linux-virtual-workstation.code-workspace @@ -19,20 +19,16 @@ "javascript" ], "files.associations": { - "SConstruct": "python", - "SConscript": "python", "*.idl": "yaml", }, "files.insertFinalNewline": true, "js/ts.implicitProjectConfig.target": 
"ES2020", "python.autoComplete.extraPaths": [ "/opt/mongodbtoolchain/v4/share/gcc-11.3.0/python", - "src/third_party/scons-3.1.2/scons-local-3.1.2" ], "python.defaultInterpreterPath": "python3-venv/bin/python", "python.analysis.extraPaths": [ "/opt/mongodbtoolchain/v4/share/gcc-11.3.0/python", - "src/third_party/scons-3.1.2/scons-local-3.1.2" ], "mypy-type-checker.path": [ "${interpreter}", diff --git a/OWNERS.yml b/OWNERS.yml index 4a5a5af5a4e..c3ecf5b4efa 100644 --- a/OWNERS.yml +++ b/OWNERS.yml @@ -6,13 +6,7 @@ filters: metadata: emeritus_approvers: - visemet # TODO: add back to approvers once project work is finished - - ".bazelignore": - approvers: - - 10gen/devprod-build - - ".bazelrc": - approvers: - - 10gen/devprod-build - - ".bazelversion": + - ".bazel*": approvers: - 10gen/devprod-build - ".clang-format": @@ -34,7 +28,7 @@ filters: - ".prettierrc": approvers: - 10gen/devprod-correctness - - "/BUILD.bazel": + - "BUILD.bazel": approvers: - 10gen/devprod-build - "copy.bara.sky": @@ -67,7 +61,7 @@ filters: - "sbom.json": approvers: - 10gen/server-security - - "SConstruct": + - "MODULE.bazel*": approvers: - 10gen/devprod-build - "WORKSPACE.bazel": diff --git a/README.third_party.md b/README.third_party.md index fbe72c9e733..1c8129b08de 100644 --- a/README.third_party.md +++ b/README.third_party.md @@ -64,7 +64,6 @@ a notice will be included in | [pyiso8601] | MIT | 2.1.0 | unknown | | | [RoaringBitmap/CRoaring] | Unknown License | v3.0.1 | | ✗ | | [SchemaStore/schemastore] | Apache-2.0 | Unknown | | | -| [SCons - a Software Construction tool] | MIT | 3.1.2 | | ✗ | | [smhasher] | Unknown License | Unknown | unknown | ✗ | | [Snowball Stemming Algorithms] | BSD-3-Clause | 7b264ffa0f767c579d052fd8142558dc8264d795 | ✗ | ✗ | | [subunit] | BSD-3-Clause, Apache-2.0 | 1.4.4 | unknown | | @@ -90,7 +89,6 @@ a notice will be included in [PCRE2]: http://www.pcre.org/ [Protobuf]: https://github.com/protocolbuffers/protobuf [RoaringBitmap/CRoaring]: 
https://github.com/RoaringBitmap/CRoaring -[SCons - a Software Construction tool]: https://github.com/SCons/scons [SchemaStore/schemastore]: https://www.schemastore.org/json/ [Snowball Stemming Algorithms]: https://github.com/snowballstem/snowball [arximboldi/immer]: https://github.com/arximboldi/immer diff --git a/SConstruct b/SConstruct deleted file mode 100644 index 0360663fdcb..00000000000 --- a/SConstruct +++ /dev/null @@ -1,7158 +0,0 @@ -# -*- mode: python; -*- - -import atexit -import copy -import errno -import functools -import json -import os -import platform -import re -import shlex -import shutil -import subprocess -import sys -import textwrap -import time -import uuid -from datetime import datetime -from glob import glob - -import SCons -import SCons.Script -from mongo_tooling_metrics.lib.top_level_metrics import SConsToolingMetrics -from pkg_resources import parse_version - -from site_scons.mongo import build_profiles - -# This must be first, even before EnsureSConsVersion, if -# we are to avoid bulk loading all tools in the DefaultEnvironment. -DefaultEnvironment(tools=[]) - -# These come from site_scons/mongo. Import these things -# after calling DefaultEnvironment, for the sake of paranoia. -import mongo -import mongo.generators as mongo_generators -import mongo.install_actions as install_actions -import mongo.platform as mongo_platform -import mongo.toolchain as mongo_toolchain - -EnsurePythonVersion(3, 10) -EnsureSConsVersion(3, 1, 1) - -utc_starttime = datetime.utcnow() - - -# Monkey patch SCons.FS.File.release_target_info to be a no-op. 
-# See https://github.com/SCons/scons/issues/3454 -def release_target_info_noop(self): - pass - - -SCons.Node.FS.File.release_target_info = release_target_info_noop - -import psutil - -from buildscripts import moduleconfig, utils - -if os.environ.get("CI") is None: - print(""" - -------- ANNOUNCEMENT -------- - The SCons interface will soon be deprecated on the master branch, please try - your workflow with Bazel directly by visiting https://wiki.corp.mongodb.com/display/HGTC/Building+with+Bazel - - If your workflow does not work with Bazel now, please post in #ask-devprod-build with details. - ------------------------------ - """) - - time.sleep(5) - -scons_invocation = "{} {}".format(sys.executable, " ".join(sys.argv)) -print("scons: running with args {}".format(scons_invocation)) - -atexit.register(mongo.print_build_failures) - -# An extra instance of the SCons parser is used to manually validate options -# flags. We use it detect some common misspellings/unknown options and -# communicate with the user more effectively than just allowing Configure to -# fail. -# This is to work around issue #4187 -# (https://github.com/SCons/scons/issues/4187). Upon a future upgrade to SCons -# that incorporates #4187, we should replace this solution with that. -_parser = SCons.Script.SConsOptions.Parser("") - - -def add_option(name, **kwargs): - _parser.add_option("--" + name, **{"default": None, **kwargs}) - - if "dest" not in kwargs: - kwargs["dest"] = name - - if "metavar" not in kwargs and kwargs.get("type", None) == "choice": - kwargs["metavar"] = "[" + "|".join(kwargs["choices"]) + "]" - - AddOption("--" + name, **kwargs) - - -def get_option(name): - return GetOption(name) - - -def has_option(name): - optval = GetOption(name) - # Options with nargs=0 are true when their value is the empty tuple. Otherwise, - # if the value is falsish (empty string, None, etc.), coerce to False. 
- return True if optval == () else bool(optval) - - -def use_system_version_of_library(name): - # Disabled during Bazel migration - return False - - -def using_system_version_of_cxx_libraries(): - # Disabled during Bazel migration - return False - - -def make_variant_dir_generator(): - memoized_variant_dir = [False] - - def generate_variant_dir(target, source, env, for_signature): - if not memoized_variant_dir[0]: - memoized_variant_dir[0] = env.subst("$BUILD_ROOT/$VARIANT_DIR") - return memoized_variant_dir[0] - - return generate_variant_dir - - -# Always randomize the build order to shake out missing edges, and to help the cache: -# http://scons.org/doc/production/HTML/scons-user/ch24s06.html -SetOption("random", 1) - -# Options TODOs: -# -# - We should either alphabetize the entire list of options, or split them into logical groups -# with clear boundaries, and then alphabetize the groups. There is no way in SCons though to -# inform it of options groups. -# -# - Many of these options are currently only either present or absent. This is not good for -# scripting the build invocation because it means you need to interpolate in the presence of -# the whole option. It is better to make all options take an optional on/off or true/false -# using the nargs='const' mechanism. -# - -add_option( - "build-profile", - choices=[type for type in build_profiles.BuildProfileType], - default=build_profiles.BuildProfileType.DEFAULT, - type="choice", - help="""Short hand for common build configurations. These profiles are well supported by the build - and are kept up to date. The 'default' profile should be used unless you have the required - prerequisites in place to use the other profiles, i.e. having the mongodbtoolchain installed - and being connected to an icecream cluster. For mongodb developers, it is recommended to use - the 'san' (sanitizer) profile to identify bugs as soon as possible. 
Check out - site_scons/mongo/build_profiles.py to see each profile.""", -) - -add_option( - "evergreen-tmp-dir", - help="Configures the path to the evergreen configured tmp directory.", - default=None, -) - -integrate_bazel = Tool("integrate_bazel") -integrate_bazel.exists(DefaultEnvironment()) -mongo_toolchain_execroot = DefaultEnvironment().BazelExecroot() - -build_profile = build_profiles.get_build_profile(get_option("build-profile")) - -add_option( - "ninja", - choices=["enabled", "disabled"], - default=build_profile.ninja, - nargs="?", - const="enabled", - type="choice", - help="Enable the build.ninja generator tool stable or canary version", -) - -add_option( - "force-jobs", - help="Allow more jobs than available cpu's when icecream is not enabled.", - nargs=0, -) - -add_option( - "build-tools", - choices=["stable", "next"], - default="stable", - type="choice", - help="Enable experimental build tools", -) - -add_option( - "legacy-tarball", - choices=["true", "false"], - default="false", - const="true", - nargs="?", - type="choice", - help="Build a tarball matching the old MongoDB dist targets", -) - -add_option( - "lint-scope", - choices=["all", "changed"], - default="all", - type="choice", - help="Lint files in the current git diff instead of all files", -) - -add_option( - "bazel-includes-info", - action="append", - help="write included headers in bazel label format to put files ([library].bazel_includes)", - default=[], -) - -add_option( - "install-mode", - choices=["hygienic"], - default="hygienic", - help="select type of installation", - nargs=1, - type="choice", -) - -add_option( - "install-action", - choices=([*install_actions.available_actions] + ["default"]), - default="hardlink", - help="select mechanism to use to install files (advanced option to reduce disk IO and utilization)", - nargs=1, - type="choice", -) - -add_option( - "build-dir", - default="#build", - help="build output directory", -) - -add_option( - "release", - choices=["on", "off"], - 
const="on", - default=build_profile.release, - help="release build", - nargs="?", - type="choice", -) - -add_option( - "remote-exec-release", - choices=["on", "off"], - const="off", - default=build_profile.remote_exec_release, - help="Turn on bazel remote execution for release", - nargs="?", - type="choice", -) - -add_option( - "lto", - help="enable full link time optimizations (experimental, except with MSVC)", - nargs=0, -) - -add_option( - "thin-lto", - help="enable thin link time optimizations (experimental)", - nargs=0, -) - -add_option( - "endian", - choices=["big", "little", "auto"], - default="auto", - help="endianness of target platform", - nargs=1, - type="choice", -) - -add_option( - "disable-minimum-compiler-version-enforcement", - help="allow use of unsupported older compilers (NEVER for production builds)", - nargs=0, -) - -add_option( - "ssl", - help="Enable or Disable SSL", - choices=["on", "off"], - default="on", - const="on", - nargs="?", - type="choice", -) - -add_option( - "wiredtiger", - choices=["on", "off"], - const="on", - default="on", - help="Enable wiredtiger", - nargs="?", - type="choice", -) - -add_option( - "ocsp-stapling", - choices=["on", "off"], - default="on", - help="Enable OCSP Stapling on servers", - nargs="?", - type="choice", -) - -js_engine_choices = ["mozjs", "none"] -add_option( - "js-engine", - choices=js_engine_choices, - default=js_engine_choices[0], - help="JavaScript scripting engine implementation", - type="choice", -) - -add_option( - "server-js", - choices=["on", "off"], - default="on", - help="Build mongod without JavaScript support", - type="choice", -) - -add_option( - "libc++", - help="use libc++ (experimental, requires clang)", - nargs=0, -) - -add_option( - "use-glibcxx-debug", - help="Enable the glibc++ debug implementations of the C++ standard libary", - nargs=0, -) - -add_option( - "noshell", - help="don't build shell", - nargs=0, -) - -add_option( - "dbg", - choices=["on", "off"], - const="on", - 
default=build_profile.dbg, - help="Enable runtime debugging checks", - nargs="?", - type="choice", -) - -add_option( - "debug-symbols", - choices=["on", "off", "minimal"], - default=build_profile.debug_symbols, - help="Enable producing debug symbols", - nargs=1, - type="choice", -) - -add_option( - "skip-archive", - choices=["on", "off"], - default="off", - help="Enable runtime debugging checks", - nargs="?", - type="choice", -) - -add_option( - "disable-ref-track", - help="Disables runtime tracking of REF state changes for pages within wiredtiger. " - "Tracking the REF state changes is useful for debugging but there is a small performance cost.", - nargs=0, -) - -add_option( - "separate-debug", - choices=["on", "off"], - const="on", - default="off", - help="Produce separate debug files", - nargs="?", - type="choice", -) - -add_option( - "spider-monkey-dbg", - choices=["on", "off"], - const="on", - default="off", - help="Enable SpiderMonkey debug mode", - nargs="?", - type="choice", -) - -add_option( - "opt", - choices=["on", "debug", "size", "off", "auto"], - const="on", - default=build_profile.opt, - help="Enable compile-time optimization", - nargs="?", - type="choice", -) - -experimental_optimizations = [ - "O3", - "builtin-memcmp", - "fnsi", - "nofp", - "nordyn", - "sandybridge", - "tbaa", - "treevec", - "vishidden", -] -experimental_optimization_choices = ["*"] -experimental_optimization_choices.extend("+" + opt for opt in experimental_optimizations) -experimental_optimization_choices.extend("-" + opt for opt in experimental_optimizations) - -add_option( - "experimental-optimization", - action="append", - choices=experimental_optimization_choices, - const=experimental_optimization_choices[0], - default=["+sandybridge"], - help="Enable experimental optimizations", - nargs="?", - type="choice", -) - -add_option( - "debug-compress", - action="append", - choices=["off", "as", "ld"], - default=["auto"], - help="Compress debug sections", -) - -add_option( - 
"coverity-build", - action="store_true", - default=False, - help=( - "Enable coverity build mode, which only means the bazel build will not run. " - "The bazel build is expected to be run in a prior separate coverity enabled bazel build." - ), -) - -add_option( - "sanitize", - help="enable selected sanitizers", - metavar="san1,san2,...sanN", - default=build_profile.sanitize, -) - -add_option( - "sanitize-coverage", - help="enable selected coverage sanitizers", - metavar="cov1,cov2,...covN", -) - -add_option( - "shared-libsan", - choices=["on", "off"], - default="off", - nargs="?", - const="on", - help="dynamically link to sanitizer runtime(s)", - type="choice", -) - -add_option( - "allocator", - choices=["auto", "system", "tcmalloc-google", "tcmalloc-gperf"], - default=build_profile.allocator, - help='allocator to use (use "auto" for best choice for current platform)', - type="choice", -) - -add_option( - "gdbserver", - help="build in gdb server support", - nargs=0, -) - -add_option( - "lldb-server", - help="build in lldb server support", - nargs=0, -) - -add_option( - "wait-for-debugger", - help="Wait for debugger attach on process startup", - nargs=0, -) - -add_option( - "gcov", - help="compile with flags for gcov", - nargs=0, -) - -add_option( - "pgo-profile", - help="compile with pgo profiling", - nargs=0, -) - -add_option( - "pgo", - help="compile with pgo. 
Assumes profile file default.profdata at root of repository", - nargs=0, -) - -add_option( - "bolt", - help="compile with bolt", - nargs=0, -) - -add_option( - "enable-http-client", - choices=["auto", "on", "off"], - default="auto", - help="Enable support for HTTP client requests (required WinHTTP or cURL)", - type="choice", -) - -add_option( - "consolidated-test-bins", - choices=["on", "off"], - default="off", - help="Test binaries should build consolidated versions of themselves as defined by CONSOLIDATED_TARGET", - type="choice", -) - -add_option( - "use-sasl-client", - help="Support SASL authentication in the client library", - nargs=0, -) - -add_option( - "use-tracing-profiler", - choices=["on", "off"], - default="off", - help="Enable tracing profiler statistic collection", - type="choice", -) - -add_option( - "build-fast-and-loose", - choices=["on", "off", "auto"], - const="on", - default="auto", - help="looser dependency checking", - nargs="?", - type="choice", -) - -add_option( - "disable-warnings-as-errors", - action="append", - choices=["configure", "source"], - const="source", - default=build_profile.disable_warnings_as_errors, - help="Don't add a warnings-as-errors flag to compiler command lines in selected contexts; defaults to 'source' if no argument is provided", - nargs="?", - type="choice", -) - -add_option( - "detect-odr-violations", - help="Have the linker try to detect ODR violations, if supported", - nargs=0, -) - -add_option( - "variables-help", - help="Print the help text for SCons variables", - nargs=0, -) - -add_option( - "osx-version-min", - help="minimum OS X version to support", -) - -# https://docs.microsoft.com/en-us/cpp/porting/modifying-winver-and-win32-winnt?view=vs-2017 -# https://docs.microsoft.com/en-us/windows-server/get-started/windows-server-release-info -win_version_min_choices = { - "win10": ("0A00", "0000"), - "ws2016": ("0A00", "1607"), - "ws2019": ("0A00", "1809"), -} - -add_option( - "win-version-min", - 
choices=list(win_version_min_choices.keys()), - default=None, - help="minimum Windows version to support", - type="choice", -) - -add_option( - "cache", - choices=["all", "nolinked"], - const="all", - help="Use an object cache rather than a per-build variant directory (experimental)", - nargs="?", -) - -add_option( - "cache-dir", - default="$BUILD_ROOT/scons/cache", - help="Specify the directory to use for caching objects if --cache is in use", -) - -add_option( - "cache-signature-mode", - choices=["none", "validate"], - default="none", - help="Extra check to validate integrity of cache files after pulling from cache", -) - -add_option( - "cxx-std", - choices=["20"], - default="20", - help="Select the C++ language standard to build with", -) - - -def find_mongo_custom_variables(): - files = [] - paths = [path for path in sys.path if "site_scons" in path] - for path in paths: - probe = os.path.join(path, "mongo_custom_variables.py") - if os.path.isfile(probe): - files.append(probe) - return files - - -add_option( - "variables-files", - default=build_profile.variables_files, - action="append", - help="Specify variables files to load.", -) - -add_option( - "bazel-build-tag", - default=[], - action="append", - help="Specify additional tags to aggregate for --build_tag_filters", -) - -add_option( - "streams-release-build", - default=False, - action="store_true", - help="If set, will include the enterprise streams module in a release build.", -) - -add_option( - "disable-streams", - default=False, - action="store_true", - help="If set, will exclude the enterprise streams module in a non-streams build.", -) - -link_model_choices = ["auto", "object", "static", "dynamic", "dynamic-strict", "dynamic-sdk"] -add_option( - "link-model", - choices=link_model_choices, - default=build_profile.link_model, - help="Select the linking model for the project", - type="choice", -) - -add_option( - "linker", - choices=["auto", "gold", "lld", "bfd"], - default="auto", - help="Specify the 
type of linker to use.", - type="choice", -) - -variable_parse_mode_choices = ["auto", "posix", "other"] -add_option( - "variable-parse-mode", - choices=variable_parse_mode_choices, - default=variable_parse_mode_choices[0], - help="Select which parsing mode is used to interpret command line variables", - type="choice", -) - -add_option( - "modules", - help="Comma-separated list of modules to build. Empty means none. Default is all.", -) - -add_option( - "runtime-hardening", - choices=["on", "off"], - default="on", - help="Enable runtime hardening features (e.g. stack smash protection)", - type="choice", -) - -experimental_runtime_hardenings = [ - "cfex", - "controlflow", - "stackclash", -] -experimental_runtime_hardening_choices = ["*"] -experimental_runtime_hardening_choices.extend("+" + opt for opt in experimental_runtime_hardenings) -experimental_runtime_hardening_choices.extend("-" + opt for opt in experimental_runtime_hardenings) - -add_option( - "experimental-runtime-hardening", - action="append", - choices=experimental_runtime_hardening_choices, - const=experimental_runtime_hardening_choices[0], - default=[], - help="Enable experimental runtime hardenings", - nargs="?", - type="choice", -) - -add_option( - "use-hardware-crc32", - choices=["on", "off"], - default="on", - help="Enable CRC32 hardware acceleration", - type="choice", -) - -add_option( - "xray", - choices=["on", "off"], - default="off", - help="Build with LLVM XRay support", - type="choice", -) - -add_option( - "xray-instruction-threshold", - help="XRay instrumentation instruction threshold", - default=1, - nargs="?", - type=int, -) - -add_option( - "git-decider", - choices=["on", "off"], - const="on", - default="off", - help="Use git metadata for out-of-date detection for source files", - nargs="?", - type="choice", -) - -add_option( - "toolchain-root", - default=mongo_toolchain_execroot if mongo_toolchain_execroot else "", - help="Name a toolchain root for use with toolchain selection Variables 
files in etc/scons", -) - -add_option( - "msvc-debugging-format", - choices=["codeview", "pdb"], - default="codeview", - help="Debugging format in debug builds using msvc. Codeview (/Z7) or Program database (/Zi). Default is codeview.", - type="choice", -) - -add_option( - "use-libunwind", - choices=["on", "off", "auto"], - const="on", - default=build_profile.libunwind, - help="Enable libunwind for backtraces", - nargs="?", - type="choice", -) - -add_option( - "jlink", - help="Limit link concurrency. Takes either an integer to limit to or a" - " float between 0 and 1.0 whereby jobs will be multiplied to get the final" - " jlink value." - "\n\nExample: --jlink=0.75 --jobs 8 will result in a jlink value of 6", - const=0.5, - default=build_profile.jlink, - nargs="?", - type=float, -) - -add_option( - "enable-usdt-probes", - choices=["on", "off", "auto"], - default="auto", - help="Enable USDT probes. Default is auto, which is enabled only on Linux with SystemTap headers", - type="choice", - nargs="?", - const="on", -) - -add_option( - "libdeps-debug", - choices=["on", "off"], - const="off", - help="Print way too much debugging information on how libdeps is handling dependencies.", - nargs="?", - type="choice", -) - -add_option( - "libdeps-linting", - choices=["on", "off", "print"], - const="on", - default="on", - help="Enable linting of libdeps. Default is on, optionally 'print' will not stop the build.", - nargs="?", - type="choice", -) - -add_option( - "build-metrics", - metavar="FILE", - const="build-metrics.json", - default="", - help="Enable tracking of build performance and output data as json." - ' Use "-" to output json to stdout, or supply a path to the desired' - " file to output to. 
If no argument is supplied, the default log" - ' file will be "build-metrics.json".', - nargs="?", - type=str, -) - -add_option( - "visibility-support", - choices=["auto", "on", "off"], - const="auto", - default="auto", - help="Enable visibility annotations", - nargs="?", - type="choice", -) - -add_option( - "force-macos-dynamic-link", - default=False, - action="store_true", - help="Bypass link-model=dynamic check for macos versions <12.", -) - -add_option( - "bazel-dynamic-execution", - default=False, - action="store_true", - help="use bazel dynamic execution experimental feature", -) - - -# --build-mongot is a compile flag used by the evergreen build variants that run end-to-end search -# suites, as it downloads the necessary mongot binary. -add_option( - "build-mongot", - choices=["latest", "release"], - default=None, - type="choice", - help="Installs the appropriate mongot for your architecture", -) - -add_option( - "patch-build-mongot-url", - default=None, - help="Installs mongot binary from upstream patch on mongot-master for your architecture", -) - -try: - with open("version.json", "r") as version_fp: - version_data = json.load(version_fp) - - if "version" not in version_data: - print("version.json does not contain a version string") - Exit(1) - if "githash" not in version_data: - version_data["githash"] = utils.get_git_version() - -except IOError as e: - # If the file error wasn't because the file is missing, error out - if e.errno != errno.ENOENT: - print(("Error opening version.json: {0}".format(e.strerror))) - Exit(1) - - version_data = { - "version": utils.get_git_describe()[1:], - "githash": utils.get_git_version(), - } - -except ValueError as e: - print(("Error decoding version.json: {0}".format(e))) - Exit(1) - - -def to_boolean(s): - if isinstance(s, bool): - return s - elif s.lower() in ("1", "on", "true", "yes"): - return True - elif s.lower() in ("0", "off", "false", "no"): - return False - raise ValueError(f"Invalid value {s}, must be a 
boolean-like string") - - -# Setup the command-line variables -def variable_shlex_converter(val): - # If the argument is something other than a string, propagate - # it literally. - if not isinstance(val, str): - return val - parse_mode = get_option("variable-parse-mode") - if parse_mode == "auto": - parse_mode = "other" if mongo_platform.is_running_os("windows") else "posix" - return shlex.split(val, posix=(parse_mode == "posix")) - - -# Setup the command-line variables -def where_is_converter(val): - path = WhereIs(val) - if path: - return os.path.abspath(path) - return val - - -def variable_arch_converter(val): - arches = { - "x86_64": "x86_64", - "amd64": "x86_64", - "emt64": "x86_64", - "x86": "i386", - } - val = val.lower() - - if val in arches: - return arches[val] - - # Uname returns a bunch of possible x86's on Linux. - # Check whether the value is an i[3456]86 processor. - if re.match(r"^i[3-6]86$", val): - return "i386" - - # Return whatever val is passed in - hopefully it's legit - return val - - -def bool_var_converter(val, var): - try: - return to_boolean(val) - except ValueError as exc: - if val.lower() != "auto": - raise ValueError( - f'Invalid {var} value {s}, must be a boolean-like string or "auto"' - ) from exc - return "auto" - - -# The Scons 'default' tool enables a lot of tools that we don't actually need to enable. -# On platforms like Solaris, it actually does the wrong thing by enabling the sunstudio -# toolchain first. As such it is simpler and more efficient to manually load the precise -# set of tools we need for each platform. -# If we aren't on a platform where we know the minimal set of tools, we fall back to loading -# the 'default' tool. 
-def decide_platform_tools(): - if mongo_platform.is_running_os("windows"): - # we only support MS toolchain on windows - return ["msvc", "mslink", "mslib", "masm", "vcredist"] - elif mongo_platform.is_running_os("linux", "solaris"): - return ["gcc", "g++", "gnulink", "ar", "gas"] - elif mongo_platform.is_running_os("darwin"): - return ["gcc", "g++", "applelink", "ar", "libtool", "as", "xcode"] - else: - return ["default"] - - -def variable_tools_converter(val): - tool_list = shlex.split(val) - # This list is intentionally not sorted; the order of tool loading - # matters as some of the tools have dependencies on other tools. - return tool_list + [ - "distsrc", - "gziptool", - "mongo_consolidated_targets", - "mongo_test_execution", - "mongo_test_list", - "mongo_benchmark", - "mongo_integrationtest", - "mongo_unittest", - "mongo_libfuzzer", - "mongo_pretty_printer_tests", - "textfile", - "mongo_workload_simulator", - ] - - -def variable_distsrc_converter(val): - if not val.endswith("/"): - return val + "/" - return val - - -def fatal_error(env, msg, *args): - print(msg.format(*args)) - Exit(1) - - -# Apply the default variables files, and walk the provided -# arguments. Interpret any falsy argument (like the empty string) as -# resetting any prior state. This makes the argument -# --variables-files= destructive of any prior variables files -# arguments, including the default. 
-variables_files_args = get_option("variables-files") -variables_files = find_mongo_custom_variables() -for variables_file in variables_files_args: - if variables_file: - variables_files.append(variables_file) - else: - variables_files = [] -for vf in variables_files: - if not os.path.isfile(vf): - fatal_error(None, f"Specified variables file '{vf}' does not exist") - print(f"Using variable customization file {vf}") - -env_vars = Variables( - files=variables_files, - args=ARGUMENTS, -) - -sconsflags = os.environ.get("SCONSFLAGS", None) -if sconsflags: - print(("Using SCONSFLAGS environment variable arguments: %s" % sconsflags)) - -env_vars.Add( - "ABIDW", - help="Configures the path to the 'abidw' (a libabigail) utility", -) - -env_vars.Add( - "AR", - help="Sets path for the archiver", -) - -env_vars.Add( - "ARFLAGS", - help="Sets flags for the archiver", - converter=variable_shlex_converter, -) - -env_vars.Add( - "CCACHE", - help="Tells SCons where the ccache binary is", - default=build_profile.CCACHE, -) - -env_vars.Add( - "CACHE_SIZE", - help="Maximum size of the SCons cache (in gigabytes)", - default=32, - converter=lambda x: int(x), -) - -env_vars.Add( - "CACHE_PRUNE_TARGET", - help="Maximum percent in-use in SCons cache after pruning", - default=66, - converter=lambda x: int(x), -) - -env_vars.Add( - "CC", - help="Selects the C compiler to use", -) - -env_vars.Add( - "CCFLAGS", - help="Sets flags for the C and C++ compiler", - converter=variable_shlex_converter, -) - -env_vars.Add( - "NON_CONF_CCFLAGS", - help="Sets flags for the C and C++ compiler that are not used in configure checks", - converter=variable_shlex_converter, -) - -env_vars.Add( - "TOOLCHAIN_CCFLAGS", - help="Sets flags for the C and C++ compiler specific to the toolchain", - converter=variable_shlex_converter, -) - -env_vars.Add( - "NON_CONF_LINKFLAGS", - help="Sets flags for the C and C++ linker that are not used in configure checks", - converter=variable_shlex_converter, -) - -env_vars.Add( 
- "TOOLCHAIN_LINKFLAGS", - help="Sets flags for the C and C++ linker specific to the toolchain", - converter=variable_shlex_converter, -) - -env_vars.Add( - "ASFLAGS", - help="Sets assembler specific flags", - converter=variable_shlex_converter, -) - -env_vars.Add( - "CFLAGS", - help="Sets flags for the C compiler", - converter=variable_shlex_converter, -) - -env_vars.Add( - "CPPDEFINES", - help="Sets pre-processor definitions for C and C++", - converter=variable_shlex_converter, - default=[], -) - -env_vars.Add( - "CPPPATH", - help="Adds paths to the preprocessor search path", - converter=variable_shlex_converter, -) - -env_vars.Add( - "CXX", - help="Selects the C++ compiler to use", -) - -env_vars.Add( - "CXXFLAGS", - help="Sets flags for the C++ compiler", - converter=variable_shlex_converter, -) - -env_vars.Add( - "UNITTESTS_COMPILE_CONCURRENCY", - help="Sets the ratio of total jobs for max concurrency when compiling unittests source files. Should be float between 0 and 1.", - default="1", - converter=lambda val: float(val) if val != "" else "", -) - -env_vars.Add( - "DESTDIR", - help="Where builds will install files", - default="$BUILD_ROOT/install", -) - -env_vars.Add( - "BAZEL_FLAGS", - help="Flags specific to bazel to pass through to the underlying bazel build command.", - default="", -) - -env_vars.Add( - "BAZEL_INTEGRATION_DEBUG", - help="Enable SCons/Bazel integration debug output", - converter=functools.partial(bool_var_converter, var="BAZEL_INTEGRATION_DEBUG"), - default="0", -) - -env_vars.Add( - "DSYMUTIL", - help="Path to the dsymutil utility", -) - -env_vars.Add( - "MONGO_TOOLCHAIN_VERSION", - default="v4", - help="Version of the mongo toolchain to use in bazel.", -) - - -def validate_dwarf_version(key, val, env): - if val == "4" or val == "5" or val == "": - return - - print(f"Invalid DWARF_VERSION '{val}'. Only valid versions are 4 or 5.") - Exit(1) - - -env_vars.Add( - "DWARF_VERSION", - help="Sets the DWARF version (non-Windows). 
Incompatible with SPLIT_DWARF=1.", - validator=validate_dwarf_version, - converter=lambda val: int(val) if val != "" else "", - default="", -) - - -def validate_dwarf_width(key, val, env): - if val == "32" or val == "64" or val == "": - return - - print(f"Invalid DWARF_WIDTH '{val}'. Only valid versions are 32 or 64.") - Exit(1) - - -env_vars.Add( - "DWARF_WIDTH", - help="Sets the DWARF addressing mode to either 32-bit or 64-bit (non-Windows)", - validator=validate_dwarf_width, - converter=lambda val: int(val) if val != "" else "", - default="", -) - -env_vars.Add( - "READELF", - help="Path to readelf", - default="readelf", -) - -env_vars.Add( - "GITDIFFFLAGS", - help="Sets flags for git diff", - default="", -) - -env_vars.Add( - "REVISION", - help="Base git revision", - default="", -) - -# Note: This probably is only really meaningful when configured via a variables file. It will -# also override whatever the SCons platform defaults would be. -env_vars.Add( - "ENV", - help="Sets the environment for subprocesses", -) - -env_vars.Add( - "FRAMEWORKPATH", - help="Adds paths to the linker search path for darwin frameworks", - converter=variable_shlex_converter, -) - -env_vars.Add( - "FRAMEWORKS", - help="Adds extra darwin frameworks to link against", - converter=variable_shlex_converter, -) - -env_vars.Add( - "HOST_ARCH", - help="Sets the native architecture of the compiler", - converter=variable_arch_converter, - default=None, -) - -env_vars.Add( - "ICECC", - help="Tells SCons where icecream icecc tool is", - default=build_profile.ICECC, -) - -env_vars.Add( - "ICERUN", - help="Tells SCons where icecream icerun tool is", -) - -env_vars.Add( - "ICECC_CREATE_ENV", - help="Tells SCons where icecc-create-env tool is", - default="icecc-create-env", -) - -env_vars.Add( - "ICECC_DEBUG", - help="Tell ICECC to create debug logs (auto, on/off true/false 1/0)", - default=False, -) - -env_vars.Add( - "ICECC_SCHEDULER", - help="Tells ICECC where the scheduler daemon is running", -) 
- -env_vars.Add( - "ICECC_VERSION", - help="Tells ICECC where the compiler package is", -) - -env_vars.Add( - "ICECC_VERSION_ARCH", - help="Tells ICECC the target architecture for the compiler package, if non-native", -) - -env_vars.Add( - "LIBPATH", - help="Adds paths to the linker search path", - converter=variable_shlex_converter, -) - -env_vars.Add( - "LIBS", - help="Adds extra libraries to link against", - converter=variable_shlex_converter, -) - -env_vars.Add( - "LINKFLAGS", - help="Sets flags for the linker", - converter=variable_shlex_converter, -) - -env_vars.Add( - "LLVM_SYMBOLIZER", - help="Name of or path to the LLVM symbolizer", -) - -env_vars.Add( - "MAXLINELENGTH", - help="Maximum line length before using temp files", - # This is very small, but appears to be the least upper bound - # across our platforms. - # - # See https://support.microsoft.com/en-us/help/830473/command-prompt-cmd.-exe-command-line-string-limitation - default=4095, -) - -# Note: This is only really meaningful when configured via a variables file. See the -# default_buildinfo_environment_data() function for examples of how to use this. 
-env_vars.Add( - "MONGO_BUILDINFO_ENVIRONMENT_DATA", - help="Sets the info returned from the buildInfo command and --version command-line flag", - default=mongo_generators.default_buildinfo_environment_data(), -) - -env_vars.Add( - "MONGO_DIST_SRC_PREFIX", - help="Sets the prefix for files in the source distribution archive", - converter=variable_distsrc_converter, - default="mongodb-src-r${MONGO_VERSION}", -) - -env_vars.Add( - "MONGO_DISTARCH", - help="Adds a string representing the target processor architecture to the dist archive", - default="$TARGET_ARCH", -) - -env_vars.Add( - "MONGO_DISTMOD", - help="Adds a string that will be embedded in the dist archive naming", - default="", -) - -env_vars.Add( - "MONGO_DISTNAME", - help="Sets the version string to be used in dist archive naming", - default="$MONGO_VERSION", -) - - -def validate_mongo_version(key, val, env): - valid_version_re = re.compile(r"^(\d+)\.(\d+)\.(\d+)-?((?:(rc)(\d+))?.*)?$", re.MULTILINE) - invalid_version_re = re.compile(r"^0\.0\.0(?:-.*)?", re.MULTILINE) - if not valid_version_re.match(val) or invalid_version_re.match(val): - print( - ( - "Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format( - val - ) - ) - ) - Exit(1) - - -env_vars.Add( - "MONGO_VERSION", - help="Sets the version string for MongoDB", - default=version_data["version"], - validator=validate_mongo_version, -) - -env_vars.Add( - "MONGO_GIT_HASH", - help="Sets the githash to store in the MongoDB version information", - default=version_data["githash"], -) - -env_vars.Add( - "MSVC_USE_SCRIPT", - help="Sets the script used to setup Visual Studio.", -) - -env_vars.Add( - "MSVC_VERSION", - help="Sets the version of Visual C++ to use (e.g. 
14.2 for VS2019, 14.3 for VS2022)", - default="14.3", -) - -env_vars.Add( - "LINKFLAGS_COMPILER_EXEC_PREFIX", - help="Specify the search path to be injected into the LINKFLAGS", - default="", -) - -env_vars.Add( - "COMPILER_EXEC_PREFIX_OPT", - help="Specify the option sign for compiler exec search paths.", - default="-B", -) - -env_vars.Add( - "NINJA_BUILDDIR", - help="Location for shared Ninja state", - default="$BUILD_ROOT/ninja", -) - -env_vars.Add( - "NINJA_PREFIX", - default=build_profile.NINJA_PREFIX, - help="""A prefix to add to the beginning of generated ninja -files. Useful for when compiling multiple build ninja files for -different configurations, for instance: - - scons --sanitize=asan --ninja NINJA_PREFIX=asan asan.ninja - scons --sanitize=tsan --ninja NINJA_PREFIX=tsan tsan.ninja - -Will generate the files (respectively): - - asan.ninja - tsan.ninja - -Defaults to build. Best used with the --ninja flag so you don't have to -reiterate the prefix in the target name and variable. -""", -) - -env_vars.Add( - "NINJA_SUFFIX", - help="""A suffix to add to the end of generated build.ninja -files. Useful for when compiling multiple build ninja files for -different configurations, for instance: - - scons --sanitize=asan --ninja NINJA_SUFFIX=asan build.ninja - scons --sanitize=tsan --ninja NINJA_SUFFIX=tsan build.ninja - -Will generate the files (respectively): - - build.ninja.asan - build.ninja.tsan -""", -) - -env_vars.Add( - "__NINJA_NO", - help="Disables the Ninja tool unconditionally. 
Not intended for human use.", - default=0, -) - -env_vars.Add( - "COMPILATIONDB_IGNORE_WRAPPERS", - help="Comma separated list of variables which reference wrapper binaries that should be excluded when generating compile_commands.json", - default="$ICECC,$ICERUN,$ICECREAM_RUN_ICECC,$CCACHE", -) - -env_vars.Add( - "OBJCOPY", - help="Sets the path to objcopy", - default=WhereIs("objcopy"), -) - -env_vars.Add( - "PKGDIR", - help="Directory in which to build packages and archives", - default="$BUILD_DIR/pkgs", -) - -env_vars.Add( - "PREFIX", - help="Final installation location of files. Will be made into a sub dir of $DESTDIR", - default=".", -) - -# Exposed to be able to cross compile Android/*nix from Windows without ending up with the .exe suffix. -env_vars.Add( - "PROGSUFFIX", - help="Sets the suffix for built executable files", -) - -env_vars.Add( - "RPATH", - help="Set the RPATH for dynamic libraries and executables", - converter=variable_shlex_converter, -) - -env_vars.Add( - "SHCCFLAGS", - help="Sets flags for the C and C++ compiler when building shared libraries", - converter=variable_shlex_converter, -) - -env_vars.Add( - "SHCFLAGS", - help="Sets flags for the C compiler when building shared libraries", - converter=variable_shlex_converter, -) - -env_vars.Add( - "SHCXXFLAGS", - help="Sets flags for the C++ compiler when building shared libraries", - converter=variable_shlex_converter, -) - -env_vars.Add( - "SHELL", - help="Picks the shell to use when spawning commands", -) - -env_vars.Add( - "SHLINKFLAGS", - help="Sets flags for the linker when building shared libraries", - converter=variable_shlex_converter, -) - -env_vars.Add( - "SHLINKFLAGS_EXTRA", - help="Adds additional flags for shared links without overwriting tool configured SHLINKFLAGS values", - converter=variable_shlex_converter, -) - -env_vars.Add( - "STRIP", - help="Path to the strip utility (non-darwin platforms probably use OBJCOPY for this)", -) - -env_vars.Add( - "PROTOC", - 
default="$$PROTOC_VAR_GEN", - help="Path to protobuf compiler.", - converter=where_is_converter, -) - -env_vars.Add( - "PROTOC_GRPC_PLUGIN", - default="$$PROTOC_GRPC_PLUGIN_GEN", - help="Path to protobuf compiler grpc plugin.", - converter=where_is_converter, -) - -env_vars.Add( - "SPLIT_DWARF", - help="Set the boolean (auto, on/off true/false 1/0) to enable gsplit-dwarf (non-Windows). Incompatible with DWARF_VERSION=5", - converter=functools.partial(bool_var_converter, var="SPLIT_DWARF"), - default="auto", -) - -env_vars.Add( - "ENABLE_OTEL_BUILD", - help="Set the boolean (auto, on/off true/false 1/0) to enable building otel and protobuf compiler.", - converter=functools.partial(bool_var_converter, var="ENABLE_OTEL_BUILD"), - default="0", -) - -env_vars.Add( - "GDB", - help="Configures the path to the 'gdb' debugger binary.", -) - -env_vars.Add( - "GDB_INDEX", - help="Set the boolean (auto, on/off true/false 1/0) to enable creation of a gdb_index in binaries.", - converter=functools.partial(bool_var_converter, var="GDB_INDEX"), - default="auto", -) - -env_vars.Add( - "GDB_PPTEST_PYONLY", - help="""Set the boolean (on/off true/false 1/0) to enable SCons to only emit the .py files - needed for testing GDB pretty printers. 
- - Useful for when the executable files for testing GDB pretty printers are more rapidly built - using Ninja and the install-dist-test target (separately from SCons).""", - converter=functools.partial(bool_var_converter, var="GDB_PPTEST_PYONLY"), - default="False", -) - -env_vars.Add( - "ENABLE_BUILD_RETRY", - help="Set the boolean (auto, on/off true/false 1/0) to enable retrying a compile or link commands failures.", - converter=functools.partial(bool_var_converter, var="ENABLE_BUILD_RETRY"), - default="False", -) - -env_vars.Add( - "TAPI", - help="Configures the path to the 'tapi' (an Xcode) utility", -) - -env_vars.Add( - "TARGET_ARCH", - help="Sets the architecture to build for", - converter=variable_arch_converter, - default=None, -) - -env_vars.Add( - "TARGET_OS", - help="Sets the target OS to build for", - default=mongo_platform.get_running_os_name(), -) - -env_vars.Add( - "TOOLS", - help="Sets the list of SCons tools to add to the environment", - converter=variable_tools_converter, - default=decide_platform_tools(), -) - -env_vars.Add( - "VARIANT_DIR", - help="Sets the name (or generator function) for the variant directory", - default=build_profile.VARIANT_DIR, -) - -env_vars.Add( - "VERBOSE", - help="Controls build verbosity (auto, on/off true/false 1/0)", - default="auto", -) - -env_vars.Add( - "WINDOWS_OPENSSL_BIN", - help="Sets the path to the openssl binaries for packaging", - default="c:/openssl/bin", -) - -env_vars.Add( - PathVariable( - "LOCAL_TMPDIR", - help="Set the TMPDIR when running tests.", - default="$BUILD_ROOT/tmp_test_data", - validator=PathVariable.PathAccept, - ), -) - -env_vars.AddVariables( - ("BUILD_METRICS_EVG_TASK_ID", "Evergreen task ID to add to build metrics data."), - ("BUILD_METRICS_EVG_BUILD_VARIANT", "Evergreen build variant to add to build metrics data."), -) -for tool in ["build_metrics", "split_dwarf"]: - try: - Tool(tool).options(env_vars) - except ImportError as exc: - print(f"WARNING: The {tool} tool might not work as 
intended due to a failed import:\n{exc}") - pass - -# -- Validate user provided options -- - -# A dummy environment that should *only* have the variables we have set. In practice it has -# some other things because SCons isn't quite perfect about keeping variable initialization -# scoped to Tools, but it should be good enough to do validation on any Variable values that -# came from the command line or from loaded files. -variables_only_env = Environment( - # Disable platform specific variable injection - platform=(lambda x: ()), - # But do *not* load any tools, since those might actually set variables. Note that this - # causes the value of our TOOLS variable to have no effect. - tools=[], - # Use the Variables specified above. - variables=env_vars, -) - -# don't run configure if user calls --help -if GetOption("help"): - try: - Help("\nThe following variables may also be set like scons VARIABLE=value\n", append=True) - Help(env_vars.GenerateHelpText(variables_only_env, sort=True), append=True) - Help( - "\nThe 'list-targets' target can be built to list useful comprehensive build targets\n", - append=True, - ) - except TypeError: - # The append=true kwarg is only supported in scons>=2.4. Without it, calls to Help() clobber - # the automatically generated options help, which we don't want. Users on older scons - # versions will need to use --variables-help to learn about which variables we support. - pass - - Return() - -if ("CC" in variables_only_env) != ("CXX" in variables_only_env): - print("Cannot customize C compiler without customizing C++ compiler, and vice versa") - Exit(1) - -# --- environment setup --- - -# If the user isn't using the # to indicate top-of-tree or $ to expand a variable, forbid -# relative paths. Relative paths don't really work as expected, because they end up relative to -# the top level SConstruct, not the invoker's CWD. We could in theory fix this with -# GetLaunchDir, but that seems a step too far. 
-buildDir = get_option("build-dir").rstrip("/") -if buildDir[0] not in ["$", "#"]: - if not os.path.isabs(buildDir): - print("Do not use relative paths with --build-dir") - Exit(1) - -cacheDir = get_option("cache-dir").rstrip("/") -if cacheDir[0] not in ["$", "#"]: - if not os.path.isabs(cacheDir): - print("Do not use relative paths with --cache-dir") - Exit(1) - -sconsDataDir = Dir(buildDir).Dir("scons") -SConsignFile(str(sconsDataDir.File("sconsign.py3"))) - - -def printLocalInfo(): - import sys - - import SCons - - print(("scons version: " + SCons.__version__)) - print(("python version: " + " ".join([repr(i) for i in sys.version_info]))) - - -printLocalInfo() - -boostLibs = ["filesystem", "program_options", "system", "iostreams", "thread", "log"] - -onlyServer = len(COMMAND_LINE_TARGETS) == 0 or ( - len(COMMAND_LINE_TARGETS) == 1 and str(COMMAND_LINE_TARGETS[0]) in ["mongod", "mongos", "test"] -) - -noshell = has_option("noshell") - -jsEngine = get_option("js-engine") - -serverJs = get_option("server-js") == "on" - -if not serverJs and not jsEngine: - print("Warning: --server-js=off is not needed with --js-engine=none") - -# We defer building the env until we have determined whether we want certain values. Some values -# in the env actually have semantics for 'None' that differ from being absent, so it is better -# to build it up via a dict, and then construct the Environment in one shot with kwargs. -# -# Yes, BUILD_ROOT vs BUILD_DIR is confusing. Ideally, BUILD_DIR would actually be called -# VARIANT_DIR, and at some point we should probably do that renaming. Until we do though, we -# also need an Environment variable for the argument to --build-dir, which is the parent of all -# variant dirs. For now, we call that BUILD_ROOT. If and when we s/BUILD_DIR/VARIANT_DIR/g, -# then also s/BUILD_ROOT/BUILD_DIR/g. 
-envDict = dict( - BUILD_ROOT=buildDir, - BUILD_DIR=make_variant_dir_generator(), - DIST_ARCHIVE_SUFFIX=".tgz", - MODULE_BANNERS=[], - MODULE_INJECTORS=dict(), - PYTHON="$( {} $)".format(sys.executable), - SERVER_ARCHIVE="${SERVER_DIST_BASENAME}${DIST_ARCHIVE_SUFFIX}", - UNITTEST_ALIAS="install-unittests", - # TODO: Move unittests.txt to $BUILD_DIR, but that requires - # changes to MCI. - UNITTEST_LIST="$BUILD_ROOT/unittests.txt", - PRETTY_PRINTER_TEST_ALIAS="install-pretty-printer-tests", - PRETTY_PRINTER_TEST_LIST="$BUILD_ROOT/pretty_printer_tests.txt", - LIBFUZZER_TEST_ALIAS="install-fuzzertests", - LIBFUZZER_TEST_LIST="$BUILD_ROOT/libfuzzer_tests.txt", - INTEGRATION_TEST_ALIAS="install-integration-tests", - INTEGRATION_TEST_LIST="$BUILD_ROOT/integration_tests.txt", - BENCHMARK_ALIAS="install-benchmarks", - BENCHMARK_LIST="$BUILD_ROOT/benchmarks.txt", - CONFIGUREDIR="$BUILD_ROOT/scons/$VARIANT_DIR/sconf_temp", - CONFIGURELOG="$BUILD_ROOT/scons/config.log", - LIBDEPS_TAG_EXPANSIONS=[], -) - -# By default, we will get the normal SCons tool search. But if the -# user has opted into the next gen tools, add our experimental tool -# directory into the default toolpath, ahead of whatever is already in -# there so it overrides it. -if get_option("build-tools") == "next": - SCons.Tool.DefaultToolpath.insert(0, os.path.abspath("site_scons/site_tools/next")) - -env = Environment(variables=env_vars, **envDict) -del envDict -env.AddMethod(lambda env, name, **kwargs: add_option(name, **kwargs), "AddOption") - -env.Prepend(CCFLAGS="$TOOLCHAIN_CCFLAGS") -env.Prepend(LINKFLAGS="$TOOLCHAIN_LINKFLAGS") - -if ARGUMENTS.get("CC") and ARGUMENTS.get("CXX"): - os.environ["CC"] = env.get("CC") - os.environ["CXX"] = env.get("CXX") - os.environ["USE_NATIVE_TOOLCHAIN"] = "1" - -# Early load to setup env functions -tool = Tool("integrate_bazel") -tool.exists(env) -env.PrefetchToolchain(env.get("MONGO_TOOLCHAIN_VERSION")) - -# The placement of this is intentional. 
Here we setup an atexit method to store tooling metrics. -# We should only register this function after env, env_vars and the parser have been properly initialized. -SConsToolingMetrics.register_metrics( - utc_starttime=datetime.utcnow(), - artifact_dir=env.Dir("$BUILD_DIR").get_abspath(), - env_vars=env_vars, - env=env, - parser=_parser, -) - -if get_option("build-metrics"): - env["BUILD_METRICS_ARTIFACTS_DIR"] = "$BUILD_ROOT/$VARIANT_DIR" - env.Tool("build_metrics") - env.AddBuildMetricsMetaData("evg_id", env.get("BUILD_METRICS_EVG_TASK_ID", "UNKNOWN")) - env.AddBuildMetricsMetaData("variant", env.get("BUILD_METRICS_EVG_BUILD_VARIANT", "UNKNOWN")) - -env.Execute(SCons.Defaults.Mkdir(env.Dir("$LOCAL_TMPDIR"))) - -if get_option("cache-signature-mode") == "validate": - validate_cache_dir = Tool("validate_cache_dir") - if validate_cache_dir.exists(env): - validate_cache_dir(env) - else: - env.FatalError("Failed to enable validate_cache_dir tool.") - -# Only print the spinner if stdout is a tty -if sys.stdout.isatty(): - Progress(["-\r", "\\\r", "|\r", "/\r"], interval=50) - - -# We are going to start running conf tests soon, so setup -# --disable-warnings-as-errors as soon as possible. 
-def create_werror_generator(flagname): - werror_conftests = "configure" not in get_option("disable-warnings-as-errors") - werror_source = "source" not in get_option("disable-warnings-as-errors") - - def generator(target, source, env, for_signature): - if werror_conftests and "conftest" in str(target[0]): - return flagname - - if werror_source: - return flagname - - return str() - - return generator - - -env.Append( - CCFLAGS=["$CCFLAGS_GENERATE_WERROR"], - CCFLAGS_GENERATE_WERROR=create_werror_generator("$CCFLAGS_WERROR"), - CXXFLAGS=["$CXXFLAGS_GENERATE_WERROR"], - CXXFLAGS_GENERATE_WERROR=create_werror_generator("$CXXFLAGS_WERROR"), - LINKFLAGS=["$LINKFLAGS_GENERATE_WERROR"], - LINKFLAGS_GENERATE_WERROR=create_werror_generator("$LINKFLAGS_WERROR"), -) - - -def non_conf_ccflags_gen(target, source, env, for_signature): - if "conftest" in str(target[0]): - return "" - return "$NON_CONF_CCFLAGS" - - -def non_conf_linkflags_gen(target, source, env, for_signature): - if "conftest" in str(target[0]): - return "" - return "$NON_CONF_LINKFLAGS" - - -env["_NON_CONF_CCFLAGS_GEN"] = non_conf_ccflags_gen -env["_NON_CONF_LINKFLAGS_GEN"] = non_conf_linkflags_gen - -env.Append(CCFLAGS="$_NON_CONF_CCFLAGS_GEN") -env.Append(LINKFLAGS="$_NON_CONF_LINKFLAGS_GEN") - -for var in ["CC", "CXX"]: - if var not in env: - continue - path = env[var] - print("{} is {}".format(var, path)) - if not os.path.isabs(path): - which = shutil.which(path) - if which is None: - print("{} was not found in $PATH".format(path)) - else: - print("{} found in $PATH at {}".format(path, which)) - path = which - - realpath = os.path.realpath(path) - if realpath != path: - print("{} resolves to {}".format(path, realpath)) - -env.AddMethod(mongo_platform.env_os_is_wrapper, "TargetOSIs") -env.AddMethod(mongo_platform.env_get_os_name_wrapper, "GetTargetOSName") - - -def conf_error(env, msg, *args): - print(msg.format(*args)) - print("See {0} for details".format(env.File("$CONFIGURELOG").abspath)) - Exit(1) - - 
-env.AddMethod(fatal_error, "FatalError") -env.AddMethod(conf_error, "ConfError") - -# Normalize the VERBOSE Option, and make its value available as a -# function. -if env["VERBOSE"] == "auto": - env["VERBOSE"] = not sys.stdout.isatty() and env.get("__NINJA_NO") != "1" -else: - try: - env["VERBOSE"] = to_boolean(env["VERBOSE"]) - except ValueError as e: - env.FatalError(f"Error setting VERBOSE variable: {e}") -env.AddMethod(lambda env: env["VERBOSE"], "Verbose") - -env.Append( - LINKFLAGS=['${_concat(COMPILER_EXEC_PREFIX_OPT, LINKFLAGS_COMPILER_EXEC_PREFIX, "", __env__)}'] -) - -# Normalize the ICECC_DEBUG option -try: - env["ICECC_DEBUG"] = to_boolean(env["ICECC_DEBUG"]) -except ValueError as e: - env.FatalError(f"Error setting ICECC_DEBUG variable: {e}") - -if has_option("variables-help"): - print(env_vars.GenerateHelpText(env)) - Exit(0) - -unknown_vars = env_vars.UnknownVariables() -if unknown_vars: - env.FatalError("Unknown variables specified: {0}", ", ".join(list(unknown_vars.keys()))) - -install_actions.setup(env, get_option("install-action")) - - -if env.TargetOSIs("windows") and os.path.exists( - env.File("#/src/mongo/db/modules/enterprise/SConscript").abspath -): - # the sasl zip can be rebuilt by following the instructions at: - # https://github.com/mongodb-forks/cyrus-sasl/blob/mongo-sasl-2-1-28/README.md - import mongo.download_windows_sasl - - mongo.download_windows_sasl.download_sasl(env) - -detectEnv = env.Clone() - -# Identify the toolchain in use. 
We currently support the following: -# These macros came from -# http://nadeausoftware.com/articles/2012/10/c_c_tip_how_detect_compiler_name_and_version_using_compiler_predefined_macros -toolchain_macros = { - "GCC": "defined(__GNUC__) && !defined(__clang__)", - "clang": "defined(__clang__)", - "MSVC": "defined(_MSC_VER)", -} - - -def CheckForToolchain(context, toolchain, lang_name, compiler_var, source_suffix): - test_body = textwrap.dedent( - """ - #if {0} - /* we are using toolchain {0} */ - #else - #error - #endif - """.format(toolchain_macros[toolchain]) - ) - - print_tuple = (lang_name, context.env[compiler_var], toolchain) - context.Message('Checking if %s compiler "%s" is %s... ' % print_tuple) - - # Strip indentation from the test body to ensure that the newline at the end of the - # endif is the last character in the file (rather than a line of spaces with no - # newline), and that all of the preprocessor directives start at column zero. Both of - # these issues can trip up older toolchains. 
- result = context.TryCompile(test_body, source_suffix) - context.Result(result) - return result - - -endian = get_option("endian") - -if endian == "auto": - endian = sys.byteorder - -processor_macros = { - "aarch64": {"endian": "little", "check": "(defined(__arm64__) || defined(__aarch64__))"}, - "emscripten": {"endian": "little", "check": "(defined(__EMSCRIPTEN__))"}, - "ppc64le": {"endian": "little", "check": "(defined(__powerpc64__))"}, - "riscv64": {"endian": "little", "check": "(defined(__riscv)) && (__riscv_xlen == 64)"}, - "s390x": {"endian": "big", "check": "(defined(__s390x__))"}, - "x86_64": {"endian": "little", "check": "(defined(__x86_64) || defined(_M_AMD64))"}, -} - - -def CheckForProcessor(context, which_arch): - def run_compile_check(arch): - if not endian == processor_macros[arch]["endian"]: - return False - - test_body = """ - #if {0} - /* Detected {1} */ - #else - #error not {1} - #endif - """.format(processor_macros[arch]["check"], arch) - - return context.TryCompile(textwrap.dedent(test_body), ".c") - - if which_arch: - ret = run_compile_check(which_arch) - context.Message("Checking if target processor is %s " % which_arch) - context.Result(ret) - return ret - - for k in list(processor_macros.keys()): - ret = run_compile_check(k) - if ret: - context.Result("Detected a %s processor" % k) - return k - - context.Result("Could not detect processor model/architecture") - return False - - -# Taken from http://nadeausoftware.com/articles/2012/01/c_c_tip_how_use_compiler_predefined_macros_detect_operating_system -os_macros = { - "windows": "defined(_WIN32)", - "solaris": "defined(__sun)", - "freebsd": "defined(__FreeBSD__)", - "openbsd": "defined(__OpenBSD__)", - "iOS": "defined(__APPLE__) && TARGET_OS_IOS && !TARGET_OS_SIMULATOR", - "iOS-sim": "defined(__APPLE__) && TARGET_OS_IOS && TARGET_OS_SIMULATOR", - "tvOS": "defined(__APPLE__) && TARGET_OS_TV && !TARGET_OS_SIMULATOR", - "tvOS-sim": "defined(__APPLE__) && TARGET_OS_TV && TARGET_OS_SIMULATOR", - 
"watchOS": "defined(__APPLE__) && TARGET_OS_WATCH && !TARGET_OS_SIMULATOR", - "watchOS-sim": "defined(__APPLE__) && TARGET_OS_WATCH && TARGET_OS_SIMULATOR", - # NOTE: Once we have XCode 8 required, we can rely on the value of TARGET_OS_OSX. In case - # we are on an older XCode, use TARGET_OS_MAC and TARGET_OS_IPHONE. We don't need to correct - # the above declarations since we will never target them with anything other than XCode 8. - "macOS": "defined(__APPLE__) && (TARGET_OS_OSX || (TARGET_OS_MAC && !TARGET_OS_IPHONE))", - "linux": "defined(__linux__)", - "android": "defined(__ANDROID__)", - "emscripten": "defined(__EMSCRIPTEN__)", -} - - -def CheckForOS(context, which_os): - test_body = """ - #if defined(__APPLE__) - #include - #endif - #if {0} - /* detected {1} */ - #else - #error - #endif - """.format(os_macros[which_os], which_os) - context.Message("Checking if target OS {0} is supported by the toolchain... ".format(which_os)) - ret = context.TryCompile(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - -def CheckForCXXLink(context): - test_body = """ - #include - #include - - int main() { - std::cout << "Hello, World" << std::endl; - return EXIT_SUCCESS; - } - """ - context.Message("Checking that the C++ compiler can link a C++ program... 
") - ret = context.TryLink(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - -detectSystem = Configure( - detectEnv, - help=False, - custom_tests={ - "CheckForToolchain": CheckForToolchain, - "CheckForProcessor": CheckForProcessor, - "CheckForOS": CheckForOS, - }, -) - -toolchain_search_sequence = ["GCC", "clang"] -if mongo_platform.is_running_os("windows"): - toolchain_search_sequence = ["MSVC", "clang", "GCC"] - -detected_toolchain = None -for candidate_toolchain in toolchain_search_sequence: - if detectSystem.CheckForToolchain(candidate_toolchain, "C++", "CXX", ".cpp"): - detected_toolchain = candidate_toolchain - break - -if not detected_toolchain: - env.ConfError("Couldn't identify the C++ compiler") - -if not detectSystem.CheckForToolchain(detected_toolchain, "C", "CC", ".c"): - env.ConfError("C compiler does not match identified C++ compiler") - - -# Now that we've detected the toolchain, we add methods to the env -# to get the canonical name of the toolchain and to test whether -# scons is using a particular toolchain. 
-def get_toolchain_name(self): - return detected_toolchain.lower() - - -def is_toolchain(self, *args): - actual_toolchain = self.ToolchainName() - for v in args: - if v.lower() == actual_toolchain: - return True - return False - - -env.AddMethod(get_toolchain_name, "ToolchainName") -env.AddMethod(is_toolchain, "ToolchainIs") - -releaseBuild = get_option("release") == "on" -debugBuild = get_option("dbg") == "on" -debug_symbols = get_option("debug-symbols") != "off" -optBuild = mongo_generators.get_opt_options(env) - -if env.get("ENABLE_BUILD_RETRY"): - if get_option("ninja") != "disabled": - print("ENABLE_BUILD_RETRY not compatible with ninja, disabling ENABLE_BUILD_RETRY.") - else: - env["BUILD_RETRY_ATTEMPTS"] = 10 - env["BUILD_RETRY_MAX_DELAY_SECONDS"] = 120 - env.Tool("build_auto_retry") - -if env.ToolchainIs("clang"): - # LLVM utilizes the stack extensively without optimization enabled, which - # causes the built product to easily blow through our 1M stack size whenever - # either gcov or sanitizers are enabled. 
Ref: SERVER-65684 - if has_option("gcov") and optBuild not in ("on", "debug"): - env.FatalError( - "Error: A clang --gcov build must have either --opt=debug or --opt=on to " - + "prevent crashes due to excessive stack usage" - ) - - if has_option("sanitize") and get_option("sanitize") != "" and optBuild not in ("on", "debug"): - env.FatalError( - "Error: A clang --sanitize build must have either --opt=debug or --opt=on " - + "to prevent crashes due to excessive stack usage" - ) - -if releaseBuild and (debugBuild or optBuild != "on"): - env.FatalError( - "Error: A --release build may not have debugging, and must have full optimization" - ) - -if env["TARGET_ARCH"]: - if not detectSystem.CheckForProcessor(env["TARGET_ARCH"]): - env.ConfError("Could not detect processor specified in TARGET_ARCH variable") -else: - detected_processor = detectSystem.CheckForProcessor(None) - if not detected_processor: - env.ConfError("Failed to detect a supported target architecture") - env["TARGET_ARCH"] = detected_processor - -if env["TARGET_OS"] not in os_macros: - print("No special config for [{0}] which probably means it won't work".format(env["TARGET_OS"])) -elif not detectSystem.CheckForOS(env["TARGET_OS"]): - env.ConfError("TARGET_OS ({0}) is not supported by compiler", env["TARGET_OS"]) - -detectSystem.Finish() - -if env.TargetOSIs("posix"): - if env.ToolchainIs("gcc", "clang"): - env.Append( - CCFLAGS_WERROR=["-Werror"], - CXXFLAGS_WERROR=["-Werror=unused-result"] if env.ToolchainIs("clang") else [], - LINKFLAGS_WERROR=["-Wl,--fatal-warnings"] if not env.TargetOSIs("darwin") else [], - ) -elif env.TargetOSIs("windows"): - env.Append(CCFLAGS_WERROR=["/WX"]) - -if env.ToolchainIs("clang"): - - def assembler_with_cpp_gen(target, source, env, for_signature): - if source[0].get_suffix() == ".sx": - return "-x assembler-with-cpp" - - env["CLANG_ASSEMBLER_WITH_CPP"] = assembler_with_cpp_gen - env.Append(ASFLAGS=["$CLANG_ASSEMBLER_WITH_CPP"]) - -env["CC_VERSION"] = 
mongo_toolchain.get_toolchain_ver(env, "CC") -env["CXX_VERSION"] = mongo_toolchain.get_toolchain_ver(env, "CXX") - -if not env["HOST_ARCH"]: - env["HOST_ARCH"] = env["TARGET_ARCH"] - -# In some places we have POSIX vs Windows cpp files, and so there's an additional -# env variable to interpolate their names in child sconscripts - -env["TARGET_OS_FAMILY"] = "posix" if env.TargetOSIs("posix") else env.GetTargetOSName() - -if env.TargetOSIs("linux") or "tcmalloc-google" == get_option("allocator"): - # tcmalloc from google has some requirements on the kernel version for rseq support - # here we check if it should be available - try: - kernel_version = platform.release().split(".") - kernel_major = int(kernel_version[0]) - kernel_minor = int(kernel_version[1]) - except (ValueError, IndexError): - print( - f"Failed to extract kernel major and minor versions, tcmalloc-google will not be available for use: {kernel_version}" - ) - kernel_major = 0 - kernel_minor = 0 - -# Normalize the allocator option and store it in the Environment. It -# would be nicer to use SetOption here, but you can't reset user -# options for some strange reason in SCons. Instead, we store this -# option as a new variable in the environment. 
-if get_option("allocator") == "auto": - if env.TargetOSIs("linux") and env["TARGET_ARCH"] in ("x86_64", "aarch64"): - env["MONGO_ALLOCATOR"] = "tcmalloc-google" - - # googles tcmalloc uses the membarrier() system call which was added in Linux 4.3, - # so fall back to gperf implementation for older kernels - if kernel_major < 4 or (kernel_major == 4 and kernel_minor < 3): - env["MONGO_ALLOCATOR"] = "tcmalloc-gperf" - - elif env.TargetOSIs("windows") or ( - env.TargetOSIs("linux") and env["TARGET_ARCH"] in ("ppc64le", "s390x") - ): - env["MONGO_ALLOCATOR"] = "tcmalloc-gperf" - else: - env["MONGO_ALLOCATOR"] = "system" -else: - env["MONGO_ALLOCATOR"] = get_option("allocator") - - if env["MONGO_ALLOCATOR"] == "tcmalloc-google": - if kernel_major < 4 or (kernel_major == 4 and kernel_minor < 3): - env.ConfError( - f"tcmalloc-google allocator only supported on linux kernel 4.3 or greater: kenerl verison={platform.release()}" - ) - -if env["MONGO_ALLOCATOR"] == "tcmalloc-google": - env.Append(CPPDEFINES=["ABSL_ALLOCATOR_NOTHROW"]) - -if has_option("cache"): - if has_option("gcov"): - env.FatalError("Mixing --cache and --gcov doesn't work correctly yet. See SERVER-11084") - env.CacheDir(str(env.Dir(cacheDir))) - -# Normalize the link model. If it is auto, then for now both developer and release builds -# use the "static" mode. Someday later, we probably want to make the developer build default -# dynamic. -link_model = get_option("link-model") -if link_model == "auto": - link_model = "static" - -if link_model.startswith("dynamic") and get_option("install-action") == "symlink": - env.FatalError( - f"Options '--link-model={link_model}' not supported with '--install-action={get_option('install-action')}'." - ) - -if ( - link_model == "dynamic" - and env.TargetOSIs("darwin") - and not get_option("force-macos-dynamic-link") -): - macos_version_message = textwrap.dedent("""\ - link-model=dynamic us only supported on macos version 12 or higher. 
- This is due to a 512 dylib RUNTIME limit on older macos. See this post for - more information: https://developer.apple.com/forums//thread/708366?login=true&page=1#717495022 - Use '--force-macos-dynamic-link' to bypass this check. - """) - - try: - macos_version_major = int(platform.mac_ver()[0].split(".")[0]) - if macos_version_major < 12: - env.FatalError( - textwrap.dedent(f"""\ - Macos version detected: {macos_version_major} - """) - + macos_version_message - ) - except (IndexError, TypeError) as exc: - env.FatalError( - textwrap.dedent(f"""\ - Failed to detect macos version: {exc} - """) - + macos_version_message - ) - -# libunwind configuration. -# In which the following globals are set and normalized to bool: -# - use_libunwind -# - use_system_libunwind -# - use_vendored_libunwind -use_libunwind = get_option("use-libunwind") -use_system_libunwind = use_system_version_of_library("libunwind") - -# Assume system libunwind works if it's installed and selected. -can_use_libunwind = ( - use_system_libunwind - or env.TargetOSIs("linux") - and (env["TARGET_ARCH"] in ("x86_64", "aarch64", "ppc64le", "s390x")) -) - -if use_libunwind == "off": - use_libunwind = False - use_system_libunwind = False -elif use_libunwind == "on": - use_libunwind = True - if not can_use_libunwind: - env.ConfError("libunwind not supported on target platform") - Exit(1) -elif use_libunwind == "auto": - use_libunwind = can_use_libunwind - -use_vendored_libunwind = use_libunwind and not use_system_libunwind -env["USE_VENDORED_LIBUNWIND"] = use_vendored_libunwind -if use_system_libunwind and not use_libunwind: - print("Error: --use-system-libunwind requires --use-libunwind") - Exit(1) - -if get_option("visibility-support") == "auto": - visibility_annotations_enabled = not env.TargetOSIs("windows") and link_model.startswith( - "dynamic" - ) -else: - visibility_annotations_enabled = get_option("visibility-support") == "on" - -# Windows can't currently support anything other than 'object' or 
'static', until -# we have annotated functions for export. -if env.TargetOSIs("windows") and not visibility_annotations_enabled: - if link_model not in ["object", "static", "dynamic-sdk"]: - env.FatalError( - "Windows builds must use the 'object', 'dynamic-sdk', or 'static' link models" - ) - -# TODO(SERVER-85904): remove check when object mode & LTO are supported in bazel -if link_model == "object": - env.FatalError( - 'Bazel-enabled builds currently do not support the "object" link model. Reffer to SERVER-85904 for more info.' - ) - -# The 'object' mode for libdeps is enabled by setting _LIBDEPS to $_LIBDEPS_OBJS. The other two -# modes operate in library mode, enabled by setting _LIBDEPS to $_LIBDEPS_LIBS. -env["_LIBDEPS"] = "$_LIBDEPS_OBJS" if link_model == "object" else "$_LIBDEPS_LIBS" - -env["BUILDERS"]["ProgramObject"] = env["BUILDERS"]["StaticObject"] -env["BUILDERS"]["LibraryObject"] = env["BUILDERS"]["StaticObject"] - -env["SHARPREFIX"] = "$LIBPREFIX" -env["SHARSUFFIX"] = "${SHLIBSUFFIX}${LIBSUFFIX}" -env["BUILDERS"]["SharedArchive"] = SCons.Builder.Builder( - action=env["BUILDERS"]["StaticLibrary"].action, - emitter="$SHAREMITTER", - prefix="$SHARPREFIX", - suffix="$SHARSUFFIX", - src_suffix=env["BUILDERS"]["SharedLibrary"].src_suffix, -) - -# Teach object builders how to build underlying generated types -for builder in ["SharedObject", "StaticObject"]: - env["BUILDERS"][builder].add_src_builder("Protoc") - - -# These allow delayed evaluation of the AIB values for the default values of -# the corresponding command line variables -def protoc_var_gen(env, target, source, for_signature): - return env.File("$DESTDIR/$PREFIX_BINDIR/protobuf_compiler$PROGSUFFIX") - - -env["PROTOC_VAR_GEN"] = protoc_var_gen - - -def protoc_grpc_plugin_var_gen(env, target, source, for_signature): - return env.File("$DESTDIR/$PREFIX_BINDIR/grpc_cpp_plugin$PROGSUFFIX") - - -env["PROTOC_GRPC_PLUGIN_GEN"] = protoc_grpc_plugin_var_gen - -if link_model.startswith("dynamic"): - if 
link_model == "dynamic" and visibility_annotations_enabled: - - def visibility_cppdefines_generator(target, source, env, for_signature): - if "MONGO_API_NAME" not in env: - return None - return "MONGO_API_${MONGO_API_NAME}" - - env["MONGO_VISIBILITY_CPPDEFINES_GENERATOR"] = visibility_cppdefines_generator - - def visibility_shccflags_generator(target, source, env, for_signature): - if env.get("MONGO_API_NAME"): - return "-fvisibility=hidden" - return None - - if not env.TargetOSIs("windows"): - env["MONGO_VISIBILITY_SHCCFLAGS_GENERATOR"] = visibility_shccflags_generator - - env.AppendUnique( - CPPDEFINES=[ - "MONGO_USE_VISIBILITY", - "$MONGO_VISIBILITY_CPPDEFINES_GENERATOR", - ], - SHCCFLAGS=[ - "$MONGO_VISIBILITY_SHCCFLAGS_GENERATOR", - ], - ) - - def library(env, target, source, *args, **kwargs): - sharedLibrary = env.SharedLibrary(target, source, *args, **kwargs) - sharedArchive = env.SharedArchive(target, source=sharedLibrary[0].sources, *args, **kwargs) - sharedLibrary.extend(sharedArchive) - return sharedLibrary - - env["BUILDERS"]["Library"] = library - env["BUILDERS"]["LibraryObject"] = env["BUILDERS"]["SharedObject"] - - # TODO: Ideally, the conditions below should be based on a - # detection of what linker we are using, not the local OS, but I - # doubt very much that we will see the mach-o linker on anything - # other than Darwin, or a BFD/sun-esque linker elsewhere. - - # On Darwin, we need to tell the linker that undefined symbols are - # resolved via dynamic lookup; otherwise we get build failures. On - # other unixes, we need to suppress as-needed behavior so that - # initializers are ensured present, even if there is no visible - # edge to the library in the symbol graph. - # - # NOTE: The darwin linker flag is only needed because the library - # graph is not a DAG. Once the graph is a DAG, we will require all - # edges to be expressed, and we should drop the flag. 
When that - # happens, we should also add -z,defs flag on ELF platforms to - # ensure that missing symbols due to unnamed dependency edges - # result in link errors. - # - # NOTE: The `illegal_cyclic_or_unresolved_dependencies_allowlisted` - # tag can be applied to a library to indicate that it does not (or - # cannot) completely express all of its required link dependencies. - # This can occur for four reasons: - # - # - No unique provider for the symbol: Some symbols do not have a - # unique dependency that provides a definition, in which case it - # is impossible for the library to express a dependency edge to - # resolve the symbol. - # - # - The library is part of a cycle: If library A depends on B, - # which depends on C, which depends on A, then it is impossible - # to express all three edges in SCons, since otherwise there is - # no way to sequence building the libraries. The cyclic - # libraries actually work at runtime, because some parent object - # links all of them. - # - # - The symbol is provided by an executable into which the library - # will be linked. The mongo::inShutdown symbol is a good - # example. - # - # - The symbol is provided by a third-party library, outside of our - # control. - # - # All of these are defects in the linking model. In an effort to - # eliminate these issues, we have begun tagging those libraries - # that are affected, and requiring that all non-tagged libraries - # correctly express all dependencies. As we repair each defective - # library, we can remove the tag. When all the tags are removed - # the graph will be acyclic. Libraries which are incomplete for the - # final reason, "libraries outside of our control", may remain for - # reasons beyond our control. Such libraries ideally should - # have no dependencies (and thus be leaves in our linking DAG). - # If that condition is met, then the graph will be acyclic. 
- - if env.TargetOSIs("darwin"): - if link_model.startswith("dynamic"): - print( - "WARNING: Building MongoDB server with dynamic linking " - + "on macOS is not supported. Static linking is recommended." - ) - - if link_model == "dynamic-strict": - # Darwin is strict by default - pass - else: - - def libdeps_tags_expand_incomplete(source, target, env, for_signature): - # On darwin, since it is strict by default, we need to add a flag - # when libraries are tagged incomplete. - if "illegal_cyclic_or_unresolved_dependencies_allowlisted" in target[ - 0 - ].get_env().get("LIBDEPS_TAGS", []): - return ["-Wl,-undefined,dynamic_lookup"] - return [] - - env["LIBDEPS_TAG_EXPANSIONS"].append(libdeps_tags_expand_incomplete) - elif env.TargetOSIs("windows"): - if link_model == "dynamic-strict": - # Windows is strict by default - pass - else: - - def libdeps_tags_expand_incomplete(source, target, env, for_signature): - # On windows, since it is strict by default, we need to add a flag - # when libraries are tagged incomplete. - if "illegal_cyclic_or_unresolved_dependencies_allowlisted" in target[ - 0 - ].get_env().get("LIBDEPS_TAGS", []): - return ["/FORCE:UNRESOLVED"] - return [] - - env["LIBDEPS_TAG_EXPANSIONS"].append(libdeps_tags_expand_incomplete) - else: - env.AppendUnique(LINKFLAGS=["-Wl,--no-as-needed"]) - - # Using zdefs doesn't work at all with the sanitizers - if not has_option("sanitize") and get_option("sanitize") != "": - if link_model == "dynamic-strict": - env.AppendUnique(SHLINKFLAGS=["-Wl,-z,defs"]) - else: - # On BFD/gold linker environments, which are not strict by - # default, we need to add a flag when libraries are not - # tagged incomplete. 
- def libdeps_tags_expand_incomplete(source, target, env, for_signature): - if "illegal_cyclic_or_unresolved_dependencies_allowlisted" not in target[ - 0 - ].get_env().get("LIBDEPS_TAGS", []): - return ["-Wl,-z,defs"] - return [] - - env["LIBDEPS_TAG_EXPANSIONS"].append(libdeps_tags_expand_incomplete) - -# Enable the fast decider if explicitly requested or if in 'auto' mode -# and not in conflict with other options like the ninja option which -# sets its own decider. -if ( - get_option("ninja") == "disabled" - and get_option("build-fast-and-loose") == "on" - or (get_option("build-fast-and-loose") == "auto" and not has_option("release")) -): - # See http://www.scons.org/wiki/GoFastButton for details - env.Decider("MD5-timestamp") - env.SetOption("max_drift", 1) - -# If the user has requested the git decider, enable it if it is available. We want to do this after -# we set the basic decider above, so that we 'chain' to that one. -if get_option("git-decider") == "on": - git_decider = Tool("git_decider") - if git_decider.exists(env): - git_decider(env) - -# On non-windows platforms, we may need to differentiate between flags being used to target an -# executable (like -fPIE), vs those being used to target a (shared) library (like -fPIC). To do so, -# we inject a new family of SCons variables PROG*FLAGS, by reaching into the various COMs. 
-if not env.TargetOSIs("windows"): - env["CCCOM"] = env["CCCOM"].replace("$CCFLAGS", "$PROGCCFLAGS") - env["CXXCOM"] = env["CXXCOM"].replace("$CCFLAGS", "$PROGCCFLAGS") - env["PROGCCFLAGS"] = ["$CCFLAGS"] - - env["CCCOM"] = env["CCCOM"].replace("$CFLAGS", "$PROGCFLAGS") - env["PROGCFLAGS"] = ["$CFLAGS"] - - env["CXXCOM"] = env["CXXCOM"].replace("$CXXFLAGS", "$PROGCXXFLAGS") - env["PROGCXXFLAGS"] = ["$CXXFLAGS"] - - env["LINKCOM"] = env["LINKCOM"].replace("$LINKFLAGS", "$PROGLINKFLAGS") - env["PROGLINKFLAGS"] = ["$LINKFLAGS"] - - # ASPPFLAGS is used for assembler commands, this condition below assumes assembler files - # will be only directly assembled in librarys and not programs - if link_model.startswith("dynamic"): - env.Append(ASPPFLAGS=["-fPIC"]) - else: - env.Append(ASPPFLAGS=["-fPIE"]) - -# When it is necessary to supply additional SHLINKFLAGS without modifying the toolset default, -# following appends contents of SHLINKFLAGS_EXTRA variable to the linker command -env.AppendUnique(SHLINKFLAGS=["$SHLINKFLAGS_EXTRA"]) - - -class ForceVerboseConftest: - """ - This class allows for configurable substition calls to enable forcing - the conftest to use verbose logs even when verbose mode is not specified. - """ - - def __init__(self, msg): - self.msg = msg - - def __call__(self, target, source, env, for_signature): - for t in target: - # TODO: SERVER-60915 switch to SCons api conftest check - if "conftest" in str(t): - return None - return self.msg - - -if not env.Verbose(): - # Even though we are not in Verbose mode, conftest logs should - # always be verbose, because they go to a file and not seen - # by the user anyways. 
- env.Append(CCCOMSTR=ForceVerboseConftest("Compiling $TARGET")) - env.Append(CXXCOMSTR=ForceVerboseConftest(env["CCCOMSTR"])) - env.Append(SHCCCOMSTR=ForceVerboseConftest("Compiling $TARGET")) - env.Append(SHCXXCOMSTR=ForceVerboseConftest(env["SHCCCOMSTR"])) - env.Append(LINKCOMSTR=ForceVerboseConftest("Linking $TARGET")) - env.Append(SHLINKCOMSTR=ForceVerboseConftest(env["LINKCOMSTR"])) - env.Append(ARCOMSTR=ForceVerboseConftest("Generating library $TARGET")) - -# Link tools other than mslink don't setup TEMPFILE in LINKCOM, -# disabling SCons automatically falling back to a temp file when -# running link commands that are over MAXLINELENGTH. With our object -# file linking mode, we frequently hit even the large linux command -# line length, so we want it everywhere. If we aren't using mslink, -# add TEMPFILE in. For verbose builds when using a tempfile, we need -# some trickery so that we print the command we are running, and not -# just the invocation of the compiler being fed the command file. -if "mslink" not in env["TOOLS"]: - if env.Verbose(): - env["LINKCOM"] = "${{TEMPFILE('{0}', '')}}".format(env["LINKCOM"]) - env["SHLINKCOM"] = "${{TEMPFILE('{0}', '')}}".format(env["SHLINKCOM"]) - if "libtool" not in env["TOOLS"]: - env["ARCOM"] = "${{TEMPFILE('{0}', '')}}".format(env["ARCOM"]) - else: - env["LINKCOM"] = "${{TEMPFILE('{0}', 'LINKCOMSTR')}}".format(env["LINKCOM"]) - env["SHLINKCOM"] = "${{TEMPFILE('{0}', 'SHLINKCOMSTR')}}".format(env["SHLINKCOM"]) - if "libtool" not in env["TOOLS"]: - env["ARCOM"] = "${{TEMPFILE('{0}', 'ARCOMSTR')}}".format(env["ARCOM"]) - -if env["_LIBDEPS"] == "$_LIBDEPS_OBJS": - # The libraries we build in LIBDEPS_OBJS mode are just placeholders for tracking dependencies. - # This avoids wasting time and disk IO on them. 
- def write_uuid_to_file(env, target, source): - with open(env.File(target[0]).abspath, "w") as fake_lib: - fake_lib.write(str(uuid.uuid4())) - fake_lib.write("\n") - - # We originally did this by setting ARCOM to write_uuid_to_file. - # This worked more or less by accident. It works when SCons is - # doing the action execution because when it would subst the - # command line subst would execute the function as part of string - # resolution which would have the side effect of writing the - # file. Since it returned None subst would do some special - # handling to make sure it never made it to the command line. This - # breaks Ninja however because we are taking that return value and - # trying to pass it to the command executor (/bin/sh or - # cmd.exe) and end up with the function name as a command. The - # resulting command looks something like `/bin/sh -c - # 'write_uuid_to_file(env, target, source)`. If we instead - # actually do what we want and that is make the StaticLibrary - # builder's action a FunctionAction the Ninja generator will - # correctly dispatch it and not generate an invalid command - # line. This also has the side benefit of being more clear that - # we're expecting a Python function to execute here instead of - # pretending to be a CommandAction that just happens to not run a - # command but instead runs a function. - env["BUILDERS"]["StaticLibrary"].action = SCons.Action.Action( - write_uuid_to_file, "Generating placeholder library $TARGET" - ) - -import libdeps_tool as libdeps - -libdeps.setup_environment( - env, - emitting_shared=link_model, - debug=get_option("libdeps-debug"), - linting=get_option("libdeps-linting"), -) - -# The abilink/tapilink tools and the thin archive tool must be loaded -# after libdeps, so that the scanners they inject can see the library -# dependencies added by libdeps. Neither abilink nor tapilink can work -# with the current Ninja generation because they rely on adding -# ListActions to builders. 
-if get_option("ninja") == "disabled" and link_model.startswith("dynamic"): - # Add in the abi linking tool if the user requested and it is - # supported on this platform. - # - # TODO: Can we unify the `abilink` and `tapilink` tools? - if env.get("ABIDW"): - abilink = Tool("abilink") - if abilink.exists(env): - abilink(env) - - if env.get("TAPI"): - # TAPI is less useful when running with Bazel + Remote Execution. Disable since the initial implementation - # of the build system with Bazel will not support it. - # TODO(SERVER-88612): Remove fatal error we decide to implement TAPI support in Bazel - env.FatalError("TAPI is not supported with the hybrid build system.") - - tapilink = Tool("tapilink") - if tapilink.exists(env): - tapilink(env) - -if env["_LIBDEPS"] == "$_LIBDEPS_LIBS": - # The following platforms probably aren't using the binutils - # toolchain, or may be using it for the archiver but not the - # linker, and binutils currently is the only thing that supports - # thin archives. Don't even try on those platforms. 
- if not env.TargetOSIs("solaris", "darwin", "windows", "openbsd"): - env.Tool("thin_archive") - -if env.TargetOSIs("linux", "freebsd", "openbsd"): - env["LINK_WHOLE_ARCHIVE_LIB_START"] = "-Wl,--whole-archive" - env["LINK_WHOLE_ARCHIVE_LIB_END"] = "-Wl,--no-whole-archive" - env["LINK_AS_NEEDED_LIB_START"] = "-Wl,--as-needed" - env["LINK_AS_NEEDED_LIB_END"] = "-Wl,--no-as-needed" -elif env.TargetOSIs("darwin"): - env["LINK_WHOLE_ARCHIVE_LIB_START"] = "-Wl,-force_load" - env["LINK_WHOLE_ARCHIVE_LIB_END"] = "" - env["LINK_AS_NEEDED_LIB_START"] = "-Wl,-mark_dead_strippable_dylib" - env["LINK_AS_NEEDED_LIB_END"] = "" -elif env.TargetOSIs("solaris"): - env["LINK_WHOLE_ARCHIVE_LIB_START"] = "-Wl,-z,allextract" - env["LINK_WHOLE_ARCHIVE_LIB_END"] = "-Wl,-z,defaultextract" -elif env.TargetOSIs("windows"): - env["LINK_WHOLE_ARCHIVE_LIB_START"] = "/WHOLEARCHIVE" - env["LINK_WHOLE_ARCHIVE_LIB_END"] = "" - env["LIBDEPS_FLAG_SEPARATORS"] = {env["LINK_WHOLE_ARCHIVE_LIB_START"]: {"suffix": ":"}} - -if env.TargetOSIs("darwin") and link_model.startswith("dynamic"): - - def init_no_global_libdeps_tag_expansion(source, target, env, for_signature): - """ - This callable will be expanded by scons and modify the environment by - adjusting the prefix and postfix flags to account for linking options - related to the use of global static initializers for any given libdep. 
- """ - - if "init-no-global-side-effects" in env.get(libdeps.Constants.LibdepsTags, []): - # macos as-needed flag is used on the library directly when it is built - return env.get("LINK_AS_NEEDED_LIB_START", "") - - env["LIBDEPS_TAG_EXPANSIONS"].append(init_no_global_libdeps_tag_expansion) - - -def init_no_global_add_flags(target, start_flag, end_flag): - """Helper function for init_no_global_libdeps_tag_expand""" - - setattr(target[0].attributes, "libdeps_prefix_flags", [start_flag]) - setattr(target[0].attributes, "libdeps_postfix_flags", [end_flag]) - if env.TargetOSIs("linux", "freebsd", "openbsd"): - setattr( - target[0].attributes, - "libdeps_switch_flags", - [ - { - "on": start_flag, - "off": end_flag, - } - ], - ) - - -def init_no_global_libdeps_tag_emitter(target, source, env): - """ - This emitter will be attached the correct pre and post fix flags to - a given library to cause it to have certain flags before or after on the link - line. - """ - - if link_model == "dynamic": - start_flag = env.get("LINK_AS_NEEDED_LIB_START", "") - end_flag = env.get("LINK_AS_NEEDED_LIB_END", "") - - # In the dynamic case, any library that is known to not have global static - # initializers can supply the flag and be wrapped in --as-needed linking, - # allowing the linker to be smart about linking libraries it may not need. - if "init-no-global-side-effects" in env.get( - libdeps.Constants.LibdepsTags, [] - ) and not env.TargetOSIs("darwin"): - init_no_global_add_flags(target, start_flag, end_flag) - else: - init_no_global_add_flags(target, "", "") - - else: - start_flag = env.get("LINK_WHOLE_ARCHIVE_LIB_START", "") - end_flag = env.get("LINK_WHOLE_ARCHIVE_LIB_END", "") - - # In the static case, any library that is unknown to have global static - # initializers should supply the flag and be wrapped in --whole-archive linking, - # allowing the linker to bring in all those symbols which may not be directly needed - # at link time. 
- if "init-no-global-side-effects" not in env.get(libdeps.Constants.LibdepsTags, []): - init_no_global_add_flags(target, start_flag, end_flag) - else: - init_no_global_add_flags(target, "", "") - return target, source - - -for target_builder in ["SharedLibrary", "SharedArchive", "StaticLibrary"]: - builder = env["BUILDERS"][target_builder] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([base_emitter, init_no_global_libdeps_tag_emitter]) - builder.emitter = new_emitter - -link_guard_rules = { - "test": [ - "dist", - ], -} - - -class LibdepsLinkGuard(SCons.Errors.UserError): - pass - - -def checkComponentType(target_comps, comp, target, lib): - """ - For a libdep and each AIB_COMPONENT its labeled as, check if its violates - any of the link gaurd rules. - """ - for target_comp in target_comps: - for link_guard_rule in link_guard_rules: - if target_comp in link_guard_rules[link_guard_rule] and link_guard_rule in comp: - raise LibdepsLinkGuard( - textwrap.dedent(f"""\n - LibdepsLinkGuard: - \tTarget '{target[0]}' links LIBDEP '{lib}' - \tbut is listed as AIB_COMPONENT '{target_comp}' which is not allowed link libraries - \twith AIB_COMPONENTS that include the word '{link_guard_rule}'\n""") - ) - - -def get_comps(env): - """util function for extracting all AIB_COMPONENTS as a list""" - comps = env.get("AIB_COMPONENTS_EXTRA", []) - comp = env.get("AIB_COMPONENT", None) - if comp: - comps += [comp] - return comps - - -def link_guard_libdeps_tag_expand(source, target, env, for_signature): - """ - Callback function called on all binaries to check if a certain binary - from a given component is linked to another binary of a given component, - the goal being to define rules that prevents test components from being - linked into production or releaseable components. 
- """ - for lib in libdeps.get_libdeps(source, target, env, for_signature): - if not lib.env: - continue - - for comp in get_comps(lib.env): - checkComponentType(get_comps(env), comp, target, lib) - - return [] - - -env["LIBDEPS_TAG_EXPANSIONS"].append(link_guard_libdeps_tag_expand) - -env.Tool("forceincludes") - -# ---- other build setup ----- -if not debugBuild: - env.AppendUnique(CPPDEFINES=["NDEBUG"]) - -# Normalize our experimental optimiation and hardening flags -selected_experimental_optimizations = set() -for suboption in get_option("experimental-optimization"): - if suboption == "*": - selected_experimental_optimizations.update(experimental_optimizations) - elif suboption.startswith("-"): - selected_experimental_optimizations.discard(suboption[1:]) - elif suboption.startswith("+"): - selected_experimental_optimizations.add(suboption[1:]) - -selected_experimental_runtime_hardenings = set() -for suboption in get_option("experimental-runtime-hardening"): - if suboption == "*": - selected_experimental_runtime_hardenings.update(experimental_runtime_hardenings) - elif suboption.startswith("-"): - selected_experimental_runtime_hardenings.discard(suboption[1:]) - elif suboption.startswith("+"): - selected_experimental_runtime_hardenings.add(suboption[1:]) - -# Disable floating-point contractions such as forming of fused multiply-add operations. -if env.ToolchainIs("clang", "gcc"): - env.Append(CCFLAGS=["-ffp-contract=off"]) -else: - # msvc defaults to /fp:precise. Visual Studio 2022 does not emit floating-point contractions - # with /fp:precise, but previous versions can. Disable contractions altogether by using - # /fp:strict. 
- env.Append(CCFLAGS=["/fp:strict"]) - -if env.TargetOSIs("linux"): - env.Append(LIBS=["m"]) - if not env.TargetOSIs("android"): - env.Append(LIBS=["resolv"]) - -elif env.TargetOSIs("solaris"): - env.Append(LIBS=["socket", "resolv", "lgrp"]) - -elif env.TargetOSIs("freebsd"): - env.Append(LIBS=["kvm"]) - env.Append(CCFLAGS=["-fno-omit-frame-pointer"]) - -elif env.TargetOSIs("darwin"): - env.Append(LIBS=["resolv"]) - -elif env.TargetOSIs("openbsd"): - env.Append(LIBS=["kvm"]) - -elif env.TargetOSIs("windows"): - env["DIST_ARCHIVE_SUFFIX"] = ".zip" - - # If tools configuration fails to set up 'cl' in the path, fall back to importing the whole - # shell environment and hope for the best. This will work, for instance, if you have loaded - # an SDK shell. - for pathdir in env["ENV"]["PATH"].split(os.pathsep): - if os.path.exists(os.path.join(pathdir, "cl.exe")): - break - else: - print("NOTE: Tool configuration did not find 'cl' compiler, falling back to os environment") - env["ENV"] = dict(os.environ) - - env.Append( - CPPDEFINES=[ - # This tells the Windows compiler not to link against the .lib files - # and to use boost as a bunch of header-only libraries - "BOOST_ALL_NO_LIB", - ] - ) - - env.Append(CPPDEFINES=["_UNICODE"]) - env.Append(CPPDEFINES=["UNICODE"]) - - # Temporary fixes to allow compilation with VS2017 - env.Append( - CPPDEFINES=[ - "_SILENCE_CXX17_ALLOCATOR_VOID_DEPRECATION_WARNING", - "_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING", - "_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING", - # TODO(SERVER-60151): Until we are fully in C++20 mode, it is - # easier to simply suppress C++20 deprecations. After we have - # switched over we should address any actual deprecated usages - # and then remove this flag. 
- "_SILENCE_ALL_CXX20_DEPRECATION_WARNINGS", - ] - ) - - # /EHsc exception handling style for visual studio - # /W3 warning level - env.Append(CCFLAGS=["/EHsc", "/W3"]) - - # Suppress some warnings we don't like, or find necessary to - # suppress. Please keep this list alphabetized and commented. - env.Append( - CCFLAGS=[ - # C4068: unknown pragma. added so that we can specify unknown - # pragmas for other compilers. - "/wd4068", - # C4244: 'conversion' conversion from 'type1' to 'type2', - # possible loss of data. An integer type is converted to a - # smaller integer type. - "/wd4244", - # C4267: 'var' : conversion from 'size_t' to 'type', possible - # loss of data. When compiling with /Wp64, or when compiling - # on a 64-bit operating system, type is 32 bits but size_t is - # 64 bits when compiling for 64-bit targets. To fix this - # warning, use size_t instead of a type. - "/wd4267", - # C4290: C++ exception specification ignored except to - # indicate a function is not __declspec(nothrow). A function - # is declared using exception specification, which Visual C++ - # accepts but does not implement. - "/wd4290", - # C4351: On extremely old versions of MSVC (pre 2k5), default - # constructing an array member in a constructor's - # initialization list would not zero the array members "in - # some cases". Since we don't target MSVC versions that old, - # this warning is safe to ignore. - "/wd4351", - # C4355: 'this' : used in base member initializer list. The - # this pointer is valid only within nonstatic member - # functions. It cannot be used in the initializer list for a - # base class. - "/wd4355", - # C4373: Older versions of MSVC would fail to make a function - # in a derived class override a virtual function in the - # parent, when defined inline and at least one of the - # parameters is made const. The behavior is incorrect under - # the standard. 
MSVC is fixed now, and the warning exists - # merely to alert users who may have relied upon the older, - # non-compliant behavior. Our code should not have any - # problems with the older behavior, so we can just disable - # this warning. - "/wd4373", - # C4800: 'type' : forcing value to bool 'true' or 'false' - # (performance warning). This warning is generated when a - # value that is not bool is assigned or coerced into type - # bool. - "/wd4800", - # C4251: This warning attempts to prevent usage of CRT (C++ - # standard library) types in DLL interfaces. That is a good - # idea for DLLs you ship to others, but in our case, we know - # that all DLLs are built consistently. Suppress the warning. - "/wd4251", - ] - ) - - # mozjs requires the following - # 'declaration' : no matching operator delete found; memory will not be freed if - # initialization throws an exception - env.Append(CCFLAGS=["/wd4291"]) - - # some warnings we should treat as errors: - # c4013 - # 'function' undefined; assuming extern returning int - # This warning occurs when files compiled for the C language use functions not defined - # in a header file. - # c4099 - # 'identifier' : type name first seen using 'objecttype1' now seen using 'objecttype2' - # This warning occurs when classes and structs are declared with a mix of struct and class - # which can cause linker failures - # c4930 - # 'identifier': prototyped function not called (was a variable definition intended?) - # This warning indicates a most-vexing parse error, where a user declared a function that - # was probably intended as a variable definition. A common example is accidentally - # declaring a function called lock that takes a mutex when one meant to create a guard - # object called lock on the stack. 
- env.Append(CCFLAGS=["/we4013", "/we4099", "/we4930"]) - - env.Append( - CPPDEFINES=[ - "_CONSOLE", - "_CRT_SECURE_NO_WARNINGS", - "_ENABLE_EXTENDED_ALIGNED_STORAGE", - "_SCL_SECURE_NO_WARNINGS", - ] - ) - - # this would be for pre-compiled headers, could play with it later - # env.Append( CCFLAGS=['/Yu"pch.h"'] ) - - # Don't send error reports in case of internal compiler error - env.Append(CCFLAGS=["/errorReport:none"]) - - # Select debugging format. /Zi gives faster links but seems to use more memory. - if get_option("msvc-debugging-format") == "codeview" and debug_symbols: - env["CCPDBFLAGS"] = "/Z7" - elif get_option("msvc-debugging-format") == "pdb" and debug_symbols: - env["CCPDBFLAGS"] = "/Zi /Fd${TARGET}.pdb" - - # The SCons built-in pdbGenerator always adds /DEBUG, but we would like - # control over that flag so that users can override with /DEBUG:fastlink - # for better local builds. So we overwrite the builtin. - def pdbGenerator(env, target, source, for_signature): - try: - return ["/PDB:%s" % target[0].attributes.pdb] - except (AttributeError, IndexError): - return None - - env["_PDB"] = pdbGenerator - - # SCons by default adds debug flags /Z7, /Zi and /Debug - # we want to remove these if debug_symbols are off - if not debug_symbols: - del env["CCPDBFLAGS"] - - # /DEBUG will tell the linker to create a .pdb file - # which WinDbg and Visual Studio will use to resolve - # symbols if you want to debug a release-mode image. - # Note that this means we can't do parallel links in the build. - # - # Please also note that this has nothing to do with _DEBUG or optimization. - - # If the user set a /DEBUG flag explicitly, don't add - # another. Otherwise use the standard /DEBUG flag, since we always - # want PDBs. 
- if not any(flag.startswith("/DEBUG") for flag in env["LINKFLAGS"]) and debug_symbols: - env.Append(LINKFLAGS=["/DEBUG"]) - - # /MD: use the multithreaded, DLL version of the run-time library (MSVCRT.lib/MSVCR###.DLL) - # /MDd: Defines _DEBUG, _MT, _DLL, and uses MSVCRTD.lib/MSVCRD###.DLL - env.Append(CCFLAGS=["/MDd" if debugBuild else "/MD"]) - - if optBuild == "off": - env.Append( - CCFLAGS=["/Od"], - # windows non optimized builds will cause the PDB to blow up in size, - # this allows a larger PDB. The flag is undocumented at the time of writing - # but the microsoft thread which brought about its creation can be found here: - # https://developercommunity.visualstudio.com/t/pdb-limit-of-4-gib-is-likely-to-be-a-problem-in-a/904784 - # - # Without this flag MSVC will report a red herring error message, about disk space or invalid path. - LINKFLAGS=["/pdbpagesize:16384"], - ) - - if debugBuild: - # /RTC1: - Enable Stack Frame Run-Time Error Checking; Reports when a variable is used - # without having been initialized (implies /Od: no optimizations) - env.Append(CCFLAGS=["/RTC1"]) - else: - # /O1: optimize for size - # /O2: optimize for speed (as opposed to size) - # /Oy-: disable frame pointer optimization (overrides /O2, only affects 32-bit) - # /INCREMENTAL: NO - disable incremental link - avoid the level of indirection for function - # calls - - optFlags = [] - if optBuild == "size": - optFlags += ["/Os"] - elif optBuild == "debug": - optFlags += ["/Ox", "/Zo"] - else: - optFlags += ["/O2"] - optFlags += ["/Oy-"] - - env.Append(CCFLAGS=optFlags) - env.Append(LINKFLAGS=["/INCREMENTAL:NO"]) - - # Support large object files since some unit-test sources contain a lot of code - env.Append(CCFLAGS=["/bigobj"]) - - # Set Source and Executable character sets to UTF-8, this will produce a warning C4828 if the - # file contains invalid UTF-8. - env.Append(CCFLAGS=["/utf-8"]) - - # Specify standards conformance mode to the compiler. 
- env.Append(CCFLAGS=["/permissive-"]) - - # Enables the __cplusplus preprocessor macro to report an updated value for recent C++ language - # standards support. - env.Append(CCFLAGS=["/Zc:__cplusplus"]) - - # Tells the compiler to preferentially call global operator delete or operator delete[] - # functions that have a second parameter of type size_t when the size of the object is available. - env.Append(CCFLAGS=["/Zc:sizedDealloc"]) - - # Treat volatile according to the ISO standard and do not guarantee acquire/release semantics. - env.Append(CCFLAGS=["/volatile:iso"]) - - # Tell CL to produce more useful error messages. - env.Append(CCFLAGS=["/diagnostics:caret"]) - - # This gives 32-bit programs 4 GB of user address space in WOW64, ignored in 64-bit builds. - env.Append(LINKFLAGS=["/LARGEADDRESSAWARE"]) - - env.Append( - LIBS=[ - "DbgHelp", - "Iphlpapi", - "Psapi", - "advapi32", - "bcrypt", - "crypt32", - "dnsapi", - "kernel32", - "shell32", - "pdh", - "version", - "winmm", - "ws2_32", - "secur32", - ], - ) - -# When building on visual studio, this sets the name of the debug symbols file -if env.ToolchainIs("msvc") and debug_symbols: - env["PDB"] = "${TARGET.base}.pdb" - -# Python uses APPDATA to determine the location of user installed -# site-packages. If we do not pass this variable down to Python -# subprocesses then anything installed with `pip install --user` -# will be inaccessible leading to import errors. -# -# Use env['PLATFORM'] instead of TargetOSIs since we always want this -# to run on Windows hosts but not always for Windows targets. 
-if env["PLATFORM"] == "win32": - appdata = os.getenv("APPDATA", None) - if appdata is not None: - env["ENV"]["APPDATA"] = appdata - -if env.TargetOSIs("posix"): - # On linux, C code compiled with gcc/clang -std=c11 causes - # __STRICT_ANSI__ to be set, and that drops out all of the feature - # test definitions, resulting in confusing errors when we run C - # language configure checks and expect to be able to find newer - # POSIX things. Explicitly enabling _XOPEN_SOURCE fixes that, and - # should be mostly harmless as on Linux, these macros are - # cumulative. The C++ compiler already sets _XOPEN_SOURCE, and, - # notably, setting it again does not disable any other feature - # test macros, so this is safe to do. Other platforms like macOS - # and BSD have crazy rules, so don't try this there. - # - # Furthermore, as both C++ compilers appear to define _GNU_SOURCE - # unconditionally (because libstdc++ requires it), it seems - # prudent to explicitly add that too, so that C language checks - # see a consistent set of definitions. - if env.TargetOSIs("linux"): - env.AppendUnique( - CPPDEFINES=[ - ("_XOPEN_SOURCE", 700), - "_GNU_SOURCE", - ], - ) - - # If shared and static object files stripped of their rightmost - # dot-delimited suffix would collide, modify the shared library - # ones so that they won't. We do this because if split dwarf is in - # play, static and dynamic builds would otherwise overwrite each - # other's .dwo files, because GCC strips the last suffix and adds - # .dwo, rather than simply appending .dwo to the full filename. - objsuffelts = env.subst("$OBJSUFFIX").split(".") - shobjsuffelts = env.subst("$SHOBJSUFFIX").split(".") - if objsuffelts[0:-1] == shobjsuffelts[0:-1]: - env["SHOBJSUFFIX"] = ".dyn${OBJSUFFIX}" - - # Everything on OS X is position independent by default. 
- if not env.TargetOSIs("darwin"): - if get_option("runtime-hardening") == "on": - # If runtime hardening is requested, then build anything - # destined for an executable with the necessary flags for PIE. - env.AppendUnique( - PROGCFLAGS=["-fPIE"], - PROGCCFLAGS=["-fPIE"], - PROGCXXFLAGS=["-fPIE"], - PROGLINKFLAGS=["-pie"], - ) - - # -Winvalid-pch Warn if a precompiled header (see Precompiled Headers) is found in the search path but can't be used. - env.Append( - CCFLAGS=[ - "-fasynchronous-unwind-tables", - "-g2" if not env.TargetOSIs("emscripten") else "-g", - "-Wall", - "-Wsign-compare", - "-Wno-unknown-pragmas", - "-Winvalid-pch", - ], - ) - - if env.get("DWARF_VERSION"): - if env.TargetOSIs("darwin"): - env.FatalError("Setting DWARF_VERSION on darwin is not supported.") - env.AppendUnique( - CCFLAGS=["-gdwarf-$DWARF_VERSION"], - LINKFLAGS=["-gdwarf-$DWARF_VERSION"], - ) - - # TODO: At least on x86, glibc as of 2.3.4 will consult the - # .eh_frame info via _Unwind_Backtrace to do backtracing without - # needing the frame pointer, despite what the backtrace man page - # actually says. We should see if we can drop the requirement that - # we use libunwind here. - can_nofp = env.TargetOSIs("darwin") or use_libunwind - - # For debug builds with tcmalloc, we need the frame pointer so it can - # record the stack of allocations. - can_nofp &= not ( - debugBuild and (env["MONGO_ALLOCATOR"] in ["tcmalloc-google", "tcmalloc-gperf"]) - ) - - # Only disable frame pointers if requested - can_nofp &= "nofp" in selected_experimental_optimizations - - if not can_nofp: - env.Append(CCFLAGS=["-fno-omit-frame-pointer"]) - - if "tbaa" not in selected_experimental_optimizations: - env.Append(CCFLAGS=["-fno-strict-aliasing"]) - - # Enabling hidden visibility on non-darwin requires that we have - # libunwind in play, since glibc backtrace will not work - # correctly. 
- if "vishidden" in selected_experimental_optimizations and ( - env.TargetOSIs("darwin") or use_libunwind - ): - if link_model.startswith("dynamic"): - # In dynamic mode, we can't make the default visibility - # hidden because not all libraries have export tags. But - # we can at least make inlines hidden. - # - # TODO: Except on macOS, where we observe lots of crashes - # when we enable this. We should investigate further but - # it isn't relevant for the purpose of exploring these - # flags on linux, where they seem to work fine. - if not env.TargetOSIs("darwin"): - env.Append(CXXFLAGS=["-fvisibility-inlines-hidden"]) - else: - # In static mode, we need an escape hatch for a few - # libraries that don't work correctly when built with - # hidden visiblity. - def conditional_visibility_generator(target, source, env, for_signature): - if "DISALLOW_VISHIDDEN" in env: - return - return "-fvisibility=hidden" - - env.Append( - CCFLAGS_VISIBILITY_HIDDEN_GENERATOR=conditional_visibility_generator, - CCFLAGS="$CCFLAGS_VISIBILITY_HIDDEN_GENERATOR", - ) - - # env.Append( " -Wconversion" ) TODO: this doesn't really work yet - env.Append(CXXFLAGS=["-Woverloaded-virtual"]) - - # On OS X, clang doesn't want the pthread flag at link time, or it - # issues warnings which make it impossible for us to declare link - # warnings as errors. See http://stackoverflow.com/a/19382663. - if not (env.TargetOSIs("darwin") and env.ToolchainIs("clang")): - env.Append(LINKFLAGS=["-pthread"]) - - # SERVER-9761: Ensure early detection of missing symbols in dependent - # libraries at program startup. For non-release dynamic builds we disable - # this behavior in the interest of improved mongod startup times. 
- - # Xcode15 removed bind_at_load functionality so we cannot have a selection for macosx here - # ld: warning: -bind_at_load is deprecated on macOS - if has_option("release") or get_option("link-model") != "dynamic": - if not env.TargetOSIs("macOS"): - env.Append(LINKFLAGS=["-Wl,-z,now"]) - - # We need to use rdynamic for backtraces with glibc unless we have libunwind. - nordyn = env.TargetOSIs("darwin") or use_libunwind - - # And of course only do rdyn if the experimenter asked for it. - nordyn &= "nordyn" in selected_experimental_optimizations - - if nordyn: - - def export_symbol_generator(source, target, env, for_signature): - symbols = copy.copy(env.get("EXPORT_SYMBOLS", [])) - for lib in libdeps.get_libdeps(source, target, env, for_signature): - if lib.env: - symbols.extend(lib.env.get("EXPORT_SYMBOLS", [])) - export_expansion = "${EXPORT_SYMBOL_FLAG}" - return [f"-Wl,{export_expansion}{symbol}" for symbol in symbols] - - env["EXPORT_SYMBOL_GEN"] = export_symbol_generator - - # For darwin, we need the leading underscore but for others we - # don't. Hacky but it works to jam that distinction into the - # flag itself, since it already differs on darwin. - if env.TargetOSIs("darwin"): - env["EXPORT_SYMBOL_FLAG"] = "-exported_symbol,_" - else: - env["EXPORT_SYMBOL_FLAG"] = "--export-dynamic-symbol," - - env.Append( - PROGLINKFLAGS=[ - "$EXPORT_SYMBOL_GEN", - ], - ) - elif not env.TargetOSIs("darwin"): - env.Append( - PROGLINKFLAGS=[ - "-rdynamic", - ], - ) - - # make scons colorgcc friendly - for key in ("HOME", "TERM"): - try: - env["ENV"][key] = os.environ[key] - except KeyError: - pass - - if has_option("gcov"): - if not (env.TargetOSIs("linux") and (env.ToolchainIs("gcc", "clang"))): - # TODO: This should become supported under: https://jira.mongodb.org/browse/SERVER-49877 - env.FatalError( - "Coverage option 'gcov' is currently only supported on linux with gcc and clang. See SERVER-49877." 
- ) - - env.AppendUnique( - CCFLAGS=["--coverage"], - LINKFLAGS=["--coverage"], - ) - - if optBuild == "off": - env.Append(CCFLAGS=["-O0"]) - else: - if optBuild == "size": - env.Append(CCFLAGS=["-Os"]) - elif optBuild == "debug": - env.Append(CCFLAGS=["-Og"]) - else: - if "O3" in selected_experimental_optimizations: - env.Append(CCFLAGS=["-O3"]) - else: - env.Append(CCFLAGS=["-O2"]) - - if "treevec" in selected_experimental_optimizations: - env.Append(CCFLAGS=["-ftree-vectorize"]) - -wiredtiger = False -if get_option("wiredtiger") == "on": - # Wiredtiger only supports 64-bit architecture, and will fail to compile on 32-bit - # so disable WiredTiger automatically on 32-bit since wiredtiger is on by default - if env["TARGET_ARCH"] == "i386": - env.FatalError( - "WiredTiger is not supported on 32-bit platforms\n" - "Re-run scons with --wiredtiger=off to build on 32-bit platforms" - ) - else: - wiredtiger = True - -if not env.TargetOSIs("windows", "macOS") and (env.ToolchainIs("GCC", "clang")): - # By default, apply our current microarchitecture minima. If the - # user has customized a flag of the same name in any of CCFLAGS, - # CFLAGS, or CXXFLAGS, we disable applying our default to - # CCFLAGS. We are assuming the user knows what they are doing, - # e.g. we don't try to be smart and notice that they applied it to - # CXXFLAGS and therefore still apply it to CFLAGS since they - # didn't customize that. Basically, don't send those flags in - # unless you a) mean it, and b) know what you are doing, and c) - # cover all your bases by either setting it via CCFLAGS, or - # setting it for both C and C++ by setting both of CFLAGS and - # CXXFLAGS. 
- - default_targeting_flags_for_architecture = { - "aarch64": {"-march=": "armv8.2-a", "-mtune=": "generic"}, - "i386": {"-march=": "nocona", "-mtune=": "generic"}, - "ppc64le": {"-mcpu=": "power8", "-mtune=": "power8", "-mcmodel=": "medium"}, - "s390x": {"-march=": "z196", "-mtune=": "zEC12"}, - } - - # If we are enabling vectorization in sandybridge mode, we'd - # rather not hit the 256 wide vector instructions because the - # heavy versions can cause clock speed reductions. - if "sandybridge" in selected_experimental_optimizations: - default_targeting_flags_for_architecture["x86_64"] = { - "-march=": "sandybridge", - "-mtune=": "generic", - "-mprefer-vector-width=": "128", - } - - default_targeting_flags = default_targeting_flags_for_architecture.get(env["TARGET_ARCH"]) - if default_targeting_flags: - search_variables = ["CCFLAGS", "CFLAGS", "CXXFLAGS"] - for targeting_flag, targeting_flag_value in default_targeting_flags.items(): - if not any( - flag_value.startswith(targeting_flag) - for search_variable in search_variables - for flag_value in env[search_variable] - ): - env.Append(CCFLAGS=[f"{targeting_flag}{targeting_flag_value}"]) - -# discover modules, and load the (python) module for each module's build.py -mongo_modules = moduleconfig.discover_modules("src/mongo/db/modules", get_option("modules")) - -has_ninja_module = False -for module in mongo_modules: - if hasattr(module, "NinjaFile"): - has_ninja_module = True - break - -if get_option("ninja") != "disabled" and has_ninja_module: - env.FatalError( - textwrap.dedent("""\ - ERROR: Ninja tool option '--ninja' should not be used with the ninja module. - Using both options simultaneously may clobber build.ninja files. - Remove the ninja module directory or use '--modules= ' to select no modules. 
- If using enterprise module, explicitly set '--modules=' to exclude the ninja module.""") - ) - -if has_ninja_module: - print( - "WARNING: You are attempting to use the unsupported/legacy ninja module, instead of the integrated ninja generator. You are strongly encouraged to remove the ninja module from your module list and invoke scons with --ninja generate-ninja" - ) - -# --- check system --- -ssl_provider = None -http_client = get_option("enable-http-client") - - -def isSanitizerEnabled(self, sanitizerName): - if "SANITIZERS_ENABLED" not in self: - return False - if sanitizerName == "fuzzer": - return "fuzzer-no-link" in self["SANITIZERS_ENABLED"] - return sanitizerName in self["SANITIZERS_ENABLED"] - - -env.AddMethod(isSanitizerEnabled, "IsSanitizerEnabled") - - -def doConfigure(myenv): - global wiredtiger - global ssl_provider - global http_client - - # Check that the compilers work. - # - # TODO: Currently, we have some flags already injected. Eventually, this should test the - # bare compilers, and we should re-check at the very end that TryCompile and TryLink still - # work with the flags we have selected. 
- if myenv.ToolchainIs("msvc"): - compiler_minimum_string = "Microsoft Visual Studio 2022 17.0" - compiler_test_body = textwrap.dedent( - """ - #if !defined(_MSC_VER) - #error - #endif - - #if _MSC_VER < 1930 - #error %s or newer is required to build MongoDB - #endif - - int main(int argc, char* argv[]) { - return 0; - } - """ - % compiler_minimum_string - ) - elif myenv.ToolchainIs("gcc"): - compiler_minimum_string = "GCC 11.3" - compiler_test_body = textwrap.dedent( - """ - #if !defined(__GNUC__) || defined(__clang__) - #error - #endif - - #if (__GNUC__ < 11) || (__GNUC__ == 11 && __GNUC_MINOR__ < 3) - #error %s or newer is required to build MongoDB - #endif - - int main(int argc, char* argv[]) { - return 0; - } - """ - % compiler_minimum_string - ) - elif env.ToolchainIs("clang"): - compiler_minimum_string = "clang 12.0 (or Apple XCode 13.0)" - compiler_test_body = textwrap.dedent( - """ - #if !defined(__clang__) - #error - #endif - - #if defined(__apple_build_version__) - #if __apple_build_version__ < 13000029 - #error %s or newer is required to build MongoDB - #endif - #elif (__clang_major__ < 12) || (__clang_major__ == 12 && __clang_minor__ < 0) - #error %s or newer is required to build MongoDB - #endif - - int main(int argc, char* argv[]) { - return 0; - } - """ - % (compiler_minimum_string, compiler_minimum_string) - ) - else: - myenv.ConfError("Error: can't check compiler minimum; don't know this compiler...") - - def CheckForMinimumCompiler(context, language): - extension_for = { - "C": ".c", - "C++": ".cpp", - } - context.Message( - "Checking if %s compiler is %s or newer..." 
% (language, compiler_minimum_string) - ) - result = context.TryCompile(compiler_test_body, extension_for[language]) - context.Result(result) - return result - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckForMinimumCompiler": CheckForMinimumCompiler, - }, - ) - - c_compiler_validated = conf.CheckForMinimumCompiler("C") - cxx_compiler_validated = conf.CheckForMinimumCompiler("C++") - - suppress_invalid = has_option("disable-minimum-compiler-version-enforcement") - if releaseBuild and suppress_invalid: - env.FatalError("--disable-minimum-compiler-version-enforcement is forbidden with --release") - - if not (c_compiler_validated and cxx_compiler_validated): - if not suppress_invalid: - env.ConfError("ERROR: Refusing to build with compiler that does not meet requirements") - print("WARNING: Ignoring failed compiler version check per explicit user request.") - print("WARNING: The build may fail, binaries may crash, or may run but corrupt data...") - - # Figure out what our minimum windows version is. If the user has specified, then use - # that. - if env.TargetOSIs("windows"): - if has_option("win-version-min"): - win_version_min = get_option("win-version-min") - else: - # If no minimum version has been specified, use our default. - win_version_min = "win10" - - env["WIN_VERSION_MIN"] = win_version_min - win_version_min = win_version_min_choices[win_version_min] - env.Append(CPPDEFINES=[("_WIN32_WINNT", "0x" + win_version_min[0])]) - env.Append(CPPDEFINES=[("BOOST_USE_WINAPI_VERSION", "0x" + win_version_min[0])]) - env.Append(CPPDEFINES=[("NTDDI_VERSION", "0x" + win_version_min[0] + win_version_min[1])]) - - conf.Finish() - - # We require macOS 10.14 or newer - if env.TargetOSIs("darwin"): - # TODO: Better error messages, mention the various -mX-version-min-flags in the error, and - # single source of truth for versions, plumbed through #ifdef ladder and error messages. 
- def CheckDarwinMinima(context): - test_body = """ - #include - #include - #include - - #if TARGET_OS_OSX && (__MAC_OS_X_VERSION_MIN_REQUIRED < __MAC_10_14) - #error 1 - #endif - """ - - context.Message( - "Checking for sufficient {0} target version minimum... ".format( - context.env["TARGET_OS"] - ) - ) - ret = context.TryCompile(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckDarwinMinima": CheckDarwinMinima, - }, - ) - - if not conf.CheckDarwinMinima(): - conf.env.ConfError("Required target minimum of macOS 10.14 not found") - - conf.Finish() - - def CheckFlag(env, flag, tool, extension, link, **mutation): - def CheckFlagTest(context, tool, extension, flag): - if link: - if tool == "C": - test_body = """ - #include - #include - int main() { - printf("Hello, World!"); - return EXIT_SUCCESS; - }""" - elif tool == "C++": - test_body = """ - #include - #include - int main() { - std::cout << "Hello, World!" << std::endl; - return EXIT_SUCCESS; - }""" - context.Message("Checking if linker supports %s... " % (flag)) - ret = context.TryLink(textwrap.dedent(test_body), extension) - else: - test_body = "" - context.Message("Checking if %s compiler supports %s... " % (tool, flag)) - ret = context.TryCompile(textwrap.dedent(test_body), extension) - context.Result(ret) - return ret - - if env.ToolchainIs("msvc"): - env.FatalError("AddFlagIfSupported is not currently supported with MSVC") - - test_mutation = mutation - if env.ToolchainIs("gcc"): - test_mutation = copy.deepcopy(mutation) - # GCC helpfully doesn't issue a diagnostic on unknown flags of the form -Wno-xxx - # unless other diagnostics are triggered. That makes it tough to check for support - # for -Wno-xxx. To work around, if we see that we are testing for a flag of the - # form -Wno-xxx (but not -Wno-error=xxx), we also add -Wxxx to the flags. 
GCC does - # warn on unknown -Wxxx style flags, so this lets us probe for availablity of - # -Wno-xxx. - for kw in list(test_mutation.keys()): - test_flags = test_mutation[kw] - for test_flag in test_flags: - if test_flag.startswith("-Wno-") and not test_flag.startswith("-Wno-error="): - test_flags.append(re.sub("^-Wno-", "-W", test_flag)) - - # If the user has selected ``configure` in - # `disable-warnings-as-errors`, the usual mechanisms that - # would inject Werror or similar are disabled for - # conftests. But AddFlagIfSupported requires that those flags - # be used. Disable the generators so we have explicit control. - cloned = env.Clone( - CCFLAGS_GENERATE_WERROR=[], - CXXFLAGS_GENERATE_WERROR=[], - LINKFLAGS_GENERATE_WERROR=[], - ) - - cloned.Append(**test_mutation) - - # Add these *after* the test mutation, so that the mutation - # can't step on the warnings-as-errors state. - cloned.Append( - CCFLAGS=["$CCFLAGS_WERROR"], - CXXFLAGS=["$CXXFLAGS_WERROR"], - LINKFLAGS=["$LINKFLAGS_WERROR"], - ) - - conf = Configure( - cloned, - help=False, - custom_tests={ - "CheckFlag": lambda ctx: CheckFlagTest(ctx, tool, extension, flag), - }, - ) - available = conf.CheckFlag() - conf.Finish() - return available - - def AddFlagIfSupported(env, flag, tool, extension, link, **mutation): - available = CheckFlag(env, flag, tool, extension, link, **mutation) - - if available: - env.Append(**mutation) - return available - - conf_check_vars = { - "CFLAGS": {"tool": "C", "extension": ".c", "link": False}, - "CCFLAGS": {"tool": "C", "extension": ".c", "link": False}, - "CXXFLAGS": {"tool": "C++", "extension": ".cpp", "link": False}, - "LINKFLAGS": {"tool": "C", "extension": ".c", "link": True}, - "SHLINKFLAGS": {"tool": "C", "extension": ".c", "link": True}, - } - - def var_func(env, flag, var, func): - kwargs = dict({var: [flag]}, **conf_check_vars[var]) - return func(env, flag, **kwargs) - - for var in conf_check_vars: - myenv.AddMethod( - functools.partial(var_func, var=var, 
func=AddFlagIfSupported), f"AddTo{var}IfSupported" - ) - myenv.AddMethod( - functools.partial(var_func, var=var, func=CheckFlag), f"Check{var}Supported" - ) - - if myenv.ToolchainIs("gcc", "clang"): - # This tells clang/gcc to use the gold linker if it is available - we prefer the gold linker - # because it is much faster. Don't use it if the user has already configured another linker - # selection manually. - if any(flag.startswith("-fuse-ld=") for flag in env["LINKFLAGS"]): - myenv.FatalError( - "Use the '--linker' option instead of modifying the LINKFLAGS directly." - ) - - linker_ld = get_option("linker") - - if linker_ld == "bfd": - myenv.FatalError("The linker 'bfd' is not supported.") - elif linker_ld == "auto": - if not env.TargetOSIs("darwin", "macOS"): - if not myenv.AddToLINKFLAGSIfSupported("-fuse-ld=lld"): - myenv.FatalError( - "The recommended linker 'lld' is not supported with the current compiler configuration, you can try the 'gold' linker with '--linker=gold'." - ) - elif link_model.startswith("dynamic") and linker_ld == "bfd": - # BFD is not supported due to issues with it causing warnings from some of - # the third party libraries that mongodb is linked with: - # https://jira.mongodb.org/browse/SERVER-49465 - myenv.FatalError(f"Linker {linker_ld} is not supported with dynamic link model builds.") - else: - if not myenv.AddToLINKFLAGSIfSupported(f"-fuse-ld={linker_ld}"): - myenv.FatalError(f"Linker {linker_ld} could not be configured.") - - if has_option("gcov") and myenv.AddToCCFLAGSIfSupported("-fprofile-update=single"): - myenv.AppendUnique(LINKFLAGS=["-fprofile-update=single"]) - - detectCompiler = Configure( - myenv, - help=False, - custom_tests={ - "CheckForCXXLink": CheckForCXXLink, - }, - ) - - if not detectCompiler.CheckCC(): - env.ConfError( - "C compiler {0} doesn't work", - detectEnv["CC"], - ) - - if not detectCompiler.CheckCXX(): - env.ConfError( - "C++ compiler {0} doesn't work", - detectEnv["CXX"], - ) - - if not 
detectCompiler.CheckForCXXLink(): - env.ConfError( - "C++ compiler {0} can't link C++ programs", - detectEnv["CXX"], - ) - - detectCompiler.Finish() - - if myenv.ToolchainIs("clang", "gcc"): - # This warning was added in g++-4.8. - myenv.AddToCCFLAGSIfSupported("-Wno-unused-local-typedefs") - - # Clang likes to warn about unused functions, which seems a tad aggressive and breaks - # -Werror, which we want to be able to use. - myenv.AddToCCFLAGSIfSupported("-Wno-unused-function") - - # TODO: Note that the following two flags are added to CCFLAGS even though they are - # really C++ specific. We need to do this because SCons passes CXXFLAGS *before* - # CCFLAGS, but CCFLAGS contains -Wall, which re-enables the warnings we are trying to - # suppress. In the future, we should move all warning flags to CCWARNFLAGS and - # CXXWARNFLAGS and add these to CCOM and CXXCOM as appropriate. - # - # Clang likes to warn about unused private fields, but some of our third_party - # libraries have such things. - myenv.AddToCCFLAGSIfSupported("-Wno-unused-private-field") - - # Prevents warning about using deprecated features (such as auto_ptr in c++11) - # Using -Wno-error=deprecated-declarations does not seem to work on some compilers, - # including at least g++-4.6. - myenv.AddToCCFLAGSIfSupported("-Wno-deprecated-declarations") - - # As of clang-3.4, this warning appears in v8, and gets escalated to an error. - myenv.AddToCCFLAGSIfSupported("-Wno-tautological-constant-out-of-range-compare") - - # As of clang in Android NDK 17, these warnings appears in boost and/or ICU, and get escalated to errors - myenv.AddToCCFLAGSIfSupported("-Wno-tautological-constant-compare") - myenv.AddToCCFLAGSIfSupported("-Wno-tautological-unsigned-zero-compare") - myenv.AddToCCFLAGSIfSupported("-Wno-tautological-unsigned-enum-zero-compare") - - # New in clang-3.4, trips up things mostly in third_party, but in a few places in the - # primary mongo sources as well. 
- myenv.AddToCCFLAGSIfSupported("-Wno-unused-const-variable") - - # Prevents warning about unused but set variables found in boost version 1.49 - # in boost/date_time/format_date_parser.hpp which does not work for compilers - # GCC >= 4.6. Error explained in https://svn.boost.org/trac/boost/ticket/6136 . - myenv.AddToCCFLAGSIfSupported("-Wno-unused-but-set-variable") - - # This has been suppressed in gcc 4.8, due to false positives, but not in clang. So - # we explicitly disable it here. - myenv.AddToCCFLAGSIfSupported("-Wno-missing-braces") - - # Suppress warnings about not consistently using override everywhere in a class. It seems - # very pedantic, and we have a fair number of instances. - myenv.AddToCCFLAGSIfSupported("-Wno-inconsistent-missing-override") - - # Don't issue warnings about potentially evaluated expressions - myenv.AddToCCFLAGSIfSupported("-Wno-potentially-evaluated-expression") - - # SERVER-76472 we don't try to maintain ABI so disable warnings about possible ABI issues. - myenv.AddToCCFLAGSIfSupported("-Wno-psabi") - - # Warn about moves of prvalues, which can inhibit copy elision. - myenv.AddToCXXFLAGSIfSupported("-Wpessimizing-move") - - # Disable warning about variables that may not be initialized - # Failures are triggered in the case of boost::optional in GCC 4.8.x - # TODO: re-evaluate when we move to GCC 5.3 - # see: http://stackoverflow.com/questions/21755206/how-to-get-around-gcc-void-b-4-may-be-used-uninitialized-in-this-funct - myenv.AddToCXXFLAGSIfSupported("-Wno-maybe-uninitialized") - - # Disable warning about templates that can't be implicitly instantiated. It is an attempt to - # make a link error into an easier-to-debug compiler failure, but it triggers false - # positives if explicit instantiation is used in a TU that can see the full definition. This - # is a problem at least for the S2 headers. 
- myenv.AddToCXXFLAGSIfSupported("-Wno-undefined-var-template") - - # This warning was added in clang-4.0, but it warns about code that is required on some - # platforms. Since the warning just states that 'explicit instantiation of [a template] that - # occurs after an explicit specialization has no effect', it is harmless on platforms where - # it isn't required - myenv.AddToCXXFLAGSIfSupported("-Wno-instantiation-after-specialization") - - # This warning was added in clang-5 and flags many of our lambdas. Since it isn't actively - # harmful to capture unused variables we are suppressing for now with a plan to fix later. - myenv.AddToCCFLAGSIfSupported("-Wno-unused-lambda-capture") - - # Enable sized deallocation support. - myenv.AddToCXXFLAGSIfSupported("-fsized-deallocation") - - # This warning was added in Apple clang version 11 and flags many explicitly defaulted move - # constructors and assignment operators for being implicitly deleted, which is not useful. - myenv.AddToCXXFLAGSIfSupported("-Wno-defaulted-function-deleted") - - # SERVER-44856: Our windows builds complain about unused - # exception parameters, but GCC and clang don't seem to do - # that for us automatically. In the interest of making it more - # likely to catch these errors early, add the (currently clang - # only) flag that turns it on. - myenv.AddToCXXFLAGSIfSupported("-Wunused-exception-parameter") - - # TODO SERVER-58675 - Remove this suppression after abseil is upgraded - myenv.AddToCXXFLAGSIfSupported("-Wno-deprecated-builtins") - - # We do not define an ABI that must be stable from build to build, so inconsistent hardware - # interference sizes between builds does not affect correctness. - myenv.AddToCXXFLAGSIfSupported("-Wno-interference-size") - - # This warning overzealously warns on uses of non-virtual destructors which are benign. 
- myenv.AddToCXXFLAGSIfSupported("-Wno-non-virtual-dtor") - - # As of XCode 9, this flag must be present (it is not enabled - # by -Wall), in order to enforce that -mXXX-version-min=YYY - # will enforce that you don't use APIs from ZZZ. - if env.TargetOSIs("darwin"): - env.AddToCCFLAGSIfSupported("-Wunguarded-availability") - env.AddToCCFLAGSIfSupported("-Wno-enum-constexpr-conversion") - # TODO SERVER-54659 - ASIO depends on std::result_of which was removed in C++ 20 - myenv.Append(CPPDEFINES=["ASIO_HAS_STD_INVOKE_RESULT"]) - # This is needed to compile boost on the newer xcodes - myenv.Append(CPPDEFINES=["BOOST_NO_CXX98_FUNCTION_BASE"]) - - if get_option("runtime-hardening") == "on": - # Enable 'strong' stack protection preferentially, but fall back to 'all' if it is not - # available. Note that we need to add these to the LINKFLAGS as well, since otherwise we - # might not link libssp when we need to (see SERVER-12456). - if myenv.ToolchainIs("gcc", "clang"): - if myenv.AddToCCFLAGSIfSupported("-fstack-protector-strong"): - myenv.Append( - LINKFLAGS=[ - "-fstack-protector-strong", - ], - ) - elif myenv.AddToCCFLAGSIfSupported("-fstack-protector-all"): - myenv.Append( - LINKFLAGS=[ - "-fstack-protector-all", - ], - ) - - if "cfex" in selected_experimental_runtime_hardenings: - myenv.Append( - CFLAGS=[ - "-fexceptions", - ], - ) - - if "stackclash" in selected_experimental_runtime_hardenings: - myenv.AddToCCFLAGSIfSupported("-fstack-clash-protection") - - if "controlflow" in selected_experimental_runtime_hardenings: - myenv.AddToCCFLAGSIfSupported("-fcf-protection=full") - - if myenv.ToolchainIs("clang"): - # TODO: There are several interesting things to try here, but they each have - # consequences we need to investigate. - # - # - fsanitize=bounds: This does static bounds checking. We can - # probably turn this on along with fsanitize-trap so that we - # don't depend on the ASAN runtime. 
- # - # - fsanitize=safestack: This looks very interesting, and is - # probably what we want. However there are a few problems: - # - # - It relies on having the RT library available, and it is - # unclear whether we can ship binaries that depend on - # that. - # - # - It is incompatible with a shared object build. - # - # - It may not work with SpiderMonkey due to needing to - # inform the GC about the stacks so that mark-sweep - # - # - fsanitize=cfi: Again, very interesting, however it - # requires LTO builds. - pass - - if has_option("osx-version-min"): - message = """ - The --osx-version-min option is no longer supported. - - To specify a target minimum for Darwin platforms, please explicitly add the appropriate options - to CCFLAGS and LINKFLAGS on the command line: - - macOS: scons CCFLAGS="-mmacosx-version-min=10.14" LINKFLAGS="-mmacosx-version-min=10.14" .. - - Note that MongoDB requires macOS 10.14 or later. - """ - myenv.ConfError(textwrap.dedent(message)) - - usingLibStdCxx = False - if has_option("libc++"): - # TODO SERVER-54659 - ASIO depends on std::result_of which was removed in C++ 20 - myenv.Append(CPPDEFINES=["ASIO_HAS_STD_INVOKE_RESULT"]) - - if not myenv.ToolchainIs("clang"): - myenv.FatalError("libc++ is currently only supported for clang") - if myenv.AddToCXXFLAGSIfSupported("-stdlib=libc++"): - myenv.Append(LINKFLAGS=["-stdlib=libc++"]) - else: - myenv.ConfError("libc++ requested, but compiler does not support -stdlib=libc++") - else: - - def CheckLibStdCxx(context): - test_body = """ - #include - #if !defined(__GLIBCXX__) - #error - #endif - """ - - context.Message("Checking if we are using libstdc++... 
") - ret = context.TryCompile(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckLibStdCxx": CheckLibStdCxx, - }, - ) - usingLibStdCxx = conf.CheckLibStdCxx() - conf.Finish() - - if myenv.ToolchainIs("msvc"): - if get_option("cxx-std") == "20": - myenv.AppendUnique(CCFLAGS=["/std:c++20"]) - else: - if get_option("cxx-std") == "20": - if not myenv.AddToCXXFLAGSIfSupported("-std=c++20"): - myenv.ConfError("Compiler does not honor -std=c++20") - - if not myenv.AddToCFLAGSIfSupported("-std=c11"): - myenv.ConfError("C++20 mode selected for C++ files, but can't enable C11 for C files") - - if using_system_version_of_cxx_libraries(): - print("WARNING: System versions of C++ libraries must be compiled with C++20 support") - - def CheckCxx20(context): - test_body = """ - #if __cplusplus < 202002L - #error - #endif - #include - [[maybe_unused]] constexpr auto spaceship_operator_is_a_cxx20_feature = 2 <=> 4; - """ - - context.Message("Checking for C++20... ") - ret = context.TryCompile(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckCxx20": CheckCxx20, - }, - ) - - if get_option("cxx-std") == "20" and not conf.CheckCxx20(): - myenv.ConfError("C++20 support is required to build MongoDB") - - conf.Finish() - - # If we are using libstdc++, check to see if we are using a - # libstdc++ that is older than our GCC minimum of 5.3.0. This is - # primarly to help people using clang on OS X but forgetting to - # use --libc++ (or set the target OS X version high enough to get - # it as the default). We would, ideally, check the __GLIBCXX__ - # version, but for various reasons this is not workable. Instead, - # we switch on the fact that the header - # wasn't introduced until libstdc++ 5.3.0. Yes, this is a terrible - # hack. 
- if usingLibStdCxx: - - def CheckModernLibStdCxx(context): - test_body = """ - #if !__has_include() - #error "libstdc++ from GCC 5.3.0 or newer is required" - #endif - """ - - context.Message("Checking for libstdc++ 5.3.0 or better... ") - ret = context.TryCompile(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckModernLibStdCxx": CheckModernLibStdCxx, - }, - ) - - suppress_invalid = has_option("disable-minimum-compiler-version-enforcement") - if not conf.CheckModernLibStdCxx() and not suppress_invalid: - myenv.ConfError( - "When using libstdc++, MongoDB requires libstdc++ from GCC 5.3.0 or newer" - ) - - conf.Finish() - - if has_option("use-glibcxx-debug"): - # If we are using a modern libstdc++ and this is a debug build and we control all C++ - # dependencies, then turn on the debugging features in libstdc++. - # TODO: Need a new check here. - if not debugBuild: - myenv.FatalError("--use-glibcxx-debug requires --dbg=on") - if not usingLibStdCxx: - myenv.FatalError( - "--use-glibcxx-debug is only compatible with the GNU implementation " - "of the C++ standard libary" - ) - if using_system_version_of_cxx_libraries(): - myenv.FatalError( - "--use-glibcxx-debug not compatible with system versions of " "C++ libraries." - ) - myenv.Append(CPPDEFINES=["_GLIBCXX_DEBUG"]) - - # Check if we have a modern Windows SDK - if env.TargetOSIs("windows"): - - def CheckWindowsSDKVersion(context): - test_body = """ - #include - #if !defined(NTDDI_WINBLUE) - #error Need Windows SDK Version 8.1 or higher - #endif - """ - - context.Message("Checking Windows SDK is 8.1 or newer... 
") - ret = context.TryCompile(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckWindowsSDKVersion": CheckWindowsSDKVersion, - }, - ) - - if not conf.CheckWindowsSDKVersion(): - myenv.ConfError("Windows SDK Version 8.1 or higher is required to build MongoDB") - - conf.Finish() - - # Check if we are on a POSIX system by testing if _POSIX_VERSION is defined. - def CheckPosixSystem(context): - test_body = """ - // POSIX requires the existence of unistd.h, so if we can't include unistd.h, we - // are definitely not a POSIX system. - #include - #if !defined(_POSIX_VERSION) - #error not a POSIX system - #endif - """ - - context.Message("Checking if we are on a POSIX system... ") - ret = context.TryCompile(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckPosixSystem": CheckPosixSystem, - }, - ) - posix_system = conf.CheckPosixSystem() - - conf.Finish() - - # Check if we are on a system that support the POSIX clock_gettime function - # and the "monotonic" clock. - if posix_system: - conf = Configure( - myenv, - help=False, - ) - # On 32-bit systems, we need to define this in order to get access to - # the 64-bit versions of fseek, etc. - # except on 32 bit android where it breaks boost - if not conf.CheckTypeSize("off_t", includes="#include ", expect=8): - if not env.TargetOSIs("android"): - myenv.Append(CPPDEFINES=["_FILE_OFFSET_BITS=64"]) - - conf.Finish() - - if has_option("sanitize") and get_option("sanitize") != "": - if not myenv.ToolchainIs("clang", "gcc"): - env.FatalError("sanitize is only supported with clang or gcc") - - # sanitizer libs may inject undefined refs (for hooks) at link time, but - # the symbols will be available at runtime via the compiler runtime lib. 
- env.Append(LINKFLAGS="-Wl,--allow-shlib-undefined") - - if myenv.ToolchainIs("gcc"): - # GCC's implementation of ASAN depends on libdl. - env.Append(LIBS=["dl"]) - - sanitizer_list = get_option("sanitize").split(",") - - using_asan = "address" in sanitizer_list - using_fsan = "fuzzer" in sanitizer_list - using_lsan = "leak" in sanitizer_list - using_tsan = "thread" in sanitizer_list - using_ubsan = "undefined" in sanitizer_list - using_msan = "memory" in sanitizer_list - - if get_option("shared-libsan") == "on" and len(sanitizer_list) > 0: - if not myenv.ToolchainIs("clang") or not myenv.TargetOSIs("linux"): - env.FatalError("Error: --shared-libsan is only supported with clang on linux") - - def get_san_lib_path(sanitizer): - # TODO SERVER-83727: the v4 clang toolchain doesn't support shared TSAN. Add - # support here once the toolchain is upgraded. - san_to_lib = { - "address": "asan", - "undefined": "ubsan_standalone", - } - sanitizer_lib = san_to_lib.get(sanitizer) - if sanitizer_lib is None: - env.FatalError( - f"Error: --shared-libsan is not supported with {sanitizer} sanitizer" - ) - arch = env["TARGET_ARCH"] - san_rt_names = [ - f"libclang_rt.{sanitizer_lib}-{arch}.so", - f"libclang_rt.{sanitizer_lib}.so", - ] - for san_rt_name in san_rt_names: - p = subprocess.run( - [env["CXX"], f"-print-file-name={san_rt_name}"], - capture_output=True, - text=True, - ) - clang_rt_path = p.stdout.strip() - if os.path.isfile(clang_rt_path): - return clang_rt_path - san_rt_names_string = ",".join(san_rt_names) - env.FatalError( - f"Error: couldn't find sanitizer runtime library, one of {san_rt_names_string}" - ) - - env["SANITIZER_RUNTIME_LIBS"] = [ - get_san_lib_path(sanitizer) for sanitizer in sanitizer_list - ] - - if "thread" not in sanitizer_list: - env.Append(LINKFLAGS=["-rtlib=compiler-rt", "-unwindlib=libgcc"]) - - if using_lsan: - env.FatalError("Please use --sanitize=address instead of --sanitize=leak") - - if (using_asan or using_msan) and env["MONGO_ALLOCATOR"] 
in [ - "tcmalloc-google", - "tcmalloc-gperf", - ]: - # There are multiply defined symbols between the sanitizer and - # our vendorized tcmalloc. - env.FatalError("Cannot use --sanitize=address or --sanitize=memory with tcmalloc") - - if not myenv.ToolchainIs("clang") and using_msan: - env.FatalError("Memory Sanitizer (MSan) is only supported with clang.") - - if using_fsan: - - def CheckForFuzzerCompilerSupport(context): - test_body = """ - #include - #include - - // fuzz_target.cc - extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { - return 0; - } - """ - - context.Message("Checking if libfuzzer is supported by the compiler... ") - - context.env.AppendUnique( - LINKFLAGS=[ - "-fprofile-instr-generate", - "-fcoverage-mapping", - "-fsanitize=fuzzer", - ], - CCFLAGS=[ - "-fprofile-instr-generate", - "-fcoverage-mapping", - ], - ) - - ret = context.TryLink(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - confEnv = myenv.Clone() - fuzzerConf = Configure( - confEnv, - help=False, - custom_tests={ - "CheckForFuzzerCompilerSupport": CheckForFuzzerCompilerSupport, - }, - ) - if not fuzzerConf.CheckForFuzzerCompilerSupport(): - myenv.FatalError("libfuzzer is not supported by the compiler") - fuzzerConf.Finish() - - # We can't include the fuzzer flag with the other sanitize flags - # The libfuzzer library already has a main function, which will cause the dependencies check - # to fail - sanitizer_list.remove("fuzzer") - sanitizer_list.append("fuzzer-no-link") - # These flags are needed to generate a coverage report - myenv.Append( - LINKFLAGS=[ - "-fprofile-instr-generate", - "-fcoverage-mapping", - ], - ) - myenv.Append( - CCFLAGS=[ - "-fprofile-instr-generate", - "-fcoverage-mapping", - ], - ) - - sanitizer_option = "-fsanitize=" + ",".join(sanitizer_list) - - if myenv.AddToCCFLAGSIfSupported(sanitizer_option): - myenv.Append(LINKFLAGS=[sanitizer_option]) - myenv.Append(CCFLAGS=["-fno-omit-frame-pointer"]) - else: - 
myenv.ConfError("Failed to enable sanitizers with flag: {0}", sanitizer_option) - - if get_option("shared-libsan") == "on": - shared_libsan_option = "-shared-libsan" - if myenv.AddToCCFLAGSIfSupported(shared_libsan_option): - myenv.Append(LINKFLAGS=[shared_libsan_option]) - - myenv["SANITIZERS_ENABLED"] = sanitizer_list - - if has_option("sanitize-coverage") and using_fsan: - sanitize_coverage_list = get_option("sanitize-coverage") - sanitize_coverage_option = "-fsanitize-coverage=" + sanitize_coverage_list - if myenv.AddToCCFLAGSIfSupported(sanitize_coverage_option): - myenv.Append(LINKFLAGS=[sanitize_coverage_option]) - else: - myenv.ConfError( - "Failed to enable -fsanitize-coverage with flag: {0}", sanitize_coverage_option - ) - - denyfiles_map = { - "address": myenv.File("#etc/asan.denylist"), - "thread": myenv.File("#etc/tsan.denylist"), - "undefined": myenv.File("#etc/ubsan.denylist"), - "memory": myenv.File("#etc/msan.denylist"), - } - - # Select those unique deny files that are associated with the - # currently enabled sanitizers, but filter out those that are - # zero length. - denyfiles = {v for (k, v) in denyfiles_map.items() if k in sanitizer_list} - denyfiles = [f for f in denyfiles if os.stat(f.path).st_size != 0] - - # Filter out any denylist options that the toolchain doesn't support. - supportedDenyfiles = [] - denyfilesTestEnv = myenv.Clone() - for denyfile in denyfiles: - if denyfilesTestEnv.AddToCCFLAGSIfSupported(f"-fsanitize-blacklist={denyfile}"): - supportedDenyfiles.append(denyfile) - denyfilesTestEnv = None - supportedDenyfiles = sorted(supportedDenyfiles) - - # If we ended up with any denyfiles after the above filters, - # then expand them into compiler flag arguments, and use a - # generator to return at command line expansion time so that - # we can change the signature if the file contents change. 
- if supportedDenyfiles: - # Unconditionally using the full path can affect SCons cached builds, so we only do - # this in cases where we know it's going to matter. - denylist_options = [ - f"-fsanitize-blacklist={denyfile.path}" for denyfile in supportedDenyfiles - ] - - if "ICECC" in env and env["ICECC"]: - # Make these files available to remote icecream builds if requested. - # These paths *must* be absolute to match the paths in the remote - # toolchain archive. Local builds remain relative. - local_denylist_options = denylist_options[:] - denylist_options = [ - f"-fsanitize-blacklist={denyfile.abspath}" for denyfile in supportedDenyfiles - ] - - # Build a regex of all the regexes in the denylist - # the regex in the denylist are a shell wildcard format - # https://clang.llvm.org/docs/SanitizerSpecialCaseList.html#format - # so a bit of massaging (* -> .*) to get a python regex. - icecc_denylist_regexes = [] - for denyfile in supportedDenyfiles: - for line in denyfile.get_contents().decode("utf-8").split("\n"): - if line.strip().startswith("src:"): - regex_line = line.replace("src:", "").strip() - regex_line = re.escape(regex_line) - icecc_denylist_regexes += [regex_line.replace("\\*", ".*")] - - icecc_denylist_regex = re.compile("^(?:" + "|".join(icecc_denylist_regexes) + ")$") - - def is_local_compile(env, target, source, for_signature): - return icecc_denylist_regex.match(str(source[0])) is not None - - env["ICECC_LOCAL_COMPILATION_FILTER"] = is_local_compile - # If a sanitizer is in use with a denylist file, we have to ensure they get - # added to the toolchain package that gets sent to the remote hosts so they - # can be found by the remote compiler. - env.Append(ICECC_CREATE_ENV_ADDFILES=supportedDenyfiles) - - if "CCACHE" in env and env["CCACHE"]: - # Work around the fact that some versions of ccache either don't yet support - # -fsanitize-blacklist at all or only support one instance of it. 
This will - # work on any version of ccache because the point is only to ensure that the - # resulting hash for any compiled object is guaranteed to take into account - # the effect of any sanitizer denylist files used as part of the build. - # TODO: This will no longer be required when the following pull requests/ - # issues have been merged and deployed. - # https://github.com/ccache/ccache/pull/258 - # https://github.com/ccache/ccache/issues/318 - env.Append(CCACHE_EXTRAFILES=supportedDenyfiles) - env["CCACHE_EXTRAFILES_USE_SOURCE_PATHS"] = True - - def CCSanitizerDenylistGenerator(source, target, env, for_signature): - # TODO: SERVER-60915 use new conftest API - if "conftest" in str(target[0]): - return "" - - # TODO: SERVER-64620 use scanner instead of for_signature - if for_signature: - return [f.get_csig() for f in supportedDenyfiles] - - # Check if the denylist gets a match and if so it will be local - # build and should use the non-abspath. - # NOTE: in non icecream builds denylist_options becomes relative paths. - if ( - env.subst("$ICECC_LOCAL_COMPILATION_FILTER", target=target, source=source) - == "True" - ): - return local_denylist_options - - return denylist_options - - def LinkSanitizerDenylistGenerator(source, target, env, for_signature): - # TODO: SERVER-60915 use new conftest API - if "conftest" in str(target[0]): - return "" - - # TODO: SERVER-64620 use scanner instead of for_signature - if for_signature: - return [f.get_csig() for f in supportedDenyfiles] - - return denylist_options - - myenv.AppendUnique( - CC_SANITIZER_DENYLIST_GENERATOR=CCSanitizerDenylistGenerator, - LINK_SANITIZER_DENYLIST_GENERATOR=LinkSanitizerDenylistGenerator, - CCFLAGS="${CC_SANITIZER_DENYLIST_GENERATOR}", - LINKFLAGS="${LINK_SANITIZER_DENYLIST_GENERATOR}", - ) - - symbolizer_option = "" - if env.get("LLVM_SYMBOLIZER", False): - llvm_symbolizer = env["LLVM_SYMBOLIZER"] - - if not os.path.isabs(llvm_symbolizer): - # WhereIs looks at the path, but not the PWD. 
If it fails, try assuming - # the path is relative to the PWD. - llvm_symbolizer = myenv.WhereIs(llvm_symbolizer) or os.path.realpath( - llvm_symbolizer - ) - - if not myenv.File(llvm_symbolizer).exists(): - myenv.FatalError(f"Symbolizer binary at path {llvm_symbolizer} does not exist") - - symbolizer_option = f':external_symbolizer_path="{llvm_symbolizer}"' - - elif using_asan or using_tsan or using_ubsan or using_msan: - myenv.FatalError( - "The address, thread, memory, and undefined behavior sanitizers require llvm-symbolizer for meaningful reports. Please set LLVM_SYMBOLIZER to the path to llvm-symbolizer in your SCons invocation" - ) - - if using_asan: - # Unfortunately, abseil requires that we make these macros - # (this, and THREAD_ and UNDEFINED_BEHAVIOR_ below) set, - # because apparently it is too hard to query the running - # compiler. We do this unconditionally because abseil is - # basically pervasive via the 'base' library. - myenv.AppendUnique(CPPDEFINES=["ADDRESS_SANITIZER"]) - # If anything is changed, added, or removed in either asan_options or - # lsan_options, be sure to make the corresponding changes to the - # appropriate build variants in etc/evergreen.yml - asan_options_clear = [ - "detect_leaks=1", - "check_initialization_order=true", - "strict_init_order=true", - "abort_on_error=1", - "disable_coredump=0", - "handle_abort=1", - "strict_string_checks=true", - "detect_invalid_pointer_pairs=1", - ] - asan_options = ":".join(asan_options_clear) - lsan_options = ( - f"report_objects=1:suppressions={myenv.File('#etc/lsan.suppressions').abspath}" - ) - env["ENV"]["ASAN_OPTIONS"] = asan_options + symbolizer_option - env["ENV"]["LSAN_OPTIONS"] = lsan_options + symbolizer_option - - if using_msan: - # Makes it easier to debug memory failures at the cost of some perf - myenv.Append(CCFLAGS=["-fsanitize-memory-track-origins"]) - env["ENV"]["MSAN_OPTIONS"] = symbolizer_option - if using_tsan: - if use_libunwind: - # TODO: SERVER-48622 - # - # See 
https://github.com/google/sanitizers/issues/943 - # for why we disallow combining TSAN with - # libunwind. We could, atlernatively, have added logic - # to automate the decision about whether to enable - # libunwind based on whether TSAN is enabled, but that - # logic is already complex, and it feels better to - # make it explicit that using TSAN means you won't get - # the benefits of libunwind. Fixing this is: - env.FatalError( - "Cannot use libunwind with TSAN, please add --use-libunwind=off to your compile flags" - ) - - # We add supressions based on the library file in etc/tsan.suppressions - # so the link-model needs to be dynamic. - if not link_model.startswith("dynamic"): - env.FatalError("TSAN is only supported with dynamic link models") - - # If anything is changed, added, or removed in - # tsan_options, be sure to make the corresponding changes - # to the appropriate build variants in etc/evergreen.yml - # - # TODO SERVER-49121: die_after_fork=0 is a temporary - # setting to allow tests to continue while we figure out - # why we're running afoul of it. - # - # TODO SERVER-65936: report_thread_leaks=0 suppresses - # reporting thread leaks, which we have because we don't - # do a clean shutdown of the ServiceContext. - # - tsan_options = f"abort_on_error=1:disable_coredump=0:handle_abort=1:halt_on_error=1:report_thread_leaks=0:die_after_fork=0:history_size=4:suppressions={myenv.File('#etc/tsan.suppressions').abspath}" - myenv["ENV"]["TSAN_OPTIONS"] = tsan_options + symbolizer_option - myenv.AppendUnique(CPPDEFINES=["THREAD_SANITIZER"]) - - if using_ubsan: - # By default, undefined behavior sanitizer doesn't stop on - # the first error. Make it so. Newer versions of clang - # have renamed the flag. 
- if not myenv.AddToCCFLAGSIfSupported("-fno-sanitize-recover"): - myenv.AddToCCFLAGSIfSupported("-fno-sanitize-recover=undefined") - myenv.AppendUnique(CPPDEFINES=["UNDEFINED_BEHAVIOR_SANITIZER"]) - - # If anything is changed, added, or removed in ubsan_options, be - # sure to make the corresponding changes to the appropriate build - # variants in etc/evergreen.yml - ubsan_options = "print_stacktrace=1" - myenv["ENV"]["UBSAN_OPTIONS"] = ubsan_options + symbolizer_option - - # In dynamic builds, the `vptr` sanitizer check can - # require additional LIBDEPS edges. That is very - # inconvenient, because such builds can't use z,defs. The - # result is a very fragile link graph, where refactoring - # the link graph in one place can have surprising effects - # in others. Instead, we just disable the `vptr` sanitizer - # for dynamic builds. We tried some other approaches in - # SERVER-49798 of adding a new LIBDEPS_TYPEINFO type, but - # that didn't address the fundamental issue that the - # correct link graph for a dynamic+ubsan build isn't the - # same as the correct link graph for a regular dynamic - # build. - if link_model == "dynamic": - if myenv.AddToCCFLAGSIfSupported("-fno-sanitize=vptr"): - myenv.AppendUnique(LINKFLAGS=["-fno-sanitize=vptr"]) - - if myenv.ToolchainIs("msvc") and optBuild != "off": - # http://blogs.msdn.com/b/vcblog/archive/2013/09/11/introducing-gw-compiler-switch.aspx - # - myenv.Append(CCFLAGS=["/Gw", "/Gy"]) - myenv.Append(LINKFLAGS=["/OPT:REF"]) - - # http://blogs.msdn.com/b/vcblog/archive/2014/03/25/linker-enhancements-in-visual-studio-2013-update-2-ctp2.aspx - # - myenv.Append(CCFLAGS=["/Zc:inline"]) - - if myenv.ToolchainIs("clang"): - # We add this flag to make clang emit debug info for c++ stl types so that our pretty - # printers will work with newer clang's which omit this debug info. This does increase - # the overall debug info size. 
- myenv.AddToCCFLAGSIfSupported("-fno-limit-debug-info") - - if myenv.ToolchainIs("gcc", "clang"): - # Pass -gdwarf{32,64} if an explicit value was selected - # or defaulted. Fail the build if we can't honor the - # selection. - if myenv["DWARF_WIDTH"]: - if myenv.AddToCCFLAGSIfSupported("-gdwarf$DWARF_WIDTH"): - myenv.AppendUnique(LINKFLAGS=["-gdwarf$DWARF_WIDTH"]) - else: - myenv.FatalError("Could not enable selected dwarf width") - - # try to determine the if dwarf64 is viable, and fallback to dwarf32 if not - elif myenv.CheckCCFLAGSSupported("-gdwarf64"): - - def CheckForDWARF64Support(context): - context.Message("Checking that DWARF64 format is viable... ") - try: - dwarf_version = int(myenv.get("DWARF_VERSION", 0)) - except ValueError: - dwarf_version = None - - if dwarf_version is None or dwarf_version <= 4: - result = False - else: - test_body = """ - #include - #include - int main() { - std::cout << "Hello, World" << std::endl; - return EXIT_SUCCESS; - } - """ - original_ccflags = context.env.get("CCFLAGS") - original_linkflags = context.env.get("LINKFLAGS") - - context.env.Append(CCFLAGS=["-gdwarf64"], LINKFLAGS=["-gdwarf64"]) - - ret = context.TryLink(textwrap.dedent(test_body), ".cpp") - - context.env["CCFLAGS"] = original_ccflags - context.env["LINKFLAGS"] = original_linkflags - - if not ret: - context.Result("unknown") - return False - - regex = re.compile(r"^\s*Length:.*[64|32]-bit\)$", re.MULTILINE) - p = subprocess.run( - [context.env["READELF"], "-wi", context.lastTarget.path], - capture_output=True, - text=True, - ) - matches = re.findall(regex, p.stdout) - address_types = set() - for match in matches: - address_types.add(match[-len("(XX-bit)") :]) - result = len(address_types) == 1 and list(address_types)[0] == "(64-bit)" - - context.Result(result) - return result - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckForDWARF64Support": CheckForDWARF64Support, - }, - ) - - if conf.CheckForDWARF64Support(): - myenv["DWARF_WIDTH"] 
= 64 - myenv.AppendUnique(LINKFLAGS=["-gdwarf64"], CCFLAGS=["-gdwarf64"]) - else: - myenv["DWARF_WIDTH"] = 32 - myenv.AppendUnique(LINKFLAGS=["-gdwarf32"], CCFLAGS=["-gdwarf32"]) - - conf.Finish() - - if myenv["DWARF_WIDTH"] == 32 and link_model != "dynamic": - # This will create an extra section where debug types can be referred from, - # reducing other section sizes. This helps most with big static links as there - # will be lots of duplicate debug type info. - if myenv.AddToCCFLAGSIfSupported("-fdebug-types-section"): - myenv.AppendUnique(LINKFLAGS=["-fdebug-types-section"]) - - # Turn off debug symbols. Due to g0 disabling any previously added debugging flags, - # it is easier to append g0 near the end rather than trying to not add all the other - # debug flags. This should be added after any debug flags. - if get_option("debug-symbols") == "off": - myenv.AppendUnique(LINKFLAGS=["-g0"], CCFLAGS=["-g0"]) - elif get_option("debug-symbols") == "minimal": - myenv.AppendUnique(LINKFLAGS=["-g1"], CCFLAGS=["-g1"]) - - # Our build is already parallel. - if not myenv.AddToLINKFLAGSIfSupported("-Wl,--no-threads"): - myenv.AddToLINKFLAGSIfSupported("--Wl,--threads=1") - - # Explicitly enable GNU build id's if the linker supports it. - myenv.AddToLINKFLAGSIfSupported("-Wl,--build-id") - - # Explicitly use the new gnu hash section if the linker offers - # it, except on android since older runtimes seem to not - # support it. For that platform, use 'both'. - if env.TargetOSIs("android"): - myenv.AddToLINKFLAGSIfSupported("-Wl,--hash-style=both") - else: - myenv.AddToLINKFLAGSIfSupported("-Wl,--hash-style=gnu") - - # Try to have the linker tell us about ODR violations. Don't - # use it when using clang with libstdc++, as libstdc++ was - # probably built with GCC. That combination appears to cause - # false positives for the ODR detector. See SERVER-28133 for - # additional details. 
- if has_option("detect-odr-violations"): - if myenv.ToolchainIs("clang") and usingLibStdCxx: - env.FatalError( - "The --detect-odr-violations flag does not work with clang and libstdc++" - ) - if optBuild != "off": - env.FatalError( - "The --detect-odr-violations flag is expected to only be reliable with --opt=off" - ) - if linker_ld != "gold": - myenv.FatalError( - "The --detect-odr-violations flag currently only works with --linker=gold" - ) - myenv.AddToLINKFLAGSIfSupported("-Wl,--detect-odr-violations") - - # Disallow an executable stack. Also, issue a warning if any files are found that would - # cause the stack to become executable if the noexecstack flag was not in play, so that we - # can find them and fix them. We do this here after we check for ld.gold because the - # --warn-execstack is currently only offered with gold. - myenv.AddToLINKFLAGSIfSupported("-Wl,-z,noexecstack") - myenv.AddToLINKFLAGSIfSupported("-Wl,--warn-execstack") - - # If possible with the current linker, mark relocations as read-only. 
- myenv.AddToLINKFLAGSIfSupported("-Wl,-z,relro") - - if has_option("thin-lto"): - if not myenv.AddToCCFLAGSIfSupported( - "-flto=thin" - ) or not myenv.AddToLINKFLAGSIfSupported("-flto=thin"): - myenv.ConfError("Failed to enable thin LTO") - - if linker_ld != "gold" and not env.TargetOSIs("darwin", "macOS") and optBuild != "off": - if has_option("pgo"): - print("WARNING: skipping symbol ordering as pgo is enabled") - else: - myenv.AppendUnique( - CCFLAGS=["-ffunction-sections"], - LINKFLAGS=[ - "-Wl,--symbol-ordering-file=symbols.orderfile", - "-Wl,--no-warn-symbol-ordering", - ], - ) - else: - print("WARNING: lld linker is required to sort symbols") - - if has_option("pgo-profile"): - if ( - not myenv.ToolchainIs("clang") - or not myenv.TargetOSIs("linux") - or linker_ld == "gold" - ): - myenv.FatalError("Error: pgo only works on linux with clang + lld") - myenv.AppendUnique( - CCFLAGS=["-fprofile-instr-generate"], - LINKFLAGS=["-fprofile-instr-generate"], - ) - - if has_option("pgo"): - if ( - not myenv.ToolchainIs("clang") - or not myenv.TargetOSIs("linux") - or linker_ld == "gold" - ): - myenv.FatalError("Error: pgo only works on linux with clang + lld") - myenv.AppendUnique( - _NON_CONF_CCFLAGS_GEN=["-fprofile-use=./default.profdata"], - ) - myenv["CCFLAGS_WERROR"].remove("-Werror") - - # As far as we know these flags only apply on posix-y systems, - # and not on Darwin. - if env.TargetOSIs("posix") and not env.TargetOSIs("darwin"): - # Disable debug compression in both the assembler and linker - # by default. If the user requested compression, only allow - # the zlib-gabi form. - debug_compress = get_option("debug-compress") - - # If a value was provided on the command line for --debug-compress, it should - # inhibit the application of auto, so strip it out. 
- if "auto" in debug_compress and len(debug_compress) > 1: - debug_compress = debug_compress[1:] - - # Disallow saying --debug-compress=off --debug-compress=ld and similar - if "off" in debug_compress and len(debug_compress) > 1: - env.FatalError("Cannot combine 'off' for --debug-compress with other values") - - # Transform the 'auto' argument into a real value. - if "auto" in debug_compress: - debug_compress = [] - - # We only automatically enable ld compression for - # dynamic builds because it seems to use enormous - # amounts of memory in static builds. - if link_model.startswith("dynamic"): - debug_compress.append("ld") - - compress_type = "zlib-gabi" - compress_flag = "compress-debug-sections" - - myenv.AddToCCFLAGSIfSupported( - f"-Wa,--{compress_flag}={compress_type}" - if "as" in debug_compress - else f"-Wa,--no{compress_flag}", - ) - - # We shouldn't enable debug compression in the linker - # (meaning our final binaries contain compressed debug - # info) unless our local elf environment appears to at - # least be aware of SHF_COMPRESSED. This seems like a - # necessary precondition, but is it sufficient? - # - # https://gnu.wildebeest.org/blog/mjw/2016/01/13/elf-libelf-compressed-sections-and-elfutils/ - - def CheckElfHForSHF_COMPRESSED(context): - test_body = """ - #include - #if !defined(SHF_COMPRESSED) - #error - #endif - """ - - context.Message("Checking elf.h for SHF_COMPRESSED... 
") - ret = context.TryCompile(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckElfHForSHF_COMPRESSED": CheckElfHForSHF_COMPRESSED, - }, - ) - - have_shf_compressed = conf.CheckElfHForSHF_COMPRESSED() - conf.Finish() - - if have_shf_compressed and "ld" in debug_compress: - myenv.AddToLINKFLAGSIfSupported( - f"-Wl,--{compress_flag}={compress_type}", - ) - else: - myenv.AddToLINKFLAGSIfSupported( - f"-Wl,--{compress_flag}=none", - ) - - if "fnsi" in selected_experimental_optimizations: - myenv.AddToCCFLAGSIfSupported("-fno-semantic-interposition") - - # Avoid deduping symbols on OS X debug builds, as it takes a long time. - if optBuild == "off" and myenv.ToolchainIs("clang") and env.TargetOSIs("darwin"): - myenv.AddToLINKFLAGSIfSupported("-Wl,-no_deduplicate") - - # Apply any link time optimization settings as selected by the 'lto' option. - if has_option("lto"): - if myenv.ToolchainIs("msvc"): - # Note that this is actually more aggressive than LTO, it is whole program - # optimization due to /GL. However, this is historically what we have done for - # windows, so we are keeping it. - # - # /GL implies /LTCG, so no need to say it in CCFLAGS, but we do need /LTCG on the - # link flags. - myenv.Append(CCFLAGS=["/GL"]) - myenv.Append(LINKFLAGS=["/LTCG"]) - myenv.Append(ARFLAGS=["/LTCG"]) - elif myenv.ToolchainIs("gcc", "clang"): - # For GCC and clang, the flag is -flto, and we need to pass it both on the compile - # and link lines. 
- if not myenv.AddToCCFLAGSIfSupported("-flto") or not myenv.AddToLINKFLAGSIfSupported( - "-flto" - ): - myenv.ConfError( - "Link time optimization requested, " - "but selected compiler does not honor -flto" - ) - - if myenv.TargetOSIs("darwin"): - myenv.AddToLINKFLAGSIfSupported("-Wl,-object_path_lto,${TARGET}.lto") - else: - # According to intel benchmarks -fno-plt increases perf - # See PM-2215 - if linker_ld != "gold": - myenv.ConfError("lto compilation currently only works with the --linker=gold") - if link_model != "object": - myenv.ConfError( - "lto compilation currently only works with the --link-model=object" - ) - if not myenv.AddToCCFLAGSIfSupported( - "-fno-plt" - ) or not myenv.AddToLINKFLAGSIfSupported("-fno-plt"): - myenv.ConfError("-fno-plt is not supported by the compiler") - - else: - myenv.ConfError("Don't know how to enable --lto on current toolchain") - - if get_option("runtime-hardening") == "on" and optBuild != "off": - # Older glibc doesn't work well with _FORTIFY_SOURCE=2. Selecting 2.11 as the minimum was an - # emperical decision, as that is the oldest non-broken glibc we seem to require. It is possible - # that older glibc's work, but we aren't trying. 
- # - # https://gforge.inria.fr/tracker/?func=detail&group_id=131&atid=607&aid=14070 - # https://github.com/jedisct1/libsodium/issues/202 - def CheckForGlibcKnownToSupportFortify(context): - test_body = """ - #include - #if !__GLIBC_PREREQ(2, 11) - #error - #endif - """ - context.Message("Checking for glibc with non-broken _FORTIFY_SOURCE...") - ret = context.TryCompile(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "CheckForFortify": CheckForGlibcKnownToSupportFortify, - }, - ) - - # Fortify only possibly makes sense on POSIX systems, and we know that clang is not a valid - # combination: - # - # http://lists.llvm.org/pipermail/cfe-dev/2015-November/045852.html - # - if env.TargetOSIs("posix") and not env.ToolchainIs("clang") and conf.CheckForFortify(): - conf.env.Append( - CPPDEFINES=[ - ("_FORTIFY_SOURCE", 2), - ], - ) - - myenv = conf.Finish() - - # Our build generally assumes that we have C11-compliant libc headers for - # C++ source. On most systems, that will be the case. However, on systems - # using glibc older than 2.18 (or other libc implementations that have - # stubbornly refused to update), we need to add some preprocessor defines. - # - # See: https://sourceware.org/bugzilla/show_bug.cgi?id=15366 - # - # These headers are only fully standards-compliant on POSIX platforms. Windows - # in particular doesn't implement inttypes.h - if env.TargetOSIs("posix"): - - def NeedStdCLimitMacros(context): - test_body = """ - #undef __STDC_LIMIT_MACROS - #include - #if defined(INT64_MAX) - # error - #endif - """ - context.Message("Checking whether to define __STDC_LIMIT_MACROS... 
") - ret = context.TryCompile(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - def NeedStdCConstantMacros(context): - test_body = """ - #undef __STDC_CONSTANT_MACROS - #include - #if defined(INTMAX_C) - # error - #endif - """ - context.Message("Checking whether to define __STDC_CONSTANT_MACROS... ") - ret = context.TryCompile(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - def NeedStdCFormatMacros(context): - test_body = """ - #undef __STDC_FORMAT_MACROS - #include - #if defined(PRIx64) - # error - #endif - """ - context.Message("Checking whether to define __STDC_FORMAT_MACROS... ") - ret = context.TryCompile(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - conf = Configure( - myenv, - help=False, - custom_tests={ - "NeedStdCLimitMacros": NeedStdCLimitMacros, - "NeedStdCConstantMacros": NeedStdCConstantMacros, - "NeedStdCFormatMacros": NeedStdCFormatMacros, - }, - ) - - conf.env.AppendUnique( - CPPDEFINES=[ - "__STDC_LIMIT_MACROS" if conf.NeedStdCLimitMacros() else "", - "__STDC_CONSTANT_MACROS" if conf.NeedStdCConstantMacros() else "", - "__STDC_FORMAT_MACROS" if conf.NeedStdCFormatMacros() else "", - ] - ) - - myenv = conf.Finish() - - # We set this with GCC on x86 platforms to work around - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=43052 - if myenv.ToolchainIs("gcc") and (env["TARGET_ARCH"] in ["i386", "x86_64"]): - if "builtin-memcmp" not in selected_experimental_optimizations: - myenv.AddToCCFLAGSIfSupported("-fno-builtin-memcmp") - - def CheckBoostMinVersion(context): - compile_test_body = textwrap.dedent(""" - #include - - #if BOOST_VERSION < 104900 - #error - #endif - """) - - context.Message("Checking if system boost version is 1.49 or newer...") - result = context.TryCompile(compile_test_body, ".cpp") - context.Result(result) - return result - - conf = Configure( - myenv, - custom_tests={ - "CheckBoostMinVersion": CheckBoostMinVersion, - }, - ) - - 
libdeps.setup_conftests(conf) - - ### --ssl checks - def checkOpenSSL(conf): - sslLibName = "ssl" - cryptoLibName = "crypto" - sslLinkDependencies = ["crypto", "dl"] - if conf.env.TargetOSIs("freebsd"): - sslLinkDependencies = ["crypto"] - - if conf.env.TargetOSIs("windows"): - sslLibName = "ssleay32" - cryptoLibName = "libeay32" - sslLinkDependencies = ["libeay32"] - - # Used to import system certificate keychains - if conf.env.TargetOSIs("darwin"): - conf.env.AppendUnique( - FRAMEWORKS=[ - "CoreFoundation", - "Security", - ] - ) - - def maybeIssueDarwinSSLAdvice(env): - if env.TargetOSIs("macOS"): - advice = textwrap.dedent("""\ - NOTE: Recent versions of macOS no longer ship headers for the system OpenSSL libraries. - NOTE: Either build without the --ssl flag, or describe how to find OpenSSL. - NOTE: Set the include path for the OpenSSL headers with the CPPPATH SCons variable. - NOTE: Set the library path for OpenSSL libraries with the LIBPATH SCons variable. - NOTE: If you are using HomeBrew, and have installed OpenSSL, this might look like: - \tscons CPPPATH=/usr/local/opt/openssl/include LIBPATH=/usr/local/opt/openssl/lib ... - NOTE: Consult the output of 'brew info openssl' for details on the correct paths.""") - print(advice) - brew = env.WhereIs("brew") - if brew: - try: - # TODO: If we could programmatically extract the paths from the info output - # we could give a better message here, but brew info's machine readable output - # doesn't seem to include the whole 'caveats' section. - message = subprocess.check_output([brew, "info", "openssl"]).decode("utf-8") - advice = textwrap.dedent("""\ - NOTE: HomeBrew installed to {0} appears to have OpenSSL installed. 
- NOTE: Consult the output from '{0} info openssl' to determine CPPPATH and LIBPATH.""").format( - brew, message - ) - - print(advice) - except: - pass - - if not conf.CheckLibWithHeader( - cryptoLibName, - ["openssl/crypto.h"], - "C", - "SSLeay_version(0);", - autoadd=True, - ): - maybeIssueDarwinSSLAdvice(conf.env) - conf.env.ConfError("Couldn't find OpenSSL crypto.h header and library") - - def CheckLibSSL(context): - res = SCons.Conftest.CheckLib( - context, - libs=[sslLibName], - extra_libs=sslLinkDependencies, - header='#include "openssl/ssl.h"', - language="C", - call="SSL_version(NULL);", - autoadd=True, - ) - context.did_show_result = 1 - return not res - - conf.AddTest("CheckLibSSL", CheckLibSSL) - - if not conf.CheckLibSSL(): - maybeIssueDarwinSSLAdvice(conf.env) - conf.env.ConfError("Couldn't find OpenSSL ssl.h header and library") - - def CheckLinkSSL(context): - test_body = """ - #include - #include - #include - - int main() { - SSL_library_init(); - SSL_load_error_strings(); - ERR_load_crypto_strings(); - - OpenSSL_add_all_algorithms(); - ERR_free_strings(); - - return EXIT_SUCCESS; - } - """ - context.Message("Checking that linking to OpenSSL works...") - ret = context.TryLink(textwrap.dedent(test_body), ".c") - context.Result(ret) - return ret - - conf.AddTest("CheckLinkSSL", CheckLinkSSL) - - if not conf.CheckLinkSSL(): - maybeIssueDarwinSSLAdvice(conf.env) - conf.env.ConfError("SSL is enabled, but is unavailable") - - # We require ssl by default unless the user has specified --ssl=off - require_ssl = get_option("ssl") != "off" - - # The following platform checks setup both - # ssl_provider for TLS implementation - # and MONGO_CRYPTO for hashing support. 
- # - # MONGO_CRYPTO is always enabled regardless of --ssl=on/off - # However, ssl_provider will be rewritten to 'none' if --ssl=off - if conf.env.TargetOSIs("windows"): - # SChannel on Windows - ssl_provider = "windows" - conf.env.Append(MONGO_CRYPTO=["windows"]) - - elif conf.env.TargetOSIs("darwin", "macOS"): - # SecureTransport on macOS - ssl_provider = "apple" - conf.env.Append(MONGO_CRYPTO=["apple"]) - conf.env.AppendUnique(FRAMEWORKS=["CoreFoundation", "Security"]) - - elif require_ssl: - checkOpenSSL(conf) - # Working OpenSSL available, use it. - conf.env.Append(MONGO_CRYPTO=["openssl"]) - ssl_provider = "openssl" - - else: - # If we don't need an SSL build, we can get by with TomCrypt. - conf.env.Append(MONGO_CRYPTO=["tom"]) - - if require_ssl: - # Either crypto engine is native, - # or it's OpenSSL and has been checked to be working. - print("Using SSL Provider: {0}".format(ssl_provider)) - else: - ssl_provider = "none" - - def checkHTTPLib(required=False): - # WinHTTP available on Windows - if env.TargetOSIs("windows"): - return True - - # libcurl on all other platforms - if conf.CheckLibWithHeader( - "curl", - ["curl/curl.h"], - "C", - "curl_global_init(0);", - autoadd=False, - ): - return True - - if required: - env.ConfError("Could not find and curl lib") - - return False - - if use_system_version_of_library("pcre2"): - conf.FindSysLibDep("pcre2", ["pcre2-8"]) - else: - conf.env.Prepend(CPPDEFINES=["PCRE2_STATIC"]) - - if use_system_version_of_library("snappy"): - conf.FindSysLibDep("snappy", ["snappy"]) - - if use_system_version_of_library("zlib"): - conf.FindSysLibDep("zlib", ["zdll" if conf.env.TargetOSIs("windows") else "z"]) - - if use_system_version_of_library("zstd"): - conf.FindSysLibDep("zstd", ["libzstd" if conf.env.TargetOSIs("windows") else "zstd"]) - - if use_system_version_of_library("stemmer"): - conf.FindSysLibDep("stemmer", ["stemmer"]) - - if use_system_version_of_library("yaml"): - conf.FindSysLibDep("yaml", ["yaml-cpp"]) - - if 
use_system_version_of_library("fmt"): - conf.FindSysLibDep("fmt", ["fmt"]) - - if use_system_version_of_library("tomcrypt"): - conf.FindSysLibDep("tomcrypt", ["tomcrypt"]) - - if use_system_version_of_library("libunwind"): - conf.FindSysLibDep("unwind", ["unwind"]) - if not conf.FindSysLibDep("lzma", ["lzma"]): - myenv.ConfError( - "Cannot find system library 'lzma' required for use with system libunwind" - ) - - if use_system_version_of_library("intel_decimal128"): - conf.FindSysLibDep("intel_decimal128", ["bid"]) - - if use_system_version_of_library("icu"): - conf.FindSysLibDep("icudata", ["icudata"]) - # We can't use FindSysLibDep() for icui18n and icuuc below, since SConf.CheckLib() (which - # FindSysLibDep() relies on) doesn't expose an 'extra_libs' parameter to indicate that the - # library being tested has additional dependencies (icuuc depends on icudata, and icui18n - # depends on both). As a workaround, we skip the configure check for these two libraries and - # manually assign the library name. We hope that if the user has icudata installed on their - # system, then they also have icu18n and icuuc installed. - conf.env["LIBDEPS_ICUI18N_SYSLIBDEP"] = "icui18n" - conf.env["LIBDEPS_ICUUC_SYSLIBDEP"] = "icuuc" - - if wiredtiger and use_system_version_of_library("wiredtiger"): - if not conf.CheckCXXHeader("wiredtiger.h"): - myenv.ConfError("Cannot find wiredtiger headers") - conf.FindSysLibDep("wiredtiger", ["wiredtiger"]) - - conf.env.Append( - CPPDEFINES=[ - "ABSL_FORCE_ALIGNED_ACCESS", - "BOOST_ENABLE_ASSERT_DEBUG_HANDLER", - # TODO: Ideally, we could not set this define in C++20 - # builds, but at least our current Xcode 12 doesn't offer - # std::atomic_ref, so we cannot. 
- "BOOST_FILESYSTEM_NO_CXX20_ATOMIC_REF", - "BOOST_LOG_NO_SHORTHAND_NAMES", - "BOOST_LOG_USE_NATIVE_SYSLOG", - "BOOST_LOG_WITHOUT_THREAD_ATTR", - "BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS", - "BOOST_SYSTEM_NO_DEPRECATED", - "BOOST_THREAD_USES_DATETIME", - ("BOOST_THREAD_VERSION", "5"), - ] - ) - - if link_model.startswith("dynamic") and not link_model == "dynamic-sdk": - conf.env.AppendUnique( - CPPDEFINES=[ - "BOOST_LOG_DYN_LINK", - ] - ) - - if use_system_version_of_library("protobuf"): - conf.FindSysLibDep("protobuf", ["protobuf"]) - conf.FindSysLibDep("protoc", ["protoc"]) - - if use_system_version_of_library("grpc"): - conf.FindSysLibDep("grpc", ["grpc"]) - conf.FindSysLibDep("grpcxx", ["grpc++"]) - conf.FindSysLibDep("grpcxx_reflection", ["grpc++_reflection"]) - - if posix_system: - conf.CheckLib("rt") - conf.CheckLib("dl") - - conf.env["_HAVEPCAP"] = conf.CheckLib(["pcap", "wpcap"], autoadd=False) - - if env.TargetOSIs("solaris"): - conf.CheckLib("nsl") - - conf.env["MONGO_BUILD_SASL_CLIENT"] = bool(has_option("use-sasl-client")) - if conf.env["MONGO_BUILD_SASL_CLIENT"] and not conf.CheckLibWithHeader( - "sasl2", - ["stddef.h", "sasl/sasl.h"], - "C", - "sasl_version_info(0, 0, 0, 0, 0, 0);", - autoadd=False, - ): - myenv.ConfError("Couldn't find SASL header/libraries") - - # requires ports devel/libexecinfo to be installed - if env.TargetOSIs("freebsd", "openbsd"): - if not conf.CheckLib("execinfo"): - myenv.ConfError("Cannot find libexecinfo, please install devel/libexecinfo.") - - # 'tcmalloc' needs to be the last library linked. Please, add new libraries before this - # point. 
- if myenv["MONGO_ALLOCATOR"] == "tcmalloc-google": - if use_system_version_of_library("tcmalloc-google"): - conf.FindSysLibDep("tcmalloc-google", ["tcmalloc"]) - elif myenv["MONGO_ALLOCATOR"] == "tcmalloc-gperf": - if use_system_version_of_library("tcmalloc-gperf"): - conf.FindSysLibDep("tcmalloc-gperf", ["tcmalloc"]) - elif myenv["MONGO_ALLOCATOR"] in ["system"]: - pass - else: - myenv.FatalError(f"Invalid --allocator parameter: {env['MONGO_ALLOCATOR']}") - - def CheckStdAtomic(context, base_type, extra_message): - test_body = """ - #include - - int main(int argc, char* argv[]) {{ - std::atomic<{0}> x; - - x.store(0); - // Use argc to ensure we can't optimize everything away. - {0} y = argc; - x.fetch_add(y); - x.fetch_sub(y); - x.exchange(y); - if (x.compare_exchange_strong(y, x) && x.is_lock_free()) - return 0; - return x.load(); - }} - """.format(base_type) - - context.Message( - "Checking if std::atomic<{0}> works{1}... ".format(base_type, extra_message) - ) - - ret = context.TryLink(textwrap.dedent(test_body), ".cpp") - context.Result(ret) - return ret - - conf.AddTest("CheckStdAtomic", CheckStdAtomic) - - def check_all_atomics(extra_message=""): - for t in ("int64_t", "uint64_t", "int32_t", "uint32_t"): - if not conf.CheckStdAtomic(t, extra_message): - return False - return True - - if not check_all_atomics(): - if not conf.CheckLib("atomic", symbol=None, header=None, language="C", autoadd=1): - myenv.ConfError( - "Some atomic ops are not intrinsically supported, but " "no libatomic found" - ) - if not check_all_atomics(" with libatomic"): - myenv.ConfError("The toolchain does not support std::atomic, cannot continue") - - def CheckMongoCMinVersion(context): - compile_test_body = textwrap.dedent(""" - #include - - #if !MONGOC_CHECK_VERSION(1,13,0) - #error - #endif - """) - - context.Message("Checking if mongoc version is 1.13.0 or newer...") - result = context.TryCompile(compile_test_body, ".cpp") - context.Result(result) - return result - - 
conf.AddTest("CheckMongoCMinVersion", CheckMongoCMinVersion) - - conf.env["MONGO_HAVE_LIBMONGOC"] = False - - # ask each module to configure itself and the build environment. - moduleconfig.configure_modules(mongo_modules, conf) - - # Resolve --enable-http-client - if http_client == "auto": - if checkHTTPLib(): - http_client = "on" - else: - print("Disabling http-client as libcurl was not found") - http_client = "off" - elif http_client == "on": - checkHTTPLib(required=True) - - if get_option("xray") == "on": - if not (myenv.ToolchainIs("clang") and env.TargetOSIs("linux")): - env.FatalError("LLVM Xray is only supported with clang on linux") - - myenv.AppendUnique( - CCFLAGS=[ - "-fxray-instrument", - "-fxray-instruction-threshold=" + str(get_option("xray-instruction-threshold")), - ], - LINKFLAGS=["-fxray-instrument"], - ) - - if "ldap" in myenv.get("MONGO_ENTERPRISE_FEATURES", []): - if myenv.TargetOSIs("windows"): - conf.env["MONGO_LDAP_LIB"] = ["Wldap32"] - else: - have_ldap_h = conf.CheckLibWithHeader( - "ldap", - ["ldap.h"], - "C", - 'ldap_is_ldap_url("ldap://127.0.0.1");', - autoadd=False, - ) - - have_lber_h = conf.CheckLibWithHeader( - "lber", - ["lber.h"], - "C", - "ber_free(NULL, 0);", - autoadd=False, - ) - - if have_ldap_h: - conf.env.AppendUnique(MONGO_LDAP_LIB=["ldap"]) - else: - myenv.ConfError( - "Could not find and ldap library from OpenLDAP, " - "required for LDAP authorization in the enterprise build" - ) - - if have_lber_h: - conf.env.AppendUnique(MONGO_LDAP_LIB=["lber"]) - else: - myenv.ConfError( - "Could not find and lber library from OpenLDAP, " - "required for LDAP authorizaton in the enterprise build" - ) - - if "sasl" in myenv.get("MONGO_ENTERPRISE_FEATURES", []): - if conf.env.TargetOSIs("windows"): - conf.env["MONGO_GSSAPI_IMPL"] = "sspi" - conf.env["MONGO_GSSAPI_LIB"] = ["secur32"] - else: - if conf.CheckLib(library="gssapi_krb5", autoadd=False): - conf.env["MONGO_GSSAPI_IMPL"] = "gssapi" - if conf.env.TargetOSIs("freebsd"): - 
conf.env.AppendUnique(MONGO_GSSAPI_LIB=["gssapi"]) - conf.env.AppendUnique(MONGO_GSSAPI_LIB=["gssapi_krb5"]) - - myenv = conf.Finish() - - return myenv - - -env = doConfigure(env) -saslconf = Configure(env) - -have_sasl_lib = saslconf.CheckLibWithHeader( - "sasl2", - ["stddef.h", "sasl/sasl.h"], - "C", - "sasl_version_info(0, 0, 0, 0, 0, 0);", - autoadd=False, -) - -saslconf.Finish() - -env["NINJA_SYNTAX"] = "#site_scons/third_party/ninja_syntax.py" - -if env.ToolchainIs("clang"): - env["ICECC_COMPILER_TYPE"] = "clang" -elif env.ToolchainIs("gcc"): - env["ICECC_COMPILER_TYPE"] = "gcc" - -# Now that we are done with configure checks, enable ccache and -# icecream if requested. -if "CCACHE" in env and env["CCACHE"]: - ccache = Tool("ccache") - if not ccache.exists(env): - env.FatalError(f"Failed to load ccache tool with CCACHE={env['CCACHE']}") - ccache(env) -if "ICECC" in env and env["ICECC"]: - env["ICECREAM_VERBOSE"] = env.Verbose() - env["ICECREAM_TARGET_DIR"] = "$BUILD_ROOT/scons/icecream" - - # Posssibly multiple ninja files are in play, and there are cases where ninja will - # use the wrong icecc run script, so we must create a unique script per ninja variant - # for ninja to track separately. We will use the variant dir which contains the each - # separate ninja builds meta files. This has to be under an additional flag then just - # ninja disabled, because the run icecc script is generated under a context where ninja - # is always disabled via the scons callback mechanism. The __NINJA_NO flag is intended - # to differentiate this particular context. 
- if env.get("__NINJA_NO") or get_option("ninja") != "disabled": - env["ICECREAM_RUN_SCRIPT_SUBPATH"] = "$VARIANT_DIR" - - icecream = Tool("icecream") - if not icecream.exists(env): - # SERVER-70648: Need to revert on how to update icecream - if "ICECREAM_VERSION" in env and env["ICECREAM_VERSION"] < parse_version("1.3"): - env.FatalError( - textwrap.dedent("""\ - Please refer to the following commands to update your icecream: - sudo add-apt-repository ppa:mongodb-dev-prod/mongodb-build - sudo apt update - sudo apt-get --only-upgrade install icecc - """) - ) - - env.FatalError(f"Failed to load icecream tool with ICECC={env['ICECC']}") - icecream(env) - -# Defaults for SCons provided flags. SetOption only sets the option to our value -# if the user did not provide it. So for any flag here if it's explicitly passed -# the values below set with SetOption will be overwritten. -# -# Default j to the number of CPUs on the system. Note: in containers this -# reports the number of CPUs for the host system. Perhaps in a future version of -# psutil it will instead report the correct number when in a container. -# -# The presence of the variable ICECC means the icecream tool is -# enabled and so the default j value should scale accordingly. In this -# scenario multiply the cpu count by 8 to set a reasonable default since the -# cluster can handle many more jobs than your local machine but is -# still throttled by your cpu count in the sense that you can only -# handle so many python threads sending out jobs. -# -# Capitalize on the weird way SCons handles arguments to determine if -# the user configured it or not. If not, it is under our control. Try -# to set some helpful defaults. -initial_num_jobs = env.GetOption("num_jobs") -altered_num_jobs = initial_num_jobs + 1 -env.SetOption("num_jobs", altered_num_jobs) -cpu_count = psutil.cpu_count() -if env.GetOption("num_jobs") == altered_num_jobs: - # psutil.cpu_count returns None when it can't determine the - # number. 
This always fails on BSD's for example. If the user - # didn't specify, and we can't determine for a parallel build, it - # is better to make the user restart and be explicit, rather than - # give them a very slow build. - if cpu_count is None: - if get_option("ninja") != "disabled": - env.FatalError( - "Cannot auto-determine the appropriate size for the Ninja local_job pool. Please regenerate with an explicit -j argument to SCons" - ) - else: - env.FatalError( - "Cannot auto-determine the appropriate build parallelism on this platform. Please build with an explicit -j argument to SCons" - ) - - if "ICECC" in env and env["ICECC"] and get_option("ninja") == "disabled": - # If SCons is driving and we are using icecream, scale up the - # number of jobs. The icerun integration will prevent us from - # overloading the local system. - env.SetOption("num_jobs", 8 * cpu_count) - else: - # Otherwise, either icecream isn't in play, so just use local - # concurrency for SCons builds, or we are generating for - # Ninja, in which case num_jobs controls the size of the local - # pool. Scale that up to the number of local CPUs. - env.SetOption("num_jobs", cpu_count) -else: - if ( - not has_option("force-jobs") - and ("ICECC" not in env or not env["ICECC"]) - and env.GetOption("num_jobs") > cpu_count - ): - env.FatalError( - "ERROR: Icecream not enabled while using -j higher than available cpu's. " - + "Use --force-jobs to override." - ) - -if ( - get_option("ninja") != "disabled" - and ("ICECC" not in env or not env["ICECC"]) - and not has_option("force-jobs") -): - print( - f"WARNING: Icecream not enabled - Ninja concurrency will be capped at {cpu_count} jobs " - + "without regard to the -j value passed to it. " - + "Generate your ninja file with --force-jobs to disable this behavior." 
- ) - env["NINJA_MAX_JOBS"] = cpu_count - -if get_option("ninja") != "disabled": - if env.ToolchainIs("gcc", "clang"): - env.AppendUnique(CCFLAGS=["-fdiagnostics-color"]) - - ninja_builder = Tool("ninja") - - env["NINJA_BUILDDIR"] = env.Dir("$NINJA_BUILDDIR") - ninja_builder.generate(env) - - ninjaConf = Configure( - env, - help=False, - custom_tests={ - "CheckNinjaCompdbExpand": env.CheckNinjaCompdbExpand, - }, - ) - env["NINJA_COMPDB_EXPAND"] = ninjaConf.CheckNinjaCompdbExpand() - ninjaConf.Finish() - - # TODO: API for getting the sconscripts programmatically - # exists upstream: https://github.com/SCons/scons/issues/3625 - def ninja_generate_deps(env, target, source, for_signature): - # TODO SERVER-72851 add api for vars files to exec other vars files - # this would allow us to get rid of this regex here - def find_nested_variable_files(variables_file): - variable_files = [variables_file] - - with open(variables_file, "r") as file: - data = file.read() - pattern = "exec\\(open\\(['\"](.*)['\"]\, ['\"][a-z]+['\"]\\).read\\(\\)\\)" - nested_files = re.findall(pattern, data) - for file_name in nested_files: - variable_files.extend(find_nested_variable_files(file_name)) - - return variable_files - - # vars files can be from outside of the repo dir and can exec other vars files - # so we cannot just glob them - variables_files = [] - for variable_file in variables_files_args: - variables_files.extend(find_nested_variable_files(variable_file)) - - dependencies = env.Flatten( - [ - "SConstruct", - "WORKSPACE.bazel", - "BUILD.bazel", - ".bazelrc", - ".bazelignore", - glob(os.path.join("src", "**", "SConscript"), recursive=True), - glob(os.path.join("src", "**", "BUILD.bazel"), recursive=True), - glob(os.path.join("buildscripts", "**", "*.py"), recursive=True), - glob(os.path.join("bazel", "**", "*.bzl"), recursive=True), - glob(os.path.join(os.path.expanduser("~/.scons/"), "**", "*.py"), recursive=True), - glob(os.path.join("site_scons", "**", "*.py"), recursive=True), 
- glob(os.path.join("buildscripts", "**", "*.py"), recursive=True), - glob(os.path.join("src/third_party/scons-*", "**", "*.py"), recursive=True), - glob(os.path.join("src/mongo/db/modules", "**", "*.py"), recursive=True), - variables_files, - ] - ) - - return dependencies - - env["NINJA_REGENERATE_DEPS"] = ninja_generate_deps - - if env.TargetOSIs("windows"): - # The /b option here will make sure that windows updates the mtime - # when copying the file. This allows to not need to use restat for windows - # copy commands. - copy_install_cmd = "cmd.exe /c copy /b $in $out 1>NUL" - else: - copy_install_cmd = "install $in $out" - - if env.GetOption("install-action") == "hardlink": - if env.TargetOSIs("windows"): - install_cmd = f"cmd.exe /c mklink /h $out $in 1>nul || {copy_install_cmd}" - else: - install_cmd = f"ln $in $out || {copy_install_cmd}" - - elif env.GetOption("install-action") == "symlink": - # macOS's ln and Windows mklink command do not support relpaths - # out of the box so we will precompute during generation in a - # custom handler. 
- def symlink_install_action_function(_env, node): - # should only be one output and input for this case - output_file = _env.NinjaGetOutputs(node)[0] - input_file = _env.NinjaGetDependencies(node)[0] - try: - relpath = os.path.relpath(input_file, os.path.dirname(output_file)) - except ValueError: - relpath = os.path.abspath(input_file) - - return { - "outputs": [output_file], - "rule": "INSTALL", - "inputs": [input_file], - "implicit": _env.NinjaGetDependencies(node), - "variables": {"precious": node.precious, "relpath": relpath}, - } - - env.NinjaRegisterFunctionHandler("installFunc", symlink_install_action_function) - - if env.TargetOSIs("windows"): - install_cmd = "cmd.exe /c mklink $out $relpath 1>nul" - else: - install_cmd = "ln -s $relpath $out" - - else: - install_cmd = copy_install_cmd - - env.NinjaRule("INSTALL", install_cmd, description="Installed $out", pool="install_pool") - - if env.TargetOSIs("windows"): - # This is a workaround on windows for SERVER-48691 where the line length - # in response files is too long: - # https://developercommunity.visualstudio.com/content/problem/441978/fatal-error-lnk1170-line-in-command-file-contains.html - # - # Ninja currently does not support - # storing a newline in the ninja file, and therefore you can not - # easily generate it to the response files. The only documented - # way to get newlines into the response file is to use the $in_newline - # variable in the rule. 
- # - # This workaround will move most of the object or lib links into the - # inputs and then make the respone file consist of the inputs plus - # whatever options are left in the original response content - # more info can be found here: - # https://github.com/ninja-build/ninja/pull/1223/files/e71bcceefb942f8355aab83ab447d702354ba272#r179526824 - # https://github.com/ninja-build/ninja/issues/1000 - - # we are making a new special rule which will leverage - # the $in_newline to get newlines into our response file - env.NinjaRule( - "WINLINK", - "$env$WINLINK @$out.rsp", - description="Linked $out", - deps=None, - pool="link_pool", - use_depfile=False, - use_response_file=True, - response_file_content="$rspc $in_newline", - ) - - # Setup the response file content generation to use our workaround rule - # for LINK commands. - provider = env.NinjaGenResponseFileProvider( - "WINLINK", - "$LINK", - ) - env.NinjaRuleMapping("${LINKCOM}", provider) - env.NinjaRuleMapping(env["LINKCOM"], provider) - - # The workaround function will move some of the content from the rspc - # variable into the nodes inputs. We only want to move build nodes because - # inputs must be files, so we make sure the the option in the rspc - # file starts with the build directory. - def winlink_workaround(env, node, ninja_build): - if ninja_build and "rspc" in ninja_build["variables"]: - rsp_content = [] - inputs = [] - for opt in ninja_build["variables"]["rspc"].split(): - # if its a candidate to go in the inputs add it, else keep it in the non-newline - # rsp_content list - if opt.startswith(str(env.Dir("$BUILD_DIR"))) and opt != str(node): - inputs.append(opt) - else: - rsp_content.append(opt) - - ninja_build["variables"]["rspc"] = " ".join(rsp_content) - ninja_build["inputs"] += [ - infile for infile in inputs if infile not in ninja_build["inputs"] - ] - - # We apply the workaround to all Program nodes as they have potential - # response files that have lines that are too long. 
- # This will setup a callback function for a node - # so that when its been processed, we can make some final adjustments before - # its generated to the ninja file. - def winlink_workaround_emitter(target, source, env): - env.NinjaSetBuildNodeCallback(target[0], winlink_workaround) - return target, source - - builder = env["BUILDERS"]["Program"] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([base_emitter, winlink_workaround_emitter]) - builder.emitter = new_emitter - - # We can create empty files for FAKELIB in Ninja because it - # does not care about content signatures. We have to - # write_uuid_to_file for FAKELIB in SCons because SCons does. - env.NinjaRule( - rule="FAKELIB", - command="cmd /c copy 1>NUL NUL $out" if env["PLATFORM"] == "win32" else "touch $out", - ) - - def fakelib_in_ninja(env, node): - """Generates empty .a files""" - return { - "outputs": [node.get_path()], - "rule": "FAKELIB", - "implicit": [str(s) for s in node.sources], - } - - env.NinjaRegisterFunctionHandler("write_uuid_to_file", fakelib_in_ninja) - - def ninja_test_list_builder(env, node): - test_files = [test_file.path for test_file in env["MONGO_TEST_REGISTRY"][node.path]] - files = " ".join(test_files) - return { - "outputs": [node.get_path()], - "rule": "TEST_LIST", - "implicit": test_files, - "variables": { - "files": files, - }, - } - - if env["PLATFORM"] == "win32": - cmd = 'cmd.exe /c del "$out" && for %a in ($files) do (echo %a >> "$out")' - else: - cmd = 'rm -f "$out"; for i in $files; do echo "$$i" >> "$out"; done;' - - env.NinjaRule( - rule="TEST_LIST", - description="Compiled test list: $out", - command=cmd, - ) - env.NinjaRegisterFunctionHandler("test_list_builder_action", ninja_test_list_builder) - - env["NINJA_GENERATED_SOURCE_ALIAS_NAME"] = "generated-sources" - -gdb_index_enabled = env.get("GDB_INDEX") -if gdb_index_enabled == "auto" and link_model == "dynamic": - gdb_index_enabled = True - -if gdb_index_enabled is True: - gdb_index = 
Tool("gdb_index") - if gdb_index.exists(env): - gdb_index.generate(env) - elif env.get("GDB_INDEX") != "auto": - env.FatalError("Could not enable explicit request for gdb index generation.") - -if env.TargetOSIs("linux") and get_option("ssl") == "on": - env.Tool("protobuf_compiler") - -if (get_option("separate-debug") == "on" or env.TargetOSIs("windows")) and debug_symbols: - separate_debug = Tool("separate_debug") - if not separate_debug.exists(env): - env.FatalError( - "Cannot honor --separate-debug because the separate_debug.py Tool reported as nonexistent" - ) - separate_debug(env) - -if env["SPLIT_DWARF"] == "auto": - # For static builds, splitting out the dwarf info reduces memory requirments, link time - # and binary size significantly. It's affect is less prominent in dynamic builds. The downside - # is .dwo files use absolute paths in the debug info, so it's not relocatable. - # We also found the running splitdwarf with dwarf5 failed to compile - # so unless we set DWARF_VERSION = 4 we are going to turn off split dwarf - env["SPLIT_DWARF"] = ( - not link_model == "dynamic" - and env.ToolchainIs("gcc", "clang") - and not env.TargetOSIs("darwin") - and env.CheckCCFLAGSSupported("-gsplit-dwarf") - and env.get("DWARF_VERSION") == 4 - and not gdb_index_enabled - ) - -if env["SPLIT_DWARF"]: - if env.TargetOSIs("darwin"): - env.FatalError("Setting SPLIT_DWARF=1 on darwin is not supported.") - if env.get("DWARF_VERSION") != 4: - env.FatalError( - "Running split dwarf outside of DWARF4 has shown compilation issues when using DWARF5 and gdb index. Disabling this functionality for now. Use SPLIT_DWARF=0 to disable building with split dwarf or use DWARF_VERSION=4 to pin to DWARF version 4." - ) - if gdb_index_enabled: - env.FatalError( - "SPLIT_DWARF is not supported when using GDB_INDEX. 
Use GDB_INDEX=0 to allow enabling SPLIT_DWARF" - ) - env.Tool("split_dwarf") - -env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = "tgz" - -env["AIB_META_COMPONENT"] = "all" -env["AIB_BASE_COMPONENT"] = "common" -env["AIB_DEFAULT_COMPONENT"] = "mongodb" - -env.Tool("auto_install_binaries") -env.Tool("auto_archive") - -env.DeclareRoles( - roles=[ - env.Role( - name="base", - ), - env.Role( - name="debug", - ), - env.Role( - name="dev", - dependencies=[ - "runtime", - ], - ), - env.Role( - name="meta", - ), - env.Role( - name="runtime", - dependencies=[ - # On windows, we want the runtime role to depend - # on the debug role so that PDBs end in the - # runtime package. - "debug" if env.TargetOSIs("windows") else None, - ], - silent=True, - ), - ], - base_role="base", - meta_role="meta", -) - - -def _aib_debugdir(source, target, env, for_signature): - for s in source: - origin = getattr(s.attributes, "debug_file_for", None) - oentry = env.Entry(origin) - osuf = oentry.get_suffix() - map_entry = env["AIB_SUFFIX_MAP"].get(osuf) - if map_entry: - return map_entry[0] - env.FatalError( - "Unable to find debuginfo file in _aib_debugdir: (source='{}')".format(str(source)) - ) - - -if debug_symbols: - env["PREFIX_DEBUGDIR"] = _aib_debugdir - -env.AddSuffixMapping( - { - "$PROGSUFFIX": env.SuffixMap( - directory="$PREFIX_BINDIR", - default_role="runtime", - ), - "$SHLIBSUFFIX": env.SuffixMap( - directory="$PREFIX_BINDIR" - if mongo_platform.get_running_os_name() == "windows" - else "$PREFIX_LIBDIR", - default_role="runtime", - ), - ".debug": env.SuffixMap( - directory="$PREFIX_DEBUGDIR", - default_role="debug", - ), - ".dSYM": env.SuffixMap( - directory="$PREFIX_DEBUGDIR", - default_role="debug", - ), - ".pdb": env.SuffixMap( - directory="$PREFIX_DEBUGDIR", - default_role="debug", - ), - } -) - -env.AddPackageNameAlias( - component="dist", - role="runtime", - name="mongodb-dist", -) - -env.AddPackageNameAlias( - component="dist", - role="debug", - name="mongodb-dist-debugsymbols", -) - 
-env.AddPackageNameAlias( - component="dist-test", - role="runtime", - name="mongodb-binaries", -) - -env.AddPackageNameAlias( - component="dist-test", - role="debug", - name="mongo-debugsymbols", -) - -env.AddPackageNameAlias( - component="dbtest", - role="runtime", - name="dbtest-binary", -) - -env.AddPackageNameAlias( - component="dbtest", - role="debug", - name="dbtest-debugsymbols", -) - -env.AddPackageNameAlias( - component="jstestshell", - role="runtime", - name="mongodb-jstestshell", -) - -env.AddPackageNameAlias( - component="jstestshell", - role="debug", - name="mongodb-jstestshell-debugsymbols", -) - -env.AddPackageNameAlias( - component="mongocryptd", - role="runtime", - name="mongodb-cryptd", -) - -env.AddPackageNameAlias( - component="mongocryptd", - role="debug", - name="mongodb-cryptd-debugsymbols", -) - -env.AddPackageNameAlias( - component="benchmarks", - role="runtime", - name="benchmarks", -) - -env.AddPackageNameAlias( - component="benchmarks", - role="debug", - name="benchmarks-debugsymbols", -) - -env.AddPackageNameAlias( - component="mh", - role="runtime", - # TODO: we should be able to move this to where the mqlrun binary is - # defined when AIB correctly uses environments instead of hooking into - # the first environment used. - name="mh-binaries", -) - -env.AddPackageNameAlias( - component="mh", - role="debug", - # TODO: we should be able to move this to where the mqlrun binary is - # defined when AIB correctly uses environments instead of hooking into - # the first environment used. 
- name="mh-debugsymbols", -) - -env.AutoInstall( - target="$PREFIX", - source="$PRETTY_PRINTER_TEST_LIST", - AIB_ROLE="runtime", - AIB_COMPONENT="pretty-printer-tests", - AIB_COMPONENTS_EXTRA=["dist-test"], -) - -env.AutoInstall( - ".", - "$BENCHMARK_LIST", - AIB_COMPONENT="benchmarks", - AIB_ROLE="runtime", -) - -if "SANITIZER_RUNTIME_LIBS" in env: - env.AutoInstall( - target="$PREFIX_LIBDIR", - source=[env.File(path) for path in env["SANITIZER_RUNTIME_LIBS"]], - AIB_COMPONENT="dist", - AIB_ROLE="runtime", - AIB_COMPONENTS_EXTRA=["dist-test"], - ) - -for benchmark_tag in env.get_bazel_benchmark_tags(): - env.AddPackageNameAlias( - component=benchmark_tag, - role="runtime", - name=benchmark_tag, - ) - - env.AutoInstall( - ".", - f"$BUILD_ROOT/{benchmark_tag}.txt", - AIB_COMPONENT=benchmark_tag, - AIB_ROLE="runtime", - ) - -env["RPATH_ESCAPED_DOLLAR_ORIGIN"] = "\\$$$$ORIGIN" - - -def isSupportedStreamsPlatform(thisEnv): - # TODO https://jira.mongodb.org/browse/SERVER-74961: Support other platforms. - # linux x86 and ARM64 are supported. - return ( - thisEnv.TargetOSIs("linux") - and thisEnv["TARGET_ARCH"] in ("x86_64", "aarch64") - and ssl_provider == "openssl" - ) - - -def shouldBuildStreams(thisEnv): - if releaseBuild: - # The streaming enterprise module and dependencies are only included in release builds. - # when streams-release-build is set. - return get_option("streams-release-build") and isSupportedStreamsPlatform(thisEnv) - else: - return not get_option("disable-streams") and isSupportedStreamsPlatform(thisEnv) - - -env.AddMethod(shouldBuildStreams, "ShouldBuildStreams") - - -def prefix_libdir_rpath_generator(env, source, target, for_signature): - # If the PREFIX_LIBDIR has an absolute path, we will use that directly as - # RPATH because that indicates the final install destination of the libraries. 
- prefix_libdir = env.subst("$PREFIX_LIBDIR") - if os.path.isabs(prefix_libdir): - return ["$PREFIX_LIBDIR"] - - # If the PREFIX_LIBDIR is not an absolute path, we will use a relative path - # from the bin to the lib dir. - lib_rel = os.path.relpath(prefix_libdir, env.subst("$PREFIX_BINDIR")) - - if env["PLATFORM"] == "posix": - return f"$RPATH_ESCAPED_DOLLAR_ORIGIN/{lib_rel}" - - if env["PLATFORM"] == "darwin": - return f"@loader_path/{lib_rel}" - - -if get_option("link-model").startswith("dynamic"): - env["PREFIX_LIBDIR_RPATH_GENERATOR"] = prefix_libdir_rpath_generator - -if env["PLATFORM"] == "posix": - env.AppendUnique( - RPATH=["$PREFIX_LIBDIR_RPATH_GENERATOR"], - LINKFLAGS=[ - # Most systems *require* -z,origin to make origin work, but android - # blows up at runtime if it finds DF_ORIGIN_1 in DT_FLAGS_1. - # https://android.googlesource.com/platform/bionic/+/cbc80ba9d839675a0c4891e2ab33f39ba51b04b2/linker/linker.h#68 - # https://android.googlesource.com/platform/bionic/+/cbc80ba9d839675a0c4891e2ab33f39ba51b04b2/libc/include/elf.h#215 - "-Wl,-z,origin" if not env.TargetOSIs("android") else [], - "-Wl,--enable-new-dtags", - ], - SHLINKFLAGS=[ - # -h works for both the sun linker and the gnu linker. - "-Wl,-h,${TARGET.file}", - ], - ) -elif env["PLATFORM"] == "darwin": - # The darwin case uses an adhoc implementation of RPATH for SCons - # since SCons does not support RPATH directly for macOS: - # https://github.com/SCons/scons/issues/2127 - # so we setup RPATH and LINKFLAGS ourselves. - env["RPATHPREFIX"] = "-Wl,-rpath," - env["RPATHSUFFIX"] = "" - env.AppendUnique( - LINKFLAGS="${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}", - SHLINKFLAGS=[ - "-Wl,-install_name,@rpath/${TARGET.file}", - ], - RPATH=["$PREFIX_LIBDIR_RPATH_GENERATOR"], - ) - -env.Default(env.Alias("install-default")) - -# Load the compilation_db tool. We want to do this after configure so we don't end up with -# compilation database entries for the configure tests, which is weird. 
-# We also set a few tools we know will not work with compilationdb, these -# wrapper tools get appended on the front of the command and in most -# cases don't want that in the compilation database. -env["_COMPILATIONDB_IGNORE_WRAPPERS"] = env.get("COMPILATIONDB_IGNORE_WRAPPERS", "").split(",") -if get_option("ninja") == "disabled": - env.Tool("compilation_db") - -incremental_link = Tool("incremental_link") -if incremental_link.exists(env): - incremental_link(env) - - -# Resource Files are Windows specific -def env_windows_resource_file(env, path): - if env.TargetOSIs("windows"): - return [env.RES(path)] - else: - return [] - - -env.AddMethod(env_windows_resource_file, "WindowsResourceFile") - -# --- lint ---- - -if get_option("lint-scope") == "changed": - patch_file = env.Command( - target="$BUILD_DIR/current.git.patch", - source=[env.WhereIs("git")], - action="${SOURCES[0]} diff $GITDIFFFLAGS > $TARGET", - ) - - env.AlwaysBuild(patch_file) - - clang_format = env.Command( - target="#lint-clang-format", - source=[ - "buildscripts/clang_format.py", - patch_file, - ], - action="REVISION=$REVISION $PYTHON ${SOURCES[0]} lint-git-diff", - ) - - eslint = env.Command( - target="#lint-eslint", - source=[ - "buildscripts/eslint.py", - patch_file, - ], - action="REVISION=$REVISION $PYTHON ${SOURCES[0]} lint-git-diff", - ) - -else: - clang_format = env.Command( - target="#lint-clang-format", - source=[ - "buildscripts/clang_format.py", - ], - action="$PYTHON ${SOURCES[0]} lint-all", - ) - - eslint = env.Command( - target="#lint-eslint", - source=[ - "buildscripts/eslint.py", - ], - action="$PYTHON ${SOURCES[0]} --dirmode lint jstests/ src/mongo", - ) - -pylinters = env.Command( - target="#lint-pylinters", - source=[ - "buildscripts/pylinters.py", - ], - action="$PYTHON ${SOURCES[0]} lint", -) - -quickmongolint = env.Command( - target="#lint-lint.py", - source=["buildscripts/quickmongolint.py"], - action="$PYTHON ${SOURCES[0]} lint", -) - -lint_errorcodes = env.Command( - 
target="#lint-errorcodes", - source=["buildscripts/errorcodes.py"], - action="$PYTHON ${SOURCES[0]} --quiet", -) - -env.Alias("lint", [quickmongolint, eslint, clang_format, pylinters, lint_errorcodes]) -env.Alias("lint-fast", [eslint, clang_format, pylinters, lint_errorcodes]) -env.AlwaysBuild("lint") -env.AlwaysBuild("lint-fast") - -# ---- INSTALL ------- - - -def getSystemInstallName(): - arch_name = env.subst("$MONGO_DISTARCH") - - # We need to make sure the directory names inside dist tarballs are permanently - # consistent, even if the target OS name used in scons is different. Any differences - # between the names used by env.TargetOSIs/env.GetTargetOSName should be added - # to the translation dictionary below. - os_name_translations = { - "windows": "win32", - "macOS": "macos", - } - os_name = env.GetTargetOSName() - os_name = os_name_translations.get(os_name, os_name) - n = os_name + "-" + arch_name - - if len(mongo_modules): - n += "-" + "-".join(m.name for m in mongo_modules) - - dn = env.subst("$MONGO_DISTMOD") - if len(dn) > 0: - n = n + "-" + dn - - return n - - -# This function will add the version.txt file to the source tarball -# so that versioning will work without having the git repo available. 
-def add_version_to_distsrc(env, archive): - version_file_path = env.subst("$MONGO_DIST_SRC_PREFIX") + "version.json" - if version_file_path not in archive: - version_data = { - "version": env["MONGO_VERSION"], - "githash": env["MONGO_GIT_HASH"], - } - archive.append_file_contents( - version_file_path, - json.dumps( - version_data, - sort_keys=True, - indent=4, - separators=(",", ": "), - ), - ) - - -env.AddDistSrcCallback(add_version_to_distsrc) - -env["SERVER_DIST_BASENAME"] = env.subst("mongodb-%s-$MONGO_DISTNAME" % (getSystemInstallName())) -env["MH_DIST_BASENAME"] = "mh" -if get_option("legacy-tarball") == "true": - if ( - "tar-dist" not in COMMAND_LINE_TARGETS - and "zip-dist" not in COMMAND_LINE_TARGETS - and "archive-dist" not in COMMAND_LINE_TARGETS - ): - env.FatalError("option --legacy-tarball only valid with an archive-dist target") - env["PREFIX"] = "$SERVER_DIST_BASENAME" - -module_sconscripts = moduleconfig.get_module_sconscripts(mongo_modules) - -# This generates a numeric representation of the version string so that -# you can easily compare versions of MongoDB without having to parse -# the version string. 
-# -# Examples: -# 5.1.1-123 => ['5', '1', '1', '123', None, None] => [5, 1, 2, -100] -# 5.1.1-rc2 => ['5', '1', '1', 'rc2', 'rc', '2'] => [5, 1, 1, -23] -# 5.1.1-rc2-123 => ['5', '1', '1', 'rc2-123', 'rc', '2'] => [5, 1, 1, -23] -# 5.1.0-alpha-123 => ['5', '1', '0', 'alpha-123', 'alpha', ''] => [5, 1, 0, -50] -# 5.1.0-alpha1-123 => ['5', '1', '0', 'alpha1-123', 'alpha', '1'] => [5, 1, 0, -49] -# 5.1.1 => ['5', '1', '1', '', None, None] => [5, 1, 1, 0] - -version_parts = [ - x - for x in re.match( - r"^(\d+)\.(\d+)\.(\d+)-?((?:(rc|alpha)(\d?))?.*)?", env["MONGO_VERSION"] - ).groups() -] -version_extra = version_parts[3] if version_parts[3] else "" -if version_parts[4] == "rc": - version_parts[3] = int(version_parts[5]) + -25 -elif version_parts[4] == "alpha": - if version_parts[5] == "": - version_parts[3] = -50 - else: - version_parts[3] = int(version_parts[5]) + -50 -elif version_parts[3]: - version_parts[2] = int(version_parts[2]) + 1 - version_parts[3] = -100 -else: - version_parts[3] = 0 -version_parts = [int(x) for x in version_parts[:4]] - -# The following symbols are exported for use in subordinate SConscript files. -# Ideally, the SConscript files would be purely declarative. They would only -# import build environment objects, and would contain few or no conditional -# statements or branches. -# -# Currently, however, the SConscript files do need some predicates for -# conditional decision making that hasn't been moved up to this SConstruct file, -# and they are exported here, as well. 
-Export( - [ - "debugBuild", - "endian", - "get_option", - "have_sasl_lib", - "has_option", - "http_client", - "jsEngine", - "module_sconscripts", - "optBuild", - "releaseBuild", - "selected_experimental_optimizations", - "serverJs", - "ssl_provider", - "use_libunwind", - "use_system_libunwind", - "use_system_version_of_library", - "use_vendored_libunwind", - "version_extra", - "version_parts", - "wiredtiger", - ] -) - - -def injectModule(env, module, **kwargs): - injector = env["MODULE_INJECTORS"].get(module) - if injector: - return injector(env, **kwargs) - return env - - -env.AddMethod(injectModule, "InjectModule") - -msvc_version = "" -if "MSVC_VERSION" in env and env["MSVC_VERSION"]: - msvc_version = "--version " + env["MSVC_VERSION"] + " " - -# Microsoft Visual Studio Project generation for code browsing -if get_option("ninja") == "disabled": - vcxprojFile = env.Command( - "mongodb.vcxproj", - "compiledb", - r"$PYTHON buildscripts\make_vcxproj.py " + msvc_version + "mongodb", - ) - vcxproj = env.Alias("vcxproj", vcxprojFile) - -# TODO: maybe make these work like the other archive- aliases -# even though they aren't piped through AIB? -distSrc = env.DistSrc("distsrc.tar", NINJA_SKIP=True) -env.NoCache(distSrc) -env.Alias("distsrc-tar", distSrc) - -distSrcGzip = env.GZip( - target="distsrc.tgz", - source=[distSrc], - NINJA_SKIP=True, -) -env.NoCache(distSrcGzip) -env.Alias("distsrc-tgz", distSrcGzip) - -distSrcZip = env.DistSrc("distsrc.zip", NINJA_SKIP=True) -env.NoCache(distSrcZip) -env.Alias("distsrc-zip", distSrcZip) - -env.Alias("distsrc", "distsrc-tgz") - -# Do this as close to last as possible before reading SConscripts, so -# that any tools that may have injected other things via emitters are included -# among the side effect adornments. 
-env.Tool("task_limiter") -if has_option("jlink"): - link_jobs = env.SetupTaskLimiter( - name="jlink", - concurrency_ratio=get_option("jlink"), - builders=["Program", "SharedLibrary", "LoadableModule"], - ) - if get_option("ninja") != "disabled": - env["NINJA_LINK_JOBS"] = link_jobs - -if env.get("UNITTESTS_COMPILE_CONCURRENCY"): - if hasattr(SCons.Tool, "cxx"): - c_suffixes = SCons.Tool.cxx.CXXSuffixes - else: - c_suffixes = SCons.Tool.msvc.CXXSuffixes - - env.SetupTaskLimiter( - name="unit_cc", - concurrency_ratio=float(env.get("UNITTESTS_COMPILE_CONCURRENCY")), - builders={"Object": c_suffixes, "SharedObject": c_suffixes}, - source_file_regex=r"^.*_test\.cpp$", - ) - -first_half_flag = False - - -def half_source_emitter(target, source, env): - global first_half_flag - if first_half_flag: - first_half_flag = False - if "conftest" not in str(target[0]) and not str(source[0]).endswith("_test.cpp"): - env.Alias("compile_first_half_non_test_source", target) - else: - first_half_flag = True - return target, source - - -# Cribbed from Tool/cc.py and Tool/c++.py. It would be better if -# we could obtain this from SCons. -_CSuffixes = [".c"] -if not SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CSuffixes.append(".C") - -_CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] -if SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CXXSuffixes.append(".C") - -for object_builder in SCons.Tool.createObjBuilders(env): - emitterdict = object_builder.builder.emitter - for suffix in emitterdict.keys(): - if suffix not in _CSuffixes + _CXXSuffixes: - continue - base = emitterdict[suffix] - emitterdict[suffix] = SCons.Builder.ListEmitter( - [ - base, - half_source_emitter, - ] - ) - -# Keep this late in the game so that we can investigate attributes set by all the tools that have run. 
-if has_option("cache"): - if get_option("cache") == "nolinked": - - def noCacheEmitter(target, source, env): - for t in target: - try: - if getattr(t.attributes, "thin_archive", False): - continue - except AttributeError: - pass - env.NoCache(t) - return target, source - - def addNoCacheEmitter(builder): - origEmitter = builder.emitter - if SCons.Util.is_Dict(origEmitter): - for k, v in origEmitter: - origEmitter[k] = SCons.Builder.ListEmitter([v, noCacheEmitter]) - elif SCons.Util.is_List(origEmitter): - origEmitter.append(noCacheEmitter) - else: - builder.emitter = SCons.Builder.ListEmitter([origEmitter, noCacheEmitter]) - - addNoCacheEmitter(env["BUILDERS"]["Program"]) - addNoCacheEmitter(env["BUILDERS"]["StaticLibrary"]) - addNoCacheEmitter(env["BUILDERS"]["SharedLibrary"]) - addNoCacheEmitter(env["BUILDERS"]["SharedArchive"]) - addNoCacheEmitter(env["BUILDERS"]["LoadableModule"]) - -if env.GetOption("patch-build-mongot-url"): - binary_url = env.GetOption("patch-build-mongot-url") - - env.Command( - target="mongot-localdev", - source=[], - action=[ - f"curl {binary_url} | tar xvz", - ], - ) - - env.AutoInstall( - target="$PREFIX_BINDIR", - source=["mongot-localdev"], - AIB_COMPONENT="mongot", - AIB_ROLE="runtime", - AIB_COMPONENTS_EXTRA=["dist-test"], - ) - -# mongot is a MongoDB-specific process written as a wrapper around Lucene. Using Lucene, mongot -# indexes MongoDB databases to provide our customers with full text search capabilities. -# -# --build-mongot is utilized as a compile flag by the evergreen build variants that run end-to-end -# search suites. It downloads & bundles mongot with the other mongo binaries. These binaries become -# available to the build variants in question when the binaries are extracted via archive_dist_test -# during compilation. -elif env.GetOption("build-mongot"): - # '--build-mongot` can be 'latest' or'release' - # - 'latest' describes the binaries created by the most recent commit merged to 10gen/mongot. 
- # - 'release' refers to the mongot binaries running in atlas prod. - binary_ver_str = env.GetOption("build-mongot") - - platform_str = "" - if mongo_platform.is_running_os("linux"): - platform_str = "linux" - elif mongo_platform.is_running_os("darwin"): - platform_str = "macos" - else: - print("mongot is only supported on macOS and linux") - Exit(1) - - arch_str = "x86_64" - # macos arm64 is not supported by mongot, but macos x86_64 runs on it successfully - if mongo_platform.is_arm_processor() and platform_str != "macos": - arch_str = "aarch64" - - db_contrib_tool = env.Command( - target=["$BUILD_ROOT/db_contrib_tool_venv/bin/db-contrib-tool"], - source=[], - action=[ - "rm -rf $BUILD_ROOT/db_contrib_tool_venv", - f"{sys.executable} -m virtualenv -p {sys.executable} $BUILD_ROOT/db_contrib_tool_venv", - "$BUILD_ROOT/db_contrib_tool_venv/bin/python3 -m pip install db-contrib-tool", - ], - BUILD_ROOT=env.Dir("$BUILD_ROOT").path, - ) - - env.Command( - target=["mongot-localdev"], - source=db_contrib_tool, - action=[ - f"$SOURCE setup-mongot-repro-env {binary_ver_str} --platform={platform_str} --architecture={arch_str}", - "mv build/mongot-localdev mongot-localdev", - ], - ENV=os.environ, - ) - - env.AutoInstall( - target="$PREFIX_BINDIR", - source=["mongot-localdev"], - AIB_COMPONENT="mongot", - AIB_ROLE="runtime", - AIB_COMPONENTS_EXTRA=["dist-test"], - ) - - -# load the tool late to make sure we can copy over any new -# emitters/scanners we may have created in the SConstruct when -# we go to make stand in bazel builders for the various scons builders - -# __NINJA_NO is ninja callback to scons signal, in that case we care about -# scons only targets not thin targets. 
-env.Tool("integrate_bazel") -if env.get("__NINJA_NO") == "1": - env.LoadBazelBuilders() - - def noop(*args, **kwargs): - pass - - env.AddMethod(noop, "WaitForBazel") - env.AddMethod(noop, "BazelAutoInstall") - -if env.get("__NINJA_NO") != "1": - BAZEL_AUTOINSTALLED_LIBDEPS = set() - - # the next emitters will read link lists - # to determine dependencies in order for scons - # to handle the install - def bazel_auto_install_emitter(target, source, env): - for libdep in env.Flatten(env.get("LIBDEPS", [])) + env.Flatten( - env.get("LIBDEPS_PRIVATE", []) - ): - libdep_node = libdeps._get_node_with_ixes( - env, env.Entry(libdep).abspath, "SharedLibrary" - ) - try: - shlib_suffix = env.subst("$SHLIBSUFFIX") - bazel_libdep = env.File( - f"#/{env['SCONS2BAZEL_TARGETS'].bazel_output(libdep_node.path)}" - ) - if str(bazel_libdep).endswith(shlib_suffix): - if bazel_libdep not in BAZEL_AUTOINSTALLED_LIBDEPS: - env.BazelAutoInstall(bazel_libdep, shlib_suffix) - BAZEL_AUTOINSTALLED_LIBDEPS.add(bazel_libdep) - env.Depends( - env.GetAutoInstalledFiles(target[0]), - env.GetAutoInstalledFiles(bazel_libdep), - ) - except KeyError: - pass - - return target, source - - for builder_name in ["Program", "SharedLibrary"]: - builder = env["BUILDERS"][builder_name] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([base_emitter, bazel_auto_install_emitter]) - builder.emitter = new_emitter - - def bazel_program_auto_install_emitter(target, source, env): - if env.GetOption("link-model") == "dynamic-sdk": - return target, source - - bazel_target = env["SCONS2BAZEL_TARGETS"].bazel_target(target[0].path) - - linkfile = bazel_target.replace("//src/", "bazel-bin/src/") + "_links.list" - linkfile = "/".join(linkfile.rsplit(":", 1)) - - with open(os.path.join(env.Dir("#").abspath, linkfile)) as f: - query_results = f.read() - - filtered_results = "" - for lib in query_results.splitlines(): - bazel_out_path = lib.replace("\\", "/").replace( - f"{env['BAZEL_OUT_DIR']}/src", 
"bazel-bin/src" - ) - if os.path.exists( - env.File("#/" + bazel_out_path + ".exclude_lib").abspath.replace("\\", "/") - ): - continue - filtered_results += lib + "\n" - query_results = filtered_results - - t = target[0] - suffix = getattr(t.attributes, "aib_effective_suffix", t.get_suffix()) - - proj_path = env.Dir("#src").abspath.replace("\\", "/") - build_path = env.Dir("$BUILD_DIR").abspath.replace("\\", "/") - bazel_path = os.path.join(env.Dir("#").abspath, env["BAZEL_OUT_DIR"] + "/src").replace( - "\\", "/" - ) - - new_path = t.abspath.replace("\\", "/").replace(proj_path, build_path) - new_path = new_path.replace(build_path, bazel_path) - - bazel_node = env.File(new_path) - - debug_files = [] - debug_suffix = "" - # This was copied from separate_debug.py - if env.TargetOSIs("darwin"): - # There isn't a lot of great documentation about the structure of dSYM bundles. - # For general bundles, see: - # - # https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html - # - # But we expect to find two files in the bundle. An - # Info.plist file under Contents, and a file with the same - # name as the target under Contents/Resources/DWARF. 
- - target0 = bazel_node - dsym_dir_name = target0.name + ".dSYM" - dsym_dir = env.Dir(dsym_dir_name, directory=target0.get_dir()) - - dwarf_sym_with_debug = os.path.join( - dsym_dir.abspath, f"Contents/Resources/DWARF/{target0.name}_shared_with_debug.dylib" - ) - - # this handles shared libs or program binaries - if os.path.exists(dwarf_sym_with_debug): - dwarf_sym_name = f"{target0.name}.dylib" - else: - dwarf_sym_with_debug = os.path.join( - dsym_dir.abspath, f"Contents/Resources/DWARF/{target0.name}_with_debug" - ) - dwarf_sym_name = f"{target0.name}" - - plist_file = env.File("Contents/Info.plist", directory=dsym_dir) - setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - plist_file.attributes, - "aib_additional_directory", - "{}/Contents".format(dsym_dir_name), - ) - - dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir) - - dwarf_file = env.File(dwarf_sym_with_debug, directory=dwarf_dir) - setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - dwarf_file.attributes, - "aib_additional_directory", - "{}/Contents/Resources/DWARF".format(dsym_dir_name), - ) - setattr(dwarf_file.attributes, "aib_new_name", dwarf_sym_name) - - debug_files.extend([plist_file, dwarf_file]) - debug_suffix = ".dSYM" - - elif env.TargetOSIs("posix"): - debug_suffix = env.subst("$SEPDBG_SUFFIX") - debug_file = env.File(f"{os.path.splitext(bazel_node.abspath)[0]}{debug_suffix}") - debug_files.append(debug_file) - elif env.TargetOSIs("windows"): - debug_suffix = ".pdb" - debug_file = env.File(f"{os.path.splitext(bazel_node.abspath)[0]}{debug_suffix}") - debug_files.append(debug_file) - else: - pass - - if debug_symbols: - for debug_file in debug_files: - setattr(debug_file.attributes, "debug_file_for", bazel_node) - setattr(bazel_node.attributes, "separate_debug_files", debug_files) - - installed_prog = env.BazelAutoInstallSingleTarget(bazel_node, suffix, bazel_node) - - installed_debugs = [] - if debug_symbols: - for debug_file 
in debug_files: - installed_debugs.append( - env.BazelAutoInstallSingleTarget(debug_file, debug_suffix, debug_file) - ) - - libs = [] - debugs = [] - for lib in query_results.splitlines(): - libdep = env.File( - lib.replace("\\", "/").replace(f"{env['BAZEL_OUT_DIR']}/src", "$BUILD_DIR") - ) - libdep_node = libdeps._get_node_with_ixes( - env, env.Entry(libdep).abspath, "SharedLibrary" - ) - bazel_libdep = env.File( - f"#/{env['SCONS2BAZEL_TARGETS'].bazel_output(libdep_node.path)}" - ) - shlib_suffix = env.subst("$SHLIBSUFFIX") - if str(bazel_libdep).endswith(shlib_suffix): - if bazel_libdep not in BAZEL_AUTOINSTALLED_LIBDEPS: - env.BazelAutoInstall(bazel_libdep, shlib_suffix) - BAZEL_AUTOINSTALLED_LIBDEPS.add(bazel_libdep) - libs.append(env.GetAutoInstalledFiles(bazel_libdep)[0]) - if hasattr(bazel_libdep.attributes, "separate_debug_files"): - debugs.append( - env.GetAutoInstalledFiles( - getattr(bazel_libdep.attributes, "separate_debug_files")[0] - )[0] - ) - - env.Depends(installed_prog, libs) - - for installed_debug_file in installed_debugs: - env.Depends(installed_debug_file, debugs) - - setattr(installed_prog[0].attributes, "separate_debug_files", installed_debugs) - setattr(t.attributes, "AIB_INSTALLED_FILES", installed_prog) - - return target, source - - builder = env["BUILDERS"]["BazelProgram"] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([bazel_program_auto_install_emitter, base_emitter]) - builder.emitter = new_emitter - - def bazel_shared_library_auto_install_emitter(target, source, env): - bazel_target = env["SCONS2BAZEL_TARGETS"].bazel_target(target[0].path) - bazel_output = env["SCONS2BAZEL_TARGETS"].bazel_output(target[0].path) - - linkfile = bazel_target.replace("//src/", "bazel-bin/src/") + "_links.list" - linkfile = "/".join(linkfile.rsplit(":", 1)) - - with open(os.path.join(env.Dir("#").abspath, linkfile)) as f: - query_results = f.read() - - filtered_results = "" - for lib in query_results.splitlines(): - 
bazel_out_path = lib.replace("\\", "/").replace( - f"{env['BAZEL_OUT_DIR']}/src", "bazel-bin/src" - ) - if os.path.exists( - env.File("#/" + bazel_out_path + ".exclude_lib").abspath.replace("\\", "/") - ): - continue - filtered_results += lib + "\n" - query_results = filtered_results - - t = target[0] - suffix = getattr(t.attributes, "aib_effective_suffix", t.get_suffix()) - - bazel_node = env.File("#/" + bazel_output) - - debug_files = [] - debug_suffix = "" - # This was copied from separate_debug.py - if env.TargetOSIs("darwin"): - # There isn't a lot of great documentation about the structure of dSYM bundles. - # For general bundles, see: - # - # https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html - # - # But we expect to find two files in the bundle. An - # Info.plist file under Contents, and a file with the same - # name as the target under Contents/Resources/DWARF. - - target0 = bazel_node - dsym_dir_name = target0.name + ".dSYM" - dsym_dir = env.Dir(dsym_dir_name, directory=target0.get_dir()) - - dwarf_sym_with_debug = os.path.join( - dsym_dir.abspath, f"Contents/Resources/DWARF/{target0.name}_shared_with_debug.dylib" - ) - - # this handles shared libs or program binaries - if os.path.exists(dwarf_sym_with_debug): - dwarf_sym_name = f"{target0.name}.dylib" - else: - dwarf_sym_with_debug = os.path.join( - dsym_dir.abspath, f"Contents/Resources/DWARF/{target0.name}_with_debug" - ) - dwarf_sym_name = f"{target0.name}" - - plist_file = env.File("Contents/Info.plist", directory=dsym_dir) - setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - plist_file.attributes, - "aib_additional_directory", - "{}/Contents".format(dsym_dir_name), - ) - - dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir) - - dwarf_file = env.File(dwarf_sym_with_debug, directory=dwarf_dir) - setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - 
dwarf_file.attributes, - "aib_additional_directory", - "{}/Contents/Resources/DWARF".format(dsym_dir_name), - ) - setattr(dwarf_file.attributes, "aib_new_name", dwarf_sym_name) - - debug_files.extend([plist_file, dwarf_file]) - debug_suffix = ".dSYM" - - elif env.TargetOSIs("posix"): - debug_suffix = env.subst("$SEPDBG_SUFFIX") - debug_file = env.File(f"{bazel_node.abspath}{debug_suffix}") - debug_files.append(debug_file) - elif env.TargetOSIs("windows"): - debug_suffix = ".pdb" - debug_file = env.File(f"{bazel_node.abspath}{debug_suffix}") - debug_files.append(debug_file) - else: - pass - - if debug_symbols: - for debug_file in debug_files: - setattr(debug_file.attributes, "debug_file_for", bazel_node) - setattr(bazel_node.attributes, "separate_debug_files", debug_files) - - if not env.TargetOSIs("windows"): - setattr(bazel_node.attributes, "aib_new_name", bazel_node.name[len("lib") :]) - - installed_prog = env.BazelAutoInstallSingleTarget(bazel_node, suffix, bazel_node) - - installed_debugs = [] - if debug_symbols: - for debug_file in debug_files: - installed_debugs.append( - env.BazelAutoInstallSingleTarget(debug_file, debug_suffix, debug_file) - ) - - setattr(t.attributes, "AIB_INSTALLED_FILES", installed_prog) - - return target, source - - builder = env["BUILDERS"]["BazelSharedLibrary"] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter( - [bazel_shared_library_auto_install_emitter, base_emitter] - ) - builder.emitter = new_emitter - - -def injectMongoIncludePaths(thisEnv): - if thisEnv.get("BAZEL_OUT_DIR"): - thisEnv.AppendUnique(CPPPATH=["#$BAZEL_OUT_DIR/src"]) - thisEnv.AppendUnique(CPPPATH=["$BUILD_DIR"]) - - -env.AddMethod(injectMongoIncludePaths, "InjectMongoIncludePaths") - -if get_option("bazel-includes-info"): - env.Tool("bazel_includes_info") - -env.WaitForBazel() - -if str(env["LIBDEPS_GRAPH_ALIAS"]) in COMMAND_LINE_TARGETS: - # The find_symbols binary is a small fast C binary which will extract the missing - # symbols from 
the target library, and discover what linked libraries supply it. This - # setups the binary to be built. - find_symbols_env = env.Clone() - find_symbols_env.VariantDir("${BUILD_DIR}/libdeps", "buildscripts/libdeps", duplicate=0) - find_symbols_env.Program( - target="${BUILD_DIR}/libdeps/find_symbols", - source=["${BUILD_DIR}/libdeps/find_symbols.c"], - CFLAGS=["-O3"], - ) - -env.SConscript( - must_exist=1, - dirs=[ - "src", - ], - duplicate=False, - exports=[ - "env", - ], - variant_dir="$BUILD_DIR", -) - -# TODO: find a way to consolidate SConscript calls to one call in -# SConstruct so they all use variant_dir -env.SConscript( - must_exist=1, - dirs=[ - "jstests", - ], - duplicate=False, - exports=[ - "env", - ], -) - -# Critically, this approach is technically incorrect. While all MongoDB -# SConscript files use our add_option wrapper, builtin tools can -# access SCons's GetOption/AddOption methods directly, causing their options -# to not be validated by this block. -(_, leftover) = _parser.parse_args(sys.argv) -# leftover contains unrecognized options, including environment variables,and -# the argv[0]. If we only look at flags starting with --, and we skip the first -# leftover value (argv[0]), anything that remains is an invalid option -invalid_options = list(filter(lambda x: x.startswith("--"), leftover[1:])) -if len(invalid_options) > 0: - # users frequently misspell "variables-files" (note two `s`s) as - # "variable-files" or "variables-file". Detect and help them out. - for opt in invalid_options: - bad_var_file_opts = ["--variable-file", "--variables-file", "--variable-files"] - if opt in bad_var_file_opts or any( - [opt.startswith(f"{bad_opt}=") for bad_opt in bad_var_file_opts] - ): - print( - f"WARNING: You supplied the invalid parameter '{opt}' to SCons. Did you mean --variables-files (both words plural)?" 
- ) - fatal_error(None, f"ERROR: unknown options supplied to scons: {invalid_options}") - -# Declare the cache prune target -cachePrune = env.Command( - target="#cache-prune", - source=[ - "#buildscripts/scons_cache_prune.py", - ], - action="$PYTHON ${SOURCES[0]} --cache-dir=${CACHE_DIR.abspath} --cache-size=${CACHE_SIZE} --prune-ratio=${CACHE_PRUNE_TARGET/100.00}", - CACHE_DIR=env.Dir(cacheDir), -) - -env.AlwaysBuild(cachePrune) - -# Add a trivial Alias called `configure`. This makes it simple to run, -# or re-run, the SConscript reading and conf tests, but not build any -# real targets. This can be helpful when you are planning a dry-run -# build, or simply want to validate your changes to SConstruct, tools, -# and all the other setup that happens before we begin a real graph -# walk. -env.Alias("configure", None) - -env.CreateConsolidatedTargets() - -# We have finished all SConscripts and targets, so we can ask -# auto_install_binaries to finalize the installation setup. -env.FinalizeInstallDependencies() - -# Create a install-all-meta alias that excludes unittests. This is most useful in -# static builds where the resource requirements of linking 100s of static unittest -# binaries is prohibitive. -candidate_nodes = set() -for child in env.Alias("install-all-meta")[0].all_children(): - candidate_nodes.add(child) - for gchild in child.all_children(): - candidate_nodes.add(gchild) - -names = [ - f'install-{env["AIB_META_COMPONENT"]}', - "install-tests", - env["UNITTEST_ALIAS"], - "install-first_group_unittests", - "install-second_group_unittests", - "install-third_group_unittests", - "install-fourth_group_unittests", - "install-fifth_group_unittests", - "install-sixth_group_unittests", - "install-seventh_group_unittests", - "install-eighth_group_unittests", - # TODO SERVER-97990 Not all unittests are being excluded. 
- "install-mongo-crypt-test", - "install-stitch-support-test", -] - -env.Alias( - "install-all-meta-but-not-unittests", - [ - node - for node in candidate_nodes - if str(node) not in names - and not str(node).startswith(tuple([prefix_name + "-" for prefix_name in names])) - ], -) - -# prove prefix only operates on real AIB_COMPONENTS, unittests is now a meta component -# made up of all the quarter components combined. We create the prove alias for this meta -# component for compatibility with the past and ease of use. -env.Alias( - "prove-unittests", - [ - "prove-first_group_unittests", - "prove-second_group_unittests", - "prove-third_group_unittests", - "prove-fourth_group_unittests", - "prove-fifth_group_unittests", - "prove-sixth_group_unittests", - "prove-seventh_group_unittests", - "prove-eighth_group_unittests", - ], -) - -# We don't want installing files to cause them to flow into the cache, -# since presumably we can re-install them from the origin if needed. -env.NoCache(env.FindInstalledFiles()) - -# Substitute environment variables in any build targets so that we can -# say, for instance: -# -# > scons --prefix=/foo/bar '$DESTDIR' -# or -# > scons \$BUILD_DIR/mongo/base -# -# That way, you can reference targets under the variant dir or install -# path via an invariant name. -# -# We need to replace the values in the BUILD_TARGETS object in-place -# because SCons wants it to be a particular object. -for i, s in enumerate(BUILD_TARGETS): - BUILD_TARGETS[i] = env.subst(s) - -# Do any final checks the Libdeps linter may need to do once all -# SConscripts have been read but before building begins. -libdeps.LibdepLinter(env).final_checks() -libdeps.generate_libdeps_graph(env) - -# We put this next section at the end of the SConstruct since all the targets -# have been declared, and we know all possible bazel targets so -# we can now generate this info into a file for the ninja build to consume. 
-if env.GetOption("ninja") != "disabled" and env.get("__NINJA_NO") != "1": - # convert the SCons FunctioAction into a format that ninja can understand - env.NinjaRegisterFunctionHandler("bazel_builder_action", env.NinjaBazelBuilder) - - # we generate the list of all targets that were labeled Bazel* builder targets - # via the emitter, this outputs a json file which will be read during the ninja - # build. - env.GenerateBazelInfoForNinja() diff --git a/bazel/BUILD.bazel b/bazel/BUILD.bazel index 26dd667269d..92ed5addd2b 100644 --- a/bazel/BUILD.bazel +++ b/bazel/BUILD.bazel @@ -2,7 +2,6 @@ package(default_visibility = ["//visibility:public"]) # Expose script for external usage through bazel. exports_files([ - "scons_link_list.py", "install_rules.py", ]) diff --git a/bazel/config/BUILD.bazel b/bazel/config/BUILD.bazel index ad8fe38ff30..5d303bf1749 100644 --- a/bazel/config/BUILD.bazel +++ b/bazel/config/BUILD.bazel @@ -2275,18 +2275,6 @@ config_setting( }, ) -bool_flag( - name = "scons_query", - build_setting_default = False, -) - -config_setting( - name = "scons_query_enabled", - flag_values = { - "//bazel/config:scons_query": "True", - }, -) - # -------------------------------------- # crypto options # -------------------------------------- @@ -2321,9 +2309,8 @@ selects.config_setting_group( # ssl_provider options # -------------------------------------- -# TODO(SERVER-94377): The `mongo_crypto` setting refers to the old -# `MONGO_CRYPTO` variable in SCons. The `ssl_provider` usually coincides with -# that, but if ssl is disabled it'll get overridden to `none`. That is, +# TODO(SERVER-94377): The `ssl_provider` usually coincides with +# mongo_crypto, but if ssl is disabled it'll get overridden to `none`. That is, # regardless of operating system, you'll get `ssl_provider_none` here if ssl is # disabled. We should figure out a more intuitive/robust solution to this. 
diff --git a/bazel/config/configs.bzl b/bazel/config/configs.bzl index b4e9caae74f..7e1098c7e50 100644 --- a/bazel/config/configs.bzl +++ b/bazel/config/configs.bzl @@ -409,12 +409,6 @@ detect_odr_violations = rule( # build_enterprise_module # ========= -# Original documentation is: -# Comma-separated list of modules to build. Empty means none. Default is all. -# As Bazel will not support the module building in the same way as Scons, the only -# module is supported at present is the enterprise -# more: https://mongodb.slack.com/archives/C05V4F6GZ6J/p1705687513581639 - build_enterprise_provider = provider( doc = """Build enterprise module""", fields = ["enabled"], diff --git a/bazel/config/generate_config_header.bzl b/bazel/config/generate_config_header.bzl index 09db25f062c..c582a98b166 100644 --- a/bazel/config/generate_config_header.bzl +++ b/bazel/config/generate_config_header.bzl @@ -149,11 +149,6 @@ generate_config_header_rule = rule( ) def generate_config_header(name, tags = [], **kwargs): - write_target( - name = name + "_gen_source_tag", - target_name = name, - tags = ["scons_link_lists"], - ) generate_config_header_rule( name = name, tags = tags + ["gen_source"], diff --git a/bazel/config/render_template.bzl b/bazel/config/render_template.bzl index 6a9bc06e533..4e3382f9060 100644 --- a/bazel/config/render_template.bzl +++ b/bazel/config/render_template.bzl @@ -50,11 +50,6 @@ render_template_rule = rule( ) def render_template(name, tags = [], **kwargs): - write_target( - name = name + "_gen_source_tag", - target_name = name, - tags = ["scons_link_lists"], - ) render_template_rule( name = name, tags = tags + ["gen_source"], diff --git a/bazel/docs/developer_workflow.md b/bazel/docs/developer_workflow.md index e46c5cc907c..50a53e95a5d 100644 --- a/bazel/docs/developer_workflow.md +++ b/bazel/docs/developer_workflow.md @@ -4,9 +4,7 @@ This document describes the Server Developer workflow for modifying Bazel build # Creating a new BUILD.bazel file -Similar to 
SCons, a build target is defined in the directory where its source code exists. To create a target that compiles **src/mongo/hello_world.cpp**, you would create **src/mongo/BUILD.bazel**. - -The Bazel equivalent of SConscript files are BUILD.bazel files. +A build target is defined in the directory where its source code exists. To create a target that compiles **src/mongo/hello_world.cpp**, you would create **src/mongo/BUILD.bazel**. src/mongo/BUILD.bazel would contain: @@ -33,8 +31,6 @@ The full target name is a combination between the directory of the BUILD.bazel f Bazel makes use of static analysis wherever possible to improve execution and querying speed. As part of this, source and header files must not be declared dynamically (ex. glob, wildcard, etc). Instead, you'll need to manually add a reference to each header or source file you add into your build target. -The divergence from SCons is that now source files have to be declared in addition to header files. - mongo_cc_binary( name = "hello_world", srcs = [ @@ -84,14 +80,6 @@ If a library or binary depends on another library, this must be declared in the ], } -## Depending on a Bazel Library in a SCons Build Target - -During migration from SCons to Bazel, the Build Team has created an integration layer between the two while working towards converting all SCons targets to Bazel targets. - -Targets which are built by bazel will be labeled as ThinTarget builder types. You can reference them by the same name you would use in scons in LIBDEPS lists. - -If adding a a new library to the build, check to see if it should be added as a bazel or scons library. This will depend on how deep it is in the dependency tree. You can ask the build team at #ask-devprod-build for advice on if a given library should be added to the bazel or scons part of the build. 
- ## Running clang-tidy via Bazel Note: This feature is still in development; see https://jira.mongodb.org/browse/SERVER-80396 for details) diff --git a/bazel/docs/engflow_credential_setup.md b/bazel/docs/engflow_credential_setup.md index 377812f6cca..43fa71e5b71 100644 --- a/bazel/docs/engflow_credential_setup.md +++ b/bazel/docs/engflow_credential_setup.md @@ -2,23 +2,6 @@ MongoDB uses EngFlow to enable remote execution with Bazel. This dramatically speeds up the build process, but is only available to internal MongoDB employees. -To install the necessary credentials to enable remote execution, run scons.py with any build command, then follow the setup instructions it prints out. Or: +Bazel uses a wrapper script to check the credentials on each invocation, if for some reason thats not working, you can also manually perform this process with this command alternatively: -(Only if not in the Engineering org) - -- Request access to the MANA group https://mana.corp.mongodbgov.com/resources/659ec4b9bccf3819e5608712 - -(For everyone) - -- Go to https://sodalite.cluster.engflow.com/gettingstarted -- Login with OKTA, then click the "GENERATE AND DOWNLOAD MTLS CERTIFICATE" button - - (If logging in with OKTA doesn't work) Login with Google using your MongoDB email, then click the "GENERATE AND DOWNLOAD MTLS CERTIFICATE" button -- On your local system (usually your MacBook), open a shell terminal and, after setting the variables on the first three lines, run: - - REMOTE_USER= - REMOTE_HOST= - ZIP_FILE=~/Downloads/engflow-mTLS.zip - - curl https://raw.githubusercontent.com/mongodb/mongo/master/buildscripts/setup_engflow_creds.sh -o setup_engflow_creds.sh - chmod +x ./setup_engflow_creds.sh - ./setup_engflow_creds.sh $REMOTE_USER $REMOTE_HOST $ZIP_FILE + python buildscripts/engflow_auth.py diff --git a/bazel/header_deps.bzl b/bazel/header_deps.bzl index 12ac9331d49..e57a831fc2b 100644 --- a/bazel/header_deps.bzl +++ b/bazel/header_deps.bzl @@ -1,5 +1,4 @@ HEADER_DEP_SUFFIX = 
"_header_dep" -LINK_DEP_SUFFIX = "_link_dep" def create_header_dep_impl(ctx): compilation_context = cc_common.create_compilation_context( diff --git a/bazel/install_rules/pretty_printer_tests.bzl b/bazel/install_rules/pretty_printer_tests.bzl index adecee5bb32..23e0a3aaaec 100644 --- a/bazel/install_rules/pretty_printer_tests.bzl +++ b/bazel/install_rules/pretty_printer_tests.bzl @@ -67,7 +67,7 @@ mongo_pretty_printer_test = rule( # TODO have a way to get to gdb from inside bazel #"_gdb": attr.label(allow_single_file = True, default = "//:gdb"), "_pretty_printer_creation_script": attr.label(allow_single_file = True, default = "//bazel/install_rules:pretty_printer_test_creator.py"), - "_pip_requirements_script": attr.label(allow_single_file = True, default = "//site_scons/mongo:pip_requirements.py"), + "_pip_requirements_script": attr.label(allow_single_file = True, default = "//buildscripts:pip_requirements.py"), "_pretty_printer_launcher_infile": attr.label(allow_single_file = True, default = "//src/mongo/util:pretty_printer_test_launcher.py.in"), }, doc = "Create pretty printer tests", diff --git a/bazel/mongo_src_rules.bzl b/bazel/mongo_src_rules.bzl index 1643533e6a9..334893417c2 100644 --- a/bazel/mongo_src_rules.bzl +++ b/bazel/mongo_src_rules.bzl @@ -11,9 +11,7 @@ load("@rules_proto//proto:defs.bzl", "proto_library") load( "//bazel:header_deps.bzl", "HEADER_DEP_SUFFIX", - "LINK_DEP_SUFFIX", "create_header_dep", - "create_link_deps", ) load( "//bazel:separate_debug.bzl", @@ -502,13 +500,6 @@ CLANG_WARNINGS_COPTS = select({ # only) flag that turns it on. "-Wunused-exception-parameter", - # TODO: Note that the following two flags are added to CCFLAGS even - # though they are really C++ specific. We need to do this because SCons - # passes CXXFLAGS *before* CCFLAGS, but CCFLAGS contains -Wall, which - # re-enables the warnings we are trying to suppress. 
In the future, we - # should move all warning flags to CCWARNFLAGS and CXXWARNFLAGS and add - # these to CCOM and CXXCOM as appropriate. - # # Clang likes to warn about unused private fields, but some of our # third_party libraries have such things. "-Wno-unused-private-field", @@ -1195,9 +1186,6 @@ DETECT_ODR_VIOLATIONS_LINKFLAGS = select({ GDWARF_FEATURES = select({ "//bazel/config:linux_clang": ["dwarf32"], "//bazel/config:linux_gcc_fission": ["dwarf32"], # gdb crashes with -gsplit-dwarf and -gdwarf64 - # SCons implementation originally used a compiler check to verify that - # -gdwarf64 was supported. If this creates incompatibility issues, we may - # need to fallback to -gdwarf32 in certain cases. "//bazel/config:linux_gcc": ["dwarf64"], # SUSE15 builds system libraries with dwarf32, use dwarf32 to be keep consistent "//bazel/config:suse15_gcc": ["dwarf32"], @@ -1861,14 +1849,6 @@ def mongo_cc_library( header_deps = header_deps, ) - create_link_deps( - name = name + LINK_DEP_SUFFIX, - target_name = name, - link_deps = [name] + deps + cc_deps, - tags = ["scons_link_lists"], - target_compatible_with = target_compatible_with + enterprise_compatible, - ) - # Create a cc_library entry to generate a shared archive of the target. 
cc_library( name = name + SHARED_ARCHIVE_SUFFIX, @@ -2188,21 +2168,6 @@ def _mongo_cc_binary_and_test( "env": env | SANITIZER_ENV, } | kwargs - create_link_deps( - name = name + LINK_DEP_SUFFIX, - target_name = name, - link_deps = all_deps, - tags = ["scons_link_lists"], - testonly = testonly, - target_compatible_with = target_compatible_with + enterprise_compatible, - ) - - write_sources( - name = name + "_sources_list", - sources = srcs, - tags = ["scons_link_lists"], - ) - original_tags = list(args["tags"]) if _program_type == "binary": args["tags"] += ["intermediate_target"] @@ -2548,11 +2513,6 @@ write_target = rule( ) def idl_generator(name, tags = [], **kwargs): - write_target( - name = name + "_gen_source_tag", - target_name = name, - tags = ["scons_link_lists"], - ) idl_generator_rule( name = name, tags = tags + ["gen_source"], @@ -2648,12 +2608,6 @@ def mongo_proto_library( **kwargs ) - dummy_file( - name = name + "_exclude_link", - output = "lib" + name + ".so.exclude_lib", - tags = ["scons_link_lists"], - ) - def mongo_cc_proto_library( name, deps, diff --git a/bazel/wrapper_hook/lint.py b/bazel/wrapper_hook/lint.py index 037d43cc495..1ab6ecad418 100644 --- a/bazel/wrapper_hook/lint.py +++ b/bazel/wrapper_hook/lint.py @@ -10,55 +10,7 @@ REPO_ROOT = pathlib.Path(__file__).parent.parent.parent sys.path.append(str(REPO_ROOT)) -def check_for_missing_test_stubs(): - bazel_tests = ( - subprocess.check_output( - [ - "bazel", - "query", - "attr(tags, 'mongo_unittest', //...) 
intersect attr(tags, 'final_target', //...)", - ], - stderr=subprocess.DEVNULL, - ) - .decode("utf-8") - .splitlines() - ) - bazel_tests = [bazel_test.split(":")[1] for bazel_test in bazel_tests] - - scons_targets = ( - subprocess.check_output( - ["grep -rPo 'target\s*=\s*\"\K\w*' ./src | awk -F: '{print $2}'"], - stderr=subprocess.STDOUT, - shell=True, - ) - .decode("utf-8") - .splitlines() - ) - - missing_tests = [] - for bazel_test in bazel_tests: - if bazel_test not in scons_targets: - missing_tests += [bazel_test] - - if len(missing_tests) == 0: - print("All bazel tests have SConscript stubs") - return True - - print("Tests found without SConscript stubs:") - for missing_test in missing_tests: - print(missing_test) - print("\nPlease add a stub in the SConscript file in the directory of each test similar to:") - print(""" -env.CppUnitTest( - target="test_name", - source=[], -) - -""") - return False - - -def create_build_files_in_new_js_dirs(): +def create_build_files_in_new_js_dirs() -> None: base_dirs = ["src/mongo/db/modules/enterprise/jstests", "jstests"] for base_dir in base_dirs: for root, dirs, _ in os.walk(base_dir): @@ -86,7 +38,7 @@ js_library( print(f"Created BUILD.bazel in {full_dir}") -def list_files_with_targets(bazel_bin: str): +def list_files_with_targets(bazel_bin: str) -> List: return [ line.strip() for line in subprocess.run( @@ -103,7 +55,7 @@ def list_files_without_targets( type_name: str, ext: str, dirs: List[str], -): +) -> bool: # rules_lint only checks files that are in targets, verify that all files in the source tree # are contained within targets. 
@@ -184,7 +136,7 @@ def list_files_without_targets( return True -def run_rules_lint(bazel_bin, args) -> bool: +def run_rules_lint(bazel_bin: str, args: List[str]) -> bool: if platform.system() == "Windows": print("eslint not supported on windows") return False @@ -201,9 +153,6 @@ def run_rules_lint(bazel_bin, args) -> bool: ): return False - if not check_for_missing_test_stubs(): - return False - # Default to linting everything if no path was passed in if len([arg for arg in args if not arg.startswith("--")]) == 0: args = ["//..."] + args diff --git a/buildfarm/README.md b/buildfarm/README.md index eb591749873..6cf7416f28d 100644 --- a/buildfarm/README.md +++ b/buildfarm/README.md @@ -1 +1 @@ -This directory exists to manage a Buildfarm; see docs/bazel.md for more details. +This directory exists to manage a Buildfarm diff --git a/buildscripts/BUILD.bazel b/buildscripts/BUILD.bazel index 24617083e01..12641cc0e94 100644 --- a/buildscripts/BUILD.bazel +++ b/buildscripts/BUILD.bazel @@ -5,7 +5,9 @@ exports_files([ "candle_wrapper.py", "cheetah_source_generator.py", "clang_tidy_config_gen.py", + "jstoh.py", "msitrim.py", + "pip_requirements.py", ]) py_binary( diff --git a/buildscripts/calculate_tooling_metrics.py b/buildscripts/calculate_tooling_metrics.py deleted file mode 100755 index c52ce69a034..00000000000 --- a/buildscripts/calculate_tooling_metrics.py +++ /dev/null @@ -1,86 +0,0 @@ -import datetime - -import mongo_tooling_metrics.client as metrics_client -import pkg_resources -import pymongo -from mongo_tooling_metrics.lib.top_level_metrics import ( - NinjaToolingMetrics, - ResmokeToolingMetrics, - SConsToolingMetrics, -) -from pydantic import ValidationError - -# Check cluster connectivity -try: - client = pymongo.MongoClient( - host=metrics_client.INTERNAL_TOOLING_METRICS_HOSTNAME, - username=metrics_client.INTERNAL_TOOLING_METRICS_USERNAME, - password=metrics_client.INTERNAL_TOOLING_METRICS_PASSWORD, - ) - client.server_info() -except Exception as exc: - 
print("Could not connect to Atlas cluster") - raise exc - -metrics_classes = { - "ninja": NinjaToolingMetrics, - "scons": SConsToolingMetrics, - "resmoke": ResmokeToolingMetrics, -} - - -def get_metrics_data(source, lookback=30): - try: - # Get SCons metrics for the lookback period - tooling_metrics_version = pkg_resources.get_distribution("mongo-tooling-metrics").version - lookback_datetime = datetime.datetime.utcnow() - datetime.timedelta(days=lookback) - last_week_metrics = client.metrics.tooling_metrics.find( - { - "source": source, - "utc_starttime": {"$gt": lookback_datetime}, - "tooling_metrics_version": tooling_metrics_version, - } - ) - - malformed_metrics = [] - invalid_metrics = [] - total_docs = 0 - - # Find any malformed/invalid documents in the cluster - for doc in last_week_metrics: - total_docs += 1 - try: - metrics = metrics_classes[source](**doc) - if metrics.is_malformed(): - malformed_metrics.append(doc["_id"]) - except ValidationError: - invalid_metrics.append(doc["_id"]) - - metrics_detailed = ( - f"METRICS DETAILED ({source}):\n" - f"malformed_metrics_last_week: {malformed_metrics}\n" - f"invalid_metrics_last_week: {invalid_metrics}\n" - f"total_docs_last_week: {total_docs}\n" - f"tooling_metrics_version: {tooling_metrics_version}\n" - ) - metrics_overview = ( - f"METRICS OVERVIEW ({source}):\n" - f"malformed_metrics_last_week: {len(malformed_metrics)} ({len(malformed_metrics)/total_docs*100:.2f}%)\n" - f"invalid_metrics_last_week: {len(invalid_metrics)} ({len(invalid_metrics)/total_docs*100:.2f}%)\n" - f"total_docs_last_week: {total_docs}\n" - f"tooling_metrics_version: {tooling_metrics_version}\n" - ) - - print(metrics_overview) - print(metrics_detailed) - - return metrics_overview - - except Exception as exc: - print("Unexpected failure while getting metrics") - raise exc - - -ninja_metrics_overview = get_metrics_data("ninja") -scons_metrics_overview = get_metrics_data("scons") -resmoke_metrics_overview = get_metrics_data("resmoke") diff 
--git a/buildscripts/clang_format.py b/buildscripts/clang_format.py index ede5a48b04c..73f79d04fa2 100755 --- a/buildscripts/clang_format.py +++ b/buildscripts/clang_format.py @@ -315,8 +315,8 @@ def get_list_from_lines(lines): def _get_build_dir(): - """Return the location of the scons' build directory.""" - return os.path.join(git.get_base_dir(), "build") + """Return the location of the default clang cache directory.""" + return os.path.join(git.get_base_dir(), ".clang_format_cache") def _lint_files(clang_format, files): diff --git a/buildscripts/clang_tidy.py b/buildscripts/clang_tidy.py index 34aa3129e87..3b457ded7b7 100755 --- a/buildscripts/clang_tidy.py +++ b/buildscripts/clang_tidy.py @@ -183,7 +183,7 @@ def _run_tidy(args, parser_defaults): if args.compile_commands == parser_defaults.compile_commands: print( f"Could not find compile commands: '{args.compile_commands}', to generate it, use the build command:\n\n" - + "python3 buildscripts/scons.py --build-profile=compiledb compiledb\n" + + "bazel build compiledb\n" ) else: print(f"Could not find compile commands: {args.compile_commands}") @@ -196,7 +196,7 @@ def _run_tidy(args, parser_defaults): if args.clang_tidy_cfg == parser_defaults.clang_tidy_cfg: print( f"Could not find config file: '{args.clang_tidy_cfg}', to generate it, use the build command:\n\n" - + "python3 buildscripts/scons.py --build-profile=compiledb compiledb\n" + + "bazel build compiledb\n" ) else: print(f"Could not find config file: {args.clang_tidy_cfg}") diff --git a/buildscripts/convert_bazel_headers.py b/buildscripts/convert_bazel_headers.py deleted file mode 100644 index 8e1ed11050b..00000000000 --- a/buildscripts/convert_bazel_headers.py +++ /dev/null @@ -1,380 +0,0 @@ -import concurrent.futures -import json -import os -import platform -import shlex -import shutil -import subprocess -import sys -import traceback -from typing import Annotated, List - -import typer - - -def work(target_library: str, silent: bool, cpu_count: int, cc: 
List[str]): - headers = set() - original_headers = set() - - def get_headers(line): - nonlocal headers - try: - with open(target_library + ".bazel_headers") as f: - bazel_headers = [line.strip() for line in f.readlines()] - bazel_headers += [ - "src/mongo/platform/basic.h", - "src/mongo/platform/windows_basic.h", - ] - - with open(target_library + ".env_vars") as f: - tmp_env_vars = json.load(f) - env_vars = {} - # subprocess requies only strings - for k, v in tmp_env_vars.items(): - env_vars[str(k)] = str(v) - - for command in cc: - cmd_output = command["output"].replace("\\", "/").strip("'").strip('"') - line_output = line.replace("\\", "/") - - if cmd_output == line_output: - os.makedirs(os.path.dirname(line), exist_ok=True) - if os.name == "nt": - header_arg = " /showIncludes" - else: - header_arg = " -H" - - if not silent: - print(f"compiling {line}") - - p = subprocess.run( - shlex.split((command["command"].replace("\\", "/") + header_arg)), - env=env_vars, - capture_output=True, - text=True, - ) - if p.returncode != 0: - print(f"Error compiling, exitcode: {p.returncode}", file=sys.stderr) - print(f"STDOUT: {p.stdout}", file=sys.stderr) - print(f"STDERR: {p.stderr}", file=sys.stderr) - sys.exit(1) - if os.name == "nt": - for line in p.stdout.split("\n"): - line = ( - line.replace("Note: including file:", "") - .strip(" ") - .replace("\\", "/") - ) - - if not line.startswith(os.getcwd().replace("\\", "/")): - continue - - line = os.path.relpath( - line, start=os.getcwd().replace("\\", "/") - ).replace("\\", "/") - if line not in bazel_headers: - if line.startswith("src/") or line.startswith("bazel-out/"): - original_headers.add(line) - line = "//" + line - line = ":".join(line.rsplit("/", 1)) - - headers.add(line) - else: - for line in p.stderr.split("\n"): - if ". src/" in line or ". 
bazel-out/" in line: - while line.startswith("."): - line = line[1:] - line = line.replace("\\", "/") - - if line[1:] not in bazel_headers: - original_headers.add(line[1:]) - line = "//" + line[1:] - line = ":".join(line.rsplit("/", 1)) - - headers.add(line) - except Exception as exc: - print(traceback.format_exc(), file=sys.stderr) - raise exc - - sources = [] - with open(target_library + ".obj_files") as f: - lines = f.readlines() - for line in lines: - line = line.strip() - line = line.replace("build/opt", "//src") - line = line[: line.find(".")] + ".cpp" - src_header = os.path.splitext(line[2:])[0] + ".h" - if os.path.exists(src_header): - src_header = "//" + ":".join(src_header.rsplit("/", 1)) - headers.add(src_header) - line = ":".join(line.rsplit("/", 1)) - if line.endswith("_gen.cpp"): - line = line[:-4] - sources.append(line) - - with concurrent.futures.ThreadPoolExecutor(max_workers=cpu_count) as executor: - jobs = {executor.submit(get_headers, line.strip()): line.strip() for line in lines} - for completed_job in concurrent.futures.as_completed(jobs): - if not silent: - print(f"finished {jobs[completed_job]}") - - with open(".bazel_include_info.json") as f: - bazel_include_info = json.load(f) - - header_map = bazel_include_info["header_map"] - gen_header_map = bazel_include_info["gen_header_map"] - bazel_exec = bazel_include_info["bazel_exec"] - bazel_config = bazel_include_info["config"] - - global_headers = ( - "src/mongo:config.h", - "src/mongo/config.h", - "src/mongo/platform/basic.h", - "src/mongo/platform/windows_basic.h", - ) - - reverse_header_map = {} - for k, v in header_map.items(): - for hdr in v: - if not hdr or hdr.endswith(global_headers): - continue - bazel_header = "//" + hdr.replace("\\", "/") - bazel_header = ":".join(bazel_header.rsplit("/", 1)) - if bazel_header.startswith("//src/third_party/SafeInt"): - reverse_header_map[bazel_header] = ["//src/third_party/SafeInt:headers"] - elif bazel_header.startswith("//src/third_party/immer"): 
- reverse_header_map[bazel_header] = ["//src/third_party/immer:headers"] - elif bazel_header in reverse_header_map: - if bazel_header.startswith("//src/third_party/"): - continue - reverse_header_map[bazel_header].append(k) - else: - reverse_header_map[bazel_header] = [k] - - for k, v in gen_header_map.items(): - for hdr in v: - if not hdr or hdr.endswith(global_headers): - continue - bazel_header = "//" + hdr.replace("\\", "/") - bazel_header = ":".join(bazel_header.rsplit("/", 1)) - if bazel_header not in reverse_header_map: - reverse_header_map[bazel_header] = [k] - else: - reverse_header_map[bazel_header].append(k) - - recommended_deps = set() - minimal_headers = set() - - basename_sources = [os.path.splitext(src.rsplit(":", 1)[1])[0] for src in sources] - for header in headers: - header_basename = os.path.splitext(header.rsplit(":", 1)[1])[0] - if header_basename in basename_sources: - minimal_headers.add(header) - continue - - if header in reverse_header_map: - found = False - for lib in reverse_header_map[header]: - recommended_deps.add(lib) - else: - if not header.endswith(global_headers): - minimal_headers.add(header) - - deps_order_by_height = [] - deps_queries = {} - - with open(target_library + ".bazel_deps") as f: - original_deps = [line.strip() for line in f.readlines()] - - for dep in recommended_deps | set(original_deps): - p = subprocess.run( - [bazel_exec, "cquery"] - + bazel_config - + [f'kind("extract_debuginfo|idl_generator|render_template", deps("@{dep}"))'], - capture_output=True, - text=True, - ) - deps_queries[dep] = [ - line.split(" ")[0] for line in p.stdout.splitlines() if line.startswith("//") - ] - deps_order_by_height.append((dep, len(deps_queries[dep]))) - - deps_order_by_height.sort(key=lambda x: x[1]) - - deps_order_by_height = [dep[0] for dep in deps_order_by_height] - optimal_header_deps = set() - for header in headers: - if header in minimal_headers: - continue - - path_header = "/".join(header.rsplit(":", 1)) - path_header = 
path_header[2:] - for dep in deps_order_by_height: - if dep in header_map and path_header in header_map[dep]: - optimal_header_deps.add(dep) - break - found = False - for other_dep in deps_order_by_height: - if other_dep in gen_header_map: - continue - if dep in deps_queries[other_dep]: - optimal_header_deps.add(other_dep) - found = True - break - if found: - continue - if dep in gen_header_map: - minimal_headers.add(dep) - else: - raise Exception( - f"Should not happen, did not find way to add dep {dep} for {target_library}" - ) - - optimal_header_deps = list(optimal_header_deps) - - working_deps = optimal_header_deps.copy() - for dep in optimal_header_deps: - if dep in working_deps: - for test_dep in optimal_header_deps: - if test_dep == dep: - continue - if test_dep in working_deps and test_dep in deps_queries[dep]: - working_deps.remove(test_dep) - - link_deps = [] - header_deps = [] - for dep in sorted(list(set(list(working_deps) + list(set(original_deps))))): - if dep in original_deps: - link_deps.append(dep) - else: - header_deps.append(dep) - - target_name = os.path.splitext(os.path.basename(target_library))[0] - if target_name.startswith("lib"): - target_name = target_name[3:] - - bazel_target = f"{target_library}\n" - bazel_target += "=" * 50 + "\n" - local_bazel_path = os.path.dirname(target_library.replace("build/opt", "//src")) + ":" - bazel_target += "mongo_cc_library(\n" - bazel_target += f' name = "{target_name}",\n' - if sources: - bazel_target += " srcs = [\n" - for src in sorted([src.replace(local_bazel_path, "") for src in sources]): - bazel_target += f' "{src}",\n' - bazel_target += " ],\n" - if minimal_headers: - bazel_target += " hdrs = [\n" - for header in sorted([header.replace(local_bazel_path, "") for header in minimal_headers]): - bazel_target += f' "{header}",\n' - bazel_target += " ],\n" - if header_deps: - bazel_target += " header_deps = [\n" - for dep in sorted([dep.strip().replace(local_bazel_path, "") for dep in header_deps]): - 
bazel_target += f' "{dep}",\n' - bazel_target += " ],\n" - if link_deps: - bazel_target += " deps = [\n" - for dep in sorted([dep.strip().replace(local_bazel_path, "") for dep in link_deps]): - bazel_target += f' "{dep}",\n' - bazel_target += " ],\n" - bazel_target += ")\n" - return bazel_target - - -def main( - target_libraries: Annotated[List[str], typer.Argument()], - silent: Annotated[bool, typer.Option()] = False, - skip_scons: Annotated[bool, typer.Option()] = False, - debug_mode: Annotated[bool, typer.Option()] = False, -): - extra_args = [] - if os.name == "nt": - extra_args += [ - "CPPPATH=C:\sasl\include", - "LIBPATH=C:\sasl\lib", - ] - target_library = os.path.join( - os.path.dirname(target_library), os.path.basename(target_library)[3:-2] + "lib" - ) - - path = shutil.which("icecc") - if path is None: - extra_args += ["ICECC="] - - # Define separate functions instead of using lambdas - def target_fmt_nt(target_library: str) -> str: - return os.path.join( - os.path.dirname(target_library), os.path.basename(target_library)[3:-2] + "lib" - ) - - def target_fmt_darwin(target_library: str) -> str: - return target_library[:-2] + "a" - - def target_fmt_default(x: str) -> None: - return None - - if os.name == "nt": - target_fmt = target_fmt_nt - elif platform.system() == "Darwin": - target_fmt = target_fmt_darwin - else: - target_fmt = target_fmt_default - - map(target_fmt, target_libraries) - - cmd = [ - sys.executable, - "buildscripts/scons.py", - "--build-profile=opt", - " ".join( - [f"--bazel-includes-info={target_library}" for target_library in target_libraries] - ), - "--libdeps-linting=off", - "--ninja=disabled", - "compiledb", - ] + extra_args - - if not skip_scons: - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) - - while True: - line = p.stdout.readline() - if not line: - break - print(line.strip(), file=sys.stderr) - - _, _ = p.communicate() - - if p.returncode != 0: - print(f"SCons build failed, exit code 
{p.returncode}", file=sys.stderr) - sys.exit(1) - - with open("compile_commands.json") as f: - cc = json.load(f) - if platform.system() == "Linux": - cpu_count = len(os.sched_getaffinity(0)) + 4 - else: - cpu_count = os.cpu_count() + 4 - - # Process pool makes it harder to debug what is happening - # so for debug mode, we disabled process pool so things happen in order - # you can just print from the process. - if debug_mode: - bazel_targets = [] - for target_library in target_libraries: - bazel_targets += [work(target_library, silent, cpu_count, cc)] - else: - with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor: - jobs = { - executor.submit(work, target_library, silent, cpu_count, cc): target_library - for target_library in target_libraries - } - bazel_targets = [job.result() for job in concurrent.futures.as_completed(jobs)] - - print("====== Bazel Targets ======\n") - print("\n".join(bazel_targets)) - - -if __name__ == "__main__": - typer.run(main) diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py index 880bd5793ab..c32afd12477 100755 --- a/buildscripts/errorcodes.py +++ b/buildscripts/errorcodes.py @@ -142,11 +142,7 @@ def is_terminated(lines): def get_next_code(seen, server_ticket=0): - """Find next unused assertion code. 
- - Called by: SConstruct and main() - Since SConstruct calls us, codes[] must be global OR WE REPARSE EVERYTHING - """ + """Find next unused assertion code.""" if not codes: (_, _, seen) = read_error_codes() @@ -169,12 +165,6 @@ def get_next_code(seen, server_ticket=0): return iter(range(highest + 1, MAXIMUM_CODE)) -def check_error_codes(): - """Check error codes as SConstruct expects a boolean response from this function.""" - (_, errors, _) = read_error_codes() - return len(errors) == 0 - - def read_error_codes(src_root="src/mongo"): """Define callback, call parse_source_files() with callback, save matches to global codes list.""" seen = {} diff --git a/buildscripts/evergreen_gen_build_metrics_tasks.py b/buildscripts/evergreen_gen_build_metrics_tasks.py deleted file mode 100755 index e6e76d0c296..00000000000 --- a/buildscripts/evergreen_gen_build_metrics_tasks.py +++ /dev/null @@ -1,221 +0,0 @@ -#!/usr/bin/env python3 -import os -import platform -import sys - -from shrub.v2 import BuildVariant, FunctionCall, ShrubProject, Task, TaskGroup -from shrub.v2.command import BuiltInCommand - - -def main(): - tasks = { - "windows_tasks": {}, - "linux_x86_64_tasks": {}, - "linux_arm64_tasks": {}, - "macos_tasks": {}, - } - - tasks_prefixes = { - "windows_tasks": "build_metrics_msvc", - "linux_x86_64_tasks": "build_metrics_x86_64", - "linux_arm64_tasks": "build_metrics_arm64", - "macos_tasks": "build_metrics_xcode", - } - - task_group_targets = { - "dynamic": [ - "install-devcore", - "install-all-meta generate-libdeps-graph", - ], - "static": [ - "install-devcore", - "install-all-meta-but-not-unittests", - ], - } - - def create_build_metric_task_steps(task_build_flags, task_targets, split_num): - evg_flags = f"--debug=time,count,memory VARIANT_DIR=metrics BUILD_METRICS_EVG_TASK_ID={os.environ['task_id']} BUILD_METRICS_EVG_BUILD_VARIANT={os.environ['build_variant']}" - cache_flags = "--cache-dir=$PWD/scons-cache-{split_num} --cache-signature-mode=validate" - - 
scons_task_steps = [ - f"{evg_flags} --build-metrics=build_metrics_{split_num}.json", - f"{evg_flags} {cache_flags} --cache-populate --build-metrics=populate_cache_{split_num}.json", - f"{evg_flags} --clean", - f"{evg_flags} {cache_flags} --build-metrics=pull_cache_{split_num}.json", - ] - - task_steps = [ - FunctionCall( - "scons compile", - { - "patch_compile_flags": f"{task_build_flags} {step_flags}", - "targets": task_targets, - "compiling_for_test": "true", - }, - ) - for step_flags in scons_task_steps - ] - return task_steps - - def create_build_metric_task_list(task_list, link_model, build_flags): - tasks[task_list][link_model] = [] - prefix = tasks_prefixes[task_list] - index = 0 - for index, target in enumerate(task_group_targets[link_model]): - tasks[task_list][link_model].append( - Task( - f"{prefix}_{link_model}_build_split_{index}_{target.replace(' ', '_')}", - create_build_metric_task_steps(build_flags, target, index), - ) - ) - tasks[task_list][link_model].append( - Task( - f"{prefix}_{link_model}_build_split_{index+1}_combine_metrics", - [ - FunctionCall("combine build metrics"), - FunctionCall("attach build metrics"), - FunctionCall("print top N metrics"), - ], - ) - ) - - ############################# - if sys.platform == "win32": - build_flags = "--cache=nolinked" - - create_build_metric_task_list( - "windows_tasks", - "static", - build_flags, - ) - - ############################## - elif sys.platform == "darwin": - for link_model in ["dynamic", "static"]: - build_flags = f"--link-model={link_model} --force-macos-dynamic-link" + ( - " --cache=nolinked" if link_model == "static" else " --cache=all" - ) - - create_build_metric_task_list( - "macos_tasks", - link_model, - build_flags, - ) - - ############################## - else: - for toolchain in ["v4", "v5"]: - # possibly we want to add clang to the mix here, so leaving as an easy drop in - for compiler in ["gcc"]: - for link_model in ["dynamic", "static"]: - build_flags = ( - 
f"BUILD_METRICS_BLOATY=/opt/mongodbtoolchain/{toolchain}/bin/bloaty " - + f"--variables-files=etc/scons/mongodbtoolchain_{toolchain}_{compiler}.vars " - + f"--link-model={link_model}" - + (" --cache=nolinked" if link_model == "static" else " --cache=all") - ) - - create_build_metric_task_list( - "linux_x86_64_tasks", - link_model, - build_flags, - ) - - create_build_metric_task_list( - "linux_arm64_tasks", - link_model, - build_flags, - ) - - def create_task_group(target_platform, tasks): - task_group = TaskGroup( - name=f"build_metrics_{target_platform}_task_group_gen", - tasks=tasks, - max_hosts=1, - setup_group=[ - BuiltInCommand("manifest.load", {}), - FunctionCall("git get project and add git tag"), - FunctionCall("set task expansion macros"), - FunctionCall("f_expansions_write"), - FunctionCall("kill processes"), - FunctionCall("cleanup environment"), - FunctionCall("set up venv"), - FunctionCall("set up libdeps venv"), - FunctionCall("upload pip requirements"), - FunctionCall("f_expansions_write"), - FunctionCall("configure evergreen api credentials"), - FunctionCall("get buildnumber"), - FunctionCall("f_expansions_write"), - FunctionCall("generate compile expansions"), - FunctionCall("f_expansions_write"), - ], - setup_task=[ - FunctionCall("f_expansions_write"), - FunctionCall("apply compile expansions"), - FunctionCall("set task expansion macros"), - FunctionCall("f_expansions_write"), - ], - teardown_group=[ - FunctionCall("f_expansions_write"), - FunctionCall("cleanup environment"), - ], - teardown_task=[ - FunctionCall("f_expansions_write"), - FunctionCall("attach scons logs"), - FunctionCall("kill processes"), - FunctionCall("save disk statistics"), - FunctionCall("save system resource information"), - FunctionCall( - "remove files", {"files": " ".join(["src/build", "src/scons-cache", "*.tgz"])} - ), - ], - setup_group_can_fail_task=True, - ) - return task_group - - if sys.platform == "win32": - variant = BuildVariant( - 
name="enterprise-windows-build-metrics", - activate=True, - ) - variant.add_task_group( - create_task_group("windows", tasks["windows_tasks"]["static"]), - ["windows-2022-xlarge"], - ) - elif sys.platform == "darwin": - variant = BuildVariant( - name="macos-enterprise-build-metrics", - activate=True, - ) - for link_model, tasks in tasks["macos_tasks"].items(): - variant.add_task_group( - create_task_group(f"macos_{link_model}", tasks), ["macos-14-arm64"] - ) - else: - if platform.machine() == "x86_64": - variant = BuildVariant( - name="enterprise-rhel-8-64-bit-build-metrics", - activate=True, - ) - for link_model, tasks in tasks["linux_x86_64_tasks"].items(): - variant.add_task_group( - create_task_group(f"linux_X86_64_{link_model}", tasks), ["rhel8.8-xlarge"] - ) - else: - variant = BuildVariant( - name="enterprise-rhel-8-aarch64-build-metrics", - activate=True, - ) - for link_model, tasks in tasks["linux_arm64_tasks"].items(): - variant.add_task_group( - create_task_group(f"linux_arm64_{link_model}", tasks), - ["amazon2023-arm64-large"], - ) - - project = ShrubProject({variant}) - with open("build_metrics_task_gen.json", "w") as fout: - fout.write(project.json()) - - -if __name__ == "__main__": - main() diff --git a/buildscripts/fix_headers.py b/buildscripts/fix_headers.py index 0bfbc301d69..d5f08b91ab7 100644 --- a/buildscripts/fix_headers.py +++ b/buildscripts/fix_headers.py @@ -84,22 +84,7 @@ def useful_print(fixes: Dict) -> None: class HeaderFixer: def __init__(self): - # TODO(SERVER-94781) Remove SCons dep - subprocess.run( - [ - sys.executable, - "buildscripts/scons.py", - "--build-profile=opt", - "--bazel-includes-info=dummy", # TODO Allow no library to be passed. 
- "--libdeps-linting=off", - "--ninja=disabled", - "$BUILD_ROOT/scons/$VARIANT_DIR/sconf_temp", - ] - ) - with open(".bazel_include_info.json") as f: - bazel_include_info = json.load(f) - self.bazel_exec = bazel_include_info["bazel_exec"] - self.bazel_config = bazel_include_info["config"] + self.bazel_exec = "bazel" auth = JiraAuth() auth.pat = os.environ["JIRA_TOKEN"] self.jira_client = JiraClient(JIRA_SERVER, auth, dry_run=False) @@ -110,13 +95,9 @@ class HeaderFixer: self, query: str, config: bool = False, args: List[str] = [] ) -> subprocess.CompletedProcess: query_cmd = "cquery" - config_args = self.bazel_config - if not config: - query_cmd = "query" - config_args = [] p = subprocess.run( - [self.bazel_exec, query_cmd] + config_args + args + [query], + [self.bazel_exec, query_cmd] + args + [query], capture_output=True, text=True, check=True, @@ -125,7 +106,7 @@ class HeaderFixer: def _build(self, target: str) -> subprocess.CompletedProcess: p = subprocess.run( - [self.bazel_exec, "build"] + self.bazel_config + [target], + [self.bazel_exec, "build"] + [target], capture_output=True, text=True, ) diff --git a/buildscripts/generate_version_expansions.py b/buildscripts/generate_version_expansions.py index 4b06983ea1f..0caf5807657 100755 --- a/buildscripts/generate_version_expansions.py +++ b/buildscripts/generate_version_expansions.py @@ -19,7 +19,7 @@ VERSION_JSON = "version.json" def generate_expansions(): """Entry point for the script. - This calls functions to generate version and scons cache expansions and + This calls functions to generate version and writes them to a file. 
""" args = parse_args() diff --git a/buildscripts/iwyu/README.md b/buildscripts/iwyu/README.md deleted file mode 100644 index a79bad1049a..00000000000 --- a/buildscripts/iwyu/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# IWYU Analysis tool - -This tool will run -[include-what-you-use](https://github.com/include-what-you-use/include-what-you-use) -(IWYU) analysis across the codebase via `compile_commands.json`. - -The `iwyu_config.yml` file consists of the current options and automatic -pragma marking. You can exclude files from the analysis here. - -The tool has two main modes of operation, `fix` and `check` modes. `fix` -mode will attempt to make changes to the source files based off IWYU's -suggestions. The check mode will simply check if there are any suggestion -at all. - -`fix` mode will take a long time to run, as the tool needs to rerun any -source in which a underlying header was changed to ensure things are not -broken, and so therefore ends up recompile the codebase several times over. - -For more information please refer the the script `--help` option. - -# Example usage: - -First you must generate the `compile_commands.json` file via this command: - -``` -python3 buildscripts/scons.py --build-profile=compiledb compiledb -``` - -Next you can run the analysis: - -``` -python3 buildscripts/iwyu/run_iwyu_analysis.py -``` - -The default mode is fix mode, and it will start making changes to the code -if any changes are found. - -# Debugging failures - -Occasionally IWYU tool will run into problems where it is unable to suggest -valid changes and the changes will cause things to break (not compile). When -it his a failure it will copy the source and all the header's that were used -at the time of the compilation into a directory where the same command can be -run to reproduce the error. - -You can examine the suggested changes in the source and headers and compare -them to the working source tree. 
Then you can make corrective changes to allow -IWYU to get past the failure. - -IWYU is not perfect and it make several mistakes that a human can understand -and fix appropriately. - -# Running the tests - -This tool includes its own end to end testing. The test directory includes -sub directories which contain source and iwyu configs to run the tool against. -The tests will then compare the results to built in expected results and fail -if the the tests are not producing the expected results. - -To run the tests use the command: - -``` -cd buildscripts/iwyu/test -python3 run_tests.py -``` diff --git a/buildscripts/iwyu/iwyu_config.yml b/buildscripts/iwyu/iwyu_config.yml deleted file mode 100644 index eceb0f8afd8..00000000000 --- a/buildscripts/iwyu/iwyu_config.yml +++ /dev/null @@ -1,83 +0,0 @@ -# options passed to IWYU -iwyu_options: - - "--mapping_file=etc/iwyu_mapping.imp" - - "--no_fwd_decls" - - "--prefix_header_includes=add" - - "--transitive_includes_only" - -# options passed to the fix script -fix_options: - - "--blank_lines" - - "--nocomments" - - "--noreorder" - - "--separate_project_includes=mongo" - - "--safe_headers" - - '--only_re=^src/mongo\/.*' - # TODO SERVER-77051 we will eventually turn this on when our codebase is cleaned up with out. 
- # - '--nosafe_headers' - -# filename regex to swap no_include in place -# quotes and brackets not included in this config -# since this is targeting IWYU added headers -no_includes: - # avoid boost crazyness - - "boost/.+/detail/.+" - - "asio/impl/.+" - - 'boost/.+\.ipp' - # avoid stdlib detail headers - - 'ext/alloc_traits\.h' - - 'ext/type_traits\.h' - - 'cxxabi\.h' # https://github.com/include-what-you-use/include-what-you-use/issues/909 - - "bits/.+" - - 'syscall\.h' - # arch specific - - "boost/predef/hardware/simd/x86.+" - - 'emmintrin\.h' - # we use a third party format which confuses IWYU - - 'format\.h' - # this is a link time symbol overloading thing not meant to be included - - 'libunwind-x86_64\.h' - # abuse of preprocessor - - 'mongo/db/namespace_string_reserved\.def\.h' - -# path prefixes (non regex) to skip -skip_files: - - "src/third_party" - - "build/" - - "src/mongo/tools/mongo_tidy_checks" - - "src/mongo/util/net" # causes linkage issues - - "src/mongo/util/text.cpp" - # IWYU confused on forward declares - - "src/mongo/db/exec/near.cpp" - - "src/mongo/db/storage/wiredtiger/wiredtiger_index.cpp" - # Asio is going to need some special treatment, the headers are very finicky - - "src/mongo/transport/asio" - # causes IWYU to crash: - - "src/mongo/db/update/update_internal_node.cpp" - - "src/mongo/db/update/update_array_node.cpp" - - "src/mongo/db/update/update_object_node.cpp" - - "src/mongo/db/update/update_array_node_test.cpp" - - "src/mongo/db/update/update_object_node_test.cpp" - - "src/mongo/util/options_parser/environment.cpp" - - "src/mongo/util/options_parser/option_section.cpp" - -# regex file paths to add keep pragma -# include quotes are angle brackets -keep_includes: - - '".*\.cstruct"' # these are not true includes, but used for very large initializers - - '' - - '' - - '' - - "" - - '' - - '' - - "" - - "" # IWYU messes up template instantiation - - '"mongo/rpc/object_check\.h"' - - '"mongo/base/init\.h"' - - 
'"mongo/scripting/mozjs/wrapconstrainedmethod\.h"' - - '"mongo/dbtests/dbtests\.h"' # this is due to using statements in the header - - '"mongo/config\.h"' - - '"mongo/util/overloaded_visitor\.h"' - - '"mongo/db/query/optimizer/node\.h"' - - '"mongo/util/text\.h"' # includes platform specific functions diff --git a/buildscripts/iwyu/run_iwyu_analysis.py b/buildscripts/iwyu/run_iwyu_analysis.py deleted file mode 100644 index 328875fc45c..00000000000 --- a/buildscripts/iwyu/run_iwyu_analysis.py +++ /dev/null @@ -1,1048 +0,0 @@ -#!/usr/bin/env python3 -""" -TOOL FUNCTIONAL DESCRIPTION. - -Currently the tool works by running IWYU on a subset of compile_commands.json -(the ones we care about like checked in mongo source) and testing each change -in a copy of the original source/header tree so that other compiles are not -affected until it passes a normal compile itself. Due to header dependencies -we must recompile the source files to catch issue IWYU may have introduced -with some dependent header change. Header dependencies do not form a DAG so -we can not process sources in a deterministic fashion. The tool will loop -through all the compilations until all dependents in a compilation are -determined unchanged from the last time the compilation was performed. - -The general workflow used here is to run the tool till there no changes -(several hours on rhel-xxlarge) and fix the errors either in the tool config -or as a manual human change in the code. - -TOOL TECHNICAL DESCRIPTION: - -Regarding the code layout, the main function setups a thread pool executor -and processes each source from the compile_commands. From there it runs a -thread function and within that 5 parts (each there own function) for -each source file: - -1. Skip if deps are unchanged -2. Get the headers deps via -MMD -3. Run IWYU -4. Apply Fixes -5. test compile, record new header deps if passed - -The tool uses mtime and MD5 hashing to know if any header dep has changed. 
- -""" - -import argparse -import atexit -import concurrent.futures -import enum -import hashlib -import json -import os -import re -import shlex -import shutil -import signal -import subprocess -import sys -import tempfile -import threading -import traceback -from dataclasses import asdict, dataclass -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -import yaml -from colorama import Fore -from colorama import init as colorama_init -from tqdm import tqdm - -colorama_init() - -parser = argparse.ArgumentParser(description="Run include what you use and test output") - -parser.add_argument( - "--compile-commands", - metavar="FILE", - type=str, - default="compile_commands.json", - help="Path to the compile commands file to use.", -) -parser.add_argument( - "--check", - action="store_true", - help="Enables check mode, which does not apply fixes and only runs to see if any files produce IWYU changes. Exit 0 if no new changes detected.", -) -parser.add_argument( - "--config-file", - metavar="FILE", - type=str, - default="", - help="Enables check mode, which does not apply fixes and only runs to see if any files produce IWYU changes. Exit 0 if no new changes detected.", -) -parser.add_argument( - "--iwyu-data", - metavar="FILE", - type=str, - default="iwyu.dat", - help="Location of data used by IWYU, contains hash and status info about all files.", -) -parser.add_argument( - "--keep-going", - action="store_true", - help="Do not stop on errors, instead resubmit the job to try again later (after things may have been fixed elsewhere)", -) -parser.add_argument( - "--cycle-debugging", - action="store_true", - help="Once a cycle has been detected, each directory tree for each step in the cycle will be saved to a .cycle directory.", -) -parser.add_argument( - "--verbose", action="store_true", help="Prints more info about what is taking place." 
-) -parser.add_argument( - "--mongo-toolchain-bin-dir", - type=str, - help="Which toolchain bin directory to use for this analysis.", - default="/opt/mongodbtoolchain/v4/bin", -) -parser.add_argument( - "--start-ratio", - type=float, - help="decimal value between 0 and 1 which indicates what starting ratio index of the total compile commands to run over, can not be greater than the --end-ratio.", - default=0.0, -) -parser.add_argument( - "--end-ratio", - type=float, - help="decimal value between 0 and 1 which indicates what ending ratio index of the total compile commands to run over, can not be less than the --start-ratio.", - default=1.0, -) -command_line_args = parser.parse_args() - -# the current state of all files, contain the cmd_entry, hashes, successes -IWYU_ANALYSIS_STATE: Dict[str, Any] = {} - -# the current state cycles being tracked -IWYU_CYCLE_STATE: Dict[str, Any] = {} - -hash_lookup_locks: Dict[str, threading.Lock] = {} -mtime_hash_lookup: Dict[str, Dict[str, Any]] = {} - -if command_line_args.config_file: - config_file = command_line_args.config_file -else: - config_file = os.path.join(os.path.dirname(__file__), "iwyu_config.yml") - -with open(config_file, "r") as stream: - config = yaml.safe_load(stream) - for key, value in config.items(): - if value is None: - config[key] = [] - -IWYU_OPTIONS = config.get("iwyu_options", []) -IWYU_FIX_OPTIONS = config.get("fix_options", []) -NO_INCLUDES = config.get("no_includes", []) -KEEP_INCLUDES = config.get("keep_includes", []) -SKIP_FILES = tuple(config.get("skip_files", [])) -CYCLE_FILES: List[str] = [] - - -@dataclass -class CompileCommand: - """An entry from compile_commands.json.""" - - file: str - command: str - directory: str - output: str - - -class ResultType(enum.Enum): - """ - Descriptions of enums. 
- - ERROR: unexpected or unrecognized error cases - FAILED: the IWYU task for a given compile command entry failed - NO_CHANGE: the input header tree and source file have not changed since last time - NOT_RUNNING: sources which we intentionally skip running IWYU all together - RESUBMIT: the IWYU task failed, but it may work later after other header changes - SUCCESS: the IWYU task for a source file has succeeded - """ - - ERROR = enum.auto() - FAILED = enum.auto() - NO_CHANGE = enum.auto() - NOT_RUNNING = enum.auto() - RESUBMIT = enum.auto() - SUCCESS = enum.auto() - - -TOOLCHAIN_DIR = command_line_args.mongo_toolchain_bin_dir -SHUTDOWN_FLAG = False -CLANG_INCLUDES = None -IWYU_OPTIONS = [val for pair in zip(["-Xiwyu"] * len(IWYU_OPTIONS), IWYU_OPTIONS) for val in pair] -if NO_INCLUDES: - NO_INCLUDE_REGEX = re.compile(r"^\s*#include\s+[\",<](" + "|".join(NO_INCLUDES) + ')[",>]') -if KEEP_INCLUDES: - KEEP_INCLUDE_REGEX = re.compile(r"^\s*#include\s+(" + "|".join(KEEP_INCLUDES) + ")") -CHANGED_FILES_REGEX = re.compile(r"^The\sfull\sinclude-list\sfor\s(.+):$", re.MULTILINE) - - -def printer(message: str) -> None: - """ - Prints output as appropriate. - - We don't print output if we are shutting down because the logs will - explode and original error will be hard to locate. - """ - - if not SHUTDOWN_FLAG or command_line_args.verbose: - tqdm.write(str(message)) - - -def debug_printer(message: str) -> None: - """Print each step in the processing of IWYU.""" - - if command_line_args.verbose: - tqdm.write(str(message)) - - -def failed_return() -> ResultType: - """A common method to allow the processing to continue even after some file fails.""" - - if command_line_args.keep_going: - return ResultType.RESUBMIT - else: - return ResultType.FAILED - - -def in_project_root(file: str) -> bool: - """ - Return true if the file is in the project root. 
- - This is assuming the project root is the same location - as the compile_commands.json file (the format of compile_commands.json - expects this as well). - """ - - return os.path.abspath(file).startswith( - os.path.abspath(os.path.dirname(command_line_args.compile_commands)) - ) - - -def copy_error_state( - cmd_entry: CompileCommand, test_dir: str, dir_ext: str = ".iwyu_test_dir" -) -> Optional[str]: - """ - When we fail, we want to copy the current state of the temp dir. - - This is so that the command that was used can be replicated and rerun, - primarily for debugging purposes. - """ - - # we never use a test_dir in check mode, since no files are copied in that mode. - if command_line_args.check: - return None - - # make a directory in the output location that we can store the state of the the - # header dep and source file the compile command was run with, delete old results - base, _ = os.path.splitext(cmd_entry.output) - if os.path.exists(base + dir_ext): - shutil.rmtree(base + dir_ext) - os.makedirs(base + dir_ext, exist_ok=True) - basedir = os.path.basename(test_dir) - error_state_dir = os.path.join(base + dir_ext, basedir) - shutil.copytree(test_dir, error_state_dir) - return error_state_dir - - -def calc_hash_of_file(file: str) -> Optional[str]: - """ - Calculate the hash of a file. Use mtime as well. - - If the mtime is unchanged, don't do IO, just look up the last hash. - """ - - # we need to lock on specific file io because GIL does not cover system io, so two threads - # could be doing io on the same file at the same time. 
- if file not in hash_lookup_locks: - hash_lookup_locks[file] = threading.Lock() - with hash_lookup_locks[file]: - if file in mtime_hash_lookup and os.path.getmtime(file) == mtime_hash_lookup[file]["mtime"]: - return mtime_hash_lookup[file]["hash"] - else: - try: - hash_val = hashlib.md5(open(file, "rb").read()).hexdigest() - except FileNotFoundError: - return None - - mtime_hash_lookup[file] = {"mtime": os.path.getmtime(file), "hash": hash_val} - return hash_val - - -def find_no_include(line: str, lines: List[str], output_lines: List[str]) -> bool: - """ - We need to regex the line to see if it includes an include that matches our NO_INCLUDE_REGEX. - - If so then we do not include that line - when we rewrite the file, and instead we add a IWYU no_include pragma inplace - """ - - no_include_header_found = False - if "// IWYU pragma: keep" in line: - return no_include_header_found - no_include_header = re.findall(NO_INCLUDE_REGEX, line) - - if no_include_header: - no_include_header_found = True - no_include_line = f'// IWYU pragma: no_include "{no_include_header[0]}"\n' - if no_include_line not in lines: - output_lines.append(no_include_line) - return no_include_header_found - - -def add_pragmas(source_files: List[str]): - """ - We automate some of the pragmas so there is not so much manual work. - - There are general cases for some of the pragmas. In this case we open the target - source/header, search via regexes for specific includes we care about, then add - the pragma comments as necessary. - """ - - for source_file in source_files: - # before we run IWYU, we take a guess at the likely header by swapping .cpp for .h - # so it may not be a real header. 
After IWYU runs we know exactly where to add the pragmas - # in case we got it wrong the first time around - if not os.path.exists(source_file): - continue - - # we load in the file content operate on it, and then write it back out - output_lines: List[str] = [] - with open(source_file, "r") as fin: - file_lines = fin.readlines() - for line in file_lines: - if NO_INCLUDES and find_no_include(line, file_lines, output_lines): - continue - - if ( - KEEP_INCLUDES - and re.search(KEEP_INCLUDE_REGEX, line) - and "// IWYU pragma: keep" not in line - ): - output_lines.append(line.strip() + " // IWYU pragma: keep\n") - continue - - output_lines.append(line) - - with open(source_file, "w") as fout: - for line in output_lines: - fout.write(line) - - -def recalc_hashes(deps: List[str], change_dir: Optional[str] = None) -> Dict[str, Any]: - """ - We calculate the hashes from the header dep list generated by the compiler. - - We also create cumulative hash for convenance. - - Some cases we are operating a test directory, but deps are referenced as if they are - in the project root. The change_dir option here allows us to calc the the hashes from - the test directory we may be working in, but still record the deps files in a compat - fashion with other processes that work out of project root, e.g. testing if there was a - change from last time. - """ - - hashes: Dict[str, Any] = {"deps": {}} - full_hash = hashlib.new("md5") - for dep in sorted(list(deps)): - if not in_project_root(dep): - continue - if change_dir: - orig_dep = dep - dep = os.path.join(change_dir, dep) - dep_hash = calc_hash_of_file(dep) - if dep_hash is None: - continue - if change_dir: - dep = orig_dep - full_hash.update(dep_hash.encode("utf-8")) - hashes["deps"][dep] = dep_hash - hashes["full_hash"] = full_hash.hexdigest() - return hashes - - -def setup_test_dir(cmd_entry: CompileCommand, test_dir: str) -> List[str]: - """ - Here we are copying the source and required header tree from the main source tree. 
- - Returns the associate source and header that were copied into the test dir. - - We want an isolated location to perform analysis and apply changes so everything is not - clashing. At this point we don't know for sure what header IWYU is going to associate with the source - but for mongo codebase, 99.9% of the time its just swap the .cpp for .h. We need this to apply - some pragma to keep IWYU from removing headers it doesn't understand (cross platform or - third party like boost or asio). The pragmas are harmless in and of themselves so adding the - mistakenly in the 0.1% of the time is negligible. - """ - - original_sources = [ - orig_source - for orig_source in [cmd_entry.file, os.path.splitext(cmd_entry.file)[0] + ".h"] - if os.path.exists(orig_source) - ] - test_source_files = [os.path.join(test_dir, source_file) for source_file in original_sources] - dep_headers = [dep for dep in IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"].keys()] - - # copy each required header from our source tree into our test dir - # this does cost some time, but the alternative (everything operating in the real source tree) - # was much longer due to constant failures. - for source_file in dep_headers + ["etc/iwyu_mapping.imp"]: - if in_project_root(source_file): - os.makedirs(os.path.join(test_dir, os.path.dirname(source_file)), exist_ok=True) - shutil.copyfile(source_file, os.path.join(test_dir, source_file)) - - # need to create dirs for outputs - for output in shlex.split(cmd_entry.output): - os.makedirs(os.path.join(test_dir, os.path.dirname(output)), exist_ok=True) - - return test_source_files - - -def get_clang_includes() -> List[str]: - """ - IWYU needs some extra help to know what default includes clang is going to bring in when it normally compiles. - - The query reliably gets the include dirs that would be used in normal compiles. We cache and reuse the result - so the subprocess only runs once. 
- """ - global CLANG_INCLUDES - if CLANG_INCLUDES is None: - clang_includes = subprocess.getoutput( - f"{TOOLCHAIN_DIR}/clang++ -Wp,-v -x c++ - -fsyntax-only < /dev/null 2>&1 | sed -e '/^#include <...>/,/^End of search/{{ //!b }};d'" - ).split("\n") - clang_includes = ["-I" + include.strip() for include in clang_includes] - CLANG_INCLUDES = clang_includes - return CLANG_INCLUDES - - -def write_cycle_diff(source_file: str, cycle_dir: str, latest_hashes: Dict[str, Any]) -> None: - """ - Write out the diffs between the last iteration and the latest iteration. - - The file contains the hash for before and after for each file involved in the compilation. - """ - - with open(os.path.join(cycle_dir, "hashes_diff.txt"), "w") as out: - dep_list = set( - list(IWYU_ANALYSIS_STATE[source_file]["hashes"]["deps"].keys()) - + list(latest_hashes["deps"].keys()) - ) - not_found_str = "not found" + (" " * 23) - for dep in sorted(dep_list): - out.write( - f"Original: {IWYU_ANALYSIS_STATE[source_file]['hashes']['deps'].get(dep, not_found_str)}, Latest: {latest_hashes['deps'].get(dep, not_found_str)} - {dep}\n" - ) - - -def check_for_cycles( - cmd_entry: CompileCommand, latest_hashes: Dict[str, Any], test_dir: str -) -> Optional[ResultType]: - """ - IWYU can induce cycles so we should check our previous results to see if a cycle has occurred. - - These cycles can happen if a header change induces some other header change which then inturn induces - the original header change. These cycles are generally harmless and are easily broken with a keep - pragma but finding what files are induces the cycle is the challenge. - - With cycle debug mode enabled, the entire header tree is saved for each iteration in the cycle so - all files can be fully examined. 
- """ - - if cmd_entry.file not in IWYU_CYCLE_STATE: - IWYU_CYCLE_STATE[cmd_entry.file] = { - "cycles": [], - } - - if latest_hashes["full_hash"] in IWYU_CYCLE_STATE[cmd_entry.file]["cycles"]: - if command_line_args.cycle_debugging: - if "debug_cycles" not in IWYU_CYCLE_STATE[cmd_entry.file]: - IWYU_CYCLE_STATE[cmd_entry.file]["debug_cycles"] = {} - - IWYU_CYCLE_STATE[cmd_entry.file]["debug_cycles"][latest_hashes["full_hash"]] = ( - latest_hashes - ) - - cycle_dir = copy_error_state( - cmd_entry, - test_dir, - dir_ext=f".{latest_hashes['full_hash']}.cycle{len(IWYU_CYCLE_STATE[cmd_entry.file]['debug_cycles'])}", - ) - write_cycle_diff(cmd_entry.file, cycle_dir, latest_hashes) - if latest_hashes["full_hash"] not in IWYU_CYCLE_STATE[cmd_entry.file]["debug_cycles"]: - printer(f"{Fore.YELLOW}[5] - Cycle Found!: {cmd_entry.file}{Fore.RESET}") - else: - printer(f"{Fore.RED}[5] - Cycle Done! : {cmd_entry.file}{Fore.RESET}") - return failed_return() - else: - printer(f"{Fore.RED}[5] - Cycle Found!: {cmd_entry.file}{Fore.RESET}") - CYCLE_FILES.append(cmd_entry.file) - return ResultType.SUCCESS - else: - IWYU_CYCLE_STATE[cmd_entry.file]["cycles"].append(latest_hashes["full_hash"]) - - return None - - -def write_iwyu_data() -> None: - """Store the data we have acquired during this run so we can resume at the same spot on subsequent runs.""" - - # There might be faster ways to store this like serialization or - # what not, but having human readable json is good for debugging. - # on a full build this takes around 10 seconds to write out. - if IWYU_ANALYSIS_STATE: - try: - # atomic move operation prevents ctrl+c mashing from - # destroying everything, at least we can keep the original - # data safe from emotional outbursts. 
- with tempfile.NamedTemporaryFile() as temp: - with open(temp.name, "w") as iwyu_data_file: - json.dump(IWYU_ANALYSIS_STATE, iwyu_data_file, sort_keys=True, indent=4) - shutil.move(temp.name, command_line_args.iwyu_data) - except FileNotFoundError as exc: - if temp.name in str(exc): - pass - - -def need_to_process( - cmd_entry: CompileCommand, custom_printer: Callable[[str], None] = printer -) -> Optional[ResultType]: - """ - The first step in the first step for processing a given source file. - - We have a list of skip prefixes, for example build or third_party, but others can be added. - - If it is a file we are not skipping, then we check if we have already done the work by calculating the - hashes and seeing if what we recorded last time has changed. - """ - - if ( - cmd_entry.file.startswith(SKIP_FILES) - or cmd_entry.file in CYCLE_FILES - or "/conftest_" in cmd_entry.file - ): - custom_printer(f"{Fore.YELLOW}[5] - Not running!: {cmd_entry.file}{Fore.RESET}") - return ResultType.NOT_RUNNING - - if IWYU_ANALYSIS_STATE.get(cmd_entry.file): - hashes = recalc_hashes(IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"].keys()) - - # we only skip if the matching mode was successful last time, otherwise we assume we need to rerun - mode_success = "CHECK" if command_line_args.check else "FIX" - if command_line_args.verbose: - diff_files = list( - set(hashes["deps"].keys()).symmetric_difference( - set(IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"].keys()) - ) - ) - if diff_files: - msg = f"[1] Need to process {cmd_entry.file} because different files:\n" - for file in diff_files: - msg += f"{file}\n" - debug_printer(msg) - for file in IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"].keys(): - if ( - file in hashes["deps"] - and hashes["deps"][file] - != IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"][file] - ): - debug_printer( - f"[1] Need to process {cmd_entry.file} because hash changed:\n{file}: {hashes['deps'][file]}\n{file}: 
{IWYU_ANALYSIS_STATE[cmd_entry.file]['hashes']['deps'][file]}" - ) - - if hashes["full_hash"] == IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"][ - "full_hash" - ] and mode_success in IWYU_ANALYSIS_STATE[cmd_entry.file].get("success", []): - custom_printer(f"{Fore.YELLOW}[5] - No Change! : {cmd_entry.file}{Fore.RESET}") - return ResultType.NO_CHANGE - - return None - - -def calc_dep_headers(cmd_entry: CompileCommand) -> Optional[ResultType]: - """ - The second step in the IWYU process. - - We need to get a list of headers which are dependencies so we can copy them to an isolated - working directory (so parallel IWYU changes don't break us). We will switch on preprocessor - for faster generation of the dep file. - - Once we have the deps list, we parse it and calc the hashes of the deps. - """ - - try: - with tempfile.NamedTemporaryFile() as depfile: - # first time we could be executing a real command so we make sure the dir - # so the compiler is not mad - outputs = shlex.split(cmd_entry.output) - for output in outputs: - out_dir = os.path.dirname(output) - if out_dir: - os.makedirs(out_dir, exist_ok=True) - - # setup up command for fast depfile generation - cmd = cmd_entry.command - cmd += f" -MD -MF {depfile.name}" - cmd = cmd.replace(" -c ", " -E ") - debug_printer(f"[1] - Getting Deps: {cmd_entry.file}") - - try: - deps_proc = subprocess.run( - cmd, shell=True, capture_output=True, text=True, timeout=300 - ) - except subprocess.TimeoutExpired: - deps_proc = None - pass - - # if successful, record the latest deps with there hashes, otherwise try again later - if deps_proc is None or deps_proc.returncode != 0: - printer(f"{Fore.RED}[5] - Deps Failed!: {cmd_entry.file}{Fore.RESET}") - printer(deps_proc.stderr) - return ResultType.RESUBMIT - else: - with open(depfile.name) as deps: - deps_str = deps.read() - deps_str = deps_str.replace("\\\n", "").strip() - - hashes = recalc_hashes(shlex.split(deps_str)[1:]) - if not IWYU_ANALYSIS_STATE.get(cmd_entry.file): - 
IWYU_ANALYSIS_STATE[cmd_entry.file] = asdict(cmd_entry) - IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"] = hashes - IWYU_ANALYSIS_STATE[cmd_entry.file]["success"] = [] - - # if the dep command failed the context will through an execption, we will ignore just - # that case - except FileNotFoundError as exc: - traceback.print_exc() - if depfile.name in str(exc): - pass - - return None - - -def execute_iwyu(cmd_entry: CompileCommand, test_dir: str) -> Union[ResultType, bytes]: - """ - The third step of IWYU analysis. Check mode will stop here. - - Here we want to execute IWYU on our source. Note at this point in fix mode - we will be working out of an isolated test directory which has the - required header tree copied over. Check mode will just pass in the original - project root as the test_dir (the real source tree). - """ - - # assert we are working with a pure clang++ build - if not cmd_entry.command.startswith(f"{TOOLCHAIN_DIR}/clang++"): - printer("unexpected compiler:") - printer(cmd_entry.command) - return ResultType.FAILED - - # swap out for our tool and add in extra options for IWYU - cmd = ( - f"{TOOLCHAIN_DIR}/include-what-you-use" - + cmd_entry.command[len(f"{TOOLCHAIN_DIR}/clang++") :] - ) - cmd += " " + " ".join(get_clang_includes()) - cmd += " " + " ".join(IWYU_OPTIONS) - - # mimic the PATH we normally use in our build - env = os.environ.copy() - env["PATH"] += f":{TOOLCHAIN_DIR}" - - debug_printer(f"[2] - Running IWYU: {cmd_entry.file}") - proc = subprocess.run(cmd, shell=True, env=env, capture_output=True, cwd=test_dir) - - # IWYU has some bugs about forward declares I am assuming, because in some cases even though - # we have passed --no_fwd_decls it still sometimes recommend forward declares and sometimes they - # are wrong and cause compilation errors. 
- remove_fwd_declares = [] - for line in proc.stderr.decode("utf-8").split("\n"): - line = line.strip() - if ( - not line.endswith(":") - and not line.startswith(("#include ", "-")) - and ("class " in line or "struct " in line) - ): - continue - remove_fwd_declares.append(line) - iwyu_output = "\n".join(remove_fwd_declares) - - # IWYU has weird exit codes, where a >=2 is considered success: - # https://github.com/include-what-you-use/include-what-you-use/blob/clang_12/iwyu_globals.h#L27-L34 - if command_line_args.check and proc.returncode != 2: - printer(f"{Fore.RED}[2] - IWYU Failed: {cmd_entry.file}{Fore.RESET}") - if proc.returncode < 2: - printer(f"exited with error: {proc.returncode}") - else: - printer(f"changes required: {proc.returncode - 2}") - printer(iwyu_output) - return failed_return() - elif proc.returncode < 2: - printer(f"{Fore.RED}[2] - IWYU Failed : {cmd_entry.file}{Fore.RESET}") - printer(cmd) - printer(str(proc.returncode)) - printer(proc.stderr.decode("utf-8")) - copy_error_state(cmd_entry, test_dir) - return failed_return() - - # save the output for debug or inspection later - with open(os.path.splitext(cmd_entry.output)[0] + ".iwyu", "w") as iwyu_out: - iwyu_out.write(iwyu_output) - - return iwyu_output.encode("utf-8") - - -def apply_fixes( - cmd_entry: CompileCommand, iwyu_output: bytes, test_dir: str -) -> Optional[ResultType]: - """ - Step 4 in the IWYU process. - - We need to run the fix_includes script to apply the output from the IWYU binary. 
- """ - cmd = [f"{sys.executable}", f"{TOOLCHAIN_DIR}/fix_includes.py"] + IWYU_FIX_OPTIONS - - debug_printer(f"[3] - Apply fixes : {cmd_entry.file}") - try: - subprocess.run(cmd, capture_output=True, input=iwyu_output, timeout=180, cwd=test_dir) - except subprocess.TimeoutExpired: - printer(f"{Fore.RED}[5] - Apply failed: {cmd_entry.file}{Fore.RESET}") - return ResultType.RESUBMIT - - return None - - -def test_compile(cmd_entry: CompileCommand, test_dir: str) -> Optional[ResultType]: - """ - Step 5 in the IWYU analysis and the last step for fix mode. - - We run the normal compile command in a test directory and make sure it is successful before - it will be copied back into the real source tree for inclusion into other jobs. - """ - - try: - with tempfile.NamedTemporaryFile() as depfile: - debug_printer(f"[4] - Test compile: {cmd_entry.file}") - - # we want to capture the header deps again because IWYU may have changed them - cmd = cmd_entry.command - cmd += f" -MMD -MF {depfile.name}" - try: - p3 = subprocess.run( - cmd, shell=True, capture_output=True, text=True, timeout=300, cwd=test_dir - ) - except (subprocess.TimeoutExpired, MemoryError): - p3 = None - pass - - # our test compile has failed so we need to report and setup for debug - if p3 is not None and p3.returncode != 0: - printer(f"{Fore.RED}[5] - IWYU Failed!: {cmd_entry.file}{Fore.RESET}") - printer(f"{cmd}") - printer(f"{p3.stderr}") - copy_error_state(cmd_entry, test_dir) - return failed_return() - - else: - with open(depfile.name) as deps: - # calculate the hashes of the deps used to create - # this successful compile. 
- deps_str = deps.read() - deps_str = deps_str.replace("\\\n", "").strip() - hashes = recalc_hashes(shlex.split(deps_str)[1:], change_dir=test_dir) - - if result := check_for_cycles(cmd_entry, hashes, test_dir): - return result - - IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"] = hashes - if "FIX" not in IWYU_ANALYSIS_STATE[cmd_entry.file]["success"]: - IWYU_ANALYSIS_STATE[cmd_entry.file]["success"].append("FIX") - printer(f"{Fore.GREEN}[5] - IWYU Success: {cmd_entry.file}{Fore.RESET}") - return ResultType.SUCCESS - - # if we failed, the depfile may not have been generated, so check for it - # ignore it - except FileNotFoundError as exc: - if depfile.name in str(exc): - pass - - return None - - -def intialize_deps(cmd_entry: CompileCommand) -> Tuple[ResultType, CompileCommand]: - """ - When running in fix mode, we take some time to initialize the header deps. - - This is mainly used to improve the overall time to complete full analysis. We want process - the source files in order of files with least dependencies to most dependencies. The rational - is that if it has a lot of dependencies we should do last so any changes in those dependencies - are automatically accounted for and the change of need to do rework is lessened. Also the - progress bar can be more accurate and not count skip files. - """ - - # step 1 - if result := need_to_process(cmd_entry, custom_printer=debug_printer): - return result, cmd_entry - - # if we have deps from a previous that should be a good enough indicator - # of how dependency heavy it is, and its worth just taking that over - # needing to invoke the compiler. 
- try: - if len(IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"]): - return ResultType.SUCCESS, cmd_entry - - except KeyError: - pass - - if result := calc_dep_headers(cmd_entry): - return result, cmd_entry - - return ResultType.SUCCESS, cmd_entry - - -def check_iwyu(cmd_entry: CompileCommand) -> ResultType: - """ - One of the two thread functions the main thread pool executor will call. - - Here we execute up to step 3 (steps at the top comment) and report success - if IWYU reports no required changes. - """ - - # step 1 - if result := need_to_process(cmd_entry): - return result - - # step 2 - if result := calc_dep_headers(cmd_entry): - return result - - # step 3 - iwyu_out = execute_iwyu(cmd_entry, ".") - if isinstance(iwyu_out, ResultType): - return iwyu_out - - # success! - printer(f"{Fore.GREEN}[2] - IWYU Success: {cmd_entry.file}{Fore.RESET}") - if "CHECK" not in IWYU_ANALYSIS_STATE[cmd_entry.file]["success"]: - IWYU_ANALYSIS_STATE[cmd_entry.file]["success"].append("CHECK") - return ResultType.SUCCESS - - -def fix_iwyu(cmd_entry: CompileCommand) -> ResultType: - """ - One of the two thread functions the main thread pool executor will call. - - Here we execute up to step 5 (steps at the top comment) and report success - if we are able to successfully compile the original command after IWYU - has made its changes. - """ - - # step 1 - if result := need_to_process(cmd_entry): - return result - - # step 2 - if result := calc_dep_headers(cmd_entry): - return result - - with tempfile.TemporaryDirectory() as test_dir: - # the changes will be done in an isolated test dir so not to conflict with - # other concurrent processes. 
- test_source_files = setup_test_dir(cmd_entry, test_dir) - - # a first round of pragmas to make sure IWYU doesn't fail or remove things we dont want - add_pragmas(test_source_files) - - # step 3 - iwyu_out = execute_iwyu(cmd_entry, test_dir) - if isinstance(iwyu_out, ResultType): - return iwyu_out - - # now we can extract exactly what files IWYU operated on and copy only those back - changed_files = [ - os.path.join(test_dir, file) - for file in re.findall(CHANGED_FILES_REGEX, iwyu_out.decode("utf-8")) - if in_project_root(file) - ] - test_source_files += [file for file in changed_files if file not in test_source_files] - - # step 4 - if result := apply_fixes(cmd_entry, iwyu_out, test_dir): - return result - - # a final round of pragmas for the next time this is run through IWYU - add_pragmas(test_source_files) - - # step 5 - result = test_compile(cmd_entry, test_dir) - if result == ResultType.SUCCESS: - for file in test_source_files: - if os.path.exists(file): - shutil.move(file, file[len(test_dir) + 1 :]) - - return result - - -def run_iwyu(cmd_entry: CompileCommand) -> Tuple[ResultType, CompileCommand]: - """Intermediate function which delegates the underlying mode to run.""" - - if command_line_args.check: - return check_iwyu(cmd_entry), cmd_entry - else: - return fix_iwyu(cmd_entry), cmd_entry - - -def main() -> None: - """Main function.""" - global IWYU_ANALYSIS_STATE, SHUTDOWN_FLAG - atexit.register(write_iwyu_data) - - with concurrent.futures.ThreadPoolExecutor( - max_workers=len(os.sched_getaffinity(0)) + 4 - ) as executor: - # ctrl+c tru to shutdown as fast as possible. 
- def sigint_handler(the_signal, frame): - executor.shutdown(wait=False, cancel_futures=True) - sys.exit(1) - - signal.signal(signal.SIGINT, sigint_handler) - - # load in any data from prior runs - if os.path.exists(command_line_args.iwyu_data): - with open(command_line_args.iwyu_data) as iwyu_data_file: - IWYU_ANALYSIS_STATE = json.load(iwyu_data_file) - - # load in the compile commands - with open(command_line_args.compile_commands) as compdb_file: - compiledb = [CompileCommand(**json_data) for json_data in json.load(compdb_file)] - - # assert the generated source code has been generated - for cmd_entry in compiledb: - if cmd_entry.file.endswith("_gen.cpp") and not os.path.exists(cmd_entry.file): - printer(f"{Fore.RED}[5] - Missing Gen!: {cmd_entry.file}{Fore.RESET}") - printer( - f"Error: missing generated file {cmd_entry.file}, make sure generated-sources are generated." - ) - sys.exit(1) - - total_cmds = len(compiledb) - start_index = int(total_cmds * command_line_args.start_ratio) - if start_index < 0: - start_index = 0 - if start_index > total_cmds: - start_index = total_cmds - - end_index = int(total_cmds * command_line_args.end_ratio) - if end_index < 0: - end_index = 0 - if end_index > total_cmds: - end_index = total_cmds - - if start_index == end_index: - print(f"Error: start_index and end_index are the same: {start_index}") - sys.exit(1) - if start_index > end_index: - print( - f"Error: start_index {start_index} can not be greater than end_index {end_index}" - ) - sys.exit(1) - - print(f"Analyzing compile commands from {start_index} to {end_index}.") - compiledb = compiledb[start_index:end_index] - if not command_line_args.check: - # We can optimize the order we process things by processing source files - # with the least number of dependencies first. This is a cost up front - # but will result in huge gains in the amount of re-processing to be done. 
- printer("Getting Initial Header Dependencies...") - cmd_entry_list = [] - try: - with tqdm(total=len(compiledb), disable=None) as pbar: - # create and run the dependency check jobs - future_cmd = { - executor.submit(intialize_deps, cmd_entry): cmd_entry - for cmd_entry in compiledb - } - for future in concurrent.futures.as_completed(future_cmd): - result, cmd_entry = future.result() - if result != ResultType.NOT_RUNNING: - cmd_entry_list.append(cmd_entry) - pbar.update(1) - except Exception: - SHUTDOWN_FLAG = True - traceback.print_exc() - executor.shutdown(wait=True, cancel_futures=True) - sys.exit(1) - else: - cmd_entry_list = compiledb - - try: - # this loop will keep looping until a full run produce no new changes. - changes_left = True - while changes_left: - changes_left = False - - with tqdm(total=len(cmd_entry_list), disable=None) as pbar: - # create and run the IWYU jobs - def dep_sorted(cmd_entry): - try: - return len(IWYU_ANALYSIS_STATE[cmd_entry.file]["hashes"]["deps"]) - except KeyError: - return 0 - - future_cmd = { - executor.submit(run_iwyu, cmd_entry): cmd_entry - for cmd_entry in sorted(cmd_entry_list, key=dep_sorted) - } - - # process the results - for future in concurrent.futures.as_completed(future_cmd): - result, cmd_entry = future.result() - - # any result which implies there could be changes required sets the - # next loop - if result not in (ResultType.NO_CHANGE, ResultType.NOT_RUNNING): - changes_left = True - - # if a file is considered done for this loop, update the status bar - if result in [ - ResultType.SUCCESS, - ResultType.NO_CHANGE, - ResultType.NOT_RUNNING, - ]: - pbar.update(1) - # resubmit jobs which may have a better change to run later - elif result == ResultType.RESUBMIT: - executor.submit(run_iwyu, cmd_entry) - # handle a failure case, excpetion quickly drops us out of this loop. - else: - SHUTDOWN_FLAG = True - tqdm.write( - f"{result.name}: Shutting down other threads, please be patient." 
- ) - raise Exception( - f'Shutdown due to {result.name} {cmd_entry["file"]}' - ) - - except Exception: - SHUTDOWN_FLAG = True - traceback.print_exc() - executor.shutdown(wait=True, cancel_futures=True) - sys.exit(1) - finally: - if CYCLE_FILES: - printer(f"{Fore.YELLOW} Cycles detected:") - for file in CYCLE_FILES: - printer(f" {file}") - - -main() diff --git a/buildscripts/iwyu/test/basic/a.h b/buildscripts/iwyu/test/basic/a.h deleted file mode 100644 index ad792ace34b..00000000000 --- a/buildscripts/iwyu/test/basic/a.h +++ /dev/null @@ -1 +0,0 @@ -#include "b.h" diff --git a/buildscripts/iwyu/test/basic/b.cpp b/buildscripts/iwyu/test/basic/b.cpp deleted file mode 100644 index dcbc8627764..00000000000 --- a/buildscripts/iwyu/test/basic/b.cpp +++ /dev/null @@ -1,5 +0,0 @@ -#include "a.h" - -type_b return_b_function() { - return type_b(); -} diff --git a/buildscripts/iwyu/test/basic/b.h b/buildscripts/iwyu/test/basic/b.h deleted file mode 100644 index 422d7626e90..00000000000 --- a/buildscripts/iwyu/test/basic/b.h +++ /dev/null @@ -1 +0,0 @@ -class type_b {}; diff --git a/buildscripts/iwyu/test/basic/expected_results.py b/buildscripts/iwyu/test/basic/expected_results.py deleted file mode 100644 index ea160f7a9e7..00000000000 --- a/buildscripts/iwyu/test/basic/expected_results.py +++ /dev/null @@ -1,16 +0,0 @@ -import sys - -EXPECTED_B_CPP = """ -#include "b.h" - -type_b return_b_function() { - return type_b(); -} -""" - -with open("b.cpp") as f: - content = f.read() - if content != EXPECTED_B_CPP: - print(f'Actual:\n"""{content}"""') - print(f'Expected:\n"""{EXPECTED_B_CPP}"""') - sys.exit(1) diff --git a/buildscripts/iwyu/test/basic/test_config.yml b/buildscripts/iwyu/test/basic/test_config.yml deleted file mode 100644 index 66c2adc15dc..00000000000 --- a/buildscripts/iwyu/test/basic/test_config.yml +++ /dev/null @@ -1,25 +0,0 @@ -# options passed to IWYU -iwyu_options: - - "--max_line_length=100" - - "--no_fwd_decls" - - "--prefix_header_includes=add" - - 
"--transitive_includes_only" - -# options passed to the fix script -fix_options: - - "--blank_lines" - - "--nocomments" - - "--noreorder" - - "--safe_headers" - -# filename regex to swap no_include in place -# quotes and brackets not included quotes are always assumed -# since this is targeting IWYU added headers -no_includes: - -# prefixes (non regex) to skip -skip_files: - -# regex file paths to add keep pragma -# include quotes are angle brackets -keep_includes: diff --git a/buildscripts/iwyu/test/no_include/a.h b/buildscripts/iwyu/test/no_include/a.h deleted file mode 100644 index ad792ace34b..00000000000 --- a/buildscripts/iwyu/test/no_include/a.h +++ /dev/null @@ -1 +0,0 @@ -#include "b.h" diff --git a/buildscripts/iwyu/test/no_include/b.cpp b/buildscripts/iwyu/test/no_include/b.cpp deleted file mode 100644 index dcbc8627764..00000000000 --- a/buildscripts/iwyu/test/no_include/b.cpp +++ /dev/null @@ -1,5 +0,0 @@ -#include "a.h" - -type_b return_b_function() { - return type_b(); -} diff --git a/buildscripts/iwyu/test/no_include/b.h b/buildscripts/iwyu/test/no_include/b.h deleted file mode 100644 index 422d7626e90..00000000000 --- a/buildscripts/iwyu/test/no_include/b.h +++ /dev/null @@ -1 +0,0 @@ -class type_b {}; diff --git a/buildscripts/iwyu/test/no_include/expected_results.py b/buildscripts/iwyu/test/no_include/expected_results.py deleted file mode 100644 index 6f488af9d78..00000000000 --- a/buildscripts/iwyu/test/no_include/expected_results.py +++ /dev/null @@ -1,17 +0,0 @@ -import sys - -EXPECTED_B_CPP = """// IWYU pragma: no_include "b.h" - -#include "a.h" // IWYU pragma: keep - -type_b return_b_function() { - return type_b(); -} -""" - -with open("b.cpp") as f: - content = f.read() - if content != EXPECTED_B_CPP: - print(f'Actual:\n"""{content}"""') - print(f'Expected:\n"""{EXPECTED_B_CPP}"""') - sys.exit(1) diff --git a/buildscripts/iwyu/test/no_include/test_config.yml b/buildscripts/iwyu/test/no_include/test_config.yml deleted file mode 100644 index 
37c7ebfbc47..00000000000 --- a/buildscripts/iwyu/test/no_include/test_config.yml +++ /dev/null @@ -1,27 +0,0 @@ -# options passed to IWYU -iwyu_options: - - "--max_line_length=100" - - "--no_fwd_decls" - - "--prefix_header_includes=add" - - "--transitive_includes_only" - -# options passed to the fix script -fix_options: - - "--blank_lines" - - "--nocomments" - - "--noreorder" - - "--safe_headers" - -# filename regex to swap no_include in place -# quotes and brackets not included quotes are always assumed -# since this is targeting IWYU added headers -no_includes: - - "b.h" - -# prefixes (non regex) to skip -skip_files: - -# regex file paths to add keep pragma -# include quotes are angle brackets -keep_includes: - - '"a.h"' diff --git a/buildscripts/iwyu/test/run_tests.py b/buildscripts/iwyu/test/run_tests.py deleted file mode 100644 index d94c383cdfd..00000000000 --- a/buildscripts/iwyu/test/run_tests.py +++ /dev/null @@ -1,114 +0,0 @@ -import argparse -import concurrent.futures -import glob -import json -import os -import pathlib -import shutil -import subprocess -import sys - -parser = argparse.ArgumentParser(description="Run tests for the IWYU analysis script.") - -parser.add_argument( - "--mongo-toolchain-bin-dir", - type=str, - help="Which toolchain bin directory to use for this analysis.", - default="/opt/mongodbtoolchain/v4/bin", -) - -args = parser.parse_args() - -if os.getcwd() != pathlib.Path(__file__).parent: - print( - f"iwyu test script must run in the tests directory, changing dirs to {pathlib.Path(__file__).parent.resolve()}" - ) - os.chdir(pathlib.Path(__file__).parent.resolve()) - -analysis_script = pathlib.Path(__file__).parent.parent / "run_iwyu_analysis.py" - - -def run_test(entry): - print(f"Running test {pathlib.Path(entry)}...") - test_dir = pathlib.Path(entry) / "test_run" - if os.path.exists(test_dir): - shutil.rmtree(test_dir) - - shutil.copytree(pathlib.Path(entry), test_dir) - - source_files = glob.glob("**/*.cpp", root_dir=test_dir, 
recursive=True) - compile_commands = [] - - for source_file in source_files: - output = os.path.splitext(source_file)[0] + ".o" - compile_commands.append( - { - "file": source_file, - "command": f"{args.mongo_toolchain_bin_dir}/clang++ -o {output} -c {source_file}", - "directory": os.path.abspath(test_dir), - "output": output, - } - ) - - with open(test_dir / "compile_commands.json", "w") as compdb: - json.dump(compile_commands, compdb) - - os.makedirs(test_dir / "etc", exist_ok=True) - with open(test_dir / "etc" / "iwyu_mapping.imp", "w") as mapping: - mapping.write( - '[{include: ["\\"placeholder.h\\"", "private", "\\"placeholder2.h\\"", "public"]}]' - ) - - iwyu_run = subprocess.run( - [sys.executable, analysis_script, "--verbose", "--config-file=test_config.yml"], - text=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=test_dir, - ) - - results_run = subprocess.run( - [sys.executable, pathlib.Path(entry) / "expected_results.py"], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True, - cwd=test_dir, - ) - - msg = "\n".join([iwyu_run.stdout, results_run.stdout, f"FAILED!: {pathlib.Path(entry)}"]) - msg = "\n".join([f"[{pathlib.Path(entry).name}] {line}" for line in msg.split("\n")]) - - if results_run.returncode != 0: - return results_run.returncode, msg, pathlib.Path(entry).name - else: - return ( - results_run.returncode, - f"[{pathlib.Path(entry).name}] PASSED!: {pathlib.Path(entry)}", - pathlib.Path(entry).name, - ) - - -failed_tests = [] -with concurrent.futures.ThreadPoolExecutor( - max_workers=len(os.sched_getaffinity(0)) + 4 -) as executor: - # create and run the IWYU jobs - future_cmd = { - executor.submit(run_test, entry): entry - for entry in pathlib.Path(__file__).parent.glob("*") - if os.path.isdir(entry) - } - - # process the results - for future in concurrent.futures.as_completed(future_cmd): - result, message, test_name = future.result() - if result != 0: - failed_tests += [test_name] - print(message) - 
-print("\n***Tests complete.***") -if failed_tests: - print("The following tests failed:") - for test in failed_tests: - print(" - " + test) - print("Please review the logs above for more information.") diff --git a/site_scons/site_tools/jstoh.py b/buildscripts/jstoh.py similarity index 100% rename from site_scons/site_tools/jstoh.py rename to buildscripts/jstoh.py diff --git a/buildscripts/large_file_check.py b/buildscripts/large_file_check.py index 46172d1cf6d..befe8e47ee1 100755 --- a/buildscripts/large_file_check.py +++ b/buildscripts/large_file_check.py @@ -56,14 +56,8 @@ MONGO_REVISION_ENV_VAR = "REVISION" def _get_repos_and_revisions() -> Tuple[List[Repo], RevisionMap]: """Get the repo object and a map of revisions to compare against.""" - modules = git.get_module_paths() - repos = [ - Repo(path) - for path in modules - # Exclude enterprise module; it's in the "modules" folder but does not correspond to a repo - if "src/mongo/db/modules/enterprise" not in path - ] + repos = [Repo(git.get_base_dir())] revision_map = generate_revision_map(repos, {"mongo": os.environ.get(MONGO_REVISION_ENV_VAR)}) return repos, revision_map diff --git a/buildscripts/libdeps/README.md b/buildscripts/libdeps/README.md deleted file mode 100644 index c86ed55555a..00000000000 --- a/buildscripts/libdeps/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# Libdeps Graph Analysis Tools - -The Libdeps Graph analysis tools perform analysis and queries on graph representing the libdeps dependencies in the mongodb server builds. - -## Generating the graph file - -The scons build can create the graph files for analysis. To build the graphml file run the build with this minimal set of args required: - - python3 buildscripts/scons.py --link-model=dynamic --build-tools=next generate-libdeps-graph --linker=gold --modules= - -The target `generate-libdeps-graph` has special meaning and will turn on extra build items to generate the graph. 
This target will build everything so that the graph is fully representative of the build. The graph file by default will be found at `build/opt/libdeps/libdeps.graphml` (where `build/opt` is the `$BUILD_DIR`). - -## Command Line Tool - -The Command Line tool will process a single graph file based off a list of input args. To see the full list of args run the command: - - python3 buildscripts/libdeps/gacli.py --help - -By default it will performs some basic operations and print the output in human readable format: - - python3 buildscripts/libdeps/gacli.py --graph-file build/opt/libdeps/libdeps.graphml - -Which will give an output similar to this: - - Loading graph data...Loaded! - - Graph built from git hash: - 19da729e2696bbf15d3a35c340281e4385069b88 - - Graph Schema version: - 1 - - Build invocation: - "/usr/bin/python3.8" "buildscripts/scons.py" "--variables-files=etc/scons/mongodbtoolchain_stable_gcc.vars" "--dbg=on" "--opt=on" "--enable-free-mon=on" "--enable-http-client=on" "--cache=all" "--cache-dir=/home/ubuntu/scons-cache" "--install-action=hardlink" "--link-model=dynamic" "--build-tools=next" "--ssl" "--modules=enterprise" "CCACHE=ccache" "ICECC=icecc" "-j50" "generate-libdeps-graph" - - Nodes in Graph: 859 - Edges in Graph: 90843 - Direct Edges in Graph: 5808 - Transitive Edges in Graph: 85035 - Direct Public Edges in Graph: 3511 - Public Edges in Graph: 88546 - Private Edges in Graph: 2272 - Interface Edges in Graph: 25 - Shim Nodes in Graph: 20 - Program Nodes in Graph: 134 - Library Nodes in Graph: 725 - - LibdepsLinter: PUBLIC libdeps that could be PRIVATE: 0 - -## Graph Visualizer Tool - -The graph visualizer tools starts up a web service to provide a frontend GUI to navigating and examining the graph files. The Visualizer used a Python Flask backend and React Javascript frontend. 
You will need to install the libdeps requirements file to python to run the backend: - - python3 -m poetry install --no-root --sync -E libdeps - -For installing the dependencies for the frontend, you will need node >= 12.0.0 and npm installed and in the PATH. To install the dependencies navigate to directory where package.json lives, and run: - - cd buildscripts/libdeps/graph_visualizer_web_stack && npm install - -Alternatively if you are on linux, you can use the setup_node_env.sh script to automatically download node 12 and npm, setup the local environment and install the dependencies. Run the command: - - buildscripts/libdeps/graph_visualizer_web_stack/setup_node_end.sh install - -Assuming you are on a remote workstation and using defaults, you will need to make ssh tunnels to the web service to access the service in your local browser. The frontend and backend both use a port (this case 3000 is the frontend and 5000 is the backend), and the default host is localhost, so you will need to open two tunnels so the frontend running in your local web browser can communicate with the backend. If you are using the default host and port the tunnel command will look like this: - - ssh -L 3000:localhost:3000 -L 5000:localhost:5000 ubuntu@workstation.hostname - -Next we need to start the web service. It will require you to pass a directory where it will search for `.graphml` files which contain the graph data for various commits: - - python3 buildscripts/libdeps/graph_visualizer.py --graphml-dir build/opt/libdeps - -The script will download nodejs, use npm to install all required packages, launch the backend and then build the optimized production frontend. You can supply the `--debug` argument to work in development load which allows real time updates as files are modified. - -After the server has started up, it should notify you via the terminal that you can access it at http://localhost:3000 locally in your browser. 
diff --git a/buildscripts/libdeps/SCHEMA_CHANGE_LOG.txt b/buildscripts/libdeps/SCHEMA_CHANGE_LOG.txt deleted file mode 100644 index 5e61a0b26fa..00000000000 --- a/buildscripts/libdeps/SCHEMA_CHANGE_LOG.txt +++ /dev/null @@ -1,3 +0,0 @@ -3 removed shim node property -2 flipped edge direction in graph file data -1 initial schema diff --git a/buildscripts/libdeps/analyzer_unittests.py b/buildscripts/libdeps/analyzer_unittests.py deleted file mode 100755 index 5155061a7b8..00000000000 --- a/buildscripts/libdeps/analyzer_unittests.py +++ /dev/null @@ -1,400 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2021 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Unittests for the graph analyzer.""" - -import json -import unittest - -import libdeps.analyzer -from generate_test_graphs import get_basic_mock_graph, get_double_diamond_mock_graph -from libdeps.graph import ( - CountTypes, - DependsReportTypes, - LibdepsGraph, - LinterTypes, -) - - -class Tests(unittest.TestCase): - """Common unittest for the libdeps graph analyzer module.""" - - def run_analysis(self, expected, graph, algo, *args): - """Check results of analysis generically.""" - - analysis = [algo(graph, *args)] - ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis) - printer = libdeps.analyzer.GaJsonPrinter(ga) - result = json.loads(printer.get_json()) - self.assertEqual(result, expected) - - def run_counts(self, expected, graph): - """Check results of counts generically.""" - - analysis = libdeps.analyzer.counter_factory( - graph, - [name[0] for name in CountTypes.__members__.items() if name[0] != CountTypes.ALL.name], - ) - ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis) - printer = libdeps.analyzer.GaJsonPrinter(ga) - result = json.loads(printer.get_json()) - self.assertEqual(result, expected) - - def test_graph_paths_basic(self): - """Test for the GraphPaths analyzer on a basic graph.""" - - libdeps_graph = LibdepsGraph(get_basic_mock_graph()) - - expected_result = { - "GRAPH_PATHS": { - "('lib1.so', 'lib6.so')": [ - ["lib1.so", "lib2.so", "lib3.so", "lib6.so"], - ["lib1.so", "lib2.so", "lib4.so", "lib6.so"], - ] - } - } - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib1.so", "lib6.so" - ) - - expected_result = {"GRAPH_PATHS": {"('lib4.so', 'lib5.so')": []}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib4.so", "lib5.so" - ) - - expected_result = { - "GRAPH_PATHS": {"('lib2.so', 'lib5.so')": [["lib2.so", "lib3.so", "lib5.so"]]} - } - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib2.so", "lib5.so" - ) - - def 
test_graph_paths_double_diamond(self): - """Test path algorithm on the double diamond graph.""" - - libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph()) - - expected_result = { - "GRAPH_PATHS": { - "('lib1.so', 'lib9.so')": [ - ["lib1.so", "lib2.so", "lib3.so", "lib5.so", "lib6.so", "lib7.so", "lib9.so"], - ["lib1.so", "lib2.so", "lib3.so", "lib5.so", "lib6.so", "lib8.so", "lib9.so"], - ["lib1.so", "lib2.so", "lib4.so", "lib5.so", "lib6.so", "lib7.so", "lib9.so"], - ["lib1.so", "lib2.so", "lib4.so", "lib5.so", "lib6.so", "lib8.so", "lib9.so"], - ] - } - } - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib1.so", "lib9.so" - ) - - expected_result = { - "GRAPH_PATHS": { - "('lib5.so', 'lib9.so')": [ - ["lib5.so", "lib6.so", "lib7.so", "lib9.so"], - ["lib5.so", "lib6.so", "lib8.so", "lib9.so"], - ] - } - } - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib5.so", "lib9.so" - ) - - expected_result = { - "GRAPH_PATHS": { - "('lib2.so', 'lib6.so')": [ - ["lib2.so", "lib3.so", "lib5.so", "lib6.so"], - ["lib2.so", "lib4.so", "lib5.so", "lib6.so"], - ] - } - } - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib2.so", "lib6.so" - ) - - def test_critical_paths_basic(self): - """Test for the CriticalPaths for basic graph.""" - - libdeps_graph = LibdepsGraph(get_basic_mock_graph()) - - expected_result = {"CRITICAL_EDGES": {"('lib1.so', 'lib6.so')": [["lib1.so", "lib2.so"]]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib1.so", "lib6.so" - ) - - expected_result = {"CRITICAL_EDGES": {"('lib1.so', 'lib5.so')": [["lib1.so", "lib2.so"]]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib1.so", "lib5.so" - ) - - expected_result = {"CRITICAL_EDGES": {"('lib5.so', 'lib6.so')": []}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib5.so", 
"lib6.so" - ) - - def test_critical_paths_double_diamond(self): - """Test for the CriticalPaths for double diamond graph.""" - - libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph()) - - expected_result = {"CRITICAL_EDGES": {"('lib1.so', 'lib9.so')": [["lib1.so", "lib2.so"]]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib1.so", "lib9.so" - ) - - expected_result = {"CRITICAL_EDGES": {"('lib2.so', 'lib9.so')": [["lib5.so", "lib6.so"]]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib2.so", "lib9.so" - ) - - expected_result = {"CRITICAL_EDGES": {"('lib7.so', 'lib8.so')": []}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib7.so", "lib8.so" - ) - - def test_direct_depends_basic(self): - """Test for the DirectDependents for basic graph.""" - - libdeps_graph = LibdepsGraph(get_basic_mock_graph()) - - expected_result = {"DIRECT_DEPENDS": {"lib6.so": ["lib3.so", "lib4.so"]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib6.so" - ) - - expected_result = {"DIRECT_DEPENDS": {"lib1.so": []}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib1.so" - ) - - def test_direct_depends_double_diamond(self): - """Test for the DirectDependents for double diamond graph.""" - - libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph()) - - expected_result = {"DIRECT_DEPENDS": {"lib9.so": ["lib7.so", "lib8.so"]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib9.so" - ) - - expected_result = {"DIRECT_DEPENDS": {"lib6.so": ["lib5.so"]}} - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib6.so" - ) - - def test_common_depends_basic(self): - """Test for the CommonDependents for basic graph.""" - - libdeps_graph = LibdepsGraph(get_basic_mock_graph()) - - expected_result = { - 
"COMMON_DEPENDS": { - "('lib6.so', 'lib5.so')": ["lib1.so", "lib2.so", "lib3.so", "lib4.so"] - } - } - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.CommonDependents, - ["lib6.so", "lib5.so"], - ) - - expected_result = { - "COMMON_DEPENDS": { - "('lib5.so', 'lib6.so')": ["lib1.so", "lib2.so", "lib3.so", "lib4.so"] - } - } - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.CommonDependents, - ["lib5.so", "lib6.so"], - ) - - expected_result = {"COMMON_DEPENDS": {"('lib5.so', 'lib6.so', 'lib2.so')": ["lib1.so"]}} - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.CommonDependents, - ["lib5.so", "lib6.so", "lib2.so"], - ) - - def test_common_depends_double_diamond(self): - """Test for the CommonDependents for double diamond graph.""" - - libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph()) - - expected_result = { - "COMMON_DEPENDS": { - "('lib9.so',)": [ - "lib1.so", - "lib2.so", - "lib3.so", - "lib4.so", - "lib5.so", - "lib6.so", - "lib7.so", - "lib8.so", - ] - } - } - self.run_analysis( - expected_result, libdeps_graph, libdeps.analyzer.CommonDependents, ["lib9.so"] - ) - - expected_result = {"COMMON_DEPENDS": {"('lib9.so', 'lib2.so')": ["lib1.so"]}} - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.CommonDependents, - ["lib9.so", "lib2.so"], - ) - - expected_result = {"COMMON_DEPENDS": {"('lib1.so', 'lib4.so', 'lib3.so')": []}} - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.CommonDependents, - ["lib1.so", "lib4.so", "lib3.so"], - ) - - def test_exclude_depends_basic(self): - """Test for the ExcludeDependents for basic graph.""" - - libdeps_graph = LibdepsGraph(get_basic_mock_graph()) - - expected_result = {"EXCLUDE_DEPENDS": {"('lib6.so', 'lib5.so')": []}} - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.ExcludeDependents, - ["lib6.so", "lib5.so"], - ) - - expected_result = {"EXCLUDE_DEPENDS": 
{"('lib3.so', 'lib1.so')": ["lib1.so", "lib2.so"]}} - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.ExcludeDependents, - ["lib3.so", "lib1.so"], - ) - - expected_result = { - "EXCLUDE_DEPENDS": { - "('lib6.so', 'lib1.so', 'lib2.so')": ["lib2.so", "lib3.so", "lib4.so"] - } - } - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.ExcludeDependents, - ["lib6.so", "lib1.so", "lib2.so"], - ) - - def test_exclude_depends_double_diamond(self): - """Test for the ExcludeDependents for double diamond graph.""" - - libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph()) - - expected_result = { - "EXCLUDE_DEPENDS": {"('lib6.so', 'lib4.so')": ["lib3.so", "lib4.so", "lib5.so"]} - } - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.ExcludeDependents, - ["lib6.so", "lib4.so"], - ) - - expected_result = {"EXCLUDE_DEPENDS": {"('lib2.so', 'lib9.so')": []}} - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.ExcludeDependents, - ["lib2.so", "lib9.so"], - ) - - expected_result = { - "EXCLUDE_DEPENDS": { - "('lib8.so', 'lib1.so', 'lib2.so', 'lib3.so', 'lib4.so', 'lib5.so')": [ - "lib5.so", - "lib6.so", - ] - } - } - self.run_analysis( - expected_result, - libdeps_graph, - libdeps.analyzer.ExcludeDependents, - ["lib8.so", "lib1.so", "lib2.so", "lib3.so", "lib4.so", "lib5.so"], - ) - - def test_counts_basic(self): - """Test counts on basic graph.""" - - libdeps_graph = LibdepsGraph(get_basic_mock_graph()) - - expected_result = { - "NODE": 6, - "EDGE": 13, - "DIR_EDGE": 7, - "TRANS_EDGE": 6, - "DIR_PUB_EDGE": 6, - "PUB_EDGE": 12, - "PRIV_EDGE": 1, - "IF_EDGE": 0, - "PROG": 0, - "LIB": 6, - } - self.run_counts(expected_result, libdeps_graph) - - def test_counts_double_diamond(self): - """Test counts on double diamond graph.""" - - libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph()) - - expected_result = { - "NODE": 9, - "EDGE": 34, - "DIR_EDGE": 10, - "TRANS_EDGE": 24, - 
"DIR_PUB_EDGE": 10, - "PUB_EDGE": 34, - "PRIV_EDGE": 0, - "IF_EDGE": 0, - "PROG": 0, - "LIB": 9, - } - self.run_counts(expected_result, libdeps_graph) - - def test_unqiue_report_enums(self): - """Ensure uniqueness of enums used as keys when generating reports.""" - - enums = [enum.name for enum in LinterTypes] - enums += [enum.name for enum in DependsReportTypes] - enums_unique = set(enums) - self.assertEqual(len(enums), len(enums_unique)) - - -if __name__ == "__main__": - unittest.main() diff --git a/buildscripts/libdeps/gacli.py b/buildscripts/libdeps/gacli.py deleted file mode 100755 index bdff88d0d8b..00000000000 --- a/buildscripts/libdeps/gacli.py +++ /dev/null @@ -1,400 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Graph Analysis Command Line Interface. - -A Command line interface to the graph analysis module. 
-""" - -import argparse -import copy -import sys -import textwrap -from pathlib import Path - -import libdeps.analyzer as libdeps_analyzer -import networkx -from libdeps.graph import CountTypes, LibdepsGraph, LinterTypes - - -class LinterSplitArgs(argparse.Action): - """Custom argument action for checking multiple choice comma separated list.""" - - def __call__(self, parser, namespace, values, option_string=None): - """Create a multi choice comma separated list.""" - - selected_choices = [v.upper() for v in "".join(values).split(",") if v] - invalid_choices = [ - choice for choice in selected_choices if choice not in self.valid_choices - ] - if invalid_choices: - raise Exception( - f"Invalid choices: {invalid_choices}\nMust use choices from {self.valid_choices}" - ) - if CountTypes.ALL.name in selected_choices: - selected_choices = copy.copy(self.valid_choices) - selected_choices.remove(CountTypes.ALL.name) - if selected_choices == []: - selected_choices = copy.copy(self.default_choices) - if values == [""]: - selected_choices = [] - setattr(namespace, self.dest, [opt.replace("-", "_") for opt in selected_choices]) - - -class CountSplitArgs(LinterSplitArgs): - """Special case of common custom arg action for Count types.""" - - valid_choices = [name[0].replace("_", "-") for name in CountTypes.__members__.items()] - default_choices = [ - name[0] for name in CountTypes.__members__.items() if name[0] != CountTypes.ALL.name - ] - - -class LintSplitArgs(LinterSplitArgs): - """Special case of common custom arg action for Count types.""" - - valid_choices = [name[0].replace("_", "-") for name in LinterTypes.__members__.items()] - default_choices = [LinterTypes.PUBLIC_UNUSED.name] - - -class CustomFormatter(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter): - """Custom arg help formatter for modifying the defaults printed for the custom list action.""" - - @staticmethod - def _get_help_length(enum_type): - max_length = max([len(name[0]) for name in 
enum_type.__members__.items()]) - help_text = {} - for name in enum_type.__members__.items(): - help_text[name[0]] = name[0].lower() + ("-" * (max_length - len(name[0]))) + ": " - return help_text - - def _get_help_string(self, action): - if isinstance(action, CountSplitArgs): - help_text = self._get_help_length(CountTypes) - return textwrap.dedent(f"""\ - {action.help} - default: all, choices: - {help_text[CountTypes.ALL.name]}perform all counts - {help_text[CountTypes.NODE.name]}count nodes - {help_text[CountTypes.EDGE.name]}count edges - {help_text[CountTypes.DIR_EDGE.name]}count edges declared directly on a node - {help_text[CountTypes.TRANS_EDGE.name]}count edges induced by direct public edges - {help_text[CountTypes.DIR_PUB_EDGE.name]}count edges that are directly public - {help_text[CountTypes.PUB_EDGE.name]}count edges that are public - {help_text[CountTypes.PRIV_EDGE.name]}count edges that are private - {help_text[CountTypes.IF_EDGE.name]}count edges that are interface - {help_text[CountTypes.LIB.name]}count library nodes - {help_text[CountTypes.PROG.name]}count program nodes - """) - elif isinstance(action, LintSplitArgs): - help_text = self._get_help_length(LinterTypes) - return textwrap.dedent(f"""\ - {action.help} - default: all, choices: - {help_text[LinterTypes.ALL.name]}perform all linters - {help_text[LinterTypes.PUBLIC_UNUSED.name]}find unnecessary public libdeps - """) - return super()._get_help_string(action) - - -def setup_args_parser(): - """Add and parse the input args.""" - - parser = argparse.ArgumentParser(formatter_class=CustomFormatter) - - parser.add_argument( - "--graph-file", - type=str, - action="store", - help="The LIBDEPS graph to load.", - default="build/opt/libdeps/libdeps.graphml", - ) - - parser.add_argument( - "--format", choices=["pretty", "json"], default="pretty", help="The output format type." 
- ) - - parser.add_argument( - "--build-data", - choices=["on", "off"], - default="on", - help="Print the invocation and git hash used to build the graph", - ) - - parser.add_argument( - "--counts", - metavar="COUNT,", - nargs="*", - action=CountSplitArgs, - default=CountSplitArgs.default_choices, - help="Output various counts from the graph. Comma separated list.", - ) - - parser.add_argument( - "--lint", - metavar="LINTER,", - nargs="*", - action=LintSplitArgs, - default=LintSplitArgs.default_choices, - help="Perform various linters on the graph. Comma separated list.", - ) - - parser.add_argument( - "--direct-depends", - action="append", - default=[], - help="Print the nodes which depends on a given node.", - ) - - parser.add_argument( - "--program-depends", - action="append", - default=[], - help="Print the programs which depend (transitively or directly) on a given node.", - ) - - parser.add_argument( - "--common-depends", - nargs="+", - action="append", - default=[], - help="Print the nodes which have a common dependency on all N nodes.", - ) - - parser.add_argument( - "--exclude-depends", - nargs="+", - action="append", - default=[], - help="Print nodes which depend on the first node of N nodes, but exclude all nodes listed there after.", - ) - - parser.add_argument( - "--graph-paths", - nargs="+", - action="append", - default=[], - help="[from_node] [to_node]: Print all paths between 2 nodes.", - ) - - parser.add_argument( - "--critical-edges", - nargs="+", - action="append", - default=[], - help="[from_node] [to_node]: Print edges between two nodes, which if removed would break the dependency between those " - + "nodes.", - ) - - parser.add_argument( - "--symbol-depends", - nargs="+", - action="append", - default=[], - help="[from_node] [to_node]: Print symbols defined in from_node used by to_node.", - ) - - parser.add_argument( - "--efficiency", - nargs="+", - action="append", - default=[], - help="[from_node ...]: Print efficiencies of public direct 
edges off each from_node in a list of nodes.", - ) - - parser.add_argument( - "--efficiency-lint", - nargs="?", - type=int, - const=2, - help="[threshold]: Analyze efficiency of all public direct edges, print those below efficiency threshold percentage.", - ) - - parser.add_argument( - "--indegree-one", - action="store_true", - default=False, - help="Find candidate nodes for merging by searching the graph for nodes with only one node which depends on them.", - ) - - parser.add_argument( - "--bazel-conv-candidates", - action="store_true", - default=False, - help="Find candidate nodes ready for bazel conversion. This effectively means the node is currently not being built " - "with bazel and the node does not have any dependency nodes that are not being built in bazel.", - ) - - args = parser.parse_args() - - for arg_list in args.graph_paths: - if len(arg_list) != 2: - parser.error( - f"Must pass two args for --graph-paths, [from_node] [to_node], not {arg_list}" - ) - - for arg_list in args.critical_edges: - if len(arg_list) != 2: - parser.error( - f"Must pass two args for --critical-edges, [from_node] [to_node], not {arg_list}" - ) - - for arg_list in args.symbol_depends: - if len(arg_list) != 2: - parser.error( - f"Must pass two args for --symbol-depends, [from_node] [to_node], not {arg_list}" - ) - - return parser.parse_args() - - -def strip_build_dir(build_dir, node): - """Small util function for making args match the graph paths.""" - - try: - return str(Path(node).relative_to(build_dir)) - except ValueError: - return node - - -def strip_build_dirs(build_dir, nodes): - """Small util function for making a list of nodes match graph paths.""" - - return [strip_build_dir(build_dir, node) for node in nodes] - - -def load_graph_data(graph_file, output_format): - """Load a graphml file.""" - - if output_format == "pretty": - sys.stdout.write("Loading graph data...") - sys.stdout.flush() - graph = networkx.read_graphml(graph_file) - if output_format == "pretty": - 
sys.stdout.write("Loaded!\n\n") - return graph - - -def main(): - """Perform graph analysis based on input args.""" - - args = setup_args_parser() - graph = load_graph_data(args.graph_file, args.format) - libdeps_graph = LibdepsGraph(graph=graph) - build_dir = libdeps_graph.graph["build_dir"] - - if libdeps_graph.graph["graph_schema_version"] == 1: - libdeps_graph = networkx.reverse_view(libdeps_graph) - - analysis = libdeps_analyzer.counter_factory(libdeps_graph, args.counts) - - for analyzer_args in args.direct_depends: - analysis.append( - libdeps_analyzer.DirectDependents( - libdeps_graph, strip_build_dir(build_dir, analyzer_args) - ) - ) - - for analyzer_args in args.program_depends: - analysis.append( - libdeps_analyzer.TransitiveProgramDependents( - libdeps_graph, strip_build_dir(build_dir, analyzer_args) - ) - ) - - for analyzer_args in args.common_depends: - analysis.append( - libdeps_analyzer.CommonDependents( - libdeps_graph, strip_build_dirs(build_dir, analyzer_args) - ) - ) - - for analyzer_args in args.exclude_depends: - analysis.append( - libdeps_analyzer.ExcludeDependents( - libdeps_graph, strip_build_dirs(build_dir, analyzer_args) - ) - ) - - for analyzer_args in args.graph_paths: - analysis.append( - libdeps_analyzer.GraphPaths( - libdeps_graph, - strip_build_dir(build_dir, analyzer_args[0]), - strip_build_dir(build_dir, analyzer_args[1]), - ) - ) - - for analyzer_args in args.symbol_depends: - analysis.append( - libdeps_analyzer.SymbolDependents( - libdeps_graph, - strip_build_dir(build_dir, analyzer_args[0]), - strip_build_dir(build_dir, analyzer_args[1]), - ) - ) - - for analyzer_args in args.efficiency: - nodes = [] - for arg in analyzer_args: - nodes.append(strip_build_dir(build_dir, arg)) - analysis.append(libdeps_analyzer.Efficiency(libdeps_graph, nodes)) - - if args.efficiency_lint: - analysis.append(libdeps_analyzer.EfficiencyLinter(libdeps_graph, args.efficiency_lint)) - - for analyzer_args in args.critical_edges: - analysis.append( - 
libdeps_analyzer.CriticalEdges( - libdeps_graph, - strip_build_dir(build_dir, analyzer_args[0]), - strip_build_dir(build_dir, analyzer_args[1]), - ) - ) - - if args.indegree_one: - analysis.append(libdeps_analyzer.InDegreeOne(libdeps_graph)) - - if args.bazel_conv_candidates: - analysis.append(libdeps_analyzer.BazelConversionCandidates(libdeps_graph)) - - analysis += libdeps_analyzer.linter_factory(libdeps_graph, args.lint) - - if args.build_data: - analysis.append(libdeps_analyzer.BuildDataReport(libdeps_graph)) - - ga = libdeps_analyzer.LibdepsGraphAnalysis(analysis) - - if args.format == "pretty": - ga_printer = libdeps_analyzer.GaPrettyPrinter(ga) - elif args.format == "json": - ga_printer = libdeps_analyzer.GaJsonPrinter(ga) - else: - return - - ga_printer.print() - - -if __name__ == "__main__": - main() diff --git a/buildscripts/libdeps/generate_test_graphs.py b/buildscripts/libdeps/generate_test_graphs.py deleted file mode 100644 index 1ea40b0430b..00000000000 --- a/buildscripts/libdeps/generate_test_graphs.py +++ /dev/null @@ -1,491 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -"""Test graphs for the graph visualizer and analyzer.""" - -import argparse -import json -import os - -import networkx -from libdeps.graph import EdgeProps, LibdepsGraph, NodeProps - - -def get_args(): - """Create the argparse and return passed args.""" - - parser = argparse.ArgumentParser() - - parser.add_argument( - "--graph-output-dir", - type=str, - action="store", - help="Directory test graphml files will be saved.", - default="build/opt/libdeps", - ) - - parser.add_argument( - "--generate-big-graphs", - action="store_true", - help="Makes graphs which are large for testing scale.", - default=False, - ) - - return parser.parse_args() - - -def add_node(graph, node, builder): - """Add a node to the graph.""" - - graph.add_nodes_from([(node, {NodeProps.bin_type.name: builder})]) - - -def add_edge(graph, from_node, to_node, **kwargs): - """Add an edge to the graph.""" - - edge_props = { - EdgeProps.direct.name: kwargs[EdgeProps.direct.name], - EdgeProps.visibility.name: int(kwargs[EdgeProps.visibility.name]), - } - if kwargs.get("symbols"): - edge_props[EdgeProps.symbols.name] = kwargs.get("symbols") - - graph.add_edges_from([(from_node, to_node, edge_props)]) - - -def get_big_graph(int_id): - """Generate a big graph.""" - - graph = LibdepsGraph() - graph.graph["build_dir"] = "." 
- graph.graph["graph_schema_version"] = 2 - graph.graph["deptypes"] = json.dumps( - { - "Global": 0, - "Public": 1, - "Private": 2, - "Interface": 3, - } - ) - graph.graph["git_hash"] = f"BIG{int_id.zfill(4)}" - num_nodes = 200 - for i in range(num_nodes): - add_node(graph, f"lib{i}.so", "SharedLibrary") - for j in range(num_nodes - i): - add_edge( - graph, - f"lib{i}.so", - f"lib{j}.so", - direct=True, - visibility=graph.get_deptype("Public"), - symbols="\n".join([f"RandomString{i+j}" * 100 for i in range(10)]), - ) - return graph - - -def get_double_diamond_mock_graph(): - """Construct a mock graph which covers a double diamond structure.""" - - graph = LibdepsGraph() - graph.graph["build_dir"] = "." - graph.graph["graph_schema_version"] = 2 - graph.graph["deptypes"] = json.dumps( - { - "Global": 0, - "Public": 1, - "Private": 2, - "Interface": 3, - } - ) - graph.graph["git_hash"] = "TEST001" - - # builds a graph of mostly public edges that looks like this: - # - # - # /lib3.so /lib7.so - # | \ | \ - # <-lib1.so--lib2.so lib5.so--lib6.so lib9.so - # | / | / - # \lib4.so \lib8.so - # - - add_node(graph, "lib1.so", "SharedLibrary") - add_node(graph, "lib2.so", "SharedLibrary") - add_node(graph, "lib3.so", "SharedLibrary") - add_node(graph, "lib4.so", "SharedLibrary") - add_node(graph, "lib5.so", "SharedLibrary") - add_node(graph, "lib6.so", "SharedLibrary") - add_node(graph, "lib7.so", "SharedLibrary") - add_node(graph, "lib8.so", "SharedLibrary") - add_node(graph, "lib9.so", "SharedLibrary") - - add_edge(graph, "lib1.so", "lib2.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib3.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib4.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", "lib5.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib5.so", direct=True, visibility=graph.get_deptype("Public")) - 
add_edge(graph, "lib5.so", "lib6.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib6.so", "lib7.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib6.so", "lib8.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib7.so", "lib9.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib8.so", "lib9.so", direct=True, visibility=graph.get_deptype("Public")) - - # trans for 3 and 4 - add_edge(graph, "lib1.so", "lib3.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib1.so", "lib4.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 5 - add_edge(graph, "lib1.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 6 - add_edge(graph, "lib1.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 7 - add_edge(graph, "lib1.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib5.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 8 - add_edge(graph, "lib1.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", 
"lib8.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib5.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 9 - add_edge(graph, "lib1.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib5.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib6.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public")) - - return graph - - -def get_basic_mock_graph(): - """Construct a mock graph which covers most cases and is easy to understand.""" - - graph = LibdepsGraph() - graph.graph["build_dir"] = "." 
- graph.graph["graph_schema_version"] = 2 - graph.graph["deptypes"] = json.dumps( - { - "Global": 0, - "Public": 1, - "Private": 2, - "Interface": 3, - } - ) - graph.graph["git_hash"] = "TEST002" - - # builds a graph of mostly public edges: - # - # /-lib5.so - # /lib3.so - # | \-lib6.so - # <-lib1.so--lib2.so - # | /-lib5.so (private) - # \lib4.so - # \-lib6.so - - # nodes - add_node(graph, "lib1.so", "SharedLibrary") - add_node(graph, "lib2.so", "SharedLibrary") - add_node(graph, "lib3.so", "SharedLibrary") - add_node(graph, "lib4.so", "SharedLibrary") - add_node(graph, "lib5.so", "SharedLibrary") - add_node(graph, "lib6.so", "SharedLibrary") - - # direct edges - add_edge(graph, "lib1.so", "lib2.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib3.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib2.so", "lib4.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib6.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", "lib5.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib3.so", "lib6.so", direct=True, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib4.so", "lib5.so", direct=True, visibility=graph.get_deptype("Private")) - - # trans for 3 - add_edge(graph, "lib1.so", "lib3.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 4 - add_edge(graph, "lib1.so", "lib4.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 5 - add_edge(graph, "lib2.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib1.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public")) - - # trans for 6 - add_edge(graph, "lib2.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public")) - add_edge(graph, "lib1.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public")) - - return graph - - -def 
get_basic_mock_directory_graph(): - """Construct a mock graph which covers most cases and is easy to understand.""" - - graph = LibdepsGraph() - graph.graph["build_dir"] = "." - graph.graph["graph_schema_version"] = 2 - graph.graph["deptypes"] = json.dumps( - { - "Global": 0, - "Public": 1, - "Private": 2, - "Interface": 3, - } - ) - graph.graph["git_hash"] = "TEST003" - - # builds a graph of mostly public edges: - # - # /-lib5.so - # /lib3 - # | \-lib6.so - # <-lib1.so--lib2 - # | /-lib5.so (private) - # \lib4.so - # \-lib6.so - - # nodes - add_node(graph, "dir1/lib1.so", "SharedLibrary") - add_node(graph, "dir1/sub1/lib2", "Program") - add_node(graph, "dir1/sub1/lib3", "Program") - add_node(graph, "dir1/sub2/lib4.so", "SharedLibrary") - add_node(graph, "dir2/lib5.so", "SharedLibrary") - add_node(graph, "dir2/lib6.so", "SharedLibrary") - - # direct edges - add_edge( - graph, "dir1/lib1.so", "dir1/sub1/lib2", direct=True, visibility=graph.get_deptype("Public") - ) - add_edge( - graph, - "dir1/sub1/lib2", - "dir1/sub1/lib3", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "dir1/sub1/lib2", - "dir1/sub2/lib4.so", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "dir1/sub2/lib4.so", - "dir2/lib6.so", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, "dir1/sub1/lib3", "dir2/lib5.so", direct=True, visibility=graph.get_deptype("Public") - ) - add_edge( - graph, "dir1/sub1/lib3", "dir2/lib6.so", direct=True, visibility=graph.get_deptype("Public") - ) - add_edge( - graph, - "dir1/sub2/lib4.so", - "dir2/lib5.so", - direct=True, - visibility=graph.get_deptype("Private"), - ) - - # trans for 3 - add_edge( - graph, - "dir1/lib1.so", - "dir1/sub1/lib3", - direct=False, - visibility=graph.get_deptype("Public"), - ) - - # trans for 4 - add_edge( - graph, - "dir1/lib1.so", - "dir1/sub2/lib4.so", - direct=False, - visibility=graph.get_deptype("Public"), - ) - - # trans for 5 - 
add_edge( - graph, - "dir1/sub1/lib2", - "dir2/lib5.so", - direct=False, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, "dir1/lib1.so", "dir2/lib5.so", direct=False, visibility=graph.get_deptype("Public") - ) - - # trans for 6 - add_edge( - graph, - "dir1/sub1/lib2", - "dir2/lib6.so", - direct=False, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, "dir1/lib1.so", "dir2/lib6.so", direct=False, visibility=graph.get_deptype("Public") - ) - - return graph - - -def get_simple_directory_graph(): - """Construct a mock graph which covers most cases and is easy to understand.""" - - graph = LibdepsGraph() - graph.graph["build_dir"] = "." - graph.graph["graph_schema_version"] = 2 - graph.graph["deptypes"] = json.dumps( - { - "Global": 0, - "Public": 1, - "Private": 2, - "Interface": 3, - } - ) - graph.graph["git_hash"] = "TEST004" - - # lib2.so <- lib4.so - # /∧ \∨ - # lib1.so prog1 <- lib5.so - # \∨ /∧ - # lib3.so -> prog2 - - # nodes - add_node(graph, "mongo/base/lib1.so", "SharedLibrary") - add_node(graph, "mongo/base/lib2.so", "SharedLibrary") - add_node(graph, "mongo/db/lib3.so", "SharedLibrary") - add_node(graph, "third_party/lib4.so", "SharedLibrary") - add_node(graph, "third_party/lib5.so", "SharedLibrary") - add_node(graph, "mongo/base/prog1", "Program") - add_node(graph, "mongo/db/prog2", "Program") - - # direct edges - add_edge( - graph, - "mongo/base/lib1.so", - "mongo/base/lib2.so", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "mongo/base/lib1.so", - "mongo/db/lib3.so", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "mongo/base/lib2.so", - "mongo/base/prog1", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "mongo/db/lib3.so", - "mongo/base/prog1", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "mongo/db/lib3.so", - "mongo/db/prog2", - direct=True, - 
visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "third_party/lib4.so", - "mongo/base/lib2.so", - direct=True, - visibility=graph.get_deptype("Public"), - ) - add_edge( - graph, - "third_party/lib5.so", - "mongo/base/prog1", - direct=True, - visibility=graph.get_deptype("Public"), - ) - - return graph - - -def save_graph_file(graph, output_dir): - """Save a graph locally as a .graphml.""" - - filename = output_dir + "/libdeps_" + graph.graph["git_hash"] + ".graphml" - networkx.write_graphml(graph, filename, named_key_ids=True) - - -def main(): - """Generate and save the test graphs as .graphml files.""" - - args = get_args() - output_dir = args.graph_output_dir - - os.makedirs(output_dir, exist_ok=True) - - graph = get_double_diamond_mock_graph() - save_graph_file(graph, output_dir) - - graph = get_basic_mock_graph() - save_graph_file(graph, output_dir) - - graph = get_basic_mock_directory_graph() - save_graph_file(graph, output_dir) - - graph = get_simple_directory_graph() - save_graph_file(graph, output_dir) - - if args.generate_big_graphs: - graph = get_big_graph("0") - for i in range(1, 30): - print(f"generating big graph {i}...") - graph.graph["git_hash"] = f"BIG{str(i).zfill(4)}" - save_graph_file(graph, output_dir) - - -if __name__ == "__main__": - main() diff --git a/buildscripts/libdeps/graph_visualizer.py b/buildscripts/libdeps/graph_visualizer.py deleted file mode 100644 index dd84e43b251..00000000000 --- a/buildscripts/libdeps/graph_visualizer.py +++ /dev/null @@ -1,246 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Libdeps Graph Visualization Tool. - -Starts a web service which creates a UI for interacting and examining the libdeps graph. -The web service front end consist of React+Redux for the framework, flask API for backend -communication, and Material UI for the GUI. The web service back end uses flask. - -This script will automatically install the npm modules, and build and run the production -web service if not debug. -""" - -import argparse -import os -import subprocess -import textwrap -import threading -from pathlib import Path - -from graph_visualizer_web_stack.flask.flask_backend import BackendServer -from werkzeug.serving import is_running_from_reloader - - -def get_args(): - """Create the argparse and return passed args.""" - - parser = argparse.ArgumentParser() - - parser.add_argument( - "--debug", - action="store_true", - help='Whether or not to run debug server. 
Note for non-debug, you must build the production frontend with "npm run build".', - ) - parser.add_argument( - "--graphml-dir", - type=str, - action="store", - help="Directory where libdeps graphml files live. The UI will allow selecting different graphs from this location", - default="build/opt", - ) - - parser.add_argument( - "--frontend-host", - type=str, - action="store", - help="Hostname where the front end will run.", - default="localhost", - ) - - parser.add_argument( - "--backend-host", - type=str, - action="store", - help="Hostname where the back end will run.", - default="localhost", - ) - - parser.add_argument( - "--frontend-port", - type=str, - action="store", - help="Port where the front end will run.", - default="3000", - ) - - parser.add_argument( - "--backend-port", - type=str, - action="store", - help="Port where the back end will run.", - default="5000", - ) - - parser.add_argument( - "--memory-limit", - type=float, - action="store", - help="Limit in GB for backend memory usage.", - default=8.0, - ) - - parser.add_argument( - "--launch", - choices=["frontend", "backend", "both"], - default="both", - help="Specifies which part of the web service to launch.", - ) - - return parser.parse_args() - - -def execute_and_read_stdout(cmd, cwd, env): - """Execute passed command and get realtime output.""" - - popen = subprocess.Popen( - cmd, stdout=subprocess.PIPE, cwd=str(cwd), env=env, universal_newlines=True - ) - for stdout_line in iter(popen.stdout.readline, ""): - yield stdout_line - popen.stdout.close() - return_code = popen.wait() - if return_code: - raise subprocess.CalledProcessError(return_code, cmd) - - -def check_node(node_check, cwd): - """Check node version and install npm packages.""" - - status, output = subprocess.getstatusoutput(node_check) - if status != 0 or not output.split("\n")[-1].startswith("v14"): - print( - textwrap.dedent(f"""\ - Failed to get node version 14 from 'node -v': - output: '{output}' - Perhaps run 'source 
{cwd}/setup_node_env.sh install'""") - ) - exit(1) - - node_modules = cwd / "node_modules" - - if not node_modules.exists(): - print( - textwrap.dedent(f"""\ - {node_modules} not found, you need to run 'npm install' in {cwd} - Perhaps run 'source {cwd}/setup_node_env.sh install'""") - ) - exit(1) - - -def start_backend(web_service_info, debug): - """Start the backend in debug mode.""" - - web_service_info["app"].run( - host=web_service_info["backend_host"], port=web_service_info["backend_port"], debug=debug - ) - - -def start_frontend_thread(web_service_info, npm_command, debug): - """Start the backend in debug mode.""" - env = os.environ.copy() - backend_url = f"http://{web_service_info['backend_host']}:{web_service_info['backend_port']}" - env["REACT_APP_API_URL"] = backend_url - - if debug: - env["HOST"] = web_service_info["frontend_host"] - env["PORT"] = web_service_info["frontend_port"] - - for output in execute_and_read_stdout(npm_command, cwd=web_service_info["cwd"], env=env): - print(output, end="") - else: - for output in execute_and_read_stdout(npm_command, cwd=web_service_info["cwd"], env=env): - print(output, end="") - - env["PATH"] = "node_modules/.bin:" + env["PATH"] - react_frontend = subprocess.Popen( - [ - "http-server", - "build", - "-a", - web_service_info["frontend_host"], - "-p", - web_service_info["frontend_port"], - f"--cors={backend_url}", - ], - env=env, - cwd=str(web_service_info["cwd"]), - ) - stdout, stderr = react_frontend.communicate() - print(f"frontend stdout: '{stdout}'\n\nfrontend stderr: '{stderr}'") - - -def main(): - """Start up the server.""" - - args = get_args() - - # TODO: add https command line option and support - server = BackendServer( - graphml_dir=args.graphml_dir, - frontend_url=f"http://{args.frontend_host}:{args.frontend_port}", - memory_limit=args.memory_limit, - ) - - app = server.get_app() - cwd = Path(__file__).parent / "graph_visualizer_web_stack" - - web_service_info = { - "app": app, - "cwd": cwd, - 
"frontend_host": args.frontend_host, - "frontend_port": args.frontend_port, - "backend_host": args.backend_host, - "backend_port": args.backend_port, - } - - node_check = "node -v" - npm_start = ["npm", "start"] - npm_build = ["npm", "run", "build"] - - if not is_running_from_reloader(): - check_node(node_check, cwd) - - frontend_thread = None - if args.launch in ["frontend", "both"]: - if args.debug: - npm_command = npm_start - else: - npm_command = npm_build - - frontend_thread = threading.Thread( - target=start_frontend_thread, args=(web_service_info, npm_command, args.debug) - ) - frontend_thread.start() - - if args.launch in ["backend", "both"]: - start_backend(web_service_info, args.debug) - - if not is_running_from_reloader(): - if frontend_thread: - frontend_thread.join() - - -if __name__ == "__main__": - main() diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/flask/flask_backend.py b/buildscripts/libdeps/graph_visualizer_web_stack/flask/flask_backend.py deleted file mode 100644 index 3b6f649c005..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/flask/flask_backend.py +++ /dev/null @@ -1,441 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Flask backend web server. - -The backend interacts with the graph_analyzer to perform queries on various libdeps graphs. -""" - -import gc -import threading -import time -from collections import OrderedDict, namedtuple -from pathlib import Path - -import cxxfilt -import flask -import libdeps.analyzer -import libdeps.graph -import networkx -from flask import request -from flask_cors import CORS -from lxml import etree -from pympler.asizeof import asizeof - - -class BackendServer: - """Create small class for storing variables and state of the backend.""" - - def __init__(self, graphml_dir, frontend_url, memory_limit): - """Create and setup the state variables.""" - self.app = flask.Flask(__name__) - self.app.config["CORS_HEADERS"] = "Content-Type" - CORS(self.app, resources={r"/*": {"origins": frontend_url}}) - - self.app.add_url_rule("/api/graphs", "return_graph_files", self.return_graph_files) - self.app.add_url_rule( - "/api/graphs//nodes", "return_node_list", self.return_node_list - ) - self.app.add_url_rule( - "/api/graphs//analysis", "return_analyze_counts", self.return_analyze_counts - ) - self.app.add_url_rule( - "/api/graphs//d3", "return_d3", self.return_d3, methods=["POST"] - ) - self.app.add_url_rule( - "/api/graphs//nodes/details", - "return_node_infos", - self.return_node_infos, - methods=["POST"], - ) - self.app.add_url_rule( - "/api/graphs//paths", - "return_paths_between", - self.return_paths_between, - methods=["POST"], - ) - - self.loaded_graphs = {} - 
self.total_graph_size = 0 - self.graphml_dir = Path(graphml_dir) - self.frontend_url = frontend_url - self.loading_locks = {} - self.memory_limit_bytes = memory_limit * (10**9) * 0.8 - self.unloading = False - self.unloading_lock = threading.Lock() - - self.graph_file_tuple = namedtuple("GraphFile", ["version", "git_hash", "graph_file"]) - self.graph_files = self.get_graphml_files() - - @staticmethod - def get_dependency_graph(graph): - """Returns the dependency graph of a given graph.""" - - if graph.graph["graph_schema_version"] == 1: - return networkx.reverse_view(graph) - else: - return graph - - @staticmethod - def get_dependents_graph(graph): - """Returns the dependents graph of a given graph.""" - - if graph.graph["graph_schema_version"] == 1: - return graph - else: - return networkx.reverse_view(graph) - - def get_app(self): - """Return the app instance.""" - - return self.app - - def get_graph_build_data(self, graph_file): - """Fast method for extracting basic build data from the graph file.""" - - version = "" - git_hash = "" - for _, element in etree.iterparse( - str(graph_file), tag="{http://graphml.graphdrawing.org/xmlns}data" - ): - if element.get("key") == "graph_schema_version": - version = element.text - if element.get("key") == "git_hash": - git_hash = element.text - element.clear() - if version and git_hash: - break - return self.graph_file_tuple(version, git_hash, graph_file) - - def get_graphml_files(self): - """Find all graphml files in the target graphml dir.""" - - graph_files = OrderedDict() - for graph_file in self.graphml_dir.glob("**/*.graphml"): - graph_file_tuple = self.get_graph_build_data(graph_file) - graph_files[graph_file_tuple.git_hash[:7]] = graph_file_tuple - return graph_files - - def return_graph_files(self): - """Prepare the list of graph files for the frontend.""" - - data = {"graph_files": []} - for i, (_, graph_file_data) in enumerate(self.graph_files.items(), start=1): - data["graph_files"].append( - { - "id": i, - 
"version": graph_file_data.version, - "git": graph_file_data.git_hash[:7], - "selected": False, - } - ) - return data - - def return_node_infos(self, git_hash): - """Returns details about a set of selected nodes.""" - - req_body = request.get_json() - if "selected_nodes" in req_body.keys(): - selected_nodes = req_body["selected_nodes"] - - if graph := self.load_graph(git_hash): - dependents_graph = self.get_dependents_graph(graph) - dependency_graph = self.get_dependency_graph(graph) - - nodeinfo_data = {"nodeInfos": []} - - for node in selected_nodes: - nodeinfo_data["nodeInfos"].append( - { - "id": len(nodeinfo_data["nodeInfos"]), - "node": str(node), - "name": Path(node).name, - "attribs": [ - {"name": key, "value": value} - for key, value in dependents_graph.nodes(data=True)[ - str(node) - ].items() - ], - "dependers": [ - { - "node": depender, - "symbols": dependents_graph[str(node)][depender].get("symbols"), - } - for depender in dependents_graph[str(node)] - ], - "dependencies": [ - { - "node": dependency, - "symbols": dependents_graph[dependency][str(node)].get( - "symbols" - ), - } - for dependency in dependency_graph[str(node)] - ], - } - ) - - return nodeinfo_data, 200 - return { - "error": "Git commit hash (" + git_hash + ") does not have a matching graph file." 
- }, 400 - return {"error": 'Request body does not contain "selected_nodes" attribute.'}, 400 - - def return_d3(self, git_hash): - """Convert the current selected rows into a format for D3.""" - - req_body = request.get_json() - if "selected_nodes" in req_body.keys(): - selected_nodes = req_body["selected_nodes"] - - if graph := self.load_graph(git_hash): - dependents_graph = self.get_dependents_graph(graph) - dependency_graph = self.get_dependency_graph(graph) - - nodes = {} - links = {} - links_trans = {} - - def add_node_to_graph_data(node): - nodes[str(node)] = { - "id": str(node), - "name": Path(node).name, - "type": dependents_graph.nodes()[str(node)].get("bin_type", ""), - } - - def add_link_to_graph_data(source, target, data): - links[str(source) + str(target)] = { - "source": str(source), - "target": str(target), - "data": data, - } - - for node in selected_nodes: - add_node_to_graph_data(node) - - for libdep in dependency_graph[str(node)]: - if dependents_graph[libdep][str(node)].get("direct"): - add_node_to_graph_data(libdep) - add_link_to_graph_data( - node, libdep, dependents_graph[libdep][str(node)] - ) - - if "transitive_edges" in req_body.keys() and req_body["transitive_edges"] is True: - for node in selected_nodes: - for libdep in dependency_graph[str(node)]: - if str(libdep) in nodes: - add_link_to_graph_data( - node, libdep, dependents_graph[libdep][str(node)] - ) - - if "extra_nodes" in req_body.keys(): - extra_nodes = req_body["extra_nodes"] - for node in extra_nodes: - add_node_to_graph_data(node) - - for libdep in dependency_graph.get_direct_nonprivate_graph()[str(node)]: - add_node_to_graph_data(libdep) - add_link_to_graph_data( - node, libdep, dependents_graph[libdep][str(node)] - ) - - node_data = { - "graphData": { - "nodes": [data for node, data in nodes.items()], - "links": [data for link, data in links.items()], - "links_trans": [data for link, data in links_trans.items()], - } - } - return node_data, 200 - return { - "error": "Git 
commit hash (" + git_hash + ") does not have a matching graph file." - }, 400 - return {"error": 'Request body does not contain "selected_nodes" attribute.'}, 400 - - def return_analyze_counts(self, git_hash): - """Perform count analysis and send the results back to frontend.""" - - with self.app.test_request_context(): - if graph := self.load_graph(git_hash): - dependency_graph = self.get_dependency_graph(graph) - - analysis = libdeps.analyzer.counter_factory( - dependency_graph, - [name[0] for name in libdeps.analyzer.CountTypes.__members__.items()], - ) - ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis) - results = ga.get_results() - - graph_data = [] - for i, data in enumerate(results): - graph_data.append({"id": i, "type": data, "value": results[data]}) - return {"results": graph_data}, 200 - return { - "error": "Git commit hash (" + git_hash + ") does not have a matching graph file." - }, 400 - - def return_paths_between(self, git_hash): - """Gather all the paths in the graph between a fromNode and toNode.""" - - message = request.get_json() - if "fromNode" in message.keys() and "toNode" in message.keys(): - if graph := self.load_graph(git_hash): - dependency_graph = self.get_dependency_graph(graph) - analysis = [ - libdeps.analyzer.GraphPaths( - dependency_graph, message["fromNode"], message["toNode"] - ) - ] - ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis=analysis) - results = ga.get_results() - - paths = results[libdeps.analyzer.DependsReportTypes.GRAPH_PATHS.name][ - (message["fromNode"], message["toNode"]) - ] - paths.sort(key=len) - nodes = set() - for path in paths: - for node in path: - nodes.add(node) - - # Need to handle self.send_graph_data(extra_nodes=list(nodes)) - return { - "fromNode": message["fromNode"], - "toNode": message["toNode"], - "paths": paths, - "extraNodes": list(nodes), - }, 200 - return { - "error": "Git commit hash (" + git_hash + ") does not have a matching graph file." 
- }, 400 - return {"error": "Body must contain toNode and fromNode"}, 400 - - def return_node_list(self, git_hash): - """Gather all the nodes in the graph for the node list.""" - - with self.app.test_request_context(): - node_data = {"nodes": [], "links": []} - if graph := self.load_graph(git_hash): - for node in sorted(graph.nodes()): - node_path = Path(node) - node_data["nodes"].append(str(node_path)) - return node_data, 200 - return { - "error": "Git commit hash (" + git_hash + ") does not have a matching graph file." - }, 400 - - def perform_unloading(self, git_hash): - """Perform the unloading of a graph in a separate thread.""" - if self.total_graph_size > self.memory_limit_bytes: - while self.total_graph_size > self.memory_limit_bytes: - self.app.logger.info( - f"Current graph memory: {self.total_graph_size / (10**9)} GB, Unloading to get to {self.memory_limit_bytes / (10**9)} GB" - ) - - self.unloading_lock.acquire() - - lru_hash = min( - [graph_hash for graph_hash in self.loaded_graphs if graph_hash != git_hash], - key=lambda x: self.loaded_graphs[x]["atime"], - ) - if lru_hash: - self.app.logger.info( - f"Unloading {[lru_hash]}, last used {round(time.time() - self.loaded_graphs[lru_hash]['atime'] , 1)}s ago" - ) - self.total_graph_size -= self.loaded_graphs[lru_hash]["size"] - del self.loaded_graphs[lru_hash] - del self.loading_locks[lru_hash] - self.unloading_lock.release() - gc.collect() - self.app.logger.info(f"Memory limit satisfied: {self.total_graph_size / (10**9)} GB") - self.unloading = False - - def unload_graphs(self, git_hash): - """Unload least recently used graph when hitting application memory threshold.""" - - if not self.unloading: - self.unloading = True - - thread = threading.Thread(target=self.perform_unloading, args=(git_hash,)) - thread.daemon = True - thread.start() - - def load_graph(self, git_hash): - """Load the graph into application memory.""" - - with self.app.test_request_context(): - self.unload_graphs(git_hash) - - 
loaded_graph = None - - self.unloading_lock.acquire() - if git_hash in self.loaded_graphs: - self.loaded_graphs[git_hash]["atime"] = time.time() - loaded_graph = self.loaded_graphs[git_hash]["graph"] - if git_hash not in self.loading_locks: - self.loading_locks[git_hash] = threading.Lock() - self.unloading_lock.release() - - self.loading_locks[git_hash].acquire() - if git_hash not in self.loaded_graphs: - if git_hash in self.graph_files: - file_path = self.graph_files[git_hash].graph_file - nx_graph = networkx.read_graphml(file_path) - if int(self.get_graph_build_data(file_path).version) > 3: - for source, target in nx_graph.edges: - try: - nx_graph[source][target]["symbols"] = list( - nx_graph[source][target].get("symbols").split("\n") - ) - except AttributeError: - nx_graph[source][target]["symbols"] = [] - else: - for source, target in nx_graph.edges: - try: - nx_graph[source][target]["symbols"] = list( - map( - cxxfilt.demangle, - nx_graph[source][target].get("symbols").split(), - ) - ) - except AttributeError: - try: - nx_graph[source][target]["symbols"] = list( - nx_graph[source][target].get("symbols").split() - ) - except AttributeError: - nx_graph[source][target]["symbols"] = [] - loaded_graph = libdeps.graph.LibdepsGraph(nx_graph) - - self.loaded_graphs[git_hash] = { - "graph": loaded_graph, - "size": asizeof(loaded_graph), - "atime": time.time(), - } - self.total_graph_size += self.loaded_graphs[git_hash]["size"] - else: - loaded_graph = self.loaded_graphs[git_hash]["graph"] - self.loading_locks[git_hash].release() - - return loaded_graph diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/package.json b/buildscripts/libdeps/graph_visualizer_web_stack/package.json deleted file mode 100644 index c375f0f2cee..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "name": "graph_visualizer", - "version": "4.0.0", - "private": true, - "engines": { - "node": ">=14.0.0" - }, - "engineStrict": 
true, - "scripts": { - "start": "react-scripts start", - "build": "react-scripts build", - "start-flask": "cd flask && flask run --no-debugger", - "test": "react-scripts test", - "eject": "react-scripts eject" - }, - "dependencies": { - "@emotion/react": "^11.11.0", - "@emotion/styled": "^11.11.0", - "@material-ui/core": "^5.0.0-alpha.22", - "@material-ui/icons": "^5.0.0-alpha.22", - "@material-ui/lab": "^5.0.0-alpha.22", - "bezier-js": "6.1.3", - "canvas": "^2.11.2", - "date-fns": "^2.30.0", - "dayjs": "^1.11.7", - "force-graph": "^1.43.1", - "http-proxy-middleware": "^2.0.6", - "http-server": "^14.1.1", - "luxon": "^3.3.0", - "moment": "^2.29.4", - "p-limit": "^4.0.0", - "react": "^18.2", - "react-dom": "^18.2.0", - "react-force-graph-2d": "1.25.0", - "react-force-graph-3d": "1.23.0", - "react-indiana-drag-scroll": "^2.2.0", - "react-redux": "^8.0.5", - "react-resize-aware": "3.1.1", - "react-resize-detector": "^9.1.0", - "react-scripts": "^5.0.1", - "react-split-pane": "^0.1.92", - "react-virtualized": "^9.22.5", - "react-window": "^1.8.9", - "redux": "^4.2.1", - "typescript": "^5.0.4" - }, - "browserslist": { - "production": [ - ">0.2%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" - ] - } -} diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/public/favicon.ico b/buildscripts/libdeps/graph_visualizer_web_stack/public/favicon.ico deleted file mode 100644 index 9484edb2f18..00000000000 Binary files a/buildscripts/libdeps/graph_visualizer_web_stack/public/favicon.ico and /dev/null differ diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/public/index.html b/buildscripts/libdeps/graph_visualizer_web_stack/public/index.html deleted file mode 100644 index 593946a5ba0..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/public/index.html +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - - - Libdeps Graph - - - - - -
- - - diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/public/manifest.json b/buildscripts/libdeps/graph_visualizer_web_stack/public/manifest.json deleted file mode 100644 index d4885e3d415..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/public/manifest.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "short_name": "Libdeps Graph", - "name": "Libdeps Graph Visualizer Service", - "icons": [ - { - "src": "favicon.ico", - "sizes": "64x64 32x32 24x24 16x16", - "type": "image/x-icon" - } - ], - "start_url": ".", - "display": "standalone", - "theme_color": "#000000", - "background_color": "#ffffff" -} diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/setup_node_env.sh b/buildscripts/libdeps/graph_visualizer_web_stack/setup_node_env.sh deleted file mode 100755 index e680c85499b..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/setup_node_env.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash - -SCRIPTPATH="$( cd "$(dirname "$BASH_SOURCE")" >/dev/null 2>&1 ; pwd -P )" -pushd $SCRIPTPATH > /dev/null - -function quit { - popd > /dev/null -} -trap quit EXIT -trap quit SIGINT -trap quit SIGTERM - -export NVM_DIR="$HOME/.nvm" -if [ -s "$NVM_DIR/nvm.sh" ] -then - \. "$NVM_DIR/nvm.sh" -else - curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | sh - \. "$NVM_DIR/nvm.sh" -fi - -nvm install 14 - -if [ "$1" = "install" ] -then - npm install -fi - -if [ "$1" = "start" ] -then - npm start -fi - -if [ "$1" = "build" ] -then - npm run build -fi - -if [ "$1" = "update" ] -then - set -u - git -C "$NVM_DIR" fetch --tags - TAG=$(git -C "$NVM_DIR" describe --tags `git -C "$NVM_DIR" rev-list --tags --max-count=1`) - echo "Checking out tag $TAG..." - git -C "$NVM_DIR" checkout "$TAG" - - . 
"$NVM_DIR/nvm.sh" -fi -popd > /dev/null diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/AlgorithmExpander.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/AlgorithmExpander.js deleted file mode 100644 index 7473e6be830..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/AlgorithmExpander.js +++ /dev/null @@ -1,105 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import { makeStyles, withStyles } from "@material-ui/core/styles"; -import Typography from "@material-ui/core/Typography"; -import ExpandMoreIcon from "@material-ui/icons/ExpandMore"; -import Paper from "@material-ui/core/Paper"; -import MuiAccordion from "@material-ui/core/Accordion"; -import MuiAccordionSummary from "@material-ui/core/AccordionSummary"; -import MuiAccordionDetails from "@material-ui/core/AccordionDetails"; - -import { getSelected } from "./redux/store"; - -import GraphInfo from "./GraphInfo"; -import GraphPaths from "./GraphPaths"; -import LoadingBar from "./LoadingBar"; - -const useStyles = makeStyles((theme) => ({ - root: { - width: "100%", - }, - heading: { - fontSize: theme.typography.pxToRem(15), - fontWeight: theme.typography.fontWeightRegular, - }, -})); - -const Accordion = withStyles({ - root: { - border: "1px solid rgba(0, 0, 0, .125)", - boxShadow: "none", - "&:not(:last-child)": { - borderBottom: 0, - }, - "&:before": { - display: "none", - }, - "&$expanded": { - margin: "auto", - }, - }, - expanded: {}, -})(MuiAccordion); - -const AccordionSummary = withStyles({ - root: { - backgroundColor: "rgba(0, 0, 0, .03)", - borderBottom: "1px solid rgba(0, 0, 0, .125)", - marginBottom: -1, - minHeight: 56, - "&$expanded": { - minHeight: 56, - }, - }, - content: { - "&$expanded": { - margin: "12px 0", - }, - }, - expanded: {}, -})(MuiAccordionSummary); - -const AccordionDetails = withStyles((theme) => ({ - root: { - padding: theme.spacing(2), - }, -}))(MuiAccordionDetails); - -const AlgorithmExpander = ({ loading, 
width, transPathFrom, transPathTo }) => { - const classes = useStyles(); - - return ( -
- - - - } - aria-controls="panel1a-content" - id="panel1a-header" - > - Counts - - - - - - - } - aria-controls="panel1a-content" - id="panel1a-header" - > - Graph Paths - - - - - - - -
- ); -}; - -export default connect(getSelected)(AlgorithmExpander); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/App.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/App.js deleted file mode 100644 index 26f2e6fa074..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/App.js +++ /dev/null @@ -1,57 +0,0 @@ -import React from "react"; -import SplitPane from "react-split-pane"; - -import theme from "./theme"; - -import GraphCommitDisplay from "./GraphCommitDisplay"; -import GraphInfoTabs from "./GraphInfoTabs"; -import DrawGraph from "./DrawGraph"; - -const resizerStyle = { - background: theme.palette.text.secondary, - width: "1px", - cursor: "col-resize", - margin: "1px", - padding: "1px", - height: "100%", -}; - -const topPaneStyle = { - height: "100vh", - overflow: "visible", -}; - -export default function App() { - const [infosize, setInfosize] = React.useState(450); - const [drawsize, setDrawsize] = React.useState( - window.screen.width - infosize - ); - - React.useEffect(() => { - setInfosize(window.screen.width - drawsize); - }, [drawsize]); - - return ( - - - setDrawsize(window.screen.width - size)} - > - - - - - ); -} diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/DataGrid.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/DataGrid.js deleted file mode 100644 index 2fbc51e720d..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/DataGrid.js +++ /dev/null @@ -1,266 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import clsx from "clsx"; -import { AutoSizer, Column, Table } from "react-virtualized"; -import "react-virtualized/styles.css"; // only needs to be imported once -import { withStyles } from "@material-ui/core/styles"; -import TableCell from "@material-ui/core/TableCell"; -import { Checkbox } from "@material-ui/core"; -import Typography from "@material-ui/core/Typography"; - -import { getRows } from "./redux/store"; -import { 
updateSelected } from "./redux/nodes"; -import { setGraphData } from "./redux/graphData"; -import { setNodeInfos } from "./redux/nodeInfo"; -import { setLinks } from "./redux/links"; -import { setLinksTrans } from "./redux/linksTrans"; - -const {REACT_APP_API_URL} = process.env; - -function componentToHex(c) { - var hex = c.toString(16); - return hex.length == 1 ? "0" + hex : hex; -} - -function rgbToHex(r, g, b) { - return "#" + componentToHex(r) + componentToHex(g) + componentToHex(b); -} - -function hexToRgb(hex) { - // Expand shorthand form (e.g. "03F") to full form (e.g. "0033FF") - var shorthandRegex = /^#?([a-f\d])([a-f\d])([a-f\d])$/i; - hex = hex.replace(shorthandRegex, function (m, r, g, b) { - return r + r + g + g + b + b; - }); - - var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex); - return result - ? { - r: parseInt(result[1], 16), - g: parseInt(result[2], 16), - b: parseInt(result[3], 16), - } - : null; -} - -function incrementPallete(palleteColor, increment) { - var rgb = hexToRgb(palleteColor); - rgb.r += increment; - rgb.g += increment; - rgb.b += increment; - return rgbToHex(rgb.r, rgb.g, rgb.b); -} - -const styles = (theme) => ({ - flexContainer: { - display: "flex", - alignItems: "center", - }, - table: { - // temporary right-to-left patch, waiting for - // https://github.com/bvaughn/react-virtualized/issues/454 - "& .ReactVirtualized__Table__headerRow": { - flip: false, - paddingRight: theme.direction === "rtl" ? 
"0 !important" : undefined, - }, - }, - tableRowOdd: { - backgroundColor: incrementPallete(theme.palette.grey[800], 10), - }, - tableRowEven: { - backgroundColor: theme.palette.grey[800], - }, - tableRowHover: { - "&:hover": { - backgroundColor: theme.palette.grey[600], - }, - }, - tableCell: { - flex: 1, - }, - noClick: { - cursor: "initial", - }, -}); - -const DataGrid = ({ - rowGetter, - rowCount, - nodes, - rowHeight, - headerHeight, - columns, - onNodeClicked, - updateSelected, - classes, - setGraphData, - setLinks, - setLinksTrans, - selectedGraph, - setNodeInfos, - selectedNodes, - searchedNodes, - showTransitive -}) => { - const [checkBoxes, setCheckBoxes] = React.useState([]); - - React.useEffect(() => { - setCheckBoxes(searchedNodes); - }, [searchedNodes]); - - function newGraphData() { - let gitHash = selectedGraph; - if (gitHash) { - let postData = { - "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node), - "transitive_edges": showTransitive - }; - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setGraphData(data.graphData); - setLinks(data.graphData.links); - setLinksTrans(data.graphData.links_trans); - }); - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setNodeInfos(data.nodeInfos); - }); - } - } - - const getRowClassName = ({ index }) => { - return clsx( - index % 2 == 0 ? 
classes.tableRowEven : classes.tableRowOdd, - classes.flexContainer, - { - [classes.tableRowHover]: index !== -1, - } - ); - }; - - const cellRenderer = ({ cellData, columnIndex, rowIndex }) => { - var finalCellData; - var style = { height: rowHeight, padding: "0px" }; - if (cellData == "checkbox") { - style["justifyContent"] = "space-evenly"; - finalCellData = ( - { - setCheckBoxes( - checkBoxes.map((checkbox, index) => { - if (index == rowIndex) { - checkbox.selected = event.target.checked; - } - return checkbox; - }) - ); - if (checkBoxes[rowIndex].selected != event.target.checked) { - updateSelected({ index: rowIndex, value: event.target.checked }); - } - newGraphData(); - }} - /> - ); - } else { - finalCellData = cellData; - } - - return ( - - {finalCellData} - - ); - }; - - const headerRenderer = ({ label, columnIndex }) => { - return ( - - - {label} - - - ); - }; - - return ( - - {({ height, width }) => ( - - {columns.map(({ dataKey, ...other }, index) => { - return ( - - headerRenderer({ - ...headerProps, - columnIndex: index, - }) - } - className={classes.flexContainer} - cellRenderer={cellRenderer} - dataKey={dataKey} - {...other} - /> - ); - })} -
- )} -
- ); -}; - -export default connect(getRows, { updateSelected, setGraphData, setNodeInfos, setLinks, setLinksTrans })( - withStyles(styles)(DataGrid) -); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/DrawGraph.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/DrawGraph.js deleted file mode 100644 index 2e374807997..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/DrawGraph.js +++ /dev/null @@ -1,499 +0,0 @@ -import React, { useRef, useEffect } from "react"; -import * as THREE from "three"; -import { connect } from "react-redux"; -import ForceGraph2D from "react-force-graph-2d"; -import ForceGraph3D from "react-force-graph-3d"; -import SwitchComponents from "./SwitchComponent"; -import Button from "@material-ui/core/Button"; -import TextField from "@material-ui/core/TextField"; -import FormControlLabel from "@material-ui/core/FormControlLabel"; -import Checkbox from "@material-ui/core/Checkbox"; - -import theme from "./theme"; -import { getGraphData } from "./redux/store"; -import { updateCheckbox } from "./redux/nodes"; -import { setFindNode } from "./redux/findNode"; -import { setGraphData } from "./redux/graphData"; -import { setNodeInfos } from "./redux/nodeInfo"; -import { setLinks } from "./redux/links"; -import { setLinksTrans } from "./redux/linksTrans"; -import { setShowTransitive } from "./redux/showTransitive"; -import LoadingBar from "./LoadingBar"; - -const {REACT_APP_API_URL} = process.env; - -const handleFindNode = (node_value, graphData, activeComponent, forceRef) => { - var targetNode = null; - if (graphData) { - for (var i = 0; i < graphData.nodes.length; i++) { - var node = graphData.nodes[i]; - if (node.name == node_value || node.id == node_value) { - targetNode = node; - break; - } - } - if (targetNode != null) { - if (activeComponent == "3D") { - if (forceRef.current != null) { - forceRef.current.centerAt(targetNode.x, targetNode.y, 2000); - forceRef.current.zoom(6, 1000); - } - } else { - const 
distance = 100; - const distRatio = - 1 + distance / Math.hypot(targetNode.x, targetNode.y, targetNode.z); - if (forceRef.current != null) { - forceRef.current.cameraPosition( - { - x: targetNode.x * distRatio, - y: targetNode.y * distRatio, - z: targetNode.z * distRatio, - }, // new position - targetNode, // lookAt ({ x, y, z }) - 3000 // ms transition duration - ); - } - } - } - } -}; - -const DrawGraph = ({ - size, - graphData, - nodes, - links, - loading, - graphPaths, - updateCheckbox, - findNode, - setFindNode, - setGraphData, - setNodeInfos, - selectedGraph, - setLinks, - setLinksTrans, - setShowTransitive, - showTransitive -}) => { - const [activeComponent, setActiveComponent] = React.useState("2D"); - const [pathNodes, setPathNodes] = React.useState({}); - const [pathEdges, setPathEdges] = React.useState([]); - const forceRef = useRef(null); - - const PARTICLE_SIZE = 5; - - React.useEffect(() => { - handleFindNode(findNode, graphData, activeComponent, forceRef); - setFindNode(""); - }, [findNode, graphData, activeComponent, forceRef]); - - React.useEffect(() => { - newGraphData(); - }, [showTransitive]); - - const selectedEdge = links.filter(link => link.selected == true)[0]; - const selectedNodes = nodes.filter(node => node.selected == true).map(node => node.node); - - React.useEffect(() => { - setPathNodes({ fromNode: graphPaths.fromNode, toNode: graphPaths.toNode }); - var paths = Array(); - for (var path = 0; path < graphPaths.paths.length; path++) { - var pathArr = Array(); - for (var i = 0; i < graphPaths.paths[path].length; i++) { - if (i == 0) { - continue; - } - pathArr.push({ - source: graphPaths.paths[path][i - 1], - target: graphPaths.paths[path][i], - }); - } - paths.push(pathArr); - } - setPathEdges(paths); - }, [graphPaths]); - - React.useEffect(() => { - if (forceRef.current != null) { - if (activeComponent == '3D'){ - forceRef.current.d3Force("charge").strength(-2000); - } - else { - forceRef.current.d3Force("charge").strength(-10000); - } 
- - } - }, [forceRef.current, activeComponent]); - - function newGraphData() { - let gitHash = selectedGraph; - if (gitHash) { - let postData = { - "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node), - "transitive_edges": showTransitive - }; - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setGraphData(data.graphData); - setLinks(data.graphData.links); - setLinksTrans(data.graphData.links_trans); - }); - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setNodeInfos(data.nodeInfos); - }); - } - } - - const paintRing = React.useCallback( - (node, ctx) => { - // add ring just for highlighted nodes - ctx.beginPath(); - ctx.arc(node.x, node.y, 7 * 1.4, 0, 2 * Math.PI, false); - if (node.id == pathNodes.fromNode) { - ctx.fillStyle = "blue"; - } else if (node.id == pathNodes.toNode) { - ctx.fillStyle = "red"; - } else { - ctx.fillStyle = "green"; - } - ctx.fill(); - }, - [pathNodes] - ); - - function colorNodes(node) { - switch (node.type) { - case "SharedLibrary": - return "#e6ed11"; // yellow - case "Program": - return "#1120ed"; // blue - case "shim": - return "#800303"; // dark red - default: - return "#5a706f"; // grey - } - } - - function isSameEdge(edgeA, edgeB) { - if (edgeA.source.id && edgeA.target.id) { - if (edgeB.source.id && edgeB.target.id) { - return (edgeA.source.id == edgeB.source.id && - edgeA.target.id == edgeB.target.id); - } - } - if (edgeA.source == edgeB.source && - edgeA.target == edgeB.target) { - return true; - } - return false; - } - - return ( - - - { - handleFindNode( - event.target.value, - graphData, - activeComponent, - forceRef - ); - }} 
- /> - setShowTransitive(!showTransitive) } - />} - label="Show Viewable Transitive Edges" - /> - - { - if (graphPaths.selectedPath >= 0) { - for ( - var i = 0; - i < pathEdges[graphPaths.selectedPath].length; - i++ - ) { - if ( - pathEdges[graphPaths.selectedPath][i].source == d.source.id && - pathEdges[graphPaths.selectedPath][i].target == d.target.id - ) { - return PARTICLE_SIZE; - } - } - } - if (selectedEdge) { - if (isSameEdge(selectedEdge, d)) { - return PARTICLE_SIZE; - } - } - return 0; - }} - linkDirectionalParticleSpeed={(d) => { - return 0.01; - }} - nodeCanvasObjectMode={(node) => { - if (selectedNodes.includes(node.id)) { - return "before"; - } - }} - linkLineDash={(d) => { - if (d.data.direct) { - return []; - } - return [5, 3]; - }} - linkColor={(d) => { - if (selectedEdge) { - if (isSameEdge(selectedEdge, d)) { - return "#ED7811"; - } - } - if (graphPaths.selectedPath >= 0) { - for ( - var i = 0; - i < pathEdges[graphPaths.selectedPath].length; - i++ - ) { - if ( - pathEdges[graphPaths.selectedPath][i].source == d.source.id && - pathEdges[graphPaths.selectedPath][i].target == d.target.id - ) { - return "#12FF19"; - } - } - } - return "#FAFAFA"; - }} - linkDirectionalParticleWidth={6} - linkWidth={(d) => { - if (selectedEdge) { - if (isSameEdge(selectedEdge, d)) { - return 2; - } - } - if (graphPaths.selectedPath >= 0) { - for ( - var i = 0; - i < pathEdges[graphPaths.selectedPath].length; - i++ - ) { - if ( - pathEdges[graphPaths.selectedPath][i].source == d.source.id && - pathEdges[graphPaths.selectedPath][i].target == d.target.id - ) { - return 2; - } - } - } - return 1; - }} - onLinkClick={(link, event) => { - if (selectedEdge) { - if (isSameEdge(selectedEdge, link)) { - setLinks( - links.map((temp_link) => { - temp_link.selected = false; - return temp_link; - }) - ); - return; - } - } - setLinks( - links.map((temp_link, index) => { - if (index == link.index) { - temp_link.selected = true; - } else { - temp_link.selected = false; - } - return 
temp_link; - }) - ); - }} - nodeRelSize={7} - nodeCanvasObject={paintRing} - onNodeClick={(node, event) => { - updateCheckbox({ node: node.id, value: "flip" }); - newGraphData(); - }} - /> - { - if (!selectedNodes.includes(node.id)) { - return new THREE.Mesh( - new THREE.SphereGeometry(5, 5, 5), - new THREE.MeshLambertMaterial({ - color: colorNodes(node), - transparent: true, - opacity: 0.2, - }) - ); - } - }} - onNodeClick={(node, event) => { - updateCheckbox({ node: node.id, value: "flip" }); - newGraphData(); - }} - linkColor={(d) => { - if (graphPaths.selectedPath >= 0) { - for ( - var i = 0; - i < pathEdges[graphPaths.selectedPath].length; - i++ - ) { - if ( - pathEdges[graphPaths.selectedPath][i].source == d.source.id && - pathEdges[graphPaths.selectedPath][i].target == d.target.id - ) { - return "#12FF19"; - } - } - } - if (selectedEdge) { - if (isSameEdge(selectedEdge, d)) { - return "#ED7811"; - } - } - if (d.data.direct == false) { - return "#303030"; - } - return "#FFFFFF"; - }} - linkDirectionalParticleWidth={7} - linkWidth={(d) => { - if (graphPaths.selectedPath >= 0) { - for ( - var i = 0; - i < pathEdges[graphPaths.selectedPath].length; - i++ - ) { - if ( - pathEdges[graphPaths.selectedPath][i].source == d.source.id && - pathEdges[graphPaths.selectedPath][i].target == d.target.id - ) { - return 3; - } - } - } - if (selectedEdge) { - if (isSameEdge(selectedEdge, d)) { - return 3; - } - } - return 1; - }} - linkDirectionalParticles={(d) => { - if (graphPaths.selectedPath >= 0) { - for ( - var i = 0; - i < pathEdges[graphPaths.selectedPath].length; - i++ - ) { - if ( - pathEdges[graphPaths.selectedPath][i].source == d.source.id && - pathEdges[graphPaths.selectedPath][i].target == d.target.id - ) { - return PARTICLE_SIZE; - } - } - } - if (selectedEdge) { - if (isSameEdge(selectedEdge, d)) { - return PARTICLE_SIZE; - } - } - return 0; - }} - linkDirectionalParticleSpeed={(d) => { - return 0.01; - }} - linkDirectionalParticleResolution={10} - 
linkOpacity={0.6} - onLinkClick={(link, event) => { - if (selectedEdge) { - if (isSameEdge(selectedEdge, link)) { - setLinks( - links.map((temp_link) => { - temp_link.selected = false; - return temp_link; - }) - ); - return; - } - } - setLinks( - links.map((temp_link, index) => { - if (index == link.index) { - temp_link.selected = true; - } else { - temp_link.selected = false; - } - return temp_link; - }) - ); - }} - nodeRelSize={7} - backgroundColor={theme.palette.secondary.dark} - linkDirectionalArrowLength={3.5} - linkDirectionalArrowRelPos={1} - ref={forceRef} - /> - - - ); -}; - -export default connect(getGraphData, { setFindNode, updateCheckbox, setGraphData, setNodeInfos, setLinks, setLinksTrans, setShowTransitive })( - DrawGraph -); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/EdgeList.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/EdgeList.js deleted file mode 100644 index 69830ce83b1..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/EdgeList.js +++ /dev/null @@ -1,261 +0,0 @@ -import React, { useState } from "react"; -import clsx from "clsx"; -import { connect } from "react-redux"; -import { getEdges } from "./redux/store"; -import { setFindNode } from "./redux/findNode"; -import { setLinks } from "./redux/links"; -import { setGraphData } from "./redux/graphData"; -import { setSelectedPath } from "./redux/graphPaths"; -import { AutoSizer, Column, Table } from "react-virtualized"; -import TableCell from "@material-ui/core/TableCell"; -import Typography from "@material-ui/core/Typography"; -import Tooltip from '@material-ui/core/Tooltip'; -import GraphPaths from "./GraphPaths"; - -import { makeStyles, withStyles } from "@material-ui/core/styles"; - -import LoadingBar from "./LoadingBar"; -import TextField from "@material-ui/core/TextField"; -import { List, ListItemText, Paper, Button } from "@material-ui/core"; - -const columns = [ - { dataKey: "type", label: "Type", width: 30 }, - { dataKey: "source", 
label: "From", width: 180 }, - { dataKey: "to", label: "➔", width: 40 }, - { dataKey: "target", label: "To", width: 180 }, -]; - -const visibilityTypes = ['Global', 'Public', 'Private', 'Interface']; - -function componentToHex(c) { - var hex = c.toString(16); - return hex.length == 1 ? "0" + hex : hex; - } - - function rgbToHex(r, g, b) { - return "#" + componentToHex(r) + componentToHex(g) + componentToHex(b); - } - - function hexToRgb(hex) { - // Expand shorthand form (e.g. "03F") to full form (e.g. "0033FF") - var shorthandRegex = /^#?([a-f\d])([a-f\d])([a-f\d])$/i; - hex = hex.replace(shorthandRegex, function (m, r, g, b) { - return r + r + g + g + b + b; - }); - - var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex); - return result - ? { - r: parseInt(result[1], 16), - g: parseInt(result[2], 16), - b: parseInt(result[3], 16), - } - : null; - } - - function incrementPallete(palleteColor, increment) { - var rgb = hexToRgb(palleteColor); - rgb.r += increment; - rgb.g += increment; - rgb.b += increment; - return rgbToHex(rgb.r, rgb.g, rgb.b); - } - - const styles = (theme) => ({ - flexContainer: { - display: "flex", - alignItems: "center", - }, - table: { - // temporary right-to-left patch, waiting for - // https://github.com/bvaughn/react-virtualized/issues/454 - "& .ReactVirtualized__Table__headerRow": { - flip: false, - paddingRight: theme.direction === "rtl" ? 
"0 !important" : undefined, - }, - }, - tableRowOdd: { - backgroundColor: incrementPallete(theme.palette.grey[800], 10), - }, - tableRowEven: { - backgroundColor: theme.palette.grey[800], - }, - tableRowHover: { - "&:hover": { - backgroundColor: theme.palette.grey[600], - }, - }, - tableCell: { - flex: 1, - }, - noClick: { - cursor: "initial", - }, - }); - -const EdgeList = ({ selectedGraph, links, setLinks, linksTrans, loading, setFindNode, classes, setTransPath }) => { - const [searchTerm, setSearchTerm] = useState(''); - - const selectedLinks = links.filter(link => link.selected); - - function searchedLinks() { - if (searchTerm == '') { - return links; - } - return links.filter(link => { - if (link.source.name && link.target.name) { - return link.source.name.indexOf(searchTerm) > -1 || link.target.name.indexOf(searchTerm) > -1; - }}); - } - - function handleRowClick(event) { - setLinks( - links.map((temp_link, index) => { - if (index == searchedLinks()[event.index].index) { - temp_link.selected = !temp_link.selected; - } else { - temp_link.selected = false; - } - return temp_link; - }) - ); - setTransPath(event, '', ''); - } - - function handleSearchTermChange(event) { - setSearchTerm(event.target.value); - } - - function reduceNodeName(node) { - if (node.name) { - return node.name; - } - return node.substring(node.lastIndexOf('/') + 1); - } - - const getRowClassName = ({ index }) => { - return clsx( - index % 2 == 0 ? styles.tableRowEven : classes.tableRowOdd, - classes.flexContainer, - { - [classes.tableRowHover]: index !== -1, - } - ); - }; - - const cellRenderer = ({ cellData, columnIndex, rowIndex }) => { - - return ( - - { columnIndex == 0 ? - ( searchedLinks()[rowIndex].data?.direct ? -

D

- : -

T

- ) - : - "" - } - { columnIndex == 1 ? reduceNodeName(searchedLinks()[rowIndex].source) : "" } - { columnIndex == 2 ? (searchedLinks()[rowIndex].selected ? : "➔") : "" } - { columnIndex == 3 ? reduceNodeName(searchedLinks()[rowIndex].target) : "" } -
- ); - }; - - const headerRenderer = ({ label, columnIndex }) => { - return ( - - - {label} - - - ); - }; - - return ( - - event.target.select()} - label="Search for Edge" - /> -
- - {({ height, width }) => ( - searchedLinks()[index].target} - rowHeight={25} - onRowClick={handleRowClick} - gridStyle={{ - direction: "inherit", - }} - size={"small"} - rowClassName={getRowClassName} - headerHeight={35} - > - {columns.map(({ dataKey, ...other }, index) => { - return ( - - headerRenderer({ - ...headerProps, - columnIndex: index, - }) - } - cellRenderer={cellRenderer} - dataKey={dataKey} - {...other} - /> - ); - })} -
- )} -
-
- -
- ); -}; - -export default connect(getEdges, { setGraphData, setFindNode, setLinks, setSelectedPath })(withStyles(styles)(EdgeList)); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/GitHashButton.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/GitHashButton.js deleted file mode 100644 index 02afb784523..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/GitHashButton.js +++ /dev/null @@ -1,93 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import LoadingButton from "@material-ui/lab/LoadingButton"; -import GitIcon from "@material-ui/icons/GitHub"; -import { green, grey } from "@material-ui/core/colors"; - -import { getGraphFiles } from "./redux/store"; -import { setLoading } from "./redux/loading"; -import theme from "./theme"; -import { selectGraphFile } from "./redux/graphFiles"; -import { nodeInfo, setNodeInfos } from "./redux/nodeInfo"; - -const selectedStyle = { - color: theme.palette.getContrastText(green[500]), - backgroundColor: green[500], - "&:hover": { - backgroundColor: green[400], - }, - "&:active": { - backgroundColor: green[700], - }, -}; - -const unselectedStyle = { - color: theme.palette.getContrastText(grey[100]), - backgroundColor: grey[100], - "&:hover": { - backgroundColor: grey[200], - }, - "&:active": { - backgroundColor: grey[400], - }, -}; - -const GitHashButton = ({ loading, graphFiles, setLoading, selectGraphFile, setNodeInfos, text }) => { - const [selected, setSelected] = React.useState(false); - const [selfLoading, setSelfLoading] = React.useState(false); - const [firstLoad, setFirstLoad] = React.useState(true); - - function handleClick() { - const selectedGraphFiles = graphFiles.filter( - (graphFile) => graphFile.selected == true - ); - - if (selectedGraphFiles.length > 0) { - if (selectedGraphFiles[0]["git"] == text) { - return; - } - } - - setSelfLoading(true); - setLoading(true); - selectGraphFile({ - hash: text, - selected: true, - }); - } - - 
React.useEffect(() => { - const selectedGraphFile = graphFiles.filter( - (graphFile) => graphFile.git == text - ); - setSelected(selectedGraphFile[0].selected); - - if (firstLoad && graphFiles.length > 0) { - if (graphFiles[0]["git"] == text) { - handleClick(); - } - setFirstLoad(false); - } - }, [graphFiles]); - - React.useEffect(() => { - if (!loading) { - setSelfLoading(false); - } - }, [loading]); - - return ( - } - variant="contained" - style={selected ? selectedStyle : unselectedStyle} - onClick={handleClick} - > - {text} - - ); -}; - -export default connect(getGraphFiles, { setLoading, selectGraphFile, setNodeInfos })(GitHashButton); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphCommitDisplay.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphCommitDisplay.js deleted file mode 100644 index 86d01e88b87..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphCommitDisplay.js +++ /dev/null @@ -1,78 +0,0 @@ -import React from "react"; -import ScrollContainer from "react-indiana-drag-scroll"; -import { connect } from "react-redux"; -import Table from "@material-ui/core/Table"; -import TableBody from "@material-ui/core/TableBody"; -import TableCell from "@material-ui/core/TableCell"; -import Paper from "@material-ui/core/Paper"; -import TableRow from "@material-ui/core/TableRow"; -import List from "@material-ui/core/List"; -import ListItem from "@material-ui/core/ListItem"; -import TextField from "@material-ui/core/TextField"; - -import { getGraphFiles } from "./redux/store"; -import { setGraphFiles } from "./redux/graphFiles"; - -import GitHashButton from "./GitHashButton"; - -const { REACT_APP_API_URL } = process.env; - -const flexContainer = { - display: "flex", - flexDirection: "row", - padding: 0, - width: "50%", - height: "50%", -}; - -const textFields = [ - "Scroll to commit", - "Commit Range Begin", - "Commit Range End", -]; - -const GraphCommitDisplay = ({ graphFiles, setGraphFiles }) => { - 
React.useEffect(() => { - fetch(REACT_APP_API_URL + "/api/graphs") - .then((res) => res.json()) - .then((data) => { - setGraphFiles(data.graph_files); - }) - .catch((err) => { - /* eslint-disable no-console */ - console.log("Error Reading data " + err); - }); - }, []); - - return ( - - - {textFields.map((text) => ( - - - - ))} - - - - - - {graphFiles.map((file) => ( - - - - ))} - - -
-
-
- ); -}; - -export default connect(getGraphFiles, { setGraphFiles })(GraphCommitDisplay); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphInfo.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphInfo.js deleted file mode 100644 index 3552d26cd2c..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphInfo.js +++ /dev/null @@ -1,66 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/core/styles"; -import Table from "@material-ui/core/Table"; -import TableBody from "@material-ui/core/TableBody"; -import TableCell from "@material-ui/core/TableCell"; -import TableContainer from "@material-ui/core/TableContainer"; -import TableHead from "@material-ui/core/TableHead"; -import TableRow from "@material-ui/core/TableRow"; -import Paper from "@material-ui/core/Paper"; -import { connect } from "react-redux"; -import { getCounts } from "./redux/store"; -import { setCounts } from "./redux/counts"; - -const {REACT_APP_API_URL} = process.env; - -const columns = [ - { id: "ID", field: "type", headerName: "Count Type", width: 50 }, - { field: "value", headerName: "Value", width: 50 }, -]; - -const useStyles = makeStyles({ - table: { - minWidth: 50, - }, -}); - -const GraphInfo = ({ selectedGraph, counts, datawidth, setCounts }) => { - React.useEffect(() => { - let gitHash = selectedGraph; - if (gitHash) { - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/analysis') - .then(response => response.json()) - .then(data => { - setCounts(data.results); - }); - } - }, [selectedGraph]); - - const classes = useStyles(); - - return ( - - - - - {columns.map((column, index) => { - return {column.headerName}; - })} - - - - {counts.map((row) => ( - - - {row.type} - - {row.value} - - ))} - -
-
- ); -}; - -export default connect(getCounts, { setCounts })(GraphInfo); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphInfoTabs.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphInfoTabs.js deleted file mode 100644 index 671a8d775af..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphInfoTabs.js +++ /dev/null @@ -1,78 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/core/styles"; -import AppBar from "@material-ui/core/AppBar"; -import Tabs from "@material-ui/core/Tabs"; -import Tab from "@material-ui/core/Tab"; - -import NodeList from "./NodeList"; -import EdgeList from "./EdgeList"; -import InfoExpander from "./InfoExpander"; -import AlgorithmExpander from "./AlgorithmExpander"; - -function a11yProps(index) { - return { - id: `scrollable-auto-tab-${index}`, - "aria-controls": `scrollable-auto-tabpanel-${index}`, - }; -} - -const useStyles = makeStyles((theme) => ({ - root: { - flexGrow: 1, - width: "100%", - height: "100%", - backgroundColor: theme.palette.background.paper, - }, -})); - -export default function GraphInfoTabs({ nodes, width }) { - const classes = useStyles(); - const [tab, setTab] = React.useState(1); - const [transPathFrom, setTransPathFrom] = React.useState(''); - const [transPathTo, setTransPathTo] = React.useState(''); - - const handleChange = (event, newValue) => { - setTab(newValue); - }; - - const handleTransPath = (event, fromNode, toNode) => { - setTransPathFrom(fromNode); - setTransPathTo(toNode); - if (fromNode != '' && toNode != '') { - setTab(3); - } - }; - - return ( -
- - - - - - - - - - - - -
- ); -} diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphPaths.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphPaths.js deleted file mode 100644 index ed738f62f16..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/GraphPaths.js +++ /dev/null @@ -1,370 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import { FixedSizeList } from "react-window"; -import SplitPane from "react-split-pane"; -import { makeStyles, withStyles } from "@material-ui/core/styles"; -import ListItem from "@material-ui/core/ListItem"; -import ListItemText from "@material-ui/core/ListItemText"; -import Paper from "@material-ui/core/Paper"; -import Typography from "@material-ui/core/Typography"; -import Box from "@material-ui/core/Box"; -import ExpandMoreIcon from "@material-ui/icons/ExpandMore"; -import MuiAccordion from "@material-ui/core/Accordion"; -import MuiAccordionSummary from "@material-ui/core/AccordionSummary"; -import MuiAccordionDetails from "@material-ui/core/AccordionDetails"; -import useResizeAware from "react-resize-aware"; - -import { getSelected } from "./redux/store"; -import { selectedGraphPaths, setSelectedPath } from "./redux/graphPaths"; -import { setGraphData } from "./redux/graphData"; -import { setLinks } from "./redux/links"; -import { setLinksTrans } from "./redux/linksTrans"; - -import OverflowTooltip from "./OverflowTooltip"; - -const {REACT_APP_API_URL} = process.env; - -const rowHeight = 25; - -const Accordion = withStyles({ - root: { - border: "1px solid rgba(0, 0, 0, .125)", - boxShadow: "none", - "&:not(:last-child)": { - borderBottom: 0, - }, - "&:before": { - display: "none", - }, - "&$expanded": { - margin: "auto", - }, - }, - expanded: {}, -})(MuiAccordion); - -const AccordionSummary = withStyles({ - root: { - backgroundColor: "rgba(0, 0, 0, .03)", - borderBottom: "1px solid rgba(0, 0, 0, .125)", - marginBottom: -1, - minHeight: 56, - "&$expanded": { - minHeight: 56, - }, - }, 
- content: { - "&$expanded": { - margin: "12px 0", - }, - }, - expanded: {}, -})(MuiAccordionSummary); - -const AccordionDetails = withStyles((theme) => ({ - root: { - padding: theme.spacing(2), - }, -}))(MuiAccordionDetails); - -const GraphPaths = ({ - nodes, - selectedGraph, - selectedNodes, - graphPaths, - setSelectedPath, - width, - selectedGraphPaths, - setGraphData, - setLinks, - setLinksTrans, - showTransitive, - transPathFrom, - transPathTo -}) => { - const [fromNode, setFromNode] = React.useState(""); - const [toNode, setToNode] = React.useState(""); - const [fromNodeId, setFromNodeId] = React.useState(0); - const [toNodeId, setToNodeId] = React.useState(0); - const [fromNodeExpanded, setFromNodeExpanded] = React.useState(false); - const [toNodeExpanded, setToNodeExpanded] = React.useState(false); - const [paneSize, setPaneSize] = React.useState("50%"); - - const [fromResizeListener, fromSizes] = useResizeAware(); - const [toResizeListener, toSizes] = useResizeAware(); - - const useStyles = makeStyles((theme) => ({ - root: { - width: "100%", - maxWidth: width, - backgroundColor: theme.palette.background.paper, - }, - nested: { - paddingLeft: theme.spacing(4), - }, - listItem: { - width: width, - }, - })); - const classes = useStyles(); - - React.useEffect(() => { - setFromNode(transPathFrom); - setFromNodeExpanded(false); - setToNode(transPathFrom); - setToNodeExpanded(false); - setPaneSize("50%"); - if (transPathFrom != '' && transPathTo != '') { - getGraphPaths(transPathFrom, transPathTo); - } else { - selectedGraphPaths({ - fromNode: '', - toNode: '', - paths: [], - selectedPath: -1 - }); - } - }, [transPathFrom, transPathTo]); - - function getGraphPaths(fromNode, toNode) { - let gitHash = selectedGraph; - if (gitHash) { - let postData = { - "fromNode": fromNode, - "toNode": toNode - }; - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/paths', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: 
JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - selectedGraphPaths(data); - let postData = { - "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node), - "extra_nodes": data.extraNodes, - "transitive_edges": showTransitive - }; - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setGraphData(data.graphData); - setLinks( - data.graphData.links.map((link) => { - if (link.source == fromNode && link.target == toNode) { - link.selected = true; - } else { - link.selected = false; - } - return link; - }) - ); - setLinksTrans(data.graphData.links_trans); - }); - }); - } - } - - function toNodeRow({ index, style, data }) { - return ( - { - setToNode(data[index].name); - setToNodeId(index); - setToNodeExpanded(false); - setPaneSize("50%"); - if (fromNode != "" && data[fromNodeId]) { - getGraphPaths(data[fromNodeId].node, data[index].node); - } - }} - > - - - ); - } - - function fromNodeRow({ index, style, data }) { - return ( - { - setFromNode(data[index].name); - setFromNodeId(index); - setFromNodeExpanded(false); - setPaneSize("50%"); - - if (toNode != "" && data[toNodeId]) { - getGraphPaths(data[fromNodeId].node, data[index].node); - } - }} - > - - - ); - } - - function pathRow({ index, style, data }) { - return ( - { - setSelectedPath(index); - }} - > - - - ); - } - - function listHeight(numItems, minHeight, maxHeight) { - const size = numItems * rowHeight; - if (size > maxHeight) { - return maxHeight; - } - if (size < minHeight) { - return minHeight; - } - return size; - } - - const handleToChange = (panel) => (event, newExpanded) => { - setPaneSize(newExpanded ? "0%" : "50%"); - setToNodeExpanded(newExpanded ? panel : false); - }; - - const handleFromChange = (panel) => (event, newExpanded) => { - setPaneSize(newExpanded ? 
"100%" : "50%"); - setFromNodeExpanded(newExpanded ? panel : false); - }; - - return ( - - - - } - aria-controls="panel1a-content" - id="panel1a-header" - > - - From Node: - - {fromResizeListener} - {fromNode} - - - - - - {fromNodeRow} - - - - - - } - aria-controls="panel1a-content" - id="panel1a-header" - > - - To Node: - - {toResizeListener} - {toNode} - - - - - - {toNodeRow} - - - - - - - Num Paths: {graphPaths.paths.length}{" "} - - - - {pathRow} - - - ); -}; - -export default connect(getSelected, { selectedGraphPaths, setSelectedPath, setGraphData, setLinks, setLinksTrans })( - GraphPaths -); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/InfoExpander.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/InfoExpander.js deleted file mode 100644 index dbe6f579eea..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/InfoExpander.js +++ /dev/null @@ -1,95 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import { makeStyles, withStyles } from "@material-ui/core/styles"; -import Typography from "@material-ui/core/Typography"; -import ExpandMoreIcon from "@material-ui/icons/ExpandMore"; -import Paper from "@material-ui/core/Paper"; -import MuiAccordion from "@material-ui/core/Accordion"; -import MuiAccordionSummary from "@material-ui/core/AccordionSummary"; -import MuiAccordionDetails from "@material-ui/core/AccordionDetails"; - -import { getSelected } from "./redux/store"; - -import GraphInfo from "./GraphInfo"; -import NodeInfo from "./NodeInfo"; -import LoadingBar from "./LoadingBar"; - -const useStyles = makeStyles((theme) => ({ - root: { - width: "100%", - }, - heading: { - fontSize: theme.typography.pxToRem(15), - fontWeight: theme.typography.fontWeightRegular, - }, -})); - -const Accordion = withStyles({ - root: { - border: "1px solid rgba(0, 0, 0, .125)", - boxShadow: "none", - "&:not(:last-child)": { - borderBottom: 0, - }, - "&:before": { - display: "none", - }, - "&$expanded": { - margin: 
"auto", - }, - }, - expanded: {}, -})(MuiAccordion); - -const AccordionSummary = withStyles({ - root: { - backgroundColor: "rgba(0, 0, 0, .03)", - borderBottom: "1px solid rgba(0, 0, 0, .125)", - marginBottom: -1, - minHeight: 56, - "&$expanded": { - minHeight: 56, - }, - }, - content: { - "&$expanded": { - margin: "12px 0", - }, - }, - expanded: {}, -})(MuiAccordionSummary); - -const AccordionDetails = withStyles((theme) => ({ - root: { - padding: theme.spacing(2), - }, -}))(MuiAccordionDetails); - -const InfoExpander = ({ selectedNodes, selectedEdges, loading, width }) => { - const classes = useStyles(); - - return ( -
- - - {selectedNodes.map((node) => ( - - } - aria-controls="panel1a-content" - id="panel1a-header" - > - {node.name} - - - - - - ))} - - -
- ); -}; - -export default connect(getSelected)(InfoExpander); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/LoadingBar.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/LoadingBar.js deleted file mode 100644 index 20b4ca1129e..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/LoadingBar.js +++ /dev/null @@ -1,25 +0,0 @@ -import React from "react"; -import LinearProgress from "@material-ui/core/LinearProgress"; -import Fade from "@material-ui/core/Fade"; - -export default function LoadingBar({ loading, height, children }) { - const dimOnTrue = (flag) => { - return { - opacity: flag ? 0.15 : 1, - height: "100%", - }; - }; - - return ( -
- - - -
{children}
-
- ); -} diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/NodeInfo.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/NodeInfo.js deleted file mode 100644 index 159613c7aab..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/NodeInfo.js +++ /dev/null @@ -1,187 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import { FixedSizeList } from "react-window"; -import { AutoSizer } from "react-virtualized"; -import { makeStyles } from "@material-ui/core/styles"; -import List from "@material-ui/core/List"; -import ListItem from "@material-ui/core/ListItem"; -import ListItemText from "@material-ui/core/ListItemText"; -import Collapse from "@material-ui/core/Collapse"; -import ExpandLess from "@material-ui/icons/ExpandLess"; -import ExpandMore from "@material-ui/icons/ExpandMore"; -import Paper from "@material-ui/core/Paper"; -import Box from "@material-ui/core/Box"; - -import { getNodeInfos } from "./redux/store"; - -import theme from "./theme"; - -import OverflowTooltip from "./OverflowTooltip"; - -const NodeInfo = ({ nodeInfos, node, width }) => { - const useStyles = makeStyles((theme) => ({ - root: { - width: "100%", - maxWidth: width, - backgroundColor: theme.palette.background.paper, - }, - nested: { - paddingLeft: theme.spacing(4), - }, - listItem: { - width: width, - }, - })); - - const rowHeight = 25; - const classes = useStyles(); - const [openDependers, setOpenDependers] = React.useState(false); - const [openDependencies, setOpenDependencies] = React.useState(false); - const [openNodeAttribs, setOpenNodeAttribs] = React.useState(false); - - const [nodeInfo, setNodeInfo] = React.useState({ - id: 0, - node: "test/test.so", - name: "test", - attribs: [{ name: "test", value: "test" }], - dependers: [{ node: "test/test3.so", symbols: [] }], - dependencies: [{ node: "test/test2.so", symbols: [] }], - }); - - React.useEffect(() => { - setNodeInfo(nodeInfos.filter((nodeInfo) => nodeInfo.node == 
node.node)[0]); - }, [nodeInfos]); - - function renderAttribRow({ index, style, data }) { - return ( - - - - - - - ); - } - - function renderNodeRow({ index, style, data }) { - return ( - - - - ); - } - - function listHeight(numItems) { - const size = numItems * rowHeight; - if (size > 350) { - return 350; - } - return size; - } - - if (nodeInfo == undefined) { - return ""; - } - return ( - - - - - - - - - - setOpenNodeAttribs(!openNodeAttribs)}> - - {openNodeAttribs ? : } - - - - - {({ height, width }) => ( - - {renderAttribRow} - - )} - - - - - setOpenDependers(!openDependers)}> - - {openDependers ? : } - - - - - {({ height, width }) => ( - - {renderNodeRow} - - )} - - - - - setOpenDependencies(!openDependencies)}> - - {openDependencies ? : } - - - - - {({ height, width }) => ( - - {renderNodeRow} - - )} - - - - - - ); -}; - -export default connect(getNodeInfos)(NodeInfo); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/NodeList.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/NodeList.js deleted file mode 100644 index 17c73a3cc28..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/NodeList.js +++ /dev/null @@ -1,179 +0,0 @@ -import React from "react"; - -import { connect } from "react-redux"; -import { getNodes } from "./redux/store"; -import { setFindNode } from "./redux/findNode"; - -import DataGrid from "./DataGrid"; -import LoadingBar from "./LoadingBar"; -import TextField from "@material-ui/core/TextField"; - -import { setNodes, updateCheckbox, updateSelected } from "./redux/nodes"; -import { setNodeInfos } from "./redux/nodeInfo"; -import { setGraphData } from "./redux/graphData"; -import { setLinks } from "./redux/links"; -import { setLinksTrans } from "./redux/linksTrans"; -import { setLoading } from "./redux/loading"; -import { setListSearchTerm } from "./redux/listSearchTerm"; -import { Button, Autocomplete, Grid } from "@material-ui/core"; - -const {REACT_APP_API_URL} = process.env; - -const columns = [ - { 
dataKey: "check", label: "Selected", width: 70 }, - { dataKey: "name", label: "Name", width: 200 }, - { id: "ID", dataKey: "node", label: "Node", width: 200 }, -]; - -const NodeList = ({ selectedGraph, nodes, searchedNodes, loading, setFindNode, setNodeInfos, setNodes, setLinks, setLinksTrans, setLoading, setListSearchTerm, updateCheckbox, updateSelected, setGraphData, showTransitive}) => { - const [searchPath, setSearchPath] = React.useState(''); - - React.useEffect(() => { - let gitHash = selectedGraph; - if (gitHash) { - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes') - .then(response => response.json()) - .then(data => { - setNodes(data.nodes.map((node, index) => { - return { - id: index, - node: node, - name: node.substring(node.lastIndexOf('/') + 1), - check: "checkbox", - selected: false, - }; - })); - setLoading(false); - }); - setSearchPath(null); - setListSearchTerm(''); - } - }, [selectedGraph]); - - function newGraphData() { - let gitHash = selectedGraph; - if (gitHash) { - let postData = { - "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node), - "transitive_edges": showTransitive - }; - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setGraphData(data.graphData); - setLinks(data.graphData.links); - setLinksTrans(data.graphData.links_trans); - }); - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - setNodeInfos(data.nodeInfos); - }); - } - } - - function nodePaths() { - const paths = nodes.map(node => node.node.substring(0, node.node.lastIndexOf('/') + 1)); - return [...new Set(paths)]; - } - - function handleRowClick(event) { - 
setFindNode(event.target.textContent); - } - - function handleSelectAll(event) { - searchedNodes.forEach(node => { - updateCheckbox({ node: node.id, value: "flip" }); - updateSelected({ index: node.id, value: true }); - }); - newGraphData(); - } - - function handleDeselectAll(event) { - searchedNodes.forEach(node => { - updateCheckbox({ node: node.id, value: "flip" }); - updateSelected({ index: node.id, value: false }); - }); - newGraphData(); - } - - function handleSearchTermChange(event, newTerm) { - if (newTerm == null) { - setSearchPath(''); - setListSearchTerm(''); - } else { - setSearchPath(newTerm); - setListSearchTerm(newTerm); - } - } - - return ( - - - - - } - /> - - - - - - - - - - - - - - - - ); -}; - -export default connect(getNodes, { setFindNode, setNodes, setNodeInfos, setLinks, setLinksTrans, setLoading, setListSearchTerm, updateCheckbox, updateSelected, setGraphData })(NodeList); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/OverflowTooltip.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/OverflowTooltip.js deleted file mode 100644 index cb76ba40739..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/OverflowTooltip.js +++ /dev/null @@ -1,110 +0,0 @@ -import React, { useRef, useEffect, useState } from "react"; -import { connect } from "react-redux"; -import Tooltip from "@material-ui/core/Tooltip"; -import Fade from "@material-ui/core/Fade"; -import Box from "@material-ui/core/Box"; -import IconButton from "@material-ui/core/IconButton"; -import AddCircleOutline from "@material-ui/icons/AddCircleOutline"; -import Typography from "@material-ui/core/Typography"; - -import { updateCheckbox } from "./redux/nodes"; -import { setGraphData } from "./redux/graphData"; -import { setNodeInfos } from "./redux/nodeInfo"; -import { getGraphData } from "./redux/store"; -import { setLinks } from "./redux/links"; -import { setLinksTrans } from "./redux/linksTrans"; - -const {REACT_APP_API_URL} = process.env; - -const 
OverflowTip = (props) => { - const textElementRef = useRef(null); - const [hoverStatus, setHover] = useState(false); - - const compareSize = (textElementRef) => { - if (textElementRef.current != null) { - const compare = - textElementRef.current.scrollWidth > textElementRef.current.offsetWidth; - setHover(compare); - } - }; - - function newGraphData() { - let gitHash = props.selectedGraph; - if (gitHash) { - let postData = { - "selected_nodes": props.nodes.filter(node => node.selected == true).map(node => node.node), - "transitive_edges": props.showTransitive - }; - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - props.setGraphData(data.graphData); - props.setLinks(data.graphData.links); - props.setLinksTrans(data.graphData.links_trans); - }); - fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(postData) - }) - .then(response => response.json()) - .then(data => { - props.setNodeInfos(data.nodeInfos); - }); - } - } - - useEffect(() => { - compareSize(textElementRef); - window.addEventListener("resize", compareSize); - return function () { - window.removeEventListener("resize", compareSize); - }; - }, [props, textElementRef.current]); - - return ( - - - - {props.button && ( - { - props.updateCheckbox({ node: props.text, value: "flip" }); - newGraphData(); - }} - > - - - )} - {props.text} - - - - ); -}; - -export default connect(getGraphData, { updateCheckbox, setGraphData, setNodeInfos, setLinks, setLinksTrans })(OverflowTip); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/SwitchComponent.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/SwitchComponent.js deleted file mode 100644 index 5d5da96c960..00000000000 --- 
a/buildscripts/libdeps/graph_visualizer_web_stack/src/SwitchComponent.js +++ /dev/null @@ -1,4 +0,0 @@ -export default function SwitchComponents({ active, children }) { - // Switch all children and return the "active" one - return children.filter((child) => child.props.name == active); -} diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/index.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/index.js deleted file mode 100644 index 42533ed3202..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/index.js +++ /dev/null @@ -1,19 +0,0 @@ -import React from "react"; -import ReactDOM from "react-dom/client"; -import { Provider } from "react-redux"; -import CssBaseline from "@material-ui/core/CssBaseline"; -import { ThemeProvider } from "@material-ui/core/styles"; - -import theme from "./theme"; -import store from "./redux/store"; - -import App from "./App"; -const root = ReactDOM.createRoot(document.getElementById("root")); -root.render( - - - - - - -); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/counts.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/counts.js deleted file mode 100644 index eda9fe8327c..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/counts.js +++ /dev/null @@ -1,16 +0,0 @@ -import { initialState } from "./store"; - -export const counts = (state = initialState, action) => { - switch (action.type) { - case "setCounts": - return action.payload; - - default: - return state; - } -}; - -export const setCounts = (counts) => ({ - type: "setCounts", - payload: counts, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/findNode.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/findNode.js deleted file mode 100644 index 742232b950d..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/findNode.js +++ /dev/null @@ -1,16 +0,0 @@ -import { initialState } from "./store"; - -export const findNode = 
(state = initialState, action) => { - switch (action.type) { - case "setFindNode": - return action.payload; - - default: - return state; - } -}; - -export const setFindNode = (node) => ({ - type: "setFindNode", - payload: node, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphData.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphData.js deleted file mode 100644 index b30ff698de6..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphData.js +++ /dev/null @@ -1,16 +0,0 @@ -import { initialState } from "./store"; - -export const graphData = (state = initialState, action) => { - switch (action.type) { - case "setGraphData": - return action.payload; - - default: - return state; - } -}; - -export const setGraphData = (graphData) => ({ - type: "setGraphData", - payload: graphData, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphFiles.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphFiles.js deleted file mode 100644 index d0d4713c9be..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphFiles.js +++ /dev/null @@ -1,30 +0,0 @@ -import { initialState } from "./store"; - -export const graphFiles = (state = initialState, action) => { - switch (action.type) { - case "setGraphFiles": - return action.payload; - case "selectGraphFile": - const newState = state.map((graphFile, index) => { - if (action.payload.hash == graphFile.git) { - graphFile.selected = action.payload.selected; - } else { - graphFile.selected = false; - } - return graphFile; - }); - return newState; - default: - return state; - } -}; - -export const setGraphFiles = (graphFiles) => ({ - type: "setGraphFiles", - payload: graphFiles, -}); - -export const selectGraphFile = (graphFiles) => ({ - type: "selectGraphFile", - payload: graphFiles, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphPaths.js 
b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphPaths.js deleted file mode 100644 index 135c9c30e32..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/graphPaths.js +++ /dev/null @@ -1,23 +0,0 @@ -import { initialState } from "./store"; - -export const graphPaths = (state = initialState, action) => { - switch (action.type) { - case "selectedGraphPaths": - return action.payload; - case "setSelectedPath": - const newState = { ...state, selectedPath: action.payload }; - return newState; - default: - return state; - } -}; - -export const selectedGraphPaths = (pathData) => ({ - type: "selectedGraphPaths", - payload: pathData, -}); - -export const setSelectedPath = (path) => ({ - type: "setSelectedPath", - payload: path, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/links.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/links.js deleted file mode 100644 index 9d47584b7f5..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/links.js +++ /dev/null @@ -1,32 +0,0 @@ -import { initialState } from "./store"; - -export const links = (state = initialState, action) => { - switch (action.type) { - case "addLink": - var arr = Object.assign(state); - return [...arr, action.payload]; - case "setLinks": - return action.payload; - case "updateSelectedLinks": - var newState = Object.assign(state); - newState[action.payload.index].selected = action.payload.value; - return newState; - default: - return state; - } -}; - -export const addLink = (link) => ({ - type: "addLink", - payload: link, -}); - -export const setLinks = (links) => ({ - type: "setLinks", - payload: links, -}); - -export const updateSelectedLinks = (newValue) => ({ - type: "updateSelectedLinks", - payload: newValue, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/linksTrans.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/linksTrans.js deleted file mode 100644 index 
f0cff77a892..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/linksTrans.js +++ /dev/null @@ -1,32 +0,0 @@ -import { initialState } from "./store"; - -export const linksTrans = (state = initialState, action) => { - switch (action.type) { - case "addLinkTrans": - var arr = Object.assign(state); - return [...arr, action.payload]; - case "setLinksTrans": - return action.payload; - case "updateSelectedLinksTrans": - var newState = Object.assign(state); - newState[action.payload.index].selected = action.payload.value; - return newState; - default: - return state; - } -}; - -export const addLinkTrans = (link) => ({ - type: "addLinkTrans", - payload: link, -}); - -export const setLinksTrans = (links) => ({ - type: "setLinksTrans", - payload: links, -}); - -export const updateSelectedLinksTrans = (newValue) => ({ - type: "updateSelectedLinksTrans", - payload: newValue, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/listSearchTerm.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/listSearchTerm.js deleted file mode 100644 index 2d959451125..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/listSearchTerm.js +++ /dev/null @@ -1,16 +0,0 @@ -import { initialState } from "./store"; - -export const listSearchTerm = (state = initialState, action) => { - switch (action.type) { - case "setListSearchTerm": - return action.payload; - - default: - return state; - } -}; - -export const setListSearchTerm = (listSearchTerm) => ({ - type: "setListSearchTerm", - payload: listSearchTerm, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/loading.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/loading.js deleted file mode 100644 index 8b7a9c09c70..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/loading.js +++ /dev/null @@ -1,16 +0,0 @@ -import { initialState } from "./store"; - -export const loading = (state = initialState, action) 
=> { - switch (action.type) { - case "setLoading": - return action.payload; - - default: - return state; - } -}; - -export const setLoading = (loading) => ({ - type: "setLoading", - payload: loading, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/nodeInfo.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/nodeInfo.js deleted file mode 100644 index 381c66f4bc7..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/nodeInfo.js +++ /dev/null @@ -1,18 +0,0 @@ -import { initialState } from "./store"; - -export const nodeInfo = (state = initialState, action) => { - switch (action.type) { - case "setNodeInfos": - return action.payload; - case "addNodeInfo": - return [...state, action.payload]; - - default: - return state; - } -}; - -export const setNodeInfos = (nodeInfos) => ({ - type: "setNodeInfos", - payload: nodeInfos, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/nodes.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/nodes.js deleted file mode 100644 index 755fd17d253..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/nodes.js +++ /dev/null @@ -1,66 +0,0 @@ -import { initialState } from "./store"; - -export const nodes = (state = initialState, action) => { - switch (action.type) { - case "addNode": - var arr = Object.assign(state); - return [...arr, action.payload]; - case "setNodes": - return action.payload; - case "updateSelected": - var newState = Object.assign(state); - newState[action.payload.index].selected = action.payload.value; - return newState; - case "updateCheckbox": - var newState = Object.assign(state); - newState = state.map((stateNode) => { - if (stateNode.node == action.payload.node) { - if (action.payload.value == "flip") { - stateNode.selected = !stateNode.selected; - } else { - stateNode.selected = action.payload.value; - } - } - return stateNode; - }); - return newState; - case "updateCheckboxes": - var newState = 
state.map((stateNode, index) => { - const nodeToUpdate = action.payload.filter( - (node) => stateNode.node == node.node - ); - if (nodeToUpdate.length > 0) { - stateNode.selected = nodeToUpdate[0].value; - } - return stateNode; - }); - return newState; - default: - return state; - } -}; - -export const addNode = (node) => ({ - type: "addNode", - payload: node, -}); - -export const setNodes = (nodes) => ({ - type: "setNodes", - payload: nodes, -}); - -export const updateSelected = (newValue) => ({ - type: "updateSelected", - payload: newValue, -}); - -export const updateCheckbox = (newValue) => ({ - type: "updateCheckbox", - payload: newValue, -}); - -export const updateCheckboxes = (newValue) => ({ - type: "updateCheckboxes", - payload: newValue, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/showTransitive.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/showTransitive.js deleted file mode 100644 index 05e1f727275..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/showTransitive.js +++ /dev/null @@ -1,16 +0,0 @@ -import { initialState } from "./store"; - -export const showTransitive = (state = initialState, action) => { - switch (action.type) { - case "setShowTransitive": - return action.payload; - - default: - return state; - } -}; - -export const setShowTransitive = (showTransitive) => ({ - type: "setShowTransitive", - payload: showTransitive, -}); diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/store.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/store.js deleted file mode 100644 index f12517980b8..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/redux/store.js +++ /dev/null @@ -1,180 +0,0 @@ -import { createStore, combineReducers } from "redux"; -import { nodes } from "./nodes"; -import { graphFiles } from "./graphFiles"; -import { counts } from "./counts"; -import { nodeInfo } from "./nodeInfo"; -import { loading } from "./loading"; 
-import { links } from "./links"; -import { linksTrans } from "./linksTrans"; -import { showTransitive } from "./showTransitive"; -import { graphData } from "./graphData"; -import { findNode } from "./findNode"; -import { graphPaths } from "./graphPaths"; -import { listSearchTerm } from "./listSearchTerm"; - -export const initialState = { - loading: false, - graphFiles: [ - // { id: 0, value: 'graphfile.graphml', version: 1, git: '1234567', selected: false } - ], - nodes: [ - // { id: 0, node: "test/test1.so", name: "test1", check: "checkbox", selected: false } - ], - links: [ - // { source: "test/test1.so", target: "test/test2.so" } - ], - linksTrans: [ - // { source: "test/test1.so", target: "test/test2.so" } - ], - showTransitive: false, - graphData: { - nodes: [ - // {id: 'test/test1.so', name: 'test1.so'}, - // {id: 'test/test2.so', name: 'test2.so'} - ], - links: [ - // {source: 'test/test1.so', target: 'test/test2.so'} - ], - }, - graphPaths: { - fromNode: "test", - toNode: "test", - paths: [ - ["test1", "test2"], - ["test1", "test3", "test2"], - ], - selectedPath: -1, - }, - counts: [{ id: 0, type: "node2", value: 0 }], - findNode: "", - nodeInfo: [ - { - id: 0, - node: "test/test.so", - name: "test", - attribs: [{ name: "test", value: "test" }], - dependers: [{ node: "test/test3.so", symbols: [] }], - dependencies: [{ node: "test/test2.so", symbols: [] }], - }, - ], - listSearchTerm: "", -}; - -export const getCurrentGraphHash = (state) => { - let selectedGraphFiles = state.graphFiles.filter(x => x.selected == true); - let selectedGraph = undefined; - if (selectedGraphFiles.length > 0) { - selectedGraph = selectedGraphFiles[0].git; - } - return selectedGraph; -}; - -export const getLoading = (state) => { - return { loading: state }; -}; - -export const getGraphFiles = (state) => { - return { - loading: state.loading, - graphFiles: state.graphFiles, - }; -}; - -export const getNodeInfos = (state) => { - return { - nodeInfos: state.nodeInfo, - }; -}; - 
-export const getCounts = (state) => { - const counts = state.counts; - return { - selectedGraph: getCurrentGraphHash(state), - counts: state.counts, - }; -}; - -export const getRows = (state) => { - let searchedNodes = state.nodes.filter(node => node.node.indexOf(state.listSearchTerm) > -1); - return { - selectedGraph: getCurrentGraphHash(state), - rowCount: searchedNodes.length, - rowGetter: ({ index }) => searchedNodes[index], - checkBox: ({ index }) => searchedNodes[index].selected, - nodes: state.nodes, - searchedNodes: searchedNodes, - showTransitive: state.showTransitive, - }; -}; - -export const getSelected = (state) => { - return { - selectedGraph: getCurrentGraphHash(state), - selectedNodes: state.nodes.filter((node) => node.selected), - nodes: state.nodes, - links: state.links, - selectedEdges: [], - loading: state.loading, - graphPaths: state.graphPaths, - showTransitive: state.showTransitive, - }; -}; - -export const getNodes = (state) => { - return { - selectedGraph: getCurrentGraphHash(state), - nodes: state.nodes, - loading: state.loading, - listSearchTerm: state.listSearchTerm, - searchedNodes: state.nodes.filter(node => node.node.indexOf(state.listSearchTerm) > -1), - showTransitive: state.showTransitive - }; -}; - -export const getEdges = (state) => { - return { - selectedGraph: getCurrentGraphHash(state), - nodes: state.nodes, - links: state.links, - linksTrans: state.linksTrans, - selectedLinks: state.links.filter(link => link.selected == true), - searchedNodes: state.nodes.filter(node => node.node.indexOf(state.listSearchTerm) > -1), - showTransitive: state.showTransitive, - }; -}; - -export const getGraphData = (state) => { - return { - selectedGraph: getCurrentGraphHash(state), - nodes: state.nodes, - links: state.links, - graphData: state.graphData, - loading: state.loading, - findNode: state.findNode, - graphPaths: state.graphPaths, - showTransitive: state.showTransitive, - }; -}; - -export const getFullState = (state) => { - return { 
state }; -}; - -const store = createStore( - combineReducers({ - nodes, - counts, - nodeInfo, - graphFiles, - loading, - links, - linksTrans, - graphData, - findNode, - graphPaths, - listSearchTerm, - showTransitive - }), - initialState -); -export default store; diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/setupProxy.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/setupProxy.js deleted file mode 100644 index 31f23ca4341..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/setupProxy.js +++ /dev/null @@ -1,16 +0,0 @@ -/** - * This proxy is intended to allow the visualizer to run in a development environment - * which includes SSH tunnels communicating with private remote hosts. - */ - -const { createProxyMiddleware } = require('http-proxy-middleware'); - -module.exports = function(app) { - app.use( - createProxyMiddleware('/api', { - target: 'http://localhost:5000', - changeOrigin: true, - secure: false, - }) - ); -}; diff --git a/buildscripts/libdeps/graph_visualizer_web_stack/src/theme.js b/buildscripts/libdeps/graph_visualizer_web_stack/src/theme.js deleted file mode 100644 index ab7f568301c..00000000000 --- a/buildscripts/libdeps/graph_visualizer_web_stack/src/theme.js +++ /dev/null @@ -1,22 +0,0 @@ -import { green, red, grey } from "@material-ui/core/colors"; -import { createMuiTheme } from "@material-ui/core/styles"; - -// A custom theme for this app -const theme = createMuiTheme({ - palette: { - primary: { - light: green[300], - main: green[500], - dark: green[700], - }, - secondary: { - light: grey[300], - main: grey[500], - dark: grey[800], - darkAccent: "#4d4d4d", - }, - mode: "dark", - }, -}); - -export default theme; diff --git a/buildscripts/libdeps/libdeps/analyzer.py b/buildscripts/libdeps/libdeps/analyzer.py deleted file mode 100644 index 03a29f5bab9..00000000000 --- a/buildscripts/libdeps/libdeps/analyzer.py +++ /dev/null @@ -1,1181 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Libdeps Graph Analysis Tool. - -This will perform various metric's gathering and linting on the -graph generated from SCons generate-libdeps-graph target. The graph -represents the dependency information between all binaries from the build. 
-""" - -import functools -import inspect -import json -import subprocess -import textwrap -from pathlib import Path - -import cxxfilt -import networkx -from libdeps.graph import CountTypes, DependsReportTypes, EdgeProps, LinterTypes, NodeProps - - -class UnsupportedAnalyzer(Exception): - """Thrown when an analyzer is run on a graph with an unsupported schema.""" - - pass - - -# https://stackoverflow.com/a/25959545/1644736 -def get_class_that_defined_method(meth): - """Get the name of the class for given function.""" - - if isinstance(meth, functools.partial): - return get_class_that_defined_method(meth.func) - if inspect.ismethod(meth) or ( - inspect.isbuiltin(meth) - and getattr(meth, "__self__", None) is not None - and getattr(meth.__self__, "__class__", None) - ): - for cls in inspect.getmro(meth.__self__.__class__): - if meth.__name__ in cls.__dict__: - return cls - meth = getattr(meth, "__func__", meth) # fallback to __qualname__ parsing - if inspect.isfunction(meth): - cls = getattr( - inspect.getmodule(meth), - meth.__qualname__.split(".", 1)[0].rsplit(".", 1)[0], - None, - ) - if isinstance(cls, type): - return cls - return getattr(meth, "__objclass__", None) # handle special descriptor objects - - -def parametrized(dec): - """Allow parameters passed to the decorator.""" - - def layer(*args, **kwargs): - def repl(func): - return dec(func, *args, **kwargs) - - return repl - - return layer - - -@parametrized -def schema_check(func, schema_version): - """Check the version for a function against the graph.""" - - def check(*args, **kwargs): - if schema_version <= args[0].graph_schema: - return func(*args, **kwargs) - else: - analyzer = get_class_that_defined_method(func) - if not analyzer: - analyzer = "UnknownAnalyzer" - else: - analyzer = analyzer.__name__ - - raise UnsupportedAnalyzer( - textwrap.dedent(f"""\ - - - ERROR: analysis for '{analyzer}' requires graph schema version '{schema_version}' - but detected graph schema version '{args[0].graph_schema}' - 
""") - ) - - return check - - -class Analyzer: - """Base class for different types of analyzers.""" - - def __init__(self, dependency_graph, progress=True): - """Store the graph and extract the build_dir from the graph.""" - - self.graph_schema = dependency_graph.graph.get("graph_schema_version") - self._dependency_graph = dependency_graph - - self._build_dir = Path(dependency_graph.graph["build_dir"]) - self.deptypes = json.loads(dependency_graph.graph.get("deptypes", "{}")) - self.set_progress(progress) - - @property - def _dependents_graph(self): - if not hasattr(self, "rgraph"): - setattr(self, "rgraph", networkx.reverse_view(self._dependency_graph)) - return self.rgraph - - @_dependents_graph.setter - def _dependents_graph(self, value): - self.rgraph = value - - @property - def _dependency_graph(self): - if not hasattr(self, "graph"): - setattr(self, "graph", networkx.reverse_view(self._dependents_graph)) - return self.graph - - @_dependency_graph.setter - def _dependency_graph(self, value): - self.graph = value - - def get_deptype(self, deptype): - """Call down to loaded graph to get the deptype from name.""" - - return int(self._dependency_graph.get_deptype(deptype)) - - def set_progress(self, value=None): - """Get a progress bar from the loaded graph.""" - - self._progressbar = self._dependency_graph.get_progress(value) - return self._progressbar - - -class Counter(Analyzer): - """Base Counter Analyzer class for various counters.""" - - def number_of_edge_types(self, edge_type, value): - """Count the graphs edges based on type.""" - - return len( - [ - edge - for edge in self._dependency_graph.edges(data=True) - if edge[2].get(edge_type) == value - ] - ) - - def node_type_count(self, node_type, value): - """Count the graphs nodes based on type.""" - - return len( - [ - node - for node in self._dependency_graph.nodes(data=True) - if node[1].get(node_type) == value - ] - ) - - def report(self, report): - """Report the results for the current type.""" - - 
report[self._count_type] = self.run() - - -class NodeCounter(Counter): - """Counts and reports number of nodes in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.NODE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs nodes.""" - - return self._dependency_graph.number_of_nodes() - - -class EdgeCounter(Counter): - """Counts and reports number of edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.EDGE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs edges.""" - - return self._dependency_graph.number_of_edges() - - -class DirectEdgeCounter(Counter): - """Counts and reports number of direct edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.DIR_EDGE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs direct edges.""" - - return self.number_of_edge_types(EdgeProps.direct.name, True) - - -class TransEdgeCounter(Counter): - """Counts and reports number of transitive edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.TRANS_EDGE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs transitive edges.""" - - return self.number_of_edge_types(EdgeProps.direct.name, False) - - -class DirectPubEdgeCounter(Counter): - """Counts and reports number of direct public edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.DIR_PUB_EDGE.name - - @schema_check(schema_version=1) - def run(self): - 
"""Count the graphs direct public edges.""" - return len( - [ - edge - for edge in self._dependency_graph.edges(data=True) - if edge[2].get(EdgeProps.direct.name) - and edge[2].get(EdgeProps.visibility.name) == int(self.get_deptype("Public")) - ] - ) - - -class PublicEdgeCounter(Counter): - """Counts and reports number of public edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.PUB_EDGE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs public edges.""" - - return self.number_of_edge_types(EdgeProps.visibility.name, int(self.get_deptype("Public"))) - - -class PrivateEdgeCounter(Counter): - """Counts and reports number of private edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.PRIV_EDGE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs private edges.""" - - return self.number_of_edge_types( - EdgeProps.visibility.name, int(self.get_deptype("Private")) - ) - - -class InterfaceEdgeCounter(Counter): - """Counts and reports number of interface edges in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.IF_EDGE.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs interface edges.""" - - return self.number_of_edge_types( - EdgeProps.visibility.name, int(self.get_deptype("Interface")) - ) - - -class LibCounter(Counter): - """Counts and reports number of library nodes in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.LIB.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs lib nodes.""" - - return 
self.node_type_count(NodeProps.bin_type.name, "SharedLibrary") - - -class ProgCounter(Counter): - """Counts and reports number of program nodes in the graph.""" - - def __init__(self, dependency_graph): - """Store graph and set type.""" - - super().__init__(dependency_graph) - self._count_type = CountTypes.PROG.name - - @schema_check(schema_version=1) - def run(self): - """Count the graphs program nodes.""" - - return self.node_type_count(NodeProps.bin_type.name, "Program") - - -def counter_factory(dependency_graph, counters, progressbar=True): - """Construct counters from a list of strings.""" - - counter_map = { - CountTypes.NODE.name: NodeCounter, - CountTypes.EDGE.name: EdgeCounter, - CountTypes.DIR_EDGE.name: DirectEdgeCounter, - CountTypes.TRANS_EDGE.name: TransEdgeCounter, - CountTypes.DIR_PUB_EDGE.name: DirectPubEdgeCounter, - CountTypes.PUB_EDGE.name: PublicEdgeCounter, - CountTypes.PRIV_EDGE.name: PrivateEdgeCounter, - CountTypes.IF_EDGE.name: InterfaceEdgeCounter, - CountTypes.LIB.name: LibCounter, - CountTypes.PROG.name: ProgCounter, - } - - if not isinstance(counters, list): - counters = [counters] - - counter_objs = [] - if CountTypes.ALL.name in counters: - for counter in counter_map: - counter_obj = counter_map[counter](dependency_graph) - counter_obj.set_progress(progressbar) - counter_objs.append(counter_obj) - else: - for counter in counters: - if counter in counter_map: - counter_obj = counter_map[counter](dependency_graph) - counter_obj.set_progress(progressbar) - counter_objs.append(counter_obj) - else: - print(f"Skipping unknown counter: {counter}") - - return counter_objs - - -class CommonDependents(Analyzer): - """Finds common dependent nodes for a set of given dependency nodes.""" - - def __init__(self, dependency_graph, nodes): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._nodes = nodes - - @schema_check(schema_version=1) - def run(self): - """For a given set of nodes, report what nodes depend on 
all nodes from that set.""" - - neighbor_sets = [set(self._dependents_graph[node]) for node in self._nodes] - return sorted(list(set.intersection(*neighbor_sets))) - - def report(self, report): - """Add the common depends list for this tuple of nodes.""" - - if DependsReportTypes.COMMON_DEPENDS.name not in report: - report[DependsReportTypes.COMMON_DEPENDS.name] = {} - report[DependsReportTypes.COMMON_DEPENDS.name][tuple(self._nodes)] = self.run() - - -class DirectDependents(Analyzer): - """Finds direct dependent nodes for a given dependency node.""" - - def __init__(self, dependency_graph, node): - """Store graph and strip the node.""" - - super().__init__(dependency_graph) - self._node = node - - @schema_check(schema_version=1) - def run(self): - """For given nodes, report what nodes depend directly on that node.""" - - return sorted( - [ - depender - for depender in self._dependents_graph[self._node] - if self._dependents_graph[self._node][depender].get(EdgeProps.direct.name) - ] - ) - - def report(self, report): - """Add the direct depends list for this node.""" - - if DependsReportTypes.DIRECT_DEPENDS.name not in report: - report[DependsReportTypes.DIRECT_DEPENDS.name] = {} - report[DependsReportTypes.DIRECT_DEPENDS.name][self._node] = self.run() - - -class TransitiveProgramDependents(Analyzer): - """Finds all program dependents for a given dependency node.""" - - def __init__(self, dependency_graph, node): - """Store graph and strip the node.""" - - super().__init__(dependency_graph) - self._node = node - - @schema_check(schema_version=1) - def run(self): - """For given node, report what nodes depend directly or transitively on that node.""" - all_reachable_nodes = networkx.single_source_shortest_path_length( - self._dependents_graph, self._node - ).keys() - return sorted( - [ - node - for node in all_reachable_nodes - if self._dependents_graph.nodes[node].get(NodeProps.bin_type.name) == "Program" - ] - ) - - def report(self, report): - """Add the direct or 
transitive depends list for this node.""" - if DependsReportTypes.PROGRAM_DEPENDS.name not in report: - report[DependsReportTypes.PROGRAM_DEPENDS.name] = {} - report[DependsReportTypes.PROGRAM_DEPENDS.name][self._node] = self.run() - - -class ExcludeDependents(Analyzer): - """Finds dependents which depend on the first input node, but exclude the other input nodes.""" - - def __init__(self, dependency_graph, nodes): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._nodes = nodes - - @schema_check(schema_version=1) - def run(self): - """Find depends with exclusions. - - Given a node, and a set of other nodes, find what nodes depend on the given - node, but do not depend on the set of nodes. - """ - - valid_depender_nodes = [] - for depender_node in set(self._dependents_graph[self._nodes[0]]): - if all( - bool(excludes_node not in set(self._dependency_graph[depender_node])) - for excludes_node in self._nodes[1:] - ): - valid_depender_nodes.append(depender_node) - return sorted(valid_depender_nodes) - - def report(self, report): - """Add the exclude depends list for this tuple of nodes.""" - - if DependsReportTypes.EXCLUDE_DEPENDS.name not in report: - report[DependsReportTypes.EXCLUDE_DEPENDS.name] = {} - report[DependsReportTypes.EXCLUDE_DEPENDS.name][tuple(self._nodes)] = self.run() - - -class InDegreeOne(Analyzer): - """ - Finds library nodes which have 1 or 0 dependers. - - Such libraries are good candidates for merging or deletion. 
- """ - - @schema_check(schema_version=1) - def run(self): - """Search the graph for in degree 1 or 0 nodes.""" - - in_degree_one_nodes = [] - for node, data in self._dependency_graph.nodes(data=True): - if ( - len(self._dependents_graph[node]) < 2 - and data.get(NodeProps.bin_type.name) == "SharedLibrary" - ): - if len(self._dependents_graph[node]) == 1: - depender = list(self._dependents_graph[node].items())[0][0] - else: - depender = None - - in_degree_one_nodes.append([node, depender]) - - return sorted(in_degree_one_nodes) - - def report(self, report): - """Add the indegree one list to the report.""" - - report[DependsReportTypes.IN_DEGREE_ONE.name] = self.run() - - -class BazelConversionCandidates(Analyzer): - """ - Finds nodes ready for bazel conversion. - - This effectively means that they are not currently being built with bazel and - do not have any dependencies that are not being built with bazel. - - Such nodes are ready to be built as bazel targets. - """ - - def get_bazel_converted_scons_targets(self): - # Extract a list of all bazel targets from the root of the tree. - - # Note: //... is the bazel catch-all for referencing all targets in that directory. For - # example, //src/... will expand to include all targets under //src/. - proc = subprocess.run( - ["/tmp/bazelisk", "query", "//..."], capture_output=True, text=True, check=True - ) - - # "bazel query" outputs how many packages were loaded in addition to the targets. - # Ignore lines not starting with // to skip over that line. - targets = [ - self.simplify_bazel_target(line) - for line in proc.stdout.split("\n") - if line.startswith("//") - ] - return targets - - def simplify_bazel_target(self, bazel_target: str): - # Remove leading // and "src" to make comparison with scons targets simpler. 
- bazel_target = bazel_target.lstrip("/") - if bazel_target.startswith("src/"): - bazel_target = bazel_target[4:] - return bazel_target - - def scons_target_to_bazel(self, scons_target: str): - # Remove library extensions, "lib" prefix, and replace final / with : to make it possible - # to compare scons target strings with bazel target strings. - if scons_target.endswith(".so") or scons_target.endswith(".a"): - scons_target = scons_target.rsplit(".", 1)[0] - scons_target = ":".join(scons_target.rsplit("/lib", 1)) - else: - scons_target = ":".join(scons_target.rsplit("/", 1)) - return scons_target - - @schema_check(schema_version=1) - def run(self): - """Finds bazel conversion candidate nodes.""" - - # Exclude counting dependencies that already have bazel targets. - bazelfied_scons_targets = set(self.get_bazel_converted_scons_targets()) - - candidate_nodes = [] - for node, _ in self._dependency_graph.nodes(data=True): - if self.scons_target_to_bazel(node) not in bazelfied_scons_targets: - non_bazelfied_deps = list( - filter( - lambda dep: self.scons_target_to_bazel(dep) not in bazelfied_scons_targets, - self._dependency_graph[node], - ) - ) - if len(non_bazelfied_deps) == 0: - candidate_nodes.append(node) - - return sorted(candidate_nodes) - - def report(self, report): - """Adds scons target list to the report.""" - - report[DependsReportTypes.BAZEL_CONV_CANDIDATES.name] = self.run() - - -class GraphPaths(Analyzer): - """Finds all paths between two nodes in the graph.""" - - def __init__(self, dependency_graph, from_node, to_node): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._from_node, self._to_node = from_node, to_node - - @schema_check(schema_version=1) - def run(self): - """Find all paths between the two nodes in the graph.""" - - # We can really help out networkx path finding algorithm by striping the graph down to - # just a graph containing only paths between the source and target node. 
This is done by - # getting a subtree from the target down, and then getting a subtree of that tree from the - # source up. - dependents_tree = self._dependents_graph.get_direct_nonprivate_graph().get_node_tree( - self._to_node - ) - - if self._from_node not in dependents_tree: - return [] - - path_tree = networkx.reverse_view(dependents_tree).get_node_tree(self._from_node) - return list( - networkx.all_simple_paths(G=path_tree, source=self._from_node, target=self._to_node) - ) - - def report(self, report): - """Add the path list to the report.""" - - if DependsReportTypes.GRAPH_PATHS.name not in report: - report[DependsReportTypes.GRAPH_PATHS.name] = {} - report[DependsReportTypes.GRAPH_PATHS.name][tuple([self._from_node, self._to_node])] = ( - self.run() - ) - - -class SymbolDependents(Analyzer): - """Find all symbol dependents between the two nodes in the graph.""" - - def __init__(self, dependency_graph, from_node, to_node): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._from_node, self._to_node = from_node, to_node - - @schema_check(schema_version=1) - def run(self): - """Find all symbol dependents between the two nodes in the graph.""" - - edge = self._dependents_graph.get_edge_data(u=self._from_node, v=self._to_node) - if "symbols" in edge: - return edge["symbols"].split() - return [] - - def report(self, report): - """Add the symbol dependents list to the report.""" - - if DependsReportTypes.SYMBOL_DEPENDS.name not in report: - report[DependsReportTypes.SYMBOL_DEPENDS.name] = {} - report[DependsReportTypes.SYMBOL_DEPENDS.name][tuple([self._from_node, self._to_node])] = ( - self.run() - ) - - -class Efficiency(Analyzer): - """Find efficiency of each public dependency originating from each node in a given set.""" - - def __init__(self, dependency_graph, from_nodes): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._from_nodes = from_nodes - - @schema_check(schema_version=1) - def 
run(self): - """Find efficiency of each public dependency originating from a node.""" - - efficiencies_data = {} - - for node_a in self._from_nodes: - efficiency_data = {} - - for node_b in self._dependency_graph[node_a]: - edge = self._dependency_graph.get_edge_data(u=node_a, v=node_b) - - if edge["direct"] and edge["visibility"] == 1: - needed, not_needed = [], [] - - for node_x in self._dependency_graph[node_b]: - edge = self._dependency_graph.get_edge_data(u=node_b, v=node_x) - - if not edge["direct"] and "symbols" in edge: - needed.append(node_x) - elif not edge["direct"]: - not_needed.append(node_x) - - total_count = len(needed) + len(not_needed) - - efficiency_data[node_b] = { - "needed": needed, - "not_needed": not_needed, - "count_needed": len(needed), - "count_not_needed": len(not_needed), - "count_total": total_count, - "efficiency": len(needed) / (total_count or 1), - } - efficiencies_data[node_a] = efficiency_data - - return efficiencies_data - - def report(self, report): - """Add the public libdeps efficiency of each input node to the report.""" - - if DependsReportTypes.EFFICIENCY.name not in report: - report[DependsReportTypes.EFFICIENCY.name] = {} - report[DependsReportTypes.EFFICIENCY.name] = self.run() - - -class EfficiencyLinter(Analyzer): - """Analyze efficiency of all public dependencies. 
List those with efficiencies under a given threshold.""" - - def __init__(self, dependency_graph, threshold=2): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._threshold = threshold - - @schema_check(schema_version=1) - def run(self): - """Find efficiency of all public dependencies in graph.""" - - data = {} - result = Efficiency(self._dependency_graph, self._dependency_graph.nodes).run() - for node_a in result: - for node_b in result[node_a]: - data[tuple([node_a, node_b])] = result[node_a][node_b] - - efficiencies = list(x["efficiency"] for x in data.values()) - - efficiencies_product = 1 - for efficiency in efficiencies: - efficiencies_product *= efficiency + 1 - efficiencies_geo_mean = (efficiencies_product ** (1 / len(efficiencies))) - 1 - - edges_zero = list(filter(lambda x: data[x]["efficiency"] == 0, data)) - edges_lt_threshold = list( - filter( - lambda x: data[x]["efficiency"] < (self._threshold / 100) - and data[x]["efficiency"] > 0, - data, - ) - ) - edges_lt_threshold = sorted(edges_lt_threshold, key=lambda x: data[x]["efficiency"]) - - return { - "threshold": self._threshold, - "edge_data": data, - "edges_lt_threshold": edges_lt_threshold, - "count_lt_threshold": len(edges_lt_threshold), - "edges_zero": edges_zero, - "count_zero": len(edges_zero), - "mean": round(efficiencies_geo_mean, 3), - } - - def report(self, report): - """Add efficiency lint result to report.""" - - if LinterTypes.EFFICIENCY_LINT.name not in report: - report[LinterTypes.EFFICIENCY_LINT.name] = {} - report[LinterTypes.EFFICIENCY_LINT.name] = self.run() - - -class CriticalEdges(Analyzer): - """Finds all edges between two nodes, where removing those edges disconnects the two nodes.""" - - def __init__(self, dependency_graph, from_node, to_node): - """Store graph and strip the nodes.""" - - super().__init__(dependency_graph) - self._from_node, self._to_node = from_node, to_node - - @schema_check(schema_version=1) - def run(self): - """Use 
networkx min cut algorithm to find a set of edges.""" - - from networkx.algorithms.connectivity import minimum_st_edge_cut - - # The min cut algorithm will get the min cut nearest the end - # of the direction of the graph, so we we use the reverse graph - # so that we get a cut nearest our from_node, or the first cut we - # would encounter on a given path from the from_node to the to_node. - subgraph = self._dependents_graph.get_direct_nonprivate_graph().get_node_tree(self._to_node) - if subgraph.has_node(self._from_node): - min_cut_edges = list( - minimum_st_edge_cut(G=subgraph, s=self._to_node, t=self._from_node) - ) - else: - min_cut_edges = [] - return [(edge[1], edge[0]) for edge in min_cut_edges] - - def report(self, report): - """Add the critical edges to report.""" - - if DependsReportTypes.CRITICAL_EDGES.name not in report: - report[DependsReportTypes.CRITICAL_EDGES.name] = {} - report[DependsReportTypes.CRITICAL_EDGES.name][tuple([self._from_node, self._to_node])] = ( - self.run() - ) - - -class UnusedPublicLinter(Analyzer): - """Lints the graph for any public libdeps that are unused in all resulting transitive edges.""" - - def _check_edge_no_symbols(self, edge, original_nodes, checked_edges): - """Check the edge's transitive tree and made sure no edges have symbols.""" - - if edge not in checked_edges: - checked_edges.add(edge) - original_node = edge[0] - depender = edge[1] - try: - edge_attribs = self._dependents_graph[original_node][depender] - - if edge_attribs.get(EdgeProps.visibility.name) == int( - self.get_deptype("Public") - ) or edge_attribs.get(EdgeProps.visibility.name) == int( - self.get_deptype("Interface") - ): - if not edge_attribs.get(EdgeProps.symbols.name): - if not self._tree_uses_no_symbols(depender, original_nodes, checked_edges): - return False - else: - return False - except KeyError: - pass - - return True - - def _tree_uses_no_symbols(self, node, original_nodes, checked_edges): - """Recursive walk for a public node. 
- - Walk the dependency tree for a given Public node, and check if all edges - in that tree do not have symbol dependencies. - """ - - for depender in self._dependents_graph[node]: - for original_node in original_nodes: - edge = (original_node, depender) - if not self._check_edge_no_symbols(edge, original_nodes, checked_edges): - return False - return True - - def _check_trans_nodes_no_symbols(self, edge, trans_pub_nodes): - """Check the edge against the transitive nodes for symbols.""" - - for trans_node in self._dependency_graph[edge[0]]: - if self._dependency_graph[edge[0]][trans_node].get(EdgeProps.visibility.name) == int( - self.get_deptype("Public") - ) or self._dependency_graph[edge[0]][trans_node].get(EdgeProps.visibility.name) == int( - self.get_deptype("Interface") - ): - trans_pub_nodes.add(trans_node) - try: - if self._dependents_graph[trans_node][edge[1]].get(EdgeProps.symbols.name): - return True - except KeyError: - pass - return False - - @schema_check(schema_version=1) - def run(self): - """Run the unused public linter. - - Run the linter to check for and PUBLIC libdeps which are - unnecessary and can be converted to PRIVATE. - """ - - unused_public_libdeps = [] - checked_edges = set() - - for edge in self._dependents_graph.edges: - edge_attribs = self._dependents_graph[edge[0]][edge[1]] - - if ( - edge_attribs.get(EdgeProps.direct.name) - and edge_attribs.get(EdgeProps.visibility.name) == int(self.get_deptype("Public")) - and self._dependents_graph.nodes()[edge[1]].get(NodeProps.bin_type.name) - == "SharedLibrary" - ): - # First we will get all the transitive libdeps the dependent node - # induces, while we are getting those we also check if the depender - # node has any symbol dependencies to that transitive libdep. 
- trans_pub_nodes = set([edge[0]]) - found_symbols = self._check_trans_nodes_no_symbols(edge, trans_pub_nodes) - - # If the depender node has no symbol dependencies on the induced libdeps, - # then we will walk up the tree for the depender node, checking if any of the - # induced dependencies have symbols. If there are no simples between all transitive - # edges from this direct public libdep, its safe to change it to public. - if not found_symbols and self._tree_uses_no_symbols( - edge[1], list(trans_pub_nodes), checked_edges - ): - unused_public_libdeps.append((edge[0], edge[1])) - - return unused_public_libdeps - - def report(self, report): - """Report the lint issies.""" - - report[LinterTypes.PUBLIC_UNUSED.name] = self.run() - - -def linter_factory(dependency_graph, linters, progressbar=True): - """Construct linters from a list of strings.""" - - linter_map = { - LinterTypes.PUBLIC_UNUSED.name: UnusedPublicLinter, - } - - if not isinstance(linters, list): - linters = [linters] - - linters_objs = [] - for linter in linters: - if linter in linter_map: - linters_objs.append(linter_map[linter](dependency_graph, progressbar)) - else: - print(f"Skipping unknown counter: {linter}") - - return linters_objs - - -class BuildDataReport(Analyzer): - """Adds the build and graph meta data to the report.""" - - @schema_check(schema_version=1) - def report(self, report): - """Add the build data from the graph to the report.""" - - report["invocation"] = self._dependency_graph.graph.get("invocation") - report["git_hash"] = self._dependency_graph.graph.get("git_hash") - report["graph_schema_version"] = self._dependency_graph.graph.get("graph_schema_version") - - -class LibdepsGraphAnalysis: - """Runs the given analysis on the input graph.""" - - def __init__(self, analysis): - """Perform analysis based off input args.""" - - self._results = {} - for analyzer in analysis: - analyzer.report(self._results) - - def get_results(self): - """Return the results fo the analysis.""" - - 
return self._results - - def run_linters(self, linters): - """Run the various dependency reports.""" - - if LinterTypes.PUBLIC_UNUSED.name in linters: - self.results[LinterTypes.PUBLIC_UNUSED.name] = self.libdeps_graph.unused_public_linter() - - -class GaPrinter: - """Base class for printers of the graph analysis.""" - - def __init__(self, libdeps_graph_analysis): - """Store the graph analysis for use when printing.""" - - self._libdeps_graph_analysis = libdeps_graph_analysis - - -class GaJsonPrinter(GaPrinter): - """Printer for json output.""" - - def serialize(self, dictionary): - """Serialize the k,v pairs in the dictionary.""" - - new = {} - for key, value in dictionary.items(): - if isinstance(value, dict): - value = self.serialize(value) - new[str(key)] = value - return new - - def print(self): - """Print the result data.""" - - print(self.get_json()) - - def get_json(self): - """Return the results as a JSON string.""" - - results = self._libdeps_graph_analysis.get_results() - return json.dumps(self.serialize(results)) - - -class GaPrettyPrinter(GaPrinter): - """Printer for pretty console output.""" - - _count_descs = { - CountTypes.NODE.name: "Nodes in Graph: {}", - CountTypes.EDGE.name: "Edges in Graph: {}", - CountTypes.DIR_EDGE.name: "Direct Edges in Graph: {}", - CountTypes.TRANS_EDGE.name: "Transitive Edges in Graph: {}", - CountTypes.DIR_PUB_EDGE.name: "Direct Public Edges in Graph: {}", - CountTypes.PUB_EDGE.name: "Public Edges in Graph: {}", - CountTypes.PRIV_EDGE.name: "Private Edges in Graph: {}", - CountTypes.IF_EDGE.name: "Interface Edges in Graph: {}", - CountTypes.LIB.name: "Library Nodes in Graph: {}", - CountTypes.PROG.name: "Program Nodes in Graph: {}", - } - - @staticmethod - def _print_results_node_list(heading, nodes): - """Util function for printing a list of nodes for depend reports.""" - - print(heading) - for i, depender in enumerate(nodes, start=1): - print(f" {i}: {depender}") - print("") - - def _print_depends_reports(self, 
results): - """Print the depends reports result data.""" - - if DependsReportTypes.DIRECT_DEPENDS.name in results: - print("\nNodes that directly depend on:") - for node in results[DependsReportTypes.DIRECT_DEPENDS.name]: - self._print_results_node_list( - f"=>depends on {node}:", results[DependsReportTypes.DIRECT_DEPENDS.name][node] - ) - - if DependsReportTypes.PROGRAM_DEPENDS.name in results: - print("\nPrograms that depend on:") - for node in results[DependsReportTypes.PROGRAM_DEPENDS.name]: - self._print_results_node_list( - f"=>depends on {node}:", results[DependsReportTypes.PROGRAM_DEPENDS.name][node] - ) - - if DependsReportTypes.COMMON_DEPENDS.name in results: - print("\nNodes that commonly depend on:") - for nodes in results[DependsReportTypes.COMMON_DEPENDS.name]: - self._print_results_node_list( - f"=>depends on {nodes}:", results[DependsReportTypes.COMMON_DEPENDS.name][nodes] - ) - - if DependsReportTypes.EXCLUDE_DEPENDS.name in results: - print("\nNodes that depend on a node, but exclude others:") - for nodes in results[DependsReportTypes.EXCLUDE_DEPENDS.name]: - self._print_results_node_list( - f"=>depends: {nodes[0]}, exclude: {nodes[1:]}:", - results[DependsReportTypes.EXCLUDE_DEPENDS.name][nodes], - ) - - if DependsReportTypes.GRAPH_PATHS.name in results: - print("\nDependency graph paths:") - for nodes in results[DependsReportTypes.GRAPH_PATHS.name]: - self._print_results_node_list( - f"=>start node: {nodes[0]}, end node: {nodes[1]}:", - [ - f"{' -> '.join(path)}" - for path in results[DependsReportTypes.GRAPH_PATHS.name][nodes] - ], - ) - - if DependsReportTypes.CRITICAL_EDGES.name in results: - print("\nCritical Edges:") - for nodes in results[DependsReportTypes.CRITICAL_EDGES.name]: - self._print_results_node_list( - f"=>critical edges between {nodes[0]} and {nodes[1]}:", - results[DependsReportTypes.CRITICAL_EDGES.name][nodes], - ) - - if DependsReportTypes.IN_DEGREE_ONE.name in results: - print("\nLibrary nodes with 1 or 0 dependers:") - for 
count, nodes in enumerate(results[DependsReportTypes.IN_DEGREE_ONE.name], start=1): - print(f" {count}: '{nodes[0]}' <- '{nodes[1]}'") - - if DependsReportTypes.SYMBOL_DEPENDS.name in results: - print("\nSymbol dependents:") - for nodes in results[DependsReportTypes.SYMBOL_DEPENDS.name]: - symbols = results[DependsReportTypes.SYMBOL_DEPENDS.name][nodes] - print( - f"{len(symbols)} symbols defined in '{nodes[0]}' which are used in '{nodes[1]}'" - ) - for symbol in symbols: - print(f"\t{cxxfilt.demangle(symbol)}") - - if DependsReportTypes.EFFICIENCY.name in results: - for from_node in results[DependsReportTypes.EFFICIENCY.name]: - print("\nEfficiency of all public direct edges on " + from_node + ":") - data = results[DependsReportTypes.EFFICIENCY.name][from_node] - for to_node in data: - print( - "[ " - + str(round(data[to_node]["efficiency"] * 100, 1)) - + "% ] " - + from_node - + " -> " - + to_node - ) - - if DependsReportTypes.BAZEL_CONV_CANDIDATES.name in results: - print("\nNon-bazelfied nodes with no non-bazelfied dependencies:") - for node in results[DependsReportTypes.BAZEL_CONV_CANDIDATES.name]: - print(f"\t{node}") - - if LinterTypes.EFFICIENCY_LINT.name in results: - data = results[LinterTypes.EFFICIENCY_LINT.name] - print("\nLibdepsLinter: Efficiency of Direct Public Edges") - - print(f" Geometric Mean: {round(data['mean'] * 100, 1)}%") - - print(f" 0%: {data['count_zero']} edges") - for edge in data["edges_zero"]: - print( - f" [ {str(round(data['edge_data'][edge]['efficiency'] * 100, 2))}% ] {edge[0]} -> {edge[1]}" - ) - - print(f" 0-{data['threshold']}%: {data['count_lt_threshold']} edges") - for edge in data["edges_lt_threshold"]: - print( - f" [ {str(round(data['edge_data'][edge]['efficiency'] * 100, 2))}% ] {edge[0]} -> {edge[1]}" - ) - - def print(self): - """Print the result data.""" - results = self._libdeps_graph_analysis.get_results() - - if "invocation" in results: - print( - textwrap.dedent(f"""\ - - Graph built from git hash: - 
{results['git_hash']} - - Graph Schema version: - {results['graph_schema_version']} - - Build invocation: - {results['invocation']} - """) - ) - - for count_type in CountTypes.__members__.items(): - if count_type[0] in self._count_descs and count_type[0] in results: - print(self._count_descs[count_type[0]].format(results[count_type[0]])) - - self._print_depends_reports(results) - - if LinterTypes.PUBLIC_UNUSED.name in results: - print( - f"\nLibdepsLinter: PUBLIC libdeps that could be PRIVATE: {len(results[LinterTypes.PUBLIC_UNUSED.name])}" - ) - for issue in sorted( - results[LinterTypes.PUBLIC_UNUSED.name], key=lambda item: item[1] + item[0] - ): - print(f" {issue[1]}: PUBLIC -> {issue[0]} -> PRIVATE") diff --git a/buildscripts/libdeps/libdeps/graph.py b/buildscripts/libdeps/libdeps/graph.py deleted file mode 100755 index f11c40cb90c..00000000000 --- a/buildscripts/libdeps/libdeps/graph.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Libdeps Graph Enums. - -These are used for attributing data across the build scripts and analyzer scripts. -""" - -import json -from enum import Enum, auto - -import networkx - -try: - import progressbar -except ImportError: - pass - - -class CountTypes(Enum): - """Enums for the different types of counts to perform on a graph.""" - - ALL = auto() - NODE = auto() - EDGE = auto() - DIR_EDGE = auto() - TRANS_EDGE = auto() - DIR_PUB_EDGE = auto() - PUB_EDGE = auto() - PRIV_EDGE = auto() - IF_EDGE = auto() - PROG = auto() - LIB = auto() - - -class DependsReportTypes(Enum): - """Enums for the different type of depends reports to perform on a graph.""" - - DIRECT_DEPENDS = auto() - PROGRAM_DEPENDS = auto() - COMMON_DEPENDS = auto() - EXCLUDE_DEPENDS = auto() - GRAPH_PATHS = auto() - CRITICAL_EDGES = auto() - IN_DEGREE_ONE = auto() - SYMBOL_DEPENDS = auto() - EFFICIENCY = auto() - BAZEL_CONV_CANDIDATES = auto() - - -class LinterTypes(Enum): - """Enums for the different types of counts to perform on a graph.""" - - ALL = auto() - PUBLIC_UNUSED = auto() - EFFICIENCY_LINT = auto() - - -class EdgeProps(Enum): - """Enums for edge properties.""" - - direct = auto() - visibility = auto() - symbols = auto() - - -class NodeProps(Enum): - """Enums for node properties.""" - - bin_type = auto() - - -def null_progressbar(items): - """Fake stand-in for normal progressbar.""" - for item in items: - yield item - - -class LibdepsGraph(networkx.DiGraph): - """Class for analyzing the graph.""" - - def __init__(self, graph=networkx.DiGraph()): - """Load the graph data.""" - super().__init__(incoming_graph_data=graph) - self._progressbar = None - self._deptypes = None - - def get_deptype(self, deptype): - """Convert 
graphs deptypes from json string to dict, and return requested value.""" - - if not self._deptypes: - self._deptypes = json.loads(self.graph.get("deptypes", "{}")) - if self.graph["graph_schema_version"] == 1: - # get and set the legacy values - self._deptypes["Global"] = self._deptypes.get("Global", 0) - self._deptypes["Public"] = self._deptypes.get("Public", 1) - self._deptypes["Private"] = self._deptypes.get("Private", 2) - self._deptypes["Interface"] = self._deptypes.get("Interface", 3) - - return self._deptypes[deptype] - - def get_direct_nonprivate_graph(self): - """Get a graph view of direct nonprivate edges.""" - - def filter_direct_nonprivate_edges(n1, n2): - return self[n1][n2].get(EdgeProps.direct.name) and ( - self[n1][n2].get(EdgeProps.visibility.name) == self.get_deptype("Public") - or self[n1][n2].get(EdgeProps.visibility.name) == self.get_deptype("Interface") - ) - - return networkx.subgraph_view(self, filter_edge=filter_direct_nonprivate_edges) - - def get_node_tree(self, node): - """Get a tree with the passed node as the single root.""" - - direct_nonprivate_graph = self.get_direct_nonprivate_graph() - substree_set = networkx.descendants(direct_nonprivate_graph, node) - - def subtree(n1): - return n1 in substree_set or n1 == node - - return networkx.subgraph_view(direct_nonprivate_graph, filter_node=subtree) - - def get_progress(self, value=None): - """ - Set if a progress bar should be used or not. - - No args means use progress bar if available. 
- """ - - if value is None: - value = "progressbar" in globals() - - if self._progressbar: - return self._progressbar - - if value: - - def get_progress_bar(title, *args): - custom_bar = progressbar.ProgressBar( - widgets=[ - title, - progressbar.Counter(format="[%(value)d/%(max_value)d]"), - progressbar.Timer(format=" Time: %(elapsed)s "), - progressbar.Bar(marker=">", fill=" ", left="|", right="|"), - ] - ) - return custom_bar(*args) - - self._progressbar = get_progress_bar - else: - self._progressbar = null_progressbar - - return self._progressbar - - -def load_libdeps_graph(graph_file): - """Load a graphml file and create a LibdepGraph.""" - - graph = networkx.read_graphml(graph_file) - return LibdepsGraph(graph=graph) diff --git a/buildscripts/linter/filediff.py b/buildscripts/linter/filediff.py index f132f1a3e80..1fc2d00cc7a 100644 --- a/buildscripts/linter/filediff.py +++ b/buildscripts/linter/filediff.py @@ -25,13 +25,8 @@ MONGO_REVISION_ENV_VAR = "REVISION" def _get_repos_and_revisions() -> Tuple[List[Repo], RevisionMap]: """Get the repo object and a map of revisions to compare against.""" - modules = [ - path - for path in git.get_module_paths() - # Exclude enterprise module; it's in the "modules" folder but does not correspond to a repo - if "src/mongo/db/modules/enterprise" not in path - ] - repos = [Repo(path) for path in modules] + + repos = [Repo(git.get_base_dir())] revision_map = generate_revision_map(repos, {"mongo": os.environ.get(MONGO_REVISION_ENV_VAR)}) return repos, revision_map diff --git a/buildscripts/linter/git.py b/buildscripts/linter/git.py index 7d163790dfb..4abc223b6ea 100644 --- a/buildscripts/linter/git.py +++ b/buildscripts/linter/git.py @@ -8,11 +8,9 @@ import re from pathlib import Path from typing import Callable, List -from buildscripts import moduleconfig from buildscripts.linter import git_base as _git # Path to the modules in the mongodb source tree -# Has to match the string in SConstruct MODULE_DIR = "src/mongo/db/modules" 
@@ -31,25 +29,9 @@ def get_base_dir(): return os.path.dirname(os.path.dirname(os.path.realpath(__file__))) -def get_module_paths() -> List[str]: - """Get a list of paths that contain modules.""" - base_dir = get_base_dir() - - # Get a list of modules - mongo_modules = moduleconfig.discover_module_directories( - os.path.join(base_dir, MODULE_DIR), None - ) - - paths = [os.path.join(base_dir, MODULE_DIR, m) for m in mongo_modules] - paths.append(base_dir) - - return paths - - def get_repos() -> List[Repo]: """Get a list of Repos to check linters for.""" - paths = get_module_paths() - return [Repo(p) for p in paths] + return [Repo(get_base_dir())] class Repo(_git.Repository): diff --git a/buildscripts/make_archive.py b/buildscripts/make_archive.py deleted file mode 100755 index 6c2bce60f1a..00000000000 --- a/buildscripts/make_archive.py +++ /dev/null @@ -1,225 +0,0 @@ -#!/usr/bin/env python3 -"""Helper script for constructing an archive (zip or tar) from a list of files. - -The output format (tar, tgz, zip) is determined from the file name, unless the user specifies ---format on the command line. - -This script simplifies the specification of filename transformations, so that, e.g., -src/mongo/foo.cpp and build/linux2/normal/buildinfo.cpp can get put into the same -directory in the archive, perhaps mongodb-2.0.2/src/mongo. - -Usage: - -make_archive.py -o [--format (tar|tgz|zip)] \\ - [--transform match1=replacement1 [--transform match2=replacement2 [...]]] \\ - [...] - -If the input file names start with "@", the file is expected to contain a list of -whitespace-separated file names to include in the archive. This helps get around the Windows -command line length limit. - -Transformations are processed in command-line order and are short-circuiting. So, if a file matches -match1, it is never compared against match2 or later. Matches are just python startswith() -comparisons. - -For a detailed usage example, see src/SConscript.client or src/mongo/SConscript. 
-""" - -import optparse -import os -import shlex -import shutil -import sys -import tempfile -import zipfile -from subprocess import PIPE, STDOUT, Popen - - -def main(argv): - """Execute Main program.""" - args = [] - for arg in argv[1:]: - if arg.startswith("@"): - file_name = arg[1:] - f_handle = open(file_name, "r") - args.extend(s1.strip('"') for s1 in shlex.split(f_handle.readline(), posix=False)) - f_handle.close() - else: - args.append(arg) - - opts = parse_options(args) - if opts.archive_format in ("tar", "tgz"): - make_tar_archive(opts) - elif opts.archive_format == "zip": - make_zip_archive(opts) - else: - raise ValueError('Unsupported archive format "%s"' % opts.archive_format) - - -def delete_directory(directory): - """Recursively deletes a directory and its contents.""" - try: - shutil.rmtree(directory) - except Exception: - pass - - -def make_tar_archive(opts): - """Generate tar archive. - - Given the parsed options, generates the 'opt.output_filename' - tarball containing all the files in 'opt.input_filename' renamed - according to the mappings in 'opts.transformations'. - - e.g. for an input file named "a/mongo/build/DISTSRC", and an - existing transformation {"a/mongo/build": "release"}, the input - file will be written to the tarball as "release/DISTSRC" - - All files to be compressed are copied into new directories as - required by 'opts.transformations'. Once the tarball has been - created, all temporary directory structures created for the - purposes of compressing, are removed. 
- """ - tar_options = "cvf" - if opts.archive_format == "tgz": - tar_options += "z" - - # clean and create a temp directory to copy files to - enclosing_archive_directory = tempfile.mkdtemp(prefix="archive_", dir=os.path.abspath("build")) - output_tarfile = os.path.join(os.getcwd(), opts.output_filename) - - tar_command = ["tar", tar_options, output_tarfile] - - for input_filename in opts.input_filenames: - preferred_filename = get_preferred_filename(input_filename, opts.transformations) - temp_file_location = os.path.join(enclosing_archive_directory, preferred_filename) - enclosing_file_directory = os.path.dirname(temp_file_location) - if not os.path.exists(enclosing_file_directory): - os.makedirs(enclosing_file_directory) - print("copying %s => %s" % (input_filename, temp_file_location)) - if os.path.isdir(input_filename): - shutil.copytree(input_filename, temp_file_location) - else: - shutil.copy2(input_filename, temp_file_location) - tar_command.append(preferred_filename) - - print(" ".join(tar_command)) - # execute the full tar command - run_directory = os.path.join(os.getcwd(), enclosing_archive_directory) - proc = Popen(tar_command, stdout=PIPE, stderr=STDOUT, bufsize=0, cwd=run_directory) - proc.wait() - - # delete temp directory - delete_directory(enclosing_archive_directory) - - -def make_zip_archive(opts): - """Generate the zip archive. - - Given the parsed options, generates the 'opt.output_filename' - zipfile containing all the files in 'opt.input_filename' renamed - according to the mappings in 'opts.transformations'. - - All files in 'opt.output_filename' are renamed before being - written into the zipfile. 
- """ - archive = open_zip_archive_for_write(opts.output_filename) - try: - for input_filename in opts.input_filenames: - archive.add( - input_filename, arcname=get_preferred_filename(input_filename, opts.transformations) - ) - finally: - archive.close() - - -def parse_options(args): - """Parse program options.""" - parser = optparse.OptionParser() - parser.add_option( - "-o", - dest="output_filename", - default=None, - help="Name of the archive to output.", - metavar="FILE", - ) - parser.add_option( - "--format", - dest="archive_format", - default=None, - choices=("zip", "tar", "tgz"), - help=( - "Format of archive to create. " - "If omitted, use the suffix of the output filename to decide." - ), - ) - parser.add_option("--transform", action="append", dest="transformations", default=[]) - - (opts, input_filenames) = parser.parse_args(args) - opts.input_filenames = [] - - for input_filename in input_filenames: - if input_filename.startswith("@"): - opts.input_filenames.extend(open(input_filename[1:], "r").read().split()) - else: - opts.input_filenames.append(input_filename) - - if opts.output_filename is None: - parser.error("-o switch is required") - - if opts.archive_format is None: - if opts.output_filename.endswith(".zip"): - opts.archive_format = "zip" - elif opts.output_filename.endswith("tar.gz") or opts.output_filename.endswith(".tgz"): - opts.archive_format = "tgz" - elif opts.output_filename.endswith(".tar"): - opts.archive_format = "tar" - else: - parser.error( - 'Could not deduce archive format from output filename "%s"' % opts.output_filename - ) - - try: - opts.transformations = [ - xform.replace(os.path.altsep or os.path.sep, os.path.sep).split("=", 1) - for xform in opts.transformations - ] - except Exception as err: - parser.error(err) - - return opts - - -def open_zip_archive_for_write(filename): - """Open a zip archive for writing and return it.""" - - # Infuriatingly, Zipfile calls the "add" method "write", but they're otherwise identical, - # 
for our purposes. WrappedZipFile is a minimal adapter class. - class WrappedZipFile(zipfile.ZipFile): - """WrappedZipFile class.""" - - def add(self, filename, arcname): - """Add filename to zip.""" - return self.write(filename, arcname) - - return WrappedZipFile(filename, "w", zipfile.ZIP_DEFLATED) - - -def get_preferred_filename(input_filename, transformations): - """Return preferred filename. - - Perform a prefix subsitution on 'input_filename' for the - first matching transformation in 'transformations' and - returns the substituted string. - """ - for match, replace in transformations: - match_lower = match.lower() - input_filename_lower = input_filename.lower() - if input_filename_lower.startswith(match_lower): - return replace + input_filename[len(match) :] - return input_filename - - -if __name__ == "__main__": - main(sys.argv) - sys.exit(0) diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py index bd5e66b4d20..65e0706e434 100644 --- a/buildscripts/make_vcxproj.py +++ b/buildscripts/make_vcxproj.py @@ -1,11 +1,11 @@ """Generate vcxproj and vcxproj.filters files for browsing code in Visual Studio 2015. -To build mongodb, you must use scons. You can use this project to navigate code during debugging. +To build mongodb, you must use bazel. You can use this project to navigate code during debugging. HOW TO USE First, you need a compile_commands.json file, to generate run the following command: - scons compiledb + bazel build compiledb Next, run the following command python buildscripts/make_vcxproj.py FILE_NAME @@ -257,10 +257,6 @@ class ProjFileGenerator(object): break prev_arg = arg - # Skip files made by scons for configure testing - if "sconf_temp" in file_name: - return - if file_name not in self.files: self.files.add(file_name) @@ -300,7 +296,7 @@ class ProjFileGenerator(object): # 3. 
Output these lists of files to vcxproj and vcxproj.headers # Note: order of these lists does not matter, VS will sort them anyway dirs = set() - scons_files = set() + bazel_files = set() for file_name in self.files: dirs.add(os.path.dirname(file_name)) @@ -331,13 +327,12 @@ class ProjFileGenerator(object): dirs = dirs.union(base_dirs) - # Get all the scons files + # Get all the bazel files for directory in dirs: if os.path.exists(directory): for file_name in os.listdir(directory): - if file_name == "SConstruct" or "SConscript" in file_name: - scons_files.add(directory + "\\" + file_name) - scons_files.add("SConstruct") + if file_name == "BUILD.bazel" or ".bazel" in file_name: + bazel_files.add(directory + "\\" + file_name) # Write a list of directory entries with unique guids self.filters.write(" \n") @@ -365,9 +360,9 @@ class ProjFileGenerator(object): self.filters.write(" \n") self.filters.write(" \n") - # Write a list of scons files + # Write a list of bazel files self.filters.write(" \n") - for file_name in sorted(scons_files): + for file_name in sorted(bazel_files): self.filters.write(" \n" % file_name) self.filters.write(" %s\n" % os.path.dirname(file_name)) self.filters.write(" \n") @@ -380,9 +375,9 @@ class ProjFileGenerator(object): self.vcxproj.write(" \n" % file_name) self.vcxproj.write(" \n") - # Write a list of scons files into the vcxproj + # Write a list of bazel files into the vcxproj self.vcxproj.write(" \n") - for file_name in sorted(scons_files): + for file_name in sorted(bazel_files): self.vcxproj.write(" \n" % file_name) self.vcxproj.write(" \n") diff --git a/buildscripts/moduleconfig.py b/buildscripts/moduleconfig.py deleted file mode 100644 index b26fee23e2a..00000000000 --- a/buildscripts/moduleconfig.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Utility functions for SCons to discover and configure MongoDB modules. 
- -A MongoDB module is an organized collection of source code and build rules that can be provided at -compile-time to alter or extend the behavior of MongoDB. The files comprising a single MongoDB -module are arranged in a directory hierarchy, rooted in a directory whose name is by convention the -module name, and containing in that root directory at least two files: a build.py file and a -SConscript file. - -MongoDB modules are discovered by a call to the discover_modules() function, whose sole parameter is -the directory which is the immediate parent of all module directories. The exact directory is -chosen by the SConstruct file, which is the direct consumer of this python module. The only rule is -that it must be a subdirectory of the src/ directory, to correctly work with the SCons variant -directory system that separates build products for source. - -Once discovered, modules are configured by the configure_modules() function, and the build system -integrates their SConscript files into the rest of the build. - -MongoDB module build.py files implement a single function, configure(conf, env), which they may use -to configure the supplied "env" object. The configure functions may add extra LIBDEPS to mongod, -mongos and the mongo shell (TODO: other mongo tools and the C++ client), and through those libraries -alter those programs' behavior. - -MongoDB module SConscript files can describe libraries, programs and unit tests, just as other -MongoDB SConscript files do. -""" - -__all__ = ( - "discover_modules", - "discover_module_directories", - "configure_modules", - "register_module_test", -) - -import imp -import inspect -import os - - -def discover_modules(module_root, allowed_modules): - """Scan module_root for subdirectories that look like MongoDB modules. - - Return a list of imported build.py module objects. 
- """ - found_modules = [] - found_module_names = [] - - if allowed_modules is not None: - allowed_modules = allowed_modules.split(",") - # When `--modules=` is passed, the split on empty string is represented - # in memory as [''] - if allowed_modules == [""]: - allowed_modules = [] - - if not os.path.isdir(module_root): - if allowed_modules: - raise RuntimeError( - f"Requested the following modules: {allowed_modules}, but the module root '{module_root}' could not be found. Check the module root, or remove the module from the scons invocation." - ) - return found_modules - - for name in os.listdir(module_root): - root = os.path.join(module_root, name) - if name.startswith(".") or not os.path.isdir(root): - continue - - build_py = os.path.join(root, "build.py") - module = None - - if allowed_modules is not None and name not in allowed_modules: - print("skipping module: %s" % (name)) - continue - - try: - print("adding module: %s" % (name)) - fp = open(build_py, "r") - try: - module = imp.load_module( - "module_" + name, fp, build_py, (".py", "r", imp.PY_SOURCE) - ) - if getattr(module, "name", None) is None: - module.name = name - found_modules.append(module) - found_module_names.append(name) - finally: - fp.close() - except (FileNotFoundError, IOError): - pass - - if allowed_modules is not None: - missing_modules = set(allowed_modules) - set(found_module_names) - if missing_modules: - raise RuntimeError(f"Failed to locate all modules. Could not find: {missing_modules}") - - return found_modules - - -def discover_module_directories(module_root, allowed_modules): - """Scan module_root for subdirectories that look like MongoDB modules. - - Return a list of directory names. 
- """ - if not os.path.isdir(module_root): - return [] - - found_modules = [] - - if allowed_modules is not None: - allowed_modules = allowed_modules.split(",") - - for name in os.listdir(module_root): - root = os.path.join(module_root, name) - if name.startswith(".") or not os.path.isdir(root): - continue - - build_py = os.path.join(root, "build.py") - - if allowed_modules is not None and name not in allowed_modules: - print("skipping module: %s" % (name)) - continue - - if os.path.isfile(build_py): - print("adding module: %s" % (name)) - found_modules.append(name) - - return found_modules - - -def configure_modules(modules, conf): - """Run the configure() function in the build.py python modules for each module in "modules". - - The modules were created by discover_modules. - - The configure() function should prepare the Mongo build system for building the module. - """ - env = conf.env - env["MONGO_MODULES"] = [] - for module in modules: - name = module.name - print("configuring module: %s" % (name)) - modules_configured = module.configure(conf, env) - if modules_configured: - for module_name in modules_configured: - env["MONGO_MODULES"].append(module_name) - else: - env["MONGO_MODULES"].append(name) - - -def get_module_sconscripts(modules): - """Return all modules' sconscripts.""" - sconscripts = [] - for mod in modules: - module_dir_path = __get_src_relative_path(os.path.join(os.path.dirname(mod.__file__))) - sconscripts.append(os.path.join(module_dir_path, "SConscript")) - return sconscripts - - -def __get_src_relative_path(path): - """Return a path relative to ./src. - - The src directory is important because of its relationship to BUILD_DIR, - established in the SConstruct file. For variant directories to work properly - in SCons, paths relative to the src or BUILD_DIR must often be generated. 
- """ - src_dir = os.path.abspath("src") - path = os.path.abspath(os.path.normpath(path)) - if not path.startswith(src_dir): - raise ValueError('Path "%s" is not relative to the src directory "%s"' % (path, src_dir)) - result = path[len(src_dir) + 1 :] - return result - - -def __get_module_path(module_frame_depth): - """Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames. - - This is above this function, relative to the "src" directory. - """ - module_filename = inspect.stack()[module_frame_depth + 1][1] - return os.path.dirname(__get_src_relative_path(module_filename)) - - -def __get_module_src_path(module_frame_depth): - """Return the path relative to the SConstruct file of the MongoDB module's source tree. - - module_frame_depth is the number of frames above the current one in which one can find a - function from the MongoDB module's build.py function. - """ - return os.path.join("src", __get_module_path(module_frame_depth + 1)) - - -def __get_module_build_path(module_frame_depth): - """Return the path relative to the SConstruct file of the MongoDB module's build tree. - - module_frame_depth is the number of frames above the current one in which one can find a - function from the MongoDB module's build.py function. - """ - return os.path.join("$BUILD_DIR", __get_module_path(module_frame_depth + 1)) - - -def get_current_module_src_path(): - """Return the path relative to the SConstruct file of the current MongoDB module's source tree. - - May only meaningfully be called from within build.py - """ - return __get_module_src_path(1) - - -def get_current_module_build_path(): - """Return the path relative to the SConstruct file of the current MongoDB module's build tree. - - May only meaningfully be called from within build.py - """ - - return __get_module_build_path(1) - - -def get_current_module_libdep_name(libdep_rel_path): - """Return a $BUILD_DIR relative path to a "libdep_rel_path". 
- - The "libdep_rel_path" is relative to the MongoDB module's build.py file. - - May only meaningfully be called from within build.py - """ - return os.path.join(__get_module_build_path(1), libdep_rel_path) diff --git a/site_scons/mongo/pip_requirements.py b/buildscripts/pip_requirements.py similarity index 100% rename from site_scons/mongo/pip_requirements.py rename to buildscripts/pip_requirements.py diff --git a/buildscripts/resmokelib/suitesconfig.py b/buildscripts/resmokelib/suitesconfig.py index 96e283def12..8f1bdf66921 100644 --- a/buildscripts/resmokelib/suitesconfig.py +++ b/buildscripts/resmokelib/suitesconfig.py @@ -88,7 +88,7 @@ def create_test_membership_map(fail_on_missing_selector=False, test_kind=None): # We ignore errors from missing files referenced in the test suite's "selector" # section. Certain test suites (e.g. unittests.yml) have a dedicated text file to # capture the list of tests they run; the text file may not be available if the - # associated SCons target hasn't been built yet. + # associated bazel target hasn't been built yet. 
if err.filename in _config.EXTERNAL_SUITE_SELECTORS: if not fail_on_missing_selector: continue diff --git a/buildscripts/scons.py b/buildscripts/scons.py deleted file mode 100755 index 550a6ae83a4..00000000000 --- a/buildscripts/scons.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python3 -"""Scons module.""" - -import os -import sys - -SCONS_VERSION = os.environ.get("SCONS_VERSION", "3.1.2") - -MONGODB_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) -SCONS_DIR = os.path.join( - MONGODB_ROOT, "src", "third_party", "scons-" + SCONS_VERSION, "scons-local-" + SCONS_VERSION -) - -if not os.path.exists(SCONS_DIR): - print("Could not find SCons in '%s'" % (SCONS_DIR)) - sys.exit(1) - -SITE_TOOLS_DIR = os.path.join(MONGODB_ROOT, "site_scons") - -sys.path = [SCONS_DIR, SITE_TOOLS_DIR] + sys.path - -from mongo.pip_requirements import MissingRequirements, verify_requirements - -try: - verify_requirements() -except MissingRequirements as ex: - print(ex) - sys.exit(1) - -try: - import SCons.Script -except ImportError as import_err: - print("Could not import SCons from '%s'" % (SCONS_DIR)) - print("ImportError:", import_err) - sys.exit(1) - - -def entrypoint(): - SCons.Script.main() - - -if __name__ == "__main__": - entrypoint() diff --git a/buildscripts/scons_cache_prune.py b/buildscripts/scons_cache_prune.py deleted file mode 100644 index 7c4735424b9..00000000000 --- a/buildscripts/scons_cache_prune.py +++ /dev/null @@ -1,192 +0,0 @@ -#!/USSR/bin/python -# encoding: utf-8 -""" -Prune the scons cache. - -This script, borrowed from some waf code, with a stand alone interface, provides a way to -remove files from the cache on an LRU (least recently used) basis to prevent the scons cache -from outgrowing the storage capacity. 
-""" - -# Inspired by: https://github.com/krig/waf/blob/master/waflib/extras/lru_cache.py -# Thomas Nagy 2011 - -import argparse -import collections -import logging -import os -import shutil - -LOGGER = logging.getLogger("scons.cache.prune.lru") # type: ignore - -GIGBYTES = 1024 * 1024 * 1024 - -CacheItem = collections.namedtuple("CacheContents", ["path", "time", "size"]) - - -def get_cachefile_size(file_path, is_cksum): - """Get the size of the cachefile.""" - if is_cksum: - size = 0 - for cksum_path in os.listdir(file_path): - cksum_path = os.path.join(file_path, cksum_path) - size += os.stat(cksum_path).st_size - else: - size = os.stat(file_path).st_size - return size - - -def collect_cache_contents(cache_path): - """Collect the cache contents.""" - # map folder names to timestamps - contents = [] - total = 0 - - # collect names of directories and creation times - for name in os.listdir(cache_path): - path = os.path.join(cache_path, name) - - if os.path.isdir(path): - for file_name in os.listdir(path): - file_path = os.path.join(path, file_name) - # Cache prune script is allowing only directories with this extension - # which comes from the validate_cache_dir.py tool in SCons, it must match - # the extension set in that file. - cksum_type = False - if os.path.isdir(file_path): - hash_length = -32 - tmp_length = -len(".cksum.tmp") + hash_length - cksum_type = ( - file_path.lower().endswith(".cksum") - or file_path.lower().endswith(".del") - or file_path.lower()[tmp_length:hash_length] == ".cksum.tmp" - ) - - if not cksum_type: - LOGGER.warning( - "cache item %s is a directory and not a file. 
" - "The cache may be corrupt.", - file_path, - ) - continue - - try: - item = CacheItem( - path=file_path, - time=os.stat(file_path).st_atime, - size=get_cachefile_size(file_path, cksum_type), - ) - - total += item.size - - contents.append(item) - except OSError as err: - LOGGER.warning("Ignoring error querying file %s : %s", file_path, err) - - return (total, contents) - - -def prune_cache(cache_path, cache_size_gb, clean_ratio): - """Prune the cache.""" - # This function is taken as is from waf, with the interface cleaned up and some minor - # stylistic changes. - - cache_size = cache_size_gb * GIGBYTES - - (total_size, contents) = collect_cache_contents(cache_path) - - LOGGER.info("cache size %d, quota %d", total_size, cache_size) - - if total_size >= cache_size: - LOGGER.info("trimming the cache since %d > %d", total_size, cache_size) - - # make a list to sort the folders' by timestamp - contents.sort(key=lambda x: x.time, reverse=True) # sort by timestamp - - # now that the contents of things to delete is sorted by timestamp in reverse order, we - # just delete things until the total_size falls below the target cache size ratio. - while total_size >= cache_size * clean_ratio: - if not contents: - LOGGER.error( - "cache size is over quota, and there are no files in " "the queue to delete." - ) - return False - - cache_item = contents.pop() - - # check the atime again just to make sure something wasn't accessed while - # we pruning other files. - try: - if cache_item.time < os.stat(cache_item.path).st_atime: - continue - except FileNotFoundError as err: - LOGGER.warning("Unable to find file %s : %s", cache_item, err) - continue - - to_remove = cache_item.path + ".del" - try: - os.rename(cache_item.path, to_remove) - except Exception as err: - # another process may have already cleared the file. 
- LOGGER.warning("Unable to rename %s : %s", cache_item, err) - else: - try: - if os.path.isdir(to_remove): - shutil.rmtree(to_remove) - else: - os.remove(to_remove) - total_size -= cache_item.size - except Exception as err: - # this should not happen, but who knows? - LOGGER.error( - "error [%s, %s] removing file '%s', " "please report this error", - err, - type(err), - to_remove, - ) - - LOGGER.info("total cache size at the end of pruning: %d", total_size) - return True - LOGGER.info("cache size (%d) is currently within boundaries", total_size) - return True - - -def main(): - """Execute Main entry.""" - - logging.basicConfig(level=logging.INFO) - - parser = argparse.ArgumentParser(description="SCons cache pruning tool") - - parser.add_argument("--cache-dir", "-d", default=None, help="path to the cache directory.") - parser.add_argument( - "--cache-size", "-s", default=200, type=int, help="maximum size of cache in GB." - ) - parser.add_argument( - "--prune-ratio", - "-p", - default=0.8, - type=float, - help=( - "ratio (as 1.0 > x > 0) of total cache size to prune " "to when cache exceeds quota." - ), - ) - parser.add_argument("--print-cache-dir", default=False, action="store_true") - - args = parser.parse_args() - - if args.cache_dir is None or not os.path.isdir(args.cache_dir): - LOGGER.error("must specify a valid cache path, [%s]", args.cache_dir) - exit(1) - - ok = prune_cache( - cache_path=args.cache_dir, cache_size_gb=args.cache_size, clean_ratio=args.prune_ratio - ) - - if not ok: - LOGGER.error("encountered error cleaning the cache. 
exiting.") - exit(1) - - -if __name__ == "__main__": - main() diff --git a/buildscripts/simple_report.py b/buildscripts/simple_report.py index 1f860a1a8bd..1c8a1ff1aea 100644 --- a/buildscripts/simple_report.py +++ b/buildscripts/simple_report.py @@ -28,26 +28,9 @@ class Report(TypedDict): results: List[Result] -def _open_and_truncate_log_lines(log_file: pathlib.Path) -> List[str]: - with open(log_file) as fh: - lines = fh.read().splitlines() - for i, line in enumerate(lines): - if line == "scons: done reading SConscript files.": - offset = i - # if possible, also shave off the current and next line - # as they contain: - # scons: done reading SConscript files. - # scons: Building targets ... - # which is superfluous. - if len(lines) > i + 2: - offset = i + 2 - return lines[offset:] - - return lines - - def _clean_log_file(log_file: pathlib.Path, dedup_lines: bool) -> str: - lines = _open_and_truncate_log_lines(log_file) + with open(log_file) as fh: + lines = fh.readlines() if dedup_lines: lines = _dedup_lines(lines) return os.linesep.join(lines) diff --git a/buildscripts/tests/resmoke_validation/test_suites_configurations.py b/buildscripts/tests/resmoke_validation/test_suites_configurations.py index d382ce9cb73..356bdf8cecd 100644 --- a/buildscripts/tests/resmoke_validation/test_suites_configurations.py +++ b/buildscripts/tests/resmoke_validation/test_suites_configurations.py @@ -16,7 +16,7 @@ class TestSuitesConfigurations(unittest.TestCase): # We ignore errors from missing files referenced in the test suite's "selector" # section. Certain test suites (e.g. unittests.yml) have a dedicated text file to # capture the list of tests they run; the text file may not be available if the - # associated SCons target hasn't been built yet. + # associated bazel target hasn't been built yet. 
if err.filename in config.EXTERNAL_SUITE_SELECTORS: continue except Exception as ex: diff --git a/buildscripts/tests/test_simple_report.py b/buildscripts/tests/test_simple_report.py index 89923517b51..8c3c323a351 100644 --- a/buildscripts/tests/test_simple_report.py +++ b/buildscripts/tests/test_simple_report.py @@ -3,9 +3,8 @@ import os import random import sys -import textwrap import unittest -from unittest.mock import mock_open, patch +from unittest.mock import patch from click.testing import CliRunner @@ -55,57 +54,3 @@ class TestSimpleReport(unittest.TestCase): self.assertEqual(results[0]["status"], "fail") self.assertEqual(report["failures"], 1) self.assertEqual(result.exit_code, 0) - - @patch(ns("try_combine_reports")) - @patch(ns("put_report")) - def test_truncate_scons(self, mock_put_report, _mock_try_combine_reports): - exit_code = 0 - data = fix_newlines( - textwrap.dedent("""\ -TO BE TRUNCATED -TO BE TRUNCATED -TO BE TRUNCATED -TO BE TRUNCATED -scons: done reading SConscript files. -scons: Building targets ... 
-interesting part""") - ) - - with patch("builtins.open", mock_open(read_data=data)) as _mock_file: - runner = CliRunner() - result = runner.invoke( - buildscripts.simple_report.main, - ["--test-name", "potato", "--log-file", "test.log", "--exit-code", str(exit_code)], - ) - report = mock_put_report.call_args[0][0] - results = mock_put_report.call_args[0][0]["results"] - self.assertEqual(results[0]["status"], "pass") - self.assertEqual(results[0]["log_raw"], "interesting part") - self.assertEqual(report["failures"], 0) - self.assertEqual(result.exit_code, 0) - - @patch(ns("try_combine_reports")) - @patch(ns("put_report")) - def test_non_scons_log(self, mock_put_report, _mock_try_combine_reports): - exit_code = 0 - data = fix_newlines( - textwrap.dedent("""\ -*NOT* TO BE TRUNCATED -*NOT* TO BE TRUNCATED -*NOT* TO BE TRUNCATED -*NOT* TO BE TRUNCATED -interesting part""") - ) - - with patch("builtins.open", mock_open(read_data=data)) as _mock_file: - runner = CliRunner() - result = runner.invoke( - buildscripts.simple_report.main, - ["--test-name", "potato", "--log-file", "test.log", "--exit-code", str(exit_code)], - ) - report = mock_put_report.call_args[0][0] - results = mock_put_report.call_args[0][0]["results"] - self.assertEqual(results[0]["status"], "pass") - self.assertEqual(results[0]["log_raw"], data) - self.assertEqual(report["failures"], 0) - self.assertEqual(result.exit_code, 0) diff --git a/debian/mongodb-enterprise-unstable.rules b/debian/mongodb-enterprise-unstable.rules index 338dc6cff1a..171bacdbf59 100755 --- a/debian/mongodb-enterprise-unstable.rules +++ b/debian/mongodb-enterprise-unstable.rules @@ -18,10 +18,6 @@ build: build-stamp build-stamp: configure-stamp dh_testdir - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. 
- # scons - #docbook-to-man debian/mongodb.sgml > mongodb.1 - echo -n > debian/mongodb-enterprise-unstable-database-tools-extra.manpages for binary in ${TOOLS} ; \ do \ @@ -41,10 +37,6 @@ clean: dh_testroot rm -f build-stamp configure-stamp - # FIXME: scons freaks out at the presence of target files - # under debian/mongodb. - #scons -c - rm -f debian/*.manpages rm -rf $(CURDIR)/debian/mongodb-enterprise-unstable @@ -57,7 +49,6 @@ clean: rm -f mongod rm -f mongocryptd rm -f install_compass - rm -f .sconsign.dblite rm -f libmongoclient.a rm -rf client/*.o rm -rf tools/*.o @@ -74,8 +65,6 @@ install: build dh_prep dh_installdirs - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. - # scons --prefix=$(CURDIR)/debian/mongodb/usr install mkdir -p $(CURDIR)/debian/mongodb-enterprise-unstable-shell/usr/bin mkdir -p $(CURDIR)/debian/mongodb-enterprise-unstable-server/usr/bin mkdir -p $(CURDIR)/debian/mongodb-enterprise-unstable-mongos/usr/bin diff --git a/debian/mongodb-enterprise.rules b/debian/mongodb-enterprise.rules index 67d3d241ee6..7abbec12a36 100755 --- a/debian/mongodb-enterprise.rules +++ b/debian/mongodb-enterprise.rules @@ -18,9 +18,6 @@ build: build-stamp build-stamp: configure-stamp dh_testdir - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. - # scons - #docbook-to-man debian/mongodb.sgml > mongodb.1 echo -n > debian/mongodb-enterprise-database-tools-extra.manpages for binary in ${TOOLS} ; \ do \ @@ -40,10 +37,6 @@ clean: dh_testroot rm -f build-stamp configure-stamp - # FIXME: scons freaks out at the presence of target files - # under debian/mongodb. - #scons -c - rm -f debian/*.manpages rm -rf $(CURDIR)/debian/mongodb-enterprise @@ -56,7 +49,6 @@ clean: rm -f mongod rm -f mongocryptd rm -f install_compass - rm -f .sconsign.dblite rm -f libmongoclient.a rm -rf client/*.o rm -rf tools/*.o @@ -73,8 +65,6 @@ install: build dh_prep dh_installdirs - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. 
- # scons --prefix=$(CURDIR)/debian/mongodb/usr install mkdir -p $(CURDIR)/debian/mongodb-enterprise-shell/usr/bin mkdir -p $(CURDIR)/debian/mongodb-enterprise-server/usr/bin mkdir -p $(CURDIR)/debian/mongodb-enterprise-mongos/usr/bin diff --git a/debian/mongodb-org-unstable.rules b/debian/mongodb-org-unstable.rules index e9cc8f5edb1..503addf4a53 100755 --- a/debian/mongodb-org-unstable.rules +++ b/debian/mongodb-org-unstable.rules @@ -18,10 +18,6 @@ build: build-stamp build-stamp: configure-stamp dh_testdir - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. - # scons - #docbook-to-man debian/mongodb.sgml > mongodb.1 - echo -n > debian/mongodb-org-unstable-database-tools-extra.manpages for binary in ${TOOLS} ; \ do \ @@ -38,10 +34,6 @@ clean: dh_testroot rm -f build-stamp configure-stamp - # FIXME: scons freaks out at the presence of target files - # under debian/mongodb. - #scons -c - rm -f debian/*.manpages rm -rf $(CURDIR)/debian/mongodb-org-unstable @@ -52,7 +44,6 @@ clean: rm -f config.log rm -f mongod rm -f install_compass - rm -f .sconsign.dblite rm -f libmongoclient.a rm -rf client/*.o rm -rf tools/*.o @@ -69,8 +60,6 @@ install: build dh_prep dh_installdirs - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. - # scons --prefix=$(CURDIR)/debian/mongodb/usr install mkdir -p $(CURDIR)/debian/mongodb-org-unstable-shell/usr/bin mkdir -p $(CURDIR)/debian/mongodb-org-unstable-server/usr/bin mkdir -p $(CURDIR)/debian/mongodb-org-unstable-mongos/usr/bin diff --git a/debian/mongodb-org.rules b/debian/mongodb-org.rules index e1374ff81a7..eb331ed093e 100755 --- a/debian/mongodb-org.rules +++ b/debian/mongodb-org.rules @@ -18,9 +18,6 @@ build: build-stamp build-stamp: configure-stamp dh_testdir - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. 
- # scons - #docbook-to-man debian/mongodb.sgml > mongodb.1 echo -n > debian/mongodb-org-database-tools-extra.manpages for binary in ${TOOLS} ; \ do \ @@ -37,10 +34,6 @@ clean: dh_testroot rm -f build-stamp configure-stamp - # FIXME: scons freaks out at the presence of target files - # under debian/mongodb. - #scons -c - rm -f debian/*.manpages rm -rf $(CURDIR)/debian/mongodb-org @@ -51,7 +44,6 @@ clean: rm -f config.log rm -f mongod rm -f install_compass - rm -f .sconsign.dblite rm -f libmongoclient.a rm -rf client/*.o rm -rf tools/*.o @@ -68,8 +60,6 @@ install: build dh_prep dh_installdirs - # THE FOLLOWING LINE IS INTENTIONALLY COMMENTED. - # scons --prefix=$(CURDIR)/debian/mongodb/usr install mkdir -p $(CURDIR)/debian/mongodb-org-shell/usr/bin mkdir -p $(CURDIR)/debian/mongodb-org-server/usr/bin mkdir -p $(CURDIR)/debian/mongodb-org-mongos/usr/bin diff --git a/docs/OWNERS.yml b/docs/OWNERS.yml index 5f5564c7b38..f8c2324f8c9 100644 --- a/docs/OWNERS.yml +++ b/docs/OWNERS.yml @@ -1,14 +1,5 @@ version: 1.0.0 filters: - - "bazel.md": - approvers: - - 10gen/devprod-build - - "build_system_reference.md": - approvers: - - 10gen/devprod-build - - "build_system.md": - approvers: - - 10gen/devprod-build - "building.md": approvers: - 10gen/devprod-build diff --git a/docs/bazel.md b/docs/bazel.md deleted file mode 100644 index d3387795933..00000000000 --- a/docs/bazel.md +++ /dev/null @@ -1,31 +0,0 @@ -(Note: This is a work-in-progress for the Build team; contact #ask-devprod-build for questions) - -To perform a Bazel build via SCons: - -- You must be on a arm64 virtual workstation -- You must generate engflow credentials and store them in the correct location (see below) -- Build the Bazel-compatible target: `python3 ./buildscripts/scons.py --build-profile=fast --ninja=disabled --link-model=static -j 200 --modules= build/fast/mongo/db/commands/libfsync_locked.a` - -To generate and install the engflow credentials: - -- Navigate to and log in with your mongodb gmail 
account: https://sodalite.cluster.engflow.com/gettingstarted -- Generate and download the credentials; you will need to move them to the workstation machine (scp, copy paste plain text, etc...) -- Store them (the same filename they downloaded as) on your machine at the default location our build expects: `/engflow/creds/` -- You should run `chmod 600` on them to make sure they are readable only by your user -- If you don't want to use the cluster you can pass `BAZEL_FLAGS=--config=local` on the SCons command line or `--config=local` on the bazel command line - -To perform a Bazel build and _bypass_ SCons: - -- Install Bazelisk: `curl -L https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-linux-arm64 --output /tmp/bazelisk && chmod +x /tmp/bazelisk` -- Build the Bazel-compatible target: `/tmp/bazelisk build --verbose_failures src/mongo/db/commands:fsync_locked` - -To perform a Bazel build using a local Buildfarm (to test remote execution capability): - -- For more details on Buildfarm, see https://bazelbuild.github.io/bazel-buildfarm -- (One time only) Build and start the Buildfarm: - ** Change into the `buildfarm` directory: `cd buildfarm` - ** Build the image: `docker-compose build` - ** Start the container: `docker-compose up --detach` - ** Poll until the containers report status `running`: `docker ps --filter status=running --filter name=buildfarm` -- (Whenever you build): - \*\* Build the Bazel-compatible target with remote execution enabled: `/tmp/bazelisk build --verbose_failures --remote_executor=grpc://localhost:8980 src/mongo/db/commands:fsync_locked` diff --git a/docs/build_system.md b/docs/build_system.md deleted file mode 100644 index 762853608cb..00000000000 --- a/docs/build_system.md +++ /dev/null @@ -1,408 +0,0 @@ -# The MongoDB Build System - -## Introduction - -### System requirements and supported platforms - -## How to get Help - -### Where to go - -### What to bring when you go there (SCons version, server version, SCons 
command line, versions of relevant tools, `config.log`, etc.) - -## Known Issues - -### Commonly-encountered issues - -#### `--disable-warnings-as-errors` - -### Reference to known issues in the ticket system - -### How to report a problem - -#### For employees - -#### For non-employees - -## Set up the build environment - -### Set up the virtualenv and poetry - -See [Building Python Prerequisites](building.md#python-prerequisites) - -### The Enterprise Module - -#### Getting the module source - -#### Enabling the module - -## Building the software - -### Commonly-used build targets - -### Building a standard “debug” build - -#### `--dbg` - -### What goes where? - -#### `$BUILD_ROOT/scons` and its contents - -#### `$BUILD_ROOT/$VARIANT_DIR` and its contents - -#### `$BUILD_ROOT/install` and its contents - -#### `DESTDIR` and `PREFIX` - -#### `--build-dir` - -### Running core tests to verify the build - -### Building a standard “release” build - -#### `--separate-debug` - -### Installing from the build directory - -#### `--install-action` - -### Creating a release archive - -## Advanced Builds - -### Compiler and linker options - -#### `CC, CXX, CCFLAGS, CFLAGS, CXXFLAGS` - -#### `CPPDEFINES and CPPPATH` - -#### `LINKFLAGS` - -#### `MSVC_VERSION` - -#### `VERBOSE` - -### Advanced build options - -#### `-j` - -#### `--separate-debug` - -#### `--link-model` - -#### `--allocator` - -#### `--cxx-std` - -#### `--linker` - -#### `--variables-files` - -### Cross compiling - -#### `HOST_ARCH` and `TARGET_ARCH` - -### Using Ninja - -#### `--ninja` - -### Cached builds - -#### Using the SCons build cache - -##### `--cache` - -##### `--cache-dir` - -#### Using `ccache` - -##### `CCACHE` - -### Using Icecream - -#### `ICECC`, `ICECRUN`, `ICECC_CREATE_ENV` - -#### `ICECC_VERSION` and `ICECC_VERSION_ARCH` - -#### `ICECC_DEBUG` - -## Developer builds - -### Developer build options - -#### `MONGO_{VERSION,GIT_HASH}` - -By default, the server build system consults the local git 
repository -(assuming one exists) to automatically derive the current version of -MongoDB and current git revision that is being built. These values are -recorded in the SCons `MONGO_VERSION` and `MONGO_GIT_HASH` -`Environment` variables, respectively. The value of `MONGO_GIT_HASH` -is just that: the value of the currently checked out git hash. The -value computed for `MONGO_VERSION` is based on the result of `git -describe`, which looks for tags matching the release numbering -scheme. Since `git describe` relies on tags, it is important to ensure -that you periodically synchronize new tags to your local repository -with `git fetch` against the upstream server repository. - -While this automated scheme works well for release and CI builds, it -has unfortunate consequences for developer builds. Since the git hash -changes on every commit (whether locally authored or pulled from an -upstream repo), and since by default an abbreviated git hash forms -part of the result of `git describe`, a build after a commit or a pull -will see any target that has a direct or indirect dependency on the -parts of the codebase that care about `MONGO_VERSION` and -`MONGO_GIT_HASH` as out of date. Notably, you will at minimum need to -relink `mongod` and other core server binaries. - -It is possible to work around this by manually setting values for -`MONGO_VERSION` and `MONGO_GIT_HASH` on the SCons command -line. However, doing so in a way that results in an accurate value for -`MONGO_VERSION` in particular requires writing shell command -substitutions into your SCons invocation, which isn't very -friendly. The longstanding historical practice of setting -`MONGO_VERSION=0.0.0` was never well-advised, but because of recent -feature compatibility version related work it is no longer safe to do -that at all. 
- -To make it easier for developers to manage these variables in a way -which avoids useless rebuilds, has better ergonomics, and does not run -afoul of FCV management, the server build system provides a variables -file to manage these settings automatically: -`etc/scons/developer_versions.vars` . By using this file, you will get -an unchanging `MONGO_GIT_HASH` value of `unknown`, and a -`MONGO_VERSION` value that is still based on `git describe`, but with -`--abbrev=0` affixed, which will eliminate the dependency on the SHA -of the current commit. Note that you will still observe rebuilds if -you pull a new tag which changes the results of `git describe`, but -this should be a much less frequent event. - -You can opt into this variable by adding -`--variables-files=etc/scons/developer_versions.vars` to your SCons -command line, either for direct SCons builds, or when generating -Ninja. - -Support for `etc/scons/developer_versioning.vars` has been backported -as far back as MongoDB `v4.0`, so you can safely add this to your -SCons invocations on almost any branch you are likely to find yourself -using. - -#### Using sanitizers - -##### `--sanitize` - -##### `*SAN_OPTIONS` - -#### `--dbg` `--opt` - -#### `--build-tools=[stable|next]` - -### Setting up your development environment - -#### `mongo_custom_variables.py` - -##### Guidance on what to put in your custom variables - -##### How to suppress use of your custom variables - -##### Useful variables files (e.g. `mongodbtoolchain`) - -#### Using the Mongo toolchain - -##### Why do we have our own toolchain? - -##### When is it appropriate to use the MongoDB toolchain? - -##### How do I obtain the toolchain? - -##### How do I upgrade the toolchain? - -##### How do I tell the build system to use it? 
- -### Creating and using build variants - -#### Using `--build-dir` to separate variant build artifacts - -#### `BUILD_ROOT` and `BUILD_DIR` - -#### `VARIANT_DIR` - -#### `NINJA_PREFIX` and `NINJA_SUFFIX` - -### Building older versions - -#### Using` git-worktree` - -### Speeding up incremental builds - -#### Selecting minimal build targets - -#### Compiler arguments - -##### `-gsplit-dwarf` and `/DEBUG:FASTLINK` - -#### Don’t reinstall what you don’t have to (\*NIX only) - -##### `--install-action=hardlink` - -#### Speeding up SCons dependency evaluation - -##### `--implicit-cache` - -##### `--build-fast-and-loose` - -#### Using Ninja responsibly - -#### What about `ccache`? - -## Making source changes - -### Adding a new dependency - -### Linting and Lint Targets - -#### What lint targets are available? - -#### Using `clang-format` - -### Testing your changes - -#### How are test test suites defined? - -#### Running test suites - -#### Adding tests to a suite - -#### Running individual tests - -## Modifying the buid system - -### What is SCons? - -#### `SConstruct` and `SConscripts` - -#### `Environments `and their `Clone`s - -##### Overriding and altering variables - -#### `Targets` and `Sources` - -#### `Nodes` - -##### `File` Nodes - -##### `Program` and `Library` Nodes - -#### `Aliases`, `Depends` and `Requires` - -#### `Builders` - -#### `Emitters` - -#### `Scanners` - -#### `Actions` - -#### `Configure` objects - -#### DAG walk - -#### Reference to SCons documentation - -### Modules - -#### How modules work - -#### The Enterprise module - -##### The `build.py` file - -#### Adding a new module - -### Poetry - -#### What is Poetry - -[Poetry](https://python-poetry.org/) is a python dependency management system. Poetry tries to find dependencies in [pypi](https://pypi.org/) (similar to pip). For more details visit the poetry website. 
- -#### Why use Poetry - -Poetry creates a dependency lock file similar to that of a [Ruby Gemfile](https://bundler.io/guides/gemfile.html#gemfiles) or a [Rust Cargo File](https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html). This lock file has exact dependencies that will be the same no matter when they are installed. Even if dependencyA has an update available the older pinned dependency will still be installed. The means that there will be less errors that are based on two users having different versions of python dependencies. - -#### Poetry Lock File - -In a Poetry project there are two files that determine and resolve the dependencies. The first is [pyproject.toml](../pyproject.toml). This file loosely tells poetry what dependencies and needed and the constraints of those dependencies. For example the following are all valid selections. - -1. `dependencyA = "1.0.0" # dependencyA can only ever be 1.0.0` -2. `dependencyA = "^1.0.0" # dependencyA can be any version greater than or equal to 1.0.0 and less than 2.0.0` -3. `dependencyA = "*" # dependencyA can be any version` - -The [poetry.lock](../poetry.lock) file has the exact package versions. This file is generated by poetry by running `poetry lock`. This file contains a pinned list of all transitive dependencies that satisfy the requirements in [pyproject.toml](../pyproject.toml). - -### `LIBDEPS` and the `LIBDEPS` Linter - -#### Why `LIBDEPS`? - -Libdeps is a subsystem within the build, which is centered around the LIBrary DEPendency graph. It tracks and maintains the dependency graph as well as lints, analyzes and provides useful metrics about the graph. - -#### Different `LIBDEPS` variable types - -The `LIBDEPS` variables are how the library relationships are defined within the build scripts. The primary variables are as follows: - -- `LIBDEPS`: - The 'public' type which propagates lower level dependencies onward automatically. 
-- `LIBDEPS_PRIVATE`: - Creates a dependency only between the target and the dependency. -- `LIBDEPS_INTERFACE`: - Same as `LIBDEPS` but excludes itself from the propagation onward. -- `LIBDEPS_DEPENDENTS`: - Creates a reverse `LIBDEPS_PRIVATE` dependency where the dependency is the one declaring the relationship. -- `PROGDEPS_DEPENDENTS`: - Same as `LIBDEPS_DEPENDENTS` but for use with Program builders. - -Libraries are added to these variables as lists per each SCons builder instance in the SConscripts depending on what type of relationship is needed. For more detailed information on theses types, refer to [`The LIBDEPS variables`](build_system_reference.md#the-libdeps-variables) - -#### The `LIBDEPS` lint rules and tags - -The libdeps subsystem is capable of linting and automatically detecting issues. Some of these linting rules are automatically checked during build-time (while the SConscripts are read and the build is performed) while others need to be manually run post-build (after the the generated graph file has been built). Some rules will include exemption tags which can be added to a libraries `LIBDEPS_TAGS` to override a rule for that library. - -The build-time linter also has a print option `--libdeps-linting=print` which will print all issues without failing the build and ignoring exemption tags. This is useful for getting an idea of what issues are currently outstanding. - -For a complete list of build-time lint rules, please refer to [`Build-time Libdeps Linter`](build_system_reference.md#build-time-libdeps-linter) - -#### `LIBDEPS_TAGS` - -`LIBDEPS_TAGS` can also be used to supply flags to the libdeps subsystem to do special handling for certain libraries such as exemptions or inclusions for linting rules and also SCons command line expansion functions. 
- -For a full list of tags refer to [`LIBDEPS_TAGS`](build_system_reference.md#libdeps_tags) - -#### Using the post-build LIBDEPS Linter - -To use the post-build tools, you must first build the libdeps dependency graph by building the `generate-libdeps-graph` target. - -You must also install the requirements file: - -``` -python3 -m poetry install --no-root --sync -E libdeps -``` - -After the graph file is created, it can be used as input into the `gacli` tool to perform linting and analysis on the complete dependency graph. The `gacli` tool has options for what types of analysis to perform. A complete list can be found using the `--help` option. Minimally, you can run the `gacli` tool by just passing the graph file you wish to analyze: - -``` -python3 buildscripts/libdeps/gacli.py --graph-file build/cached/libdeps/libdeps.graphml -``` - -Another tool which provides a graphical interface as well as visual representation of the graph is the graph visualizer. Minimally, it requires passing in a directory in which any files with the `.graphml` extension will be available for analysis. By default it will launch the web interface which is reachable in a web browser at http://localhost:3000. 
- -``` -python3 buildscripts/libdeps/graph_visualizer.py --graphml-dir build/opt/libdeps -``` - -For more information about the details of using the post-build linting tools refer to [`post-build linting and analysis`](build_system_reference.md#post-build-linting-and-analysis) - -### Debugging build system failures - -#### Using` -k` and `-n` - -#### `--debug=[explain, time, stacktrace]` - -#### `--libdeps-debug` diff --git a/docs/build_system_reference.md b/docs/build_system_reference.md deleted file mode 100644 index 4ad8048ca0a..00000000000 --- a/docs/build_system_reference.md +++ /dev/null @@ -1,525 +0,0 @@ -# MongoDB Build System Reference - -## MongoDB Build System Requirements - -### Recommended minimum requirements - -### Python modules - -### External libraries - -### Enterprise module requirements - -### Testing requirements - -## MongoDB customizations - -### SCons modules - -### Development tools - -#### Compilation database generator - -### Build tools - -#### IDL Compiler - -### Auxiliary tools - -#### Ninja generator - -#### Icecream tool - -#### ccache tool - -### LIBDEPS - -Libdeps is a subsystem within the build, which is centered around the LIBrary DEPendency graph. It tracks and maintains the dependency graph as well as lints, analyzes and provides useful metrics about the graph. - -#### Design - -The libdeps subsystem is divided into several stages, described in order of use as follows. - -##### SConscript `LIBDEPS` definitions and built time linting - -During the build, the SConscripts are read and all the library relationships are setup via the `LIBDEPS` variables. Some issues can be identified early during processing of the SConscripts via the build-time linter. Most of these will be style and usage issues which can be realized without needing the full graph. This component lives within the build and is executed through the SCons emitters added via the libdeps subsystem. 
- -##### Libdeps graph generation for post-build analysis - -For more advanced analysis and linting, a full graph is necessary. The build target `generate-libdeps-graph` builds all libdeps and things which use libdeps, and generates the graph to a file in graphml format. - -##### The libdeps analyzer python module - -The libdeps analyzer module is a python library which provides an Application Programming Interface (API) to analyze and lint the graph. The library internally leverages the networkx python module for the generic graph interfaces. - -##### The CLI and Visualizer tools - -The libdeps analyzer module is used in the libdeps Graph Analysis Command Line Interface (gacli) tool and the libdeps Graph Visualizer web service. Both tools read in the graph file generated from the build and provide the Human Machine Interface (HMI) for analysis and linting. - -#### The `LIBDEPS` variables - -The variables include several types of lists to be added to libraries per a SCons builder instance: - -| Variable              | Use                                    | -| --------------------- | -------------------------------------- | -| `LIBDEPS`             | transitive dependencies                | -| `LIBDEPS_PRIVATE`     | local dependencies                     | -| `LIBDEPS_INTERFACE`   | transitive dependencies excluding self | -| `LIBDEPS_DEPENDENTS`  | reverse dependencies                   | -| `PROGDEPS_DEPENDENTS` | reverse dependencies for Programs      | - -_`LIBDEPS`_ is the 'public' type, such that libraries that are added to this list become a dependency of the current library, and also become dependencies of libraries which may depend on the current library. This propagation also includes not just the libraries in the `LIBDEPS` list, but all `LIBDEPS` of those `LIBDEPS` recursively, meaning that all dependencies of the `LIBDEPS` libraries, also become dependencies of the current library and libraries which depend on it. - -_`LIBDEPS_PRIVATE`_ should be a list of libraries which creates dependencies only between the current library and the libraries in the list.
However, in static linking builds, this will behave the same as `LIBDEPS` due to the nature of static linking. - -_`LIBDEPS_INTERFACE`_ is very similar to `LIBDEPS`, however it does not create propagating dependencies for the libraries themselves in the `LIBDEPS_INTERFACE` list. Only the dependencies of those `LIBDEPS_INTERFACE` libraries are propagated forward. - -_`LIBDEPS_DEPENDENTS`_ are added to libraries which will force themselves as dependencies of the libraries in the supplied list. This is conceptually a reverse dependency, where the library which is a dependency is the one declaring itself as the dependency of some other library. By default this creates a `LIBDEPS_PRIVATE` like relationship, but a tuple can be used to force it to a `LIBDEPS` like or other relationship. - -_`PROGDEPS_DEPENDENTS`_ are the same as `LIBDEPS_DEPENDENTS`, but intended for use only with Program builders. - -#### `LIBDEPS_TAGS` - -The `LIBDEPS_TAGS` variable is used to mark certain libdeps for various reasons. Some `LIBDEPS_TAGS` are used to mark certain libraries for `LIBDEPS_TAG_EXPANSIONS` variable which is used to create a function which can expand to a string on the command line. 
Below is a table of available `LIBDEPS` tags: - -| Tag | Description | -| ------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------- | ------------------------------------------------------------------------------------- | -| `illegal_cyclic_or_unresolved_dependencies_allowlisted` | SCons subst expansion tag to handle dependency cycles | -| `init-no-global-side-effects` | SCons subst expansion tag for causing linkers to avoid pulling in all symbols | -| `lint-public-dep-allowed` | Linting exemption tag exempting the `lint-no-public-deps` tag | -| `lint-no-public-deps` | Linting inclusion tag ensuring a libdep has no `LIBDEPS` declared | -| `lint-allow-non-alphabetic` | Linting exemption tag allowing `LIBDEPS` variable lists to be non-alphabetic | -| `lint-leaf-node-allowed-dep` | Linting exemption tag exempting the `lint-leaf-node-no-deps` tag | -| `lint-leaf-node-no-deps` | Linting inclusion tag ensuring a libdep has no libdeps and is a leaf node | -| `lint-allow-nonlist-libdeps` | Linting exemption tag allowing a `LIBDEPS` variable to not be a list | `lint-allow-bidirectional-edges` | Linting exemption tag allowing reverse dependencies to also be a forward dependencies | -| `lint-allow-nonprivate-on-deps-dependents` | Linting exemption tag allowing reverse dependencies to be transitive | -| `lint-allow-dup-libdeps` | Linting exemption tag allowing `LIBDEPS` variables to contain duplicate libdeps on a given library | -| `lint-allow-program-links-private` | Linting exemption tag allowing `Program`s to have `PRIVATE_LIBDEPS` | - -##### The `illegal_cyclic_or_unresolved_dependencies_allowlisted` tag - -This tag should not be used anymore because the library dependency graph has been successfully converted to a Directed Acyclic Graph (DAG). 
Prior to this accomplishment, it was necessary to handle -cycles specifically with platform specific options on the command line. - -##### The `init-no-global-side-effects` tag - -Adding this flag to a library turns on platform specific compiler flags which will cause the linker to pull in just the symbols it needs. Note that by default, the build is configured to pull in all symbols from libraries because of the use of static initializers, however if a library is known to not have any of these initializers, then this flag can be added for some performance improvement. - -#### Linting and linter tags - -The libdeps linter features automatically detect certain classes of LIBDEPS usage errors. The libdeps linters are implemented as build-time linting and post-build linting procedures to maintain order in usage of the libdeps tool and the build’s library dependency graph. You will need to comply with the rules enforced by the libdeps linter, and fix issues that it raises when modifying the build scripts. There are exemption tags to prevent the linter from blocking things, however these exemption tags should only be used in extraordinary cases, and with good reason. A goal of the libdeps linter is to drive and maintain the number of exemption tags in use to zero. - -##### Exemption Tags - -There are a number of existing issues that need to be addressed, but they will be addressed in future tickets. In the meantime, the use of specific strings in the LIBDEPS_TAGS variable can allow the libdeps linter to skip certain issues on given libraries. For example, to have the linter skip enforcement of the lint rule against bidirectional edges for "some_library": - -``` -env.Library( - target=’some_library’ - ... - LIBDEPS_TAGS=[‘lint-allow-bidirectional-edges’] -) -``` - -#### build-time Libdeps Linter - -If there is a build-time issue, the build will fail until it is addressed. 
This linting feature will be on by default and takes about half a second to complete in a full enterprise build (at the time of writing this), but can be turned off by using the --libdeps-linting=off option on your SCons invocation. - -The current rules and there exemptions are listed below: - -1. **A 'Program' can not link a non-public dependency, it can only have LIBDEPS links.** - - ###### Example - - ``` - env.Program( - target=’some_program’, - ... - LIBDEPS=[‘lib1’], # OK - LIBDEPS_PRIVATE=[‘lib2’], # This is a Program, BAD - ) - ``` - - ###### Rationale - - A Program can not be linked into anything else, and there for the transitiveness does not apply. A default value of LIBDEPS was selected for consistency since most Program's were already doing this at the time the rule was created. - - ###### Exemption - - 'lint-allow-program-links-private' on the target node - - ###### - -2. **A 'Node' can only directly link a given library once.** - - ###### Example - - ``` - env.Library( - target=’some_library’, - ... - LIBDEPS=[‘lib1’], # Linked once, OK - LIBDEPS_PRIVATE=[‘lib1’], # Also linked in LIBDEPS, BAD - LIBDEPS_INTERFACE=[‘lib2’, 'lib2'], # Linked twice, BAD - ) - ``` - - ###### Rationale - - Libdeps will ignore duplicate links, so this rule is mostly for consistency and neatness in the build scripts. - - ###### Exemption - - 'lint-allow-dup-libdeps' on the target node - - ###### - -3. **A 'Node' which uses LIBDEPS_DEPENDENTS or PROGDEPS_DEPENDENTS can only have LIBDEPS_PRIVATE links.** - - ###### Example - - ``` - env.Library( - target=’some_library’, - ... - LIBDEPS_DEPENDENTS=['lib3'], - LIBDEPS=[‘lib1’], # LIBDEPS_DEPENDENTS is in use, BAD - LIBDEPS_PRIVATE=[‘lib2’], # OK - ) - ``` - - ###### Rationale - - The node that the library is using LIBDEPS_DEPENDENTS or PROGDEPS_DEPENDENT to inject its dependency onto should be conditional, therefore there should not be transitiveness for that dependency since it cannot be the source of any resolved symbols. 
- - ###### Exemption - - 'lint-allow-nonprivate-on-deps-dependents' on the target node - - ###### - -4. **A 'Node' can not link directly to a library that uses LIBDEPS_DEPENDENTS or PROGDEPS_DEPENDENTS.** - - ###### Example - - ``` - env.Library( - target='other_library', - ... - LIBDEPS=['lib1'], # BAD, 'lib1' has LIBDEPS_DEPENDENTS - - env.Library( - target=’lib1’, - ... - LIBDEPS_DEPENDENTS=['lib3'], - ) - ``` - - ###### Rationale - - A library that is using LIBDEPS_DEPENDENTS or PROGDEPS_DEPENDENT should only be used for reverse dependency edges. If a node does need to link directly to a library that does have reverse dependency edges, that indicates the library should be split into two separate libraries, containing its direct dependency content and its conditional reverse dependency content. - - ###### Exemption - - 'lint-allow-bidirectional-edges' on the target node - - ###### - -5. **All libdeps environment vars must be assigned as lists.** - - ###### Example - - ``` - env.Library( - target='some_library', - ... - LIBDEPS='lib1', # not a list, BAD - LIBDEPS_PRIVATE=['lib2'], # OK - ) - ``` - - ###### Rationale - - Libdeps will handle non-list environment variables, so this is more for consistency and neatness in the build scripts. - - ###### Exemption - - 'lint-allow-nonlist-libdeps' on the target node - - ###### - -6. **Libdeps with the tag 'lint-leaf-node-no-deps' shall not link any libdeps.** - - ###### Example - - ``` - env.Library( - target='lib2', - ... - LIBDEPS_TAGS=[ - 'lint-leaf-node-allowed-dep' - ] - ) - - env.Library( - target='some_library', - ... - LIBDEPS=['lib1'], # BAD, should have no LIBDEPS - LIBDEPS_PRIVATE=['lib2'], # OK, has exemption tag - LIBDEPS_TAGS=[ - 'lint-leaf-node-no-deps' - ] - ) - ``` - - ###### Rationale - - The special tag allows certain nodes to be marked and programmatically checked that they remain lead nodes. An example use-case is when we want to make sure certain nodes never link mongodb code. 
- - ###### Exemption - - 'lint-leaf-node-allowed-dep' on the exempted libdep - - ###### Inclusion - - 'lint-leaf-node-no-deps' on the target node - - ###### - -7. **Libdeps with the tag 'lint-no-public-deps' shall not link any libdeps.** - - ###### Example - - ``` - env.Library( - target='lib2', - ... - LIBDEPS_TAGS=[ - 'lint-public-dep-allowed' - ] - ) - - env.Library( - target='some_library', - ... - LIBDEPS=[ - 'lib1' # BAD - 'lib2' # OK, has exemption tag - ], - LIBDEPS_TAGS=[ - 'lint-no-public-deps' - ] - ) - ``` - - ###### Rationale - - The special tag allows certain nodes to be marked and programmatically checked that they do not link publicly. Some nodes such as mongod_main have special requirements that this programmatically checks. - - ###### Exemption - - 'lint-public-dep-allowed' on the exempted libdep - - ###### Inclusion - - 'lint-no-public-deps' on the target node - - ###### - -8. **Libdeps shall be sorted alphabetically in LIBDEPS lists in the SCons files.** - - ###### Example - - ``` - env.Library( - target='lib2', - ... - LIBDEPS=[ - '$BUILD/mongo/db/d', # OK, $ comes before c - 'c', # OK, c comes before s - 'src/a', # BAD, s should be after b - 'b', # BAD, b should be before c - ] - ) - ``` - - ###### Rationale - - Keeping the SCons files neat and ordered allows for easier Code Review diffs and generally better maintainability. - - ###### Exemption - - 'lint-allow-non-alphabetic' on the exempted libdep - - ###### - -##### The build-time print Option - -The libdeps linter also has the `--libdeps-linting=print` option which will perform linting, and instead of failing the build on an issue, just print and continue on. It will also ignore exemption tags, and still print the issue because it will not fail the build. This is a good way to see the entirety of existing issues that are exempted by tags, as well as printing other metrics such as time spent linting. 
- -#### post-build linting and analysis - -The dependency graph can be analyzed post-build by leveraging the completeness of the graph to perform more extensive analysis. You will need to install the libdeps requirements file to python when attempting to use the post-build analysis tools: - -``` -python3 -m poetry install --no-root --sync -E libdeps -``` - -The command line interface tool (gacli) has a comprehensive help text which will describe the available analysis options and interface. The visualizer tool includes a GUI which displays the available analysis options graphically. These tools will be briefly covered in the following sections. - -##### Generating the graph file - -To generate the full graph, build the target `generate-libdeps-graph`. This will build all things involving libdeps and construct a graphml file representing the library dependency graph. The graph can be used in the command line interface tool or the visualizer web service tool. The minimal set of required SCons arguments to build the graph file is shown below: - -``` -python3 buildscripts/scons.py --link-model=dynamic --build-tools=next generate-libdeps-graph --linker=gold --modules= -``` - -The graph file by default will be generated to `build/opt/libdeps/libdeps.graphml` (where `build/opt` is the `$BUILD_DIR`). - -##### General libdeps analyzer API usage - -Below is a basic example of usage of the libdeps analyzer API: - -``` -import libdeps - -libdeps_graph = libdeps.graph.load_libdeps_graph('path/to/libdeps.graphml') - -list_of_analysis_to_run = [ - libdeps.analyzer.NodeCounter(libdeps_graph), - libdeps.analyzer.DirectDependencies(libdeps_graph, node='path/to/library'), -] - -analysis_results = libdeps.graph.LibdepsGraphAnalysis(list_of_analysis_to_run) -libdeps.analyzer.GaPrettyPrinter(analysis_results).print() -``` - -Walking through this example, first the graph is loaded from file. Then a list of desired Analyzer instances is created.
Some example analyzer classes are instantiated in the example above, but there are many others to choose from. Specific Analyzers have different interfaces and should be supplied an argument list corresponding to that analyzer. - -_Note:_ The graph file will contain the build dir that the graph data was created with and it expects all node arguments to be relative to the build dir. If you are using the libdeps module generically in some app, you can extract the build dir from the libdeps graph and append it to any generic library path. - -Once the list of analyzers is created, they can be used to create a LibdepsGraphAnalysis instance, which will upon instantiation, run the analysis list provided. Once the instance is created, it contains the results, and optionally can be fed into different printer classes. In this case, a human readable format printer called GaPrettyPrinter is used to print to the console. - -##### Using the gacli tool - -The command line interface tool can be used from the command line to run analysis on a given graph. The only required argument is the graph file. The default with no args will run all the counters and linters on the graph. Here is an example output: - -``` -(venv) Apr.20 02:46 ubuntu[mongo]: python buildscripts/libdeps/gacli.py --graph-file build/cached/libdeps/libdeps.graphml -Loading graph data...Loaded! 
- - -Graph built from git hash: -1358cdc6ff0e53e4f4c01ea0e6fcf544fa7e1672 - -Graph Schema version: -2 - -Build invocation: -"/home/ubuntu/venv/bin/python" "buildscripts/scons.py" "--variables-files=etc/scons/mongodbtoolchain_stable_gcc.vars" "--cache=all" "--cache-dir=/home/ubuntu/scons-cache" "--link-model=dynamic" "--build-tools=next" "ICECC=icecc" "CCACHE=ccache" "-j200" "--cache-signature-mode=validate" "--cache-debug=-" "generate-libdeps-graph" - -Nodes in Graph: 867 -Edges in Graph: 90706 -Direct Edges in Graph: 5948 -Transitive Edges in Graph: 84758 -Direct Public Edges in Graph: 3483 -Public Edges in Graph: 88241 -Private Edges in Graph: 2440 -Interface Edges in Graph: 25 -Shim Nodes in Graph: 20 -Program Nodes in Graph: 136 -Library Nodes in Graph: 731 - -LibdepsLinter: PUBLIC libdeps that could be PRIVATE: 0 -``` - -Use the `--help` option to see detailing information about all the available options. - -##### Using the graph visualizer Tool - -The graph visualizer tools starts up a web service to provide a frontend GUI for navigating and examining the graph files. The Visualizer uses a Python Flask backend and React/Redux Javascript frontend. - -For installing the dependencies for the frontend, you will need node >= 12.0.0 and npm installed and in the PATH. To install the dependencies navigate to directory where package.json lives, and run: - -``` -cd buildscripts/libdeps/graph_visualizer_web_stack && npm install -``` - -Alternatively if you are on linux, you can use the setup_node_env.sh script to automatically download node 12 and npm, setup the local environment and install the dependencies. Run the command: - -``` -source buildscripts/libdeps/graph_visualizer_web_stack/setup_node_env.sh install -``` - -Assuming you are on a remote workstation and using defaults, you will need to make ssh tunnels to the web service to access the service in your local browser. 
The frontend and backend both use a port (this case 3000 is the frontend and 5000 is the backend), and the default host is localhost, so you will need to open two tunnels so the frontend running in your local web browser can communicate with the backend. If you are using the default host and port the tunnel command will look like this: - -``` -ssh -L 3000:localhost:3000 -L 5000:localhost:5000 ubuntu@workstation.hostname -``` - -Next we need to start the web service. It will require you to pass a directory where it will search for `.graphml` files which contain the graph data for various commits: - -``` -python3 buildscripts/libdeps/graph_visualizer.py --graphml-dir build/opt/libdeps -``` - -The script will launch the backend and then build the optimized production frontend and launch it. You can supply the `--debug` argument to work in development load which starts up much faster and allows real time updates as files are modified, with a small cost to performance on the frontend. Other options allow more configuration and can be viewed in the `--help` text. - -After the server has started up, it should notify you via the terminal that you can access it at http://localhost:3000 locally in your browser. - -## Build system configuration - -### SCons configuration - -#### Frequently used flags and variables - -### MongoDB build configuration - -#### Frequently used flags and variables - -##### `MONGO_GIT_HASH` - -The `MONGO_GIT_HASH` SCons variable controls the value of the git hash -which will be interpolated into the build to identify the commit -currently being built. If not overridden, this defaults to the git -hash of the current commit. - -##### `MONGO_VERSION` - -The `MONGO_VERSION` SCons variable controls the value which will be -interpolated into the build to identify the version of the software -currently being built. If not overridden, this defaults to the result -of `git describe`, which will use the local tags to derive a version. 
- -### Targets and Aliases - -## Build artifacts and installation - -### Hygienic builds - -### AutoInstall - -### AutoArchive - -## MongoDB SCons style guide - -### Sconscript Formatting Guidelines - -#### Vertical list style - -#### Alphabetize everything - -### `Environment` Isolation - -### Declaring Targets (`Program`, `Library`, and `CppUnitTest`) - -### Invoking external tools correctly with `Command`s - -### Customizing an `Environment` for a target - -### Invoking subordinate `SConscript`s - -#### `Import`s and `Export`s - -### A Model `SConscript` with Comments diff --git a/docs/building.md b/docs/building.md index 17c33af148f..6d1e96e775b 100644 --- a/docs/building.md +++ b/docs/building.md @@ -7,8 +7,9 @@ way to get started, rather than building from source. To build MongoDB, you will need: - A modern C++ compiler capable of compiling C++20. One of the following is required: - - GCC 11.3 or newer - - Clang 12.0 (or Apple XCode 13.0 Clang) or newer + - GCC 14.2 or newer + - Clang 19.1 or newer + - Apple XCode 14 or newer - Visual Studio 2022 version 17.0 or newer (See Windows section below for details) - On Linux and macOS, the libcurl library and header is required. MacOS includes libcurl. - Fedora/RHEL - `dnf install libcurl-devel` @@ -18,8 +19,7 @@ To build MongoDB, you will need: - `libcurl4-gnutls-dev` - On Ubuntu, the lzma library is required. Install `liblzma-dev` - On Amazon Linux, the xz-devel library is required. `yum install xz-devel` -- Python 3.10.x and Pip modules: - - See the section "Python Prerequisites" below. +- Python 3.10 - About 13 GB of free disk space for the core binaries (`mongod`, `mongos`, and `mongo`) and about 600 GB for the install-all target. @@ -28,112 +28,55 @@ and x86-64. More detailed platform instructions can be found below. ## Quick (re)Start -### Linux Workstation +### Linux ```bash -git clean -fdx # USE WITH CAUTION! 
Delete every untracked file including .gitignored files (this is basically everything) -# Close and reopen your terminal to clear out any problems from your old environment -python3 --version # Should be 3.10. If it is not, follow [online instructions](https://www.python.org/downloads/) to install python 3.10. -python3 -m venv python3-venv --prompt mongo # Create a virtual environment. "python3-venv" is non standard but it is kept since it is assumed elsewhere in our code. -source python3-venv/bin/activate # You should see a (mongo) appear in your terminal -which python3 # This should point to the python in python3-venv - -# It is also non standard to install poetry into its own virtual environment. -# However, the idea is to make even fewer unpinned dependencies. -# Install poetry 2.0.0 into the virtual env, then install all -# required python dependencies to build and test. -buildscripts/poetry_sync.sh - -python3 buildscripts/scons.py --build-profile=opt -ninja -f opt.ninja -j 200 install-devcore +python buildscripts/install_bazel.py +export PATH=~/.local/bin:$PATH +bazel build install-dist-test +bazel-bin/install/mongod --version ``` -## Python Prerequisites - -In order to build MongoDB, Python 3.10 is required. Newer and older versions of Python are untested -and are known to cause build failures. Python 3.10 can be installed using Homebrew, MacPorts or -similar. - -Several Python modules must be installed and these Python dependencies are managed with Poetry. -You can see other install instructions for poetry by reading this [install guide](https://python-poetry.org/). - -Installing the requirements inside a python3 based virtualenv -dedicated to building MongoDB is optional but recommended. 
- - $ python3 -m venv --prompt mongo # Optional (venv_path can be a path of your choice) - $ source /bin/activate # Optional (might be slightly different based on your shell) - $ buildscripts/poetry_sync.sh - -Note: In order to compile C-based Python modules, you'll also need the -Python and OpenSSL C headers. Run: - -- Fedora/RHEL - `dnf install python3-devel openssl-devel` -- Ubuntu (20.04 and newer)/Debian (Bullseye and newer) - `apt install python-dev-is-python3 libssl-dev` -- Ubuntu (18.04 and older)/Debian (Buster and older) - `apt install python3.7-dev libssl-dev` - -Note: If you are running poetry manually and seeing errors involving "Prompt -dismissed.." you might need to run the following command before poetry install -(`buildscripts/poetry-sync.sh` does this internally). - - $ export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring - -## SCons +## Bazel If you only want to build the database server `mongod`: - $ python3 buildscripts/scons.py install-mongod + $ bazel build install-mongod **_Note_**: For C++ compilers that are newer than the supported version, the compiler may issue new warnings that cause MongoDB to fail to build since the build system treats compiler warnings as errors. To ignore the warnings, pass the switch -`--disable-warnings-as-errors` to scons. +`--disable_warnings_as_errors=True` to the bazel command. - $ python3 buildscripts/scons.py install-mongod --disable-warnings-as-errors - -**_Note_**: On memory-constrained systems, you may run into an error such as `g++: fatal error: Killed signal terminated program cc1plus`. To use less memory during building, pass the parameter `-j1` to scons. This can be incremented to `-j2`, `-j3`, and higher as appropriate to find the fastest working option on your system. 
- - $ python3 buildscripts/scons.py install-mongod -j1 - -To install `mongod` directly to `/opt/mongo` - - $ python3 buildscripts/scons.py DESTDIR=/opt/mongo install-mongod - -To create an installation tree of the servers in `/tmp/unpriv` that -can later be copied to `/usr/priv` - - $ python3 buildscripts/scons.py DESTDIR=/tmp/unpriv PREFIX=/usr/priv install-servers + $ bazel build install-mongod --disable_warnings_as_errors=True If you want to build absolutely everything (`mongod`, `mongo`, unit tests, etc): - $ python3 buildscripts/scons.py install-all-meta + $ bazel build --build_tag_filters=mongo_binary //src/mongo/... -## SCons Targets +## Bazel Targets -The following targets can be named on the scons command line to build and +The following targets can be named on the bazel command line to build and install a subset of components: - `install-mongod` - `install-mongos` - `install-core` (includes _only_ `mongod` and `mongos`) -- `install-servers` (includes all server components) +- `install-dist` (includes all server components) - `install-devcore` (includes `mongod`, `mongos`, and `jstestshell` (formerly `mongo` shell)) -- `install-all` (includes a complete end-user distribution and tests) -- `install-all-meta` (absolutely everything that can be built and installed) -**_NOTE_**: The `install-core` and `install-servers` targets are _not_ +**_NOTE_**: The `install-core` and `install-dist` targets are _not_ guaranteed to be identical. The `install-core` target will only ever include a -minimal set of "core" server components, while `install-servers` is intended +minimal set of "core" server components, while `install-dist` is intended for a functional end-user installation. If you are testing, you should use the -`install-core` or `install-devcore` targets instead. +`install-devcore` or `install-dist` targets instead. ## Where to find Binaries -The build system will produce an installation tree into -`$DESTDIR/$PREFIX`. 
`DESTDIR` by default is `build/install` while -`PREFIX` is by default empty. This means that with all of the listed -targets all built binaries will be in `build/install/bin` by default. +The build system will produce an installation tree into `bazel-bin/install`, as well as +individual install target trees like `bazel-bin/`. ## Windows @@ -152,22 +95,4 @@ To install dependencies on Debian or Ubuntu systems: ## OS X -Install Xcode 13.0 or newer. - -## FreeBSD - -Install the following ports: - -- `devel/libexecinfo` - `lang/llvm70` - `lang/python` - -Add `CC=clang12 CXX=clang++12` to the `scons` options, when building. - -## OpenBSD - -Install the following ports: - -- `devel/libexecinfo` -- `lang/gcc` -- `lang/python` +Install Xcode 14.0 or newer. diff --git a/docs/libfuzzer.md b/docs/libfuzzer.md index 77adf8347ee..2dd32c92e4d 100644 --- a/docs/libfuzzer.md +++ b/docs/libfuzzer.md @@ -45,18 +45,21 @@ happens! As just a few ideas: - You could tease out individual bytes from `Data` and provide them as different arguments to the function under test. -Finally, your cpp file will need a SCons target. There is a method which +Finally, your cpp file will need a bazel target. There is a method which defines fuzzer targets, much like how we define unittests. For example: ```python - env.CppLibfuzzerTest( - target='op_msg_fuzzer', - source=[ + mongo_cc_fuzzer_test( + name = 'op_msg_fuzzer', + srcs = [ 'op_msg_fuzzer.cpp', ], - LIBDEPS=[ - '$BUILD_DIR/mongo/base', - 'op_msg_fuzzer_fixture', + hdrs = [ + 'op_msg_fuzzer.h', + ], + deps = [ + '//src/mongo:base', + ':op_msg_fuzzer_fixture', ], ) ``` @@ -70,7 +73,7 @@ variant, whose name will include the string "FUZZER", which will compile and run all of the fuzzer tests. The fuzzers can be built locally, for development and debugging. Check -our Evergreen configuration for the current bazel arguments.
LibFuzzer binaries will accept a path to a directory containing its "corpus". A corpus is a list of examples known to produce interesting diff --git a/docs/packaging.md b/docs/packaging.md index f6c66ac7614..3eb6eb04bfa 100644 --- a/docs/packaging.md +++ b/docs/packaging.md @@ -5,7 +5,7 @@ sequenceDiagram participant e as Evergreen participant osfs as Obtain SBOM from Silk participant silk as Silk -participant scons as SCons +participant bazel as Bazel participant p as Packager participant s3 as S3 participant curator as Curator @@ -16,9 +16,9 @@ e ->> osfs: Invoke script osfs ->> silk: Query for SBOM silk ->> osfs: Return SBOM osfs ->> e: Return SBOM -e ->> scons: Invoke build (including SCons) -scons ->> scons: Build distribution tarball (including SBOM) -scons ->> e: Return distribution tarball +e ->> bazel: Invoke build (including Bazel) +bazel ->> bazel: Build distribution tarball (including SBOM) +bazel ->> e: Return distribution tarball e ->> p: Invoke packager p ->> p: Build local package p ->> s3: Upload package diff --git a/etc/evergreen_yml_components/configuration.yml b/etc/evergreen_yml_components/configuration.yml index 2274c79527f..08638360528 100644 --- a/etc/evergreen_yml_components/configuration.yml +++ b/etc/evergreen_yml_components/configuration.yml @@ -15,7 +15,7 @@ stepback: true ## Parameters for parameterized builds (see https://github.com/evergreen-ci/evergreen/wiki/Parameterized-Builds) parameters: - key: patch_compile_flags - description: "Additional SCons flags to be applied during scons compile invocations in this patch" + description: "Additional bazel flags to be applied during bazel compile invocations in this patch" - key: future_git_tag description: "Future git tag to be added. If empty, we will use the most recent git tag instead." 
@@ -116,7 +116,6 @@ post: vars: files: >- src/resmoke_error_code - src/build/scons/config.log src/*.gcda.gcov src/gcov-intermediate-files.tgz src/*.core src/*.mdmp src/*.core.gz src/*.mdmp.gz diff --git a/etc/evergreen_yml_components/definitions.yml b/etc/evergreen_yml_components/definitions.yml index 6f56d64163b..ce830a8a813 100644 --- a/etc/evergreen_yml_components/definitions.yml +++ b/etc/evergreen_yml_components/definitions.yml @@ -488,15 +488,6 @@ functions: args: - "./src/evergreen/functions/credentials_setup.sh" - "set up win mount script": &set_up_win_mount_script - command: subprocess.exec - display_name: "win mount script setup" - params: - binary: bash - silent: true - args: - - "./src/evergreen/functions/win_mount_script_setup.sh" - "set up notary client credentials": - *f_expansions_write - command: subprocess.exec @@ -655,17 +646,6 @@ functions: args: - "src/evergreen/functions/venv_setup.sh" - # This needs to be run after "set up venv" - # This depends on having a venv already setup - # This just installs the extra deps needed for libdeps - "set up libdeps venv": - command: subprocess.exec - display_name: "set up libdeps venv" - params: - binary: bash - args: - - "src/evergreen/libdeps_setup.sh" - "upload pip requirements": &upload_pip_requirements command: s3.put display_name: "upload pip requirements" @@ -2546,102 +2526,6 @@ functions: params: file_location: ${report_file|src/report.json} - "combine build metrics": - - *f_expansions_write - - command: subprocess.exec - params: - binary: bash - add_expansions_to_env: true - args: - - "src/evergreen/run_python_script.sh" - - "site_scons/site_tools/build_metrics/combine_metrics_unittest.py" - - *f_expansions_write - - command: subprocess.exec - params: - binary: bash - add_expansions_to_env: true - args: - - "src/evergreen/run_python_script.sh" - - "site_scons/site_tools/build_metrics/combine_metrics.py" - - "--prefix-name=build_metrics" - - "--prefix-name=populate_cache" - - 
"--prefix-name=pull_cache" - - "print top N metrics": - - *f_expansions_write - - command: subprocess.exec - params: - binary: bash - add_expansions_to_env: true - args: - - "src/evergreen/run_python_script.sh" - - "site_scons/site_tools/build_metrics/top_n_metrics.py" - - "--input=build_metrics.json" - - "--output=top_15_metrics.txt" - - "--num=15" - - - command: s3.put - params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} - local_file: src/top_15_metrics.txt - remote_file: ${project}/${build_variant}/${revision}/${build_id}-${task_name}-${execution}-top_15_metrics.txt - bucket: mciuploads - permissions: public-read - content_type: text/plain - display_name: Top 15 Metrics - - "attach build metrics": - - command: archive.targz_pack - params: - target: build-metrics.tgz - source_dir: src - include: - - "./build_metrics.json" - - "./populate_cache.json" - - "./pull_cache.json" - - - command: s3.put - params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} - local_file: build-metrics.tgz - remote_file: ${project}/${build_variant}/${revision}/${build_id}-${task_name}-${execution}-build-metrics.tgz - bucket: mciuploads - permissions: public-read - content_type: application/gzip - display_name: Metrics JSON - - - *f_expansions_write - - command: subprocess.exec - params: - binary: bash - add_expansions_to_env: true - args: - - "src/evergreen/run_python_script.sh" - - "evergreen/build_metric_cedar_report.py" - - "--build-metrics=build_metrics.json" - - "--cache-push-metrics=populate_cache.json" - - "--cache-pull-metrics=pull_cache.json" - - - command: archive.targz_pack - params: - target: build_metrics_cedar_report.tgz - source_dir: src - include: - - "./build_metrics_cedar_report.json" - - - command: s3.put - params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} - local_file: build_metrics_cedar_report.tgz - remote_file: ${project}/${build_variant}/${revision}/${build_id}-${task_name}-${execution}-build_metrics_cedar_report.tgz - bucket: mciuploads - 
permissions: public-read - content_type: application/gzip - display_name: Cedar Report JSON - "attach artifacts": command: attach.artifacts params: @@ -2887,8 +2771,6 @@ functions: - *f_expansions_write - *use_wiredtiger_develop - *f_expansions_write - - *set_up_win_mount_script - - *f_expansions_write - *set_task_expansion_macros - *f_expansions_write - *get_engflow_key diff --git a/etc/evergreen_yml_components/tasks/compile_tasks.yml b/etc/evergreen_yml_components/tasks/compile_tasks.yml index dd4cb9b643d..67ba9f404e3 100644 --- a/etc/evergreen_yml_components/tasks/compile_tasks.yml +++ b/etc/evergreen_yml_components/tasks/compile_tasks.yml @@ -59,7 +59,6 @@ variables: vars: files: >- src/resmoke_error_code - src/build/scons/config.log src/*.gcda.gcov src/gcov-intermediate-files.tgz src/*.core src/*.mdmp src/*.core.gz src/*.mdmp.gz @@ -93,8 +92,6 @@ variables: - func: "f_expansions_write" - func: "use WiredTiger develop" # noop if ${use_wt_develop} is not "true" - func: "f_expansions_write" - - func: "set up win mount script" - - func: "f_expansions_write" teardown_group: - func: "f_expansions_write" - func: "cleanup environment" @@ -1802,8 +1799,6 @@ task_groups: - func: "set up venv" - func: "upload pip requirements" - func: "get buildnumber" - - func: "f_expansions_write" - - func: "set up win mount script" max_hosts: 1 tasks: - "crypt_create_debug_lib" diff --git a/etc/evergreen_yml_components/tasks/compile_tasks_shared.yml b/etc/evergreen_yml_components/tasks/compile_tasks_shared.yml index 1e46907691d..941c174b5db 100644 --- a/etc/evergreen_yml_components/tasks/compile_tasks_shared.yml +++ b/etc/evergreen_yml_components/tasks/compile_tasks_shared.yml @@ -35,7 +35,6 @@ variables: vars: files: >- src/resmoke_error_code - src/build/scons/config.log src/*.gcda.gcov src/gcov-intermediate-files.tgz src/*.core src/*.mdmp src/*.core.gz src/*.mdmp.gz @@ -69,8 +68,6 @@ variables: - func: "f_expansions_write" - func: "use WiredTiger develop" # noop if ${use_wt_develop} 
is not "true" - func: "f_expansions_write" - - func: "set up win mount script" - - func: "f_expansions_write" teardown_group: - func: "f_expansions_write" - func: "cleanup environment" @@ -171,7 +168,7 @@ tasks: - "src/etc/evergreen.yml" - "src/etc/evergreen_yml_components/**" - "src/etc/repo_config.yaml" - - "src/etc/scons/**" + - "src/bazel/**" - "src/etc/macos_dev_entitlements.xml" - "src/docker_compose/**" - "src/buildscripts/**" diff --git a/etc/evergreen_yml_components/tasks/misc_tasks.yml b/etc/evergreen_yml_components/tasks/misc_tasks.yml index 268de136847..d2381ab37d5 100644 --- a/etc/evergreen_yml_components/tasks/misc_tasks.yml +++ b/etc/evergreen_yml_components/tasks/misc_tasks.yml @@ -429,22 +429,6 @@ tasks: - func: "upload pip requirements" - func: "run idl tests" - - name: iwyu_self_test - tags: - [ - "assigned_to_jira_team_devprod_build", - "development_critical_single_variant", - ] - commands: - - func: "do bazel setup" - - func: "f_expansions_write" - - command: subprocess.exec - params: - binary: bash - args: - - "src/evergreen/run_python_script.sh" - - "buildscripts/iwyu/test/run_tests.py" - - <<: *run_jepsen_template name: jepsen_config_fuzzer_list-append tags: ["assigned_to_jira_team_server_repl", "experimental", "jepsen_docker"] diff --git a/etc/evergreen_yml_components/tasks/resmoke/server_divisions/clusters_and_integrations/tasks.yml b/etc/evergreen_yml_components/tasks/resmoke/server_divisions/clusters_and_integrations/tasks.yml index a972ad72a6a..c55617a7098 100644 --- a/etc/evergreen_yml_components/tasks/resmoke/server_divisions/clusters_and_integrations/tasks.yml +++ b/etc/evergreen_yml_components/tasks/resmoke/server_divisions/clusters_and_integrations/tasks.yml @@ -149,7 +149,6 @@ variables: vars: files: >- src/resmoke_error_code - src/build/scons/config.log src/*.gcda.gcov src/gcov-intermediate-files.tgz src/*.core src/*.mdmp src/*.core.gz src/*.mdmp.gz @@ -183,8 +182,6 @@ variables: - func: "f_expansions_write" - func: "use 
WiredTiger develop" # noop if ${use_wt_develop} is not "true" - func: "f_expansions_write" - - func: "set up win mount script" - - func: "f_expansions_write" teardown_group: - func: "f_expansions_write" - func: "cleanup environment" diff --git a/etc/evergreen_yml_components/variants/amazon/test_dev.yml b/etc/evergreen_yml_components/variants/amazon/test_dev.yml index 00ea268a938..c8fcb1202dc 100644 --- a/etc/evergreen_yml_components/variants/amazon/test_dev.yml +++ b/etc/evergreen_yml_components/variants/amazon/test_dev.yml @@ -207,8 +207,8 @@ buildvariants: - name: .commit_check create_check_run: path_to_outputs: "github_annotations.json" - - name: .development_critical_single_variant !.requires_large_host !.clang_tidy !libdeps_graph_linting - - name: .development_critical_single_variant .requires_large_host !.clang_tidy !libdeps_graph_linting + - name: .development_critical_single_variant !.requires_large_host !.clang_tidy + - name: .development_critical_single_variant .requires_large_host !.clang_tidy distros: - amazon2023-arm64-latest-large - name: .development_critical !.requires_large_host_commit_queue diff --git a/etc/evergreen_yml_components/variants/amazon/test_dev_master_branch_only.yml b/etc/evergreen_yml_components/variants/amazon/test_dev_master_branch_only.yml index ab2b5d6fb92..503fe8a413d 100644 --- a/etc/evergreen_yml_components/variants/amazon/test_dev_master_branch_only.yml +++ b/etc/evergreen_yml_components/variants/amazon/test_dev_master_branch_only.yml @@ -214,8 +214,6 @@ buildvariants: cron: "0 0 * * *" # Run once a day. 
expansions: <<: *amazon_linux2023_arm64_dynamic_expansions - scons_cache_scope: shared - scons_cache_mode: all has_packages: false jstestfuzz_num_generated_files: 40 jstestfuzz_concurrent_num_files: 10 diff --git a/etc/evergreen_yml_components/variants/amazon/test_release.yml b/etc/evergreen_yml_components/variants/amazon/test_release.yml index a00fd6a4f10..e0447ced0e0 100644 --- a/etc/evergreen_yml_components/variants/amazon/test_release.yml +++ b/etc/evergreen_yml_components/variants/amazon/test_release.yml @@ -549,7 +549,6 @@ buildvariants: packager_arch: aarch64 packager_distro: amazon2 repo_edition: enterprise - scons_cache_scope: shared compile_variant: enterprise-amazon2-streams-arm64 large_distro_name: amazon2-arm64-latest-large tasks: diff --git a/etc/scons/compilers_from_env.vars b/etc/scons/compilers_from_env.vars deleted file mode 100644 index b2e87b29501..00000000000 --- a/etc/scons/compilers_from_env.vars +++ /dev/null @@ -1,6 +0,0 @@ -import os - -# Causes SCons to set the C and C++ compilers via the CC and CXX shell environment variables - -CC=os.environ['CC'] -CXX=os.environ['CXX'] diff --git a/etc/scons/developer_versions.vars b/etc/scons/developer_versions.vars deleted file mode 100644 index 3e34bccbc93..00000000000 --- a/etc/scons/developer_versions.vars +++ /dev/null @@ -1,13 +0,0 @@ -def short_describe(): - import os - import subprocess - with open(os.devnull, "r+") as devnull: - proc = subprocess.Popen("git describe --abbrev=0", - stdout=subprocess.PIPE, - stderr=devnull, - stdin=devnull, - shell=True) - return proc.communicate()[0].decode('utf-8').strip()[1:] - -MONGO_GIT_HASH="unknown" -MONGO_VERSION=short_describe() diff --git a/etc/scons/experimental_unified_ninja.vars b/etc/scons/experimental_unified_ninja.vars deleted file mode 100644 index fe1f1febe89..00000000000 --- a/etc/scons/experimental_unified_ninja.vars +++ /dev/null @@ -1,9 +0,0 @@ -# This file is now empty and the settings have been made the default. 
-# This file exists only to prevent breakage when used with existing command line invocations. - -# Configures the build for building with a unified ninja -# Each configuration will share a ninja log -# This allows the output binaries of each configuration to share a common directory - -# NINJA_BUILDDIR="$BUILD_ROOT/ninja" -# DESTDIR="$BUILD_ROOT/install" diff --git a/etc/scons/gold_incremental_link.vars b/etc/scons/gold_incremental_link.vars deleted file mode 100644 index e298392f506..00000000000 --- a/etc/scons/gold_incremental_link.vars +++ /dev/null @@ -1 +0,0 @@ -LINKFLAGS="-fuse-ld=gold -fno-use-linker-plugin -Wl,-z,norelro -Wl,--incremental" diff --git a/etc/scons/icecream_remote_mongodbtoolchain.vars b/etc/scons/icecream_remote_mongodbtoolchain.vars deleted file mode 100644 index b8f3e75f979..00000000000 --- a/etc/scons/icecream_remote_mongodbtoolchain.vars +++ /dev/null @@ -1,3 +0,0 @@ -ICECC = "icecc" -ICECC_VERSION = "http://mongodbtoolchain.build.10gen.cc/icecream/ubuntu1604/x86_64/latest" -ICECC_VERSION_ARCH = "x86_64" diff --git a/etc/scons/mongodbtoolchain_stable_clang.vars b/etc/scons/mongodbtoolchain_stable_clang.vars deleted file mode 100644 index 8ce342fc20a..00000000000 --- a/etc/scons/mongodbtoolchain_stable_clang.vars +++ /dev/null @@ -1 +0,0 @@ -exec(open('etc/scons/mongodbtoolchain_v5_clang.vars', "rb").read()) diff --git a/etc/scons/mongodbtoolchain_stable_gcc.vars b/etc/scons/mongodbtoolchain_stable_gcc.vars deleted file mode 100644 index 156752f6774..00000000000 --- a/etc/scons/mongodbtoolchain_stable_gcc.vars +++ /dev/null @@ -1 +0,0 @@ -exec(open('etc/scons/mongodbtoolchain_v5_gcc.vars', "rb").read()) diff --git a/etc/scons/mongodbtoolchain_testing_clang.vars b/etc/scons/mongodbtoolchain_testing_clang.vars deleted file mode 100644 index 8e272d27e6a..00000000000 --- a/etc/scons/mongodbtoolchain_testing_clang.vars +++ /dev/null @@ -1 +0,0 @@ -exec(open('etc/scons/mongodbtoolchain_stable_clang.vars', "rb").read()) diff --git 
a/etc/scons/mongodbtoolchain_testing_gcc.vars b/etc/scons/mongodbtoolchain_testing_gcc.vars deleted file mode 100644 index a1790bb0d2f..00000000000 --- a/etc/scons/mongodbtoolchain_testing_gcc.vars +++ /dev/null @@ -1 +0,0 @@ -exec(open('etc/scons/mongodbtoolchain_stable_gcc.vars', "rb").read()) diff --git a/etc/scons/mongodbtoolchain_v4_clang.vars b/etc/scons/mongodbtoolchain_v4_clang.vars deleted file mode 100644 index 8a0a1976fd5..00000000000 --- a/etc/scons/mongodbtoolchain_v4_clang.vars +++ /dev/null @@ -1,65 +0,0 @@ -# Configures the build to use the GCC toolchain in /opt/mongodbtoolchain/v4 - -import os -import subprocess -import platform -import SCons.Defaults -from SCons.Script import ARGUMENTS - -if not ARGUMENTS.get('CC') and not ARGUMENTS.get("CXX") and platform.machine() != "s390x": - - - toolchain_exec_root = SCons.Script.Main.GetOption('toolchain-root') - toolchain_root = toolchain_exec_root + "/external/mongo_toolchain_v4/v4" - local_toolchain_root = '/opt/mongodbtoolchain/v4' - if not toolchain_root: - toolchain_root = local_toolchain_root - - toolchain_bindir = os.path.join(toolchain_root, 'bin') - local_toolchain_bindir = os.path.join(local_toolchain_root, 'bin') - - # Get the default SCons path as a list - default_path = SCons.Defaults.DefaultEnvironment()['ENV']['PATH'].split(os.pathsep) - exec(open('bazel/toolchains/mongo_toolchain_flags_v4.bzl', "rb").read()) - exec(open('bazel/platforms/normalize.bzl', "rb").read()) - arch = ARCH_NORMALIZE_MAP[platform.machine()] - - # Put the toolchain path first so we prefer all tools from there in subprocs - ENV = { - 'PATH' : os.pathsep.join([toolchain_bindir, local_toolchain_bindir] + default_path) - } - TOOLCHAIN_LINKFLAGS=[] - for flag in COMMON_LINK_FLAGS: - TOOLCHAIN_LINKFLAGS.append("-L"+toolchain_exec_root+"/"+flag.format(arch=arch)) - TOOLCHAIN_CCFLAGS = ["-nostdinc++"] - for flag in CLANG_INCLUDE_DIRS+COMMON_INCLUDE_DIRECTORIES+COMMON_BUILTIN_INCLUDE_DIRECTORIES: - if flag.startswith("/"): - 
TOOLCHAIN_CCFLAGS.append("-isystem"+flag.format(arch=arch)) - else: - TOOLCHAIN_CCFLAGS.append("-isystem"+toolchain_exec_root+"/"+flag.format(arch=arch)) - - # TODO BUILD-16594 - # This is temporary workaround so that gcc can find the LLVM lld from the toolchain - # until we can build this into the toolchain's default search paths - LINKFLAGS_COMPILER_EXEC_PREFIX = [f'{toolchain_bindir}'] - for flag in COMMON_BINDIRS: - LINKFLAGS_COMPILER_EXEC_PREFIX.append(toolchain_exec_root+"/"+flag.format(arch=arch)) - - # Set any Variables for Tools from the toolchain here. Technically, we - # shouldn't need the full paths since SCons will find the toolchain - # ones first, but we don't want to accidentally get the system version - # if, say, the toolchain is missing. Also, it is clearer that we are - # getting the right toolchain in build log output when the path is - # printed for each compiler invocation. - - MONGO_TOOLCHAIN_VERSION = "v4" - CC = os.path.join(toolchain_bindir, 'clang') - CXX = os.path.join(toolchain_bindir, 'clang++') - DWP = os.path.join(local_toolchain_bindir, 'dwp') - READELF = os.path.join(local_toolchain_bindir, 'readelf') - GDB = os.path.join(local_toolchain_bindir, 'gdb') - AR = os.path.join(toolchain_bindir, 'ar') - AS = os.path.join(toolchain_bindir, 'as') - OBJCOPY = os.path.join(toolchain_bindir, 'llvm-objcopy') - LLVM_SYMBOLIZER = os.path.join(toolchain_bindir, 'llvm-symbolizer') - DWARF_VERSION=4 diff --git a/etc/scons/mongodbtoolchain_v4_gcc.vars b/etc/scons/mongodbtoolchain_v4_gcc.vars deleted file mode 100644 index 20257135b99..00000000000 --- a/etc/scons/mongodbtoolchain_v4_gcc.vars +++ /dev/null @@ -1,63 +0,0 @@ -# Configures the build to use the GCC toolchain in /opt/mongodbtoolchain/v4 - -import os -import subprocess -import platform -import SCons.Defaults -from SCons.Script import ARGUMENTS - -if not ARGUMENTS.get('CC') and not ARGUMENTS.get("CXX"): - - toolchain_exec_root = SCons.Script.Main.GetOption('toolchain-root') - toolchain_root 
= toolchain_exec_root + "/external/mongo_toolchain_v4/v4" - local_toolchain_root = '/opt/mongodbtoolchain/v4' - if not toolchain_root: - toolchain_root = local_toolchain_root - - toolchain_bindir = os.path.join(toolchain_root, 'bin') - local_toolchain_bindir = os.path.join(local_toolchain_root, 'bin') - - # Get the default SCons path as a list - default_path = SCons.Defaults.DefaultEnvironment()['ENV']['PATH'].split(os.pathsep) - exec(open('bazel/toolchains/mongo_toolchain_flags_v4.bzl', "rb").read()) - exec(open('bazel/platforms/normalize.bzl', "rb").read()) - arch = ARCH_NORMALIZE_MAP[platform.machine()] - - # Put the toolchain path first so we prefer all tools from there in subprocs - ENV = { - 'PATH' : os.pathsep.join([toolchain_bindir, local_toolchain_bindir] + default_path) - } - TOOLCHAIN_LINKFLAGS=[] - for flag in COMMON_LINK_FLAGS: - TOOLCHAIN_LINKFLAGS.append("-L"+toolchain_exec_root+"/"+flag.format(arch=arch)) - TOOLCHAIN_CCFLAGS = ["-nostdinc++"] - for flag in GCC_INCLUDE_DIRS+COMMON_INCLUDE_DIRECTORIES+COMMON_BUILTIN_INCLUDE_DIRECTORIES: - if flag.startswith("/"): - TOOLCHAIN_CCFLAGS.append("-isystem"+flag.format(arch=arch)) - else: - TOOLCHAIN_CCFLAGS.append("-isystem"+toolchain_exec_root+"/"+flag.format(arch=arch)) - - # TODO BUILD-16594 - # This is temporary workaround so that gcc can find the LLVM lld from the toolchain - # until we can build this into the toolchain's default search paths - LINKFLAGS_COMPILER_EXEC_PREFIX = [f'{toolchain_bindir}'] - for flag in COMMON_BINDIRS: - LINKFLAGS_COMPILER_EXEC_PREFIX.append(toolchain_exec_root+"/"+flag.format(arch=arch)) - - # Set any Variables for Tools from the toolchain here. Technically, we - # shouldn't need the full paths since SCons will find the toolchain - # ones first, but we don't want to accidentally get the system version - # if, say, the toolchain is missing. 
Also, it is clearer that we are - # getting the right toolchain in build log output when the path is - # printed for each compiler invocation. - - MONGO_TOOLCHAIN_VERSION = "v4" - CC = os.path.join(toolchain_bindir, 'gcc') - CXX = os.path.join(toolchain_bindir, 'g++') - AR = os.path.join(toolchain_bindir, 'ar') - AS = os.path.join(toolchain_bindir, 'as') - OBJCOPY = os.path.join(toolchain_bindir, 'objcopy') - DWP = os.path.join(local_toolchain_bindir, 'dwp') - READELF = os.path.join(local_toolchain_bindir, 'readelf') - GDB = os.path.join(local_toolchain_bindir, 'gdb') - DWARF_VERSION=5 diff --git a/etc/scons/mongodbtoolchain_v5_clang.vars b/etc/scons/mongodbtoolchain_v5_clang.vars deleted file mode 100644 index e401a0fc0a6..00000000000 --- a/etc/scons/mongodbtoolchain_v5_clang.vars +++ /dev/null @@ -1,64 +0,0 @@ -# Configures the build to use the clang toolchain in /opt/mongodbtoolchain/v5 - -import os -import subprocess -import platform -import SCons.Defaults -from SCons.Script import ARGUMENTS - -if not ARGUMENTS.get('CC') and not ARGUMENTS.get("CXX") and platform.machine() != "s390x": - - toolchain_exec_root = SCons.Script.Main.GetOption('toolchain-root') - toolchain_root = toolchain_exec_root + "/external/mongo_toolchain_v5/v5" - local_toolchain_root = '/opt/mongodbtoolchain/v5' - if not toolchain_root: - toolchain_root = local_toolchain_root - - toolchain_bindir = os.path.join(toolchain_root, 'bin') - local_toolchain_bindir = os.path.join(local_toolchain_root, 'bin') - - # Get the default SCons path as a list - default_path = SCons.Defaults.DefaultEnvironment()['ENV']['PATH'].split(os.pathsep) - exec(open('bazel/toolchains/mongo_toolchain_flags_v5.bzl', "rb").read()) - exec(open('bazel/platforms/normalize.bzl', "rb").read()) - arch = ARCH_NORMALIZE_MAP[platform.machine()] - - # Put the toolchain path first so we prefer all tools from there in subprocs - ENV = { - 'PATH' : os.pathsep.join([toolchain_bindir, local_toolchain_bindir] + default_path) - } - 
TOOLCHAIN_LINKFLAGS=[] - for flag in COMMON_LINK_FLAGS: - TOOLCHAIN_LINKFLAGS.append("-L"+toolchain_exec_root+"/"+flag.format(arch=arch)) - TOOLCHAIN_CCFLAGS = ["-nostdinc++"] - for flag in CLANG_INCLUDE_DIRS+COMMON_INCLUDE_DIRECTORIES+COMMON_BUILTIN_INCLUDE_DIRECTORIES: - if flag.startswith("/"): - TOOLCHAIN_CCFLAGS.append("-isystem"+flag.format(arch=arch)) - else: - TOOLCHAIN_CCFLAGS.append("-isystem"+toolchain_exec_root+"/"+flag.format(arch=arch)) - - # TODO BUILD-16594 - # This is temporary workaround so that gcc can find the LLVM lld from the toolchain - # until we can build this into the toolchain's default search paths - LINKFLAGS_COMPILER_EXEC_PREFIX = [f'{toolchain_bindir}'] - for flag in COMMON_BINDIRS: - LINKFLAGS_COMPILER_EXEC_PREFIX.append(toolchain_exec_root+"/"+flag.format(arch=arch)) - - # Set any Variables for Tools from the toolchain here. Technically, we - # shouldn't need the full paths since SCons will find the toolchain - # ones first, but we don't want to accidentally get the system version - # if, say, the toolchain is missing. Also, it is clearer that we are - # getting the right toolchain in build log output when the path is - # printed for each compiler invocation. 
- - MONGO_TOOLCHAIN_VERSION = "v5" - CC = os.path.join(toolchain_bindir, 'clang') - CXX = os.path.join(toolchain_bindir, 'clang++') - DWP = os.path.join(toolchain_bindir, 'dwp') - READELF = os.path.join(toolchain_bindir, 'readelf') - GDB = os.path.join(local_toolchain_bindir, 'gdb') - AR = os.path.join(toolchain_bindir, 'ar') - AS = os.path.join(toolchain_bindir, 'as') - OBJCOPY = os.path.join(toolchain_bindir, 'llvm-objcopy') - LLVM_SYMBOLIZER = os.path.join(toolchain_bindir, 'llvm-symbolizer') - DWARF_VERSION=4 diff --git a/etc/scons/mongodbtoolchain_v5_gcc.vars b/etc/scons/mongodbtoolchain_v5_gcc.vars deleted file mode 100644 index 243768f04e7..00000000000 --- a/etc/scons/mongodbtoolchain_v5_gcc.vars +++ /dev/null @@ -1,63 +0,0 @@ -# Configures the build to use the GCC toolchain in /opt/mongodbtoolchain/v5 - -import os -import subprocess -import platform -import SCons.Defaults -from SCons.Script import ARGUMENTS - -if not ARGUMENTS.get('CC') and not ARGUMENTS.get("CXX"): - - toolchain_exec_root = SCons.Script.Main.GetOption('toolchain-root') - toolchain_root = toolchain_exec_root + "/external/mongo_toolchain_v5/v5" - local_toolchain_root = '/opt/mongodbtoolchain/v5' - if not toolchain_root: - toolchain_root = local_toolchain_root - - toolchain_bindir = os.path.join(toolchain_root, 'bin') - local_toolchain_bindir = os.path.join(local_toolchain_root, 'bin') - - # Get the default SCons path as a list - default_path = SCons.Defaults.DefaultEnvironment()['ENV']['PATH'].split(os.pathsep) - exec(open('bazel/toolchains/mongo_toolchain_flags_v5.bzl', "rb").read()) - exec(open('bazel/platforms/normalize.bzl', "rb").read()) - arch = ARCH_NORMALIZE_MAP[platform.machine()] - - # Put the toolchain path first so we prefer all tools from there in subprocs - ENV = { - 'PATH' : os.pathsep.join([toolchain_bindir, local_toolchain_bindir] + default_path) - } - TOOLCHAIN_LINKFLAGS=[] - for flag in COMMON_LINK_FLAGS: - 
TOOLCHAIN_LINKFLAGS.append("-L"+toolchain_exec_root+"/"+flag.format(arch=arch)) - TOOLCHAIN_CCFLAGS = ["-nostdinc++"] - for flag in GCC_INCLUDE_DIRS+COMMON_INCLUDE_DIRECTORIES+COMMON_BUILTIN_INCLUDE_DIRECTORIES: - if flag.startswith("/"): - TOOLCHAIN_CCFLAGS.append("-isystem"+flag.format(arch=arch)) - else: - TOOLCHAIN_CCFLAGS.append("-isystem"+toolchain_exec_root+"/"+flag.format(arch=arch)) - - # TODO BUILD-16594 - # This is temporary workaround so that gcc can find the LLVM lld from the toolchain - # until we can build this into the toolchain's default search paths - LINKFLAGS_COMPILER_EXEC_PREFIX = [f'{toolchain_bindir}'] - for flag in COMMON_BINDIRS: - LINKFLAGS_COMPILER_EXEC_PREFIX.append(toolchain_exec_root+"/"+flag.format(arch=arch)) - - # Set any Variables for Tools from the toolchain here. Technically, we - # shouldn't need the full paths since SCons will find the toolchain - # ones first, but we don't want to accidentally get the system version - # if, say, the toolchain is missing. Also, it is clearer that we are - # getting the right toolchain in build log output when the path is - # printed for each compiler invocation. 
- - MONGO_TOOLCHAIN_VERSION = "v5" - CC = os.path.join(toolchain_bindir, 'gcc') - CXX = os.path.join(toolchain_bindir, 'g++') - AR = os.path.join(toolchain_bindir, 'ar') - AS = os.path.join(toolchain_bindir, 'as') - OBJCOPY = os.path.join(toolchain_bindir, 'objcopy') - DWP = os.path.join(toolchain_bindir, 'dwp') - READELF = os.path.join(toolchain_bindir, 'readelf') - GDB = os.path.join(local_toolchain_bindir, 'gdb') - DWARF_VERSION=5 diff --git a/etc/scons/msvc_dont_use_script.vars b/etc/scons/msvc_dont_use_script.vars deleted file mode 100644 index b5a510a4521..00000000000 --- a/etc/scons/msvc_dont_use_script.vars +++ /dev/null @@ -1 +0,0 @@ -MSVC_USE_SCRIPT=False diff --git a/etc/scons/propagate_shell_environment.vars b/etc/scons/propagate_shell_environment.vars deleted file mode 100644 index 3f9eedb5494..00000000000 --- a/etc/scons/propagate_shell_environment.vars +++ /dev/null @@ -1,6 +0,0 @@ -import os - -# Causes SCons to import the parent environment into subordinate process execution. This -# can lead to non-reproducible builds... use with caution. 
- -ENV=os.environ diff --git a/etc/scons/scons_experimental_scheduler.vars b/etc/scons/scons_experimental_scheduler.vars deleted file mode 100644 index 547a5b2f16c..00000000000 --- a/etc/scons/scons_experimental_scheduler.vars +++ /dev/null @@ -1,2 +0,0 @@ -import SCons -SCons.Job.Parallel = SCons.Job.ExperimentalParallel diff --git a/etc/scons/xcode_macosx.vars b/etc/scons/xcode_macosx.vars deleted file mode 100644 index 9461c0a9186..00000000000 --- a/etc/scons/xcode_macosx.vars +++ /dev/null @@ -1,20 +0,0 @@ -# Configures the build to use XCode targeting macOS - -import subprocess -import SCons - -CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang']).decode('utf-8').strip() -CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang++']).decode('utf-8').strip() -DSYMUTIL = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'dsymutil']).decode('utf-8').strip() -STRIP = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'strip']).decode('utf-8').strip() - -# TAPI is less useful when running with Bazel + Remote Execution. Disable since the initial implementation -# of the build system with Bazel will not support it. 
-# TODO(SERVER-88612): Uncomment if we decide to implement TAPI support in Bazel -# TAPI = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'tapi']).decode('utf-8').strip() - -sdk_path = subprocess.check_output(['xcrun', '--sdk', 'macosx', '--show-sdk-path']).decode('utf-8').strip() - -CCFLAGS = "-isysroot {} -mmacosx-version-min=14.0 --target=darwin20.0.0 -arch x86_64".format(sdk_path) -ASFLAGS = "-isysroot {} -mmacosx-version-min=14.0 --target=darwin20.0.0 -arch x86_64".format(sdk_path) -LINKFLAGS = "-Wl,-syslibroot,{} -mmacosx-version-min=14.0 --target=darwin20.0.0 -arch x86_64".format(sdk_path) diff --git a/etc/scons/xcode_macosx_arm.vars b/etc/scons/xcode_macosx_arm.vars deleted file mode 100644 index 4239403037c..00000000000 --- a/etc/scons/xcode_macosx_arm.vars +++ /dev/null @@ -1,20 +0,0 @@ -# Configures the build to use XCode targeting macOS - -import subprocess -import SCons - -CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang']).decode('utf-8').strip() -CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang++']).decode('utf-8').strip() -DSYMUTIL = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'dsymutil']).decode('utf-8').strip() -STRIP = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'strip']).decode('utf-8').strip() - -# TAPI is less useful when running with Bazel + Remote Execution. Disable since the initial implementation -# of the build system with Bazel will not support it. 
-# TODO(SERVER-88612): Uncomment if we decide to implement TAPI support in Bazel -# TAPI = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'tapi']).decode('utf-8').strip() - -sdk_path = subprocess.check_output(['xcrun', '--sdk', 'macosx', '--show-sdk-path']).decode('utf-8').strip() - -CCFLAGS = "-isysroot {} -mmacosx-version-min=14.0 --target=darwin20.0.0 -arch arm64".format(sdk_path) -ASFLAGS = "-isysroot {} -mmacosx-version-min=14.0 --target=darwin20.0.0 -arch arm64".format(sdk_path) -LINKFLAGS = "-Wl,-syslibroot,{} -mmacosx-version-min=14.0 --target=darwin20.0.0 -arch arm64".format(sdk_path) diff --git a/evergreen/build_metric_cedar_report.py b/evergreen/build_metric_cedar_report.py deleted file mode 100644 index 5e93179d961..00000000000 --- a/evergreen/build_metric_cedar_report.py +++ /dev/null @@ -1,185 +0,0 @@ -import argparse -import json -import sys - -parser = argparse.ArgumentParser(description="Print top n metrics from build metrics json files.") -parser.add_argument( - "--build-metrics", - metavar="FILE", - type=str, - default="build_metrics.json", - help="Path to build metrics input json.", -) -parser.add_argument( - "--cache-pull-metrics", - metavar="FILE", - type=str, - default="pull_cache.json", - help="Path to build metrics for cache pull input json.", -) -parser.add_argument( - "--cache-push-metrics", - metavar="FILE", - type=str, - default="populate_cache.json", - help="Path to build metrics for cache push input json.", -) -args = parser.parse_args() - -clean_build_metrics_json = args.build_metrics -populate_cache_metrics_json = args.cache_push_metrics -pull_cache_metrics_json = args.cache_pull_metrics -cedar_report = [] - - -def single_metric_test(test_name, metric_name, value): - return { - "info": { - "test_name": test_name, - }, - "metrics": [ - {"name": metric_name, "value": round(value, 2)}, - ], - } - - -with open(clean_build_metrics_json) as f: - aggregated_build_tasks = {} - build_metrics = json.load(f) - for task in 
build_metrics["build_tasks"]: - if task["builder"] in [ - "SharedLibrary", - "StaticLibrary", - "Program", - "Object", - "SharedObject", - "StaticObject", - ]: - outputs_key = " ".join(task["outputs"]) - if outputs_key in aggregated_build_tasks: - if aggregated_build_tasks[outputs_key]["mem_usage"] < task["mem_usage"]: - aggregated_build_tasks[outputs_key]["mem_usage"] = task["mem_usage"] - aggregated_build_tasks[outputs_key]["time"] += task["end_time"] - task["start_time"] - else: - aggregated_build_tasks[outputs_key] = { - "mem_usage": task["mem_usage"], - "time": task["end_time"] - task["start_time"], - } - - for output_files in aggregated_build_tasks: - cedar_report.append( - { - "info": { - "test_name": output_files, - }, - "metrics": [ - { - "name": "seconds", - "value": round( - aggregated_build_tasks[output_files]["time"] / (10.0**9.0), 2 - ), - }, - { - "name": "MBs", - "value": round( - aggregated_build_tasks[output_files]["mem_usage"] / 1024.0 / 1024.0, 2 - ), - }, - ], - } - ) - - try: - cedar_report.append( - single_metric_test( - "SCons memory usage", - "MBs", - build_metrics["scons_metrics"]["memory"]["post_build"] / 1024.0 / 1024.0, - ) - ) - except KeyError: - if sys.platform == "darwin": - # MacOS has known memory reporting issues, although this is not directly related to scons which does not use - # psutil for this case, I think both use underlying OS calls to determine the memory: https://github.com/giampaolo/psutil/issues/1908 - pass - - cedar_report.append( - single_metric_test( - "System Memory Peak", "MBs", build_metrics["system_memory"]["max"] / 1024.0 / 1024.0 - ) - ) - cedar_report.append( - single_metric_test( - "Total Build time", "seconds", build_metrics["scons_metrics"]["time"]["total"] - ) - ) - cedar_report.append( - single_metric_test( - "Total Build output size", - "MBs", - build_metrics["artifact_metrics"]["total_artifact_size"] / 1024.0 / 1024.0, - ) - ) - - try: - cedar_report.append( - single_metric_test( - "Transitive Libdeps 
Edges", "edges", build_metrics["libdeps_metrics"]["TRANS_EDGE"] - ) - ) - except KeyError: - pass - - mongod_metrics = None - for artifact in build_metrics["artifact_metrics"]["artifacts"]: - if not mongod_metrics and artifact["name"] == "build/metrics/mongo/db/mongod": - mongod_metrics = artifact - if artifact["name"] == "build/metrics/mongo/db/mongod.debug": - mongod_metrics = artifact - break - - if mongod_metrics and mongod_metrics.get("bin_metrics"): - cedar_report.append( - single_metric_test( - "Mongod debug info size", - "MBs", - mongod_metrics["bin_metrics"]["debug"]["filesize"] / 1024.0 / 1024.0, - ) - ) - -with open(populate_cache_metrics_json) as f: - build_metrics = json.load(f) - cedar_report.append( - { - "info": { - "test_name": "cache_push_time", - }, - "metrics": [ - { - "name": "seconds", - "value": build_metrics["cache_metrics"]["push_time"] / (10.0**9.0), - }, - ], - } - ) - -with open(pull_cache_metrics_json) as f: - build_metrics = json.load(f) - cedar_report.append( - { - "info": { - "test_name": "cache_pull_time", - }, - "metrics": [ - { - "name": "seconds", - "value": build_metrics["cache_metrics"]["pull_time"] / (10.0**9.0), - }, - ], - } - ) - -print(f"Generated Cedar Report with {len(cedar_report)} perf results.") - -with open("build_metrics_cedar_report.json", "w") as fh: - json.dump(cedar_report, fh) diff --git a/evergreen/functions/win_mount_script_setup.sh b/evergreen/functions/win_mount_script_setup.sh deleted file mode 100755 index fee19f93cb3..00000000000 --- a/evergreen/functions/win_mount_script_setup.sh +++ /dev/null @@ -1,9 +0,0 @@ -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" -. 
"$DIR/../prelude.sh" - -cd src - -cat << EOF > win_mount.sh -net use X: '\\\\${win_scons_endpoint}\\share' /USER:"wincache.build.com\\${win_scons_user}" '${win_scons_pass}' -EOF -chmod +x win_mount.sh diff --git a/evergreen/libdeps_run.sh b/evergreen/libdeps_run.sh deleted file mode 100755 index ca019ee4265..00000000000 --- a/evergreen/libdeps_run.sh +++ /dev/null @@ -1,14 +0,0 @@ -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" -. "$DIR/prelude.sh" - -cd src - -set -o errexit -set -o verbose - -activate_venv -GRAPH_FILE=$(find build -name "libdeps.graphml") -python buildscripts/libdeps/analyzer_unittests.py -python buildscripts/libdeps/gacli.py --graph-file $GRAPH_FILE > results.txt -gzip $GRAPH_FILE -mv $GRAPH_FILE.gz . diff --git a/evergreen/libdeps_setup.sh b/evergreen/libdeps_setup.sh deleted file mode 100755 index 38660c4367b..00000000000 --- a/evergreen/libdeps_setup.sh +++ /dev/null @@ -1,23 +0,0 @@ -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" -. "$DIR/prelude.sh" - -cd src - -activate_venv - -# Loop 5 times to retry libdeps install -# We have seen weird network errors that can sometimes mess up the pip install -# By retrying we would like to only see errors that happen consistently -for i in {1..5}; do - python -m poetry install --no-root --sync -E libdeps && RET=0 && break || RET=$? && sleep 1 -done - -if [ $RET -ne 0 ]; then - echo "Poetry install error for libdeps addition to venv" - exit $RET -fi - -cd .. 
- -# Overwrite pip-requirements since this is installing additional requirements -python -m pip freeze > pip-requirements.txt diff --git a/jstests/SConscript b/jstests/SConscript deleted file mode 100644 index 97cd134e513..00000000000 --- a/jstests/SConscript +++ /dev/null @@ -1,28 +0,0 @@ -# Includes the jstests in distribution tarballs generated by SCons - -from collections import defaultdict - -Import("env") -Import("get_option") - -Return() - -env = env.Clone() - -jstests = env.Glob("**/*.js") - -# Group by directory to avoid making a million calls to AutoInstall -jstests_by_dir = defaultdict(list) -for jstest in jstests: - jstests_by_dir[jstest.dir].append(jstest) - -for directory, files in jstests_by_dir.items(): - env.AutoInstall( - target="$PREFIX_SHAREDIR/jstests/" + str(directory), - source=files, - AIB_COMPONENT="jstests", - AIB_ROLE="runtime", - AIB_COMPONENTS_EXTRA=[ - "tests", - ], - ) diff --git a/pyproject.toml b/pyproject.toml index 63afc47e277..618a7c51e31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -197,13 +197,8 @@ resmoke = "buildscripts.resmoke:entrypoint" [tool.ruff] extend-exclude = [ - "site_scons/third_party", "src/third_party", ] -extend-include = [ - "SConstruct", - "*/SConscript" -] line-length = 100 target-version = "py310" diff --git a/sbom.json b/sbom.json index b3d8fb09c87..f65fb9c0a31 100644 --- a/sbom.json +++ b/sbom.json @@ -1624,45 +1624,6 @@ "scope": "excluded", "purl": "pkg:github/schemastore/schemastore@6847cfc3a17a04a7664474212db50c627e1e3408" }, - { - "supplier": { - "name": "Organization: github" - }, - "name": "SCons - a Software Construction tool", - "version": "3.1.2", - "licenses": [ - { - "license": { - "id": "MIT" - } - } - ], - "purl": "pkg:github/SCons/scons@3.1.2", - "properties": [ - { - "name": "internal:team_responsible", - "value": "Build" - }, - { - "name": "emits_persisted_data", - "value": "false" - }, - { - "name": "info_link", - "value": "https://github.com/SCons/scons" - } - ], - "type": 
"library", - "bom-ref": "144a085e-96cd-4061-acf6-262fd1b69abe", - "evidence": { - "occurrences": [ - { - "location": "src/third_party/scons-3.1.2" - } - ] - }, - "scope": "required" - }, { "supplier": { "name": "" diff --git a/site_scons/OWNERS.yml b/site_scons/OWNERS.yml deleted file mode 100644 index 1baf21091bc..00000000000 --- a/site_scons/OWNERS.yml +++ /dev/null @@ -1,5 +0,0 @@ -version: 1.0.0 -filters: - - "*": - approvers: - - 10gen/devprod-build diff --git a/site_scons/libdeps_tool.py b/site_scons/libdeps_tool.py deleted file mode 100644 index 4315eb83b47..00000000000 --- a/site_scons/libdeps_tool.py +++ /dev/null @@ -1,1720 +0,0 @@ -"""Extension to SCons providing advanced static library dependency tracking. - -These modifications to a build environment, which can be attached to -StaticLibrary and Program builders via a call to setup_environment(env), -cause the build system to track library dependencies through static libraries, -and to add them to the link command executed when building programs. - -For example, consider a program 'try' that depends on a lib 'tc', which in -turn uses a symbol from a lib 'tb' which in turn uses a library from 'ta'. - -Without this package, the Program declaration for "try" looks like this: - -Program('try', ['try.c', 'path/to/${LIBPREFIX}tc${LIBSUFFIX}', - 'path/to/${LIBPREFIX}tb${LIBSUFFIX}', - 'path/to/${LIBPREFIX}ta${LIBSUFFIX}',]) - -With this library, we can instead write the following - -Program('try', ['try.c'], LIBDEPS=['path/to/tc']) -StaticLibrary('tc', ['c.c'], LIBDEPS=['path/to/tb']) -StaticLibrary('tb', ['b.c'], LIBDEPS=['path/to/ta']) -StaticLibrary('ta', ['a.c']) - -And the build system will figure out that it needs to link libta.a and libtb.a -when building 'try'. - -A StaticLibrary S may also declare programs or libraries, [L1, ...] 
to be dependent -upon S by setting LIBDEPS_DEPENDENTS=[L1, ...], using the same syntax as is used -for LIBDEPS, except that the libraries and programs will not have LIBPREFIX/LIBSUFFIX -automatically added when missing. -""" - -# Copyright (c) 2010, Corensic Inc., All Rights Reserved. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -import copy -import enum -import fileinput -import glob -import hashlib -import json -import os -import subprocess -import sys -import textwrap -import time -import traceback -from collections import defaultdict -from functools import partial - -try: - import networkx - - from buildscripts.libdeps.libdeps.graph import EdgeProps, LibdepsGraph, NodeProps -except ImportError: - pass - -import SCons -import SCons.Errors -import SCons.Scanner -import SCons.Util -from SCons.Script import COMMAND_LINE_TARGETS - - -class Constants: - Libdeps = "LIBDEPS" - LibdepsCached = "LIBDEPS_cached" - LibdepsDependents = "LIBDEPS_DEPENDENTS" - LibdepsGlobal = "LIBDEPS_GLOBAL" - LibdepsNoInherit = "LIBDEPS_NO_INHERIT" - LibdepsInterface = "LIBDEPS_INTERFACE" - LibdepsPrivate = "LIBDEPS_PRIVATE" - LibdepsTags = "LIBDEPS_TAGS" - LibdepsTagExpansion = "LIBDEPS_TAG_EXPANSIONS" - MissingLibdep = "MISSING_LIBDEP_" - ProgdepsDependents = "PROGDEPS_DEPENDENTS" - SysLibdeps = "SYSLIBDEPS" - SysLibdepsCached = "SYSLIBDEPS_cached" - SysLibdepsPrivate = "SYSLIBDEPS_PRIVATE" - - -class deptype(tuple, enum.Enum): - Global: tuple = (0, "GLOBAL") - Public: tuple = (1, "PUBLIC") - Private: tuple = (2, "PRIVATE") - Interface: tuple = (3, "INTERFACE") - - def __lt__(self, other): - if self.__class__ is other.__class__: - return self.value[0] < other.value[0] - return NotImplemented - - def __str__(self): - return self.value[1] - - def __int__(self): - return self.value[0] - - -class dependency: - def __init__(self, value, deptype, listed_name): - self.target_node = value - self.dependency_type = deptype - self.listed_name = listed_name - - def __str__(self): - return str(self.target_node) - - -class FlaggedLibdep: - """ - Utility class used for processing prefix and postfix flags on libdeps. 
The class - can keep track of separate lists for prefix and postfix as well separators, - allowing for modifications to the lists and then re-application of the flags with - modifications to a larger list representing the link line. - """ - - def __init__(self, libnode=None, env=None, start_index=None): - """ - The libnode should be a Libdep SCons node, and the env is the target env in - which the target has a dependency on the libdep. The start_index is important as - it determines where this FlaggedLibdep starts in the larger list of libdeps. - - The start_index will cut the larger list, and then re-apply this libdep with flags - at that location. This class will exract the prefix and postfix flags - from the Libdep nodes env. - """ - self.libnode = libnode - self.env = env - - # We need to maintain our own copy so as not to disrupt the env's original list. - try: - self.prefix_flags = copy.copy(getattr(libnode.attributes, "libdeps_prefix_flags", [])) - self.postfix_flags = copy.copy(getattr(libnode.attributes, "libdeps_postfix_flags", [])) - except AttributeError: - self.prefix_flags = [] - self.postfix_flags = [] - - self.start_index = start_index - - def __str__(self): - return str(self.libnode) - - def add_lib_to_result_list(self, result): - """ - This function takes in the current list of libdeps for a given target, and will - apply the libdep taking care of the prefix, postfix and any required separators when - adding to the list. 
- """ - if self.start_index is not None: - result[:] = result[: self.start_index] - self._add_lib_and_flags(result) - - def _get_separators(self, flags): - separated_list = [] - - for flag in flags: - separators = self.env.get("LIBDEPS_FLAG_SEPARATORS", {}).get(flag, {}) - separated_list.append(separators.get("prefix", " ")) - separated_list.append(flag) - separated_list.append(separators.get("suffix", " ")) - - return separated_list - - def _get_lib_with_flags(self): - lib_and_flags = [] - - lib_and_flags += self._get_separators(self.prefix_flags) - lib_and_flags += [str(self)] - lib_and_flags += self._get_separators(self.postfix_flags) - - return lib_and_flags - - def _add_lib_and_flags(self, result): - """ - This function will clean up the flags for the link line after extracting everything - from the environment. This will mostly look for separators that are just a space, and - remove them from the list, as the final link line will add spaces back for each item - in the list. It will take to concat flags where the separators don't allow for a space. - """ - next_contig_str = "" - - for item in self._get_lib_with_flags(): - if item != " ": - next_contig_str += item - else: - if next_contig_str: - result.append(next_contig_str) - next_contig_str = "" - - if next_contig_str: - result.append(next_contig_str) - - -class LibdepLinter: - """ - This class stores the rules for linting the libdeps. Using a decorator, - new rules can easily be added to the class, and will be called when - linting occurs. Each rule is run on each libdep. - - When a rule is broken, a LibdepLinterError exception will be raised. - Optionally the class can be configured to print the error message and - keep going with the build. - - Each rule should provide a method to skip that rule on a given node, - by supplying the correct flag in the LIBDEPS_TAG environment var for - that node. 
- - """ - - skip_linting = False - print_linter_errors = False - - linting_time = 0 - linting_infractions = 0 - linting_rules_run = 0 - registered_linting_time = False - - dangling_dep_dependents = set() - bazel_header_info = dict() - - @staticmethod - def _make_linter_decorator(): - """ - This is used for gathering the functions - by decorator that will be used for linting a given libdep. - """ - - funcs = {} - - def linter_rule_func(func): - funcs[func.__name__] = func - return func - - linter_rule_func.all = funcs - return linter_rule_func - - linter_rule = _make_linter_decorator.__func__() - linter_final_check = _make_linter_decorator.__func__() - - @classmethod - def _skip_linting(cls): - return cls.skip_linting - - @classmethod - def _start_timer(cls): - # Record time spent linting if we are in print mode. - if cls.print_linter_errors: - from timeit import default_timer as timer - - return timer() - - @classmethod - def _stop_timer(cls, start, num_rules): - # Record time spent linting if we are in print mode. - if cls.print_linter_errors: - from timeit import default_timer as timer - - cls.linting_time += timer() - start - cls.linting_rules_run += num_rules - - def __init__(self, env, target=None): - self.env = env - self.target = target - self.unique_libs = set() - self._libdeps_types_previous = dict() - - # If we are in print mode, we will record some linting metrics, - # and print the results at the end of the build. - if self.__class__.print_linter_errors and not self.__class__.registered_linting_time: - import atexit - - def print_linting_time(): - print(f"Spent {self.__class__.linting_time} seconds linting libdeps.") - print( - f"Found {self.__class__.linting_infractions} issues out of {self.__class__.linting_rules_run} libdeps rules checked." - ) - - atexit.register(print_linting_time) - self.__class__.registered_linting_time = True - - def lint_libdeps(self, libdeps): - """ - Lint the given list of libdeps for all - rules. 
- """ - - # Build performance optimization if you - # are sure your build is clean. - if self._skip_linting(): - return - start = self._start_timer() - - linter_rules = [getattr(self, linter_rule) for linter_rule in self.linter_rule.all] - - for libdep in libdeps: - for linter_rule in linter_rules: - linter_rule(libdep) - - self._stop_timer(start, len(linter_rules) * len(libdeps)) - - def final_checks(self): - # Build performance optimization if you - # are sure your build is clean. - if self._skip_linting(): - return - start = self._start_timer() - - linter_rules = [ - getattr(self.__class__, rule) for rule in self.__class__.linter_final_check.all - ] - - for linter_rule in linter_rules: - linter_rule(self) - - self._stop_timer(start, len(linter_rules)) - - def _raise_libdep_lint_exception(self, message): - """ - Raises the LibdepLinterError exception or if configure - to do so, just prints the error. - """ - prefix = "LibdepLinter: \n\t" - message = prefix + message.replace("\n", "\n\t") + "\n" - if self.__class__.print_linter_errors: - self.__class__.linting_infractions += 1 - print(message) - else: - raise LibdepLinterError(message) - - def _check_for_lint_tags(self, lint_tag, env=None, inclusive_tag=False): - """ - Used to get the lint tag from the environment, - and if printing instead of raising exceptions, - will ignore the tags. - """ - - # If print mode is on, we want to make sure to bypass checking - # exclusive tags so we can make sure the exceptions are not excluded - # and are printed. If it's an inclusive tag, we want to ignore this - # early return completely, because we want to make sure the node - # gets included for checking, and the exception gets printed. 
- if not inclusive_tag and self.__class__.print_linter_errors: - return False - - target_env = env if env else self.env - - if lint_tag in target_env.get(Constants.LibdepsTags, []): - return True - - def _get_deps_dependents(self, env=None): - """util function to get all types of DEPS_DEPENDENTS""" - target_env = env if env else self.env - deps_dependents = target_env.get(Constants.LibdepsDependents, []).copy() - deps_dependents += target_env.get(Constants.ProgdepsDependents, []) - return deps_dependents - - def _get_deps_dependents_with_types(self, builder, type): - return [ - (dependent[0], builder) if isinstance(dependent, tuple) else (dependent, builder) - for dependent in self.env.get(type, []) - ] - - @linter_rule - def linter_rule_leaf_node_no_deps(self, libdep): - """ - LIBDEP RULE: - Nodes marked explicitly as a leaf node should not have any dependencies, - unless those dependencies are explicitly marked as allowed as leaf node - dependencies. - """ - if not self._check_for_lint_tags("lint-leaf-node-no-deps", inclusive_tag=True): - return - - # Ignore dependencies that explicitly exempt themselves. - if self._check_for_lint_tags("lint-leaf-node-allowed-dep", libdep.target_node.env): - return - - # Global dependencies will apply to leaf nodes, so they should - # be automatically exempted. - if libdep.dependency_type == deptype.Global: - return - - target_type = self.target[0].builder.get_name(self.env) - lib = os.path.basename(str(libdep)) - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - {target_type} '{self.target[0]}' has dependency '{lib}' and is marked explicitly as a leaf node, - and '{lib}' does not exempt itself as an exception to the rule.""") - ) - - @linter_rule - def linter_rule_no_dangling_deps(self, libdep): - """ - LIBDEP RULE: - All reverse dependency edges must point to a node which will be built. 
- """ - if self._check_for_lint_tags("lint-allow-dangling-dep-dependent"): - return - - # Gather the DEPS_DEPENDENTS and store them for a final check to make sure they were - # eventually defined as being built by some builder - libdep_libbuilder = self.target[0].builder.get_name(self.env) - deps_depends = self._get_deps_dependents_with_types( - libdep_libbuilder, Constants.LibdepsDependents - ) - deps_depends += self._get_deps_dependents_with_types( - "Program", Constants.ProgdepsDependents - ) - deps_depends = [ - (_get_node_with_ixes(self.env, dep[0], dep[1]), dep[1]) for dep in deps_depends - ] - self.__class__.dangling_dep_dependents.update(deps_depends) - - for dep in deps_depends: - if dep[0] not in self.__class__.bazel_header_info: - self.__class__.bazel_header_info[dep[0]] = [] - self.__class__.bazel_header_info[dep[0]].append(self.target[0]) - - @linter_final_check - def linter_rule_no_dangling_dep_final_check(self): - # At this point the SConscripts have defined all the build items, - # and so we can go check any DEPS_DEPENDENTS listed and make sure a builder - # was instantiated to build them. - for dep_dependent in self.__class__.dangling_dep_dependents: - # This next block is for bazel header generation. We are co-opting - # the linter for simplicity to make sure we record the libdeps dependents - # which can't be access via a libraries emitter. - for target, deps in self.__class__.bazel_header_info.items(): - try: - with open(str(target.abspath) + ".libdeps", "a") as f: - for dep in deps: - f.write(os.path.relpath(dep.abspath, start=dep.Dir("#").abspath) + "\n") - except FileNotFoundError: - pass - - if not dep_dependent[0].has_builder(): - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - Found reverse dependency linked to node '{dep_dependent[0]}' - which will never be built by any builder. 
- Remove the reverse dependency or add a way to build it.""") - ) - - @linter_rule - def linter_rule_no_public_deps(self, libdep): - """ - LIBDEP RULE: - Nodes explicitly marked as not allowed to have public dependencies, should not - have public dependencies, unless the dependency is explicitly marked as allowed. - """ - if not self._check_for_lint_tags("lint-no-public-deps", inclusive_tag=True): - return - - if libdep.dependency_type not in (deptype.Global, deptype.Private): - # Check if the libdep exempts itself from this rule. - if self._check_for_lint_tags("lint-public-dep-allowed", libdep.target_node.env): - return - - target_type = self.target[0].builder.get_name(self.env) - lib = os.path.basename(str(libdep)) - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - {target_type} '{self.target[0]}' has public dependency '{lib}' - while being marked as not allowed to have public dependencies - and '{lib}' does not exempt itself.""") - ) - - @linter_rule - def linter_rule_no_dups(self, libdep): - """ - LIBDEP RULE: - A given node shall not link the same LIBDEP across public, private - or interface dependency types because it is ambiguous and unnecessary. - """ - if self._check_for_lint_tags("lint-allow-dup-libdeps"): - return - - if str(libdep) in self.unique_libs: - target_type = self.target[0].builder.get_name(self.env) - lib = os.path.basename(str(libdep)) - self._raise_libdep_lint_exception( - f"{target_type} '{self.target[0]}' links '{lib}' multiple times." - ) - - self.unique_libs.add(str(libdep)) - - @linter_rule - def linter_rule_alphabetic_deps(self, libdep): - """ - LIBDEP RULE: - Libdeps shall be listed alphabetically by type in the SCons files. - """ - - if self._check_for_lint_tags("lint-allow-non-alphabetic"): - return - - # Start checking order after the first item in the list is recorded to compare with. 
- if libdep.dependency_type in self._libdeps_types_previous: - if self._libdeps_types_previous[libdep.dependency_type] > libdep.listed_name: - target_type = self.target[0].builder.get_name(self.env) - self._raise_libdep_lint_exception( - f"{target_type} '{self.target[0]}' has '{libdep.listed_name}' listed in {dep_type_to_env_var[libdep.dependency_type]} out of alphabetical order." - ) - - self._libdeps_types_previous[libdep.dependency_type] = libdep.listed_name - - @linter_rule - def linter_rule_programs_link_private(self, libdep): - """ - LIBDEP RULE: - All Programs shall only have public dependency's - because a Program will never be a dependency of another Program - or Library, and LIBDEPS transitiveness does not apply. Public - transitiveness has no meaning in this case and is used just as default. - """ - if self._check_for_lint_tags("lint-allow-program-links-private"): - return - - if self.target[0].builder.get_name( - self.env - ) == "Program" and libdep.dependency_type not in (deptype.Global, deptype.Public): - lib = os.path.basename(str(libdep)) - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - Program '{self.target[0]}' links non-public library '{lib}' - A 'Program' can only have {Constants.Libdeps} libs, - not {Constants.LibdepsPrivate} or {Constants.LibdepsInterface}.""") - ) - - @linter_rule - def linter_rule_no_bidirectional_deps(self, libdep): - """ - LIBDEP RULE: - And Library which issues reverse dependencies, shall not be directly - linked to by another node, to prevent forward and reverse linkages existing - at the same node. Instead the content of the library that needs to issue reverse - dependency needs to be separated from content that needs direct linkage into two - separate libraries, which can be linked correctly respectively. 
- """ - - if not libdep.target_node.env: - return - elif self._check_for_lint_tags("lint-allow-bidirectional-edges", libdep.target_node.env): - return - elif len(self._get_deps_dependents(libdep.target_node.env)) > 0: - target_type = self.target[0].builder.get_name(self.env) - lib = os.path.basename(str(libdep)) - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - {target_type} '{self.target[0]}' links directly to a reverse dependency node '{lib}' - No node can link directly to a node that has {Constants.LibdepsDependents} or {Constants.ProgdepsDependents}.""") - ) - - @linter_rule - def linter_rule_nonprivate_on_deps_dependents(self, libdep): - """ - LIBDEP RULE: - A Library that issues reverse dependencies, shall not link libraries - with any kind of transitiveness, and will only link libraries privately. - This is because functionality that requires reverse dependencies should - not be transitive. - """ - if self._check_for_lint_tags("lint-allow-nonprivate-on-deps-dependents"): - return - - if ( - libdep.dependency_type != deptype.Private - and libdep.dependency_type != deptype.Global - and len(self._get_deps_dependents()) > 0 - ): - target_type = self.target[0].builder.get_name(self.env) - lib = os.path.basename(str(libdep)) - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - {target_type} '{self.target[0]}' links non-private libdep '{lib}' and has a reverse dependency. - A {target_type} can only have {Constants.LibdepsPrivate} depends if it has {Constants.LibdepsDependents} or {Constants.ProgdepsDependents}.""") - ) - - @linter_rule - def linter_rule_libdeps_must_be_list(self, libdep): - """ - LIBDEP RULE: - LIBDEPS, LIBDEPS_PRIVATE, and LIBDEPS_INTERFACE must be set as lists in the - environment. 
- """ - if self._check_for_lint_tags("lint-allow-nonlist-libdeps"): - return - - libdeps_vars = list(dep_type_to_env_var.values()) + [ - Constants.LibdepsDependents, - Constants.ProgdepsDependents, - ] - - for dep_type_val in libdeps_vars: - libdeps_list = self.env.get(dep_type_val, []) - if not SCons.Util.is_List(libdeps_list): - target_type = self.target[0].builder.get_name(self.env) - self._raise_libdep_lint_exception( - textwrap.dedent(f"""\ - Found non-list type '{libdeps_list}' while evaluating {dep_type_val[1]} for {target_type} '{self.target[0]}' - {dep_type_val[1]} must be setup as a list.""") - ) - - -dependency_visibility_ignored = { - deptype.Global: deptype.Public, - deptype.Interface: deptype.Public, - deptype.Public: deptype.Public, - deptype.Private: deptype.Public, -} - -dependency_visibility_honored = { - deptype.Global: deptype.Private, - deptype.Interface: deptype.Interface, - deptype.Public: deptype.Public, - deptype.Private: deptype.Private, -} - -dep_type_to_env_var = { - deptype.Global: Constants.LibdepsGlobal, - deptype.Interface: Constants.LibdepsInterface, - deptype.Public: Constants.Libdeps, - deptype.Private: Constants.LibdepsPrivate, -} - - -class DependencyCycleError(SCons.Errors.UserError): - """Exception representing a cycle discovered in library dependencies.""" - - def __init__(self, first_node): - super(DependencyCycleError, self).__init__() - self.cycle_nodes = [first_node] - - def __str__(self): - return "Library dependency cycle detected: " + " => ".join(str(n) for n in self.cycle_nodes) - - -class LibdepLinterError(SCons.Errors.UserError): - """Exception representing a discongruent usages of libdeps""" - - -class MissingSyslibdepError(SCons.Errors.UserError): - """Exception representing a discongruent usages of libdeps""" - - -def _get_sorted_direct_libdeps(node): - direct_sorted = getattr(node.attributes, "libdeps_direct_sorted", None) - if direct_sorted is None: - direct = getattr(node.attributes, "libdeps_direct", []) - 
direct_sorted = sorted(direct, key=lambda t: str(t.target_node)) - setattr(node.attributes, "libdeps_direct_sorted", direct_sorted) - return direct_sorted - - -class LibdepsVisitationMark(enum.IntEnum): - UNMARKED = 0 - MARKED_PRIVATE = 1 - MARKED_PUBLIC = 2 - - -def _libdeps_visit_private(n, marked, walking, debug=False): - if marked[n.target_node] >= LibdepsVisitationMark.MARKED_PRIVATE: - return - - if n.target_node in walking: - raise DependencyCycleError(n.target_node) - - walking.add(n.target_node) - - try: - for child in _get_sorted_direct_libdeps(n.target_node): - _libdeps_visit_private(child, marked, walking) - - marked[n.target_node] = LibdepsVisitationMark.MARKED_PRIVATE - - except DependencyCycleError as e: - if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]: - e.cycle_nodes.insert(0, n.target_node) - raise - - finally: - walking.remove(n.target_node) - - -def _libdeps_visit(n, tsorted, marked, walking, debug=False): - # The marked dictionary tracks which sorts of visitation a node - # has received. Values for a given node can be UNMARKED/absent, - # MARKED_PRIVATE, or MARKED_PUBLIC. These are to be interpreted as - # follows: - # - # 0/UNMARKED: Node is not not marked. - # - # MARKED_PRIVATE: Node has only been explored as part of looking - # for cycles under a LIBDEPS_PRIVATE edge. - # - # MARKED_PUBLIC: Node has been explored and any of its transiive - # dependencies have been incorporated into `tsorted`. - # - # The __libdeps_visit_private function above will only mark things - # at with MARKED_PRIVATE, while __libdeps_visit will mark things - # MARKED_PUBLIC. - if marked[n.target_node] == LibdepsVisitationMark.MARKED_PUBLIC: - return - - # The walking set is used for cycle detection. We record all our - # predecessors in our depth-first search, and if we observe one of - # our predecessors as a child, we know we have a cycle. 
- if n.target_node in walking: - raise DependencyCycleError(n.target_node) - - walking.add(n.target_node) - - if debug: - print(f" * {n.dependency_type} => {n.listed_name}") - - try: - children = _get_sorted_direct_libdeps(n.target_node) - - # We first walk all of our public dependencies so that we can - # put full marks on anything that is in our public transitive - # graph. We then do a second walk into any private nodes to - # look for cycles. While we could do just one walk over the - # children, it is slightly faster to do two passes, since if - # the algorithm walks into a private edge early, it would do a - # lot of non-productive (except for cycle checking) walking - # and marking, but if another public path gets into that same - # subtree, then it must walk and mark it again to raise it to - # the public mark level. Whereas, if the algorithm first walks - # the whole public tree, then those are all productive marks - # and add to tsorted, and then the private walk will only need - # to examine those things that are only reachable via private - # edges. - - for child in children: - if child.dependency_type != deptype.Private: - _libdeps_visit(child, tsorted, marked, walking, debug) - - for child in children: - if child.dependency_type == deptype.Private: - _libdeps_visit_private(child, marked, walking, debug) - - marked[n.target_node] = LibdepsVisitationMark.MARKED_PUBLIC - tsorted.append(n.target_node) - - except DependencyCycleError as e: - if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]: - e.cycle_nodes.insert(0, n.target_node) - raise - - finally: - walking.remove(n.target_node) - - -BAZEL_LIBDEPS_AUTOINSTALLED = set() - - -def _get_libdeps(node, debug=False): - """Given a SCons Node, return its library dependencies, topologically sorted. - - Computes the dependencies if they're not already cached. 
- """ - - cache = getattr(node.attributes, Constants.LibdepsCached, None) - if cache is not None: - if debug: - print(" Cache:") - for dep in cache: - print(f" * {str(dep)}") - return cache - - if debug: - print(" Edges:") - - tsorted = [] - - marked = defaultdict(lambda: LibdepsVisitationMark.UNMARKED) - walking = set() - - for child in _get_sorted_direct_libdeps(node): - if child.dependency_type != deptype.Interface: - _libdeps_visit(child, tsorted, marked, walking, debug=debug) - tsorted.reverse() - - setattr(node.attributes, Constants.LibdepsCached, tsorted) - return tsorted - - -def _missing_syslib(name): - return Constants.MissingLibdep + name - - -def update_scanner(env, builder_name=None, debug=False): - """Update the scanner for "builder" to also scan library dependencies.""" - - builder = env["BUILDERS"][builder_name] - old_scanner = builder.target_scanner - - if old_scanner: - path_function = old_scanner.path_function - else: - path_function = None - - def new_scanner(node, env, path=()): - if debug: - print(f"LIBDEPS SCANNER: {str(node)}") - print(" Declared dependencies:") - print(f" global: {env.get(Constants.LibdepsGlobal, None)}") - print(f" private: {env.get(Constants.LibdepsPrivate, None)}") - print(f" public: {env.get(Constants.Libdeps, None)}") - print(f" interface: {env.get(Constants.LibdepsInterface, None)}") - print(f" no_inherit: {env.get(Constants.LibdepsNoInherit, None)}") - - if old_scanner: - result = old_scanner.function(node, env, path) - else: - result = [] - result.extend(_get_libdeps(node, debug=debug)) - if debug: - print(" Build dependencies:") - print("\n".join([" * " + str(t) for t in result])) - print("\n") - return result - - builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, path_function=path_function - ) - - -def get_libdeps(source, target, env, for_signature, debug=False): - """Implementation of the special _LIBDEPS environment variable. - - Expands to the library dependencies for a target. 
- """ - - target = env.Flatten([target]) - return _get_libdeps(target[0], debug=debug) - - -def get_libdeps_objs(source, target, env, for_signature, debug=False): - objs = [] - for lib in get_libdeps(source, target, env, for_signature, debug=debug): - # This relies on Node.sources being order stable build-to-build. - objs.extend(lib.sources) - return objs - - -def stringify_deps(env, deps): - lib_link_prefix = env.subst("$LIBLINKPREFIX") - lib_link_suffix = env.subst("$LIBLINKSUFFIX") - - # Elements of libdeps are either strings (str or unicode), or they're File objects. - # If they're File objects, they can be passed straight through. If they're strings, - # they're believed to represent library short names, that should be prefixed with -l - # or the compiler-specific equivalent. I.e., 'm' becomes '-lm', but 'File("m.a") is passed - # through whole cloth. - return [f"{lib_link_prefix}{d}{lib_link_suffix}" if isinstance(d, str) else d for d in deps] - - -def get_syslibdeps(source, target, env, for_signature, debug=False, shared=True): - """Given a SCons Node, return its system library dependencies. - - These are the dependencies listed with SYSLIBDEPS, and are linked using -l. - """ - - deps = getattr(target[0].attributes, Constants.SysLibdepsCached, None) - if deps is None: - # Get the syslibdeps for the current node - deps = ( - target[0] - .get_env() - .Flatten(copy.copy(target[0].get_env().get(Constants.SysLibdepsPrivate)) or []) - ) - deps += target[0].get_env().Flatten(target[0].get_env().get(Constants.SysLibdeps) or []) - - for lib in _get_libdeps(target[0]): - # For each libdep get its syslibdeps, and then check to see if we can - # add it to the deps list. For static build we will also include private - # syslibdeps to be transitive. For a dynamic build we will only make - # public libdeps transitive. 
- syslibs = [] - if not shared: - syslibs += lib.get_env().get(Constants.SysLibdepsPrivate) or [] - syslibs += lib.get_env().get(Constants.SysLibdeps) or [] - - # Validate the libdeps, a configure check has already checked what - # syslibdeps are available so we can hard fail here if a syslibdep - # is being attempted to be linked with. - for syslib in syslibs: - if not syslib: - continue - - if isinstance(syslib, str) and syslib.startswith(Constants.MissingLibdep): - raise MissingSyslibdepError( - textwrap.dedent(f"""\ - LibdepsError: - Target '{str(target[0])}' depends on the availability of a - system provided library for '{syslib[len(Constants.MissingLibdep):]}', - but no suitable library was found during configuration.""") - ) - - deps.append(syslib) - - cleaned_deps = [] - seen = set() - for dep in reversed(deps): - if dep not in seen: - seen.add(str(dep)) - cleaned_deps.append(dep) - deps = list(reversed(cleaned_deps)) - - setattr(target[0].attributes, Constants.SysLibdepsCached, deps) - return stringify_deps(env, deps) - - -def _append_direct_libdeps(node, prereq_nodes): - # We do not bother to decorate nodes that are not actual Objects - if type(node) == str: - return - if getattr(node.attributes, "libdeps_direct", None) is None: - node.attributes.libdeps_direct = [] - node.attributes.libdeps_direct.extend(prereq_nodes) - - -def _get_libdeps_with_link_flags(source, target, env, for_signature): - for lib in get_libdeps(source, target, env, for_signature): - # Make sure lib is a Node so we can get the env to check for flags. - libnode = lib - if not isinstance(lib, (str, SCons.Node.FS.File, SCons.Node.FS.Entry)): - libnode = env.File(lib) - - # Virtual libdeps don't appear on the link line - if "virtual-libdep" in libnode.get_env().get("LIBDEPS_TAGS", []): - continue - - # Create a libdep and parse the prefix and postfix (and separators if any) - # flags from the environment. 
- cur_lib = FlaggedLibdep(libnode, env) - yield cur_lib - - -def _get_node_with_ixes(env, node, node_builder_type): - """ - Gets the node passed in node with the correct ixes applied - for the given builder type. - """ - - if not node: - return node - - node_builder = env["BUILDERS"][node_builder_type] - node_factory = node_builder.target_factory or env.File - - # Cache the 'ixes' in a function scope global so we don't need - # to run SCons performance intensive 'subst' each time - cache_key = (id(env), node_builder_type) - try: - prefix, suffix = _get_node_with_ixes.node_type_ixes[cache_key] - except KeyError: - prefix = node_builder.get_prefix(env) - suffix = node_builder.get_suffix(env) - - # TODO(SERVER-50681): Find a way to do this that doesn't hard - # code these extensions. See the code review for SERVER-27507 - # for additional discussion. - if suffix == ".dll": - suffix = ".lib" - - _get_node_with_ixes.node_type_ixes[cache_key] = (prefix, suffix) - - node_with_ixes = SCons.Util.adjustixes(node, prefix, suffix) - return node_factory(node_with_ixes) - - -_get_node_with_ixes.node_type_ixes = dict() - - -def add_node_from(env, node, bazel=False): - if bazel: - builder = "Bazel" - elif node.has_builder(): - builder = node.builder.get_name(env) - else: - builder = "Unkown" - - node_path = node if bazel else node.abspath - - env.GetLibdepsGraph().add_nodes_from( - [ - ( - node_path, - {NodeProps.bin_type.name: builder}, - ) - ] - ) - - return node_path - - -def add_edge_from(env, from_node, to_node, visibility, direct, bazel=False): - from_node_path = add_node_from(env, from_node) - to_node_path = add_node_from(env, to_node, bazel) - - env.GetLibdepsGraph().add_edges_from( - [ - ( - from_node_path, - to_node_path, - { - EdgeProps.direct.name: direct, - EdgeProps.visibility.name: int(visibility), - }, - ) - ] - ) - - -def add_libdeps_node(env, target, libdeps): - if str(target).endswith(env["SHLIBSUFFIX"]): - node = _get_node_with_ixes(env, str(target.abspath), 
target.get_builder().get_name(env)) - add_node_from(env, node) - - for libdep in libdeps: - if str(libdep.target_node).endswith(env["SHLIBSUFFIX"]): - add_edge_from( - env, - node, - libdep.target_node, - visibility=libdep.dependency_type, - direct=True, - ) - - -def get_libdeps_nodes(env, target, builder, debug=False, visibility_map=None): - if visibility_map is None: - visibility_map = dependency_visibility_ignored - - if not SCons.Util.is_List(target): - target = [target] - - # Get the current list of nodes not to inherit on each target - no_inherit = set(env.get(Constants.LibdepsNoInherit, [])) - - # Get all the libdeps from the env so we can - # can append them to the current target_node. - libdeps = [] - for dep_type in sorted(visibility_map.keys()): - if dep_type == deptype.Global: - if any("conftest" in str(t) for t in target): - # Ignore global dependencies for conftests - continue - - # Libraries may not be stored as a list in the env, - # so we must convert single library strings to a list. - libs = env.get(dep_type_to_env_var[dep_type], []).copy() - if not SCons.Util.is_List(libs): - libs = [libs] - - for lib in libs: - if not lib: - continue - - lib_with_ixes = _get_node_with_ixes(env, lib, builder) - - if lib in no_inherit: - if debug and not any("conftest" in str(t) for t in target): - print(f" {dep_type[1]} =/> {lib}") - - else: - if debug and not any("conftest" in str(t) for t in target): - print(f" {dep_type[1]} => {lib}") - - libdeps.append(dependency(lib_with_ixes, dep_type, lib)) - - return libdeps - - -def libdeps_emitter( - target, source, env, debug=False, builder=None, visibility_map=None, ignore_progdeps=False -): - """SCons emitter that takes values from the LIBDEPS environment variable and - converts them to File node objects, binding correct path information into - those File objects. - - Emitters run on a particular "target" node during the initial execution of - the SConscript file, rather than during the later build phase. 
When they - run, the "env" environment's working directory information is what you - expect it to be -- that is, the working directory is considered to be the - one that contains the SConscript file. This allows specification of - relative paths to LIBDEPS elements. - - This emitter also adds LIBSUFFIX and LIBPREFIX appropriately. - - NOTE: For purposes of LIBDEPS_DEPENDENTS propagation, only the first member - of the "target" list is made a prerequisite of the elements of LIBDEPS_DEPENDENTS. - """ - - if visibility_map is None: - visibility_map = dependency_visibility_ignored - - if debug and not any("conftest" in str(t) for t in target): - print(f"LIBDEPS EMITTER: {str(target[0])}") - print(" Declared dependencies:") - print(f" global: {env.get(Constants.LibdepsGlobal, None)}") - print(f" private: {env.get(Constants.LibdepsPrivate, None)}") - print(f" public: {env.get(Constants.Libdeps, None)}") - print(f" interface: {env.get(Constants.LibdepsInterface, None)}") - print(f" no_inherit: {env.get(Constants.LibdepsNoInherit, None)}") - print(" Edges:") - - libdeps = get_libdeps_nodes(env, target, builder, debug, visibility_map) - - if debug and not any("conftest" in str(t) for t in target): - print("\n") - - # Lint the libdeps to make sure they are following the rules. - # This will skip some or all of the checks depending on the options - # and LIBDEPS_TAGS used. - if not any("conftest" in str(t) for t in target): - LibdepLinter(env, target).lint_libdeps(libdeps) - - if env.get("SYMBOLDEPSSUFFIX", None): - for t in target: - add_libdeps_node(env, t, libdeps) - - # We ignored the visibility_map until now because we needed to use - # original dependency value for linting. Now go back through and - # use the map to convert to the desired dependencies, for example - # all Public in the static linking case. 
- for libdep in libdeps: - libdep.dependency_type = visibility_map[libdep.dependency_type] - - for t in target: - # target[0] must be a Node and not a string, or else libdeps will fail to - # work properly. - _append_direct_libdeps(t, libdeps) - - for dependent in env.get(Constants.LibdepsDependents, []): - if dependent is None: - continue - - visibility = deptype.Private - if isinstance(dependent, tuple): - visibility = dependent[1] - dependent = dependent[0] - - dependentNode = _get_node_with_ixes(env, dependent, builder) - _append_direct_libdeps( - dependentNode, [dependency(target[0], visibility_map[visibility], dependent)] - ) - - if not ignore_progdeps: - for dependent in env.get(Constants.ProgdepsDependents, []): - if dependent is None: - continue - - visibility = deptype.Public - if isinstance(dependent, tuple): - # TODO: Error here? Non-public PROGDEPS_DEPENDENTS probably are meaningless - visibility = dependent[1] - dependent = dependent[0] - - dependentNode = _get_node_with_ixes(env, dependent, "Program") - _append_direct_libdeps( - dependentNode, [dependency(target[0], visibility_map[visibility], dependent)] - ) - - return target, source - - -def expand_libdeps_tags(source, target, env, for_signature): - results = [] - for expansion in env.get(Constants.LibdepsTagExpansion, []): - results.append(expansion(source, target, env, for_signature)) - return results - - -def get_digest(file_path): - h = hashlib.sha256() - - with open(file_path, "rb") as file: - while True: - # Reading is buffered, so we can read smaller chunks. 
- chunk = file.read(h.block_size) - if not chunk: - break - h.update(chunk) - - return h.hexdigest() - - -def handle_bazel_lib_link_flags(env, libext, libs): - global EMITTING_SHARED - if env.TargetOSIs("linux", "freebsd", "openbsd"): - if libext == env.subst("$SHLIBSUFFIX") and not EMITTING_SHARED == "dynamic-sdk": - return [env["LINK_AS_NEEDED_LIB_END"]] + libs - else: - return ( - [env["LINK_WHOLE_ARCHIVE_LIB_START"]] + libs + [env["LINK_WHOLE_ARCHIVE_LIB_END"]] - ) - - elif env.TargetOSIs("darwin"): - if libext != env.subst("$SHLIBSUFFIX") or EMITTING_SHARED == "dynamic-sdk": - return env.Flatten([[env["LINK_WHOLE_ARCHIVE_LIB_START"], lib] for lib in libs]) - else: - return env.Flatten([[env["LINK_AS_NEEDED_LIB_START"], lib] for lib in libs]) - - elif env.TargetOSIs("windows"): - return [env["LINK_WHOLE_ARCHIVE_LIB_START"] + ":" + lib for lib in libs] - return [] - - -def add_bazel_libdep(env, libdep, bazel_libdeps): - if libdep.has_builder() and libdep.get_builder().get_name(env) == "ThinTarget": - bazel_libdep = env["SCONS2BAZEL_TARGETS"].bazel_target(libdep) - if bazel_libdep not in bazel_libdeps: - bazel_libdeps.append(bazel_libdep) - return True - return False - - -def query_for_results(env, bazel_target, libdeps_ext, bazel_targets_checked): - global EMITTING_SHARED - # first check if the deps query is in the cache - results = env.CheckBazelDepsCache(bazel_target) - if results is None: - # new query to run, run and cache it - linkfile = bazel_target.replace("//src/", "bazel-bin/src/") + "_links.list" - linkfile = "/".join(linkfile.rsplit(":", 1)) - with open(linkfile) as f: - results = f.read() - - env.AddBazelDepsCache(bazel_target, results) - - # now we have some hidden deps to process, if they are the correct - # ext we want to link with, make scons node, verify its a ThinTarget, and then add - # to the results - - libs_to_cache = [] - for line in results.splitlines(): - if line.endswith(libdeps_ext): - scons_node = env.File( - 
line.replace(f"{env['BAZEL_OUT_DIR']}/src", env.Dir("$BUILD_DIR").path) - ) - if scons_node.has_builder(): - if scons_node.get_builder().get_name(env) == "ThinTarget": - if EMITTING_SHARED == "dynamic-sdk": - basefile = os.path.splitext(line)[0] - line = basefile + env.subst("$SHLIBSUFFIX") + env.subst("$LIBSUFFIX") - libs_to_cache.append(line) - # Since the deps from the query are transitive we can look for other targets that will be - # covered by that transitive tree for the given link command. This allow us to skip doing - # unnecessary queries and processing - bazel_targets_checked.add(env["SCONS2BAZEL_TARGETS"].bazel_target(scons_node)) - - # now we have some the deps in link specific form, we can cache the for linking specificaly to - # save time on the processing, not just the original query - env.AddBazelLinkDepsCache(bazel_target, libs_to_cache) - - return libs_to_cache - - -BAZEL_SIG_CACHE = {} - - -def process_bazel_libdeps(env, bazel_libdeps_to_add, libdeps_ext, for_sig): - global BAZEL_SIG_CACHE - - bazel_libs = [] - bazel_libs_set = set() - bazel_targets_checked = set() - signature = [] - bazel_libs_to_append = [] - start_time = time.time() - try: - # check the cache for any queries we need to run, and add the hidden deps to the list to link - for bazel_target in bazel_libdeps_to_add: - if bazel_target not in bazel_targets_checked: - bazel_targets_checked.add(bazel_target) - results = env.CheckBazelLinkDepsCache(bazel_target) - if not results: - results = query_for_results( - env, bazel_target, libdeps_ext, bazel_targets_checked - ) - new_libs = [lib for lib in results if lib not in bazel_libs_set] - bazel_libs_set.update(new_libs) - bazel_libs.extend(new_libs) - # if this is running to generate a signature to determine up to dateness, generate one for scons - if for_sig and env.GetOption("ninja") == "disabled": - for lib in bazel_libs: - if str(lib) in BAZEL_SIG_CACHE: - signature += [BAZEL_SIG_CACHE[str(lib)]] - else: - sig = [get_digest(str(lib))] 
- BAZEL_SIG_CACHE[str(lib)] = sig - signature += sig - return signature, bazel_libs - - # add any per library link flags (whole archive flags) - if bazel_libs: - bazel_libs_to_append = handle_bazel_lib_link_flags(env, libdeps_ext, bazel_libs) - - except: - traceback.print_exc() - # record time for metrics - env.AddLibdepsTime(time.time() - start_time) - - return bazel_libs_to_append, bazel_libs - - -EMITTING_SHARED = None - - -def expand_libdeps_for_link(source, target, env, for_signature): - global EMITTING_SHARED, BAZEL_SIG_CACHE, BAZEL_LIBDEPS_AUTOINSTALLED - - libdeps_with_flags = [] - # Used to make modifications to the previous libdep on the link line - # if needed. An empty class here will make the switch_flag conditionals - # below a bit cleaner. - prev_libdep = None - bazel_libdeps_to_add = [] - - if EMITTING_SHARED == "dynamic": - libdeps_ext = env.subst("$SHLIBSUFFIX") - elif EMITTING_SHARED == "dynamic-sdk": - if env.TargetOSIs("windows"): - libdeps_ext = env.subst("$LIBSUFFIX") - else: - libdeps_ext = env.subst("$SHLIBSUFFIX") - else: - libdeps_ext = env.subst("$LIBSUFFIX") - - # check if we are ThinTarget ourselves - add_bazel_libdep(env, target[0], bazel_libdeps_to_add) - - for flagged_libdep in _get_libdeps_with_link_flags(source, target, env, for_signature): - # thin targets will be processed different so continue if we find one - if add_bazel_libdep(env, flagged_libdep.libnode, bazel_libdeps_to_add): - continue - - # If there are no flags to process we can move on to the next lib. - # start_index wont mater in the case because if there are no flags - # on the previous lib, then we will never need to do the chopping - # mechanism on the next iteration. - if not flagged_libdep.prefix_flags and not flagged_libdep.postfix_flags: - libdeps_with_flags.append(str(flagged_libdep)) - prev_libdep = flagged_libdep - continue - - # This for loop will go through the previous results and remove the 'off' - # flag as well as removing the new 'on' flag. 
For example, let libA and libB - # both use on and off flags which would normally generate on the link line as: - # -Wl--on-flag libA.a -Wl--off-flag -Wl--on-flag libA.a -Wl--off-flag - # This loop below will spot the cases were the flag was turned off and then - # immediately turned back on - for switch_flag in getattr(flagged_libdep.libnode.attributes, "libdeps_switch_flags", []): - if ( - prev_libdep - and switch_flag["on"] in flagged_libdep.prefix_flags - and switch_flag["off"] in prev_libdep.postfix_flags - ): - flagged_libdep.prefix_flags.remove(switch_flag["on"]) - prev_libdep.postfix_flags.remove(switch_flag["off"]) - - # prev_lib has had its list modified, and it has a start index - # from the last iteration, so it will chop of the end the current - # list and reapply the end with the new flags. - prev_libdep.add_lib_to_result_list(libdeps_with_flags) - - # Store the information of the len of the current list before adding - # the next set of flags as that will be the start index for the previous - # lib next time around in case there are any switch flags to chop off. - start_index = len(libdeps_with_flags) - flagged_libdep.add_lib_to_result_list(libdeps_with_flags) - - # Done processing the current lib, so set it to previous for the next iteration. 
- prev_libdep = flagged_libdep - prev_libdep.start_index = start_index - - if "conftest" not in str(target[0]): - # process all the thin targets we gathers to search for hidden deps to link - bazel_libdeps_args, bazel_libdeps = process_bazel_libdeps( - env, bazel_libdeps_to_add, libdeps_ext, for_signature - ) - setattr(target[0].attributes, "bazel_libdeps", bazel_libdeps) - - else: - bazel_libdeps_args = [] - - return libdeps_with_flags + bazel_libdeps_args - - -def generate_libdeps_graph(env): - if env.get("SYMBOLDEPSSUFFIX", None): - find_symbols = env.Dir("$BUILD_DIR").path + "/libdeps/find_symbols" - - symbol_deps = [] - for symbols_file, target_node in env.get("LIBDEPS_SYMBOL_DEP_FILES", []): - direct_libdeps = [] - bazel_libdeps = [] - for direct_libdep in _get_sorted_direct_libdeps(target_node): - add_node_from(env, direct_libdep.target_node) - add_edge_from( - env, - target_node, - direct_libdep.target_node, - visibility=int(direct_libdep.dependency_type), - direct=True, - ) - direct_libdeps.append(direct_libdep.target_node.abspath) - if direct_libdep.target_node.builder.get_name(env) == "ThinTarget": - add_bazel_libdep(env, direct_libdep.target_node, bazel_libdeps) - _, bazel_libdeps = process_bazel_libdeps( - env, bazel_libdeps, env.subst("$SHLIBSUFFIX"), False - ) - for libdep in bazel_libdeps: - add_node_from(env, libdep, bazel=True) - add_edge_from( - env, - direct_libdep.target_node, - libdep, - visibility=int(deptype.Private), - direct=False, - bazel=True, - ) - - for libdep in _get_libdeps(target_node): - if libdep.abspath not in direct_libdeps: - add_node_from(env, libdep) - add_edge_from( - env, - target_node, - libdep, - visibility=int(deptype.Public), - direct=False, - ) - if env["PLATFORM"] == "darwin": - sep = " " - else: - sep = ":" - ld_path = [os.path.dirname(str(libdep)) for libdep in _get_libdeps(target_node)] - ld_path.extend( - [ - path.replace(env.Dir("$BUILD_DIR").path, f"{env['BAZEL_OUT_DIR']}/src") - for path in ld_path - ] - ) - 
ld_path = sep.join(ld_path) - symbol_deps.append( - env.Command( - target=symbols_file, - source=target_node, - action=SCons.Action.Action( - f'{find_symbols} $SOURCE "{ld_path}" $TARGET', - "Generating $SOURCE symbol dependencies" if not env["VERBOSE"] else "", - ), - ) - ) - - def write_graph_hash(env, target, source): - with open(target[0].path, "w") as f: - json_str = json.dumps( - networkx.readwrite.json_graph.node_link_data(env.GetLibdepsGraph()), - sort_keys=True, - ).encode("utf-8") - f.write(hashlib.sha256(json_str).hexdigest()) - - graph_hash = env.Command( - target="$BUILD_DIR/libdeps/graph_hash.sha256", - source=symbol_deps, - action=SCons.Action.FunctionAction( - write_graph_hash, - {"cmdstr": None}, - ), - ) - env.Depends( - graph_hash, - [env.File("#SConstruct")] - + glob.glob("**/SConscript", recursive=True) - + [ - os.path.abspath(__file__), - env.File("$BAZEL_OUT_DIR/src/mongo/util/version_constants.h"), - ], - ) - - graph_node = env.Command( - target=env.get("LIBDEPS_GRAPH_FILE", None), - source=symbol_deps, - action=SCons.Action.FunctionAction( - generate_graph, - {"cmdstr": "Generating libdeps graph"}, - ), - ) - - env.Depends(graph_node, [graph_hash] + env.Glob("#buildscripts/libdeps/libdeps/*")) - - -def generate_graph(env, target, source): - libdeps_graph = env.GetLibdepsGraph() - - demangled_symbols = {} - for symbol_deps_file in source: - with open(str(symbol_deps_file)) as f: - symbols = {} - try: - for symbol, lib in json.load(f).items(): - # ignore symbols from external libraries, - # they will just clutter the graph - if lib.startswith(env.Dir("$BUILD_DIR").path): - if lib not in symbols: - symbols[lib] = [] - symbols[lib].append(symbol) - except json.JSONDecodeError: - env.FatalError(f"Failed processing json file: {str(symbol_deps_file)}") - - demangled_symbols[str(symbol_deps_file)] = symbols - - p1 = subprocess.Popen( - ["c++filt", "-n"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout, stderr = 
p1.communicate(json.dumps(demangled_symbols).encode("utf-8")) - demangled_symbols = json.loads(stdout.decode("utf-8")) - - for deps_file in demangled_symbols: - for libdep in demangled_symbols[deps_file]: - from_node = os.path.abspath(str(deps_file)[: -len(env["SYMBOLDEPSSUFFIX"])]) - to_node = os.path.abspath(libdep).strip() - libdeps_graph.add_edges_from( - [ - ( - from_node, - to_node, - {EdgeProps.symbols.name: "\n".join(demangled_symbols[deps_file][libdep])}, - ) - ] - ) - node = env.File(str(deps_file)[: -len(env["SYMBOLDEPSSUFFIX"])]) - add_node_from(env, node) - - libdeps_graph_file = f"{env.Dir('$BUILD_DIR').path}/libdeps/libdeps.graphml" - networkx.write_graphml(libdeps_graph, libdeps_graph_file, named_key_ids=True) - with fileinput.FileInput(libdeps_graph_file, inplace=True) as file: - for line in file: - print(line.replace(str(env.Dir("$BUILD_DIR").abspath + os.sep), ""), end="") - - -def setup_environment(env, emitting_shared=False, debug="off", linting="on"): - """Set up the given build environment to do LIBDEPS tracking.""" - - LibdepLinter.skip_linting = linting == "off" - LibdepLinter.print_linter_errors = linting == "print" - global EMITTING_SHARED - EMITTING_SHARED = emitting_shared - - try: - env["_LIBDEPS"] - except KeyError: - env["_LIBDEPS"] = "$_LIBDEPS_LIBS" - - env["_LIBDEPS_TAGS"] = expand_libdeps_tags - env["_LIBDEPS_GET_LIBS"] = partial(get_libdeps, debug=debug) - env["_LIBDEPS_OBJS"] = partial(get_libdeps_objs, debug=debug) - env["_SYSLIBDEPS"] = partial( - get_syslibdeps, debug=debug, shared=emitting_shared.startswith("dynamic") - ) - - env[Constants.Libdeps] = SCons.Util.CLVar() - env[Constants.SysLibdeps] = SCons.Util.CLVar() - - # Create the alias for graph generation, the existence of this alias - # on the command line will cause the libdeps-graph generation to be - # configured. 
- env["LIBDEPS_GRAPH_ALIAS"] = env.Alias( - "generate-libdeps-graph", - "${BUILD_DIR}/libdeps/libdeps.graphml", - )[0] - - if str(env["LIBDEPS_GRAPH_ALIAS"]) in COMMAND_LINE_TARGETS: - # Detect if the current system has the tools to perform the generation. - if env.GetOption("ninja") != "disabled": - env.FatalError("Libdeps graph generation is not supported with ninja builds.") - if not emitting_shared.startswith("dynamic"): - env.FatalError("Libdeps graph generation currently only supports dynamic builds.") - - if env["PLATFORM"] == "darwin": - required_bins = ["awk", "sed", "otool", "nm"] - else: - required_bins = ["awk", "grep", "ldd", "nm"] - for bin in required_bins: - if not env.WhereIs(bin): - env.FatalError(f"'{bin}' not found, Libdeps graph generation requires {bin}.") - - # Here we are setting up some functions which will return single instance of the - # network graph and symbol deps list. We also setup some environment variables - # which are used along side the functions. - symbol_deps = [] - - def append_symbol_deps(env, symbol_deps_file): - env.Depends(env["LIBDEPS_GRAPH_FILE"], symbol_deps_file[0]) - symbol_deps.append(symbol_deps_file) - - env.AddMethod(append_symbol_deps, "AppendSymbolDeps") - - env["LIBDEPS_SYMBOL_DEP_FILES"] = symbol_deps - env["LIBDEPS_GRAPH_FILE"] = env.File("${BUILD_DIR}/libdeps/libdeps.graphml") - env["LIBDEPS_GRAPH_SCHEMA_VERSION"] = 5 - env["SYMBOLDEPSSUFFIX"] = ".symbol_deps" - - libdeps_graph = LibdepsGraph() - libdeps_graph.graph["invocation"] = " ".join( - [env["ESCAPE"](str(sys.executable))] + [env["ESCAPE"](arg) for arg in sys.argv] - ) - libdeps_graph.graph["git_hash"] = env["MONGO_GIT_HASH"] - libdeps_graph.graph["graph_schema_version"] = env["LIBDEPS_GRAPH_SCHEMA_VERSION"] - libdeps_graph.graph["build_dir"] = env.Dir("$BUILD_DIR").path - libdeps_graph.graph["deptypes"] = json.dumps( - { - key: value[0] - for key, value in deptype.__members__.items() - if isinstance(value, tuple) - } - ) - - def 
get_libdeps_graph(env): - return libdeps_graph - - env.AddMethod(get_libdeps_graph, "GetLibdepsGraph") - - # Now we will setup an emitter, and an additional action for several - # of the builder involved with dynamic builds. - def libdeps_graph_emitter(target, source, env): - if "conftest" not in str(target[0]): - symbol_deps_file = env.File(str(target[0]) + env["SYMBOLDEPSSUFFIX"]) - env.Depends(symbol_deps_file, "${BUILD_DIR}/libdeps/find_symbols") - env.AppendSymbolDeps((symbol_deps_file, target[0])) - - return target, source - - for builder_name in ("Program", "SharedLibrary", "LoadableModule"): - builder = env["BUILDERS"][builder_name] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([base_emitter, libdeps_graph_emitter]) - builder.emitter = new_emitter - - env.Append( - LIBDEPS_LIBEMITTER=partial( - libdeps_emitter, - debug=debug, - builder="StaticLibrary", - ), - LIBEMITTER=lambda target, source, env: env["LIBDEPS_LIBEMITTER"](target, source, env), - LIBDEPS_SHAREMITTER=partial( - libdeps_emitter, - debug=debug, - builder="SharedArchive", - ignore_progdeps=True, - ), - SHAREMITTER=lambda target, source, env: env["LIBDEPS_SHAREMITTER"](target, source, env), - LIBDEPS_SHLIBEMITTER=partial( - libdeps_emitter, - debug=debug, - builder="SharedLibrary", - visibility_map=dependency_visibility_honored, - ), - SHLIBEMITTER=lambda target, source, env: env["LIBDEPS_SHLIBEMITTER"](target, source, env), - LIBDEPS_PROGEMITTER=partial( - libdeps_emitter, - debug=debug, - builder="SharedLibrary" if emitting_shared.startswith("dynamic") else "StaticLibrary", - ), - PROGEMITTER=lambda target, source, env: env["LIBDEPS_PROGEMITTER"](target, source, env), - ) - - env["_LIBDEPS_LIBS_FOR_LINK"] = expand_libdeps_for_link - - env["_LIBDEPS_LIBS"] = "$LINK_LIBGROUP_START " "$_LIBDEPS_LIBS_FOR_LINK " "$LINK_LIBGROUP_END " - - env.Prepend(_LIBFLAGS="$_LIBDEPS_TAGS $_LIBDEPS $_SYSLIBDEPS ") - for builder_name in ("Program", "SharedLibrary", "LoadableModule", 
"SharedArchive"): - try: - update_scanner(env, builder_name, debug=debug) - except KeyError: - pass - - -def setup_conftests(conf): - def FindSysLibDep(context, name, libs, **kwargs): - var = "LIBDEPS_" + name.upper() + "_SYSLIBDEP" - kwargs["autoadd"] = False - for lib in libs: - result = context.sconf.CheckLib(lib, **kwargs) - context.did_show_result = 1 - if result: - context.env[var] = lib - context.Result(result) - return result - context.env[var] = _missing_syslib(name) - context.Result(result) - return result - - conf.AddTest("FindSysLibDep", FindSysLibDep) diff --git a/site_scons/mongo/BUILD.bazel b/site_scons/mongo/BUILD.bazel deleted file mode 100644 index 13eb083863d..00000000000 --- a/site_scons/mongo/BUILD.bazel +++ /dev/null @@ -1,3 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -exports_files(["pip_requirements.py"]) diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py deleted file mode 100644 index 26d6cf8366b..00000000000 --- a/site_scons/mongo/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- mode: python; -*- - -# General utility functions live in this file. - -import bisect - - -def print_build_failures(): - from SCons.Script import GetBuildFailures - - for bf in GetBuildFailures(): - print("%s failed: %s" % (bf.node, bf.errstr)) - - -def insort_wrapper(target_list, target_string): - """ - Removes instances of empty list inside the list before handing it to insort. 
- """ - from SCons.Util import flatten - - target_list[:] = flatten(target_list) - bisect.insort(target_list, target_string) diff --git a/site_scons/mongo/build_profiles.py b/site_scons/mongo/build_profiles.py deleted file mode 100644 index d19e4c43661..00000000000 --- a/site_scons/mongo/build_profiles.py +++ /dev/null @@ -1,575 +0,0 @@ -"""Dictionary to store available build profiles.""" - -import enum -from dataclasses import dataclass -from typing import Any, List, Optional - -import mongo.generators as mongo_generators -from site_scons.mongo import platform - - -class BuildProfileType(str, enum.Enum): - DEFAULT = "default" - FAST = "fast" - OPT = "opt" - SAN = "san" - TSAN = "tsan" - COMPILE_DB = "compiledb" - RELEASE = "release" - - -class BuildProfileNotSupported(Exception): - pass - - -@dataclass -class BuildProfile: - ninja: str - variables_files: List - allocator: str - sanitize: Optional[str] - link_model: str - dbg: str - debug_symbols: str - opt: str - ICECC: Optional[str] - CCACHE: Optional[str] - NINJA_PREFIX: str - VARIANT_DIR: Any - disable_warnings_as_errors: Optional[List] - release: str - remote_exec_release: str - jlink: float - libunwind: str - - -def get_build_profile(type): - os_name = platform.get_running_os_name() - build_profile = _get_build_profile(type, os_name) - - if not build_profile: - raise BuildProfileNotSupported(f"{type} is not supported on {os_name}") - - return build_profile - - -def _get_build_profile(type, os_name): - if os_name == "windows": - return WINDOWS_BUILD_PROFILES[type] - elif os_name == "macOS": - if platform.is_arm_processor(): - return MACOS_ARM_BUILD_PROFILES[type] - else: - return MACOS_BUILD_PROFILES[type] - else: - return LINUX_BUILD_PROFILES[type] - - -LINUX_BUILD_PROFILES = { - # These options were the default settings before implementing build profiles. 
- BuildProfileType.DEFAULT: BuildProfile( - ninja="disabled", - variables_files=["./etc/scons/mongodbtoolchain_stable_gcc.vars"], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="auto", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="build", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & fast build time at the cost of debuggability. - BuildProfileType.FAST: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/mongodbtoolchain_stable_clang.vars", - ], - allocator="auto", - sanitize=None, - link_model="dynamic", - dbg="off", - debug_symbols="off", - opt="off", - ICECC="icecc", - CCACHE="ccache", - NINJA_PREFIX="fast", - VARIANT_DIR="fast", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & debuggability at the cost of build time. - BuildProfileType.OPT: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/mongodbtoolchain_stable_clang.vars", - ], - allocator="auto", - sanitize=None, - link_model="dynamic", - dbg="off", - debug_symbols="on", - opt="on", - ICECC="icecc", - CCACHE="ccache", - NINJA_PREFIX="opt", - VARIANT_DIR="opt", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build leverages santizers & is the suggested build profile to use for development. 
- BuildProfileType.SAN: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/mongodbtoolchain_stable_clang.vars", - ], - allocator="system", - sanitize="undefined,address", - link_model="dynamic", - dbg="on", - debug_symbols="on", - opt="debug", - ICECC="icecc", - CCACHE="ccache", - NINJA_PREFIX="san", - VARIANT_DIR="san", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build leverages thread sanitizers. - BuildProfileType.TSAN: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/mongodbtoolchain_stable_clang.vars", - ], - allocator="system", - sanitize="thread", - link_model="dynamic", - dbg="on", - debug_symbols="on", - opt="on", - ICECC="icecc", - CCACHE="ccache", - NINJA_PREFIX="tsan", - VARIANT_DIR="tsan", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="off", - ), - # These options are the preferred settings for compiledb to generating compile_commands.json - BuildProfileType.COMPILE_DB: BuildProfile( - ninja="disabled", - variables_files=[ - "./etc/scons/mongodbtoolchain_stable_clang.vars", - "./etc/scons/developer_versions.vars", - ], - allocator="auto", - sanitize=None, - link_model="dynamic", - dbg="on", - debug_symbols="on", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="compiledb", - VARIANT_DIR="compiledb", - disable_warnings_as_errors=["source"], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # These options were the default settings before implementing build profiles. 
- BuildProfileType.RELEASE: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/mongodbtoolchain_stable_gcc.vars", - ], - allocator="auto", - sanitize=None, - link_model="static", - dbg="off", - debug_symbols="on", - opt="on", - ICECC="icecc", - CCACHE="ccache", - NINJA_PREFIX="release", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="on", - remote_exec_release="on", - jlink=0.01, - libunwind="auto", - ), -} - -WINDOWS_BUILD_PROFILES = { - # These options were the default settings before implementing build profiles. - BuildProfileType.DEFAULT: BuildProfile( - ninja="disabled", - variables_files=[], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="auto", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="build", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & fast build time at the cost of debuggability. - BuildProfileType.FAST: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="off", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="fast", - VARIANT_DIR="fast", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & debuggability at the cost of build time. 
- BuildProfileType.OPT: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="on", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="opt", - VARIANT_DIR="opt", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build leverages santizers & is the suggested build profile to use for development. - BuildProfileType.SAN: None, - # This build leverages thread sanitizers. - BuildProfileType.TSAN: None, - # These options are the preferred settings for compiledb to generating compile_commands.json - BuildProfileType.COMPILE_DB: BuildProfile( - ninja="disabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="on", - debug_symbols="on", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="compiledb", - VARIANT_DIR="compiledb", - disable_warnings_as_errors=["source"], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # These options were the default settings before implementing build profiles. - BuildProfileType.RELEASE: BuildProfile( - ninja="enabled", - variables_files=[], - allocator="auto", - sanitize=None, - link_model="static", - dbg="off", - debug_symbols="on", - opt="on", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="release", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="on", - remote_exec_release="on", - jlink=0.01, - libunwind="auto", - ), -} - -MACOS_BUILD_PROFILES = { - # These options were the default settings before implementing build profiles. 
- BuildProfileType.DEFAULT: BuildProfile( - ninja="disabled", - variables_files=[], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="auto", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="build", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & fast build time at the cost of debuggability. - BuildProfileType.FAST: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="off", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="fast", - VARIANT_DIR="fast", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & debuggability at the cost of build time. - BuildProfileType.OPT: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="on", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="opt", - VARIANT_DIR="opt", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build leverages santizers & is the suggested build profile to use for development. - BuildProfileType.SAN: None, - # This build leverages thread sanitizers. 
- BuildProfileType.TSAN: None, - # These options are the preferred settings for compiledb to generating compile_commands.json - BuildProfileType.COMPILE_DB: BuildProfile( - ninja="disabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="on", - debug_symbols="on", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="compiledb", - VARIANT_DIR="compiledb", - disable_warnings_as_errors=["source"], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # These options were the default settings before implementing build profiles. - BuildProfileType.RELEASE: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="static", - dbg="off", - debug_symbols="on", - opt="on", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="release", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="on", - remote_exec_release="on", - jlink=0.01, - libunwind="auto", - ), -} - -MACOS_ARM_BUILD_PROFILES = { - # These options were the default settings before implementing build profiles. - BuildProfileType.DEFAULT: BuildProfile( - ninja="disabled", - variables_files=[], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="auto", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="build", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & fast build time at the cost of debuggability. 
- BuildProfileType.FAST: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="off", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="fast", - VARIANT_DIR="fast", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build has fast runtime speed & debuggability at the cost of build time. - BuildProfileType.OPT: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="off", - debug_symbols="on", - opt="on", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="opt", - VARIANT_DIR="opt", - disable_warnings_as_errors=[], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # This build leverages santizers & is the suggested build profile to use for development. - BuildProfileType.SAN: None, - # This build leverages thread sanitizers. - BuildProfileType.TSAN: None, - # These options are the preferred settings for compiledb to generating compile_commands.json - BuildProfileType.COMPILE_DB: BuildProfile( - ninja="disabled", - variables_files=[ - "./etc/scons/developer_versions.vars", - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="auto", - dbg="on", - debug_symbols="on", - opt="off", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="compiledb", - VARIANT_DIR="compiledb", - disable_warnings_as_errors=["source"], - release="off", - remote_exec_release="off", - jlink=0.99, - libunwind="auto", - ), - # These options were the default settings before implementing build profiles. 
- BuildProfileType.RELEASE: BuildProfile( - ninja="enabled", - variables_files=[ - "./etc/scons/xcode_macosx_arm.vars", - ], - allocator="auto", - sanitize=None, - link_model="static", - dbg="off", - debug_symbols="on", - opt="on", - ICECC=None, - CCACHE=None, - NINJA_PREFIX="release", - VARIANT_DIR=mongo_generators.default_variant_dir_generator, - disable_warnings_as_errors=[], - release="on", - remote_exec_release="on", - jlink=0.01, - libunwind="auto", - ), -} diff --git a/site_scons/mongo/download_windows_sasl.py b/site_scons/mongo/download_windows_sasl.py deleted file mode 100644 index 6640dc2c69b..00000000000 --- a/site_scons/mongo/download_windows_sasl.py +++ /dev/null @@ -1,70 +0,0 @@ -import hashlib -import os -import time -import urllib -import urllib.request -import zipfile - -SASL_HASH = "3e22e2b16f802277123590f64dfda44f1c9c8a2b7e758180cd956d8ab0965817" -SASL_URL = "https://s3.amazonaws.com/boxes.10gen.com/build/windows_cyrus_sasl-2.1.28.zip" - - -def hash_sasl(sasl_dir): - md5_hash = hashlib.md5() - for root, _, files in os.walk(sasl_dir): - for name in files: - if name.endswith("md5sum"): - continue - with open(os.path.join(root, name), "rb") as f: - for block in iter(lambda: f.read(4096), b""): - md5_hash.update(block) - return md5_hash.hexdigest() - - -def hash_sasl_zip(sasl_zip): - sha_hash = hashlib.sha256() - with open(sasl_zip, "rb") as f: - for block in iter(lambda: f.read(4096), b""): - sha_hash.update(block) - return sha_hash.hexdigest() - - -def download_sasl(env): - complete = False - sasl_dir = env.Dir("$BUILD_ROOT/sasl_2_1_28").path - sasl_md5 = os.path.join(sasl_dir, "sasl.md5sum") - os.makedirs(sasl_dir, exist_ok=True) - if os.path.exists(sasl_md5): - with open(sasl_md5) as md5_file: - if hash_sasl(sasl_dir) == md5_file.read(): - complete = True - - if not complete: - print(f"Downloading sasl {SASL_URL}...") - for i in range(1, 5): - try: - local_filename, _ = urllib.request.urlretrieve(SASL_URL) - downloaded_hash = 
hash_sasl_zip(local_filename) - if downloaded_hash != SASL_HASH: - raise urllib.error.URLError( - f"Downloaded file hash: {downloaded_hash} does not match expected hash: {SASL_HASH}" - ) - except urllib.error.URLError as exc: - wait_time = i * i * 10 - if i == 4: - raise exc - else: - print(f"Failed to download {SASL_URL} because of:\n{exc}") - print(f"Retrying in {wait_time}...") - time.sleep(wait_time) - - zip_file_object = zipfile.ZipFile(local_filename, "r") - zip_file_object.extractall(sasl_dir) - zip_file_object.close() - os.remove(local_filename) - - with open(sasl_md5, "w") as md5_file: - md5_file.write(hash_sasl(sasl_dir)) - - env.Append(CPPPATH=[f"#{sasl_dir}/include"]) - env.Append(LIBPATH=[f"#{sasl_dir}/lib"]) diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py deleted file mode 100644 index 6cd111fca7d..00000000000 --- a/site_scons/mongo/generators.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- mode: python; -*- - -import hashlib - -# Default and alternative generator definitions go here. - - -# This is the key/value mapping that will be returned by the buildInfo command and -# printed by the --version command-line option to mongod. -# Each mapped value is in turn a dict consisting of: -# key: -# value: -# inBuildInfo: : should it be included in buildInfo output -# inVersion: : should it be included in --version output -# The `value` field will be passed through env.subst, so you can use any SCons variables you -# want to define them. 
-def default_buildinfo_environment_data(): - data = ( - ( - "distmod", - "$MONGO_DISTMOD", - True, - True, - ), - ( - "distarch", - "$MONGO_DISTARCH", - True, - True, - ), - ( - "cc", - "$CC_VERSION", - True, - False, - ), - ( - "ccflags", - "$CCFLAGS", - True, - False, - ), - ( - "cxx", - "$CXX_VERSION", - True, - False, - ), - ( - "cxxflags", - "$CXXFLAGS", - True, - False, - ), - ( - "linkflags", - "$LINKFLAGS", - True, - False, - ), - ( - "target_arch", - "$TARGET_ARCH", - True, - True, - ), - ( - "target_os", - "$TARGET_OS", - True, - False, - ), - ( - "cppdefines", - "$CPPDEFINES", - True, - False, - ), - ) - return { - k: {"key": k, "value": v, "inBuildInfo": ibi, "inVersion": iv} for k, v, ibi, iv in data - } - - -# If you want buildInfo and --version to be relatively empty, set -# MONGO_BUILDINFO_ENVIRONMENT_DATA = empty_buildinfo_environment_data() -def empty_buildinfo_environment_data(): - return {} - - -# Special cases - if debug is not enabled and optimization is not specified, -# default to full optimizationm otherwise turn it off. -def get_opt_options(env) -> str: - if env.GetOption("opt") == "auto": - return "on" if not env.GetOption("dbg") == "on" else "off" - else: - return env.GetOption("opt") - - -def default_variant_dir_generator(target, source, env, for_signature): - if env.GetOption("cache") is not None: - return "cached" - - # If an option should affect the variant directory, name it here. - variant_options = [ - "opt", - "dbg", - ] - - # Hash the named options and their values, and take the first 8 characters of the hash as - # the variant name - hasher = hashlib.md5() - for option in variant_options: - hasher.update(option.encode("utf-8")) - if option == "opt": - hasher.update(get_opt_options(env).encode("utf-8")) - else: - hasher.update(str(env.GetOption(option)).encode("utf-8")) - variant_dir = str(hasher.hexdigest()[0:8]) - - # If our option hash yields a well known hash, replace it with its name. 
- known_variant_hashes = { - "343e6678": "debug", - "85fcf9b0": "opt", - "981ce870": "debug", - "9fface73": "optdebug", - "c52b1cc3": "opt", - } - - return known_variant_hashes.get(variant_dir, variant_dir) - - -def os_specific_variant_dir_generator(target, source, env, for_signature): - return "-".join( - [ - env["TARGET_OS"], - default_variant_dir_generator(target, source, env, for_signature), - ] - ) diff --git a/site_scons/mongo/install_actions.py b/site_scons/mongo/install_actions.py deleted file mode 100644 index 092df379052..00000000000 --- a/site_scons/mongo/install_actions.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- mode: python; -*- - -import os -import shutil -import stat - - -def _copy(src, dst): - shutil.copy2(src, dst) - st = os.stat(src) - os.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - - -def _symlink(src, dst): - if os.path.islink(src): - _copy(src, dst) - else: - os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst) - - -def _hardlink(src, dst): - if os.path.islink(src): - _copy(src, dst) - else: - try: - os.link(src, dst) - except: - _copy(src, dst) - - -available_actions = { - "copy": _copy, - "hardlink": _hardlink, - "symlink": _symlink, -} - - -class _CopytreeError(EnvironmentError): - pass - - -def _generate_install_actions(base_action): - # This is a patched version of shutil.copytree from python 2.5. It - # doesn't fail if the dir exists, which regular copytree does - # (annoyingly). Note the XXX comment in the docstring. - def _mongo_copytree(src, dst, symlinks=False): - """Recursively copy a directory tree using copy2(). - - The destination directory must not already exist. - If exception(s) occur, an _CopytreeError is raised with a list of reasons. - - If the optional symlinks flag is true, symbolic links in the - source tree result in symbolic links in the destination tree; if - it is false, the contents of the files pointed to by symbolic - links are copied. 
- - XXX Consider this example code rather than the ultimate tool. - - """ - names = os.listdir(src) - # garyo@genarts.com fix: check for dir before making dirs. - if not os.path.exists(dst): - os.makedirs(dst) - errors = [] - for name in names: - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) - try: - if symlinks and os.path.islink(srcname): - linkto = os.readlink(srcname) - os.symlink(linkto, dstname) - elif os.path.isdir(srcname): - _mongo_copytree(srcname, dstname, symlinks) - else: - base_action(srcname, dstname) - # XXX What about devices, sockets etc.? - except (IOError, os.error) as why: - errors.append((srcname, dstname, str(why))) - # catch the _CopytreeError from the recursive copytree so that we can - # continue with other files - except _CopytreeError as err: - errors.extend(err.args[0]) - try: - shutil.copystat(src, dst) - except SCons.Util.WinError: - # can't copy file access times on Windows - pass - except OSError as why: - errors.extend((src, dst, str(why))) - if errors: - raise _CopytreeError(errors) - - # - # Functions doing the actual work of the Install Builder. - # - def _mongo_copyFunc(dest, source, env): - """Install a source file or directory into a destination by copying, - (including copying permission/mode bits).""" - - if os.path.isdir(source): - if os.path.exists(dest): - if not os.path.isdir(dest): - raise SCons.Errors.UserError( - "cannot overwrite non-directory `%s' with a directory `%s'" - % (str(dest), str(source)) - ) - else: - parent = os.path.split(dest)[0] - if not os.path.exists(parent): - os.makedirs(parent) - _mongo_copytree(source, dest) - else: - base_action(source, dest) - - return 0 - - # - # Functions doing the actual work of the InstallVersionedLib Builder. 
- # - def _mongo_copyFuncVersionedLib(dest, source, env): - """Install a versioned library into a destination by copying, - (including copying permission/mode bits) and then creating - required symlinks.""" - - if os.path.isdir(source): - raise SCons.Errors.UserError( - "cannot install directory `%s' as a version library" % str(source) - ) - else: - # remove the link if it is already there - try: - os.remove(dest) - except: - pass - base_action(source, dest) - SCons.tool.install.installShlibLinks(dest, source, env) - - return 0 - - return (_mongo_copyFunc, _mongo_copyFuncVersionedLib) - - -def setup(env, action): - if action == "default": - return - base_action = available_actions.get(action, None) - handlers = _generate_install_actions(base_action) - env["INSTALL"] = handlers[0] - env["INSTALLVERSIONEDLIB"] = handlers[1] diff --git a/site_scons/mongo/ninja_bazel_build.py b/site_scons/mongo/ninja_bazel_build.py deleted file mode 100644 index cf3582b6f2b..00000000000 --- a/site_scons/mongo/ninja_bazel_build.py +++ /dev/null @@ -1,162 +0,0 @@ -import argparse -import glob -import json -import os -import shlex -import shutil -import subprocess -import sys -import tempfile - -parser = argparse.ArgumentParser(description="Ninja Bazel builder.") - -parser.add_argument("--ninja-file", type=str, help="The ninja file in use", default="build.ninja") -parser.add_argument("--verbose", action="store_true", help="Turn on verbose mode") -parser.add_argument( - "--integration-debug", - action="store_true", - help="Turn on extra debug output about the ninja-bazel integration", -) - -args = parser.parse_args() - -# This corresponds to BAZEL_INTEGRATION_DEBUG=1 from SCons command line -if args.integration_debug: - - def print_debug(msg): - print("[BAZEL_INTEGRATION_DEBUG] " + msg) -else: - - def print_debug(msg): - pass - - -# our ninja python module intercepts the command lines and -# prints out the targets everytime ninja is executed -ninja_command_line_targets = [] -try: - 
ninja_last_cmd_file = ".ninja_last_command_line_targets.txt" - with open(ninja_last_cmd_file) as f: - ninja_command_line_targets = [target.strip() for target in f.readlines() if target.strip()] -except OSError as exc: - print( - f"Failed to open {ninja_last_cmd_file}, this is expected to be generated on ninja execution by the mongo-ninja-python module." - ) - raise exc - - -# Our ninja generation process generates all the build info related to -# the specific ninja file -ninja_build_info = dict() -try: - ninja_prefix = args.ninja_file.split(".")[0] - bazel_info_file = f".{ninja_prefix}.bazel_info_for_ninja.txt" - with open(bazel_info_file) as f: - ninja_build_info = json.load(f) -except OSError as exc: - print( - f"Failed to open {bazel_info_file}, this is expected to be generated by scons during ninja generation." - ) - raise exc - - -# flip the targets map for optimized use later -bazel_out_to_bazel_target = dict() -for bazel_t in ninja_build_info["targets"].values(): - bazel_out_to_bazel_target[bazel_t["bazel_output"]] = bazel_t["bazel_target"] - -# run ninja and get the deps from the passed command line targets so we can check if any deps are bazel targets -ninja_inputs_cmd = ["ninja", "-f", args.ninja_file, "-t", "inputs"] + ninja_command_line_targets -print_debug(f"NINJA GET INPUTS CMD: {' '.join(ninja_inputs_cmd)}") - -ninja_proc = subprocess.run(ninja_inputs_cmd, capture_output=True, text=True, check=True) -deps = [dep.replace("\\", "/") for dep in ninja_proc.stdout.split("\n") if dep] -print_debug(f"COMMAND LINE DEPS:{os.linesep}{os.linesep.join(deps)}") -os.unlink(ninja_last_cmd_file) - -# isolate just the raw output files for the list intersection -bazel_outputs = [bazel_t["bazel_output"] for bazel_t in ninja_build_info["targets"].values()] -print_debug(f"BAZEL OUTPUTS:{os.linesep}{os.linesep.join(bazel_outputs)}") - -# now out of possible bazel outputs find which are deps of the requested command line targets -outputs_to_build = 
list(set(deps).intersection(bazel_outputs)) -print_debug(f"BAZEL OUTPUTS TO BUILD: {outputs_to_build}") - -# convert from outputs (raw files) to bazel targets (bazel labels i.e //src/db/mongo:target) -targets_to_build = [bazel_out_to_bazel_target[out] for out in outputs_to_build] - -if ( - not targets_to_build - and "compiledb" not in ninja_command_line_targets - and "compile_commands.json" not in ninja_command_line_targets -): - print( - "WARNING: Did not resolve any bazel specific targets to build, this might not be correct." - ) - -list_files = glob.glob("bazel-out/**/*.gen_source_list", recursive=True) -gen_source_targets = [] -for list_file in list_files: - with open(list_file) as f: - gen_source_targets.append(f.read().strip()) -targets_to_build += gen_source_targets - -# ninja will automatically create directories for any outputs, but in this case -# bazel will be creating a symlink for the bazel-out dir to its cache. We don't want -# ninja to interfere so delete the dir if it was not a link (made by bazel) -if sys.platform == "win32": - if os.path.exists("bazel-out"): - try: - os.readlink("bazel-out") - except OSError: - shutil.rmtree("bazel-out") - -else: - if not os.path.islink("bazel-out"): - shutil.rmtree("bazel-out") - -env_flags = os.environ.get("BAZEL_FLAGS", "") -if env_flags: - print(f"Using shell env BAZEL_FLAGS: {env_flags}") - -if args.verbose: - extra_args = [] -else: - extra_args = ["--output_filter=DONT_MATCH_ANYTHING"] - -extra_args += shlex.split(env_flags, posix=(sys.platform != "win32")) - -bazel_env = os.environ.copy() -if ninja_build_info.get("USE_NATIVE_TOOLCHAIN"): - bazel_env["CC"] = ninja_build_info.get("CC") - bazel_env["CXX"] = ninja_build_info.get("CXX") - bazel_env["USE_NATIVE_TOOLCHAIN"] = "1" - -with tempfile.NamedTemporaryFile(mode="w+", delete=False) as tf: - tf_name = tf.name - tpf = f"--target_pattern_file={tf_name}" - extra_args += [tpf] - bazel_cmd = shlex.join(ninja_build_info["bazel_cmd"] + extra_args) - 
sys.stderr.write(f"Running bazel command:\n{bazel_cmd} [{len(targets_to_build)} targets...]\n") - tf.write("\n".join(targets_to_build)) - tf.close() - bazel_proc = subprocess.run( - ninja_build_info["bazel_cmd"] + extra_args, - env=bazel_env, - ) - -if bazel_proc.returncode != 0: - print("Command that failed:") - print(bazel_cmd) - sys.exit(1) -else: - os.remove(tf_name) -if ( - "compiledb" in ninja_command_line_targets - or "compile_commands.json" in ninja_command_line_targets -): - bazel_proc = subprocess.run(ninja_build_info["compiledb_cmd"], env=bazel_env) - if bazel_proc.returncode != 0: - print("Command that failed:") - print(" ".join(ninja_build_info["compiledb_cmd"])) - sys.exit(1) diff --git a/site_scons/mongo/platform.py b/site_scons/mongo/platform.py deleted file mode 100644 index a89f1af8f06..00000000000 --- a/site_scons/mongo/platform.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- mode: python; -*- -""" -Support code related to OS detection in general. System specific facilities or customization -hooks live in mongo_platform_.py files. -""" - -import os -import platform - -# --- OS identification --- -# -# This needs to precede the options section so that we can only offer some options on certain -# operating systems. - - -# This function gets the running OS as identified by Python -# It should only be used to set up defaults for options/variables, because -# its value could potentially be overridden by setting TARGET_OS on the -# command-line. 
Treat this output as the value of HOST_OS -def get_running_os_name(): - running_os = os.sys.platform - if running_os.startswith("linux"): - running_os = "linux" - elif running_os.startswith("freebsd"): - running_os = "freebsd" - elif running_os.startswith("openbsd"): - running_os = "openbsd" - elif running_os == "sunos5": - running_os = "solaris" - elif running_os == "win32": - running_os = "windows" - elif running_os == "darwin": - running_os = "macOS" - else: - running_os = "unknown" - return running_os - - -def env_get_os_name_wrapper(self): - return self["TARGET_OS"] - - -def is_os_raw(target_os, os_list_to_check): - darwin_os_list = ["macOS", "tvOS", "tvOS-sim", "iOS", "iOS-sim", "watchOS", "watchOS-sim"] - linux_os_list = ["android", "linux"] - posix_os_list = ["openbsd", "freebsd", "solaris", "emscripten"] + darwin_os_list + linux_os_list - - os_families = { - "darwin": darwin_os_list, - "posix": posix_os_list, - "linux": linux_os_list, - } - - for os in os_list_to_check: - if os == target_os or (os in os_families and target_os in os_families[os]): - return True - return False - - -# This function tests the running OS as identified by Python -# It should only be used to set up defaults for options/variables, because -# its value could potentially be overridden by setting TARGET_OS on the -# command-line. 
Treat this output as the value of HOST_OS -def is_running_os(*os_list): - return is_os_raw(get_running_os_name(), os_list) - - -def env_os_is_wrapper(self, *os_list): - return is_os_raw(self["TARGET_OS"], os_list) - - -def is_arm_processor(): - arch = platform.machine().lower() - - if "arm" in arch or "aarch64" in arch: - return True - - return False diff --git a/site_scons/mongo/toolchain.py b/site_scons/mongo/toolchain.py deleted file mode 100644 index 755467c5da7..00000000000 --- a/site_scons/mongo/toolchain.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- mode: python; -*- - -import subprocess - -import SCons - -# Helper functions for generic toolchain things go here - - -def get_toolchain_ver(env, tool): - # By default we don't know the version of each tool, and only report what - # command gets executed (gcc vs /opt/mongodbtoolchain/bin/gcc). - verstr = "version unknown" - proc = None - if env.ToolchainIs("clang", "gcc"): - proc = SCons.Action._subproc( - env, - env.subst("${%s} --version" % tool), - stdout=subprocess.PIPE, - stderr="devnull", - stdin="devnull", - universal_newlines=True, - error="raise", - shell=True, - ) - verstr = proc.stdout.readline() - - elif env.ToolchainIs("msvc") and env.TargetOSIs("windows"): - proc = SCons.Action._subproc( - env, - env.subst("${%s}" % tool), - stdout="devnull", - stderr=subprocess.PIPE, - stdin="devnull", - universal_newlines=True, - error="raise", - shell=True, - ) - verstr = proc.stderr.readline() - - # If we started a process, we should drain its stdout/stderr and wait for - # it to end. 
- if proc: - proc.communicate() - - return env.subst("${%s}: %s" % (tool, verstr)) diff --git a/site_scons/site_tools/BUILD.bazel b/site_scons/site_tools/BUILD.bazel deleted file mode 100644 index cb54ad89bd5..00000000000 --- a/site_scons/site_tools/BUILD.bazel +++ /dev/null @@ -1,3 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -exports_files(["jstoh.py"]) diff --git a/site_scons/site_tools/LICENSE b/site_scons/site_tools/LICENSE deleted file mode 100644 index 7745ab0a556..00000000000 --- a/site_scons/site_tools/LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -Copyright 2020 MongoDB Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/site_scons/site_tools/abilink.py b/site_scons/site_tools/abilink.py deleted file mode 100644 index e075dd3a88d..00000000000 --- a/site_scons/site_tools/abilink.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import SCons - -# TODO: Make a SUFF variable for the suffix to write to -# TODO: Prevent using abilink when -gsplit-dwarf is in play, since it doesn't work -# TODO: Make a variable for the md5sum utility (allow any hasher) -# TODO: Add an ABILINKCOM variable to the Action, so it can be silenced. 
- - -def _detect(env): - try: - abidw = env["ABIDW"] - if not abidw: - return None - return abidw - except KeyError: - pass - - return env.WhereIs("abidw") - - -def _add_emitter(builder): - base_emitter = builder.emitter - - def new_emitter(target, source, env): - new_targets = [] - for t in target: - abidw = str(t) + ".abidw" - abidw = (t.builder.target_factory or env.File)(abidw) - new_targets.append(abidw) - setattr(t.attributes, "abidw", abidw) - targets = target + new_targets - return (targets, source) - - new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter]) - builder.emitter = new_emitter - - -def _add_scanner(builder): - old_scanner = builder.target_scanner - path_function = old_scanner.path_function - - def new_scanner(node, env, path): - old_results = old_scanner(node, env, path) - new_results = [] - for base in old_results: - abidw = getattr(env.Entry(base).attributes, "abidw", None) - new_results.append(abidw if abidw else base) - return new_results - - builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, - path_function=path_function, - ) - - -def _add_action(builder): - actions = builder.action - builder.action = actions + SCons.Action.Action( - "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw" - ) - - -def exists(env): - result = _detect(env) is not None - return result - - -def generate(env): - if not exists(env): - return - - builder = env["BUILDERS"]["SharedLibrary"] - _add_emitter(builder) - _add_action(builder) - _add_scanner(builder) - _add_scanner(env["BUILDERS"]["Program"]) - _add_scanner(env["BUILDERS"]["LoadableModule"]) diff --git a/site_scons/site_tools/auto_archive.py b/site_scons/site_tools/auto_archive.py deleted file mode 100644 index 07740619897..00000000000 --- a/site_scons/site_tools/auto_archive.py +++ /dev/null @@ -1,358 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import os -import sys - -import SCons - -PACKAGE_ALIAS_MAP = "AIB_PACKAGE_ALIAS_MAP" -AUTO_ARCHIVE_MAKE_ARCHIVE_CONTENT = """ -import os -import sys - -USAGE = ''' -Usage: {} ARCHIVE_TYPE ARCHIVE_NAME ROOT_DIRECTORY FILES... - -FILES should be absolute paths or relative to ROOT_DIRECTORY. - -ARCHIVE_TYPE is one of zip or tar. 
-''' - -if __name__ == "__main__": - if len(sys.argv) < 4: - print(sys.argv[0], "takes at minimum four arguments.") - print(USAGE.format(sys.argv[0])) - sys.exit(1) - - archive_type = sys.argv[1] - archive_name = sys.argv[2] - root_dir = sys.argv[3] - file_list = sys.argv[4] - - files = [] - with open(file_list) as f: - files = f.read().splitlines() - - if archive_type not in ("zip", "tar"): - print("unsupported archive_type", archive_type) - print(USAGE.format(sys.argv[0])) - sys.exit(1) - - if archive_type == "zip": - import zipfile - archive = zipfile.ZipFile(archive_name, mode='w', compression=zipfile.ZIP_DEFLATED) - add_file = archive.write - else: - import tarfile - archive = tarfile.open(archive_name, mode='w:gz') - add_file = archive.add - - os.chdir(root_dir) - - for filename in files: - add_file(filename) - - archive.close() -""" - - -def add_package_name_alias(env, component, role, name): - """Add a package name mapping for the combination of component and role.""" - # Verify we didn't get a None or empty string for any argument - if not name: - raise Exception("when setting a package name alias must provide a name parameter") - if not component: - raise Exception("No component provided for package name alias") - if not role: - raise Exception("No role provided for package name alias") - env[PACKAGE_ALIAS_MAP][(component, role)] = name - - -def get_package_name(env, component, role): - """Return the package file name for the component and role combination.""" - basename = env[PACKAGE_ALIAS_MAP].get( - # TODO: silent roles shouldn't be included here - (component, role), - "{component}-{role}".format(component=component, role=role), - ) - - return basename - - -def collect_transitive_files(env, entry): - """ - Collect all installed and transitively installed files for entry. - """ - cache = set() - files = [] - stack = [entry] - - # Find all the files directly contained in the component DAG for entry and - # it's dependencies. 
- while stack: - s = stack.pop() - if s in cache: - continue - cache.add(s) - - stack.extend(s.dependencies) - files.extend(s.files) - - cache.clear() - files, stack = stack, files - - # Now we will call the scanner to find the transtive files of any files that - # we found from the component DAG. - bazel_installed = set() - while stack: - s = stack.pop() - if s in cache: - continue - cache.add(s) - - files.append(s) - # scan_for_transitive_install is memoized so it's safe to call it in - # this loop. If it hasn't already run for a file we need to run it - # anyway. - stack.extend(env.GetTransitivelyInstalledFiles(s)) - - # if the current file is a bazel target we need to find its bazel deps - # and add them to the archive. - if env.GetOption("ninja") == "disabled" and env.GetOption("link-model") == "dynamic": - env.BazelAutoArchive(s, bazel_installed, stack) - real_node = getattr(s.attributes, "AIB_INSTALL_FROM", s) - # usually you might use scons .children call but we know we only care about - # direct libdeps in this case as they may be transition nodes that are between - # scons and bazels graph. - for child in getattr(real_node.attributes, "libdeps_direct_sorted", []): - try: - bazel_libdep = env.File(f"#/{env['SCONS2BAZEL_TARGETS'].bazel_output(child)}") - install_file = env.GetAutoInstalledFiles(bazel_libdep) - if not install_file: - shlib_suffix = env.subst("$SHLIBSUFFIX") - env.BazelAutoInstall(bazel_libdep, shlib_suffix) - install_file = env.GetAutoInstalledFiles(bazel_libdep) - env.BazelAutoArchive(install_file[0], bazel_installed, stack) - except KeyError: - pass - - # Setting the AIB_NO_ARCHIVE attribute to True prevents outputs from an - # AutoInstall builder from being included into archives produced by this - # tool - # Usage: - # node = env.AutoInstall(...) 
- # setattr(node[0].attributes, 'AIB_NO_ARCHIVE', True) - return sorted(f for f in files if not getattr(f.attributes, "AIB_NO_ARCHIVE", False)) - - -def auto_archive_gen(first_env, make_archive_script, pkg_fmt): - """Generate an archive task function for pkg_fmt where pkg_fmt is one of zip, tar, or auto.""" - - if pkg_fmt == "auto": - if first_env["PLATFORM"] == "win32": - pkg_fmt = "zip" - else: - pkg_fmt = "tar" - - def auto_archive(env, component, role): - pkg_name = get_package_name(env, component, role) - install_alias = "install-{component}{role}".format( - component=component, - role="" if env.GetRoleDeclaration(role).silent else "-" + role, - ) - - if pkg_fmt == "zip": - pkg_suffix = "$AUTO_ARCHIVE_ZIP_SUFFIX" - else: - pkg_suffix = "$AUTO_ARCHIVE_TARBALL_SUFFIX" - - archive = env.AutoArchive( - target="$PKGDIR/{}.{}".format(pkg_name, pkg_suffix), - source=[make_archive_script] + env.Alias(install_alias), - __AUTO_ARCHIVE_TYPE=pkg_fmt, - AIB_COMPONENT=component, - AIB_ROLE=role, - ) - - # TODO: perhaps caching of packages / tarballs should be - # configurable? It's possible someone would want to do it. - env.NoCache(archive) - return archive - - return auto_archive - - -def archive_builder(source, target, env, for_signature): - """Build archives of the AutoInstall'd sources.""" - if not source: - return [] - - source = env.Flatten([source]) - common_ancestor = None - - # Get the path elements that make up both DESTDIR and PREFIX. Then - # iterate the dest_dir_elems with the prefix path elements - # stripped off the end of the path converting them to strings for - # joining to make the common_ancestor. - # - # We pass the common_ancestor to tar via -C so that $PREFIX is - # preserved in the tarball. 
- common_ancestor = env.Dir("$DESTDIR") - - archive_type = env["__AUTO_ARCHIVE_TYPE"] - make_archive_script = source[0] - compression_flags = "" - - tar_cmd = env.WhereIs("tar") - if archive_type == "tar" and tar_cmd: - pigz_cmd = env.WhereIs("pigz") - if pigz_cmd: - # pigz is the parallel implementation of gizp, - # it uses all available cores on the machine. - # if available we use it to speedup compression. - compression_flags = "--use-compress-program='{pigz_cmd}'".format(pigz_cmd=pigz_cmd) - else: - compression_flags = "-z" - - command_prefix = ( - "{tar} -vc {compression_flags} -C {common_ancestor} -T {file_list} -f {archive_name}" - ) - else: - command_prefix = "{python} {make_archive_script} {archive_type} {archive_name} {common_ancestor} {file_list}" - - archive_name = env.File(target[0]) - command_sig = command_prefix.format( - tar=tar_cmd, - compression_flags=compression_flags, - python=sys.executable, - archive_type=archive_type, - archive_name=archive_name, - make_archive_script=make_archive_script, - common_ancestor=common_ancestor, - file_list="", - ) - - # If we are just being invoked for our signature, we can omit the indirect dependencies - # found by expanding the transitive dependencies, since we really only have a hard dependency - # on our direct dependencies. - if for_signature: - return command_sig - - component = env["AIB_COMPONENT"] - role = env["AIB_ROLE"] - entry = env["AIB_ALIAS_MAP"][component][role] - - # Pre-process what should be in the archive. We need to pass the - # set of known installed files along to the transitive dependency - # walk so we can filter out files that aren't in the install - # directory. - installed = set(env.FindInstalledFiles()) - - # Collect all the installed files for our entry. This is doing a pure DAG - # walk idea of what should be. So we filter out any that are not in the - # installed set. 
- transitive_files = [f for f in collect_transitive_files(env, entry) if f in installed] - if not transitive_files: - return [] - - # TODO: relpath is costly, and we do it for every file in the archive here. - # We should find a way to avoid the repeated relpath invocation, probably by - # bucketing by directory. - relative_files = [ - os.path.relpath(file.get_abspath(), common_ancestor.get_abspath()) - for file in transitive_files - ] - - # This is not great for ninja, essentially we are doing realtime operations here, which is - # not terrible for scons CommandActionGenerators, because the generation happens right before - # executing the command. However, this means for ninja that the realtime things happen during - # ninja generation, and are far removed from ninja execution. Even if we split this into a - # separate action for ninja's sake, there would still be issues because the reason to make - # such a filelist is to prevent creating command lines which are too long, and actions must - # be converted to command lines for ninja. When the ninja tool is able to process scons - # callbacks in order and not via aggregation then this could be moved to a simple Textfile call. - file_list = str(target[0].abspath) + ".filelist" - os.makedirs(os.path.dirname(file_list), exist_ok=True) - with open(file_list, "w") as f: - for file in relative_files: - f.write(file + "\n") - - cmd = command_prefix.format( - tar=tar_cmd, - compression_flags=compression_flags, - python=sys.executable, - archive_type=archive_type, - archive_name=archive_name, - make_archive_script=make_archive_script, - common_ancestor=common_ancestor, - file_list=file_list, - ) - if env.GetOption("ninja") != "disabled": - if env.TargetOSIs("windows"): - cmd = 'echo "archive not supported with ninja, use scons only." 
& exit /b 1' - else: - cmd = 'echo "archive not supported with ninja, use scons only."; exit 1' - - return cmd - - -def exists(env): - return True - - -def generate(env): - if not env.get("AUTO_INSTALL_ENABLED"): - env.Tool("auto_install_binaries") - - bld = SCons.Builder.Builder( - action=SCons.Action.CommandGeneratorAction( - archive_builder, - {"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"} - if not env.Verbose() - else {"cmdstr": ""}, - ) - ) - env.Append(BUILDERS={"AutoArchive": bld}) - env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = env.get( - "AUTO_ARCHIVE_TARBALL_SUFFIX", - "tar.gz", - ) - env["AUTO_ARCHIVE_ZIP_SUFFIX"] = env.get("AUTO_ARCHIVE_ZIP_SUFFIX", "zip") - env[PACKAGE_ALIAS_MAP] = {} - - env.AddMethod(add_package_name_alias, "AddPackageNameAlias") - - # TODO: $BUILD_ROOT should be $VARIANT_DIR after we fix our dir - # setup later on. - make_archive_script = env.Textfile( - target="$BUILD_ROOT/aib_make_archive.py", - source=[AUTO_ARCHIVE_MAKE_ARCHIVE_CONTENT], - ) - - env.AppendUnique( - AIB_TASKS={ - "tar": (auto_archive_gen(env, make_archive_script, "tar"), False), - "zip": (auto_archive_gen(env, make_archive_script, "zip"), False), - "archive": (auto_archive_gen(env, make_archive_script, "auto"), False), - } - ) diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py deleted file mode 100644 index 99a97664cbc..00000000000 --- a/site_scons/site_tools/auto_install_binaries.py +++ /dev/null @@ -1,662 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -# TODO: Handle chmod state - -from collections import defaultdict, namedtuple -from typing import List - -import SCons -from SCons.Tool import install - -ALIAS_MAP = "AIB_ALIAS_MAP" -BASE_COMPONENT = "AIB_BASE_COMPONENT" -BASE_ROLE = "AIB_BASE_ROLE" -COMPONENT = "AIB_COMPONENT" -REVERSE_COMPONENT_DEPENDENCIES = "AIB_COMPONENTS_EXTRA" -DEFAULT_COMPONENT = "AIB_DEFAULT_COMPONENT" -INSTALLED_FILES = "AIB_INSTALLED_FILES" -META_COMPONENT = "AIB_META_COMPONENT" -META_ROLE = "AIB_META_ROLE" -ROLE = "AIB_ROLE" -ROLE_DECLARATIONS = "AIB_ROLE_DECLARATIONS" -SUFFIX_MAP = "AIB_SUFFIX_MAP" -TASKS = "AIB_TASKS" - -SuffixMap = namedtuple( - "SuffixMap", - ["directory", "default_role"], -) - - -class RoleInfo: - """A component/role union Node.""" - - def __init__(self, component, role, files=None, dependencies=None): - self.id = "{}-{}".format(component, role) - self.component = component - self.role = role - if files is None: - self.files = set() - else: - self.files = set(files) - - if dependencies is None: - self.dependencies = set() - else: - self.dependencies = set(dependencies) - - def __str__(self): - return "RoleInfo({})".format(self.id) - - def __repr__(self): - return self.__str__() - - -class DeclaredRole: - def __init__(self, name, dependencies=None, transitive=False, silent=False): - self.name = name - - if dependencies is None: - self.dependencies = set() - else: - self.dependencies = {dep for dep in dependencies if dep is not None} - - self.silent = silent - - -def declare_role(env, **kwargs): - """Construct a new role declaration""" - return DeclaredRole(**kwargs) - - -def declare_roles(env, roles, base_role=None, meta_role=None): - """Given a list of role declarations, validate them and store them in the environment""" - role_names = [role.name for role in roles] - if len(role_names) != len(set(role_names)): - raise Exception("Cannot declare duplicate roles") - - # Ensure that all roles named in dependency lists actually were - # passed in as a role. 
- for role in roles: - for d in role.dependencies: - if d not in role_names: - raise Exception("Role dependency '{}' does not name a declared role".format(d)) - - if isinstance(base_role, str): - if base_role not in role_names: - raise Exception( - "A base_role argument was provided but it does not name a declared role" - ) - elif isinstance(base_role, DeclaredRole): - if base_role not in roles: - raise Exception("A base_role argument was provided but it is not a declared role") - elif base_role is not None: - raise Exception("The base_role argument must be a string name of a role or a role object") - else: - # Set it to something falsey - base_role = str() - - if isinstance(meta_role, str): - if meta_role not in role_names: - raise Exception( - "A meta_role argument was provided but it does not name a declared role" - ) - elif isinstance(meta_role, DeclaredRole): - if meta_role not in roles: - raise Exception("A meta_role argument was provided but it is not a declared role") - elif meta_role is not None: - raise Exception("The meta_role argument must be a string name of a role or a role object") - else: - # Set it to something falsy - meta_role = str() - - silents = [role for role in roles if role.silent] - if len(silents) > 1: - raise Exception("No more than one role can be declared as silent") - - # If a base role was given, then add it as a dependency of every - # role that isn't the base role (which would be circular). - if base_role: - for role in roles: - if role.name != base_role: - role.dependencies.add(base_role) - - # Become a dictionary, so we can look up roles easily. - roles = {role.name: role for role in roles} - - # If a meta role was given, then add every role which isn't the - # meta role as one of its dependencies. - if meta_role: - roles[meta_role].dependencies.update(r for r in roles.keys() if r != meta_role) - - # TODO: Check for DAG - - # TODO: What if base_role or meta_role is really None? 
- env[BASE_ROLE] = base_role - env[META_ROLE] = meta_role - env[ROLE_DECLARATIONS] = roles - - -def generate_alias_name(env, component, role, task): - """Generate a scons alias for the component and role combination""" - return "{task}-{component}{role}".format( - task=task, - component=component, - role="" if env[ROLE_DECLARATIONS][role].silent else "-" + role, - ) - - -def get_alias_map_entry(env, component, role): - c_entry = env[ALIAS_MAP][component] - - try: - return c_entry[role] - except KeyError: - r_entry = RoleInfo(component=component, role=role) - c_entry[role] = r_entry - - declaration = env[ROLE_DECLARATIONS].get(role) - for dep in declaration.dependencies: - dep_entry = get_alias_map_entry(env, component, dep) - r_entry.dependencies.add(dep_entry) - - meta_component = env.get(META_COMPONENT) - if meta_component and component != meta_component: - meta_c_entry = get_alias_map_entry(env, meta_component, role) - meta_c_entry.dependencies.add(r_entry) - - base_component = env.get(BASE_COMPONENT) - if base_component and component != base_component: - base_c_entry = get_alias_map_entry(env, base_component, role) - r_entry.dependencies.add(base_c_entry) - - meta_role = env.get(META_ROLE) - if meta_role and role != meta_role and meta_component and component != meta_component: - meta_r_entry = get_alias_map_entry(env, component, meta_role) - meta_c_r_entry = get_alias_map_entry(env, meta_component, meta_role) - meta_c_r_entry.dependencies.add(meta_r_entry) - - return r_entry - - -def get_component(node): - return getattr(node.attributes, COMPONENT, None) - - -def get_role(node): - return getattr(node.attributes, ROLE, None) - - -def scan_for_transitive_install(node, env, _path): - """Walk the children of node finding all installed dependencies of it.""" - component = get_component(node.sources[0]) - role = get_role(node.sources[0]) - if component is None: - return [] - - scanned = getattr(node.attributes, "AIB_SCANNED", None) - if scanned is not None: - return 
scanned - - # Access directly by keys because we don't want to accidentally - # create a new entry via get_alias_map_entry and instead should - # throw a KeyError if we got here without valid components and - # roles - alias_map = env[ALIAS_MAP] - entry = alias_map[component][role] - role_deps = env[ROLE_DECLARATIONS].get(role).dependencies - results = set() - - # We have to explicitly look at the various BASE files here since it's not - # guaranteed they'll be pulled in anywhere in our grandchildren but we need - # to always depend upon them. For example if env.AutoInstall some file 'foo' - # tagged as common base but it's never used as a source for the - # AutoInstalled file we're looking at or the children of our children (and - # so on) then 'foo' would never get scanned in here without this explicit - # dependency adding. - base_component = env.get(BASE_COMPONENT) - if base_component and component != base_component: - base_role_entry = alias_map[base_component][role] - if base_role_entry.files: - results.update(base_role_entry.files) - - base_role = env.get(BASE_ROLE) - if base_role and role != base_role: - component_base_entry = alias_map[component][base_role] - if component_base_entry.files: - results.update(component_base_entry.files) - - if base_role and base_component and component != base_component and role != base_role: - base_base_entry = alias_map[base_component][base_role] - if base_base_entry.files: - results.update(base_base_entry.files) - - installed_children = set( - grandchild - for child in node.children() - for direct_children in child.children() - for grandchild in direct_children.get_executor().get_all_targets() - if direct_children.get_executor() and grandchild.has_builder() - ) - for child in installed_children: - auto_installed_files = get_auto_installed_files(env, child) - bazel_child = getattr(child.attributes, "AIB_INSTALL_FROM", child) - if str(bazel_child).startswith("bazel-out"): - auto_installed_files += 
get_auto_installed_files(env, bazel_child) - - if not auto_installed_files: - continue - - child_role = get_role(child) - if child_role == role or child_role in role_deps: - child_component = get_component(child) - child_entry = get_alias_map_entry(env, child_component, child_role) - - # This is where component inheritance happens. We need a default - # component for everything so we can store it but if during - # transitive scanning we see a child with the default component here - # we will move that file to our component. This prevents - # over-stepping the DAG bounds since the default component is likely - # to be large and an explicitly tagged file is unlikely to depend on - # everything in it. - if child_component == env.get(DEFAULT_COMPONENT): - setattr(node.attributes, COMPONENT, component) - for f in auto_installed_files: - child_entry.files.discard(f) - entry.files.update(auto_installed_files) - elif component != child_component: - entry.dependencies.add(child_entry) - - results.update(auto_installed_files) - - # Produce deterministic output for caching purposes - results = sorted(results, key=str) - setattr(node.attributes, "AIB_SCANNED", results) - - return results - - -def scan_for_transitive_install_pseudobuilder(env, node): - return scan_for_transitive_install(node, env, None) - - -def tag_components(env, target, **kwargs): - """Create component and role dependency objects""" - target = env.Flatten([target]) - component = kwargs.get(COMPONENT) - role = kwargs.get(ROLE) - if component is not None and (not isinstance(component, str) or " " in component): - raise Exception("AIB_COMPONENT must be a string and contain no whitespace.") - - if component is None: - raise Exception( - "AIB_COMPONENT must be provided; untagged targets: {}".format([t.path for t in target]) - ) - - if role is None: - raise Exception("AIB_ROLE was not provided.") - - for t in target: - t.attributes.keep_targetinfo = 1 - setattr(t.attributes, COMPONENT, component) - 
setattr(t.attributes, ROLE, role) - - entry = get_alias_map_entry(env, component, role) - - # We cannot wire back dependencies to any combination of meta role, meta - # component or base component. These cause dependency cycles because - # get_alias_map_entry will do that wiring for us then we will try to - # map them back on themselves in our loop. - if ( - component != env.get(BASE_COMPONENT) - and role != env.get(META_ROLE) - and component != env.get(META_COMPONENT) - ): - for component in kwargs.get(REVERSE_COMPONENT_DEPENDENCIES, []): - component_dep = get_alias_map_entry(env, component, role) - component_dep.dependencies.add(entry) - - return entry - - -def auto_install_task(env, component, role): - """Auto install task.""" - entry = get_alias_map_entry(env, component, role) - return list(entry.files) - - -bazel_installs = set() - - -def auto_install_pseudobuilder(env, target, source, **kwargs): - """Auto install pseudo-builder.""" - source = env.Flatten([source]) - source = [env.File(s) for s in source] - entry = env.TagComponents(source, **kwargs) - - installed_files = [] - for s in source: - target_for_source = target - - if not target_for_source: - # AIB currently uses file suffixes to do mapping. However, sometimes we need - # to do the mapping based on a different suffix. This is used for things like - # dSYM files, where we really just want to describe where .dSYM bundles should - # be placed, but need to actually handle the substructure. Currently, this is - # only used by separate_debug.py. - # - # TODO: Find a way to do this without the tools needing to coordinate. 
- suffix = getattr(s.attributes, "aib_effective_suffix", s.get_suffix()) - auto_install_mapping = env[SUFFIX_MAP].get(suffix) - - if not auto_install_mapping: - raise Exception("No target provided and no auto install mapping found for:", str(s)) - - target_for_source = auto_install_mapping.directory - - # We've already auto installed this file and it may have belonged to a - # different role since it wouldn't get retagged above. So we just skip - # this files since SCons will already wire the dependency since s is a - # source and so the file will get installed. A common error here is - # adding debug files to the runtime component file if we do not skip - # this. - existing_installed_files = get_auto_installed_files(env, s) - if existing_installed_files: - continue - - # We must do an early subst here so that the _aib_debugdir - # generator has a chance to run while seeing 'source'. We need - # to do two substs here. The first is to expand an variables - # in `target_for_source` while we can see `source`. This is - # needed for things like _aib_debugdir. Then, we need to do a - # second subst to expand DESTDIR, interpolating - # `target_for_source` in as $TARGET. Yes, this is confusing. 
- target_for_source = env.subst(target_for_source, source=s) - target_for_source = env.Dir(env.subst("$DESTDIR/$TARGET", target=target_for_source)) - - aib_additional_directory = getattr(s.attributes, "aib_additional_directory", None) - if aib_additional_directory is not None: - target_for_source = env.Dir(aib_additional_directory, directory=target_for_source) - aib_new_name = getattr(s.attributes, "aib_new_name", None) - if aib_new_name is not None: - install_file = env.File(aib_new_name, target_for_source) - new_installed_files = env.InstallAs(install_file, s) - else: - new_installed_files = env.Install(target=target_for_source, source=s) - setattr(s.attributes, INSTALLED_FILES, new_installed_files) - setattr(new_installed_files[0].attributes, "AIB_INSTALL_FROM", s) - installed_files.extend(new_installed_files) - - entry.files.update(installed_files) - return installed_files - - -def finalize_install_dependencies(env): - """Generates task aliases and wires install dependencies.""" - - # Wire up component dependencies and generate task aliases - for task, func in env[TASKS].items(): - generate_dependent_aliases = True - - # The task map is a map of string task names (i.e. "install" by default) - # to either a tuple or function. If it's a function we assume that we - # generate dependent aliases for that task, otherwise if it's a tuple we - # deconstruct it here to get the function (the first element) and a - # boolean indicating whether or not to generate dependent aliases for - # that task. For example the "archive" task added by the auto_archive - # tool disables them because tarballs do not track dependencies so you - # do not want archive-foo to build archive-bar as well if foo depends on - # bar. 
- if isinstance(func, tuple): - func, generate_dependent_aliases = func - - for component, rolemap in env[ALIAS_MAP].items(): - for role, info in rolemap.items(): - alias_name = generate_alias_name(env, component, role, task) - alias = env.Alias(alias_name, func(env, component, role)) - if generate_dependent_aliases: - dependent_aliases = env.Flatten( - [ - env.Alias(generate_alias_name(env, d.component, d.role, task)) - for d in info.dependencies - ] - ) - env.Alias(alias, dependent_aliases) - - -def auto_install_emitter(target, source, env): - """When attached to a builder adds an appropriate AutoInstall to that Builder.""" - - for t in target: - if isinstance(t, str): - t = env.File(t) - - if env.get("AIB_IGNORE", False): - continue - - if t.has_builder() and t.get_builder().get_name(env) in set( - ["BazelProgram", "BazelSharedLibrary"] - ): - continue - - # There is no API for determining if an Entry is operating in - # a SConf context. We obviously do not want to auto tag, and - # install conftest Programs. So we filter them out the only - # way available to us. - # - # We're working with upstream to expose this information. 
- if "conftest" in str(t): - continue - - # Get the suffix, unless overridden - suffix = getattr(t.attributes, "aib_effective_suffix", t.get_suffix()) - auto_install_mapping = env[SUFFIX_MAP].get(suffix) - - if auto_install_mapping is not None: - env.AutoInstall( - auto_install_mapping.directory, - t, - AIB_COMPONENT=env.get(COMPONENT, env.get(DEFAULT_COMPONENT, None)), - AIB_ROLE=env.get(ROLE, auto_install_mapping.default_role), - AIB_COMPONENTS_EXTRA=env.get(REVERSE_COMPONENT_DEPENDENCIES, []), - ) - - return (target, source) - - -def add_suffix_mapping(env, suffix, role=None): - """Map suffix to role""" - if isinstance(suffix, str): - if role not in env[ROLE_DECLARATIONS]: - raise Exception( - "target {} is not a known role available roles are {}".format( - role, env[ROLE_DECLARATIONS].keys() - ) - ) - env[SUFFIX_MAP][env.subst(suffix)] = role - - if not isinstance(suffix, dict): - raise Exception("source must be a dictionary or a string") - - for _, mapping in suffix.items(): - role = mapping.default_role - if role not in env[ROLE_DECLARATIONS]: - raise Exception( - "target {} is not a known role. 
Available roles are {}".format( - target, env[ROLE_DECLARATIONS].keys() - ) - ) - - env[SUFFIX_MAP].update({env.subst(key): value for key, value in suffix.items()}) - - -def suffix_mapping(env, directory="", default_role=False): - """Generate a SuffixMap object from source and target.""" - return SuffixMap(directory=directory, default_role=default_role) - - -def get_auto_installed_files(env, node): - return getattr(node.attributes, INSTALLED_FILES, []) - - -def list_components(env, **kwargs): - """List registered components for env.""" - print("Known AIB components:") - for key in env[ALIAS_MAP]: - print("\t", key) - - -def list_hierarchical_aib_recursive(mapping, counter=0): - if counter == 0: - print(" " * counter, mapping.id) - counter += 1 - for dep in mapping.dependencies: - print(" " * counter, dep.id) - list_hierarchical_aib_targets(dep, counter=counter) - - -def list_hierarchical_aib_targets(dag_mode=False): - def target_lister(env, **kwargs): - if dag_mode: - installed_files = set(env.FindInstalledFiles()) - for f in installed_files: - scan_for_transitive_install(f, env, None) - - mapping = env[ALIAS_MAP][env[META_COMPONENT]][env[META_ROLE]] - list_hierarchical_aib_recursive(mapping) - - return target_lister - - -def list_recursive(mapping) -> List[str]: - items = set() - items.add(mapping.id) - for dep in mapping.dependencies: - items |= list_recursive(dep) - return items - - -def list_targets(): - def target_lister(env, **kwargs): - mapping = env[ALIAS_MAP][env[META_COMPONENT]][env[META_ROLE]] - tasks = sorted(list(env[TASKS].keys())) - roles = sorted(list(env[ROLE_DECLARATIONS].keys())) - targets_with_role = list(list_recursive(mapping)) + [mapping.id] - targets: List[str] = [] - for target_role in targets_with_role: - # Does this target_role end with one of our speicifed roles - matching_roles = list(filter(target_role.endswith, [f"-{role}" for role in roles])) - assert len(matching_roles) == 1 - - targets.append(target_role[: -len(matching_roles[0])]) 
- - # dedup and sort targets - targets = sorted(list(set(targets))) - print( - "The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod" - ) - tasks_str = ",".join(tasks) - print(f"TASK={{{tasks_str}}}") - roles_str = ",".join(roles) - print(f"ROLE={{{roles_str}}}") - for target in targets: - print(f" TASK-{target}-ROLE") - - return target_lister - - -def get_role_declaration(env, role): - return env[ROLE_DECLARATIONS][role] - - -def exists(_env): - """Always activate this tool.""" - return True - - -def generate(env): - """Generate the auto install builders.""" - env["AUTO_INSTALL_ENABLED"] = True - - # Matches the autoconf documentation: - # https://www.gnu.org/prep/standards/html_node/Directory-Variables.html - env["DESTDIR"] = env.Dir(env.get("DESTDIR", "#install")) - env["PREFIX"] = env.get("PREFIX", ".") - env["PREFIX_BINDIR"] = env.get("PREFIX_BINDIR", "$PREFIX/bin") - env["PREFIX_LIBDIR"] = env.get("PREFIX_LIBDIR", "$PREFIX/lib") - env["PREFIX_SHAREDIR"] = env.get("PREFIX_SHAREDIR", "$PREFIX/share") - env["PREFIX_DOCDIR"] = env.get("PREFIX_DOCDIR", "$PREFIX_SHAREDIR/doc") - env["PREFIX_INCLUDEDIR"] = env.get("PREFIX_INCLUDEDIR", "$PREFIX/include") - env[SUFFIX_MAP] = {} - env[ALIAS_MAP] = defaultdict(dict) - - env.AppendUnique( - AIB_TASKS={ - "install": auto_install_task, - } - ) - - env.AddMethod( - scan_for_transitive_install_pseudobuilder, - "GetTransitivelyInstalledFiles", - ) - env.AddMethod(get_role_declaration, "GetRoleDeclaration") - env.AddMethod(get_auto_installed_files, "GetAutoInstalledFiles") - env.AddMethod(tag_components, "TagComponents") - env.AddMethod(auto_install_pseudobuilder, "AutoInstall") - env.AddMethod(add_suffix_mapping, "AddSuffixMapping") - env.AddMethod(declare_role, "Role") - env.AddMethod(declare_roles, "DeclareRoles") - env.AddMethod(finalize_install_dependencies, "FinalizeInstallDependencies") - env.AddMethod(suffix_mapping, "SuffixMap") - env.Tool("install") - - # 
TODO: we should probably expose these as PseudoBuilders and let - # users define their own aliases for them. - env.Alias("list-aib-components", [], [list_components]) - env.AlwaysBuild("list-aib-components") - - env.Alias("list-hierarchical-aib-targets", [], [list_hierarchical_aib_targets(dag_mode=False)]) - env.AlwaysBuild("list-hierarchical-aib-targets") - - env.Alias("list-hierarchical-aib-dag", [], [list_hierarchical_aib_targets(dag_mode=True)]) - env.AlwaysBuild("list-hierarchical-aib-dag") - - env.Alias("list-targets", [], [list_targets()]) - env.AlwaysBuild("list-targets") - - for builder in ["Program", "SharedLibrary", "LoadableModule", "StaticLibrary"]: - builder = env["BUILDERS"][builder] - base_emitter = builder.emitter - # TODO: investigate if using a ListEmitter here can cause - # problems if AIB is not loaded last - new_emitter = SCons.Builder.ListEmitter([base_emitter, auto_install_emitter]) - builder.emitter = new_emitter - - base_install_builder = install.BaseInstallBuilder - assert base_install_builder.target_scanner is None - - base_install_builder.target_scanner = SCons.Scanner.Scanner( - function=scan_for_transitive_install, - path_function=None, - ) diff --git a/site_scons/site_tools/bazel_includes_info.py b/site_scons/site_tools/bazel_includes_info.py deleted file mode 100644 index 26aaeaf3657..00000000000 --- a/site_scons/site_tools/bazel_includes_info.py +++ /dev/null @@ -1,260 +0,0 @@ -import hashlib -import json -import os -import sys -from functools import partial -from pathlib import Path - -import libdeps_tool -import SCons - - -def exists(env): - return True - - -def get_md5(file_path): - h = hashlib.md5() - - with open(file_path, "rb") as file: - while True: - # Reading is buffered, so we can read smaller chunks. 
- chunk = file.read(h.block_size) - if not chunk: - break - h.update(chunk) - - return h.hexdigest() - - -def get_target_headers(env, target, header_query, symlink_query=None): - header_list_cache_dir = Path(".bazel_header_list_cache") - target_path = "/".join(target.rsplit(":", 1))[2:] - bazel_file = Path(os.path.dirname(target_path)) / "BUILD.bazel" - cache_file = str(header_list_cache_dir / bazel_file) + ".json" - build_file_hash = get_md5(bazel_file) - cache_data = None - - if os.path.exists(cache_file): - with open(cache_file) as f: - cache_data = json.load(f) - if cache_data["MD5"] == build_file_hash: - if target in cache_data: - return cache_data[target]["headers"], cache_data[target]["macro_name"] - else: - # invalidate the cache - cache_data = None - - if cache_data is None: - print(f"{bazel_file} changed, invalidating cache") - os.makedirs(os.path.dirname(cache_file), exist_ok=True) - cache_data = dict() - cache_data["MD5"] = build_file_hash - - print(f"getting {target} headers") - results = env.RunBazelQuery(header_query, f"getting {target} headers") - cache_data[target] = {"headers": [], "macro_name": target} - for line in results.stdout.split("\n"): - cache_data[target]["headers"] += [line] - - if symlink_query is not None: - target_results = env.RunBazelQuery(symlink_query, f"getting macro name for {target}") - cache_data[target]["macro_name"] = target_results.stdout.split(" ")[0] - - with open(cache_file, "w") as f: - json.dump(cache_data, f) - - return cache_data[target]["headers"], cache_data[target]["macro_name"] - - -def add_headers_from_all_libraries(env, header_map): - bazel_query = ["aquery"] + env["BAZEL_FLAGS_STR"] + ['mnemonic("CppArchive", //src/...)'] - results = env.RunBazelQuery(bazel_query, "getting all bazel libraries") - targets = set() - for line in results.stdout.split("\n"): - if " Target: //src" in line: - target = line.split(" Target: ")[-1] - targets.add(target) - - for target in targets: - header_query = ( - ["cquery"] - + 
env["BAZEL_FLAGS_STR"] - + [ - f'labels(hdrs, "@{target}")', - "--output", - "files", - ] - ) - macro_name_query = ( - ["cquery"] - + env["BAZEL_FLAGS_STR"] - + [ - f'kind("extract_debuginfo", rdeps(@//src/mongo/..., "@{target}", 1))', - ] - ) - headers, macro_name = get_target_headers(env, target, header_query, macro_name_query) - header_map[macro_name] = [hdr for hdr in headers if not hdr.endswith("src/mongo/config.h")] - - -def add_headers_from_gen_code(env, header_map): - source_generators_query = ( - ["aquery"] - + env["BAZEL_FLAGS_STR"] - + ['mnemonic("IdlcGenerator|TemplateRenderer|ConfigHeaderGen", //src/...)'] - ) - - idl_gen_targets = set() - results = env.RunBazelQuery(source_generators_query, "getting all idl gen targets") - for line in results.stdout.split("\n"): - if " Target: //src" in line: - target = line.split(" Target: ")[-1] - idl_gen_targets.add(target) - - for target in idl_gen_targets: - header_query = ( - ["cquery"] - + env["BAZEL_FLAGS_STR"] - + [ - f"@{target}", - "--output", - "files", - ] - ) - headers, macro_name = get_target_headers(env, target, header_query) - header_map[macro_name] = [ - hdr for hdr in headers if hdr.endswith(target.split(":")[-1] + ".h") - ] - - source_generators_query = ( - ["aquery"] - + env["BAZEL_FLAGS_STR"] - + ['mnemonic("TemplateRenderer|ConfigHeaderGen", //src/...)'] - ) - - source_gen_targets = set() - results = env.RunBazelQuery(source_generators_query, "getting all source gen targets") - for line in results.stdout.split("\n"): - if " Target: //src" in line: - target = line.split(" Target: ")[-1] - source_gen_targets.add(target) - - for target in source_gen_targets: - header_query = ( - ["cquery"] - + env["BAZEL_FLAGS_STR"] - + [ - f"@{target}", - "--output", - "files", - ] - ) - headers, macro_name = get_target_headers(env, target, header_query) - header_map[macro_name] = [ - hdr for hdr in headers if hdr.endswith(".h") and not hdr.endswith("src/mongo/config.h") - ] - - -def 
bazel_includes_emitter(target_libraries, target, source, env): - rel_target = os.path.relpath(str(target[0].abspath), start=env.Dir("#").abspath).replace( - "\\", "/" - ) - - if rel_target in target_libraries: - objsuffix = ( - env.subst("$OBJSUFFIX") if not env.TargetOSIs("linux") else env.subst("$SHOBJSUFFIX") - ) - builder_name = "StaticLibrary" if not env.TargetOSIs("linux") == "nt" else "SharedLibrary" - os.makedirs(os.path.dirname(str(target[0].abspath)), exist_ok=True) - with open(str(target[0].abspath) + ".obj_files", "w") as f: - for s in source: - if str(s).endswith(objsuffix): - f.write(os.path.relpath(str(s.abspath), start=env.Dir("#").abspath) + "\n") - with open(str(target[0].abspath) + ".env_vars", "w") as f: - json.dump(env["ENV"], f) - - with ( - open(str(target[0].abspath) + ".bazel_headers", "w") as fheaders, - open(str(target[0].abspath) + ".bazel_deps", "w") as fdeps, - ): - # note we can't know about LIBDEPS_DEPDENDENTS (reverse deps) in an emitter - # however we do co-opt the libdeps linter to check for these at the end of reading - # sconscripts - - deps = [] - for s in ( - env.get("LIBDEPS", []) - + env.get("LIBDEPS_PRIVATE", []) - + env.get("LIBDEPS_INTERFACE", []) - ): - if not s: - continue - - libnode = libdeps_tool._get_node_with_ixes(env, s, builder_name) - - libnode_path = os.path.relpath( - str(libnode.abspath), start=env.Dir("#").abspath - ).replace("\\", "/") - if libnode.has_builder() and libnode.get_builder().get_name(env) != "ThinTarget": - print( - f"ERROR: can generate correct bazel header list because {target[0]} has non-bazel dependency: {libnode}" - ) - sys.exit(1) - if str(libnode_path) in env["SCONS2BAZEL_TARGETS"].scons2bazel_targets: - bazel_target = env["SCONS2BAZEL_TARGETS"].bazel_target(str(libnode_path)) - # new query to run, run and cache it - deps.append(bazel_target) - bazel_query = ( - ["cquery"] - + env["BAZEL_FLAGS_STR"] - + [ - f'filter("[\\.h,\\.ipp,\\.hpp].*$", kind("source", deps("@{bazel_target}")))', - 
"--output", - "files", - ] - ) - results = env.RunBazelQuery(bazel_query, "getting bazel headers") - - if results.returncode != 0: - print("ERROR: bazel libdeps query failed:") - print(results) - sys.exit(1) - results = set( - [line for line in results.stdout.split("\n") if line.startswith("src/")] - ) - - for header in results: - fheaders.write(header + "\n") - for dep in deps: - fdeps.write(dep + "\n") - - return target, source - - -def generate(env): - header_map = {} - add_headers_from_all_libraries(env, header_map) - gen_header_map = {} - add_headers_from_gen_code(env, gen_header_map) - target_libraries = { - target_library.split("=")[-1].replace("\\", "/") - for target_library in env.GetOption("bazel-includes-info")[0].split() - } - - bazel_include_info = { - "header_map": header_map, - "gen_header_map": gen_header_map, - "bazel_exec": env["SCONS2BAZEL_TARGETS"].bazel_executable, - "config": env["BAZEL_FLAGS_STR"] + ["--config=local"], - } - - with open(".bazel_include_info.json", "w") as f: - json.dump(bazel_include_info, f) - - for builder_name in ["SharedLibrary", "StaticLibrary", "Program"]: - builder = env["BUILDERS"][builder_name] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter( - [base_emitter, partial(bazel_includes_emitter, target_libraries)] - ) - builder.emitter = new_emitter diff --git a/site_scons/site_tools/build_auto_retry.py b/site_scons/site_tools/build_auto_retry.py deleted file mode 100644 index 6023a357e42..00000000000 --- a/site_scons/site_tools/build_auto_retry.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright 2023 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -import functools -import os -import random -import subprocess -import sys -import time -from typing import Callable, Dict, List - -import SCons - - -def command_spawn_func( - sh: str, - escape: Callable[[str], str], - cmd: str, - args: List, - env: Dict, - target: List, - source: List, -): - retries = 0 - success = False - - build_env = target[0].get_build_env() - max_retries = build_env.get("BUILD_RETRY_ATTEMPTS", 10) - build_max_retry_delay = build_env.get("BUILD_RETRY_MAX_DELAY_SECONDS", 120) - - while not success and retries <= max_retries: - try: - start_time = time.time() - if sys.platform[:3] == "win": - # have to use shell=True for windows because of https://github.com/python/cpython/issues/53908 - proc = subprocess.run( - " ".join(args), - env=env, - close_fds=True, - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True, - check=True, - ) - else: - proc = subprocess.run( - [sh, "-c", " ".join(args)], - env=env, - close_fds=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True, - check=True, - ) - except subprocess.CalledProcessError as exc: - print(f"{os.path.basename(__file__)} captured error:") - print(exc.stdout) - retries += 1 - retry_delay = int((time.time() - start_time) + build_max_retry_delay * random.random()) - print( - f"Failed while trying to build {target[0]}", - ) - if retries <= max_retries: - print(f"trying again in {retry_delay} seconds with retry attempt {retries}") - time.sleep(retry_delay) - continue - - # No more retries left - return exc.returncode - else: - if proc.stdout: - print(proc.stdout) - return proc.returncode - - -def generate(env): - original_command_execute = SCons.Action.CommandAction.execute - - def build_retry_execute(command_action_instance, target, source, env, executor=None): - if ( - "conftest" not in str(target[0]) - and target[0].has_builder() - and target[0].get_builder().get_name(env) - in [ - "Object", - "SharedObject", - "StaticObject", - "Program", - 
"StaticLibrary", - "SharedLibrary", - ] - ): - original_spawn = env["SPAWN"] - - env["SPAWN"] = functools.partial(command_spawn_func, target=target, source=source) - result = original_command_execute( - command_action_instance, target, source, env, executor - ) - env["SPAWN"] = original_spawn - - else: - result = original_command_execute( - command_action_instance, target, source, env, executor - ) - return result - - SCons.Action.CommandAction.execute = build_retry_execute - - -def exists(env): - return True diff --git a/site_scons/site_tools/build_metrics/__init__.py b/site_scons/site_tools/build_metrics/__init__.py deleted file mode 100644 index e125541f505..00000000000 --- a/site_scons/site_tools/build_metrics/__init__.py +++ /dev/null @@ -1,174 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Configure the build to track build performance.""" - -import atexit -import json -import os -import sys -import time -from timeit import default_timer as timer - -import psutil -from jsonschema import validate - -from .artifacts import CollectArtifacts -from .cache_dir import CacheDirCollector, CacheDirValidateWithMetrics -from .libdeps import LibdepsCollector -from .memory import MemoryMonitor -from .per_action_metrics import PerActionMetrics -from .scons import SConsStats -from .util import CaptureAtexits, add_meta_data, get_build_metric_dict - -_SEC_TO_NANOSEC_FACTOR = 1000000000.0 -_METRICS_COLLECTORS = [] - - -def finalize_build_metrics(env): - metrics = get_build_metric_dict() - metrics["end_time"] = time.time_ns() - for m in _METRICS_COLLECTORS: - start_time = timer() - sys.stdout.write(f"Processing {m.get_name()}...") - sys.stdout.flush() - key, value = m.finalize() - sys.stdout.write(f" {round(timer() - start_time, 2)}s\n") - metrics[key] = value - - with open(os.path.join(os.path.dirname(__file__), "build_metrics_format.schema")) as f: - validate(metrics, json.load(f)) - - build_metrics_file = env.GetOption("build-metrics") - if build_metrics_file == "-": - json.dump(metrics, sys.stdout, indent=4, sort_keys=True) - else: - with open(build_metrics_file, "w") as f: - json.dump(metrics, f, indent=4, sort_keys=True) - with open(f"{os.path.splitext(build_metrics_file)[0]}-chrome-tracer.json", "w") as f: - json.dump(generate_chrome_tracer_json(metrics), f, indent=4) - - -def generate_chrome_tracer_json(metrics): - tracer_json = {"traceEvents": []} - job_slots = [] - task_stack = sorted(metrics["build_tasks"], reverse=True, key=lambda x: x["start_time"]) - - # Chrome trace organizes tasks per pids, so if we want to have a clean layout which - # clearly shows concurrent processes, we are creating job slots by comparing start and - # end times, and using "pid" as the job slot identifier. job_slots are a list of chronologically - # in order tasks. 
We keep a list of job slots and always check at the end of the job slot to - # compare the lowest end time that will accommodate the next task start time. If there are no - # job slots which can accommodate the next task, we create a new job slot. Note the job slots - # ordering is similar to how the OS process scheduler would organize and start the processes - # from the build, however we are reproducing this retroactively and simplistically and it - # is not guaranteed to match exactly. - while task_stack: - task = task_stack.pop() - candidates = [ - job_slot for job_slot in job_slots if job_slot[-1]["end_time"] < task["start_time"] - ] - if candidates: - # We need to find the best job_slot to add this next task too, so we look at the - # end_times, the one with the lowest would have been the first one available. We just - # arbitrarily guess the first one will be the best, then iterate to find out which - # one is the best. We then add to the existing job_slot which best_candidate points to. - min_end = candidates[0][-1]["end_time"] - best_candidate = candidates[0] - for candidate in candidates: - if candidate[-1]["end_time"] < min_end: - best_candidate = candidate - min_end = candidate[-1]["end_time"] - - best_candidate.append(task) - else: - # None of the current job slots were available to accommodate the new task so we - # make a new one. 
- job_slots.append([task]) - - for i, job_slot in enumerate(job_slots): - for build_task in job_slot: - tracer_json["traceEvents"].append( - { - "name": build_task["outputs"][0] - if build_task["outputs"] - else build_task["builder"], - "cat": build_task["builder"], - "ph": "X", - "ts": build_task["start_time"] / 1000.0, - "dur": (build_task["end_time"] - build_task["start_time"]) / 1000.0, - "pid": i, - "args": { - "cpu": build_task["cpu_time"], - "mem": build_task["mem_usage"], - }, - } - ) - - return tracer_json - - -def generate(env, **kwargs): - global _METRICS_COLLECTORS - - # This will force our at exit to the of the stack ensuring - # that it is the last thing called when exiting. - c = CaptureAtexits() - atexit.unregister(c) - for func in c.captured: - atexit.unregister(func) - atexit.register(finalize_build_metrics, env) - for func in c.captured: - atexit.register(func) - - env.AddMethod(get_build_metric_dict, "GetBuildMetricDictionary") - env.AddMethod(add_meta_data, "AddBuildMetricsMetaData") - - metrics = get_build_metric_dict() - p = psutil.Process(os.getpid()) - - metrics["start_time"] = int(p.create_time() * _SEC_TO_NANOSEC_FACTOR) - metrics["scons_command"] = " ".join([sys.executable] + sys.argv) - - _METRICS_COLLECTORS = [ - MemoryMonitor(psutil.Process().memory_info().vms), - PerActionMetrics(), - CollectArtifacts(env), - SConsStats(), - CacheDirCollector(), - LibdepsCollector(env), - ] - - env["CACHEDIR_CLASS"] = CacheDirValidateWithMetrics - - -def exists(env): - return True - - -def options(opts): - """ - Add command line Variables for build metrics tool. 
- """ - opts.AddVariables( - ("BUILD_METRICS_ARTIFACTS_DIR", "Path to scan for artifacts after the build has stopped."), - ("BUILD_METRICS_BLOATY", "Path to the bloaty bin"), - ) diff --git a/site_scons/site_tools/build_metrics/artifacts.py b/site_scons/site_tools/build_metrics/artifacts.py deleted file mode 100644 index 542de923375..00000000000 --- a/site_scons/site_tools/build_metrics/artifacts.py +++ /dev/null @@ -1,223 +0,0 @@ -import csv -import enum -import os -import pathlib -import platform -import subprocess -from typing import Optional - -import puremagic -from typing_extensions import TypedDict - -from .protocol import BuildMetricsCollector - - -class ArtifactType(str, enum.Enum): - UNKNOWN = "unknown" - PROGRAM = "Program" # .exe - LIBRARY = "Library" # .so, .a - ARCHIVE = "archive" # .zip, .tgz, not .a - OBJECT = "Object" # .o - TEXT = "text" # .h, .hpp, .cpp - - -# Types to run bloaty against -ARTIFACT_BIN_TYPES = [ArtifactType.PROGRAM, ArtifactType.LIBRARY, ArtifactType.OBJECT] - - -class BinSize(TypedDict): - vmsize: int - filesize: int - - -class BinMetrics(TypedDict, total=False): - text: BinSize - data: BinSize - rodata: BinSize - bss: BinSize - debug: BinSize - symtab: BinSize - dyntab: BinSize - - -def _run_bloaty(bloaty, target) -> Optional[BinMetrics]: - out = BinMetrics() - try: - # -n 0 -> do not collapse small sections into a section named [Other] - # --csv -> generate csv output to stdout - # -d sections -> only list sections, not symbols - proc = subprocess.run( - [bloaty, "-n", "0", "--csv", "-d", "sections", str(target)], - capture_output=True, - universal_newlines=True, - ) - if proc.returncode != 0: - # if we run bloaty against a thin archive, it will fail. Detect - # this and allow thin archives to pass, otherwise raise an - # exception. 
- # Note that our thin_archive tool sets the thin_archive - # attribute to True - if proc.stderr.startswith("bloaty: unknown file type for file") and getattr( - target.attributes, "thin_archive", False - ): - # this is a thin archive, pass it - return None - - raise RuntimeError(f"Failed to call bloaty on '{str(target)}': {proc.stderr}") - - for row in csv.DictReader(proc.stdout.splitlines()): - # sections,vmsize,filesize - section = row["sections"] - vmsize = int(row["vmsize"]) - filesize = int(row["filesize"]) - binsize = BinSize(vmsize=vmsize, filesize=filesize) - if section == ".text": - out["text"] = binsize - elif section == ".data": - out["data"] = binsize - elif section == ".rodata": - out["rodata"] = binsize - elif section == ".bss": - out["bss"] = binsize - elif section.startswith(".debug"): - # there are multiple sections that start with .debug, and we - # need to sum them up. - if "debug" not in out: - out["debug"] = BinSize(vmsize=0, filesize=0) - out["debug"]["vmsize"] += vmsize - out["debug"]["filesize"] += filesize - elif section == ".symtab": - out["symtab"] = binsize - elif section == ".dyntab": - out["dyntab"] = binsize - - return out - - except FileNotFoundError: - if not _run_bloaty.printed_missing_bloaty_warning: - print( - "WARNING: could not find the bloaty binary. Binary section metrics will not be collected." 
- ) - _run_bloaty.printed_missing_bloaty_warning = True - return None - - -_run_bloaty.printed_missing_bloaty_warning = False - - -class Artifact(TypedDict, total=False): - array_index: int - name: str - type: str - size: int - bin_metrics: BinMetrics - - -# First key: platform.system() -# Tuple key 1: ArtifactType -# Tuple Key 2: string to search for -_PLATFORM_LIBMAGIC_BINARY_IDENTITIES = { - "Windows": [(ArtifactType.LIBRARY, "executable (DLL)"), (ArtifactType.PROGRAM, "executable")], - "Linux": [(ArtifactType.PROGRAM, "interpreter"), (ArtifactType.LIBRARY, "shared object")], - "Darwin": [ - (ArtifactType.PROGRAM, "Mach-O universal binary"), - (ArtifactType.LIBRARY, "linked shared library"), - ], -} - -_ARTIFACT_TYPE_FROM_BUILDER = { - "SharedObject": ArtifactType.OBJECT, # .dyn.o - "StaticObject": ArtifactType.OBJECT, # .o - "StaticLibrary": ArtifactType.LIBRARY, # .a - "Idlc": ArtifactType.TEXT, # _gen.{h,cpp} - "Program": ArtifactType.PROGRAM, # .exe/*nix binaries - "Substfile": ArtifactType.TEXT, # build/opt/mongo/config.h and others - "InstallBuilder": ArtifactType.TEXT, # build/opt/third_party/wiredtiger/wiredtiger_ext.h - "Textfile": ArtifactType.TEXT, # build/opt/third_party/third_party_shim.cpp -} - -_TEXT_IDENTIFIERS = ["ASCII text", "Unicode text"] - -_EXTENSION_FALLBACK = { - ".cpp": ArtifactType.TEXT, - ".h": ArtifactType.TEXT, - ".hpp": ArtifactType.TEXT, - ".js": ArtifactType.TEXT, - ".idl": ArtifactType.TEXT, - ".so": ArtifactType.LIBRARY, - ".o": ArtifactType.OBJECT, - # Windows - ".obj": ArtifactType.OBJECT, - ".lib": ArtifactType.LIBRARY, - # ilk, exp, pdb and res files on Windows have no appropriate tag, so we - # allow them to fallthrough to UNKNOWN -} - - -class CollectArtifacts(BuildMetricsCollector): - def __init__(self, env): - self._env = env - self._env = env - self._build_dir = env.get("BUILD_METRICS_ARTIFACTS_DIR", env.Dir("#").abspath) - self._artifacts = [] - self._bloaty_bin = env.get("BUILD_METRICS_BLOATY", env.WhereIs("bloaty")) 
- if self._bloaty_bin is None: - self._bloaty_bin = "bloaty" - self._metrics = {"total_artifact_size": 0, "num_artifacts": 0, "artifacts": []} - - def get_name(self): - return "CollectArtifacts" - - def walk(self, dirname): - for root, dirs, files in os.walk(dirname): - self._artifacts += list(map(lambda x: os.path.join(root, x), files)) - - def finalize(self): - self.walk(self._env.Dir(self._env.subst(self._build_dir)).path) - - for artifact in self._artifacts: - artifact_dict = self._identify_artifact(artifact) - artifact_dict["array_index"] = len(self._metrics["artifacts"]) - self._metrics["artifacts"].append(artifact_dict) - self._metrics["total_artifact_size"] += artifact_dict["size"] - self._metrics["num_artifacts"] = len(self._metrics["artifacts"]) - return "artifact_metrics", self._metrics - - def _identify_artifact(self, file_) -> Artifact: - def _type_from_builder(builder) -> ArtifactType: - name = builder.get_name(self._env) - return _ARTIFACT_TYPE_FROM_BUILDER.get(name, ArtifactType.UNKNOWN) - - type_ = ArtifactType.UNKNOWN - file_str = str(file_) - node = self._env.File(file_) - builder = node.get_builder() - if builder is not None: - type_ = _type_from_builder(builder) - - if type_ == ArtifactType.UNKNOWN: - try: - magic_out = puremagic.from_file(file_str) - system = platform.system() - for search_type in _PLATFORM_LIBMAGIC_BINARY_IDENTITIES.get(system): - if search_type[1] in magic_out: - type_ = search_type[0] - break - - if type_ == ArtifactType.UNKNOWN and any(s in magic_out for s in _TEXT_IDENTIFIERS): - type_ = ArtifactType.TEXT - except (puremagic.main.PureError, ValueError): - # exception means that puremagic failed to id the filetype. We'll - # fallback to file extension in this case. 
- pass - if type_ == ArtifactType.UNKNOWN: - type_ = _EXTENSION_FALLBACK.get(pathlib.Path(file_str).suffix, ArtifactType.UNKNOWN) - - out = Artifact({"name": file_, "type": type_, "size": node.get_size()}) - - if type_ in ARTIFACT_BIN_TYPES: - bin_metrics = _run_bloaty(self._bloaty_bin, node) - if bin_metrics is not None: - out["bin_metrics"] = bin_metrics - - return out diff --git a/site_scons/site_tools/build_metrics/build_metrics_format.schema b/site_scons/site_tools/build_metrics/build_metrics_format.schema deleted file mode 100644 index 118028ad090..00000000000 --- a/site_scons/site_tools/build_metrics/build_metrics_format.schema +++ /dev/null @@ -1,198 +0,0 @@ -{ - "$defs": { - "timestamp": { - "type": "integer", - "description": "Nanoseconds since Unix epoch" - }, - "memory": { - "type": "integer", - "description": "Virtual memory used in bytes" - }, - "bytes": { - "type": "integer", - "description": "Size in bytes", - "minimum": 0 - }, - "binsize": { - "type": "object", - "properties": { - "filesize": { "$ref": "#/$defs/bytes" }, - "vmsize": { "$ref": "#/$defs/bytes" } - }, - "required": ["filesize", "vmsize"], - "additionalProperties": false - } - }, - "type" : "object", - "properties" : { - "start_time" : { "$ref": "#/$defs/timestamp" }, - "end_time" : { "$ref": "#/$defs/timestamp" }, - "evg_id" : {"type" : "string"}, - "variant" : {"type" : "string"}, - "scons_command" : {"type" : "string"}, - "system_memory": { - "type": "object", - "properties": { - "mem_over_time": { - "type": "array", - "items": { - "type": "object", - "properties": { - "timestamp": { "$ref": "#/$defs/timestamp" }, - "memory": { "$ref": "#/$defs/memory" } - } - } - }, - "max": { "$ref": "#/$defs/memory" }, - "arithmetic_mean": {"type": "number"}, - "start_mem": { "$ref": "#/$defs/memory" } - }, - "required": ["mem_over_time", "max", "arithmetic_mean", "start_mem"], - "additionalProperties": false - }, - "artifact_metrics": { - "type": "object", - "properties": { - 
"total_artifact_size": { "$ref": "#/$defs/bytes" }, - "num_artifacts": { "type": "integer" }, - "artifacts": { - "type": "array", - "items": { - "type": "object", - "required": ["array_index", "name", "type", "size"], - "properties": { - "array_index": { "type": "integer" }, - "name": { "type": "string" }, - "type": { - "type": "string", - "enum": ["Object", "Library", "Program", "text", "json", "archive", "unknown"] - }, - "size": { "$ref": "#/$defs/bytes" }, - "bin_metrics": { - "type": "object", - "properties": { - "text": { "$ref": "#/$defs/binsize" }, - "data": { "$ref": "#/$defs/binsize" }, - "rodata": { "$ref": "#/$defs/binsize" }, - "bss": { "$ref": "#/$defs/binsize" }, - "debug": { "$ref": "#/$defs/binsize" }, - "symtab": { "$ref": "#/$defs/binsize" }, - "dyntab": { "$ref": "#/$defs/binsize" } - } - } - } - } - } - }, - "additionalProperties": false - }, - "build_tasks" : { - "type": "array", - "items": { - "type": "object", - "properties": { - "array_index": {"type": "integer"}, - "start_time": {"$ref": "#/$defs/timestamp"}, - "end_time" : {"$ref": "#/$defs/timestamp"}, - "cpu_time" : {"type" : "integer"}, - "builder" : {"type" : "string"}, - "mem_usage": {"$ref": "#/$defs/memory"}, - "inputs" : { - "type": "array", - "items": { "type": "string"} - }, - "outputs" : { - "type": "array", - "items": { "type": "string"} - }, - "action": {"type" : "string"} - } - }, - "required": ["array_index", "start_time", "end_time", "cpu_time", "builder", "mem_usage", "inputs", "outputs", "action"], - "additionalProperties": false - }, - "cache_metrics": { - "type": "object", - "properties": { - "cache_artifacts": { - "type": "array", - "items": { - "type": "object", - "properties": { - "array_index": {"type": "integer"}, - "name": {"type": "string"}, - "size": {"$ref": "#/$defs/bytes"} - }, - "required": ["array_index", "name", "size"], - "additionalProperties": false - } - }, - "push_time": { "$ref": "#/$defs/timestamp" }, - "pull_time": { "$ref": "#/$defs/timestamp" }, 
- "cache_size": { "$ref": "#/$defs/bytes" } - }, - "required": ["cache_artifacts", "push_time", "pull_time", "cache_size"], - "additionalProperties": false - }, - "libdeps_metrics": { - "type": "object", - "properties": { - "NODE": {"type": "integer"}, - "EDGE": {"type": "integer"}, - "DIR_EDGE": {"type": "integer"}, - "TRANS_EDGE": {"type": "integer"}, - "DIR_PUB_EDGE": {"type": "integer"}, - "PUB_EDGE": {"type": "integer"}, - "PRIV_EDGE": {"type": "integer"}, - "IF_EDGE": {"type": "integer"}, - "PROG": {"type": "integer"}, - "LIB": {"type": "integer"} - }, - "additionalProperties": false - } - }, - "scons_metrics": { - "type": "object", - "properties": { - "memory": { - "type": "object", - "properties": { - "pre_read": {"$ref": "#/$defs/bytes"}, - "post_read": {"$ref": "#/$defs/bytes"}, - "pre_build": {"$ref": "#/$defs/bytes"}, - "post_build": {"$ref": "#/$defs/bytes"} - }, - "required": ["pre_read", "post_read", "pre_build", "post_build"], - "additionalProperties": false - }, - "time": { - "type": "object", - "properties": { - "total": {"type": "number"}, - "sconscript_exec": {"type": "number"}, - "scons_exec": {"type": "number"}, - "command_exec": {"type": "number"} - }, - "required": ["total", "sconscript_exec", "scons_exec", "command_exec"], - "additionalProperties": false - }, - "counts": { - "type": "array", - "items": { - "type": "object", - "properties": { - "array_index": {"type": "integer"}, - "item_name": {"type": "string"}, - "pre_read": {"$ref": "#/$defs/bytes"}, - "post_read": {"$ref": "#/$defs/bytes"}, - "pre_build": {"$ref": "#/$defs/bytes"}, - "post_build": {"$ref": "#/$defs/bytes"} - }, - "required": ["array_index", "item_name", "pre_read", "post_read", "pre_build", "post_build"], - "additionalProperties": false - } - } - }, - "additionalProperties": false - } -} diff --git a/site_scons/site_tools/build_metrics/cache_dir.py b/site_scons/site_tools/build_metrics/cache_dir.py deleted file mode 100644 index 0296034f0b0..00000000000 --- 
a/site_scons/site_tools/build_metrics/cache_dir.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import time -from typing import List, Set - -from site_tools.validate_cache_dir import CacheDirValidate -from typing_extensions import TypedDict - -from .protocol import BuildMetricsCollector - - -class CacheArtifact(TypedDict): - array_index: int - name: str - size: int - - -class CacheMetrics(TypedDict): - cache_artifacts: List[CacheArtifact] - push_time: int - pull_time: int - cache_size: int - - -class CacheDirValidateWithMetrics(CacheDirValidate): - DATA: CacheMetrics = CacheMetrics(push_time=0, pull_time=0, cache_artifacts=[], cache_size=0) - SET: Set[str] = set() - - @classmethod - def keep_stats(cls, target): - if target in cls.SET: - return - - size = os.path.getsize(target) - cls.DATA["cache_artifacts"].append( - CacheArtifact(array_index=len(cls.DATA["cache_artifacts"]), name=target, size=size) - ) - cls.DATA["cache_size"] += size - cls.SET.add(target) - - @classmethod - def copy_from_cache(cls, env, src, dst): - start = time.time_ns() - super().copy_from_cache(env, src, dst) - pull_time = time.time_ns() - start - cls.DATA["pull_time"] += pull_time - cls.keep_stats(dst) - - @classmethod - def copy_to_cache(cls, env, src, dst): - start = time.time_ns() - super().copy_to_cache(env, src, dst) - push_time = time.time_ns() - start - cls.DATA["push_time"] += push_time - cls.keep_stats(src) - - -class CacheDirCollector(BuildMetricsCollector): - def get_name(self): - return "CacheDirCollector" - - def finalize(self): - return "cache_metrics", CacheDirValidateWithMetrics.DATA diff --git a/site_scons/site_tools/build_metrics/combine_metrics.py b/site_scons/site_tools/build_metrics/combine_metrics.py deleted file mode 100644 index a5fafd471b8..00000000000 --- a/site_scons/site_tools/build_metrics/combine_metrics.py +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env python3 -import argparse -import glob -import json -import statistics -import sys -from typing import Any, Dict, 
List - -parser = argparse.ArgumentParser(description="Combine metrics json files into a single file.") -parser.add_argument( - "--prefix-name", - metavar="FILES", - action="append", - default=[], - help='Prefix path to collect json files of the form "{prefix_path}*.json" for combining into a single json: "{prefix_path}.json"', -) -parser.add_argument("unittest_args", nargs="*") -args = parser.parse_args() - - -def set_lowest(existing: Dict, current: Dict, key: str): - existing_data = existing.get(key) - current_data = existing.get(key) - - if existing_data and current_data and existing_data > current_data: - existing[key] = current_data - - elif not existing_data and current_data: - existing[key] = current_data - - -def set_greatest(existing: Dict, current: Dict, key: str): - existing_data = existing.get(key) - current_data = current.get(key) - - if existing_data and current_data and existing_data < current_data: - existing[key] = current_data - - elif not existing_data and current_data: - existing[key] = current_data - - -def combine_command_line(existing: Dict, current: Dict, key: str): - existing_data = existing.get(key) - current_data = current.get(key) - - if not existing_data: - existing[key] = current_data - else: - existing_data = existing.get(key).split() - current_data = current.get(key).split() - for current_arg in current_data: - if current_arg not in existing_data: - existing_data.append(current_arg) - - existing[key] = " ".join(existing_data) - - -def if_set_should_match(existing: Dict, current: Dict, key: str): - existing_data = existing.get(key) - current_data = current.get(key) - - if existing_data and current_data and existing_data != current_data: - print( - f"WARNING: Expected data to match - existing: {existing_data}, current: {current_data}", - file=sys.stderr, - ) - - elif not existing_data and current_data: - existing[key] = current_data - - -def recalc_list_indexes(target_list: List): - index_found = None - - for index, elem in 
enumerate(target_list): - if index_found is None and index == 0: - index_found = elem.get("array_index") - - if (index_found is None and elem.get("array_index")) or ( - index_found is not None and elem.get("array_index") is None - ): - raise Exception("Attempted to combine list with incompat index keys.") - - if elem.get("array_index") is not None: - elem["array_index"] = index - - -def extend_list(existing: Dict, current: Dict, key: str): - existing_data = existing.get(key) - current_data = current.get(key) - - if existing_data and current_data: - existing_data.extend(current_data) - - elif not existing_data and current_data: - existing[key] = current_data - - recalc_list_indexes(existing[key]) - - -def extend_list_no_dups(existing: Dict, current: Dict, key: str, list_unqiue_key: str): - extend_list(existing, current, key) - unique_list = {} - for elem in existing[key]: - if elem.get("array_index") is not None: - elem["array_index"] = -1 - if elem[list_unqiue_key] not in unique_list: - unique_list[elem[list_unqiue_key]] = elem - elif unique_list[elem[list_unqiue_key]] != elem: - if sys.platform == "win32": - # build metrics performs a clean and pull from cachse and windows does not produce the same output - # with the same input (non deterministic), so we can not make these garuntees and or perform - # this check. 
- pass - else: - print( - f"WARNING: Expected data to match - existing: {unique_list[elem[list_unqiue_key]]}, current: {elem}", - file=sys.stderr, - ) - - existing[key] = list(unique_list.values()) - - recalc_list_indexes(existing[key]) - - -def combine_system_memory(existing: Dict, current: Dict): - extend_list(existing, current, "mem_over_time") - set_greatest(existing, current, "max") - existing["arithmetic_mean"] = statistics.mean( - [mem["memory"] for mem in existing["mem_over_time"]] - ) - set_lowest(existing, current, "start_mem") - - -def combine_artifact_metrics(existing: Dict, current: Dict): - extend_list_no_dups(existing, current, "artifacts", "name") - existing["total_artifact_size"] = sum([artifact["size"] for artifact in existing["artifacts"]]) - existing["num_artifacts"] = len(existing["artifacts"]) - - -def combine_cache_metrics(existing: Dict, current: Dict): - extend_list_no_dups(existing, current, "cache_artifacts", "name") - existing["push_time"] += current["push_time"] - existing["pull_time"] += current["pull_time"] - existing["cache_size"] += sum([cache["size"] for cache in existing["cache_artifacts"]]) - - -def combine_scons_metrics(existing: Dict, current: Dict): - try: - set_greatest(existing["memory"], current["memory"], "pre_read") - set_greatest(existing["memory"], current["memory"], "post_read") - set_greatest(existing["memory"], current["memory"], "pre_build") - set_greatest(existing["memory"], current["memory"], "post_build") - except KeyError: - if sys.platform == "darwin": - # MacOS has known memory reporting issues, although this is not directly related to scons which does not use - # psutil for this case, I think both use underlying OS calls to determine the memory: https://github.com/giampaolo/psutil/issues/1908 - pass - - existing["time"]["total"] += current["time"]["total"] - existing["time"]["sconscript_exec"] += current["time"]["sconscript_exec"] - existing["time"]["scons_exec"] += current["time"]["scons_exec"] - 
existing["time"]["command_exec"] += current["time"]["command_exec"] - - for new_item in current["counts"]: - found_new_item = False - for existing_item in existing["counts"]: - if existing_item["item_name"] == new_item["item_name"]: - found_new_item = True - set_greatest(existing_item, new_item, "pre_read") - set_greatest(existing_item, new_item, "post_read") - set_greatest(existing_item, new_item, "pre_build") - set_greatest(existing_item, new_item, "post_build") - break - if not found_new_item: - existing["counts"].append(new_item) - - -for prefix_name in args.prefix_name: - combined_json: Dict[str, Any] = {"combined_files": []} - - json_files = glob.glob(f"{prefix_name}*.json") - for json_file in json_files: - if json_file.endswith("chrome-tracer.json"): - continue - - with open(json_file) as fjson: - combined_json["combined_files"].append(json_file) - current_json = json.load(fjson) - - set_lowest(combined_json, current_json, "start_time") - set_greatest(combined_json, current_json, "end_time") - if_set_should_match(combined_json, current_json, "evg_id") - if_set_should_match(combined_json, current_json, "variant") - combine_command_line(combined_json, current_json, "scons_command") - - ########################### - # system_memory - if "system_memory" not in combined_json: - combined_json["system_memory"] = current_json.get("system_memory", {}) - else: - combine_system_memory(combined_json["system_memory"], current_json["system_memory"]) - - ############################ - # artifact_metrics - if "artifact_metrics" not in combined_json: - combined_json["artifact_metrics"] = current_json.get("artifact_metrics", {}) - else: - combine_artifact_metrics( - combined_json["artifact_metrics"], current_json["artifact_metrics"] - ) - - ############################ - # build_tasks - if "build_tasks" not in combined_json: - combined_json["build_tasks"] = current_json.get("build_tasks", []) - else: - extend_list(combined_json, current_json, "build_tasks") - - 
############################ - # cache_metrics - if "cache_metrics" not in combined_json: - combined_json["cache_metrics"] = current_json.get("cache_metrics", {}) - else: - combine_cache_metrics(combined_json["cache_metrics"], current_json["cache_metrics"]) - - ############################ - # libdeps_metrics - if "libdeps_metrics" in combined_json and current_json.get("libdeps_metrics"): - raise Exception("found a second libdeps_metrics dataset in {json_file}") - if "libdeps_metrics" not in combined_json and current_json.get("libdeps_metrics"): - combined_json["libdeps_metrics"] = current_json.get("libdeps_metrics") - - ############################ - # scons_metrics - if "scons_metrics" not in combined_json: - combined_json["scons_metrics"] = current_json.get("scons_metrics", {}) - else: - combine_scons_metrics(combined_json["scons_metrics"], current_json["scons_metrics"]) - - with open(f"{prefix_name}.json", "w") as out: - json.dump(combined_json, out, indent=4, sort_keys=True) diff --git a/site_scons/site_tools/build_metrics/combine_metrics_unittest.py b/site_scons/site_tools/build_metrics/combine_metrics_unittest.py deleted file mode 100644 index 66b0abd09e0..00000000000 --- a/site_scons/site_tools/build_metrics/combine_metrics_unittest.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python3 -import io -import os -import sys -import unittest - -sys.path.append(os.path.dirname(__file__)) - -from combine_metrics import ( - combine_command_line, - extend_list, - extend_list_no_dups, - if_set_should_match, - recalc_list_indexes, - set_greatest, - set_lowest, -) - - -class CombineUnittests(unittest.TestCase): - def setUp(self): - self.existing = { - "int": 4, - "match_same": "test", - "command_line": "arg1 arg2 dup_arg", - "recalc_list": [{"array_index": 93}, {"array_index": 3}], - "extend_list": [ - {"array_index": 0, "key": "text", "val": "data1"}, - {"array_index": 1, "key": "text2", "val": "data2"}, - ], - } - self.current = { - "int": 5, - "match_same": 
"test", - "command_line": "arg3 dup_arg arg4", - "extend_list": [ - {"array_index": 0, "key": "text", "val": "data1"}, - {"array_index": 1, "key": "text3", "val": "data3"}, - ], - } - - def test_set_lowest(self): - set_lowest(self.existing, self.current, "int") - self.assertEqual(self.existing["int"], 4) - - def test_set_greatest(self): - set_greatest(self.existing, self.current, "int") - self.assertEqual(self.existing["int"], 5) - - def test_combine_command_line(self): - combine_command_line(self.existing, self.current, "command_line") - self.assertEqual(self.existing["command_line"], "arg1 arg2 dup_arg arg3 arg4") - - def test_if_set_should_match(self): - if_set_should_match(self.existing, self.current, "match_same") - del self.current["match_same"] - if_set_should_match(self.existing, self.current, "match_same") - self.assertEqual(self.existing["match_same"], "test") - self.current["match_same"] = "test2" - capturedOutput = io.StringIO() - sys.stderr = capturedOutput - if_set_should_match(self.existing, self.current, "match_same") - sys.stderr = sys.__stderr__ - self.assertTrue("WARNING: Expected data to match - existing:" in capturedOutput.getvalue()) - - def test_recalc_list_indexes(self): - recalc_list_indexes(self.existing["recalc_list"]) - self.assertEqual(self.existing["recalc_list"], [{"array_index": 0}, {"array_index": 1}]) - - def test_extend_list(self): - extend_list(self.existing, self.current, "extend_list") - self.assertEqual( - self.existing["extend_list"], - [ - {"array_index": 0, "key": "text", "val": "data1"}, - {"array_index": 1, "key": "text2", "val": "data2"}, - {"array_index": 2, "key": "text", "val": "data1"}, - {"array_index": 3, "key": "text3", "val": "data3"}, - ], - ) - - def test_extend_list_no_dups(self): - extend_list_no_dups(self.existing, self.current, "extend_list", "key") - self.assertEqual( - self.existing["extend_list"], - [ - {"array_index": 0, "key": "text", "val": "data1"}, - {"array_index": 1, "key": "text2", "val": 
"data2"}, - {"array_index": 2, "key": "text3", "val": "data3"}, - ], - ) - - def test_extend_list_no_dups_bad_data(self): - if sys.platform != "win32": - self.current["extend_list"][0]["val"] = "bad_data" - capturedOutput = io.StringIO() - sys.stderr = capturedOutput - extend_list_no_dups(self.existing, self.current, "extend_list", "key") - sys.stderr = sys.__stderr__ - self.assertTrue( - "WARNING: Expected data to match - existing:" in capturedOutput.getvalue() - ) - - -unittest.main() diff --git a/site_scons/site_tools/build_metrics/libdeps.py b/site_scons/site_tools/build_metrics/libdeps.py deleted file mode 100644 index 0b89dcb61c1..00000000000 --- a/site_scons/site_tools/build_metrics/libdeps.py +++ /dev/null @@ -1,71 +0,0 @@ -import json -import os -import sys - -import networkx - -from .protocol import BuildMetricsCollector - -# libdeps analyzer does not assume the root build directory, so we need to add its own root to the path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.insert(0, os.path.join(dir_path, "..", "..", "..", "buildscripts", "libdeps")) - -from buildscripts.libdeps.libdeps.analyzer import ( - GaJsonPrinter, - LibdepsGraphAnalysis, - counter_factory, -) -from buildscripts.libdeps.libdeps.graph import CountTypes, LibdepsGraph - -_ALLOWED_KEYS = set( - [ - "NODE", - "EDGE", - "DIR_EDGE", - "TRANS_EDGE", - "DIR_PUB_EDGE", - "PUB_EDGE", - "PRIV_EDGE", - "IF_EDGE", - "PROG", - "LIB", - ] -) - - -class LibdepsCollector(BuildMetricsCollector): - def __init__(self, env): - self._env = env - - def get_name(self): - return "LibdepsCollector" - - @staticmethod - def _libdeps(graph_file): - libdeps_graph = LibdepsGraph(graph=networkx.read_graphml(graph_file)) - - if libdeps_graph.graph["graph_schema_version"] == 1: - libdeps_graph = networkx.reverse_view(libdeps_graph) - - return GaJsonPrinter( - LibdepsGraphAnalysis(counter_factory(libdeps_graph, CountTypes.ALL.name)) - ).get_json() - - @staticmethod - def 
_finalize(libdeps_graph_file): - out = {} - for key, value in json.loads(LibdepsCollector._libdeps(libdeps_graph_file)).items(): - if key in _ALLOWED_KEYS: - out[key] = value - return out - - def finalize(self): - libdeps_graph_file = self._env.get("LIBDEPS_GRAPH_FILE") - out = {} - if libdeps_graph_file is not None and os.path.exists(libdeps_graph_file.path): - out = self._finalize(libdeps_graph_file.path) - else: - print( - f"WARNING: libdeps graph file '{libdeps_graph_file}' could not be found. Skipping libdeps metrics" - ) - return "libdeps_metrics", out diff --git a/site_scons/site_tools/build_metrics/memory.py b/site_scons/site_tools/build_metrics/memory.py deleted file mode 100644 index 4d7ad51bdac..00000000000 --- a/site_scons/site_tools/build_metrics/memory.py +++ /dev/null @@ -1,62 +0,0 @@ -import threading -import time - -import psutil - -from .protocol import BuildMetricsCollector -from .util import timestamp_now - - -class MemoryMonitor(BuildMetricsCollector): - INTERVAL = 0.1 # seconds - - def __init__(self, starting_memory_adjustment=0): - self._stop = False - self.system_memory_metrics = { - "mem_over_time": [], - "start_mem": used_memory() - starting_memory_adjustment, - } - - self._thread = threading.Thread(target=self.memory_monitor, daemon=True) - self._thread.start() - - def get_name(self): - return "System Memory Monitor" - - def finalize(self): - self._stop = True - self._record_data_point() - - mean = 0 - max_ = 0 - count = 1 - for val in self.system_memory_metrics["mem_over_time"]: - max_ = max(val["memory"], max_) - # iterative mean calculation algorithm from https://stackoverflow.com/a/1934266 - mean += (val["memory"] - mean) / count - count += 1 - - self.system_memory_metrics["arithmetic_mean"] = mean - self.system_memory_metrics["max"] = max_ - - return "system_memory", self.system_memory_metrics - - def memory_monitor(self): - while not self._stop: - time.sleep(self.INTERVAL) - if self._stop: - break - - self._record_data_point() - - 
def _record_data_point(self): - used_mem = used_memory() - now_time = timestamp_now() - - self.system_memory_metrics["mem_over_time"].append( - {"timestamp": now_time, "memory": used_mem} - ) - - -def used_memory(): - return psutil.virtual_memory().used diff --git a/site_scons/site_tools/build_metrics/per_action_metrics.py b/site_scons/site_tools/build_metrics/per_action_metrics.py deleted file mode 100644 index 283d7ca89d3..00000000000 --- a/site_scons/site_tools/build_metrics/per_action_metrics.py +++ /dev/null @@ -1,197 +0,0 @@ -import functools -import sys -import time - -import memory_profiler -import psutil -import SCons - -from .protocol import BuildMetricsCollector -from .util import fullname, mem_adjustment - - -class ProfiledFunction: - """ - A class which mimics a FunctionAction function, behaving exactly the same - as the original FunctionAction function, except for gather perf metrics - during the __call__ of the function. - """ - - def __init__(self, per_action_instance, original_func) -> None: - self.original_func = original_func - self.per_action_instance = per_action_instance - - if hasattr(original_func, "strfunction"): - self.strfunction = original_func.strfunction - - if isinstance(self.original_func, SCons.Action.ActionCaller): - self.original_func = original_func.__call__ - - self.__name__ = "profiled_function" - - def __call__(self, target, source, env): - return self.function_action_execute(target, source, env) - - def __str__(self) -> str: - return str(self.original_func) - - def function_action_execute(self, target, source, env): - task_metrics = { - "outputs": [str(t) for t in target], - "inputs": [str(s) for s in source], - "action": fullname(self.original_func), - "builder": target[0].get_builder().get_name(target[0].get_env()), - } - profile = memory_profiler.LineProfiler(include_children=False) - - task_metrics["start_time"] = time.time_ns() - thread_start_time = time.thread_time_ns() - return_value = 
profile(self.original_func)(target=target, source=source, env=env) - task_metrics["cpu_time"] = time.thread_time_ns() - thread_start_time - task_metrics["end_time"] = time.time_ns() - - memory_increases_per_line = [] - for file_where_code_is, lines_of_code in profile.code_map.items(): - # skip the first item in the list because this is just the initial - # memory state, and we are interested just in the increases - for line_number, memory_usage in list(lines_of_code)[1:]: - if memory_usage: - memory_increase = memory_usage[0] - memory_increases_per_line.append(memory_increase) - - task_metrics["mem_usage"] = int(sum(memory_increases_per_line) * 1024 * 1024) - - self.per_action_instance.build_tasks_metrics.append(task_metrics) - task_metrics["array_index"] = self.per_action_instance.build_tasks_metrics.index( - task_metrics - ) - - return return_value - - -class PerActionMetrics(BuildMetricsCollector): - """ - Creates hooks the CommandAction and FunctionAction execute calls in SCons to track - CPU, memory and duration of execution of said action types. - """ - - def __init__(self) -> None: - self.build_tasks_metrics = [] - - # place hooks into scons internals to give us a chance to - # adjust things to take measurements - original_command_execute = SCons.Action.CommandAction.execute - - def build_metrics_CommandAction_execute( - command_action_instance, target, source, env, executor=None - ): - if "conftest" not in str(target[0]): - # We use the SPAWN var to control the SCons proper execute to call our spawn. 
- # We set the spawn back after the proper execute is done - original_spawn = env["SPAWN"] - env["SPAWN"] = functools.partial( - self.command_spawn_func, target=target, source=source - ) - result = original_command_execute( - command_action_instance, target, source, env, executor - ) - env["SPAWN"] = original_spawn - else: - result = original_command_execute( - command_action_instance, target, source, env, executor - ) - return result - - SCons.Action.CommandAction.execute = build_metrics_CommandAction_execute - - original_function_action_execute = SCons.Action.FunctionAction.execute - - def build_metrics_FunctionAction_execute( - function_action_instance, target, source, env, executor=None - ): - if ( - target - and "conftest" not in str(target[0]) - and not isinstance(function_action_instance.execfunction, ProfiledFunction) - ): - # set our profiled function class as the function action call. Profiled function - # should look and behave exactly as the original function, besides the __call__ - # behaving differently. 
We set back the original function for posterity just in case - original_func = function_action_instance.execfunction - function_action_instance.execfunction = ProfiledFunction( - self, function_action_instance.execfunction - ) - original_function_action_execute( - function_action_instance, target, source, env, executor - ) - function_action_instance.execfunction = original_func - else: - return original_function_action_execute( - function_action_instance, target, source, env, executor - ) - - SCons.Action.FunctionAction.execute = build_metrics_FunctionAction_execute - - def get_name(self): - return "Per-Action Metrics" - - def get_mem_cpu(self, proc): - with proc.oneshot(): - cpu = proc.cpu_times().system + proc.cpu_times().user - mem = proc.memory_info().vms - for p in proc.children(recursive=True): - with p.oneshot(): - cpu += p.cpu_times().system + p.cpu_times().user - mem += p.memory_info().vms - return cpu, mem - - def track_process(self, proc, target): - """Poll virtual memory of a process and children.""" - try: - peak_cpu, peak_mem = self.get_mem_cpu(proc) - except (psutil.NoSuchProcess, psutil.AccessDenied): - return 0, 0 - - while proc.poll() is None: - try: - cpu, mem = self.get_mem_cpu(proc) - if peak_cpu < cpu: - peak_cpu = cpu - if peak_mem < mem: - peak_mem = mem - except (psutil.NoSuchProcess, psutil.AccessDenied): - pass - else: - time.sleep(0.01) - - return peak_cpu, peak_mem - - def command_spawn_func(self, sh, escape, cmd, args, env, target, source): - task_metrics = { - "outputs": [str(t) for t in target], - "inputs": [str(s) for s in source], - "action": " ".join(args), - "start_time": time.time_ns(), - "builder": target[0].get_builder().get_name(target[0].get_env()), - } - - if sys.platform[:3] == "win": - # have to use shell=True for windows because of https://github.com/python/cpython/issues/53908 - proc = psutil.Popen(" ".join(args), env=env, close_fds=True, shell=True) - else: - proc = psutil.Popen([sh, "-c", " ".join(args)], env=env, 
close_fds=True) - - cpu_usage, mem_usage = self.track_process(proc, target[0]) - return_code = proc.wait() - - task_metrics["end_time"] = time.time_ns() - task_metrics["cpu_time"] = int(cpu_usage * (10.0**9.0)) - task_metrics["mem_usage"] = mem_adjustment(int(mem_usage)) - - self.build_tasks_metrics.append(task_metrics) - task_metrics["array_index"] = self.build_tasks_metrics.index(task_metrics) - - return return_code - - def finalize(self): - return "build_tasks", self.build_tasks_metrics diff --git a/site_scons/site_tools/build_metrics/protocol.py b/site_scons/site_tools/build_metrics/protocol.py deleted file mode 100644 index 2a61fa211bc..00000000000 --- a/site_scons/site_tools/build_metrics/protocol.py +++ /dev/null @@ -1,14 +0,0 @@ -from abc import abstractmethod -from typing import Any, Tuple - -from typing_extensions import Protocol - - -class BuildMetricsCollector(Protocol): - @abstractmethod - def finalize(self) -> Tuple[str, Any]: - raise NotImplementedError - - @abstractmethod - def get_name() -> str: - raise NotImplementedError diff --git a/site_scons/site_tools/build_metrics/scons.py b/site_scons/site_tools/build_metrics/scons.py deleted file mode 100644 index 4b4b4660757..00000000000 --- a/site_scons/site_tools/build_metrics/scons.py +++ /dev/null @@ -1,161 +0,0 @@ -from typing import Any, List, Optional, Tuple - -import SCons.Script -from typing_extensions import TypedDict - -from .protocol import BuildMetricsCollector - - -class _HookedStartTime(float): - def __init__(self, val) -> None: - float.__init__(val) - self.hooked_end_time = None - - def __rsub__(self, other): - self.hooked_end_time = other - return other - float(self) - - -def _safe_list_get(list_, i, default=None): - try: - return list_[i] - except IndexError: - return default - - -class MemoryMetrics(TypedDict): - pre_read: int - post_read: int - pre_build: int - post_build: int - - -class TimeMetrics(TypedDict): - total: int - sconscript_exec: int - scons_exec: int - command_exec: int - 
- -class CountsMetrics(TypedDict): - array_index: int - item_name: str - pre_read: int - post_read: int - pre_build: int - post_build: int - - -class SConsStats(BuildMetricsCollector): - def __init__(self): - # hook start_time so we can also capture the end time - if not isinstance(SCons.Script.start_time, _HookedStartTime): - SCons.Script.start_time = _HookedStartTime(SCons.Script.start_time) - - def get_name(self) -> str: - return "SConsStats" - - def finalize(self) -> Tuple[str, Any]: - out = {} - memory = self._finalize_memory() - if memory is not None: - out["memory"] = memory - time = self._finalize_time() - if time is not None: - out["time"] = time - counts = self._finalize_counts() - if counts is not None: - out["counts"] = counts - return "scons_metrics", out - - def _finalize_memory(self) -> Optional[MemoryMetrics]: - memory_stats = SCons.Script.Main.memory_stats.stats - pre_read = _safe_list_get(memory_stats, 0, 0) - post_read = _safe_list_get(memory_stats, 1, 0) - pre_build = _safe_list_get(memory_stats, 2, 0) - post_build = _safe_list_get(memory_stats, 3, 0) - if pre_read == 0 and post_read == 0 and pre_build == 0 and post_build == 0: - print( - "WARNING: SConsStats read all memory statistics as 0. Did you pass --debug=memory?" - ) - return None - return MemoryMetrics( - pre_read=pre_read, post_read=post_read, pre_build=pre_build, post_build=post_build - ) - - def _finalize_counts(self) -> Optional[List[CountsMetrics]]: - count_stats = SCons.Script.Main.count_stats.stats - if len(count_stats) != 4: - print( - f"WARNING: SConsStats expected 4 counts, found {len(count_stats)}. Did you pass --debug=count?" 
- ) - return None - - # This incomprehensible block taken from SCons produces stats_table, - # a mapping of class name to a list of counts with the same order as - # count_stats.labels - # From SCons/Script/Main.py:517 - stats_table = {} - for s in count_stats: - for n in [t[0] for t in s]: - stats_table[n] = [0, 0, 0, 0] - i = 0 - for s in count_stats: - for n, c in s: - stats_table[n][i] = c - i = i + 1 - # End section copied from SCons - - out = [] - for key, value in stats_table.items(): - out.append( - CountsMetrics( - array_index=len(out), - item_name=key, - pre_read=value[0], - post_read=value[1], - pre_build=value[2], - post_build=value[3], - ) - ) - - return out - - def _finalize_time(self) -> Optional[TimeMetrics]: - # unfortunately, much of the SCons time keeping is encased in the - # main() function with local variables, so we're stuck copying - # a bit of logic from SCons.Script.Main - - end_time = SCons.Script.start_time.hooked_end_time - try: - total_time = end_time - SCons.Script.start_time - except TypeError as e: - if str(e) == "unsupported operand type(s) for -: 'NoneType' and 'float'": - print( - "WARNING: SConsStats failed to calculate SCons total time. Did you pass --debug=time?" 
- ) - return None - raise e - - sconscript_time = SCons.Script.Main.sconscript_time - - # From SCons/Script/Main.py:1428 - if SCons.Script.Main.num_jobs == 1: - ct = SCons.Script.Main.cumulative_command_time - else: - if ( - SCons.Script.Main.last_command_end is None - or SCons.Script.Main.first_command_start is None - ): - ct = 0.0 - else: - ct = SCons.Script.Main.last_command_end - SCons.Script.Main.first_command_start - scons_time = total_time - sconscript_time - ct - # End section copied from SCons - - return TimeMetrics( - total=total_time, - sconscript_exec=sconscript_time, - scons_exec=scons_time, - command_exec=ct, - ) diff --git a/site_scons/site_tools/build_metrics/top_n_metrics.py b/site_scons/site_tools/build_metrics/top_n_metrics.py deleted file mode 100644 index a19da6d1fb4..00000000000 --- a/site_scons/site_tools/build_metrics/top_n_metrics.py +++ /dev/null @@ -1,98 +0,0 @@ -import argparse -import datetime -import json -import logging - -from tabulate import tabulate - -parser = argparse.ArgumentParser(description="Print top n metrics from build metrics json files.") -parser.add_argument( - "--input", - metavar="FILE", - type=str, - default="build-metrics.json", - help="Path to build metrics input json.", -) -parser.add_argument( - "--output", - metavar="FILE", - type=str, - default="top_n_metrics.txt", - help="Path to output text file.", -) -parser.add_argument( - "--num", - metavar="N", - type=int, - default=10, - help="Positive integer which represent the top N metrics to report on.", -) -args = parser.parse_args() - -logger = logging.getLogger() -logger.setLevel(logging.INFO) -logger.addHandler(logging.FileHandler(args.output)) -log_format = logging.Formatter("%(message)s") -for handler in logger.handlers: - handler.setFormatter(log_format) - -with open(args.input) as f: - metrics = json.load(f) - - logger.info(f"Time of report: {datetime.datetime.now()}") - logger.info(f"Task ID: {metrics['evg_id']}") - logger.info(f"Distro: 
{metrics['variant']}") - logger.info( - f"Peak Memory Used:\n{round(metrics['system_memory']['max'] / 1024.0 / 1024.0, 2)} MBs" - ) - logger.info(f"SCons Command:\n{metrics['scons_command']}") - - build_tasks_sort = metrics["build_tasks"].copy() - build_tasks_sort.sort(reverse=True, key=lambda x: x["mem_usage"]) - logger.info(f"\nTop {args.num} Memory tasks:") - table_data = [] - for i, val in enumerate(build_tasks_sort[: args.num], start=1): - table_data.append([i, val["mem_usage"] / 1024.0 / 1024.0, val["outputs"][0]]) - logger.info(tabulate(table_data, headers=["Num", "MBs", "Output"], floatfmt=".2f")) - - build_tasks_sort = metrics["build_tasks"].copy() - build_tasks_sort.sort(reverse=True, key=lambda x: x["end_time"] - x["start_time"]) - logger.info(f"\nTop {args.num} duration tasks:") - table_data = [] - for i, val in enumerate(build_tasks_sort[: args.num], start=1): - table_data.append([i, (val["end_time"] - val["start_time"]) / 10.0**9, val["outputs"][0]]) - logger.info(tabulate(table_data, headers=["Num", "Secs", "Output"], floatfmt=".2f")) - - build_tasks_sort = metrics["artifact_metrics"]["artifacts"].copy() - build_tasks_sort.sort(reverse=True, key=lambda x: x["size"]) - logger.info(f"\nTop {args.num} sized artifacts:") - table_data = [] - for i, val in enumerate(build_tasks_sort[: args.num], start=1): - table_data.append([i, val["size"] / 1024.0 / 1024.0, val["name"]]) - logger.info(tabulate(table_data, headers=["Num", "MBs", "Output"], floatfmt=".2f")) - - build_tasks_sort = [ - metric - for metric in metrics["artifact_metrics"]["artifacts"] - if metric.get("bin_metrics") and metric["bin_metrics"].get("text") - ] - build_tasks_sort.sort(reverse=True, key=lambda x: x["bin_metrics"]["text"]["vmsize"]) - logger.info(f"\nTop {args.num} Text sections:") - table_data = [] - for i, val in enumerate(build_tasks_sort[: args.num], start=1): - table_data.append([i, val["bin_metrics"]["text"]["vmsize"] / 1024.0 / 1024.0, val["name"]]) - 
logger.info(tabulate(table_data, headers=["Num", "MBs", "Output"], floatfmt=".2f")) - - build_tasks_sort = [ - metric - for metric in metrics["artifact_metrics"]["artifacts"] - if metric.get("bin_metrics") and metric["bin_metrics"].get("debug") - ] - build_tasks_sort.sort(reverse=True, key=lambda x: x["bin_metrics"]["debug"]["filesize"]) - logger.info(f"\nTop {args.num} Debug sections:") - table_data = [] - for i, val in enumerate(build_tasks_sort[: args.num], start=1): - table_data.append( - [i, val["bin_metrics"]["debug"]["filesize"] / 1024.0 / 1024.0, val["name"]] - ) - logger.info(tabulate(table_data, headers=["Num", "MBs", "Output"], floatfmt=".2f")) diff --git a/site_scons/site_tools/build_metrics/util.py b/site_scons/site_tools/build_metrics/util.py deleted file mode 100644 index 7b4d0a5237d..00000000000 --- a/site_scons/site_tools/build_metrics/util.py +++ /dev/null @@ -1,68 +0,0 @@ -import subprocess -import sys -import time - -_BUILD_METRIC_DATA = {} - - -# This section is from the original -# https://stackoverflow.com/a/70693158/1644736 -def fullname(o): - try: - # if o is a class or function, get module directly - module = o.__module__ - except AttributeError: - # then get module from o's class - module = o.__class__.__module__ - try: - # if o is a class or function, get name directly - name = o.__qualname__ - except AttributeError: - # then get o's class name - name = o.__class__.__qualname__ - # if o is a method of builtin class, then module will be None - if module == "builtins" or module is None: - return name - return module + "." + name - - -# This section is an excerpt of the original -# https://stackoverflow.com/a/63029332/1644736 -class CaptureAtexits: - def __init__(self): - self.captured = [] - - def __eq__(self, other): - self.captured.append(other) - return False - - -def mem_adjustment(mem_usage): - # apparently macos big sur (11) changed some of the api for getting memory, - # so the memory comes up a bit larger than expected. 
Testing shows it about - # 10 times large then what native macos tools report, so we will do some - # adjustment in the mean time until its fixed: - # https://github.com/giampaolo/psutil/issues/1908 - try: - if sys.platform == "darwin": - mem_adjust_version = subprocess.run( - ["sw_vers", "-productVersion"], capture_output=True, text=True, check=False - ).stdout.split(".")[0] - if int(mem_adjust_version) > 10: - return int(mem_usage / 10) - except (IndexError, ValueError): - pass - return mem_usage - - -def get_build_metric_dict(): - global _BUILD_METRIC_DATA - return _BUILD_METRIC_DATA - - -def add_meta_data(env, key, value): - get_build_metric_dict()[key] = value - - -def timestamp_now() -> int: - return time.time_ns() diff --git a/site_scons/site_tools/ccache.py b/site_scons/site_tools/ccache.py deleted file mode 100644 index 0f70f3879c8..00000000000 --- a/site_scons/site_tools/ccache.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import os -import re -import subprocess - -import SCons -from pkg_resources import parse_version - -# This is the oldest version of ccache that offers support for -gsplit-dwarf -_ccache_version_min = parse_version("3.2.3") - - -def exists(env): - """Look for a viable ccache implementation that meets our version requirements.""" - if not env.subst("$CCACHE"): - return False - - ccache = env.WhereIs("$CCACHE") - if not ccache: - print(f"Error: ccache not found at {env['CCACHE']}") - return False - - if "CCACHE_VERSION" in env and env["CCACHE_VERSION"] >= _ccache_version_min: - return True - - pipe = SCons.Action._subproc( - env, - SCons.Util.CLVar(ccache) + ["--version"], - stdin="devnull", - stderr="devnull", - stdout=subprocess.PIPE, - ) - - if pipe.wait() != 0: - print(f"Error: failed to execute '{env['CCACHE']}'") - return False - - validated = False - for line in pipe.stdout: - line = line.decode("utf-8") - if validated: - continue # consume all data - version_banner = re.search(r"^ccache version", line) - if not version_banner: - continue - ccache_version = re.split("ccache version (.+)", line) - if len(ccache_version) < 2: - continue - ccache_version = parse_version(ccache_version[1]) - if ccache_version >= _ccache_version_min: - validated = True - - if validated: - env["CCACHE_VERSION"] = ccache_version - else: - print( - f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}" - ) - - return validated - - -def generate(env): - """Add ccache support.""" - - # Absoluteify - env["CCACHE"] = env.WhereIs("$CCACHE") - - # Propagate CCACHE related variables into the command environment - for var, host_value in os.environ.items(): - if var.startswith("CCACHE_"): - 
env["ENV"][var] = host_value - - # SERVER-48289: Adding roll-your-own CFLAGS and CXXFLAGS can cause some very "weird" issues - # with using icecc and ccache if they turn out not to be supported by the compiler. Rather - # than try to filter each and every flag someone might try for the ones we know don't - # work, we'll just let the compiler ignore them. A better approach might be to pre-filter - # flags coming in from the environment by passing them through the appropriate *IfSupported - # method, but that's a much larger effort. - if env.ToolchainIs("clang"): - env.AppendUnique(CCFLAGS=["-Qunused-arguments"]) - - # Check whether icecream is requested and is a valid tool. - if "ICECC" in env: - icecream = SCons.Tool.Tool("icecream") - icecream_enabled = bool(icecream) and icecream.exists(env) - else: - icecream_enabled = False - - # Set up a performant ccache configuration. Here, we don't use a second preprocessor and - # pass preprocessor arguments that deterministically expand source files so a stable - # hash can be calculated on them. This both reduces the amount of work ccache needs to - # do and increases the likelihood of a cache hit. - if env.ToolchainIs("clang"): - if not env.get("CCACHE_EXTRAFILES_USE_SOURCE_PATHS", False): - env["ENV"].pop("CCACHE_CPP2", None) - env["ENV"]["CCACHE_NOCPP2"] = "1" - env.AppendUnique(CCFLAGS=["-frewrite-includes"]) - else: - env["ENV"].pop("CCACHE_NOCPP2", None) - env["ENV"]["CCACHE_CPP2"] = "1" - elif env.ToolchainIs("gcc"): - if icecream_enabled and not env.get("CCACHE_EXTRAFILES_USE_SOURCE_PATHS", False): - # Newer versions of Icecream will drop -fdirectives-only from - # preprocessor and compiler flags if it does not find a remote - # build host to build on. ccache, on the other hand, will not - # pass the flag to the compiler if CCACHE_NOCPP2=1, but it will - # pass it to the preprocessor. The combination of setting - # CCACHE_NOCPP2=1 and passing the flag can lead to build - # failures. 
- - # See: https://jira.mongodb.org/browse/SERVER-48443 - # We have an open issue with Icecream and ccache to resolve the - # cause of these build failures. Once the bug is resolved and - # the fix is deployed, we can remove this entire conditional - # branch and make it like the one for clang. - # TODO: https://github.com/icecc/icecream/issues/550 - env["ENV"].pop("CCACHE_CPP2", None) - env["ENV"]["CCACHE_NOCPP2"] = "1" - else: - env["ENV"].pop("CCACHE_NOCPP2", None) - env["ENV"]["CCACHE_CPP2"] = "1" - env.AppendUnique(CCFLAGS=["-fdirectives-only"]) - - # Ensure ccache accounts for any extra files in use that affects the generated object - # file. This can be used for situations where a file is passed as an argument to a - # compiler parameter and differences in the file need to be accounted for in the - # hash result to prevent erroneous cache hits. - if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]: - env["ENV"]["CCACHE_EXTRAFILES"] = ":".join( - [denyfile.path for denyfile in env["CCACHE_EXTRAFILES"]] - ) - - # Make a generator to expand to CCACHE in the case where we are - # not a conftest. We don't want to use ccache for configure tests - # because we don't want to use icecream for configure tests, but - # when icecream and ccache are combined we can't easily filter out - # configure tests for icecream since in that combination we use - # CCACHE_PREFIX to express the icecc tool, and at that point it is - # too late for us to meaningfully filter out conftests. So we just - # disable ccache for conftests entirely. Which feels safer - # somehow anyway. - def ccache_generator(target, source, env, for_signature): - if "conftest" not in str(target[0]): - return "$CCACHE" - return "" - - env["CCACHE_GENERATOR"] = ccache_generator - - # Add ccache to the relevant command lines. Wrap the reference to - # ccache in the $( $) pattern so that turning ccache on or off - # doesn't invalidate your build. 
- env["CCCOM"] = "$( $CCACHE_GENERATOR $)" + env["CCCOM"] - env["CXXCOM"] = "$( $CCACHE_GENERATOR $)" + env["CXXCOM"] - env["SHCCCOM"] = "$( $CCACHE_GENERATOR $)" + env["SHCCCOM"] - env["SHCXXCOM"] = "$( $CCACHE_GENERATOR $)" + env["SHCXXCOM"] diff --git a/site_scons/site_tools/compdb_adjust.py b/site_scons/site_tools/compdb_adjust.py deleted file mode 100644 index cf19bf59136..00000000000 --- a/site_scons/site_tools/compdb_adjust.py +++ /dev/null @@ -1,113 +0,0 @@ -import json - -import typer -from typing_extensions import Annotated - - -def main( - input_compdb: Annotated[str, typer.Option()], - output_compdb: Annotated[str, typer.Option()], - bazel_compdb: str = "", - ninja: bool = False, -): - compdb_list = [] - bazel_compdb_list = [] - bazel_files = [] - compdb_files = [] - - def print_dupes(target_list, file): - seen = set() - dupes = [] - - for x in target_list: - if x in seen: - dupes.append(x) - else: - seen.add(x) - print(f"ERROR, found duplicate entries for {file}:\n{dupes}") - - def find_output_file(arg_list): - output_file = None - for i, arg in enumerate(arg_list): - if arg == "-o" or arg == "--output": - output_file = arg_list[i + 1] - break - elif arg.startswith("/Fo") or arg.startswith("-Fo"): - output_file = arg[3:] - break - elif arg.startswith("--output="): - output_file = arg[9:] - break - if output_file is None: - raise Exception(f"Failed to find output arg in {arg_list}") - return output_file - - def fix_mongo_toolchain_path(arg_list): - return - - with open(input_compdb) as f: - compdb_list = json.load(f) - compdb_files = [f"{entry['file']}->{entry['output']}" for entry in compdb_list] - - if ninja: - for command in compdb_list: - if command["output"].endswith(".compdb"): - command["output"] = command["output"][: -(len(".compdb"))] - else: - print(f"compdb entry does not contain '.compdb': {command['output']}") - - if bazel_compdb: - with open(bazel_compdb) as f: - bazel_compdb_list = json.load(f) - bazel_compdb_adjusted = [] - for entry in 
bazel_compdb_list: - output_file = find_output_file(entry["arguments"]) - - quoted_args = [] - for arg in entry["arguments"]: - if arg.startswith('"') and arg.endswith('"'): - quoted_args.append(arg) - continue - if arg.startswith("'") and arg.endswith("'"): - quoted_args.append(arg) - continue - if " " in arg: - arg = '"' + arg + '"' - quoted_args.append(arg) - else: - quoted_args.append(arg) - - new_entry = { - "file": entry["file"], - "command": " ".join(quoted_args), - "directory": entry["directory"], - "output": output_file, - } - bazel_compdb_adjusted.append(new_entry) - bazel_files = [f"{entry['file']}->{entry['output']}" for entry in bazel_compdb_adjusted] - bazel_compdb_list = bazel_compdb_adjusted - - try: - assert len(bazel_files) == len(set(bazel_files)) - except AssertionError as exc: - print_dupes(bazel_files, bazel_compdb) - raise exc - - try: - assert len(compdb_files) == len(set(compdb_files)) - except AssertionError as exc: - print_dupes(compdb_files, input_compdb) - raise exc - - try: - assert not bool(set(bazel_files) & set(compdb_files)) - except AssertionError as exc: - print_dupes(compdb_files + bazel_files, f"{input_compdb} + {bazel_compdb}") - raise exc - - with open(output_compdb, "w") as f: - json.dump(compdb_list + bazel_compdb_list, f, indent=2) - - -if __name__ == "__main__": - typer.run(main) diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py deleted file mode 100644 index 812b5fd7a52..00000000000 --- a/site_scons/site_tools/compilation_db.py +++ /dev/null @@ -1,290 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import itertools -import json -import os -import shlex -import subprocess -import sys - -import SCons - -# Implements the ability for SCons to emit a compilation database for the MongoDB project. See -# http://clang.llvm.org/docs/JSONCompilationDatabase.html for details on what a compilation -# database is, and why you might want one. The only user visible entry point here is -# 'env.CompilationDatabase'. This method takes an optional 'target' to name the file that -# should hold the compilation database, otherwise, the file defaults to compile_commands.json, -# which is the name that most clang tools search for by default. - -# TODO: Is there a better way to do this than this global? 
Right now this exists so that the -# emitter we add can record all of the things it emits, so that the scanner for the top level -# compilation database can access the complete list, and also so that the writer has easy -# access to write all of the files. But it seems clunky. How can the emitter and the scanner -# communicate more gracefully? -__COMPILATION_DB_ENTRIES = {} - -# Cribbed from Tool/cc.py and Tool/c++.py. It would be better if -# we could obtain this from SCons. -_CSuffixes = [".c"] -if not SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CSuffixes.append(".C") - -_CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] -if SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CXXSuffixes.append(".C") - - -# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even -# integrate with the cache, but there doesn't seem to be much call for it. -class __CompilationDbNode(SCons.Node.Python.Value): - def __init__(self, value): - SCons.Node.Python.Value.__init__(self, value) - self.Decider(changed_since_last_build_node) - - -def changed_since_last_build_node(child, target, prev_ni, node): - """Dummy decider to force always building""" - return True - - -def makeEmitCompilationDbEntry(comstr): - """ - Effectively this creates a lambda function to capture: - * command line - * source - * target - :param comstr: unevaluated command line - :return: an emitter which has captured the above - """ - - def EmitCompilationDbEntry(target, source, env): - """ - This emitter will be added to each c/c++ object build to capture the info needed - for clang tools - :param target: target node(s) - :param source: source node(s) - :param env: Environment for use building this node - :return: target(s), source(s) - """ - - dbtarget = __CompilationDbNode(source) - - entry = env.__COMPILATIONDB_Entry( - target=dbtarget, - source=[], - __COMPILATIONDB_UTARGET=target, - __COMPILATIONDB_USOURCE=source, - __COMPILATIONDB_COMSTR=comstr, - 
__COMPILATIONDB_ENV=env, - ) - - # TODO: Technically, these next two lines should not be required: it should be fine to - # cache the entries. However, they don't seem to update properly. Since they are quick - # to re-generate disable caching and sidestep this problem. - env.AlwaysBuild(entry) - env.NoCache(entry) - - compiledb_target = env.get("COMPILEDB_TARGET") - - if compiledb_target not in __COMPILATION_DB_ENTRIES: - __COMPILATION_DB_ENTRIES[compiledb_target] = [] - - __COMPILATION_DB_ENTRIES[compiledb_target].append(dbtarget) - - return target, source - - return EmitCompilationDbEntry - - -def CompilationDbEntryAction(target, source, env, **kw): - """ - Create a dictionary with evaluated command line, target, source - and store that info as an attribute on the target - (Which has been stored in __COMPILATION_DB_ENTRIES array - :param target: target node(s) - :param source: source node(s) - :param env: Environment for use building this node - :param kw: - :return: None - """ - - # We will do some surgery on the command line. First we separate the args - # into a list, then we determine the index of the corresponding compiler - # value. Then we can extract a list of things before the compiler where are - # wrappers would be found. We extract the wrapper and put the command back - # together. 
- cmd_list = [ - str(elem) - for elem in env["__COMPILATIONDB_ENV"].subst_list( - env["__COMPILATIONDB_COMSTR"], - target=env["__COMPILATIONDB_UTARGET"], - source=env["__COMPILATIONDB_USOURCE"], - )[0] - ] - - if "CXX" in env["__COMPILATIONDB_COMSTR"]: - tool_subst = "$CXX" - else: - tool_subst = "$CC" - tool = env["__COMPILATIONDB_ENV"].subst( - tool_subst, target=env["__COMPILATIONDB_UTARGET"], source=env["__COMPILATIONDB_USOURCE"] - ) - - tool_index = cmd_list.index(tool) + 1 - tool_list = cmd_list[:tool_index] - cmd_list = cmd_list[tool_index:] - - for wrapper_ignore in env.get("_COMPILATIONDB_IGNORE_WRAPPERS", []): - wrapper = env.subst(wrapper_ignore, target=target, source=source) - if wrapper in tool_list: - tool_list.remove(wrapper) - - tool_abspaths = [] - for tool in tool_list: - tool_abspath = env.WhereIs(tool) - if tool_abspath is None: - tool_abspath = os.path.abspath(str(tool)) - tool_abspaths.append('"' + tool_abspath + '"') - cmd_list = tool_abspaths + cmd_list - - entry = { - "directory": env.Dir("#").abspath, - "command": " ".join(cmd_list), - "file": str(env["__COMPILATIONDB_USOURCE"][0]), - "output": shlex.quote(" ".join([str(t) for t in env["__COMPILATIONDB_UTARGET"]])), - } - - target[0].write(entry) - - -def WriteCompilationDb(target, source, env): - entries = [] - - for s in __COMPILATION_DB_ENTRIES[target[0].abspath]: - entries.append(s.read()) - file, ext = os.path.splitext(str(target[0])) - scons_compdb = f"{file}_scons{ext}" - with open(scons_compdb, "w") as target_file: - json.dump( - entries, - target_file, - sort_keys=True, - indent=4, - separators=(",", ": "), - ) - - adjust_script_out = env.File("#site_scons/site_tools/compdb_adjust.py").path - if env.get("COMPDB_IGNORE_BAZEL"): - bazel_compdb = [] - else: - bazel_compdb = ["--bazel-compdb", "compile_commands.json"] - env.RunBazelCommand( - [env["SCONS2BAZEL_TARGETS"].bazel_executable, "build"] - + env["BAZEL_FLAGS_STR"] - + ["//:compiledb"] - ) - - subprocess.run( - [ - 
sys.executable, - adjust_script_out, - "--input-compdb", - scons_compdb, - "--output-compdb", - str(target[0]), - ] - + bazel_compdb - ) - - -def ScanCompilationDb(node, env, path): - all_entries = [] - for compiledb_target in __COMPILATION_DB_ENTRIES: - all_entries.extend(__COMPILATION_DB_ENTRIES[compiledb_target]) - return all_entries - - -def generate(env, **kwargs): - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - env["COMPILATIONDB_COMSTR"] = kwargs.get( - "COMPILATIONDB_COMSTR", - "Building compilation database $TARGET", - ) - - components_by_suffix = itertools.chain( - itertools.product( - _CSuffixes, - [ - (static_obj, SCons.Defaults.StaticObjectEmitter, "$CCCOM"), - (shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCCCOM"), - ], - ), - itertools.product( - _CXXSuffixes, - [ - (static_obj, SCons.Defaults.StaticObjectEmitter, "$CXXCOM"), - (shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCXXCOM"), - ], - ), - ) - - for entry in components_by_suffix: - suffix = entry[0] - builder, base_emitter, command = entry[1] - - # Assumes a dictionary emitter - emitter = builder.emitter[suffix] - builder.emitter[suffix] = SCons.Builder.ListEmitter( - [ - emitter, - makeEmitCompilationDbEntry(command), - ] - ) - - env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder( - action=SCons.Action.Action(CompilationDbEntryAction, None), - ) - - env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder( - action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"), - target_scanner=SCons.Scanner.Scanner( - function=ScanCompilationDb, - node_class=None, - ), - ) - - def CompilationDatabase(env, target): - result = env.__COMPILATIONDB_Database(target=target, source=[]) - env["COMPILEDB_TARGET"] = result[0].abspath - - env.AlwaysBuild(result) - env.NoCache(result) - - return result - - env.AddMethod(CompilationDatabase, "CompilationDatabase") - - -def exists(env): - return True diff --git a/site_scons/site_tools/distsrc.py 
b/site_scons/site_tools/distsrc.py deleted file mode 100644 index e28bffebbf7..00000000000 --- a/site_scons/site_tools/distsrc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -import io -import os -import tarfile -import time -import zipfile -from distutils.spawn import find_executable - -import git -import SCons - -__distsrc_callbacks = [] - - -class DistSrcFile: - def __init__(self, **kwargs): - [setattr(self, key, val) for (key, val) in list(kwargs.items())] - - def __str__(self): - return self.name - - -class DistSrcArchive: - def __init__(self, archive_type, archive_file, filename, mode): - self.archive_type = archive_type - self.archive_file = archive_file - self.archive_name = filename - self.archive_mode = mode - - @staticmethod - def Open(filename): - if filename.endswith("tar"): - return DistSrcTarArchive( - "tar", - tarfile.open(filename, "r", format=tarfile.PAX_FORMAT), - filename, - "r", - ) - elif filename.endswith("zip"): - return DistSrcZipArchive( - "zip", - zipfile.ZipFile(filename, "a"), - filename, - "a", - ) - - def close(self): - self.archive_file.close() - - -class DistSrcTarArchive(DistSrcArchive): - def __iter__(self): - file_list = self.archive_file.getnames() - for name in file_list: - yield name - - def __getitem__(self, key): - item_data = self.archive_file.getmember(key) - return DistSrcFile( - name=key, - size=item_data.size, - mtime=item_data.mtime, - mode=item_data.mode, - type=item_data.type, - uid=item_data.uid, - gid=item_data.gid, - uname=item_data.uname, - gname=item_data.uname, - ) - - def append_file_contents( - self, - filename, - file_contents, - mtime=None, - mode=0o644, - uname="root", - gname="root", - ): - if mtime is None: - mtime = time.time() - file_metadata = tarfile.TarInfo(name=filename) - file_metadata.mtime = mtime - file_metadata.mode = mode - file_metadata.uname = uname - file_metadata.gname = gname - file_metadata.size = len(file_contents) - file_buf = io.BytesIO(file_contents.encode("utf-8")) - if self.archive_mode == "r": - self.archive_file.close() - self.archive_file = tarfile.open( - self.archive_name, - "a", - format=tarfile.PAX_FORMAT, - ) - self.archive_mode = "a" - 
self.archive_file.addfile(file_metadata, fileobj=file_buf) - - def append_file(self, filename, localfile): - self.archive_file.add(localfile, arcname=filename) - - -class DistSrcZipArchive(DistSrcArchive): - def __iter__(self): - file_list = self.archive_file.namelist() - for name in file_list: - yield name - - def __getitem__(self, key): - item_data = self.archive_file.getinfo(key) - fixed_time = item_data.date_time + (0, 0, 0) - is_dir = key.endswith("/") - return DistSrcFile( - name=key, - size=item_data.file_size, - mtime=time.mktime(fixed_time), - mode=0o775 if is_dir else 0o664, - type=tarfile.DIRTYPE if is_dir else tarfile.REGTYPE, - uid=0, - gid=0, - uname="root", - gname="root", - ) - - def append_file_contents( - self, - filename, - file_contents, - mtime=None, - mode=0o644, - uname="root", - gname="root", - ): - if mtime is None: - mtime = time.time() - self.archive_file.writestr(filename, file_contents) - - def append_file(self, filename, localfile): - self.archive_file.write(localfile, arcname=filename) - - -def build_error_action(msg): - def error_stub(target=None, source=None, env=None): - print(msg) - env.Exit(1) - - return [error_stub] - - -def distsrc_action_generator(source, target, env, for_signature): - # This is done in two stages because env.WhereIs doesn't seem to work - # correctly on Windows, but we still want to be able to override the PATH - # using the env. - git_path = env.WhereIs("git") - if not git_path: - git_path = find_executable("git") - - if not git_path: - return build_error_action("Could not find git - cannot create distsrc archive") - - def run_distsrc_callbacks(target=None, source=None, env=None): - archive_wrapper = DistSrcArchive.Open(str(target[0])) - for fn in __distsrc_callbacks: - fn(env, archive_wrapper) - archive_wrapper.close() - - target_ext = str(target[0])[-3:] - if target_ext not in ["zip", "tar"]: - print("Invalid file format for distsrc. 
Must be tar or zip file") - env.Exit(1) - - def create_archive(target=None, source=None, env=None): - try: - git_repo = git.Repo(os.getcwd()) - # get the original HEAD position of repo - head_commit_sha = git_repo.head.object.hexsha - - # add and commit the uncommited changes - git_repo.git.add(all=True) - # only commit changes if there are any - if len(git_repo.index.diff("HEAD")) != 0: - with git_repo.git.custom_environment( - GIT_COMMITTER_NAME="Evergreen", GIT_COMMITTER_EMAIL="evergreen@mongodb.com" - ): - git_repo.git.commit("--author='Evergreen <>'", "-m", "temp commit") - - # archive repo - dist_src_prefix = env.get("MONGO_DIST_SRC_PREFIX") - git_repo.git.archive( - "--format", target_ext, "--output", target[0], "--prefix", dist_src_prefix, "HEAD" - ) - - # reset branch to original state - git_repo.git.reset("--mixed", head_commit_sha) - except Exception as e: - env.FatalError(f"Error archiving: {e}") - - return [ - SCons.Action.Action(create_archive, "Creating archive for $TARGET"), - SCons.Action.Action( - run_distsrc_callbacks, - "Running distsrc callbacks for $TARGET", - ), - ] - - -def add_callback(env, fn): - __distsrc_callbacks.append(fn) - - -def generate(env, **kwargs): - env.AddMethod(add_callback, "AddDistSrcCallback") - env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder( - generator=distsrc_action_generator, - ) - - def DistSrc(env, target, **kwargs): - result = env.__DISTSRC(target=target, source=[], **kwargs) - env.AlwaysBuild(result) - env.NoCache(result) - return result - - env.AddMethod(DistSrc, "DistSrc") - - -def exists(env): - return True diff --git a/site_scons/site_tools/forceincludes.py b/site_scons/site_tools/forceincludes.py deleted file mode 100644 index 7a4091cf050..00000000000 --- a/site_scons/site_tools/forceincludes.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2021 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import SCons - - -def _add_scanner(builder): - # We are taking over the target scanner here. If we want to not do - # that we need to invent a ListScanner concept to inject. What if - # the other scanner wants a different path_function? - assert builder.target_scanner is None - - def new_scanner(node, env, path, argument): - # Use the path information that FindPathDirs gave us to resolve - # the forced includes into nodes given the search path. - fis = [env.FindFile(f, path) for f in env.get("FORCEINCLUDES", [])] - - # If all nodes could not be resolved, there are missing headers. 
- if not all(fis): - missing_headers = [ - header for node, header in zip(fis, env.get("FORCEINCLUDES")) if not node - ] - errstring = f"Could not find force include header(s): {missing_headers} in any path in CPPPATH:\n" - for cpppath in env.get("CPPPATH", []): - errstring += f"\t{env.Dir(cpppath).path}\n" - - raise SCons.Errors.SConsEnvironmentError(errstring) - - # Use the nodes *source* scanner, which was provided to us as - # `argument` when we created this scanner, to scan the forced - # includes for transitive includes. - node.get_executor().scan(scanner=argument, node_list=fis) - - # The forced includes will be added as implicit dependencies - # for us when we return them. - return fis - - # The 'builder.builder' here is because we need to reach inside - # the CompositeBuilder that wraps the object builders that come - # back from createObjBuilders. - builder.builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, - path_function=SCons.Script.FindPathDirs("CPPPATH"), - argument=builder.source_scanner, - ) - - -def generate(env, **kwargs): - if "FORCEINCLUDEPREFIX" not in env: - if "msvc" in env.get("TOOLS", []): - env["FORCEINCLUDEPREFIX"] = "/FI" - else: - env["FORCEINCLUDEPREFIX"] = "-include " - - if "FORCEINCLUDESUFFIX" not in env: - env["FORCEINCLUDESUFFIX"] = "" - - # Expand FORCEINCLUDES with the indicated prefixes and suffixes. - env["_FORCEINCLUDES"] = ( - "${_concat(FORCEINCLUDEPREFIX, FORCEINCLUDES, FORCEINCLUDESUFFIX, __env__, lambda x: x, TARGET, SOURCE)}" - ) - - env.Append( - # It might be better if this went in _CPPINCFLAGS, but it - # breaks the MSVC RC builder because the `rc` tool doesn't - # honor /FI. It should be OK to put it in CCFLAGS, unless - # there is a compiler that requires that an forced include - # only come after the include file search path arguments that - # would enable discovery. 
- CCFLAGS=[ - "$_FORCEINCLUDES", - ] - ) - - for object_builder in SCons.Tool.createObjBuilders(env): - _add_scanner(object_builder) - - -def exists(env): - return True diff --git a/site_scons/site_tools/gdb_index.py b/site_scons/site_tools/gdb_index.py deleted file mode 100644 index c2662f43a75..00000000000 --- a/site_scons/site_tools/gdb_index.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import SCons - - -def _update_builder(env, builder): - verbose = "" if env.Verbose() else None - - base_action = builder.action - if not isinstance(base_action, SCons.Action.ListAction): - base_action = SCons.Action.ListAction([base_action]) - - # There are cases were a gdb-index file is NOT generated from gdb 'save gdb-index' command, - # mostly shim libraries where there is no code in the library, and the following Actions would - # then fail. 
The files are created make sure there is always a file to operate on, if its an - # empty file then the following actions are basically NOOP, and its cleaner then writing - # conditions into each action. - - # Because this is all taking under one task, the list action will always run all actions if - # the target is out of date. So the gdb-index files would always be regenerated, and there is - # no value in keeping them around, it will just waste disk space. Therefore they should be - # removed as if they never existed from the task. The build system doesn't need to know about - # them. - if env.get("DWARF_VERSION") <= 4: - base_action.list.extend( - [ - SCons.Action.Action( - "touch ${TARGET}.gdb-index", - verbose, - ), - SCons.Action.Action( - '$GDB --batch-silent --quiet --nx --eval-command "save gdb-index ${TARGET.dir}" $TARGET', - "$GDB_INDEX_GEN_INDEX_STR", - ), - SCons.Action.Action( - "$OBJCOPY --add-section .gdb_index=${TARGET}.gdb-index --set-section-flags .gdb_index=readonly ${TARGET} ${TARGET}", - "$GDB_INDEX_ADD_SECTION_STR", - ), - SCons.Action.Action( - "rm -f ${TARGET}.gdb-index", - verbose, - ), - ] - ) - else: - base_action.list.extend( - [ - SCons.Action.Action( - "touch ${TARGET}.debug_names ${TARGET}.debug_str", - verbose, - ), - SCons.Action.Action( - '$GDB --batch-silent --quiet --nx --eval-command "save gdb-index -dwarf-5 ${TARGET.dir}" $TARGET', - "$GDB_INDEX_GEN_INDEX_STR", - ), - SCons.Action.Action( - "$OBJCOPY --dump-section .debug_str=${TARGET}.debug_str.new $TARGET", - verbose, - ), - SCons.Action.Action( - "$OBJCOPY --remove-section .debug_str $TARGET", - verbose, - ), - SCons.Action.Action( - "cat ${TARGET}.debug_str >>${TARGET}.debug_str.new", - verbose, - ), - SCons.Action.Action( - "$OBJCOPY --add-section .debug_names=${TARGET}.debug_names --set-section-flags .debug_names=readonly --add-section .debug_str=${TARGET}.debug_str.new ${TARGET} ${TARGET}", - "$GDB_INDEX_ADD_SECTION_STR", - ), - SCons.Action.Action( - "rm -f 
${TARGET}.debug_names ${TARGET}.debug_str.new ${TARGET}.debug_str", - verbose, - ), - ] - ) - - builder.action = base_action - - -def generate(env): - if env.get("OBJCOPY", None) is None: - env["OBJCOPY"] = env.WhereIs("objcopy") - if env.get("GDB", None) is None: - env["GDB"] = env.WhereIs("gdb") - - if not env.Verbose(): - env.Append( - GDB_INDEX_GEN_INDEX_STR="Using $GDB to generate index for $TARGET", - GDB_INDEX_ADD_SECTION_STR="Adding index sections into $TARGET", - ) - - for builder in ["Program", "SharedLibrary", "LoadableModule"]: - _update_builder(env, env["BUILDERS"][builder]) - - -def exists(env): - result = False - if env.TargetOSIs("posix"): - objcopy = env.get("OBJCOPY", None) or env.WhereIs("objcopy") - gdb = env.get("GDB", None) or env.WhereIs("gdb") - try: - dwarf_version = int(env.get("DWARF_VERSION")) - except ValueError: - dwarf_version = None - - unset_vars = [] - if not objcopy: - unset_vars += ["OBJCOPY"] - if not gdb: - unset_vars += ["GDB"] - if not dwarf_version: - unset_vars += ["DWARF_VERSION"] - - if not unset_vars: - print("Enabled generation of gdb index into binaries.") - result = True - else: - print(f"Disabled generation gdb index because {', '.join(unset_vars)} were not set.") - return result diff --git a/site_scons/site_tools/git_decider.py b/site_scons/site_tools/git_decider.py deleted file mode 100644 index cd464b2369f..00000000000 --- a/site_scons/site_tools/git_decider.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - - -def generate(env, **kwargs): - # Grab the existing decider functions out of the environment - # so we can invoke them when we can't use Git. 
- base_decider = env.decide_target - if base_decider != env.decide_source: - raise Exception("Decider environment seems broken") - - from git import Git - - thisRepo = Git(env.Dir("#").abspath) - currentGitState = thisRepo.ls_files("--stage") - lines = currentGitState.split("\n") - - file_sha1_map = {} - for line in lines: - line_content = line.split() - file_sha1_map[env.File(line_content[3]).path] = line_content[1] - - for m in thisRepo.ls_files("-m").split("\n"): - if m: - del file_sha1_map[env.File(m).path] - - def is_known_to_git(dependency): - return str(dependency) in file_sha1_map - - def git_says_file_is_up_to_date(dependency, prev_ni): - gitInfoForDep = file_sha1_map[str(dependency)] - - if prev_ni is None: - dependency.get_ninfo().csig = gitInfoForDep - return False - - if not (hasattr(prev_ni, "csig")): - prev_ni.csig = gitInfoForDep - - result = gitInfoForDep == prev_ni.csig - return result - - def MongoGitDecider(dependency, target, prev_ni, node): - if not is_known_to_git(dependency): - return base_decider(dependency, target, prev_ni, node) - return not git_says_file_is_up_to_date(dependency, prev_ni) - - env.Decider(MongoGitDecider) - - -def exists(env): - try: - from git import Git - - Git(env.Dir("#").abspath).ls_files("--stage") - return True - except: - return False diff --git a/site_scons/site_tools/gziptool.py b/site_scons/site_tools/gziptool.py deleted file mode 100644 index 6ed13759fa7..00000000000 --- a/site_scons/site_tools/gziptool.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -import gzip -import shutil - -import SCons - - -def GZipAction(target, source, env, **kw): - dst_gzip = gzip.GzipFile(str(target[0]), "wb") - with open(str(source[0]), "rb") as src_file: - shutil.copyfileobj(src_file, dst_gzip) - dst_gzip.close() - - -def generate(env, **kwargs): - env["BUILDERS"]["__GZIPTOOL"] = SCons.Builder.Builder( - action=SCons.Action.Action( - GZipAction, - "$GZIPTOOL_COMSTR", - ) - ) - env["GZIPTOOL_COMSTR"] = kwargs.get( - "GZIPTOOL_COMSTR", - "Compressing $TARGET with gzip", - ) - - def GZipTool(env, target, source, **kwargs): - result = env.__GZIPTOOL(target=target, source=source, **kwargs) - env.AlwaysBuild(result) - return result - - env.AddMethod(GZipTool, "GZip") - - -def exists(env): - return True diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py deleted file mode 100644 index ea258b50088..00000000000 --- a/site_scons/site_tools/icecream.py +++ /dev/null @@ -1,604 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import hashlib -import os -import re -import subprocess -import urllib - -import SCons -from pkg_resources import parse_version - -_icecream_version_min = parse_version("1.3") -_icecream_version_gcc_remote_cpp = parse_version("1.2") - - -def icecc_create_env(env, target, source, for_signature): - # Safe to assume unix here because icecream only works on Unix - mkdir = "mkdir -p ${TARGET.dir}" - - # Create the env, use awk to get just the tarball name and we store it in - # the shell variable $ICECC_VERSION_TMP so the subsequent mv command and - # store it in a known location. Add any files requested from the user environment. - create_env = ( - "ICECC_VERSION_TMP=$$(${SOURCES[0]} --$ICECC_COMPILER_TYPE ${SOURCES[1]} ${SOURCES[2]}" - ) - - # TODO: SERVER-57393 It would be a little more elegant if things in - # ICECC_CREATE_ENV_ADDFILES were handled as sources, because we - # would get automatic dependency tracking. However, there are some - # wrinkles around the mapped case so we have opted to leave it as - # just interpreting the env for now. 
- for addfile in env.get("ICECC_CREATE_ENV_ADDFILES", []): - if isinstance(addfile, tuple): - if len(addfile) == 2: - if env["ICECREAM_VERSION"] > parse_version("1.1"): - raise Exception("This version of icecream does not support addfile remapping.") - create_env += " --addfile {}={}".format( - env.File(addfile[0]).srcnode().abspath, env.File(addfile[1]).srcnode().abspath - ) - env.Depends(target, addfile[1]) - else: - raise Exception( - f"Found incorrect icecream addfile format: {str(addfile)}" - + "\ntuple must two elements of the form" - + "\n('chroot dest path', 'source file path')" - ) - else: - try: - create_env += f" --addfile {env.File(addfile).srcnode().abspath}" - env.Depends(target, addfile) - except: - # NOTE: abspath is required by icecream because of - # this line in icecc-create-env: - # https://github.com/icecc/icecream/blob/10b9468f5bd30a0fdb058901e91e7a29f1bfbd42/client/icecc-create-env.in#L534 - # which cuts out the two files based off the equals sign and - # starting slash of the second file - raise Exception( - f"Found incorrect icecream addfile format: {type(addfile)}" - + "\nvalue provided cannot be converted to a file path" - ) - - create_env += " | awk '/^creating .*\\.tar\\.gz/ { print $$2 }')" - - # Simply move our tarball to the expected locale. - mv = "mv $$ICECC_VERSION_TMP $TARGET" - - # Daisy chain the commands and then let SCons Subst in the rest. 
- cmdline = f"{mkdir} && {create_env} && {mv}" - return cmdline - - -def generate(env): - # Absoluteify, so we can derive ICERUN - env["ICECC"] = env.WhereIs("$ICECC") - - if "ICERUN" in env: - # Absoluteify, for parity with ICECC - icerun = env.WhereIs("$ICERUN") - else: - icerun = env.File("$ICECC").File("icerun") - env["ICERUN"] = icerun - - if "ICECC_CREATE_ENV" in env: - icecc_create_env_bin = env.WhereIs("$ICECC_CREATE_ENV") - else: - icecc_create_env_bin = env.File("ICECC").File("icecc-create-env") - env["ICECC_CREATE_ENV"] = icecc_create_env_bin - - # Make CC and CXX absolute paths too. This ensures the correct paths to - # compilers get passed to icecc-create-env rather than letting it - # potentially discover something we don't expect via PATH. - cc_path = env.WhereIs("$CC") - cxx_path = env.WhereIs("$CXX") - - if cc_path is None: - env["CC"] = os.path.abspath(env["CC"]) - if cxx_path is None: - env["CXX"] = os.path.abspath(env["CXX"]) - - # Set up defaults for configuration options - env["ICECREAM_TARGET_DIR"] = env.Dir( - env.get( - "ICECREAM_TARGET_DIR", - "#./.icecream", - ), - ) - verbose = env.get("ICECREAM_VERBOSE", False) - env["ICECC_DEBUG"] = env.get("ICECC_DEBUG", False) - - # We have a lot of things to build and run that the final user - # environment doesn't need to see or know about. Make a custom env - # that we use consistently from here to where we end up setting - # ICECREAM_RUN_ICECC in the user env. - setupEnv = env.Clone(NINJA_SKIP=True) - - if "ICECC_VERSION" in setupEnv and bool(setupEnv["ICECC_VERSION"]): - if setupEnv["ICECC_VERSION"].startswith("http"): - quoted = urllib.parse.quote(setupEnv["ICECC_VERSION"], safe=[]) - - # Use curl / wget to download the toolchain because SCons (and ninja) - # are better at running shell commands than Python functions. - # - # TODO: This all happens SCons side now. Should we just use python to - # fetch instead? 
- curl = setupEnv.WhereIs("curl") - wget = setupEnv.WhereIs("wget") - - if curl: - cmdstr = "curl -L" - elif wget: - cmdstr = "wget" - else: - raise Exception( - "You have specified an ICECC_VERSION that is a URL but you have neither wget nor curl installed." - ) - - # Copy ICECC_VERSION into ICECC_VERSION_URL so that we can - # change ICECC_VERSION without perturbing the effect of - # the action. - setupEnv["ICECC_VERSION_URL"] = setupEnv["ICECC_VERSION"] - setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command( - target=f"$ICECREAM_TARGET_DIR/{quoted}", - source=[setupEnv.Value(quoted)], - action=SCons.Action.Action( - f"{cmdstr} -o $TARGET $ICECC_VERSION_URL", - "Downloading compiler package from $ICECC_VERSION_URL" - if not verbose - else str(), - ), - )[0] - - else: - # Convert the users selection into a File node and do some basic validation - setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.File("$ICECC_VERSION") - - if not icecc_version_file.exists(): - raise Exception( - "The ICECC_VERSION variable set set to {}, but this file does not exist".format( - icecc_version_file, - ) - ) - - # This is what we are going to call the file names as known to SCons on disk - setupEnv["ICECC_VERSION_ID"] = "user_provided." + icecc_version_file.name - - else: - setupEnv["ICECC_COMPILER_TYPE"] = setupEnv.get( - "ICECC_COMPILER_TYPE", - os.path.basename(env["CC"]), - ) - - # This is what we are going to call the file names as known to SCons on disk. We do the - # subst early so that we can call `replace` on the result. 
- cc_names = setupEnv.subst("${CC}${CXX}") - # file name limit is 256 - if len(cc_names) > 100: - cc_names = hashlib.md5(cc_names.encode()).hexdigest() - setupEnv["ICECC_VERSION_ID"] = f"icecc-create-env.{cc_names}.tar.gz".replace("/", "_") - - setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command( - target="$ICECREAM_TARGET_DIR/$ICECC_VERSION_ID", - source=[ - "$ICECC_CREATE_ENV", - "$CC", - "$CXX", - ], - action=SCons.Action.Action( - icecc_create_env, - "Generating icecream compiler package: $TARGET" if not verbose else str(), - generator=True, - ), - )[0] - - # At this point, all paths above have produced a file of some sort. We now move on - # to producing our own signature for this local file. - - setupEnv.Append( - ICECREAM_TARGET_BASE_DIR="$ICECREAM_TARGET_DIR", - ICECREAM_TARGET_BASE_FILE="$ICECC_VERSION_ID", - ICECREAM_TARGET_BASE="$ICECREAM_TARGET_BASE_DIR/$ICECREAM_TARGET_BASE_FILE", - ) - - # If the file we are planning to use is not within - # ICECREAM_TARGET_DIR then make a local copy of it that is. - if icecc_version_file.dir != env["ICECREAM_TARGET_DIR"]: - setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command( - target=[ - "${ICECREAM_TARGET_BASE}.local", - ], - source=icecc_version_file, - action=SCons.Defaults.Copy("$TARGET", "$SOURCE"), - ) - - # There is no point caching the copy. - setupEnv.NoCache(icecc_version_file) - - # Now, we compute our own signature of the local compiler package, - # and create yet another link to the compiler package with a name - # containing our computed signature. Now we know that we can give - # this filename to icecc and it will be assured to really reflect - # the contents of the package, and not the arbitrary naming of the - # file as found on the users filesystem or from - # icecc-create-env. We put the absolute path to that filename into - # a file that we can read from. 
- icecc_version_info = setupEnv.File( - setupEnv.Command( - target=[ - "${ICECREAM_TARGET_BASE}.sha256", - "${ICECREAM_TARGET_BASE}.sha256.path", - ], - source=icecc_version_file, - action=SCons.Action.ListAction( - [ - # icecc-create-env run twice with the same input will - # create files with identical contents, and identical - # filenames, but with different hashes because it - # includes timestamps. So we compute a new hash based - # on the actual stream contents of the file by - # untarring it into shasum. - SCons.Action.Action( - "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}", - "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(), - ), - SCons.Action.Action( - "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz", - "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(), - ), - SCons.Action.Action( - "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}", - "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}" - if not verbose - else str(), - ), - ], - ), - ), - ) - - # We can't allow these to interact with the cache because the - # second action produces a file unknown to SCons. If caching were - # permitted, the other two files could be retrieved from cache but - # the file produced by the second action could not (and would not) - # be. We would end up with a broken setup. - setupEnv.NoCache(icecc_version_info) - - # Create a value node that, when built, contains the result of - # reading the contents of the sha256.path file. This way we can - # pull the value out of the file and substitute it into our - # wrapper script. 
- icecc_version_string_value = setupEnv.Command( - target=setupEnv.Value(None), - source=[icecc_version_info[1]], - action=SCons.Action.Action( - lambda env, target, source: target[0].write(source[0].get_text_contents()), - "Reading compiler package sha256 sum path from $SOURCE" if not verbose else str(), - ), - )[0] - - def icecc_version_string_generator(source, target, env, for_signature): - if for_signature: - return icecc_version_string_value.get_csig() - return icecc_version_string_value.read() - - # Set the values that will be interpolated into the run-icecc script. - setupEnv["ICECC_VERSION"] = icecc_version_string_generator - - # If necessary, we include the users desired architecture in the - # interpolated file. - icecc_version_arch_string = str() - if "ICECC_VERSION_ARCH" in setupEnv: - icecc_version_arch_string = "${ICECC_VERSION_ARCH}:" - - # Finally, create the run-icecc wrapper script. The contents will - # re-invoke icecc with our sha256 sum named file, ensuring that we - # trust the signature to be appropriate. In a pure SCons build, we - # actually wouldn't need this Substfile, we could just set - # env['ENV]['ICECC_VERSION'] to the Value node above. But that - # won't work for Ninja builds where we can't ask for the contents - # of such a node easily. Creating a Substfile means that SCons - # will take care of generating a file that Ninja can use. 
- run_icecc = setupEnv.Textfile( - target="$ICECREAM_TARGET_DIR/$ICECREAM_RUN_SCRIPT_SUBPATH/run-icecc.sh", - source=[ - "#!/bin/sh", - 'ICECC_VERSION=@icecc_version_arch@@icecc_version@ exec @icecc@ "$@"', - "", - ], - SUBST_DICT={ - "@icecc@": "$ICECC", - "@icecc_version@": "$ICECC_VERSION", - "@icecc_version_arch@": icecc_version_arch_string, - }, - # Don't change around the suffixes - TEXTFILEPREFIX=str(), - TEXTFILESUFFIX=str(), - # Somewhat surprising, but even though Ninja will defer to - # SCons to invoke this, we still need ninja to be aware of it - # so that it knows to invoke SCons to produce it as part of - # TEMPLATE expansion. Since we have set NINJA_SKIP=True for - # setupEnv, we need to reverse that here. - NINJA_SKIP=False, - ) - - setupEnv.AddPostAction( - run_icecc, - action=SCons.Defaults.Chmod("$TARGET", "u+x"), - ) - - setupEnv.Depends( - target=run_icecc, - dependency=[ - # TODO: Without the ICECC dependency, changing ICECC doesn't cause the Substfile - # to regenerate. Why is this? - "$ICECC", - # This dependency is necessary so that we build into this - # string before we create the file. - icecc_version_string_value, - # TODO: SERVER-50587 We need to make explicit depends here because of NINJA_SKIP. Any - # dependencies in the nodes created in setupEnv with NINJA_SKIP would have - # that dependency chain hidden from ninja, so they won't be rebuilt unless - # added as dependencies here on this node that has NINJA_SKIP=False. - "$CC", - "$CXX", - icecc_version_file, - ], - ) - - # From here out, we make changes to the users `env`. - setupEnv = None - - env["ICECREAM_RUN_ICECC"] = run_icecc[0] - - def icecc_toolchain_dependency_emitter(target, source, env): - if "conftest" not in str(target[0]): - # Requires or Depends? There are trade-offs: - # - # If it is `Depends`, then enabling or disabling icecream - # will cause a global recompile. But, if you regenerate a - # new compiler package, you will get a rebuild. 
If it is - # `Requires`, then enabling or disabling icecream will not - # necessarily cause a global recompile (it depends if - # C[,C,XX]FLAGS get changed when you do so), but on the - # other hand if you regenerate a new compiler package you - # will *not* get a rebuild. - # - # For now, we are opting for `Requires`, because it seems - # preferable that opting in or out of icecream shouldn't - # force a rebuild. - env.Requires(target, "$ICECREAM_RUN_ICECC") - return target, source - - # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if - # we could obtain this from SCons. - _CSuffixes = [".c"] - if not SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CSuffixes.append(".C") - - _CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] - if SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CXXSuffixes.append(".C") - - suffixes = _CSuffixes + _CXXSuffixes - for object_builder in SCons.Tool.createObjBuilders(env): - emitterdict = object_builder.builder.emitter - for suffix in emitterdict.keys(): - if suffix not in suffixes: - continue - base = emitterdict[suffix] - emitterdict[suffix] = SCons.Builder.ListEmitter( - [base, icecc_toolchain_dependency_emitter], - ) - - # Check whether ccache is requested and is a valid tool. - if "CCACHE" in env: - ccache = SCons.Tool.Tool("ccache") - ccache_enabled = bool(ccache) and ccache.exists(env) - else: - ccache_enabled = False - - if env.ToolchainIs("clang"): - env["ENV"]["ICECC_CLANG_REMOTE_CPP"] = 1 - elif env.ToolchainIs("gcc"): - if env["ICECREAM_VERSION"] < _icecream_version_gcc_remote_cpp: - # We aren't going to use ICECC_REMOTE_CPP because icecc - # 1.1 doesn't offer it. We disallow fallback to local - # builds because the fallback is serial execution. - env["ENV"]["ICECC_CARET_WORKAROUND"] = 0 - elif not ccache_enabled: - # If we can, we should make Icecream do its own preprocessing - # to reduce concurrency on the local host. 
We should not do - # this when ccache is in use because ccache will execute - # Icecream to do its own preprocessing and then execute - # Icecream as the compiler on the preprocessed source. - env["ENV"]["ICECC_REMOTE_CPP"] = 1 - - if "ICECC_SCHEDULER" in env: - env["ENV"]["USE_SCHEDULER"] = env["ICECC_SCHEDULER"] - - # Make a generator to expand to what icecream binary to use in - # the case where we are not a conftest or a deny list source file. - def icecc_generator(target, source, env, for_signature): - # TODO: SERVER-60915 use new conftest API - if "conftest" in str(target[0]): - return "" - - if env.subst("$ICECC_LOCAL_COMPILATION_FILTER", target=target, source=source) == "True": - return "$ICERUN" - - return "$ICECREAM_RUN_ICECC" - - env["ICECC_GENERATOR"] = icecc_generator - - if ccache_enabled: - # Don't want to overwrite some existing generator - # if there is an existing one, we will need to chain them - if env.get("SHELL_ENV_GENERATOR") is not None: - existing_gen = env.get("SHELL_ENV_GENERATOR") - else: - existing_gen = None - - # If ccache is in play we actually want the icecc binary in the - # CCACHE_PREFIX environment variable, not on the command line, per - # the ccache documentation on compiler wrappers. Otherwise, just - # put $ICECC on the command line. We wrap it in the magic "don't - # consider this part of the build signature" sigils in the hope - # that enabling and disabling icecream won't cause rebuilds. This - # is unlikely to really work, since above we have maybe changed - # compiler flags (things like -fdirectives-only), but we still try - # to do the right thing. - # - # If the path to CCACHE_PREFIX isn't absolute, then it will - # look it up in PATH. That isn't what we want here, we make - # the path absolute. 
- def icecc_ccache_prefix_gen(env, target, source): - # TODO: SERVER-60915 use new conftest API - if "conftest" in str(target[0]): - return env["ENV"] - - if existing_gen: - shell_env = existing_gen(env, target, source) - else: - shell_env = env["ENV"].copy() - shell_env["CCACHE_PREFIX"] = env.File( - env.subst("$ICECC_GENERATOR", target=target, source=source) - ).abspath - return shell_env - - env["SHELL_ENV_GENERATOR"] = icecc_ccache_prefix_gen - - else: - # We wrap it in the magic "don't - # consider this part of the build signature" sigils in the hope - # that enabling and disabling icecream won't cause rebuilds. This - # is unlikely to really work, since above we have maybe changed - # compiler flags (things like -fdirectives-only), but we still try - # to do the right thing. - icecc_string = "$( $ICECC_GENERATOR $)" - env["CCCOM"] = " ".join([icecc_string, env["CCCOM"]]) - env["CXXCOM"] = " ".join([icecc_string, env["CXXCOM"]]) - env["SHCCCOM"] = " ".join([icecc_string, env["SHCCCOM"]]) - env["SHCXXCOM"] = " ".join([icecc_string, env["SHCXXCOM"]]) - - # Make common non-compile jobs flow through icerun so we don't - # kill the local machine. It would be nice to plumb ICERUN in via - # SPAWN or SHELL but it is too much. You end up running `icerun - # icecc ...`, and icecream doesn't handle that. We could try to - # filter and only apply icerun if icecc wasn't present but that - # seems fragile. If you find your local machine being overrun by - # jobs, figure out what sort they are and extend this part of the - # setup. 
- def icerun_generator(target, source, env, for_signature): - if "conftest" not in str(target[0]): - return "$ICERUN" - return "" - - env["ICERUN_GENERATOR"] = icerun_generator - - icerun_commands = [ - "ARCOM", - "LINKCOM", - "PYTHON", - "SHLINKCOM", - ] - - for command in icerun_commands: - if command in env: - env[command] = " ".join(["$( $ICERUN_GENERATOR $)", env[command]]) - - # Uncomment these to debug your icecc integration - if env["ICECC_DEBUG"]: - env["ENV"]["ICECC_DEBUG"] = "debug" - env["ENV"]["ICECC_LOGFILE"] = "icecc.log" - - -def exists(env): - if not env.subst("$ICECC"): - return False - - icecc = env.WhereIs("$ICECC") - if not icecc: - # TODO: We should not be printing here because we don't always know the - # use case for loading this tool. It may be that the user desires - # writing this output to a log file or not even displaying it at all. - # We should instead be invoking a callback to SConstruct that it can - # interpret as needed. Or better yet, we should use some SCons logging - # and error API, if and when one should emerge. 
- print(f"Error: icecc not found at {env['ICECC']}") - return False - - if "ICECREAM_VERSION" in env and env["ICECREAM_VERSION"] >= _icecream_version_min: - return True - - pipe = SCons.Action._subproc( - env, - SCons.Util.CLVar(icecc) + ["--version"], - stdin="devnull", - stderr="devnull", - stdout=subprocess.PIPE, - ) - - if pipe.wait() != 0: - print(f"Error: failed to execute '{env['ICECC']}'") - return False - - validated = False - - if "ICERUN" in env: - # Absoluteify, for parity with ICECC - icerun = env.WhereIs("$ICERUN") - else: - icerun = env.File("$ICECC").File("icerun") - if not icerun: - print("Error: the icerun wrapper does not exist which is needed for icecream") - return False - - if "ICECC_CREATE_ENV" in env: - icecc_create_env_bin = env.WhereIs("$ICECC_CREATE_ENV") - else: - icecc_create_env_bin = env.File("ICECC").File("icecc-create-env") - if not icecc_create_env_bin: - print("Error: the icecc-create-env utility does not exist which is needed for icecream") - return False - - for line in pipe.stdout: - line = line.decode("utf-8") - if validated: - continue # consume all data - version_banner = re.search(r"^ICECC ", line) - if not version_banner: - continue - icecc_version = re.split("ICECC (.+)", line) - if len(icecc_version) < 2: - continue - icecc_current_version = parse_version(icecc_version[1]) - if icecc_current_version >= _icecream_version_min: - validated = True - if icecc_current_version: - env["ICECREAM_VERSION"] = icecc_current_version - if not validated: - print( - f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_current_version}" - ) - - return validated diff --git a/site_scons/site_tools/incremental_link.py b/site_scons/site_tools/incremental_link.py deleted file mode 100644 index 756f77d80d4..00000000000 --- a/site_scons/site_tools/incremental_link.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import SCons - - -def _tag_as_precious(target, source, env): - env.Precious(target) - return target, source - - -def generate(env): - builders = env["BUILDERS"] - for builder in ("Program", "SharedLibrary", "LoadableModule"): - emitter = builders[builder].emitter - builders[builder].emitter = SCons.Builder.ListEmitter( - [ - emitter, - _tag_as_precious, - ] - ) - - -def exists(env): - # By default, the windows linker is incremental, so unless - # overridden in the environment with /INCREMENTAL:NO, the tool is - # in play. - if env.TargetOSIs("windows") and "/INCREMENTAL:NO" not in env["LINKFLAGS"]: - return True - - # On posix platforms, excluding darwin, we may have enabled - # incremental linking. Check for the relevant flags. 
- if ( - env.TargetOSIs("posix") - and not env.TargetOSIs("darwin") - and "-fuse-ld=gold" in env["LINKFLAGS"] - and "-Wl,--incremental" in env["LINKFLAGS"] - ): - return True - - return False diff --git a/site_scons/site_tools/integrate_bazel.py b/site_scons/site_tools/integrate_bazel.py deleted file mode 100644 index 2b1e9d7979a..00000000000 --- a/site_scons/site_tools/integrate_bazel.py +++ /dev/null @@ -1,1757 +0,0 @@ -import atexit -import errno -import getpass -import glob -import hashlib -import json -import os -import platform -import queue -import shlex -import shutil -import signal -import stat -import subprocess -import sys -import tarfile -import threading -import time -import traceback -import urllib.request -from io import StringIO -from typing import Any, Dict, List, Set, Tuple - -import distro -import psutil -import requests -import SCons -from retry import retry -from retry.api import retry_call -from SCons.Script import ARGUMENTS - -from bazel.wrapper_hook.developer_bes_keywords import write_workstation_bazelrc -from buildscripts.install_bazel import install_bazel -from buildscripts.util.read_config import read_config_file -from evergreen.api import RetryingEvergreenApi - -# Disable retries locally -_LOCAL_MAX_RETRY_ATTEMPTS = 1 - -# Enable up to 3 attempts in -_CI_MAX_RETRY_ATTEMPTS = 3 - -_SUPPORTED_PLATFORM_MATRIX = [ - "linux:arm64:gcc", - "linux:arm64:clang", - "linux:amd64:gcc", - "linux:amd64:clang", - "linux:ppc64le:gcc", - "linux:ppc64le:clang", - "linux:s390x:gcc", - "linux:s390x:clang", - "windows:amd64:msvc", - "macos:amd64:clang", - "macos:arm64:clang", -] - -_SANITIZER_MAP = { - "address": "asan", - "fuzzer": "fsan", - "memory": "msan", - "leak": "lsan", - "thread": "tsan", - "undefined": "ubsan", -} - -_DISTRO_PATTERN_MAP = { - "Ubuntu 18*": "ubuntu18", - "Ubuntu 20*": "ubuntu20", - "Ubuntu 22*": "ubuntu22", - "Ubuntu 24*": "ubuntu24", - "Amazon Linux 2": "amazon_linux_2", - "Amazon Linux 2023": "amazon_linux_2023", - "Debian 
GNU/Linux 10": "debian10", - "Debian GNU/Linux 12": "debian12", - "Red Hat Enterprise Linux 8*": "rhel8", - "Red Hat Enterprise Linux 9*": "rhel9", - "SLES 15*": "suse15", -} - -_S3_HASH_MAPPING = { - "https://mdb-build-public.s3.amazonaws.com/bazel-binaries/bazel-7.5.0-ppc64le": "8a54bddf927b92876c737926dbbba808c0c9b840c27d118b59503a07a69cc651", - "https://mdb-build-public.s3.amazonaws.com/bazel-binaries/bazel-7.5.0-s390x": "43f75e0a4dc2d377b78e82614335b205629f6bada087f1efb8b76179704f0297", - "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-darwin-amd64": "f2ba5f721a995b54bab68c6b76a340719888aa740310e634771086b6d1528ecd", - "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-darwin-arm64": "69fa21cd2ccffc2f0970c21aa3615484ba89e3553ecce1233a9d8ad9570d170e", - "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-linux-amd64": "d28b588ac0916abd6bf02defb5433f6eddf7cba35ffa808eabb65a44aab226f7", - "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-linux-arm64": "861a16ba9979613e70bd3d2f9d9ab5e3b59fe79471c5753acdc9c431ab6c9d94", - "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.19.0/bazelisk-windows-amd64.exe": "d04555245a99dfb628e33da24e2b9198beb8f46d7e7661c313eb045f6a59f5e4", -} - - -class Globals: - # key: scons target, value: {bazel target, bazel output} - scons2bazel_targets: Dict[str, Dict[str, str]] = dict() - - # key: scons output, value: bazel outputs - scons_output_to_bazel_outputs: Dict[str, List[str]] = dict() - - # targets bazel needs to build - bazel_targets_work_queue: queue.Queue[str] = queue.Queue() - - # targets bazel has finished building - bazel_targets_done: Set[str] = set() - - # lock for accessing the targets done list - bazel_target_done_CV: threading.Condition = threading.Condition() - - # bazel command line with options, but not targets - bazel_base_build_command: List[str] = None - - # environment variables to set when 
invoking bazel - bazel_env_variables: Dict[str, str] = {} - - # Flag to signal that scons is ready to build, but needs to wait on bazel - waiting_on_bazel_flag: bool = False - - # Flag to signal that scons is ready to build, but needs to wait on bazel - bazel_build_success: bool = False - - bazel_build_exitcode: int = 1 - - # a IO object to hold the bazel output in place of stdout - bazel_thread_terminal_output = StringIO() - - bazel_executable = None - - max_retry_attempts: int = _LOCAL_MAX_RETRY_ATTEMPTS - - bazel_remote_timeout: int = -1 - - timeout_event = threading.Event() - - # Timeout when stuck scheduling without making progress for more than 15 minutes - # Ex string: - # [21,537 / 21,603] [Sched] Compiling src/mongo/db/s/migration_chunk_cloner_source.cpp; 1424s - last_sched_target_progress = "" - sched_time_start = 0 - sched_timeout_sec = 60 * 15 - - @staticmethod - def bazel_output(scons_node): - scons_node = str(scons_node).replace("\\", "/") - if platform.system() != "Windows": - scons_node = scons_node.replace("/mongo_crypt_v1", "/libmongo_crypt_v1") - return Globals.scons2bazel_targets[scons_node]["bazel_output"] - - @staticmethod - def bazel_target(scons_node): - scons_node = str(scons_node).replace("\\", "/") - if platform.system() != "Windows": - scons_node = scons_node.replace("/mongo_crypt_v1", "/libmongo_crypt_v1") - return Globals.scons2bazel_targets[scons_node]["bazel_target"] - - @staticmethod - def bazel_link_file(scons_node): - bazel_target = Globals.scons2bazel_targets[str(scons_node).replace("\\", "/")][ - "bazel_target" - ] - linkfile = bazel_target.replace("//src/", "bazel-bin/src/") + "_links.list" - return "/".join(linkfile.rsplit(":", 1)) - - @staticmethod - def bazel_sources_file(scons_node): - bazel_target = Globals.scons2bazel_targets[str(scons_node).replace("\\", "/")][ - "bazel_target" - ] - sources_file = ( - bazel_target.replace("//src/", "bazel-bin/src/") + "_sources_list.sources_list" - ) - return 
"/".join(sources_file.rsplit(":", 1)) - - -def bazel_debug(msg: str): - pass - - -def bazel_target_emitter( - target: List[SCons.Node.Node], source: List[SCons.Node.Node], env: SCons.Environment.Environment -) -> Tuple[List[SCons.Node.Node], List[SCons.Node.Node]]: - """This emitter will map any scons outputs to bazel outputs so copy can be done later.""" - - for t in target: - # bazel will cache the results itself, don't recache - env.NoCache(t) - - return (target, source) - - -def bazel_builder_action( - env: SCons.Environment.Environment, target: List[SCons.Node.Node], source: List[SCons.Node.Node] -): - if env.GetOption("separate-debug") == "on": - shlib_suffix = env.subst("$SHLIBSUFFIX") - sep_dbg = env.subst("$SEPDBG_SUFFIX") - if sep_dbg and str(target[0]).endswith(shlib_suffix): - target.append(env.File(str(target[0]) + sep_dbg)) - - # now copy all the targets out to the scons tree, note that target is a - # list of nodes so we need to stringify it for copyfile - for t in target: - dSYM_found = False - if ".dSYM/" in str(t): - # ignore dSYM plist file, as we skipped it prior - if str(t).endswith(".plist"): - continue - - dSYM_found = True - - if dSYM_found: - # Here we handle the difference between scons and bazel for dSYM dirs. SCons uses list - # actions to perform operations on the same target during some action. Bazel does not - # have an exact corresponding feature. Each action in bazel should have unique inputs and - # outputs. The file and targets wont line up exactly between scons and our mongo_cc_library, - # custom rule, specifically the way dsymutil generates the dwarf file inside the dSYM dir. So - # we remap the special filename suffixes we use for our bazel intermediate cc_library rules. 
- # - # So we will do the renaming of dwarf file to what scons expects here, before we copy to scons tree - substring_end = str(t).find(".dSYM/") + 5 - t = str(t)[:substring_end] - # This is declared as an output folder, so bazel appends (TreeArtifact) to it - s = Globals.bazel_output(t + " (TreeArtifact)") - s = str(s).removesuffix(" (TreeArtifact)") - dwarf_info_base = os.path.splitext(os.path.splitext(os.path.basename(t))[0])[0] - dwarf_sym_with_debug = os.path.join( - s, f"Contents/Resources/DWARF/{dwarf_info_base}_shared_with_debug.dylib" - ) - - # this handles shared libs or program binaries - if os.path.exists(dwarf_sym_with_debug): - dwarf_sym = os.path.join(s, f"Contents/Resources/DWARF/{dwarf_info_base}.dylib") - else: - dwarf_sym_with_debug = os.path.join( - s, f"Contents/Resources/DWARF/{dwarf_info_base}_with_debug" - ) - dwarf_sym = os.path.join(s, f"Contents/Resources/DWARF/{dwarf_info_base}") - - # copy the whole dSYM in one operation. Clean any existing files that might be in the way. 
- print(f"Moving .dSYM from {s} over to {t}.") - shutil.rmtree(str(t), ignore_errors=True) - shutil.copytree(s, str(t)) - # we want to change the permissions back to normal permissions on the folders copied rather than read only - os.chmod(t, 0o755) - for root, dirs, files in os.walk(t): - for name in files: - os.chmod(os.path.join(root, name), 0o755) - for name in dirs: - os.chmod(os.path.join(root, name), 0o755) - # shouldn't write our own files to the bazel directory, renaming file for scons - shutil.copy(dwarf_sym_with_debug.replace(s, t), dwarf_sym.replace(s, t)) - else: - s = Globals.bazel_output(t) - try: - # Check if the current directory and .cache files are on the same mount - # because hardlinking doesn't work between drives and when it fails - # it leaves behind a symlink that is hard to clean up - # We don't hardlink on windows because SCons will run link commands against - # the files in the bazel directory, and if its running the link command - # while SCons cleans up files in the output directory you get file permission errors - if ( - platform.system() != "Windows" - and os.stat(".").st_dev == os.stat(s, follow_symlinks=True).st_dev - ): - if os.path.exists(str(t)): - os.remove(str(t)) - os.link(s, str(t)) - os.chmod(str(t), os.stat(str(t)).st_mode | stat.S_IWUSR) - else: - print( - f"Copying {s} to {t} instead of hardlinking because files are on different mounts or we are on Windows." 
- ) - shutil.copy(s, str(t)) - os.chmod(str(t), os.stat(str(t)).st_mode | stat.S_IWUSR) - # Fall back on the original behavior of copying, likely if we hit here this - # will still fail due to hardlinking leaving some symlinks around - except Exception as e: - print(e) - print(f"Failed to hardlink {s} to {t}, trying to copying file instead.") - shutil.copy(s, str(t)) - os.chmod(str(t), os.stat(str(t)).st_mode | stat.S_IWUSR) - - -BazelCopyOutputsAction = SCons.Action.FunctionAction( - bazel_builder_action, - {"cmdstr": "Hardlinking $TARGETS from bazel build directory.", "varlist": ["BAZEL_FLAGS_STR"]}, -) - -total_query_time = 0 -total_queries = 0 - - -def bazel_query_func( - env: SCons.Environment.Environment, query_command_args: List[str], query_name: str = "query" -): - full_command = [Globals.bazel_executable] + query_command_args - global total_query_time, total_queries - start_time = time.time() - # these args prune the graph we need to search through a bit since we only care about our - # specific library target dependencies - full_command += ["--implicit_deps=False", "--tool_deps=False", "--include_aspects=False"] - # prevent remote connection and invocations since we just want to query the graph - full_command += [ - "--remote_executor=", - "--remote_cache=", - "--bes_backend=", - "--bes_results_url=", - ] - bazel_debug(f"Running query: {' '.join(full_command)}") - results = subprocess.run( - full_command, - capture_output=True, - text=True, - cwd=env.Dir("#").abspath, - env={**os.environ.copy(), **Globals.bazel_env_variables}, - ) - delta = time.time() - start_time - bazel_debug(f"Spent {delta} seconds running {query_name}") - total_query_time += delta - total_queries += 1 - - # Manually throw the error instead of using subprocess.run(... check=True) to print out stdout and stderr. 
- if results.returncode != 0: - print(results.stdout) - print(results.stderr) - raise subprocess.CalledProcessError( - results.returncode, full_command, results.stdout, results.stderr - ) - return results - - -# the ninja tool has some API that doesn't support using SCons env methods -# instead of adding more API to the ninja tool which has a short life left -# we just add the unused arg _dup_env -def ninja_bazel_builder( - env: SCons.Environment.Environment, - _dup_env: SCons.Environment.Environment, - node: SCons.Node.Node, -) -> Dict[str, Any]: - """ - Translator for ninja which turns the scons bazel_builder_action - into a build node that ninja can digest. - """ - - outs = env.NinjaGetOutputs(node) - ins = [Globals.bazel_output(out) for out in outs] - - # this represents the values the ninja_syntax.py will use to generate to real - # ninja syntax defined in the ninja manaul: https://ninja-build.org/manual.html#ref_ninja_file - return { - "outputs": outs, - "inputs": ins, - "rule": "BAZEL_COPY_RULE", - "variables": { - "cmd": " && ".join( - [ - f"$COPY {input_node.replace('/',os.sep)} {output_node}" - for input_node, output_node in zip(ins, outs) - ] - + [ - # Touch output files to make sure that the modified time of inputs is always older than the modified time of outputs. 
- f"copy /b {output_node} +,, {output_node}" - if env["PLATFORM"] == "win32" - else f"touch {output_node}" - for output_node in outs - ] - ) - }, - } - - -def write_bazel_build_output(line: str) -> None: - if Globals.waiting_on_bazel_flag: - if Globals.bazel_thread_terminal_output is not None: - Globals.bazel_thread_terminal_output.seek(0) - sys.stdout.write(Globals.bazel_thread_terminal_output.read()) - Globals.bazel_thread_terminal_output = None - sys.stdout.write(line) - else: - Globals.bazel_thread_terminal_output.write(line) - - -def bazel_server_timeout_dumper(jvm_out, proc_pid, project_root): - p = psutil.Process(proc_pid) - - Globals.timeout_event.wait() - if p.is_running(): - os.kill(int(proc_pid), signal.SIGTERM) - p.wait() - - if os.environ.get("CI"): - if os.path.exists(".bazel_real"): - with tarfile.open(os.path.join(project_root, "jvm.out.tar.gz"), "w:gz") as tar: - tar.add(jvm_out) - - try: - expansions = read_config_file(os.path.join(project_root, "../expansions.yml")) - task_id = expansions.get("task_id", None) - error_msg = ( - "Bazel timed out waiting for remote action (from BF-35762).\n" - f"See task: ." 
- ) - - evg_api = RetryingEvergreenApi.get_api( - config_file=os.path.join(project_root, ".evergreen.yml") - ) - evg_api.send_slack_message( - target="#devprod-build-automation", - msg=error_msg, - ) - except Exception: - traceback.print_exc() - - -def bazel_build_subproc_func(**kwargs): - project_root = os.path.abspath(".") - try: - output_base = subprocess.run( - [Globals.bazel_executable, "info", "output_base"], - capture_output=True, - text=True, - check=True, - env=kwargs["env"], - ).stdout.strip() - except subprocess.CalledProcessError: - output_base = "" - - if os.environ.get("CI"): - if os.path.exists(".bazel_real"): - with open(".bazel_real") as f: - kwargs["args"][0] = f.read().strip() - - jvm_out = os.path.join(output_base, "server/jvm.out") - - bazel_proc = subprocess.Popen(**kwargs) - - if output_base: - t = threading.Thread( - target=bazel_server_timeout_dumper, - args=(jvm_out, bazel_proc.pid, project_root), - ) - - # the bazel calls are wrapped in retries so we can rely on them to restart the attempt. - t.daemon = True - t.start() - - return bazel_proc - - -def check_timeout_condition(line): - if "[Sched]" in line: - target_progress = line.split("[Sched]")[0].strip() - if len(target_progress) > 0: - if Globals.last_sched_target_progress == target_progress: - if time.time() - Globals.sched_time_start > Globals.sched_timeout_sec: - Globals.last_sched_target_progress = "" - write_bazel_build_output("Stuck scheduling for too long, terminating") - Globals.timeout_event.set() - return True - else: - Globals.sched_time_start = time.time() - Globals.last_sched_target_progress = target_progress - - -def perform_tty_bazel_build(bazel_cmd: str) -> None: - # Importing pty will throw on certain platforms, the calling code must catch this exception - # and fallback to perform_non_tty_bazel_build. 
- import pty - - parent_fd, child_fd = pty.openpty() # provide tty - Globals.timeout_event.clear() - bazel_proc = bazel_build_subproc_func( - args=bazel_cmd, - stdin=child_fd, - stdout=child_fd, - stderr=subprocess.STDOUT, - env={**os.environ.copy(), **Globals.bazel_env_variables}, - ) - - buffer = "" - os.close(child_fd) - Globals.last_sched_target_progress = "" - Globals.sched_time_start = time.time() - try: - # This loop will terminate with an EOF or EOI when the process ends. - while True: - try: - data = os.read(parent_fd, 512) - except OSError as e: - if e.errno != errno.EIO: - raise - break # EIO means EOF on some systems - else: - if not data: # EOF - break - - write_bazel_build_output(data.decode()) - buffer += data.decode() - if "\n" in buffer: - line, buffer = buffer.split("\n", 1) - if check_timeout_condition(line): - raise subprocess.CalledProcessError(-1, bazel_cmd, "", "") - finally: - os.close(parent_fd) - if bazel_proc.poll() is None: - bazel_proc.terminate() - bazel_proc.wait() - - Globals.bazel_build_exitcode = bazel_proc.returncode - - if bazel_proc.returncode != 0: - raise subprocess.CalledProcessError(bazel_proc.returncode, bazel_cmd, "", "") - - -def perform_non_tty_bazel_build(bazel_cmd: str) -> None: - Globals.timeout_event.clear() - bazel_proc = bazel_build_subproc_func( - args=bazel_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - env={**os.environ.copy(), **Globals.bazel_env_variables}, - text=True, - ) - - Globals.last_sched_target_progress = "" - Globals.sched_time_start = time.time() - - # This loop will terminate when the process ends. 
- while True: - line = bazel_proc.stdout.readline() - if not line: - break - write_bazel_build_output(line) - if check_timeout_condition(line): - raise subprocess.CalledProcessError(-1, bazel_cmd, "", "") - - stdout, stderr = bazel_proc.communicate() - - Globals.bazel_build_exitcode = bazel_proc.returncode - - if bazel_proc.returncode != 0: - raise subprocess.CalledProcessError(bazel_proc.returncode, bazel_cmd, stdout, stderr) - - -def run_bazel_command(env, bazel_cmd, tries_so_far=0): - try: - server_pid = subprocess.run( - [Globals.bazel_executable, "info", "server_pid"], - capture_output=True, - text=True, - check=True, - env={**os.environ.copy(), **Globals.bazel_env_variables}, - ).stdout.strip() - except subprocess.CalledProcessError: - server_pid = None - - try: - tty_import_fail = False - try: - retry_call( - perform_tty_bazel_build, - [bazel_cmd], - tries=Globals.max_retry_attempts, - exceptions=(subprocess.CalledProcessError,), - ) - except ImportError: - # Run the actual build outside of the except clause to avoid confusion in the stack trace, - # otherwise, build failures on platforms that don't support tty will be displayed as import errors. 
- tty_import_fail = True - pass - - if tty_import_fail: - retry_call( - perform_non_tty_bazel_build, - [bazel_cmd], - tries=Globals.max_retry_attempts, - exceptions=(subprocess.CalledProcessError,), - ) - except subprocess.CalledProcessError as ex: - if os.environ.get("CI") is not None and tries_so_far == 0: - if platform.system() == "Windows": - print( - "Killing Bazel between retries on Windows to work around file access deadlock" - ) - try: - subprocess.run( - [os.path.abspath(Globals.bazel_executable), "shutdown"], - check=True, - text=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - env={**os.environ.copy(), **Globals.bazel_env_variables}, - ) - except subprocess.CalledProcessError as exc: - print(exc.stdout) - print(exc.stderr) - try: - if psutil.pid_exists(int(server_pid)): - proc = psutil.Process(int(server_pid)) - if proc.is_running(): - proc.terminate() - proc.wait(timeout=10) - proc.kill() - except Exception as kill_exc: - print("Tried to force kill server, but something happened:") - print(kill_exc) - print("Optimistically continuing anyways...") - - linker_jobs = 4 - sanitizers = env.GetOption("sanitize") - if sanitizers is not None and "fuzzer" in sanitizers.split(","): - linker_jobs = 1 - print( - f"Build failed, retrying with --jobs={linker_jobs} in case linking failed due to hitting concurrency limits..." 
- ) - run_bazel_command( - env, - bazel_cmd + ["--jobs", str(linker_jobs), "--link_timeout=False"], - tries_so_far=1, - ) - return - - print("ERROR: Bazel build failed:") - Globals.timeout_event.set() - - if Globals.bazel_thread_terminal_output is not None: - Globals.bazel_thread_terminal_output.seek(0) - ex.output += Globals.bazel_thread_terminal_output.read() - Globals.bazel_thread_terminal_output = None - print(ex.output) - - raise ex - Globals.bazel_build_success = True - Globals.timeout_event.set() - - -def bazel_build_thread_func(env, log_dir: str, verbose: bool, ninja_generate: bool) -> None: - """This thread runs the bazel build up front.""" - - if verbose: - extra_args = [] - else: - extra_args = ["--output_filter=DONT_MATCH_ANYTHING"] - - if ninja_generate: - for file in glob.glob("bazel-out/**/*.gen_source_list", recursive=True): - os.remove(file) - extra_args += ["--build_tag_filters=scons_link_lists"] - bazel_cmd = Globals.bazel_base_build_command + extra_args + ["//src/..."] - - elif SCons.Script.BUILD_TARGETS == ["compiledb", "+mongo-tidy-tests"]: - extra_args += [ - "--build_tag_filters=scons_link_lists,compiledb,gen_source,mongo-tidy-tests,mongo-tidy-checks" - ] - bazel_cmd = Globals.bazel_base_build_command + extra_args + ["//:compiledb", "//src/..."] - else: - build_tags = env.GetOption("bazel-build-tag") - if not build_tags: - build_tags += ["all"] - if "all" not in build_tags: - build_tags += ["scons_link_lists", "gen_source"] - extra_args += [f"--build_tag_filters={','.join(build_tags)}"] - bazel_cmd = Globals.bazel_base_build_command + extra_args + ["//src/..."] - if "compiledb" in SCons.Script.BUILD_TARGETS: - bazel_cmd = bazel_cmd + ["//:compiledb"] - - if ninja_generate: - print("Generating bazel link deps...") - else: - print(f"Bazel build command:\n{' '.join(bazel_cmd)}") - - if env.GetOption("coverity-build"): - print(f"BAZEL_COMMAND: {' '.join(bazel_cmd)}") - Globals.bazel_build_success = True - Globals.bazel_build_exitcode = 0 - 
return - - print("Starting bazel build thread...") - run_bazel_command(env, bazel_cmd) - - # This is used to detect if a user is running under bazelisk, delete this since - # the SCons integration sets up bazelisk internally, but we want to trigger failures - # afterwards if the user calls a non-bazelisk bazel directly. - if os.path.exists(".bazelrc.bazelisk"): - os.remove(".bazelrc.bazelisk") - - -def create_bazel_builder(builder: SCons.Builder.Builder) -> SCons.Builder.Builder: - return SCons.Builder.Builder( - action=BazelCopyOutputsAction, - prefix=builder.prefix, - suffix=builder.suffix, - src_suffix=builder.src_suffix, - source_scanner=builder.source_scanner, - target_scanner=builder.target_scanner, - emitter=SCons.Builder.ListEmitter([bazel_target_emitter]), - ) - - -def create_scons_and_bazel_builder(builder: SCons.Builder.Builder) -> SCons.Builder.Builder: - return SCons.Builder.Builder( - action=BazelCopyOutputsAction, - prefix=builder.prefix, - suffix=builder.suffix, - src_suffix=builder.src_suffix, - source_scanner=builder.source_scanner, - target_scanner=builder.target_scanner, - emitter=SCons.Builder.ListEmitter([builder.emitter, bazel_target_emitter]), - ) - - -# TODO delete this builder when we have testlist support in bazel -def create_program_builder(env: SCons.Environment.Environment) -> None: - env["BUILDERS"]["BazelProgram"] = create_bazel_builder(env["BUILDERS"]["Program"]) - - -def create_shared_library_builder(env: SCons.Environment.Environment) -> None: - env["BUILDERS"]["BazelSharedLibrary"] = create_bazel_builder(env["BUILDERS"]["SharedLibrary"]) - - -def get_default_cert_dir(): - if platform.system() == "Windows": - return f"C:/cygwin/home/{getpass.getuser()}/.engflow" - elif platform.system() == "Linux": - return f"/home/{getpass.getuser()}/.engflow" - elif platform.system() == "Darwin": - return f"{os.path.expanduser('~')}/.engflow" - - -def get_default_engflow_auth_path(): - bin_dir = os.path.expanduser("~/.local/bin/") - 
executable_name = "engflow_auth" - if platform.system() == "Windows": - executable_name += ".exe" - return os.path.join(bin_dir, executable_name) - - -def validate_remote_execution_certs(env: SCons.Environment.Environment) -> bool: - running_in_evergreen = os.environ.get("CI") - - # Check engflow_auth existence - if os.path.exists(get_default_engflow_auth_path()): - # Check engflow_auth token presence - appdata = os.getenv("APPDATA", "").replace("\\", "/") - if os.name == "nt" and os.path.exists( - os.path.expanduser(f"{appdata}/engflow_auth/tokens/sodalite.cluster.engflow.com") - ): - return True - elif platform.system() == "Darwin" and os.path.exists( - os.path.expanduser( - "~/Library/Application Support/engflow_auth/tokens/sodalite.cluster.engflow.com" - ) - ): - return True - elif os.path.exists( - os.path.expanduser("~/.config/engflow_auth/tokens/sodalite.cluster.engflow.com") - ): - return True - else: - print( - "engflow_auth is installed, but found no token. Please run the following to authenticate with EngFlow:\nbazel run --config=local //buildscripts:engflow_auth" - ) - return False - - if running_in_evergreen and not os.path.exists("./engflow.cert"): - print( - "ERROR: ./engflow.cert not found, which is required to build in evergreen without BAZEL_FLAGS=--config=local set. Please reach out to #ask-devprod-build for help." - ) - return False - - if os.name == "nt" and not os.path.exists(f"{os.path.expanduser('~')}/.bazelrc"): - with open(f"{os.path.expanduser('~')}/.bazelrc", "a") as bazelrc: - bazelrc.write( - f"common --tls_client_certificate={get_default_cert_dir()}/creds/engflow.crt\n" - ) - bazelrc.write(f"common --tls_client_key={get_default_cert_dir()}/creds/engflow.key\n") - - if not running_in_evergreen and not os.path.exists( - f"{get_default_cert_dir()}/creds/engflow.crt" - ): - # Temporary logic to copy over the credentials for users that ran the installation steps using the old directory (/engflow/). 
- if os.path.exists("/engflow/creds/engflow.crt") and os.path.exists( - "/engflow/creds/engflow.key" - ): - print( - "Moving EngFlow credentials from the legacy directory (/engflow/) to the new directory (~/.engflow/)." - ) - try: - os.makedirs(f"{get_default_cert_dir()}/creds/", exist_ok=True) - shutil.move( - "/engflow/creds/engflow.crt", - f"{get_default_cert_dir()}/creds/engflow.crt", - ) - shutil.move( - "/engflow/creds/engflow.key", - f"{get_default_cert_dir()}/creds/engflow.key", - ) - with open(f"{get_default_cert_dir()}/.bazelrc", "a") as bazelrc: - bazelrc.write( - f"common --tls_client_certificate={get_default_cert_dir()}/creds/engflow.crt\n" - ) - bazelrc.write( - f"common --tls_client_key={get_default_cert_dir()}/creds/engflow.key\n" - ) - except OSError as exc: - print(exc) - print( - "Failed to update cert location, please move them manually. Otherwise you can pass 'BAZEL_FLAGS=\"--config=local\"' on the SCons command line." - ) - - return True - - # Pull the external hostname of the system from aws - try: - response = requests.get( - "http://instance-data.ec2.internal/latest/meta-data/public-hostname" - ) - status_code = response.status_code - except Exception as _: - status_code = 500 - if status_code == 200: - public_hostname = response.text - else: - public_hostname = "localhost" - print( - f"""\nERROR: {get_default_cert_dir()}/creds/engflow.crt not found. Please reach out to #ask-devprod-build if you need help with the steps below. 
- -(If the below steps are not working or you are an external person to MongoDB, remote execution can be disabled by passing BAZEL_FLAGS=--config=local at the end of your scons.py invocation) - -Please complete the following steps to generate a certificate: -- (If not in the Engineering org) Request access to the MANA group https://mana.corp.mongodbgov.com/resources/659ec4b9bccf3819e5608712 -- Go to https://sodalite.cluster.engflow.com/gettingstarted (Uses mongodbcorp.okta.com auth URL) -- Login with OKTA, then click the \"GENERATE AND DOWNLOAD MTLS CERTIFICATE\" button - - (If logging in with OKTA doesn't work) Login with Google using your MongoDB email, then click the "GENERATE AND DOWNLOAD MTLS CERTIFICATE" button -- On your local system (usually your MacBook), open a terminal and run: - -ZIP_FILE=~/Downloads/engflow-mTLS.zip - -curl https://raw.githubusercontent.com/mongodb/mongo/master/buildscripts/setup_engflow_creds.sh -o setup_engflow_creds.sh -chmod +x ./setup_engflow_creds.sh -./setup_engflow_creds.sh {getpass.getuser()} {public_hostname} $ZIP_FILE {"local" if public_hostname == "localhost" else ""}\n""" - ) - return False - - if not running_in_evergreen and ( - not os.access(f"{get_default_cert_dir()}/creds/engflow.crt", os.R_OK) - or not os.access(f"{get_default_cert_dir()}/creds/engflow.key", os.R_OK) - ): - print( - f"Invalid permissions set on {get_default_cert_dir()}/creds/engflow.crt or {get_default_cert_dir()}/creds/engflow.key" - ) - print("Please run the following command to fix the permissions:\n") - print( - f"sudo chown {getpass.getuser()}:{getpass.getuser()} {get_default_cert_dir()}/creds/engflow.crt {get_default_cert_dir()}/creds/engflow.key" - ) - print( - f"sudo chmod 600 {get_default_cert_dir()}/creds/engflow.crt {get_default_cert_dir()}/creds/engflow.key" - ) - return False - return True - - -def generate_bazel_info_for_ninja(env: SCons.Environment.Environment) -> None: - # create a json file which contains all the relevant info from 
this generation - # that bazel will need to construct the correct command line for any given targets - ninja_bazel_build_json = { - "bazel_cmd": Globals.bazel_base_build_command, - "compiledb_cmd": [Globals.bazel_executable, "build"] - + env["BAZEL_FLAGS_STR"] - + ["//:compiledb"], - "defaults": [str(t) for t in SCons.Script.DEFAULT_TARGETS], - "targets": Globals.scons2bazel_targets, - "CC": env.get("CC", ""), - "CXX": env.get("CXX", ""), - "USE_NATIVE_TOOLCHAIN": os.environ.get("USE_NATIVE_TOOLCHAIN"), - } - with open(f".{env.subst('$NINJA_PREFIX')}.bazel_info_for_ninja.txt", "w") as f: - json.dump(ninja_bazel_build_json, f) - - # we also store the outputs in the env (the passed env is intended to be - # the same main env ninja tool is constructed with) so that ninja can - # use these to contruct a build node for running bazel where bazel list the - # correct bazel outputs to be copied to the scons tree. We also handle - # calculating the inputs. This will be the all the inputs of the outs, - # but and input can not also be an output. If a node is found in both - # inputs and outputs, remove it from the inputs, as it will be taken care - # internally by bazel build. 
- ninja_bazel_outs = [] - ninja_bazel_ins = [] - for scons_t, bazel_t in Globals.scons2bazel_targets.items(): - ninja_bazel_outs += [bazel_t["bazel_output"]] - ninja_bazel_ins += env.NinjaGetInputs(env.File(scons_t)) - - if platform.system() == "Linux" and not os.environ.get("USE_NATIVE_TOOLCHAIN"): - ninja_bazel_outs += [env.get("CC"), env.get("CXX")] - - # This is to be used directly by ninja later during generation of the ninja file - env["NINJA_BAZEL_OUTPUTS"] = ninja_bazel_outs - env["NINJA_BAZEL_INPUTS"] = ninja_bazel_ins - - -@retry(tries=5, delay=3) -def download_path_with_retry(*args, **kwargs): - urllib.request.urlretrieve(*args, **kwargs) - - -install_query_cache = {} - - -def bazel_deps_check_query_cache(env, bazel_target): - return install_query_cache.get(bazel_target, None) - - -def bazel_deps_add_query_cache(env, bazel_target, results): - install_query_cache[bazel_target] = results - - -link_query_cache = {} - - -def bazel_deps_check_link_query_cache(env, bazel_target): - return link_query_cache.get(bazel_target, None) - - -def bazel_deps_add_link_query_cache(env, bazel_target, results): - link_query_cache[bazel_target] = results - - -def sha256_file(filename: str) -> str: - sha256_hash = hashlib.sha256() - with open(filename, "rb") as f: - for block in iter(lambda: f.read(4096), b""): - sha256_hash.update(block) - return sha256_hash.hexdigest() - - -def verify_s3_hash(s3_path: str, local_path: str) -> None: - if s3_path not in _S3_HASH_MAPPING: - raise Exception( - "S3 path not found in hash mapping, unable to verify downloaded for s3 path: s3_path" - ) - - hash = sha256_file(local_path) - if hash != _S3_HASH_MAPPING[s3_path]: - raise Exception( - f"Hash mismatch for {s3_path}, expected {_S3_HASH_MAPPING[s3_path]} but got {hash}" - ) - - -def find_distro_match(distro_str: str) -> str: - for distro_pattern, simplified_name in _DISTRO_PATTERN_MAP.items(): - if "*" in distro_pattern: - prefix_suffix = distro_pattern.split("*") - if 
distro_str.startswith(prefix_suffix[0]) and distro_str.endswith(prefix_suffix[1]): - return simplified_name - elif distro_str == distro_pattern: - return simplified_name - return None - - -time_auto_installing = 0 -count_of_auto_installing = 0 - - -def timed_auto_install_bazel(env, libdep, shlib_suffix): - global time_auto_installing, count_of_auto_installing - start_time = time.time() - auto_install_bazel(env, libdep, shlib_suffix) - time_auto_installing += time.time() - start_time - count_of_auto_installing += 1 - - -def auto_install_single_target(env, libdep, suffix, bazel_node): - auto_install_mapping = env["AIB_SUFFIX_MAP"].get(suffix) - if auto_install_mapping is not None: - env.AutoInstall( - target=auto_install_mapping.directory, - source=[bazel_node], - AIB_COMPONENT=env.get("AIB_COMPONENT", "AIB_DEFAULT_COMPONENT"), - AIB_ROLE=auto_install_mapping.default_role, - AIB_COMPONENTS_EXTRA=env.get("AIB_COMPONENTS_EXTRA", []), - ) - auto_installed_libdep = env.GetAutoInstalledFiles(libdep) - auto_installed_bazel_node = env.GetAutoInstalledFiles(bazel_node) - - if auto_installed_libdep[0] != auto_installed_bazel_node[0]: - env.Depends(auto_installed_libdep[0], auto_installed_bazel_node[0]) - - return env.GetAutoInstalledFiles(bazel_node) - - -def auto_install_bazel(env, libdep, shlib_suffix): - scons_target = str(libdep).replace( - f"{env.Dir('#').abspath}/{env['BAZEL_OUT_DIR']}/src", env.Dir("$BUILD_DIR").path - ) - bazel_target = env["SCONS2BAZEL_TARGETS"].bazel_target(scons_target) - bazel_libdep = env.File(f"#/{env['SCONS2BAZEL_TARGETS'].bazel_output(scons_target)}") - - query_results = env.CheckBazelDepsCache(bazel_target) - - if query_results is None: - linkfile = env["SCONS2BAZEL_TARGETS"].bazel_link_file(scons_target) - with open(os.path.join(env.Dir("#").abspath, linkfile)) as f: - query_results = f.read() - - filtered_results = "" - for lib in query_results.splitlines(): - bazel_out_path = lib.replace(f"{env['BAZEL_OUT_DIR']}/src", "bazel-bin/src") - if 
os.path.exists(env.File("#/" + bazel_out_path + ".exclude_lib").abspath): - continue - filtered_results += lib + "\n" - query_results = filtered_results - - env.AddBazelDepsCache(bazel_target, query_results) - - for line in query_results.splitlines(): - # We are only interested in installing shared libs and their debug files - if not line.endswith(shlib_suffix): - continue - - bazel_node = env.File(f"#/{line}") - debug_files = [] - debug_suffix = "" - # This was copied from separate_debug.py - if env.TargetOSIs("darwin"): - # There isn't a lot of great documentation about the structure of dSYM bundles. - # For general bundles, see: - # - # https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html - # - # But we expect to find two files in the bundle. An - # Info.plist file under Contents, and a file with the same - # name as the target under Contents/Resources/DWARF. - - target0 = bazel_node - dsym_dir_name = target0.name + ".dSYM" - dsym_dir = env.Dir(f"#/{line}.dSYM") - - dwarf_sym_with_debug = os.path.join( - dsym_dir.abspath, f"Contents/Resources/DWARF/{target0.name}_shared_with_debug.dylib" - ) - - # this handles shared libs or program binaries - if os.path.exists(dwarf_sym_with_debug): - dwarf_sym_name = f"{target0.name}.dylib" - else: - dwarf_sym_with_debug = os.path.join( - dsym_dir.abspath, f"Contents/Resources/DWARF/{target0.name}_with_debug" - ) - dwarf_sym_name = f"{target0.name}" - - plist_file = env.File("Contents/Info.plist", directory=dsym_dir) - setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - plist_file.attributes, - "aib_additional_directory", - "{}/Contents".format(dsym_dir_name), - ) - - dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir) - - dwarf_file = env.File(dwarf_sym_with_debug, directory=dwarf_dir) - setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - dwarf_file.attributes, - "aib_additional_directory", - 
"{}/Contents/Resources/DWARF".format(dsym_dir_name), - ) - setattr(dwarf_file.attributes, "aib_new_name", dwarf_sym_name) - - debug_files.extend([plist_file, dwarf_file]) - debug_suffix = ".dSYM" - - elif env.TargetOSIs("posix"): - debug_suffix = env.subst("$SEPDBG_SUFFIX") - debug_file = env.File(f"#/{line}{debug_suffix}") - debug_files.append(debug_file) - elif env.TargetOSIs("windows"): - debug_suffix = ".pdb" - debug_file = env.File(f"#/{line}{debug_suffix}") - debug_files.append(debug_file) - else: - pass - - for debug_file in debug_files: - setattr(debug_file.attributes, "debug_file_for", bazel_node) - setattr(bazel_node.attributes, "separate_debug_files", debug_files) - - auto_install_single_target(env, bazel_libdep, shlib_suffix, bazel_node) - - if env.GetAutoInstalledFiles(bazel_libdep): - for debug_file in debug_files: - auto_install_single_target( - env, - getattr(bazel_libdep.attributes, "separate_debug_files")[0], - debug_suffix, - debug_file, - ) - - return env.GetAutoInstalledFiles(libdep) - - -def auto_archive_bazel(env, node, already_archived, search_stack): - bazel_child = getattr(node.attributes, "AIB_INSTALL_FROM", node) - if not str(bazel_child).startswith("bazel-out"): - try: - bazel_child = env["SCONS2BAZEL_TARGETS"].bazel_output(bazel_child.path) - except KeyError: - return - - if str(bazel_child) not in already_archived: - already_archived.add(str(bazel_child)) - scons_target = str(bazel_child).replace( - f"{env['BAZEL_OUT_DIR']}/src", env.Dir("$BUILD_DIR").path - ) - linkfile = env["SCONS2BAZEL_TARGETS"].bazel_link_file(scons_target) - - with open(os.path.join(env.Dir("#").abspath, linkfile)) as f: - query_results = f.read() - - filtered_results = "" - for lib in query_results.splitlines(): - bazel_out_path = lib.replace("\\", "/").replace( - f"{env['BAZEL_OUT_DIR']}/src", "bazel-bin/src" - ) - if os.path.exists( - env.File("#/" + bazel_out_path + ".exclude_lib").abspath.replace("\\", "/") - ): - continue - filtered_results += lib + "\n" - 
query_results = filtered_results - for lib in query_results.splitlines(): - if str(bazel_child).endswith(env.subst("$SEPDBG_SUFFIX")): - debug_file = getattr(env.File("#/" + lib).attributes, "separate_debug_files")[0] - bazel_install_file = env.GetAutoInstalledFiles(debug_file)[0] - else: - bazel_install_file = env.GetAutoInstalledFiles(env.File("#/" + lib))[0] - - if bazel_install_file: - search_stack.append(bazel_install_file) - - -def load_bazel_builders(env): - # === Builders === - create_program_builder(env) - create_shared_library_builder(env) - - if env.GetOption("ninja") != "disabled": - env.NinjaRule( - "BAZEL_COPY_RULE", "$env$cmd", description="Copy from Bazel", pool="local_pool" - ) - - -total_libdeps_linking_time = 0 -count_of_libdeps_links = 0 - - -def add_libdeps_time(env, delate_time): - global total_libdeps_linking_time, count_of_libdeps_links - total_libdeps_linking_time += delate_time - count_of_libdeps_links += 1 - - -def bazel_execroot(env): - return f'bazel-{os.path.basename(env.Dir("#").abspath)}' - - -def prefetch_toolchain(env, version): - setup_max_retry_attempts() - bazel_bin_dir = ( - env.GetOption("evergreen-tmp-dir") - if env.GetOption("evergreen-tmp-dir") - else os.path.expanduser("~/.local/bin") - ) - if not os.path.exists(bazel_bin_dir): - os.makedirs(bazel_bin_dir) - Globals.bazel_executable = install_bazel(bazel_bin_dir) - if platform.system() == "Linux" and not ARGUMENTS.get("CC") and not ARGUMENTS.get("CXX"): - exec_root = bazel_execroot(env) - if exec_root and not os.path.exists(f"{exec_root}/external/mongo_toolchain_{version}"): - print("Prefetch the mongo toolchain...") - try: - retry_call( - subprocess.run, - [ - [ - Globals.bazel_executable, - "build", - "mongo_toolchain", - "--config=local", - f"--//bazel/config:mongo_toolchain_version={version}", - ] - ], - fkwargs={ - "env": {**os.environ.copy(), **Globals.bazel_env_variables}, - "check": True, - }, - tries=Globals.max_retry_attempts, - 
exceptions=(subprocess.CalledProcessError,), - ) - except subprocess.CalledProcessError as ex: - print(f"ERROR: Bazel fetch of {version} toolchain failed!") - print(ex) - if version == "v4": - print("Please ask about this in #ask-devprod-build slack channel.") - else: - print(f"The {version} toolchain may not be supported on this platform.") - sys.exit(1) - - return exec_root - - -# Required boilerplate function -def exists(env: SCons.Environment.Environment) -> bool: - # === Bazelisk === - - write_workstation_bazelrc(sys.argv) - cleanup_gitinfo_bazelrc() - env.AddMethod(prefetch_toolchain, "PrefetchToolchain") - env.AddMethod(bazel_execroot, "BazelExecroot") - env.AddMethod(load_bazel_builders, "LoadBazelBuilders") - return True - - -def handle_bazel_program_exception(env, target, outputs): - if sys.platform == "win32" and ( - env.GetOption("link-model") == "dynamic-sdk" - or "cyrus_sasl_windows_test_plugin" in target - or "mongoca" in target - ): - is_shared_library = False - for bazel_output_file in outputs: - if os.path.splitext(bazel_output_file)[1] in set([".dll", ".pdb"]): - is_shared_library = True - scons_node_str = bazel_output_file.replace( - f"{env['BAZEL_OUT_DIR']}/src", env.Dir("$BUILD_DIR").path.replace("\\", "/") - ) - Globals.scons2bazel_targets[scons_node_str.replace("\\", "/")] = { - "bazel_target": target, - "bazel_output": bazel_output_file.replace("\\", "/"), - } - return is_shared_library - - prog_suf = env.subst("$PROGSUFFIX") - dbg_suffix = ".pdb" if sys.platform == "win32" else env.subst("$SEPDBG_SUFFIX") - bazel_program = False - - # on windows the pdb for dlls contains no double extensions - # so we need to check all the outputs up front to know - for bazel_output_file in outputs: - if bazel_output_file.endswith(".dll"): - return False - - if os.path.splitext(outputs[0])[1] in [prog_suf, dbg_suffix]: - for bazel_output_file in outputs: - first_ext = os.path.splitext(bazel_output_file)[1] - if dbg_suffix and first_ext == dbg_suffix: - 
second_ext = os.path.splitext(os.path.splitext(bazel_output_file)[0])[1] - else: - second_ext = None - - if ( - (second_ext is not None and second_ext + first_ext == prog_suf + dbg_suffix) - or (second_ext is None and first_ext == prog_suf) - or first_ext == ".exe" - or first_ext == ".pdb" - ): - bazel_program = True - scons_node_str = bazel_output_file.replace( - f"{env['BAZEL_OUT_DIR']}/src", env.Dir("$BUILD_DIR").path.replace("\\", "/") - ) - Globals.scons2bazel_targets[scons_node_str.replace("\\", "/")] = { - "bazel_target": target, - "bazel_output": bazel_output_file.replace("\\", "/"), - } - return bazel_program - - -def cleanup_gitinfo_bazelrc(): - if os.environ.get("CI") is None: - gitinfo_bazelrc_file = ".bazelrc.gitinfo" - if os.path.exists(gitinfo_bazelrc_file): - try: - os.remove(gitinfo_bazelrc_file) - except: - pass - - -def setup_max_retry_attempts() -> None: - Globals.max_retry_attempts = ( - _CI_MAX_RETRY_ATTEMPTS if os.environ.get("CI") is not None else _LOCAL_MAX_RETRY_ATTEMPTS - ) - - -def is_local_execution(env: SCons.Environment.Environment) -> bool: - normalized_arch = ( - platform.machine().lower().replace("aarch64", "arm64").replace("x86_64", "amd64") - ) - user_flags = shlex.split(env.get("BAZEL_FLAGS", "")) - return ( - os.environ.get("USE_NATIVE_TOOLCHAIN") - or normalized_arch not in ["arm64", "amd64"] - or "--config=local" in user_flags - or "--config=public-release" in user_flags - ) - - -def generate(env: SCons.Environment.Environment) -> None: - if env["BAZEL_INTEGRATION_DEBUG"]: - global bazel_debug - - def bazel_debug_func(msg: str): - print("[BAZEL_INTEGRATION_DEBUG] " + str(msg)) - - bazel_debug = bazel_debug_func - - # this should be populated from the sconscript and include list of targets scons - # indicates it wants to build - env["SCONS_SELECTED_TARGETS"] = [] - - # === Architecture/platform === - - # Bail if current architecture not supported for Bazel: - normalized_arch = ( - platform.machine().lower().replace("aarch64", 
"arm64").replace("x86_64", "amd64") - ) - normalized_os = sys.platform.replace("win32", "windows").replace("darwin", "macos") - current_platform = f"{normalized_os}:{normalized_arch}:{env.ToolchainName()}" - if current_platform not in _SUPPORTED_PLATFORM_MATRIX: - raise Exception( - f'Bazel not supported on this platform ({current_platform}); supported platforms are: [{", ".join(_SUPPORTED_PLATFORM_MATRIX)}]' - ) - - # === Build settings === - - # We don't support DLL generation on Windows, but need shared object generation in dynamic-sdk mode - # on linux. - linkstatic = env.GetOption("link-model") in ["auto", "static", "dynamic-sdk"] - - allocator = env.get("MONGO_ALLOCATOR", "tcmalloc-google") - - distro_or_os = normalized_os - if normalized_os == "linux": - distro_id = find_distro_match(f"{distro.name()} {distro.version()}") - if distro_id is not None: - distro_or_os = distro_id - - mongo_version = env["MONGO_VERSION"] - # For developer builds we don't want to pass things - # that might change between commits - if os.environ.get("CI") is None: - mongo_version = "8.1.0-alpha" - - bazel_internal_flags = [ - "--config=dbg", - f"--compiler_type={env.ToolchainName()}", - f'--opt={env.GetOption("opt")}', - f'--dbg={env.GetOption("dbg") == "on"}', - f'--debug_symbols={env.GetOption("debug-symbols") != "off"}', - f'--dbg_level={1 if env.GetOption("debug-symbols") == "minimal" else 2}', - f'--thin_lto={env.GetOption("thin-lto") is not None}', - f'--separate_debug={True if env.GetOption("separate-debug") == "on" else False}', - f'--libunwind={env.GetOption("use-libunwind")}', - f'--use_gdbserver={False if env.GetOption("gdbserver") is None else True}', - f'--spider_monkey_dbg={True if env.GetOption("spider-monkey-dbg") == "on" else False}', - f"--allocator={allocator}", - f'--use_lldbserver={False if env.GetOption("lldb-server") is None else True}', - f'--use_wait_for_debugger={False if env.GetOption("wait-for-debugger") is None else True}', - f'--use_ocsp_stapling={True 
if env.GetOption("ocsp-stapling") == "on" else False}', - f'--use_disable_ref_track={False if env.GetOption("disable-ref-track") is None else True}', - f'--use_wiredtiger={True if env.GetOption("wiredtiger") == "on" else False}', - f'--use_glibcxx_debug={env.GetOption("use-glibcxx-debug") is not None}', - f'--use_tracing_profiler={env.GetOption("use-tracing-profiler") == "on"}', - f'--build_otel={True if env["ENABLE_OTEL_BUILD"] else False}', - f'--use_libcxx={env.GetOption("libc++") is not None}', - f'--detect_odr_violations={env.GetOption("detect-odr-violations") is not None}', - f"--linkstatic={linkstatic}", - f'--shared_archive={env.GetOption("link-model") == "dynamic-sdk"}', - f'--linker={env.GetOption("linker")}', - f'--streams_release_build={env.GetOption("streams-release-build")}', - f'--disable_streams={env.GetOption("disable-streams")}', - f'--release={env.GetOption("release") == "on"}', - f'--build_enterprise={"MONGO_ENTERPRISE_VERSION" in env}', - f'--visibility_support={env.GetOption("visibility-support")}', - f'--disable_warnings_as_errors={"source" in env.GetOption("disable-warnings-as-errors")}', - f'--gcov={env.GetOption("gcov") is not None}', - f'--pgo_profile={env.GetOption("pgo-profile") is not None}', - f'--server_js={env.GetOption("server-js") == "on"}', - f'--ssl={"True" if env.GetOption("ssl") == "on" else "False"}', - f'--js_engine={env.GetOption("js-engine")}', - f'--use_sasl_client={env.GetOption("use-sasl-client") is not None}', - f'--skip_archive={env.GetOption("skip-archive") != "off" and normalized_os == "linux"}', - "--bes_keywords=scons_invocation", - "--define", - f"MONGO_VERSION={mongo_version}", - "--define", - f"MONGO_DISTMOD={env['MONGO_DISTMOD']}", - "--compilation_mode=dbg", # always build this compilation mode as we always build with -g - "--dynamic_mode=off", - "--fission=no", - ] - - if normalized_os == "linux" and os.environ.get("CI") is None: - print(""" - -------- ANNOUNCEMENT -------- - The SCons interface will soon be 
deprecated on the master branch, please try - your workflow with Bazel directly by visiting https://wiki.corp.mongodb.com/display/HGTC/Building+with+Bazel - - If your workflow does not work with Bazel now, please post in #ask-devprod-build with details. - - Build concurrency is now limited to 100 jobs, please switch over to calling Bazel directly to get full concurrency! - ------------------------------""") - bazel_internal_flags += ["--jobs=100"] - - # Timeout linking at 8 minutes to retry with a lower concurrency. - if os.environ.get("CI") is not None: - bazel_internal_flags += [ - "--link_timeout=True", - ] - - if not os.environ.get("USE_NATIVE_TOOLCHAIN"): - if ( - not is_local_execution(env) - and normalized_os == "linux" - and os.environ.get("evergreen_remote_exec") != "on" - and os.environ.get("CI") is not None - ): - cache_silo = "_cache_silo" - bazel_internal_flags += [ - f"--platforms=//bazel/platforms:{distro_or_os}_{normalized_arch}{cache_silo}", - f"--host_platform=//bazel/platforms:{distro_or_os}_{normalized_arch}{cache_silo}", - "--spawn_strategy=local", - "--jobs=auto", - "--remote_executor=", - ] - else: - bazel_internal_flags += [ - f"--platforms=//bazel/platforms:{distro_or_os}_{normalized_arch}", - f"--host_platform=//bazel/platforms:{distro_or_os}_{normalized_arch}", - ] - - if tc := env.get("MONGO_TOOLCHAIN_VERSION"): - bazel_internal_flags += [f"--//bazel/config:mongo_toolchain_version={tc}"] - - if "MONGO_ENTERPRISE_VERSION" in env: - enterprise_features = env.GetOption("enterprise_features") - if enterprise_features == "*": - bazel_internal_flags += ["--//bazel/config:enterprise_feature_all=True"] - else: - bazel_internal_flags += ["--//bazel/config:enterprise_feature_all=False"] - bazel_internal_flags += [ - f"--//bazel/config:enterprise_feature_{feature}=True" - for feature in enterprise_features.split(",") - ] - - if env.GetOption("gcov") is not None: - bazel_internal_flags += ["--collect_code_coverage"] - - if env["DWARF_VERSION"]: - 
bazel_internal_flags.append(f"--dwarf_version={env['DWARF_VERSION']}") - - if normalized_os == "macos": - bazel_internal_flags.append( - f"--developer_dir={os.environ.get('DEVELOPER_DIR', '/Applications/Xcode.app')}" - ) - minimum_macos_version = "11.0" - bazel_internal_flags.append(f"--macos_minimum_os={minimum_macos_version}") - - if normalized_os == "windows": - windows_temp_dir = "Z:/bazel_tmp" - if os.path.isdir(windows_temp_dir): - bazel_internal_flags.append(f"--action_env=TMP={windows_temp_dir}") - bazel_internal_flags.append(f"--action_env=TEMP={windows_temp_dir}") - else: - print( - f"Tried to use {windows_temp_dir} as TMP and TEMP environment variables but it did not exist. This will lead to a low cache hit rate." - ) - - http_client_option = env.GetOption("enable-http-client") - if http_client_option is not None: - if http_client_option in ["on", "auto"]: - bazel_internal_flags.append("--http_client=True") - elif http_client_option == "off": - bazel_internal_flags.append("--http_client=False") - - sanitizer_option = env.GetOption("sanitize") - - if sanitizer_option is not None and sanitizer_option != "": - options = sanitizer_option.split(",") - formatted_options = [f"--{_SANITIZER_MAP[opt]}=True" for opt in options] - bazel_internal_flags.extend(formatted_options) - - if normalized_arch not in ["arm64", "amd64"]: - bazel_internal_flags.append("--config=no-remote-exec") - elif os.environ.get("USE_NATIVE_TOOLCHAIN"): - print("Custom toolchain detected, using --config=local for bazel build.") - bazel_internal_flags.append("--config=local") - - if normalized_arch == "s390x": - # s390x systems don't have enough RAM to handle the default job count and will - # OOM unless we reduce it. - bazel_internal_flags.append("--jobs=16") - elif normalized_arch == "ppc64le": - # ppc64le builds are OOMing with default concurrency, but it's not clear if it's - # an issue with the bazel client itself or in the compiler. 
- bazel_internal_flags.append("--jobs=48") - - public_release = False - # Disable remote execution for public release builds. - if ( - env.GetOption("release") == "on" - and env.GetOption("remote-exec-release") == "off" - and ( - env.GetOption("cache-dir") is None - or env.GetOption("cache-dir") == "$BUILD_ROOT/scons/cache" - ) - ): - bazel_internal_flags.append("--config=public-release") - public_release = True - - evergreen_tmp_dir = env.GetOption("evergreen-tmp-dir") - if normalized_os == "macos" and evergreen_tmp_dir: - bazel_internal_flags.append(f"--sandbox_writable_path={evergreen_tmp_dir}") - - setup_max_retry_attempts() - - if not is_local_execution(env) and not public_release: - if not validate_remote_execution_certs(env): - sys.exit(1) - - if env.GetOption("bazel-dynamic-execution"): - try: - docker_detected = ( - subprocess.run(["docker", "info"], capture_output=True).returncode == 0 - ) - except Exception: - docker_detected = False - try: - podman_detected = ( - subprocess.run(["podman", "--help"], capture_output=True).returncode == 0 - ) - except Exception: - podman_detected = False - - if not docker_detected: - print("Not using dynamic scheduling because docker not detected ('docker info').") - elif docker_detected and podman_detected: - print( - "Docker and podman detected, disabling dynamic scheduling due to uncertainty in docker setup." - ) - else: - # TODO: SERVER-95737 fix docker issues on ubuntu24 - if distro_or_os == "ubuntu24": - print("Ubuntu24 is not supported to with dynamic scheduling. 
See SERVER-95737") - else: - remote_execution_containers = {} - container_file_path = "bazel/platforms/remote_execution_containers.bzl" - with open(container_file_path, "r") as f: - code = compile(f.read(), container_file_path, "exec") - exec(code, {}, remote_execution_containers) - - docker_image = remote_execution_containers["REMOTE_EXECUTION_CONTAINERS"][ - f"{distro_or_os}" - ]["container-url"] - - jobs = int(psutil.cpu_count() * 2) if os.environ.get("CI") else 400 - - bazel_internal_flags += [ - "--experimental_enable_docker_sandbox", - f"--experimental_docker_image={docker_image}", - "--experimental_docker_use_customized_images", - "--internal_spawn_scheduler", - "--dynamic_local_strategy=docker", - "--spawn_strategy=dynamic", - f"--jobs={jobs}", - ] - - Globals.bazel_base_build_command = ( - [ - os.path.abspath(Globals.bazel_executable), - "build", - ] - + bazel_internal_flags - + shlex.split(env.get("BAZEL_FLAGS", "")) - ) - - log_dir = env.Dir("$BUILD_ROOT/scons/bazel").path - os.makedirs(log_dir, exist_ok=True) - with open(os.path.join(log_dir, "bazel_command"), "w") as f: - f.write(" ".join(Globals.bazel_base_build_command)) - - # Store the bazel command line flags so scons can check if it should rerun the bazel targets - # if the bazel command line changes. 
- env["BAZEL_FLAGS_STR"] = bazel_internal_flags + shlex.split(env.get("BAZEL_FLAGS", "")) - - # We always use --compilation_mode debug for now as we always want -g, so assume -dbg location - out_dir_platform = "$TARGET_ARCH" - if normalized_os == "macos": - out_dir_platform = "darwin_arm64" if normalized_arch == "arm64" else "darwin_x86_64" - elif normalized_os == "windows": - out_dir_platform = "x64_windows" - elif normalized_os == "linux" and normalized_arch == "amd64": - # For c++ toolchains, bazel has some wierd behaviour where it thinks the default - # cpu is "k8" which is another name for x86_64 cpus, so its not wrong, but abnormal - out_dir_platform = "k8" - elif normalized_arch == "ppc64le": - out_dir_platform = "ppc" - - env["BAZEL_OUT_DIR"] = env.Dir(f"#/bazel-out/{out_dir_platform}-dbg/bin/").path.replace( - "\\", "/" - ) - - if env.get("__NINJA_NO") == "1": - return - - # ThinTarget builder is a special bazel target and should not be prefixed with Bazel in the builder - # name to exclude it from the other BazelBuilder's. This builder excludes any normal builder - # mechanisms like scanners or emitters and functions as a pass through for targets which exist - # only in bazel. It contains no dependency information and is not meant to fully function within - # the scons dependency graph. 
- env["BUILDERS"]["ThinTarget"] = SCons.Builder.Builder( - action=BazelCopyOutputsAction, - emitter=SCons.Builder.ListEmitter([bazel_target_emitter]), - ) - - cmd = ( - ["aquery"] - + env["BAZEL_FLAGS_STR"] - + [ - "mnemonic('StripDebuginfo|ExtractDebuginfo|Symlink|IdlcGenerator|TemplateRenderer', (outputs('bazel-out/.*/bin/src/.*', deps(@//src/...))))" - ] - ) - - try: - results = retry_call( - bazel_query_func, - [env, cmd.copy(), "discover ThinTargets"], - tries=Globals.max_retry_attempts, - exceptions=(subprocess.CalledProcessError,), - ) - except subprocess.CalledProcessError as ex: - print("ERROR: bazel thin targets query failed:") - print(ex.cmd) - print(ex.stdout) - print(ex.stderr) - print("Please ask about this in #ask-devprod-build slack channel.") - sys.exit(1) - - for action in results.stdout.split("\n\n"): - action = action.strip() - if not action: - continue - - lines = action.splitlines() - bazel_program = False - for line in lines: - if line.startswith(" Target: "): - target = line.replace(" Target: ", "").strip() - - if line.startswith(" Outputs: ["): - outputs = [ - line.strip() - for line in line.replace(" Outputs: [", "").replace("]", "").strip().split(",") - ] - - # TODO when we support test lists in bazel we can make BazelPrograms thin targets - bazel_program = handle_bazel_program_exception(env, target, outputs) - - scons_node_strs = [ - bazel_output_file.replace( - f"{env['BAZEL_OUT_DIR']}/src", env.Dir("$BUILD_DIR").path.replace("\\", "/") - ) - for bazel_output_file in outputs - ] - - if bazel_program: - for scons_node, bazel_output_file in zip(scons_node_strs, outputs): - Globals.scons2bazel_targets[scons_node.replace("\\", "/")] = { - "bazel_target": target, - "bazel_output": bazel_output_file.replace("\\", "/"), - } - continue - - scons_nodes = env.ThinTarget( - target=scons_node_strs, source=outputs, NINJA_GENSOURCE_INDEPENDENT=True - ) - env.NoCache(scons_nodes) - - for scons_node, bazel_output_file in zip(scons_nodes, outputs): - 
Globals.scons2bazel_targets[scons_node.path.replace("\\", "/")] = { - "bazel_target": target, - "bazel_output": bazel_output_file.replace("\\", "/"), - } - compiledb_nodes = env.ThinTarget( - target=env.Alias("compiledb"), - source="compile_commands.json", - NINJA_GENSOURCE_INDEPENDENT=True, - ) - env.NoCache(compiledb_nodes) - - Globals.scons2bazel_targets["compiledb"] = { - "bazel_target": "//:compiledb", - "bazel_output": "compile_commands.json", - } - - globals = Globals() - env["SCONS2BAZEL_TARGETS"] = globals - - def print_total_query_time(): - global total_query_time, total_queries - global time_auto_installing, count_of_auto_installing - global total_libdeps_linking_time, count_of_libdeps_links - bazel_debug( - f"Bazel integration spent {total_query_time} seconds in total performing {total_queries} queries." - ) - bazel_debug( - f"Bazel integration spent {time_auto_installing} seconds in total performing {count_of_auto_installing} auto_install." - ) - bazel_debug( - f"Bazel integration spent {total_libdeps_linking_time} seconds in total performing {count_of_libdeps_links} libdeps linking." 
- ) - - atexit.register(print_total_query_time) - - load_bazel_builders(env) - bazel_build_thread = threading.Thread( - target=bazel_build_thread_func, - args=(env, log_dir, env["VERBOSE"], env.GetOption("ninja") != "disabled"), - ) - bazel_build_thread.start() - - def wait_for_bazel(env): - nonlocal bazel_build_thread - Globals.waiting_on_bazel_flag = True - print("SCons done, switching to bazel build thread...") - bazel_build_thread.join() - if Globals.bazel_thread_terminal_output is not None: - Globals.bazel_thread_terminal_output.seek(0) - sys.stdout.write(Globals.bazel_thread_terminal_output.read()) - if not Globals.bazel_build_success: - raise SCons.Errors.BuildError( - errstr=f"Bazel Build failed with {Globals.bazel_build_exitcode}!", - status=Globals.bazel_build_exitcode, - exitstatus=1, - ) - - env.AddMethod(wait_for_bazel, "WaitForBazel") - - env.AddMethod(run_bazel_command, "RunBazelCommand") - env.AddMethod(add_libdeps_time, "AddLibdepsTime") - env.AddMethod(generate_bazel_info_for_ninja, "GenerateBazelInfoForNinja") - env.AddMethod(bazel_deps_check_query_cache, "CheckBazelDepsCache") - env.AddMethod(bazel_deps_add_query_cache, "AddBazelDepsCache") - env.AddMethod(bazel_deps_check_link_query_cache, "CheckBazelLinkDepsCache") - env.AddMethod(bazel_deps_add_link_query_cache, "AddBazelLinkDepsCache") - env.AddMethod(bazel_query_func, "RunBazelQuery") - env.AddMethod(ninja_bazel_builder, "NinjaBazelBuilder") - env.AddMethod(auto_install_bazel, "BazelAutoInstall") - env.AddMethod(auto_install_single_target, "BazelAutoInstallSingleTarget") - env.AddMethod(auto_archive_bazel, "BazelAutoArchive") diff --git a/site_scons/site_tools/libtool.py b/site_scons/site_tools/libtool.py deleted file mode 100644 index c636a387f0d..00000000000 --- a/site_scons/site_tools/libtool.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - - -def generate(env): - env["AR"] = "libtool" - env["ARCOM"] = "$AR -static -o $TARGET $ARFLAGS $SOURCES" - env["ARFLAGS"] = ["-s", "-no_warning_for_no_symbols"] - - # Disable running ranlib, since we added 's' above - env["RANLIBCOM"] = "" - env["RANLIBCOMSTR"] = "Skipping ranlib for libtool generated target $TARGET" - - -def exists(env): - return env.detect("libtool") diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py deleted file mode 100644 index 59c3cb54761..00000000000 --- a/site_scons/site_tools/mongo_benchmark.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Pseudo-builders for building and registering benchmarks. 
-""" - -from collections import defaultdict - -from site_scons.mongo import insort_wrapper - -BAZEL_BENCHMARK_TAGS = defaultdict(list) -BAZEL_BENCHMARK_TAGS["repl_bm"] = [] -BAZEL_BENCHMARK_TAGS["query_bm"] = [] -BAZEL_BENCHMARK_TAGS["bsoncolumn_bm"] = [] -BAZEL_BENCHMARK_TAGS["first_half_bm"] = [] -BAZEL_BENCHMARK_TAGS["second_half_bm"] = [] -BAZEL_BENCHMARK_TAGS["storage_bm"] = [] -BAZEL_BENCHMARK_TAGS["sharding_bm"] = [] -BAZEL_BENCHMARK_TAGS["sep_bm"] = [] - - -def exists(env): - return True - - -def get_bazel_benchmark_tags(env): - return BAZEL_BENCHMARK_TAGS - - -def build_benchmark(env, target, source, **kwargs): - bmEnv = env.Clone() - bmEnv.InjectThirdParty(libraries=["benchmark"]) - - if bmEnv.TargetOSIs("windows"): - bmEnv.Append(LIBS=["ShLwApi"]) - - libdeps = kwargs.get("LIBDEPS", bmEnv.get("LIBDEPS", [])).copy() - insort_wrapper(libdeps, "$BUILD_DIR/mongo/unittest/benchmark_main") - - kwargs["LIBDEPS"] = libdeps - benchmark_test_components = {"tests", "benchmarks"} - primary_component = kwargs.get("AIB_COMPONENT", bmEnv.get("AIB_COMPONENT", "")) - if primary_component and not primary_component.endswith("-benchmark"): - kwargs["AIB_COMPONENT"] += "-benchmark" - elif primary_component: - kwargs["AIB_COMPONENT"] = primary_component - else: - kwargs["AIB_COMPONENT"] = "benchmarks" - benchmark_test_components = {"tests"} - - if "AIB_COMPONENTS_EXTRA" in kwargs: - benchmark_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( - benchmark_test_components - ) - - kwargs["AIB_COMPONENTS_EXTRA"] = list(benchmark_test_components) - if ( - env.GetOption("consolidated-test-bins") == "on" - and "CONSOLIDATED_TARGET" in kwargs - and kwargs["CONSOLIDATED_TARGET"] - and "BAZEL_BENCHMARK_TAG" not in kwargs - ): - kwargs["AIB_COMPONENTS_EXTRA"] = ["benchmarks"] - return bmEnv.AddToConsolidatedTarget( - target, source, kwargs, "$BENCHMARK_ALIAS", "$BENCHMARK_LIST" - ) - - if "BAZEL_BENCHMARK_TAG" in kwargs: - kwargs["AIB_COMPONENT"] = 
kwargs["BAZEL_BENCHMARK_TAG"] - kwargs["AIB_COMPONENTS_EXTRA"] = [] - if not source: - result = bmEnv.BazelProgram(target, source, **kwargs) - else: - print(f"sources included in SCons in {target}, please move the target definition to bazel!") - exit(-1) - - if "BAZEL_BENCHMARK_TAG" in kwargs: - tag = kwargs["BAZEL_BENCHMARK_TAG"] - BAZEL_BENCHMARK_TAGS[tag] += [target] - bmEnv.RegisterTest(f"$BUILD_ROOT/{tag}.txt", result[0]) - bmEnv.Alias(f"install-{tag}", result) - else: - bmEnv.RegisterTest("$BENCHMARK_LIST", result[0]) - bmEnv.Alias("$BENCHMARK_ALIAS", result) - - return result - - -def generate(env): - for tag in BAZEL_BENCHMARK_TAGS: - env.TestList(f"$BUILD_ROOT/{tag}.txt", source=[]) - env.Alias(f"install-{tag}", f"$BUILD_ROOT/{tag}.txt") - env.TestList("$BENCHMARK_LIST", source=[]) - env.AddMethod(build_benchmark, "Benchmark") - env.Alias("$BENCHMARK_ALIAS", "$BENCHMARK_LIST") - env.AddMethod(get_bazel_benchmark_tags, "get_bazel_benchmark_tags") diff --git a/site_scons/site_tools/mongo_consolidated_targets.py b/site_scons/site_tools/mongo_consolidated_targets.py deleted file mode 100644 index 00314bd88e8..00000000000 --- a/site_scons/site_tools/mongo_consolidated_targets.py +++ /dev/null @@ -1,82 +0,0 @@ -import os -import sys - -CONSOLIDATED_TARGETS_MAP = {} - - -def create_consolidated_targets(env): - global CONSOLIDATED_TARGETS_MAP - for _, v in CONSOLIDATED_TARGETS_MAP.items(): - kwargs = v["kwargs"] - kwargs["LIBDEPS"] = sorted(list(set(kwargs["LIBDEPS"]))) - - result = v["env"].Program(f"$BUILD_DIR/{v['target']}", v["sources"], **kwargs) - v["env"].RegisterTest(v["list_alias"], result[0]) - v["env"].Alias(v["alias"], result) - v["env"].Alias("CONSOLIDATED_TARGET_" + v["target"] + "_ALIAS", result) - - -def add_to_consolidated_target(env, target, source, kwargs, test_alias, list_alias): - if not isinstance(target, list): - target = [target] - - if not isinstance(source, list): - source = [source] - - global CONSOLIDATED_TARGETS_MAP - consol_target = 
kwargs["CONSOLIDATED_TARGET"] - kwargs["AIB_COMPONENT"] = consol_target + "_AIB" - - build_dir = env.Dir("$BUILD_DIR").path.replace("\\", "/") - libdeps = [ - os.path.relpath(os.path.join(os.getcwd(), libdep), env.Dir("#").abspath).replace("\\", "/") - if not libdep.startswith("$BUILD_DIR") - else libdep - for libdep in kwargs["LIBDEPS"] - ] - libdeps = [ - "$BUILD_DIR/" + libdep[len("src/") :] if libdep.startswith("src/") else libdep - for libdep in libdeps - ] - libdeps = [ - "$BUILD_DIR" + libdep[len(build_dir) :] if libdep.startswith(build_dir) else libdep - for libdep in libdeps - ] - kwargs["LIBDEPS"] = libdeps - - if consol_target not in CONSOLIDATED_TARGETS_MAP: - CONSOLIDATED_TARGETS_MAP[consol_target] = { - "env": env, - "target": consol_target, - "sources": [ - os.path.relpath(os.path.join(os.getcwd(), s), env.Dir("#").abspath) for s in source - ], - "kwargs": kwargs, - "alias": test_alias, - "list_alias": list_alias, - } - else: - CONSOLIDATED_TARGETS_MAP[consol_target]["sources"].extend( - [os.path.relpath(os.path.join(os.getcwd(), s), env.Dir("#").abspath) for s in source] - ) - - for k, v in kwargs.items(): - if k not in ["LIBDEPS", "AIB_COMPONENT", "AIB_COMPONENTS_EXTRA", "CONSOLIDATED_TARGET"]: - print(f"ERROR: Consolidating target {target[0]} will drop information in {k}") - sys.exit(1) - else: - if isinstance(CONSOLIDATED_TARGETS_MAP[consol_target]["kwargs"][k], list): - CONSOLIDATED_TARGETS_MAP[consol_target]["kwargs"][k].extend(v) - else: - CONSOLIDATED_TARGETS_MAP[consol_target]["kwargs"][k] = v - - return env.Alias("CONSOLIDATED_TARGET_" + consol_target + "_ALIAS") - - -def exists(env): - return True - - -def generate(env): - env.AddMethod(create_consolidated_targets, "CreateConsolidatedTargets") - env.AddMethod(add_to_consolidated_target, "AddToConsolidatedTarget") diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py deleted file mode 100644 index 2c0aa1533bd..00000000000 --- 
a/site_scons/site_tools/mongo_integrationtest.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -""" -Pseudo-builders for building and registering integration tests. 
-""" - -from site_scons.mongo import insort_wrapper - - -def exists(env): - return True - - -def build_cpp_integration_test(env, target, source, **kwargs): - libdeps = kwargs.get("LIBDEPS", env.get("LIBDEPS", [])).copy() - insort_wrapper(libdeps, "$BUILD_DIR/mongo/unittest/integration_test_main") - - kwargs["LIBDEPS"] = libdeps - integration_test_components = {"tests", "integration-tests"} - - primary_component = kwargs.get("AIB_COMPONENT", env.get("AIB_COMPONENT", "")) - if primary_component and not primary_component.endswith("-test"): - kwargs["AIB_COMPONENT"] += "-test" - elif primary_component: - kwargs["AIB_COMPONENT"] = primary_component - else: - kwargs["AIB_COMPONENT"] = "integration-tests" - integration_test_components = {"tests"} - - if "AIB_COMPONENTS_EXTRA" in kwargs: - kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( - integration_test_components - ) - else: - kwargs["AIB_COMPONENTS_EXTRA"] = list(integration_test_components) - - # Integration tests are currently undecidable (see - # mongo_test_execution.py for details on undecidability) because - # we don't correctly express the dependency on the server - # components required to run them. 
- kwargs["UNDECIDABLE_TEST"] = True - - if not source: - result = env.BazelProgram(target, source, **kwargs) - else: - print(f"sources included in SCons in {target}, please move the target definition to bazel!") - exit(-1) - env.RegisterTest("$INTEGRATION_TEST_LIST", result[0]) - env.Alias("$INTEGRATION_TEST_ALIAS", result[0]) - - return result - - -def generate(env): - env.TestList("$INTEGRATION_TEST_LIST", source=[]) - env.AddMethod(build_cpp_integration_test, "CppIntegrationTest") - env.Alias("$INTEGRATION_TEST_ALIAS", "$INTEGRATION_TEST_LIST") diff --git a/site_scons/site_tools/mongo_libfuzzer.py b/site_scons/site_tools/mongo_libfuzzer.py deleted file mode 100644 index 75de1944a75..00000000000 --- a/site_scons/site_tools/mongo_libfuzzer.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Pseudo-builders for building and registering libfuzzer tests.""" - - -def exists(env): - return True - - -def libfuzzer_test_list_builder_action(env, target, source): - with open(str(target[0]), "w") as ofile: - for s in _libfuzzer_tests: - print("\t" + str(s)) - ofile.write("%s\n" % s) - - -def build_cpp_libfuzzer_test(env, target, source, **kwargs): - myenv = env.Clone() - if not myenv.IsSanitizerEnabled("fuzzer"): - return [] - - libdeps = kwargs.get("LIBDEPS", myenv.get("LIBDEPS", [])).copy() - kwargs["LIBDEPS"] = libdeps - kwargs["INSTALL_ALIAS"] = ["tests"] - sanitizer_option = "-fsanitize=fuzzer" - myenv.Prepend(LINKFLAGS=[sanitizer_option]) - - libfuzzer_test_components = {"tests", "fuzzertests"} - primary_component = kwargs.get("AIB_COMPONENT", env.get("AIB_COMPONENT", "")) - if primary_component and not primary_component.endswith("-fuzzertest"): - kwargs["AIB_COMPONENT"] = primary_component + "-fuzzertest" - elif primary_component: - kwargs["AIB_COMPONENT"] = primary_component - else: - kwargs["AIB_COMPONENT"] = "fuzzertests" - libfuzzer_test_components = {"tests"} - - if "AIB_COMPONENTS_EXTRA" in kwargs: - kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( - libfuzzer_test_components - ) - else: - kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components) - - # Fuzzer tests are inherenently undecidable (see - # mongo_test_execution.py for details on undecidability). 
- kwargs["UNDECIDABLE_TEST"] = True - - if not source: - result = myenv.BazelProgram(target, source, **kwargs) - else: - result = myenv.Program(target, source, **kwargs) - myenv.RegisterTest("$LIBFUZZER_TEST_LIST", result[0]) - myenv.Alias("$LIBFUZZER_TEST_ALIAS", result[0]) - - return result - - -def generate(env): - env.TestList("$LIBFUZZER_TEST_LIST", source=[]) - env.AddMethod(build_cpp_libfuzzer_test, "CppLibfuzzerTest") - env.Alias("$LIBFUZZER_TEST_ALIAS", "$LIBFUZZER_TEST_LIST") diff --git a/site_scons/site_tools/mongo_pretty_printer_tests.py b/site_scons/site_tools/mongo_pretty_printer_tests.py deleted file mode 100644 index 037afac7632..00000000000 --- a/site_scons/site_tools/mongo_pretty_printer_tests.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Pseudo-builders for building and registering tests for pretty printers.""" - -import os -import sys - -import SCons -from SCons.Script import Chmod - -not_building_already_warned = False - - -def print_warning(message: str): - global not_building_already_warned - if not not_building_already_warned: - not_building_already_warned = True - print(message) - - -def exists(env): - return True - - -ninja_fake_testlist = None - - -def build_pretty_printer_test(env, target, **kwargs): - if not isinstance(target, list): - target = [target] - - if env.GetOption("ninja") != "disabled": - return [] - - if env.GetOption("link-model") == "dynamic-sdk": - return [] - - gdb_bin = None - if env.get("GDB"): - gdb_bin = env.get("GDB") - elif env.ToolchainIs("gcc", "clang"): - # Always prefer v4 gdb, otherwise try anything in the path - gdb_bin = env.WhereIs("gdb", ["/opt/mongodbtoolchain/v4/bin"]) or env.WhereIs("gdb") - - if gdb_bin is None: - print_warning("Can't find gdb, not building pretty printer tests.") - return [] - - test_component = {"dist-test", "pretty-printer-tests-pyonly"} - - if "AIB_COMPONENTS_EXTRA" in kwargs: - kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(test_component) - else: - kwargs["AIB_COMPONENTS_EXTRA"] = list(test_component) - - test_program = kwargs.get("TEST_PROGRAM", ["$DESTDIR/$PREFIX/bin/mongod"]) - if isinstance(test_program, list): - test_program = test_program[0] - test_args = kwargs.get("TEST_ARGS", []) - gdb_test_script = env.File(target[0]).srcnode().abspath - - if not gdb_test_script: - env.FatalError( - f"{target[0]}: You must supply a gdb python script to use in the pretty printer test." 
- ) - - with open(gdb_test_script) as test_script: - verify_reqs_file = env.File("#site_scons/mongo/pip_requirements.py") - - gen_test_script = env.Textfile( - target=os.path.basename(gdb_test_script), - source=verify_reqs_file.get_contents().decode("utf-8").split("\n") - + [ - "import os,subprocess,sys,traceback", - "cmd = 'python -c \"import os,sys;print(os.linesep.join(sys.path).strip())\"'", - "paths = subprocess.check_output(cmd,shell=True).decode('utf-8').split()", - "sys.path.extend(paths)", - "symbols_loaded = False", - "try:", - " if gdb.objfiles()[0].lookup_global_symbol('main') is not None:", - " symbols_loaded = True", - "except Exception:", - " pass", - "if not symbols_loaded:", - r" gdb.write('Could not find main symbol, debug info may not be loaded.\n')", - r" gdb.write('TEST FAILED -- No Symbols.\\\n')", - " gdb.execute('quit 1', to_string=True)", - "else:", - r" gdb.write('Symbols loaded.\n')", - "gdb.execute('set confirm off')", - "gdb.execute('source .gdbinit')", - "try:", - " verify_requirements(executable='python3')", - "except MissingRequirements as ex:", - " print(ex)", - " print('continuing testing anyways!')", - "except Exception as exc:", - " print('ERROR: failed while verifying requirements.')", - " traceback.print_exc()", - " sys.exit(1)", - ] - + [line.rstrip() for line in test_script.readlines()], - ) - - gen_test_script_install = env.AutoInstall( - target="$PREFIX_BINDIR", - source=gen_test_script, - AIB_ROLE="runtime", - AIB_COMPONENT="pretty-printer-tests", - AIB_COMPONENTS_EXTRA=kwargs["AIB_COMPONENTS_EXTRA"], - ) - - pretty_printer_test_launcher = env.Substfile( - target=f"pretty_printer_test_launcher_{target[0]}", - source="#/src/mongo/util/pretty_printer_test_launcher.py.in", - SUBST_DICT={ - "@VERBOSE@": str(env.Verbose()), - "@pretty_printer_test_py@": gen_test_script_install[0].path, - "@gdb_path@": gdb_bin, - "@pretty_printer_test_program@": env.File(test_program).path, - "@test_args@": '["' - + '", "'.join([env.subst(arg, 
target=target) for arg in test_args]) - + '"]', - }, - AIB_ROLE="runtime", - AIB_COMPONENT="pretty-printer-tests", - AIB_COMPONENTS_EXTRA=kwargs["AIB_COMPONENTS_EXTRA"], - ) - env.Depends( - pretty_printer_test_launcher[0], - ([] if env.get("GDB_PPTEST_PYONLY") else [test_program]) + [gen_test_script_install], - ) - env.AddPostAction( - pretty_printer_test_launcher[0], Chmod(pretty_printer_test_launcher[0], "ugo+x") - ) - - pretty_printer_test_launcher_install = env.AutoInstall( - target="$PREFIX_BINDIR", - source=pretty_printer_test_launcher, - AIB_ROLE="runtime", - AIB_COMPONENT="pretty-printer-tests", - AIB_COMPONENTS_EXTRA=kwargs["AIB_COMPONENTS_EXTRA"], - ) - - def new_scanner(node, env, path=()): - source_binary = getattr( - env.File(env.get("TEST_PROGRAM")).attributes, "AIB_INSTALL_FROM", None - ) - if source_binary: - debug_files = getattr(env.File(source_binary).attributes, "separate_debug_files", None) - if debug_files: - if debug_files: - installed_debug_files = getattr( - env.File(debug_files[0]).attributes, "AIB_INSTALLED_FILES", None - ) - if installed_debug_files: - if env.Verbose(): - print( - f"Found and installing pretty_printer_test {node} test_program {env.File(env.get('TEST_PROGRAM'))} debug file {installed_debug_files[0]}" - ) - return installed_debug_files - if env.Verbose(): - print(f"Did not find separate debug files for pretty_printer_test {node}") - return [] - - scanner = SCons.Scanner.Scanner(function=new_scanner) - - run_test = env.Command( - target="+" + os.path.splitext(os.path.basename(gdb_test_script))[0], - source=pretty_printer_test_launcher_install, - action=str(pretty_printer_test_launcher_install[0]), - TEST_PROGRAM=test_program, - target_scanner=scanner, - ) - env.Pseudo(run_test) - env.Alias("+" + os.path.splitext(os.path.basename(gdb_test_script))[0], run_test) - env.Depends( - pretty_printer_test_launcher_install, - ([] if env.get("GDB_PPTEST_PYONLY") else [test_program]) + [gen_test_script_install], - ) - - 
env.RegisterTest("$PRETTY_PRINTER_TEST_LIST", pretty_printer_test_launcher_install[0]) - env.Alias("$PRETTY_PRINTER_TEST_ALIAS", pretty_printer_test_launcher_install[0]) - env.Alias("+pretty-printer-tests", run_test) - return run_test - - -def generate(env): - global ninja_fake_testlist - if env.GetOption("ninja") != "disabled" and ninja_fake_testlist is None: - print_warning("Can't run pretty printer tests with ninja.") - ninja_fake_testlist = env.Command( - "$PRETTY_PRINTER_TEST_LIST", - __file__, - "type nul >>$TARGET" if sys.platform == "win32" else "touch $TARGET", - ) - else: - env.TestList("$PRETTY_PRINTER_TEST_LIST", source=[]) - - env.AddMethod(build_pretty_printer_test, "PrettyPrinterTest") - alias = env.Alias("$PRETTY_PRINTER_TEST_ALIAS", "$PRETTY_PRINTER_TEST_LIST") - env.Alias("+pretty-printer-tests", alias) diff --git a/site_scons/site_tools/mongo_test_execution.py b/site_scons/site_tools/mongo_test_execution.py deleted file mode 100644 index 9a5fd5fe51f..00000000000 --- a/site_scons/site_tools/mongo_test_execution.py +++ /dev/null @@ -1,213 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import os - -import auto_install_binaries -import SCons -from SCons.Node.Alias import default_ans - -_proof_scanner_cache_key = "proof_scanner_cache" -_associated_proof = "associated_proof_key" - - -def proof_generator_command_scanner_func(node, env, path): - results = getattr(node.attributes, _proof_scanner_cache_key, None) - if results is not None: - return results - results = env.GetTransitivelyInstalledFiles(node) - setattr(node.attributes, _proof_scanner_cache_key, results) - return results - - -proof_generator_command_scanner = SCons.Scanner.Scanner( - function=proof_generator_command_scanner_func, - path_function=None, - recursive=True, -) - - -def auto_prove_task(env, component, role): - entry = auto_install_binaries.get_alias_map_entry(env, component, role) - return [ - getattr(f.attributes, _associated_proof) - for f in entry.files - if hasattr(f.attributes, _associated_proof) - ] - - -def generate_test_execution_aliases(env, test): - installed = [test] - if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test): - installed = env.GetAutoInstalledFiles(test) - - target_name = os.path.basename(installed[0].path) - - test_env = env.Clone() - test_env["ENV"]["TMPDIR"] = test_env.Dir("$LOCAL_TMPDIR").abspath - target_command = test_env.Command( - target=f"#+{target_name}", - source=installed[0], - action="$( $ICERUN $) ${SOURCES[0]} $UNITTEST_FLAGS", - NINJA_POOL="console", - ) - env.Pseudo(target_command) - env.Alias("test-execution-aliases", target_command) - - try: - scons_node = env.File(os.path.join(os.getcwd(), str(test))) - root_path = scons_node.abspath.replace("\\", "/").replace( - env.Dir("#").abspath.replace("\\", "/") + "/", "" - ) - if 
root_path.startswith("src"): - root_path = env.Dir("$BUILD_DIR").path + root_path[3:] - root_path = root_path.replace("\\", "/") - sources_list = env["SCONS2BAZEL_TARGETS"].bazel_sources_file(root_path) - sources = [] - with open(os.path.join(env.Dir("#").abspath, sources_list)) as f: - for s in f.readlines(): - if s.strip().endswith(".cpp"): - sources.append(env.File(s.strip().replace("//", "#").replace(":", "/"))) - except KeyError: - sources = test.sources - - for source in sources: - source_base_name = os.path.basename(source.get_path()) - - # Strip suffix - dot_idx = source_base_name.rfind(".") - suffix = source_base_name[dot_idx:] - if suffix in env["TEST_EXECUTION_SUFFIX_DENYLIST"]: - continue - - source_name = source_base_name[:dot_idx] - - # We currently create two types of commands: legacy and verbose - # ex legacy command: cancelable_operation_context_test - # ex verbose command: db_unittest_test_cancelable_operation_context_test - # i.e. Verbose incorporates the name of the unittest binary, while - # legacy only has the source file name. - # We always create the verbose command, but we only create the legacy - # command if there isn't a conflict between the target_name and - # source_name. 
Legacy commands must be unique - verbose_source_command = test_env.Command( - target=f"#+{target_name}-{source_name}", - source=installed[0], - action="$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS", - TEST_SOURCE_FILE_NAME=source_name, - NINJA_POOL="console", - ) - env.Pseudo(verbose_source_command) - env.Alias("test-execution-aliases", verbose_source_command) - - if target_name == source_name: - continue - - if default_ans.lookup(f"+{source_name}") is not None: - raise SCons.Errors.BuildError( - str(verbose_source_command[0]), - f"There exists multiple unittests with a source file named {source_name}: {source.abspath} and {env.Alias(f'+{source_name}')[0].children()[1].abspath}", - ) - env.Alias(f"+{source_name}", [verbose_source_command, source]) - - proof_generator_command = test_env.Command( - target=[ - "${SOURCE}.log", - "${SOURCE}.status", - ], - source=installed[0], - action=SCons.Action.Action("$PROOF_GENERATOR_COMMAND", "$PROOF_GENERATOR_COMSTR"), - source_scanner=proof_generator_command_scanner, - ) - - # We assume tests are provable by default, but some tests may not - # be. Such tests can be tagged with UNDECIDABLE_TEST=True. If a - # test isn't provable, we disable caching its results and require - # it to be always rebuilt. - if installed[0].env.get("UNDECIDABLE_TEST", False): - env.NoCache(proof_generator_command) - env.AlwaysBuild(proof_generator_command) - - proof_analyzer_command = test_env.Command( - target="${SOURCES[1].base}.proof", - source=proof_generator_command, - action=SCons.Action.Action("$PROOF_ANALYZER_COMMAND", "$PROOF_ANALYZER_COMSTR"), - ) - - proof_analyzer_alias = env.Alias( - f"prove-{target_name}", - proof_analyzer_command, - ) - - setattr(installed[0].attributes, _associated_proof, proof_analyzer_alias) - - # TODO: Should we enable proof at the file level? 
- - -def exists(env): - return True - - -def generate(env): - # Used for Ninja generator to collect the test execution aliases - env.Alias("test-execution-aliases") - env.AddMethod(generate_test_execution_aliases, "GenerateTestExecutionAliases") - - env["TEST_EXECUTION_SUFFIX_DENYLIST"] = env.get( - "TEST_EXECUTION_SUFFIX_DENYLIST", - [".in"], - ) - - env.AppendUnique( - AIB_TASKS={ - "prove": (auto_prove_task, False), - } - ) - - # TODO: Should we have some sort of prefix_xdir for the output location for these? Something like - # $PREFIX_VARCACHE and which in our build is pre-populated to $PREFIX/var/cache/mongo or similar? - - if env["PLATFORM"] == "win32": - env["PROOF_GENERATOR_COMMAND"] = ( - "$( $ICERUN $) ${SOURCES[0]} $UNITTEST_FLAGS > ${TARGETS[0]} 2>&1 & call echo %^errorlevel% > ${TARGETS[1]}" - ) - - # Keeping this here for later, but it only works if cmd.exe is - # launched with /V, and SCons doesn't do that. - # - # env["PROOF_ANALYZER_COMMAND"] = "set /p nextErrorLevel=<${SOURCES[1]} & if \"!nextErrorLevel!\"==\"0 \" (type nul > $TARGET) else (exit 1)" - # - # Instead, use grep! I mean findstr. - env["PROOF_ANALYZER_COMMAND"] = ( - "findstr /B /L 0 ${SOURCES[1]} && (type nul > $TARGET) || (exit 1)" - ) - else: - env["PROOF_GENERATOR_COMMAND"] = ( - "$( $ICERUN $) ${SOURCES[0]} $UNITTEST_FLAGS > ${TARGETS[0]} 2>&1 ; echo $? > ${TARGETS[1]}" - ) - env["PROOF_ANALYZER_COMMAND"] = ( - "if $$(exit $$(cat ${SOURCES[1]})) ; then touch $TARGET ; else cat ${SOURCES[0]}; exit 1 ; fi" - ) - - # TODO: Condition this on verbosity - env["PROOF_GENERATOR_COMSTR"] = "Running test ${SOURCES[0]}" - env["PROOF_ANALYZER_COMSTR"] = "Analyzing test results in ${SOURCES[1]}" diff --git a/site_scons/site_tools/mongo_test_list.py b/site_scons/site_tools/mongo_test_list.py deleted file mode 100644 index bc9e5d1de30..00000000000 --- a/site_scons/site_tools/mongo_test_list.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Pseudo-builders for building test lists for Resmoke""" - -from collections import defaultdict - -import SCons - -TEST_REGISTRY = defaultdict(list) - - -def register_test(env, file, test, generate_alias=True): - """Register test into the dictionary of tests for file_name""" - test_path = test - if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test): - test_path = env.GetAutoInstalledFiles(test)[0] - - if SCons.Util.is_String(file): - file = env.File(file) - - env.Depends(file, test_path) - file_name = file.path - TEST_REGISTRY[file_name].append(test_path) - if generate_alias: - env.GenerateTestExecutionAliases(test) - - -def test_list_builder_action(env, target, source): - """Build a test list used by resmoke.py to execute binary tests.""" - if SCons.Util.is_String(target[0]): - filename = env.subst(target[0]) - else: - filename = target[0].path - - source = [env.File(s).path if SCons.Util.is_String(s) else s.path for s in source] - - with open(filename, "w") as ofile: - tests = TEST_REGISTRY[filename] - if source: - tests.extend(source) - - for s in tests: - ofile.write("{}\n".format(str(s))) - - -TEST_LIST_BUILDER = SCons.Builder.Builder( - action=SCons.Action.FunctionAction( - test_list_builder_action, - {"cmdstr": "Generating $TARGETS"}, - ) -) - - -def exists(env): - return True - - -def generate(env): - env["MONGO_TEST_REGISTRY"] = TEST_REGISTRY - env.Append(BUILDERS={"TestList": TEST_LIST_BUILDER}) - env.AddMethod(register_test, "RegisterTest") diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py deleted file mode 100644 index a0c66f6b61b..00000000000 --- a/site_scons/site_tools/mongo_unittest.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Pseudo-builders for building and registering unit tests.""" - -import json -import os - -from buildscripts.unittest_grouper import find_group -from site_scons.mongo import insort_wrapper - - -def exists(env): - return True - - -TEST_GROUPS = [] - - -def build_cpp_unit_test(env, target, source, **kwargs): - if not isinstance(target, list): - target = [target] - - for t in target: - if not t.endswith("_test"): - env.ConfError(f"CppUnitTest target `{t}' does not end in `_test'") - - scons_node = env.File(os.path.join(os.getcwd(), str(target[0]))) - root_path = scons_node.abspath.replace("\\", "/").replace( - env.Dir("#").abspath.replace("\\", "/") + "/", "" - ) - if root_path.startswith(env.Dir("$BUILD_DIR").path.replace("\\", "/")): - root_path = "src" + root_path[len(env.Dir("$BUILD_DIR").path.replace("\\", "/")) :] - root_path = root_path.replace("\\", "/") - - test_group = list(json.loads(find_group([root_path])).keys())[0] - - if test_group not in TEST_GROUPS: - TEST_GROUPS.append(test_group) - env.TestList(f"$BUILD_ROOT/{test_group}_group_unittests.txt", source=[]) - env.Alias( - f"install-{test_group}_group_unittests", - f"$BUILD_ROOT/{test_group}_group_unittests.txt", - ) - - if not kwargs.get("UNITTEST_HAS_CUSTOM_MAINLINE", False): - libdeps = kwargs.get("LIBDEPS", env.get("LIBDEPS", [])).copy() - insort_wrapper(libdeps, "$BUILD_DIR/mongo/unittest/unittest_main") - kwargs["LIBDEPS"] = libdeps - - unit_test_components = {"tests", "unittests"} - primary_component = kwargs.get("AIB_COMPONENT", env.get("AIB_COMPONENT", "")) - if primary_component and not primary_component.endswith("-test"): - kwargs["AIB_COMPONENT"] = primary_component + "-test" - elif primary_component: - kwargs["AIB_COMPONENT"] = primary_component - else: - kwargs["AIB_COMPONENT"] = f"{test_group}_group_unittests" - - if "AIB_COMPONENTS_EXTRA" in kwargs: - kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( - unit_test_components - ) - else: - 
kwargs["AIB_COMPONENTS_EXTRA"] = list(unit_test_components) - - if "PROVE_ALIASES" in kwargs: - for alias in kwargs.get("PROVE_ALIASES"): - env.Alias(f"prove-{alias}", env.Alias(f"prove-{target[0]}")) - - if not source: - result = env.BazelProgram(target, source, **kwargs) - else: - print(f"sources included in SCons in {target}, please move the target definition to bazel!") - exit(-1) - - env.RegisterTest("$UNITTEST_LIST", result[0]) - env.Alias("$UNITTEST_ALIAS", result[0]) - - env.RegisterTest( - f"$BUILD_ROOT/{test_group}_group_unittests.txt", result[0], generate_alias=False - ) - install_file = env.GetAutoInstalledFiles(result[0]) - if install_file: - debug_file = getattr(install_file[0].attributes, "separate_debug_files") - env.Alias(f"install-{test_group}_group_unittests", env.GetAutoInstalledFiles(result[0])) - env.Alias(f"install-{test_group}_group_unittests-debug", debug_file) - return result - - -def generate(env): - env.TestList("$UNITTEST_LIST", source=[]) - env.AddMethod(build_cpp_unit_test, "CppUnitTest") - env.Alias("$UNITTEST_ALIAS", "$UNITTEST_LIST") diff --git a/site_scons/site_tools/mongo_workload_simulator.py b/site_scons/site_tools/mongo_workload_simulator.py deleted file mode 100644 index caccf49eae1..00000000000 --- a/site_scons/site_tools/mongo_workload_simulator.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2023 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -"""Pseudo-builders for building workload simulators.""" - -from site_scons.mongo import insort_wrapper - - -def exists(env): - return True - - -def build_workload_simulator(env, target, source, **kwargs): - if not isinstance(target, list): - target = [target] - - for t in target: - if not t.endswith("_simulator"): - env.ConfError(f"WorkloadSimulator target `{t}' does not end in `_simulator'") - - libdeps = kwargs.get("LIBDEPS", env.get("LIBDEPS", [])).copy() - insort_wrapper(libdeps, "$BUILD_DIR/mongo/tools/workload_simulation/simulator_main") - kwargs["LIBDEPS"] = libdeps - - if not source: - result = env.BazelProgram(target, source, **kwargs) - else: - result = env.Program(target, source, **kwargs) - - return result - - -def generate(env): - env.AddMethod(build_workload_simulator, "WorkloadSimulator") diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py deleted file mode 100644 index d81fe79b3b0..00000000000 --- a/site_scons/site_tools/ninja.py +++ /dev/null @@ -1,1996 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -"""Generate build.ninja files from SCons aliases.""" - -import importlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import textwrap -from collections import OrderedDict -from glob import glob -from os.path import join as joinpath -from os.path import splitext - -import SCons -from SCons.Action import _string_from_cmd_list, get_default_ENV -from SCons.Script import COMMAND_LINE_TARGETS -from SCons.Util import flatten_sequence, is_List - -NINJA_STATE = None -NINJA_SYNTAX = "NINJA_SYNTAX" -NINJA_RULES = "__NINJA_CUSTOM_RULES" -NINJA_POOLS = "__NINJA_CUSTOM_POOLS" -NINJA_CUSTOM_HANDLERS = "__NINJA_CUSTOM_HANDLERS" -NINJA_BUILD = "NINJA_BUILD" -NINJA_WHEREIS_MEMO = {} -NINJA_STAT_MEMO = {} - -__NINJA_RULE_MAPPING = {} - -# These are the types that get_command can do something with -COMMAND_TYPES = ( - SCons.Action.CommandAction, - SCons.Action.CommandGeneratorAction, -) - - -def _install_action_function(_env, node): - """Install files using the install or copy commands""" - return { - "outputs": get_outputs(node), - "rule": "INSTALL", - "inputs": [get_path(src_file(s)) for s in node.sources], - "implicit": get_dependencies(node), - "variables": {"precious": node.precious}, - } - - -def _mkdir_action_function(env, node): - return { - "outputs": get_outputs(node), - "rule": "CMD", - # implicit explicitly omitted, we translate these so they can be - # used by anything that depends on these but commonly this is - # hit with a node that will depend on all of the fake - # srcnode's that SCons will never give us a rule for leading - # to an invalid ninja file. 
- "variables": { - # On Windows mkdir "-p" is always on - "cmd": "mkdir {args}".format( - args=" ".join(get_outputs(node)) + " & exit /b 0" - if env["PLATFORM"] == "win32" - else "-p " + " ".join(get_outputs(node)), - ), - "variables": {"precious": node.precious}, - }, - } - - -def _lib_symlink_action_function(_env, node): - """Create shared object symlinks if any need to be created""" - symlinks = getattr(getattr(node, "attributes", None), "shliblinks", None) - - if not symlinks or symlinks is None: - return None - - outputs = [link.get_dir().rel_path(linktgt) for link, linktgt in symlinks] - inputs = [link.get_path() for link, _ in symlinks] - - return { - "outputs": outputs, - "inputs": inputs, - "rule": "SYMLINK", - "implicit": get_dependencies(node), - "variables": {"precious": node.precious}, - } - - -def is_valid_dependent_node(node): - """ - Return True if node is not an alias or is an alias that has children - - This prevents us from making phony targets that depend on other - phony targets that will never have an associated ninja build - target. - - We also have to specify that it's an alias when doing the builder - check because some nodes (like src files) won't have builders but - are valid implicit dependencies. 
- """ - if isinstance(node, SCons.Node.Alias.Alias): - return node.children() - - if not node.env: - return True - - return not node.env.get("NINJA_SKIP") - - -def alias_to_ninja_build(node): - """Convert an Alias node into a Ninja phony target""" - return { - "outputs": get_outputs(node), - "rule": "phony", - "implicit": [get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)], - } - - -def get_order_only(node): - """Return a list of order only dependencies for node.""" - if node.prerequisites is None: - return [] - return [ - get_path(src_file(prereq)) - for prereq in node.prerequisites - if is_valid_dependent_node(prereq) - ] - - -def get_dependencies(node, skip_sources=False): - """Return a list of dependencies for node.""" - if skip_sources: - return [ - get_path(src_file(child)) - for child in node.children() - if child not in node.sources and is_valid_dependent_node(child) - ] - return [ - get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child) - ] - - -def get_inputs(node, skip_unknown_types=False): - """ - Collect the Ninja inputs for node. - - If the given node has inputs which can not be converted into something - Ninja can process, this will throw an exception. Optionally, those nodes - that are not processable can be skipped as inputs with the - skip_unknown_types keyword arg. - """ - executor = node.get_executor() - if executor is not None: - inputs = executor.get_all_sources() - else: - inputs = node.sources - - # Some Nodes (e.g. Python.Value Nodes) won't have files associated. We allow these to be - # optionally skipped to enable the case where we will re-invoke SCons for things - # like TEMPLATE. 
Otherwise, we have no direct way to express the behavior for such - # Nodes in Ninja, so we raise a hard error - ninja_nodes = [] - for input_node in inputs: - if isinstance(input_node, (SCons.Node.FS.Base, SCons.Node.Alias.Alias)): - ninja_nodes.append(input_node) - else: - if skip_unknown_types: - continue - raise Exception( - "Can't process {} node '{}' as an input for '{}'".format( - type(input_node), - str(input_node), - str(node), - ), - ) - - # convert node items into raw paths/aliases for ninja - return [get_path(src_file(o)) for o in ninja_nodes] - - -def get_outputs(node): - """Collect the Ninja outputs for node.""" - executor = node.get_executor() - if executor is not None: - outputs = executor.get_all_targets() - else: - if hasattr(node, "target_peers"): - outputs = node.target_peers - else: - outputs = [node] - - outputs = [get_path(o) for o in outputs] - - return outputs - - -def generate_depfile(env, node, dependencies): - """ - Ninja tool function for writing a depfile. The depfile should include - the node path followed by all the dependent files in a makefile format. - - dependencies arg can be a list or a subst generator which returns a list. - """ - - depfile = os.path.join(get_path(env["NINJA_BUILDDIR"]), str(node) + ".depfile") - - # subst_list will take in either a raw list or a subst callable which generates - # a list, and return a list of CmdStringHolders which can be converted into raw strings. - # If a raw list was passed in, then scons_list will make a list of lists from the original - # values and even subst items in the list if they are substitutable. Flatten will flatten - # the list in that case, to ensure for either input we have a list of CmdStringHolders. - deps_list = env.Flatten(env.subst_list(dependencies)) - - # Now that we have the deps in a list as CmdStringHolders, we can convert them into raw strings - # and make sure to escape the strings to handle spaces in paths. 
We also will sort the result - # keep the order of the list consistent. - escaped_depends = sorted([dep.escape(env.get("ESCAPE", lambda x: x)) for dep in deps_list]) - depfile_contents = str(node) + ": " + " ".join(escaped_depends) - - need_rewrite = False - try: - with open(depfile, "r") as f: - need_rewrite = f.read() != depfile_contents - except FileNotFoundError: - need_rewrite = True - - if need_rewrite: - os.makedirs(os.path.dirname(depfile) or ".", exist_ok=True) - with open(depfile, "w") as f: - f.write(depfile_contents) - - -def _extract_cmdstr_for_list_action(ninja_build_list): - cmdline = "" - for cmd in ninja_build_list: - # Occasionally a command line will expand to a - # whitespace only string (i.e. ' '). Which is not a - # valid command but does not trigger the empty command - # condition if not cmdstr. So here we trim the whitespace - # to make strings like the above become empty strings and - # so they will be skipped. - cmdstr = cmd["variables"]["cmd"].strip() - if not cmdstr: - continue - - # Skip duplicate commands - if cmdstr in cmdline: - continue - - if cmdline: - cmdline += " && " - - cmdline += cmdstr - - # Remove all preceding and proceeding whitespace - cmdline = cmdline.strip() - - return cmdline - - -class SConsToNinjaTranslator: - """Translates SCons Actions into Ninja build objects.""" - - def __init__(self, env): - self.env = env - self.func_handlers = { - # Skip conftest builders - "_createSource": ninja_noop, - # SCons has a custom FunctionAction that just makes sure the - # target isn't static. We let the commands that ninja runs do - # this check for us. - "SharedFlagChecker": ninja_noop, - # The install builder is implemented as a function action. 
- "installFunc": _install_action_function, - "MkdirFunc": _mkdir_action_function, - "LibSymlinksActionFunction": _lib_symlink_action_function, - } - - self.loaded_custom = False - - def action_to_ninja_build(self, node, action=None): - """Generate build arguments dictionary for node.""" - if not self.loaded_custom: - self.func_handlers.update(self.env[NINJA_CUSTOM_HANDLERS]) - self.loaded_custom = True - - if node.builder is None: - return None - - if action is None: - action = node.builder.action - - if node.env and node.env.get("NINJA_SKIP"): - return None - - build = {} - env = node.env if node.env else self.env - - # Ideally this should never happen, and we do try to filter - # Ninja builders out of being sources of ninja builders but I - # can't fix every DAG problem so we just skip ninja_builders - # if we find one - if node.builder == self.env["BUILDERS"]["Ninja"]: - build = None - elif isinstance(action, SCons.Action.FunctionAction): - build = self.handle_func_action(node, action) - elif isinstance(action, SCons.Action.LazyAction): - action = action._generate_cache(env) - build = self.action_to_ninja_build(node, action=action) - elif isinstance(action, SCons.Action.ListAction): - build = self.handle_list_action(node, action) - elif isinstance(action, COMMAND_TYPES): - build = get_command(env, node, action) - else: - raise Exception("Got an unbuildable ListAction for: {}".format(str(node))) - - if build is not None: - build["order_only"] = get_order_only(node) - - if "conftest" not in str(node): - node_callback = getattr(node.attributes, "ninja_build_callback", None) - if callable(node_callback): - node_callback(env, node, build) - - if build is not None and node.precious: - if not build.get("variables"): - build["variables"] = {} - build["variables"]["precious"] = node.precious - - return build - - def handle_func_action(self, node, action): - """Determine how to handle the function action.""" - name = action.function_name() - # This is the name given by 
the Subst/Textfile builders. So return the - # node to indicate that SCons is required. We skip sources here because - # dependencies don't really matter when we're going to shove these to - # the bottom of ninja's DAG anyway and Textfile builders can have text - # content as their source which doesn't work as an implicit dep in - # ninja. We suppress errors on input Nodes types that we cannot handle - # since we expect that the re-invocation of SCons will handle dependency - # tracking for those Nodes and their dependents. - if name == "_action": - return { - "rule": "TEMPLATE", - "outputs": get_outputs(node), - "inputs": get_inputs(node, skip_unknown_types=True), - "implicit": get_dependencies(node, skip_sources=True), - } - - handler = self.func_handlers.get(name, None) - if handler is not None: - return handler(node.env if node.env else self.env, node) - - raise Exception( - "Found unhandled function action {}, " - " generating scons command to build\n" - "Note: this is less efficient than Ninja," - " you can write your own ninja build generator for" - " this function using NinjaRegisterFunctionHandler".format(name) - ) - - def handle_list_action(self, node, action): - """TODO write this comment""" - results = [ - self.action_to_ninja_build(node, action=act) for act in action.list if act is not None - ] - results = [result for result in results if result is not None and result["outputs"]] - if not results: - return None - - # No need to process the results if we only got a single result - if len(results) == 1: - return results[0] - - all_outputs = list({output for build in results for output in build["outputs"]}) - dependencies = list({dep for build in results for dep in build["implicit"]}) - - if all([result["rule"] == "CMD" for result in results]): - cmdline = _extract_cmdstr_for_list_action(results) - - # Make sure we didn't generate an empty cmdline - if cmdline: - env = node.env if node.env else self.env - sources = [get_path(src_file(s)) for s in 
node.sources] - - ninja_build = { - "outputs": all_outputs, - "rule": "CMD", - "variables": { - "cmd": cmdline, - "env": get_command_env(env, all_outputs, sources), - }, - "implicit": dependencies, - } - - if node.env and node.env.get("NINJA_POOL", None) is not None: - ninja_build["pool"] = node.env["pool"] - - return ninja_build - - elif results[0]["rule"] == "LINK" and all( - [result["rule"] == "CMD" for result in results[1:]] - ): - cmdline = _extract_cmdstr_for_list_action(results[1:]) - - # Make sure we didn't generate an empty cmdline - if cmdline: - env = node.env if node.env else self.env - sources = [get_path(src_file(s)) for s in node.sources] - - ninja_build = results[0] - - ninja_build.update( - { - "outputs": all_outputs, - "rule": "LINK_CHAINED_CMD", - "implicit": dependencies, - } - ) - - ninja_build["variables"].update( - { - "cmd": cmdline, - "env": get_command_env(env, all_outputs, sources), - } - ) - - if node.env and node.env.get("NINJA_POOL", None) is not None: - ninja_build["pool"] = node.env["pool"] - - return ninja_build - - elif results[0]["rule"] == "phony": - return { - "outputs": all_outputs, - "rule": "phony", - "implicit": dependencies, - } - - raise Exception("Unhandled list action with rule: " + results[0]["rule"]) - - -class NinjaState: - """Maintains state of Ninja build system as it's translated from SCons.""" - - def __init__(self, env, ninja_syntax): - self.env = env - self.writer_class = ninja_syntax.Writer - self.__generated = False - self.translator = SConsToNinjaTranslator(env) - self.generated_suffixes = env.get("NINJA_GENERATED_SOURCE_SUFFIXES", []) - - # List of generated builds that will be written at a later stage - self.builds = dict() - - # List of targets for which we have generated a build. This - # allows us to take multiple Alias nodes as sources and to not - # fail to build if they have overlapping targets. 
- self.built = set() - - # SCons sets this variable to a function which knows how to do - # shell quoting on whatever platform it's run on. Here we use it - # to make the SCONS_INVOCATION variable properly quoted for things - # like CCFLAGS - scons_escape = env.get("ESCAPE", lambda x: x) - - self.variables = { - # The /b option here will make sure that windows updates the mtime - # when copying the file. This allows to not need to use restat for windows - # copy commands. - "COPY": "cmd.exe /c 1>NUL copy /b" if sys.platform == "win32" else "cp", - "NOOP": "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null", - "SCONS_INVOCATION": "{} {} __NINJA_NO=1 $out".format( - sys.executable, - " ".join( - [ - ninja_syntax.escape(scons_escape(arg)) - for arg in sys.argv - if arg not in COMMAND_LINE_TARGETS - ] - ), - ), - "SCONS_INVOCATION_W_TARGETS": "{} {}".format( - sys.executable, - " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv]), - ), - # This must be set to a global default per: - # https://ninja-build.org/manual.html - # - # (The deps section) - "msvc_deps_prefix": "Note: including file:", - } - - self.rules = { - "CMD": { - "command": "cmd.exe /c $env$cmd" if sys.platform == "win32" else "$env$cmd", - "description": "Built $out", - "pool": "local_pool", - }, - # We add the deps processing variables to this below. We - # don't pipe these through cmd.exe on Windows because we - # use this to generate a compile_commands.json database - # which can't use the shell command as it's compile - # command. 
- "CC": { - "command": "$env$CC @$out.rsp", - "description": "Compiled $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - }, - "CXX": { - "command": "$env$CXX @$out.rsp", - "description": "Compiled $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - }, - "COMPDB_CC": { - "command": "$CC $rspc", - "description": "Compiling $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - }, - "COMPDB_CXX": { - "command": "$CXX $rspc", - "description": "Compiling $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - }, - "LINK": { - "command": "$env$LINK @$out.rsp", - "description": "Linked $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - "pool": "link_pool", - }, - "LINK_CHAINED_CMD": { - "command": "$env$LINK @$out.rsp && $cmd", - "description": "Linked $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - "pool": "link_pool", - }, - "AR": { - "command": "$env$AR @$out.rsp", - "description": "Archived $out", - "rspfile": "$out.rsp", - "rspfile_content": "$rspc", - "pool": "local_pool", - }, - "SYMLINK": { - "command": ( - "cmd /c mklink $out $in" if sys.platform == "win32" else "ln -s $in $out" - ), - "description": "Symlinked $in -> $out", - }, - "NOOP": { - "command": "$NOOP", - "description": "Checked $out", - "pool": "local_pool", - }, - "BAZEL_BUILD_INDIRECTION": { - "command": "$NOOP", - "description": "Checking Bazel outputs...", - "pool": "local_pool", - "restat": 1, - }, - "INSTALL": { - "command": "$COPY $in $out", - "description": "Installed $out", - "pool": "install_pool", - }, - "TEMPLATE": { - "command": "$SCONS_INVOCATION $out", - "description": "Rendered $out", - "pool": "scons_pool", - "restat": 1, - }, - "SCONS": { - "command": "$SCONS_INVOCATION $out", - "description": "SCons $out", - "pool": "scons_pool", - # restat - # if present, causes Ninja to re-stat the command's outputs - # after execution of the command. 
Each output whose - # modification time the command did not change will be - # treated as though it had never needed to be built. This - # may cause the output's reverse dependencies to be removed - # from the list of pending build actions. - # - # We use restat any time we execute SCons because - # SCons calls in Ninja typically create multiple - # targets. But since SCons is doing it's own up to - # date-ness checks it may only update say one of - # them. Restat will find out which of the multiple - # build targets did actually change then only rebuild - # those targets which depend specifically on that - # output. - "restat": 1, - }, - "REGENERATE": { - "command": "$SCONS_INVOCATION_W_TARGETS", - "description": "Regenerated $self", - "depfile": os.path.join(get_path(env["NINJA_BUILDDIR"]), "$out.depfile"), - "generator": 1, - # Console pool restricts to 1 job running at a time, - # it additionally has some special handling about - # passing stdin, stdout, etc to process in this pool - # that we need for SCons to behave correctly when - # regenerating Ninja - "pool": "console", - # Again we restat in case Ninja thought the - # build.ninja should be regenerated but SCons knew - # better. 
- "restat": 1, - }, - } - - command = [ - f"{sys.executable}", - "site_scons/mongo/ninja_bazel_build.py", - f"--ninja-file={self.env.get('NINJA_PREFIX')}.{self.env.get('NINJA_SUFFIX')}", - ] - if self.env.get("VERBOSE"): - command += ["--verbose"] - if self.env.get("BAZEL_INTEGRATION_DEBUG"): - command += ["--integration-debug"] - - self.rules.update( - { - "RUN_BAZEL_BUILD": { - "command": " ".join(command), - "description": "Running bazel build", - "pool": "console", - "restat": 1, - } - } - ) - - num_jobs = self.env.get("NINJA_MAX_JOBS", self.env.GetOption("num_jobs")) - self.pools = { - "local_pool": num_jobs, - "install_pool": num_jobs / 2, - "scons_pool": 1, - } - - for rule in ["CC", "CXX"]: - if env["PLATFORM"] == "win32": - self.rules[rule]["deps"] = "msvc" - else: - self.rules[rule]["deps"] = "gcc" - self.rules[rule]["depfile"] = "$out.d" - - def add_build(self, node): - if not node.has_builder(): - return False - - if isinstance(node, SCons.Node.Alias.Alias): - build = alias_to_ninja_build(node) - else: - build = self.translator.action_to_ninja_build(node) - - # Some things are unbuild-able or need not be built in Ninja - if build is None: - return False - - node_string = str(node) - if node_string in self.builds: - raise Exception("Node {} added to ninja build state more than once".format(node_string)) - self.builds[node_string] = build - self.built.update(build["outputs"]) - return True - - def is_generated_source(self, output): - """Check if output ends with a known generated suffix.""" - _, suffix = splitext(output) - return suffix in self.generated_suffixes - - def has_generated_sources(self, output): - """ - Determine if output indicates this is a generated header file. - """ - for generated in output: - if self.is_generated_source(generated): - return True - return False - - def generate(self, ninja_file): - """ - Generate the build.ninja. - - This should only be called once for the lifetime of this object. 
- """ - if self.__generated: - return - - self.rules.update(self.env.get(NINJA_RULES, {})) - self.pools.update(self.env.get(NINJA_POOLS, {})) - - content = io.StringIO() - ninja = self.writer_class(content, width=100) - - ninja.comment("Generated by scons. DO NOT EDIT.") - - # This version is needed because it is easy to get from pip and it support compile_commands.json - ninja.variable("ninja_required_version", "1.10") - ninja.variable("builddir", get_path(self.env["NINJA_BUILDDIR"])) - ninja.variable("artifact_dir", self.env.Dir("$BUILD_DIR")) - - link_jobs = self.env.get("NINJA_LINK_JOBS", self.env.GetOption("num_jobs")) - self.pools.update({"link_pool": link_jobs}) - - for pool_name, size in self.pools.items(): - ninja.pool(pool_name, min(self.env.get("NINJA_MAX_JOBS", size), size)) - - for var, val in self.variables.items(): - ninja.variable(var, val) - - # This is the command that is used to clean a target before building it, - # excluding precious targets. - if sys.platform == "win32": - rm_cmd = "cmd.exe /c del /q $rm_outs >nul 2>&1 &" - else: - rm_cmd = "rm -f $rm_outs;" - - precious_rule_suffix = "_PRECIOUS" - - # Make two sets of rules to honor scons Precious setting. The build nodes themselves - # will then reselect their rule according to the precious being set for that node. - precious_rules = {} - for rule, kwargs in self.rules.items(): - if self.env.get("NINJA_MAX_JOBS") is not None and "pool" not in kwargs: - kwargs["pool"] = "local_pool" - # Do not worry about precious for commands that don't have targets (phony) - # or that will callback to scons (which maintains its own precious). 
- if rule not in ["phony", "TEMPLATE", "REGENERATE", "COMPDB_CC", "COMPDB_CXX"]: - precious_rule = rule + precious_rule_suffix - precious_rules[precious_rule] = kwargs.copy() - ninja.rule(precious_rule, **precious_rules[precious_rule]) - - kwargs["command"] = f"{rm_cmd} " + kwargs["command"] - ninja.rule(rule, **kwargs) - else: - ninja.rule(rule, **kwargs) - self.rules.update(precious_rules) - - # If the user supplied an alias to determine generated sources, use that, otherwise - # determine what the generated sources are dynamically. - generated_sources_alias = self.env.get("NINJA_GENERATED_SOURCE_ALIAS_NAME") - generated_sources_build = None - - if generated_sources_alias: - generated_sources_build = self.builds.get(generated_sources_alias) - if generated_sources_build is None or generated_sources_build["rule"] != "phony": - raise Exception( - "ERROR: 'NINJA_GENERATED_SOURCE_ALIAS_NAME' set, but no matching Alias object found." - ) - - if generated_sources_alias and generated_sources_build: - generated_source_files = sorted( - [] if not generated_sources_build else generated_sources_build["implicit"] - ) - - def check_generated_source_deps(build): - return build != generated_sources_build and set(build["outputs"]).isdisjoint( - generated_source_files - ) - else: - generated_sources_build = None - generated_source_files = sorted( - { - output - # First find builds which have header files in their outputs. - for build in self.builds.values() - if self.has_generated_sources(build["outputs"]) - for output in build["outputs"] - # Collect only the header files from the builds with them - # in their output. We do this because is_generated_source - # returns True if it finds a header in any of the outputs, - # here we need to filter so we only have the headers and - # not the other outputs. 
- if self.is_generated_source(output) - } - ) - - if generated_source_files: - generated_sources_alias = "_ninja_generated_sources" - ninja_sorted_build( - ninja, - outputs=generated_sources_alias, - rule="phony", - implicit=generated_source_files, - ) - - def check_generated_source_deps(build): - return ( - not build["rule"] == "INSTALL" - and set(build["outputs"]).isdisjoint(generated_source_files) - and set(build.get("implicit", [])).isdisjoint(generated_source_files) - ) - - template_builders = [] - - self.builds["compiledb"] = { - "rule": "phony", - "outputs": ["compiledb"], - "implicit": ["compile_commands.json"], - } - - # Now for all build nodes, we want to select the precious rule or not. - # If it's not precious, we need to save all the outputs into a variable - # on that node. Later we will be removing outputs and switching them to - # phonies so that we can generate response and depfiles correctly. - for build, kwargs in self.builds.items(): - if kwargs.get("variables") and kwargs["variables"].get("precious"): - kwargs["rule"] = kwargs["rule"] + precious_rule_suffix - elif kwargs["rule"] not in ["phony", "TEMPLATE", "REGENERATE"]: - if not kwargs.get("variables"): - kwargs["variables"] = {} - kwargs["variables"]["rm_outs"] = kwargs["outputs"].copy() - - for build in [self.builds[key] for key in sorted(self.builds.keys())]: - if build["rule"] == "TEMPLATE": - template_builders.append(build) - continue - - if "order_only" not in build: - build["order_only"] = ["bazel_run_first"] - else: - build["order_only"].append("bazel_run_first") - - if "implicit" in build: - build["implicit"].sort() - - # Don't make generated sources depend on each other. We - # have to check that none of the outputs are generated - # sources and none of the direct implicit dependencies are - # generated sources or else we will create a dependency - # cycle. 
- if generated_source_files and check_generated_source_deps(build): - depends_on_gen_source = build["rule"] != "INSTALL" - if build["outputs"]: - if ( - self.env.Entry(build["outputs"][0]) - .get_build_env() - .get("NINJA_GENSOURCE_INDEPENDENT") - ): - depends_on_gen_source = False - - if depends_on_gen_source: - # Make all non-generated source targets depend on - # _generated_sources. We use order_only for generated - # sources so that we don't rebuild the world if one - # generated source was rebuilt. We just need to make - # sure that all of these sources are generated before - # other builds. - order_only = build.get("order_only", []) - order_only.append(generated_sources_alias) - build["order_only"] = order_only - if "order_only" in build: - build["order_only"].sort() - - # When using a depfile Ninja can only have a single output - # but SCons will usually have emitted an output for every - # thing a command will create because it's caching is much - # more complex than Ninja's. This includes things like DWO - # files. Here we make sure that Ninja only ever sees one - # target when using a depfile. It will still have a command - # that will create all of the outputs but most targets don't - # depend direclty on DWO files and so this assumption is safe - # to make. - rule = self.rules.get(build["rule"]) - - # Some rules like 'phony' and other builtins we don't have - # listed in self.rules so verify that we got a result - # before trying to check if it has a deps key. - # - # Anything using deps or rspfile in Ninja can only have a single - # output, but we may have a build which actually produces - # multiple outputs which other targets can depend on. Here we - # slice up the outputs so we have a single output which we will - # use for the "real" builder and multiple phony targets that - # match the file names of the remaining outputs. This way any - # build can depend on any output from any build. 
- # - # We assume that the first listed output is the 'key' - # output and is stably presented to us by SCons. For - # instance if -gsplit-dwarf is in play and we are - # producing foo.o and foo.dwo, we expect that outputs[0] - # from SCons will be the foo.o file and not the dwo - # file. If instead we just sorted the whole outputs array, - # we would find that the dwo file becomes the - # first_output, and this breaks, for instance, header - # dependency scanning. - if rule is not None and (rule.get("deps") or rule.get("rspfile")): - first_output, remaining_outputs = ( - build["outputs"][0], - build["outputs"][1:], - ) - - if remaining_outputs: - ninja_sorted_build( - ninja, - outputs=sorted(remaining_outputs), - rule="phony", - implicit=first_output, - ) - - build["outputs"] = first_output - - # Optionally a rule can specify a depfile, and SCons can generate implicit - # dependencies into the depfile. This allows for dependencies to come and go - # without invalidating the ninja file. The depfile was created in ninja specifically - # for dealing with header files appearing and disappearing across rebuilds, but it can - # be repurposed for anything, as long as you have a way to regenerate the depfile. 
- # More specific info can be found here: https://ninja-build.org/manual.html#_depfile - if rule is not None and rule.get("depfile") and build.get("deps_files"): - path = ( - build["outputs"] if SCons.Util.is_List(build["outputs"]) else [build["outputs"]] - ) - generate_depfile(self.env, path[0], build.pop("deps_files", [])) - - if "inputs" in build: - build["inputs"].sort() - - ninja_sorted_build(ninja, **build) - - for build, kwargs in self.builds.items(): - if kwargs["rule"] in [ - "CC", - f"CC{precious_rule_suffix}", - "CXX", - f"CXX{precious_rule_suffix}", - ]: - rule = ( - kwargs["rule"].replace(precious_rule_suffix) - if precious_rule_suffix in kwargs["rule"] - else kwargs["rule"] - ) - - compdb_build = kwargs.copy() - - # the tool list is stored in the rule variable, so remove any wrappers we find. - for wrapper in compdb_build["variables"]["_COMPILATIONDB_IGNORE_WRAPPERS"]: - if wrapper in compdb_build["variables"][rule]: - compdb_build["variables"][rule].remove(wrapper) - - rule = "COMPDB_" + rule - - compdb_build["rule"] = rule - compdb_build["outputs"] = [kwargs["outputs"] + ".compdb"] - ninja.build(**compdb_build) - - template_builds = {"rule": "TEMPLATE"} - for template_builder in template_builders: - # Special handling for outputs and implicit since we need to - # aggregate not replace for each builder. - for agg_key in ["outputs", "implicit", "inputs"]: - new_val = template_builds.get(agg_key, []) - - # Use pop so the key is removed and so the update - # below will not overwrite our aggregated values. - cur_val = template_builder.pop(agg_key, []) - if is_List(cur_val): - new_val += cur_val - else: - new_val.append(cur_val) - template_builds[agg_key] = new_val - - if template_builds.get("outputs", []): - ninja_sorted_build(ninja, **template_builds) - - # We have to glob the SCons files here to teach the ninja file - # how to regenerate itself. 
We'll never see ourselves in the - # DAG walk so we can't rely on action_to_ninja_build to - # generate this rule even though SCons should know we're - # dependent on SCons files. - # - # The REGENERATE rule uses depfile, so we need to generate the depfile - # in case any of the SConscripts have changed. The depfile needs to be - # path with in the build and the passed ninja file is an abspath, so - # we will use SCons to give us the path within the build. Normally - # generate_depfile should not be called like this, but instead be called - # through the use of custom rules, and filtered out in the normal - # list of build generation about. However, because the generate rule - # is hardcoded here, we need to do this generate_depfile call manually. - ninja_file_path = self.env.File(ninja_file).path - ninja_in_file_path = ( - os.path.join(get_path(self.env["NINJA_BUILDDIR"]), os.path.basename(ninja_file)) + ".in" - ) - generate_depfile( - self.env, - ninja_in_file_path, - self.env["NINJA_REGENERATE_DEPS"], - ) - - ninja_sorted_build( - ninja, - outputs=ninja_in_file_path, - rule="REGENERATE", - variables={ - "self": ninja_file_path, - }, - ) - - ninja_sorted_build( - ninja, - outputs=["bazel_run_first_internal"], - inputs=[], - rule="RUN_BAZEL_BUILD", - ) - - ninja_sorted_build( - ninja, - outputs=self.env["NINJA_BAZEL_OUTPUTS"] + ["bazel_run_first"], - inputs=[], - implicit=["bazel_run_first_internal"], - rule="BAZEL_BUILD_INDIRECTION", - ) - - # This sets up a dependency edge between build.ninja.in and build.ninja - # without actually taking any action to transform one into the other - # because we write both files ourselves later. - ninja_sorted_build( - ninja, - outputs=ninja_file_path, - rule="NOOP", - inputs=[ninja_in_file_path], - implicit=[__file__], - ) - - # Look in SCons's list of DEFAULT_TARGETS, find the ones that - # we generated a ninja build rule for. 
- scons_default_targets = [ - get_path(tgt) for tgt in SCons.Script.DEFAULT_TARGETS if get_path(tgt) in self.built - ] - - # If we found an overlap between SCons's list of default - # targets and the targets we created ninja builds for then use - # those as ninja's default as well. - if scons_default_targets: - ninja.default(" ".join(scons_default_targets)) - - with tempfile.NamedTemporaryFile(delete=False, mode="w") as temp_ninja_file: - temp_ninja_file.write(content.getvalue()) - shutil.move(temp_ninja_file.name, ninja_file) - shutil.copy2(ninja_file, ninja_in_file_path) - - self.__generated = True - - -def get_path(node): - """ - Return a fake path if necessary. - - As an example Aliases use this as their target name in Ninja. - """ - if hasattr(node, "get_path"): - return node.get_path() - return str(node) - - -def rfile(node): - """ - Return the repository file for node if it has one. Otherwise return node - """ - if hasattr(node, "rfile"): - return node.rfile() - return node - - -def src_file(node): - """Returns the src code file if it exists.""" - if hasattr(node, "srcnode"): - src = node.srcnode() - if src.stat() is not None: - return src - return get_path(node) - - -def get_comstr(env, action, targets, sources): - """Get the un-substituted string for action.""" - # Despite being having "list" in it's name this member is not - # actually a list. It's the pre-subst'd string of the command. We - # use it to determine if the command we're about to generate needs - # to use a custom Ninja rule. By default this redirects CC, CXX, - # AR, SHLINK, and LINK commands to their respective rules but the - # user can inject custom Ninja rules and tie them to commands by - # using their pre-subst'd string. 
- if hasattr(action, "process"): - return action.cmd_list - - return action.genstring(targets, sources, env) - - -def ninja_recursive_sorted_dict(build): - sorted_dict = OrderedDict() - for key, val in sorted(build.items()): - if isinstance(val, dict): - sorted_dict[key] = ninja_recursive_sorted_dict(val) - elif isinstance(val, list) and key in ( - "inputs", - "outputs", - "implicit", - "order_only", - "implicit_outputs", - ): - sorted_dict[key] = sorted(val) - else: - sorted_dict[key] = val - return sorted_dict - - -def ninja_sorted_build(ninja, **build): - sorted_dict = ninja_recursive_sorted_dict(build) - ninja.build(**sorted_dict) - - -def get_command_env(env, target, source): - """ - Return a string that sets the environment for any environment variables that - differ between the OS environment and the SCons command ENV. - - It will be compatible with the default shell of the operating system. - """ - try: - return env["NINJA_ENV_VAR_CACHE"] - except KeyError: - pass - - # Scan the ENV looking for any keys which do not exist in - # os.environ or differ from it. We assume if it's a new or - # differing key from the process environment then it's - # important to pass down to commands in the Ninja file. 
- ENV = env.get("SHELL_ENV_GENERATOR", get_default_ENV)(env, target, source) - scons_specified_env = { - key: value - for key, value in ENV.items() - if key not in os.environ or os.environ.get(key, None) != value - } - - windows = env["PLATFORM"] == "win32" - command_env = "" - for key, value in sorted(scons_specified_env.items()): - # Ensure that the ENV values are all strings: - if is_List(value): - # If the value is a list, then we assume it is a - # path list, because that's a pretty common list-like - # value to stick in an environment variable: - value = flatten_sequence(value) - value = joinpath(map(str, value)) - else: - # If it isn't a string or a list, then we just coerce - # it to a string, which is the proper way to handle - # Dir and File instances and will produce something - # reasonable for just about everything else: - value = str(value) - - if windows: - command_env += "set '{}={}' && ".format(key, value) - else: - # We address here *only* the specific case that a user might have - # an environment variable which somehow gets included and has - # spaces in the value. These are escapes that Ninja handles. This - # doesn't make builds on paths with spaces (Ninja and SCons issues) - # nor expanding response file paths with spaces (Ninja issue) work. - value = value.replace(r" ", r"$ ") - command_env += "export {}='{}';".format( - key, env.subst(value, target=target, source=source) - ) - - env["NINJA_ENV_VAR_CACHE"] = command_env - return command_env - - -def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom_env=None): - """Generate a response file command provider for rule name.""" - - if custom_env is None: - custom_env = {} - - # If win32 using the environment with a response file command will cause - # ninja to fail to create the response file. Additionally since these rules - # generally are not piping through cmd.exe /c any environment variables will - # make CreateProcess fail to start. 
def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom_env=None):
    """Generate a response file command provider for rule name.

    Returns a closure suitable for registration via NinjaRuleMapping: it
    splits a command at *tool* so that everything after the tool goes into
    a ninja response file ($rspc) while the tool invocation itself stays on
    the command line.
    """
    if custom_env is None:
        custom_env = {}

    # If win32, using the environment with a response file command will cause
    # ninja to fail to create the response file. Additionally, since these rules
    # generally are not piping through cmd.exe /c, any environment variables will
    # make CreateProcess fail to start.
    #
    # On POSIX we can still set environment variables even for compile
    # commands so we do so.
    use_command_env = not env["PLATFORM"] == "win32"
    if "$" in tool:
        tool_is_dynamic = True

    def get_response_file_command(env, node, action, targets, sources, executor=None):
        if hasattr(action, "process"):
            cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
            cmd_list = [str(c).replace("$", "$$") for c in cmd_list[0]]
        else:
            command = generate_command(env, node, action, targets, sources, executor=executor)
            cmd_list = shlex.split(command)

        if tool_is_dynamic:
            tool_command = env.subst(tool, target=targets, source=sources, executor=executor)
        else:
            tool_command = tool

        try:
            # Add 1 so we always keep the actual tool inside of cmd
            tool_idx = cmd_list.index(tool_command) + 1
        except ValueError:
            raise Exception(
                "Could not find tool {}({}) in {} generated from {}".format(
                    tool, tool_command, cmd_list, get_comstr(env, action, targets, sources)
                )
            )

        cmd, rsp_content = cmd_list[:tool_idx], cmd_list[tool_idx:]
        rsp_content = " ".join(rsp_content)

        variables = {"rspc": rsp_content}
        variables[rule] = cmd

        if rule == "CC" or rule == "CXX":
            # resolve and store any wrappers we want to remove later when we
            # are constructing the compdb entries for the compiles.
            wrappers = [
                env.subst(wrapper, target=targets, source=sources)
                for wrapper in env.get("_COMPILATIONDB_IGNORE_WRAPPERS", [])
            ]
            variables["_COMPILATIONDB_IGNORE_WRAPPERS"] = wrappers

        if use_command_env:
            variables["env"] = get_command_env(env, targets, sources)

            for key, value in custom_env.items():
                variables["env"] += (
                    env.subst(
                        f"export {key}={value};",
                        target=targets,
                        source=sources,
                        executor=executor,
                    )
                    + " "
                )
        return rule, variables, [tool_command]

    return get_response_file_command


def generate_command(env, node, action, targets, sources, executor=None):
    """Return the fully substituted, single-line, ninja-escaped command string."""
    # Actions like CommandAction have a method called process that is
    # used by SCons to generate the cmd_line they need to run. So
    # check if it's a thing like CommandAction and call it if we can.
    if hasattr(action, "process"):
        cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
        cmd = _string_from_cmd_list(cmd_list[0])
    else:
        # Anything else works with genstring, this is most commonly hit by
        # ListActions which essentially call process on all of their
        # commands and concatenate it for us.
        genstring = action.genstring(targets, sources, env)
        if executor is not None:
            cmd = env.subst(genstring, executor=executor)
        else:
            cmd = env.subst(genstring, targets, sources)

    # Collapse multi-line actions into a single shell invocation.
    cmd = cmd.replace("\n", " && ").strip()
    if cmd.endswith("&&"):
        cmd = cmd[0:-2].strip()

    # Escape dollars as necessary
    return cmd.replace("$", "$$")


def get_generic_shell_command(env, node, action, targets, sources, executor=None):
    """Fallback rule-mapping provider producing a generic CMD/TEMPLATE build."""
    if env.get("NINJA_TEMPLATE"):
        rule = "TEMPLATE"
    else:
        rule = "CMD"

    return (
        rule,
        {
            # BUGFIX: the executor was accepted but previously discarded
            # (executor=None was hard-coded); forward it so substitution can
            # use the executor's targets/sources when one is available.
            "cmd": generate_command(env, node, action, targets, sources, executor=executor),
            "env": get_command_env(env, targets, sources),
        },
        # Since this function is a rule mapping provider, it must return a list
        # of dependencies, and usually this would be the path to a tool, such as
        # a compiler, used for this rule. However this function is too generic
        # to be able to reliably extract such deps from the command, so we
        # return a placeholder empty list. It should be noted that generally
        # this function will not be used solely and is more like a template to
        # generate the basics for a custom provider which may have more
        # specific options for a provider function for a custom NinjaRuleMapping.
        [],
    )
def get_command(env, node, action):
    """Get the command to execute for node."""
    sub_env = node.env if node.env else env

    executor = node.get_executor()
    if executor is not None:
        tlist = executor.get_all_targets()
        slist = executor.get_all_sources()
    else:
        tlist = node.target_peers if hasattr(node, "target_peers") else [node]
        slist = node.sources

    # Retrieve the repository file for all sources
    slist = [rfile(s) for s in slist]

    # Generate a real CommandAction
    if isinstance(action, SCons.Action.CommandGeneratorAction):
        action = action._generate(tlist, slist, sub_env, 0, executor=executor)

    comstr = get_comstr(sub_env, action, tlist, slist)
    if not comstr:
        return None

    provider = __NINJA_RULE_MAPPING.get(comstr, get_generic_shell_command)
    rule, variables, provider_deps = provider(
        sub_env,
        node,
        action,
        tlist,
        slist,
        executor=executor,
    )

    # Get the dependencies for all targets
    implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})

    # Now add in the other dependencies related to the command,
    # e.g. the compiler binary. The ninja rule can be user provided so
    # we must do some validation to resolve the dependency path for ninja.
    for provider_dep in provider_deps:
        provider_dep = sub_env.subst(provider_dep)
        if not provider_dep:
            continue

        # If the tool is a node, then SCons will resolve the path later; if
        # it's not a node then we assume it was generated from the build and
        # make sure it exists.
        if isinstance(provider_dep, SCons.Node.Node) or os.path.exists(provider_dep):
            implicit.append(provider_dep)
            continue

        # In some cases the tool could be in the local directory and be
        # supplied without the ext, such as on Windows, so append the
        # executable suffix and check.
        prog_suffix = sub_env.get("PROGSUFFIX", "")
        provider_dep_ext = (
            provider_dep if provider_dep.endswith(prog_suffix) else provider_dep + prog_suffix
        )
        if os.path.exists(provider_dep_ext):
            implicit.append(provider_dep_ext)
            continue

        # Many commands will assume the binary is in the path, so
        # we accept this as a possible input from a given command.
        provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(
            provider_dep, path=os.environ["PATH"]
        )
        if provider_dep_abspath:
            implicit.append(provider_dep_abspath)
            continue

        # Possibly these could be ignored and the build would still work,
        # however it may not always rebuild correctly, so we hard stop, and
        # force the user to fix the issue with the provided ninja rule.
        err_msg = f"Could not resolve path for '{provider_dep}' dependency on node '{node}', you may need to setup your shell environment for ninja builds."
        if os.name == "nt":
            err_msg += " On Windows, please ensure that you have run the necessary Visual Studio environment setup scripts (e.g. vcvarsall.bat ..., or launching a Visual Studio Command Prompt) before invoking SCons."
        raise Exception(err_msg)

    bazel_deps = getattr(node.attributes, "bazel_libdeps", [])
    ninja_build = {
        "order_only": get_order_only(node),
        "outputs": get_outputs(node),
        "inputs": get_inputs(node),
        "implicit": implicit + bazel_deps,
        "rule": rule,
        "variables": variables,
    }

    # Don't use sub_env here because we require that NINJA_POOL be set
    # on a per-builder call basis to prevent accidental strange
    # behavior like env['NINJA_POOL'] = 'console' and sub_env can be
    # the global Environment object if node.env is None.
    # Example:
    #
    # Allowed:
    #
    #     env.Command("ls", NINJA_POOL="ls_pool")
    #
    # Not allowed and ignored:
    #
    #     env["NINJA_POOL"] = "ls_pool"
    #     env.Command("ls")
    #
    if node.env and node.env.get("NINJA_POOL", None) is not None:
        ninja_build["pool"] = node.env["NINJA_POOL"]

    return ninja_build
def ninja_builder(env, target, source):
    """Generate a build.ninja for source."""
    if not isinstance(source, list):
        source = [source]
    if not isinstance(target, list):
        target = [target]

    # We have no COMSTR equivalent so print that we're generating
    # here.
    print("Generating:", str(target[0]))

    generated_build_ninja = target[0].get_abspath()
    NINJA_STATE.generate(generated_build_ninja)

    return 0


class AlwaysExecAction(SCons.Action.FunctionAction):
    """Override FunctionAction.__call__ to always execute."""

    def __call__(self, *args, **kwargs):
        # Force execution even in no_exec / dry-run style invocations.
        kwargs["execute"] = 1
        return super().__call__(*args, **kwargs)


def register_custom_handler(env, name, handler):
    """Register a custom handler for SCons function actions."""
    env[NINJA_CUSTOM_HANDLERS][name] = handler


def register_custom_rule_mapping(env, pre_subst_string, rule):
    """Register a function to call for a given rule."""
    global __NINJA_RULE_MAPPING
    __NINJA_RULE_MAPPING[pre_subst_string] = rule


def register_custom_rule(
    env,
    rule,
    command,
    description="",
    deps=None,
    pool=None,
    use_depfile=False,
    depfile=None,
    use_response_file=False,
    response_file_content="$rspc",
    restat=False,
):
    """Allows specification of Ninja rules from inside SCons files.

    Raises if a response file is requested but the command never
    references the generated $out.rsp file.
    """
    rule_obj = {
        "command": command,
        "description": description if description else "{} $out".format(rule),
    }

    if use_depfile:
        if depfile:
            rule_obj["depfile"] = depfile
        else:
            rule_obj["depfile"] = os.path.join(get_path(env["NINJA_BUILDDIR"]), "$out.depfile")

    if deps is not None:
        rule_obj["deps"] = deps

    if pool is not None:
        rule_obj["pool"] = pool

    if use_response_file:
        rule_obj["rspfile"] = "$out.rsp"
        if rule_obj["rspfile"] not in command:
            # BUGFIX: error message previously read "not in in command".
            raise Exception(
                f'Bad Ninja Custom Rule: response file requested, but {rule_obj["rspfile"]} not in command: {command}'
            )
        rule_obj["rspfile_content"] = response_file_content

    if restat:
        rule_obj["restat"] = 1

    env[NINJA_RULES][rule] = rule_obj


def register_custom_pool(env, pool, size):
    """Allows the creation of custom Ninja pools"""
    env[NINJA_POOLS][pool] = size


def set_build_node_callback(env, node, callback):
    """Attach a per-node ninja build callback (skipped for conftest nodes)."""
    if "conftest" not in str(node):
        setattr(node.attributes, "ninja_build_callback", callback)


def ninja_csig(original):
    """Return a dummy csig"""

    def wrapper(self):
        name = str(self)
        # SConscript/SConstruct files must keep real signatures so ninja
        # regeneration works; everything else gets a constant placeholder.
        if "SConscript" in name or "SConstruct" in name:
            return original(self)
        return "dummy_ninja_csig"

    return wrapper


def ninja_contents(original):
    """Return a dummy content without doing IO"""

    def wrapper(self):
        name = str(self)
        if "SConscript" in name or "SConstruct" in name:
            return original(self)
        return bytes("dummy_ninja_contents", encoding="utf-8")

    return wrapper


def CheckNinjaCompdbExpand(env, context):
    """Configure check testing if ninja's compdb can expand response files"""

    context.Message("Checking if ninja compdb can expand response files... ")
    ret, output = context.TryAction(
        action="ninja -f $SOURCE -t compdb -x CMD_RSP > $TARGET",
        extension=".ninja",
        text=textwrap.dedent("""
            rule CMD_RSP
              command = $cmd @$out.rsp > fake_output.txt
              description = Built $out
              rspfile = $out.rsp
              rspfile_content = $rspc

            build fake_output.txt: CMD_RSP fake_input.txt
              cmd = echo
              pool = console
              rspc = "test"
            """),
    )
    # If the un-expanded response file reference leaks into the compdb
    # output, this ninja cannot expand response files.
    result = "@fake_output.txt.rsp" not in output
    context.Result(result)
    return result
def ninja_stat(_self, path):
    """
    Eternally memoized stat call.

    SCons is very aggressive about clearing out cached values. For our
    purposes everything should only ever call stat once since we're
    running in a no_exec build; the file system state should not
    change. For these reasons we patch SCons.Node.FS.LocalFS.stat to
    use our eternal memoized dictionary.
    """
    global NINJA_STAT_MEMO

    try:
        return NINJA_STAT_MEMO[path]
    except KeyError:
        try:
            result = os.stat(path)
        except os.error:
            result = None

        NINJA_STAT_MEMO[path] = result
        return result


def ninja_noop(*_args, **_kwargs):
    """
    A general purpose no-op function.

    There are many things that happen in SCons that we don't need and
    also don't return anything. We use this to disable those functions
    instead of creating multiple definitions of the same thing.
    """
    return None


def ninja_whereis(thing, *_args, **_kwargs):
    """Replace env.WhereIs with a much faster version"""
    global NINJA_WHEREIS_MEMO

    # Optimize for success, this gets called significantly more often
    # when the value is already memoized than when it's not.
    try:
        return NINJA_WHEREIS_MEMO[thing]
    except KeyError:
        # We do not honor any env['ENV'] or env[*] variables in the
        # generated ninja file. Ninja passes your raw shell environment
        # down to its subprocess so the only sane option is to do the
        # same during generation. At some point, if and when we try to
        # upstream this, I'm sure a sticking point will be respecting
        # env['ENV'] variables and such but it's actually quite
        # complicated. I have a naive version but making it always work
        # with shell quoting is nigh impossible. So I've decided to
        # cross that bridge when it's absolutely required.
        path = shutil.which(thing)
        NINJA_WHEREIS_MEMO[thing] = path
        return path


def ninja_always_serial(self, num, taskmaster):
    """Replacement for SCons.Job.Jobs constructor which always uses the Serial Job class."""
    # We still set self.num_jobs to num even though it's a lie. The
    # only consumer of this attribute is the Parallel Job class AND
    # the Main.py function which instantiates a Jobs class. It checks
    # if Jobs.num_jobs is equal to options.num_jobs, so if the user
    # provides -j12 but we set self.num_jobs = 1 they get an incorrect
    # warning about this version of Python not supporting parallel
    # builds. So here we lie so the Main.py will not give a false
    # warning to users.
    self.num_jobs = num
    self.job = SCons.Job.Serial(taskmaster)


def ninja_print_conf_log(s, target, source, env):
    """Command line print only for conftest to generate a correct conf log."""
    if target and "conftest" in str(target[0]):
        action = SCons.Action._ActionAction()
        action.print_cmd_line(s, target, source, env)


class NinjaNoResponseFiles(SCons.Platform.TempFileMunge):
    """Overwrite the __call__ method of SCons' TempFileMunge to not delete."""

    def __call__(self, target, source, env, for_signature):
        # Never spill the command into a temp/response file; return it as-is.
        return self.cmd

    def _print_cmd_str(*_args, **_kwargs):
        """Disable this method"""
        pass


def exists(env):
    """Enable if called."""

    # This variable disables the tool when storing the SCons command in the
    # generated ninja file to ensure that the ninja tool is not loaded when
    # SCons should do actual work as a subprocess of a ninja build. The ninja
    # tool is very invasive into the internals of SCons and so should never be
    # enabled when SCons needs to build a target.
    return env.get("__NINJA_NO", "0") != "1"
- always_exec_ninja_action = AlwaysExecAction(ninja_builder, {}) - ninja_builder_obj = SCons.Builder.Builder(action=always_exec_ninja_action) - env.Append(BUILDERS={"Ninja": ninja_builder_obj}) - - env["NINJA_PREFIX"] = env.get("NINJA_PREFIX", "build") - env["NINJA_SUFFIX"] = env.get("NINJA_SUFFIX", "ninja") - env["NINJA_ALIAS_NAME"] = env.get("NINJA_ALIAS_NAME", "generate-ninja") - env["NINJA_BUILDDIR"] = env.get("NINJA_BUILDDIR", env.Dir(".ninja").path) - ninja_file_name = env.subst("${NINJA_PREFIX}.${NINJA_SUFFIX}") - ninja_file = env.Ninja(target=ninja_file_name, source=[]) - env.AlwaysBuild(ninja_file) - - # TODO: API for getting the SConscripts programmatically - # exists upstream: https://github.com/SCons/scons/issues/3625 - def ninja_generate_deps(env): - return sorted([env.File("#SConstruct").path] + glob("**/SConscript", recursive=True)) - - env["_NINJA_REGENERATE_DEPS_FUNC"] = ninja_generate_deps - - env["NINJA_REGENERATE_DEPS"] = env.get( - "NINJA_REGENERATE_DEPS", - "${_NINJA_REGENERATE_DEPS_FUNC(__env__)}", - ) - - # This adds the required flags such that the generated compile - # commands will create depfiles as appropriate in the Ninja file. - if env["PLATFORM"] == "win32": - env.Append(CCFLAGS=["/showIncludes"]) - else: - env.Append(CCFLAGS=["-MMD", "-MF", "${TARGET}.d"]) - - env.AddMethod(CheckNinjaCompdbExpand, "CheckNinjaCompdbExpand") - - # Provide a way for custom rule authors to easily access command - # generation. - env.AddMethod(get_generic_shell_command, "NinjaGetGenericShellCommand") - env.AddMethod(get_command, "NinjaGetCommand") - env.AddMethod(gen_get_response_file_command, "NinjaGenResponseFileProvider") - env.AddMethod(set_build_node_callback, "NinjaSetBuildNodeCallback") - - # Expose ninja node path converstion functions to make writing - # custom function action handlers easier. 
- env.AddMethod(lambda _env, node: get_outputs(node), "NinjaGetOutputs") - env.AddMethod( - lambda _env, node, skip_unknown_types=False: get_inputs(node, skip_unknown_types), - "NinjaGetInputs", - ) - env.AddMethod( - lambda _env, node, skip_sources=False: get_dependencies(node), "NinjaGetDependencies" - ) - env.AddMethod(lambda _env, node: get_order_only(node), "NinjaGetOrderOnly") - - # Provides a way for users to handle custom FunctionActions they - # want to translate to Ninja. - env[NINJA_CUSTOM_HANDLERS] = {} - env.AddMethod(register_custom_handler, "NinjaRegisterFunctionHandler") - - # Provides a mechanism for inject custom Ninja rules which can - # then be mapped using NinjaRuleMapping. - env[NINJA_RULES] = {} - env.AddMethod(register_custom_rule, "NinjaRule") - - # Provides a mechanism for inject custom Ninja pools which can - # be used by providing the NINJA_POOL="name" as an - # OverrideEnvironment variable in a builder call. - env[NINJA_POOLS] = {} - env.AddMethod(register_custom_pool, "NinjaPool") - - # Add the ability to register custom NinjaRuleMappings for Command - # builders. We don't store this dictionary in the env to prevent - # accidental deletion of the CC/XXCOM mappings. You can still - # overwrite them if you really want to but you have to explicit - # about it this way. The reason is that if they were accidentally - # deleted you would get a very subtly incorrect Ninja file and - # might not catch it. - env.AddMethod(register_custom_rule_mapping, "NinjaRuleMapping") - - # TODO: change LINKCOM and SHLINKCOM to handle embedding manifest exe checks - # without relying on the SCons hacks that SCons uses by default. 
- if env["PLATFORM"] == "win32": - from SCons.Tool.mslink import compositeLinkAction - - if env["LINKCOM"] == compositeLinkAction: - env["LINKCOM"] = ( - '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}' - ) - env["SHLINKCOM"] = ( - '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}' - ) - - # Normally in SCons actions for the Program and *Library builders - # will return "${*COM}" as their pre-subst'd command line. However - # if a user in a SConscript overwrites those values via key access - # like env["LINKCOM"] = "$( $ICERUN $)" + env["LINKCOM"] then - # those actions no longer return the "bracketted" string and - # instead return something that looks more expanded. So to - # continue working even if a user has done this we map both the - # "bracketted" and semi-expanded versions. - def robust_rule_mapping(var, rule, tool): - provider = gen_get_response_file_command(env, rule, tool) - env.NinjaRuleMapping("${" + var + "}", provider) - env.NinjaRuleMapping(env[var], provider) - - robust_rule_mapping("CCCOM", "CC", "$CC") - robust_rule_mapping("SHCCCOM", "CC", "$SHCC") - robust_rule_mapping("CXXCOM", "CXX", "$CXX") - robust_rule_mapping("SHCXXCOM", "CXX", "$SHCXX") - robust_rule_mapping("LINKCOM", "LINK", "$LINK") - robust_rule_mapping("SHLINKCOM", "LINK", "$SHLINK") - robust_rule_mapping("ARCOM", "AR", "$AR") - - # Make SCons node walk faster by preventing unnecessary work - env.Decider("timestamp-match") - - # Used to determine if a build generates a source file. Ninja - # requires that all generated sources are added as order_only - # dependencies to any builds that *might* use them. - env["NINJA_GENERATED_SOURCE_SUFFIXES"] = [".h", ".hpp"] - - if env["PLATFORM"] != "win32" and env.get("RANLIBCOM"): - # There is no way to translate the ranlib list action into - # Ninja so add the s flag and disable ranlib. 
- # - # This is equivalent to Meson. - # https://github.com/mesonbuild/meson/blob/master/mesonbuild/linkers.py#L143 - old_arflags = str(env["ARFLAGS"]) - if "s" not in old_arflags: - old_arflags += "s" - - env["ARFLAGS"] = SCons.Util.CLVar([old_arflags]) - - # Disable running ranlib, since we added 's' above - env["RANLIBCOM"] = "" - - # This is the point of no return, anything after this comment - # makes changes to SCons that are irreversible and incompatible - # with a normal SCons build. We return early if __NINJA_NO=1 has - # been given on the command line (i.e. by us in the generated - # ninja file) here to prevent these modifications from happening - # when we want SCons to do work. Everything before this was - # necessary to setup the builder and other functions so that the - # tool can be unconditionally used in the users's SCons files. - - if not exists(env): - return - - # There is a target called generate-ninja which needs to be included - # with the --ninja flag in order to generate the ninja file. Because the --ninja - # flag is ONLY used with generate-ninja, we have combined the two by making the --ninja flag - # implicitly build the generate-ninja target. - SCons.Script.BUILD_TARGETS = SCons.Script.TargetList(env.Alias("$NINJA_ALIAS_NAME", ninja_file)) - - # Set a known variable that other tools can query so they can - # behave correctly during ninja generation. - env["GENERATING_NINJA"] = True - - # These methods are no-op'd because they do not work during ninja - # generation, expected to do no work, or simply fail. All of which - # are slow in SCons. So we overwrite them with no logic. 
- SCons.Node.FS.File.make_ready = ninja_noop - SCons.Node.FS.File.prepare = ninja_noop - SCons.Node.FS.File.push_to_cache = ninja_noop - SCons.Executor.Executor.prepare = ninja_noop - SCons.Taskmaster.Task.prepare = ninja_noop - SCons.Node.FS.File.built = ninja_noop - SCons.Node.Node.visited = ninja_noop - - # We make lstat a no-op because it is only used for SONAME - # symlinks which we're not producing. - SCons.Node.FS.LocalFS.lstat = ninja_noop - - # This is a slow method that isn't memoized. We make it a noop - # since during our generation we will never use the results of - # this or change the results. - SCons.Node.FS.is_up_to_date = ninja_noop - - # We overwrite stat and WhereIs with eternally memoized - # implementations. See the docstring of ninja_stat and - # ninja_whereis for detailed explanations. - SCons.Node.FS.LocalFS.stat = ninja_stat - SCons.Util.WhereIs = ninja_whereis - - # Monkey patch get_csig and get_contents for some classes. It - # slows down the build significantly and we don't need contents or - # content signatures calculated when generating a ninja file since - # we're not doing any SCons caching or building. - SCons.Executor.Executor.get_contents = ninja_contents(SCons.Executor.Executor.get_contents) - SCons.Node.Alias.Alias.get_contents = ninja_contents(SCons.Node.Alias.Alias.get_contents) - SCons.Node.FS.File.get_contents = ninja_contents(SCons.Node.FS.File.get_contents) - SCons.Node.FS.File.get_csig = ninja_csig(SCons.Node.FS.File.get_csig) - SCons.Node.FS.Dir.get_csig = ninja_csig(SCons.Node.FS.Dir.get_csig) - SCons.Node.Alias.Alias.get_csig = ninja_csig(SCons.Node.Alias.Alias.get_csig) - - # Ignore CHANGED_SOURCES and CHANGED_TARGETS. We don't want those - # to have effect in a generation pass because the generator - # shouldn't generate differently depending on the current local - # state. Without this, when generating on Windows, if you already - # had a foo.obj, you would omit foo.cpp from the response file. 
Do the same for UNCHANGED. - SCons.Executor.Executor._get_changed_sources = SCons.Executor.Executor._get_sources - SCons.Executor.Executor._get_changed_targets = SCons.Executor.Executor._get_targets - SCons.Executor.Executor._get_unchanged_sources = SCons.Executor.Executor._get_sources - SCons.Executor.Executor._get_unchanged_targets = SCons.Executor.Executor._get_targets - - # Replace false action messages with nothing. - env["PRINT_CMD_LINE_FUNC"] = ninja_print_conf_log - - # This reduces unnecessary subst_list calls to add the compiler to - # the implicit dependencies of targets. Since we encode full paths - # in our generated commands we do not need these slow subst calls - # as executing the command will fail if the file is not found - # where we expect it. - env["IMPLICIT_COMMAND_DEPENDENCIES"] = False - - # This makes SCons more aggressively cache MD5 signatures in the - # SConsign file. - env.SetOption("max_drift", 1) - - # The Serial job class is SIGNIFICANTLY (almost twice as) faster - # than the Parallel job class for generating Ninja files. So we - # monkey the Jobs constructor to only use the Serial Job class. - SCons.Job.Jobs.__init__ = ninja_always_serial - - # The environment variable NINJA_SYNTAX points to the - # ninja_syntax.py module from the ninja sources found here: - # https://github.com/ninja-build/ninja/blob/master/misc/ninja_syntax.py - # - # This should be vendored into the build sources and it's location - # set in NINJA_SYNTAX. This code block loads the location from - # that variable, gets the absolute path to the vendored file, gets - # it's parent directory then uses importlib to import the module - # dynamically. 
- ninja_syntax_file = env[NINJA_SYNTAX] - if isinstance(ninja_syntax_file, str): - ninja_syntax_file = env.File(ninja_syntax_file).get_abspath() - ninja_syntax_mod_dir = os.path.dirname(ninja_syntax_file) - sys.path.append(ninja_syntax_mod_dir) - ninja_syntax_mod_name = os.path.basename(ninja_syntax_file) - ninja_syntax = importlib.import_module(ninja_syntax_mod_name.replace(".py", "")) - - global NINJA_STATE - NINJA_STATE = NinjaState(env, ninja_syntax) - - # Here we will force every builder to use an emitter which makes the ninja - # file depend on it's target. This forces the ninja file to the bottom of - # the DAG which is required so that we walk every target, and therefore add - # it to the global NINJA_STATE, before we try to write the ninja file. - def ninja_file_depends_on_all(target, source, env): - if not any("conftest" in str(t) for t in target): - env.Depends(ninja_file, target) - return target, source - - # The "Alias Builder" isn't in the BUILDERS map so we have to - # modify it directly. - SCons.Environment.AliasBuilder.emitter = ninja_file_depends_on_all - - for _, builder in env["BUILDERS"].items(): - try: - emitter = builder.emitter - if emitter is not None: - builder.emitter = SCons.Builder.ListEmitter( - [ - emitter, - ninja_file_depends_on_all, - ], - ) - else: - builder.emitter = ninja_file_depends_on_all - # Users can inject whatever they want into the BUILDERS - # dictionary so if the thing doesn't have an emitter we'll - # just ignore it. - except AttributeError: - pass - - # Here we monkey patch the Task.execute method to not do a bunch of - # unnecessary work. If a build is a regular builder (i.e not a conftest and - # not our own Ninja builder) then we add it to the NINJA_STATE. Otherwise we - # build it like normal. This skips all of the caching work that this method - # would normally do since we aren't pulling any of these targets from the - # cache. 
- # - # In the future we may be able to use this to actually cache the build.ninja - # file once we have the upstream support for referencing SConscripts as File - # nodes. - def ninja_execute(self): - global NINJA_STATE - - target = self.targets[0] - target_name = str(target) - if target_name != ninja_file_name and "conftest" not in target_name: - NINJA_STATE.add_build(target) - else: - target.build() - - SCons.Taskmaster.Task.execute = ninja_execute - - # Make needs_execute always return true instead of determining out of - # date-ness. - SCons.Script.Main.BuildTask.needs_execute = lambda x: True - - # We will eventually need to overwrite TempFileMunge to make it - # handle persistent tempfiles or get an upstreamed change to add - # some configurability to it's behavior in regards to tempfiles. - # - # Set all three environment variables that Python's - # tempfile.mkstemp looks at as it behaves differently on different - # platforms and versions of Python. - os.environ["TMPDIR"] = env.Dir("$BUILD_DIR/response_files").get_abspath() - os.environ["TEMP"] = os.environ["TMPDIR"] - os.environ["TMP"] = os.environ["TMPDIR"] - if not os.path.isdir(os.environ["TMPDIR"]): - env.Execute(SCons.Defaults.Mkdir(os.environ["TMPDIR"])) - - env["TEMPFILE"] = NinjaNoResponseFiles diff --git a/site_scons/site_tools/protobuf_compiler.py b/site_scons/site_tools/protobuf_compiler.py deleted file mode 100644 index 6094ea2549e..00000000000 --- a/site_scons/site_tools/protobuf_compiler.py +++ /dev/null @@ -1,431 +0,0 @@ -# Copyright 2022 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -"""Protobuf Compiler Scons Tool.""" - -import contextlib -import os -import subprocess -import tempfile - -import SCons - - -# context manager copied from -# https://stackoverflow.com/a/57701186/1644736 -@contextlib.contextmanager -def temporary_filename(suffix=None): - """Context that introduces a temporary file. - - Creates a temporary file, yields its name, and upon context exit, deletes it. - (In contrast, tempfile.NamedTemporaryFile() provides a 'file' object and - deletes the file as soon as that file object is closed, so the temporary file - cannot be safely re-opened by another library or process.) - - Args: - suffix: desired filename extension (e.g. '.mp4'). - - Yields: - The name of the temporary file. 
- """ - try: - f = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) - tmp_name = f.name - f.close() - yield tmp_name - finally: - os.unlink(tmp_name) - - -def get_gen_type_and_dir(env, gen_type): - # Utility function for parsing out the gen type and desired gen dir - if SCons.Util.is_String(gen_type): - gen_out_dir = None - elif SCons.Util.is_List(gen_type) and len(gen_type) == 1: - gen_type = gen_type[0] - gen_out_dir = None - elif SCons.Util.is_List(gen_type) and len(gen_type) == 2: - gen_out_dir = gen_type[1] - gen_type = gen_type[0] - else: - raise ValueError( - f"Invalid generation type {gen_type}, must be string of gen type, or list of gen type and gen out dir." - ) - return (gen_type, gen_out_dir) - - -def protoc_emitter(target, source, env): - new_targets = [] - gen_types = env.subst_list("$PROTOC_GEN_TYPES", target=target, source=source) - base_file_name = os.path.splitext(target[0].get_path())[0] - for gen_type in gen_types: - # Check for valid requested gen type. - gen_type, gen_out_dir = get_gen_type_and_dir(env, gen_type) - - if gen_type not in env["_PROTOC_SUPPORTED_GEN_TYPES"]: - raise ValueError( - f"Requested protoc gen output of {gen_type}, but only {env['_PROTOC_SUPPORTED_GEN_TYPES']} are currenlty supported." - ) - - if gen_out_dir: - base_file_name = os.path.join( - env.Dir(gen_out_dir).get_path(), - os.path.split(SCons.Util.splitext(target[0].get_path())[0])[1], - ) - - # Create the targets by extensions list for this type in the desired gen dir. - exts = env["_PROTOC_SUPPORTED_GEN_TYPES"][gen_type] - new_targets += [env.File(f"{base_file_name}{ext}") for ext in exts] - - if gen_types: - # Setup the dependency file. - # This is little weird currently, because of the limitation of ninja and multiple - # outputs. The base file name can change for each gen type, so in this case we are - # taking the last one. 
This works if all gen outs are in the same dir and makes ninja - # happy, but if there are multiple gen_out dirs, then in a scons only build the deps - # is gened to the last in the list, which is awkward, but because this is only refernced - # as a target throughout the rest of tool, it works fine in scons build. - dep_file = env.File(f"{base_file_name}.protodeps") - new_targets += [dep_file] - - # Create targets for any listed plugins. - plugins = env.get("PROTOC_PLUGINS", []) - for name in plugins: - out_dir = plugins[name].get("gen_out") - exts = plugins[name].get("exts", []) - - if out_dir: - base_file_name = os.path.join( - env.Dir(out_dir).get_path(), - os.path.split(SCons.Util.splitext(target[0].get_path())[0])[1], - ) - - new_targets += [env.File(f"{base_file_name}{ext}") for ext in exts] - - return new_targets, source - - -def protoc_scanner(node, env, path): - deps = [] - - # Need to depend on the compiler and any plugins. - plugins = env.get("PROTOC_PLUGINS", {}) - for name in plugins: - deps.append(env.File(env.subst(plugins[name].get("plugin")))) - deps.append(env.File("$PROTOC")) - - # For scanning the proto dependencies from within the proto files themselves, - # there are two ways (with out writing a custom reader) to do it. One is with the - # output depends file and other other is with a tool the protobuf project supplies. - # The problem with the depends files, is you must first run the command before you can - # get the dependencies, which has some downsides: - # https://scons.org/doc/4.4.0/HTML/scons-user.html#idp105548894482512 - # - # Using the reader provided by protobuf project works, but you must have access to the - # proto which gives this functionality. - # - # Scanners will run multiple times during the building phase, revisiting as new dependencies - # from the original scan are completed. Here we will use both methods, because in the case - # you have an existing dep file you can get more dependency information on the first scan. 
- if str(node).endswith(".protodeps"): - if os.path.exists(node.get_path()): - # This code was mostly ripped from SCons ParseDepends function - try: - with open(node.get_path(), "r") as fp: - lines = SCons.Util.LogicalLines(fp).readlines() - except IOError: - pass - else: - lines = [l for l in lines if l[0] != "#"] - for line in lines: - try: - target, depends = line.split(":", 1) - except (AttributeError, ValueError): - # Throws AttributeError if line isn't a string. Can throw - # ValueError if line doesn't split into two or more elements. - pass - else: - deps += [env.File(d) for d in depends.split()] - - if os.path.exists(env.File("$PROTOC").abspath) and os.path.exists( - env.File("$PROTOC_DESCRIPTOR_PROTO").abspath - ): - # First we generate a the command line so we can extract the proto_paths as they - # used for finding imported protos. Then we run the command and output the - # descriptor set to a file for use later. The descriptor set is output as binary data - # intended to be read in by other protos. In this case the second command does that - # and extracts the dependencies. 
- source = node.sources[0] - with temporary_filename() as temp_filename: - cmd_list, _, _ = env["BUILDERS"]["Protoc"].action.process( - [node], [source], env, executor=None - ) - - paths = [ - str(proto_path) - for proto_path in cmd_list[0] - if str(proto_path).startswith("--proto_path=") - ] - cmd = ( - [env.File("$PROTOC").path] - + paths - + [ - "--include_imports", - f"--descriptor_set_out={temp_filename}", - source.srcnode().path, - ] - ) - - subprocess.run(cmd) - with open(temp_filename) as f: - cmd = ( - [env.File("$PROTOC").path] - + paths - + [ - "--decode=google.protobuf.FileDescriptorSet", - str(env.File("$PROTOC_DESCRIPTOR_PROTO")), - ] - ) - - p = subprocess.run(cmd, stdin=f, capture_output=True) - for line in p.stdout.decode().splitlines(): - if line.startswith(' name: "'): - file = line[len(' name: "') : -1] - for path in paths: - proto_file = os.path.join(path.replace("--proto_path=", ""), file) - if os.path.exists(proto_file) and proto_file != str( - source.srcnode() - ): - dep_node = env.File(proto_file) - if dep_node not in deps: - deps += [env.File(proto_file)] - break - - return sorted(deps, key=lambda dep: dep.path) - - -protoc_scanner = SCons.Scanner.Scanner(function=protoc_scanner) - - -def get_cmd_line_dirs(env, target, source): - source_dir = os.path.dirname(source[0].srcnode().path) - target_dir = os.path.dirname(target[0].get_path()) - - return target_dir, source_dir - - -def gen_types(source, target, env, for_signature): - # This subst function is for generating the command line --proto_path and desired - # --TYPE_out options. 
- cmd_flags = "" - gen_types = env.subst_list("$PROTOC_GEN_TYPES", target=target, source=source) - if gen_types: - for gen_type in gen_types: - gen_type, gen_out_dir = get_gen_type_and_dir(env, gen_type) - exts = tuple(env["_PROTOC_SUPPORTED_GEN_TYPES"][gen_type]) - - gen_targets = [t for t in target if str(t).endswith(exts)] - if gen_targets: - out_dir, proto_path = get_cmd_line_dirs(env, gen_targets, source) - cmd_flags += f" --proto_path={proto_path} --{gen_type}_out={out_dir}" - - # This depends out only works if there is at least one gen out - for t in target: - if str(t).endswith(".protodeps"): - cmd_flags = f"--dependency_out={t} " + cmd_flags - - return cmd_flags - - -def gen_types_str(source, target, env, for_signature): - # This generates the types from the list of types requested by the user - # for the pretty build output message. Any invalid types are caught in the emitter. - gen_types = [] - for gen_type in env.subst_list("$PROTOC_GEN_TYPES", target=target, source=source): - gen_type, gen_out_dir = get_gen_type_and_dir(env, gen_type) - gen_types += [str(gen_type)] - - return ", ".join(gen_types) - - -def gen_plugins(source, target, env, for_signature): - # Plugins are user customizable ways to modify the generation and generate - # additional files if desired. This extracts the desired plugins from the environment - # and formats them to be suitable for the command line. 
- plugins_cmds = [] - plugins = env.get("PROTOC_PLUGINS", []) - - for name in plugins: - plugin = plugins[name].get("plugin") - exts = plugins[name].get("exts") - if plugin and exts: - out_dir = plugins[name].get("gen_out", ".") - options = plugins[name].get("options", []) - - # A custom out command for this plugin, options to the plugin can - # be passed here with colon separating - cmd_line = f"--{name}_out=" - for opt in options: - cmd_line += f"{opt}:" - - gen_targets = [t for t in target if str(t).endswith(tuple(exts))] - if gen_targets: - out_dir, proto_path = get_cmd_line_dirs(env, gen_targets, source) - cmd_line += out_dir - - # specify the plugin binary - cmd_line += ( - f" --proto_path={proto_path} --plugin=protoc-gen-{name}={env.File(plugin).path}" - ) - plugins_cmds += [cmd_line] - else: - print( - f"Failed to process PROTOC plugin, need valid plugin and extensions {name}: {plugins[name]}" - ) - - gen_types = env.subst_list("$PROTOC_GEN_TYPES", target=target, source=source) - # In the case the command did not include any standard gen types, we add a command line - # entry so the depends file is still written - if not gen_types: - for t in target: - if str(t).endswith(".protodeps"): - plugins_cmds += [f"--dependency_out={t}"] - - return " ".join(plugins_cmds) - - -def generate(env): - ProtocBuilder = SCons.Builder.Builder( - action=SCons.Action.Action("$PROTOCCOM", "$PROTOCCOMSTR"), - emitter=protoc_emitter, - src_suffix=".proto", - suffix=".cc", - target_scanner=protoc_scanner, - ) - - env.Append(SCANNERS=protoc_scanner) - env["BUILDERS"]["Protoc"] = ProtocBuilder - - env["PROTOC"] = env.get("PROTOC", env.WhereIs("protoc")) - env["PROTOCCOM"] = ( - "$PROTOC $_PROTOCPATHS $_PROTOC_GEN_TYPES $_PROTOC_PLUGINS $PROTOCFLAGS $SOURCE" - ) - env["PROTOCCOMSTR"] = ( - "Generating $_PROTOC_GEN_TYPES_STR Protocol Buffers from ${SOURCE}" - if not env.Verbose() - else "" - ) - - # Internal subst function vars - env["_PROTOC_GEN_TYPES"] = gen_types - 
env["_PROTOC_GEN_TYPES_STR"] = gen_types_str - env["_PROTOC_PLUGINS"] = gen_plugins - env["_PROTOCPATHS"] = "${_concat(PROTOPATH_PREFIX, PROTOCPATHS, PROTOPATH_SUFFIX, __env__)}" - - env["PROTOPATH_PREFIX"] = "--proto_path=" - env["PROTOPATH_SUFFIX"] = "" - - # Somewhat safe cross tool dependency - if hasattr(env, "NinjaGenResponseFileProvider"): - env.NinjaRule( - rule="PROTO", - command="$env$cmd", - description="Generating protocol buffers $out", - deps="gcc", - use_depfile=True, - depfile="$protodep", - ) - - def gen_protobuf_provider(env, rule, tool): - def protobuf_provider(env, node, action, targets, sources, executor=None): - provided_rule, variables, tool_command = env.NinjaGetGenericShellCommand( - node, action, targets, sources, executor - ) - - t_dirs = [os.path.dirname(t.get_path()) for t in targets] - if len(set(t_dirs)) > 1: - raise SCons.Errors.BuildError( - node=node, - errstr="Due to limitations with ninja tool and using phonies for multiple targets, protoc must generate all generated output for a single command to the same directory.", - ) - for t in targets: - if str(t).endswith(".protodeps"): - variables["protodep"] = str(t) - return "PROTO", variables, tool_command - - return protobuf_provider - - def robust_rule_mapping(var, rule, tool): - provider = gen_protobuf_provider(env, rule, tool) - env.NinjaRuleMapping("${" + var + "}", provider) - env.NinjaRuleMapping(env[var], provider) - - robust_rule_mapping("PROTOCCOM", "PROTO", "$PROTOC") - - # TODO create variables to support other generation types, might require a more flexible - # builder setup - env["_PROTOC_SUPPORTED_GEN_TYPES"] = {"cpp": [".pb.cc", ".pb.h"]} - - # User facing customizable variables - - # PROTOC_GEN_TYPES can be a list of strings, where - # each string is the gen type desired, or it could - # a list of lists, where each list contains first - # the type, the the desired output dir, if no - # dir is specified the scons will build it at the location - # of the source proto 
file, accounting for variant - # dirs. e.g. - # env["PROTOC_GEN_TYPES"] = [ - # 'cpp', - # ['java', "$BUILD_DIR/java_gen_source"] - # ] - env["PROTOC_GEN_TYPES"] = [] - - # PROTOC_PLUGINS allows customization of the plugins - # for the command lines. It should be a dict of dicts where - # the keys are the names of the plugins, and the plugin must - # specify the plugin binary file path and a list of extensions - # to use on the output files. Optionally you can specify a list - # of options to pass the plugin and a gen out directory. e.g: - # env['PROTOC_PLUGINS']={ - # 'grpc': { - # 'plugin': '$PROTOC_GRPC_PLUGIN', - # 'options': ['generate_mock_code=true'], - # 'gen_out': "$BUILD_DIR/grpc_gen" - # 'exts': ['.grpc.pb.cc', '.grpc.pb.h'], - # }, - # 'my_plugin': { - # 'plugin': '/usr/bin/my_custom_plugin', - # 'exts': ['.pb.txt'], - # } - # }, - env["PROTOC_PLUGINS"] = {} - - # This is a proto which allows dependent protos to be extracted - # generally this is in protobuf src tree at google/protobuf/descriptor.proto - env["PROTOC_DESCRIPTOR_PROTO"] = "google/protobuf/descriptor.proto" - - env["PROTOCFLAGS"] = SCons.Util.CLVar("") - env["PROTOCPATHS"] = SCons.Util.CLVar("") - - -def exists(env): - return True diff --git a/site_scons/site_tools/separate_debug.py b/site_scons/site_tools/separate_debug.py deleted file mode 100644 index 611fd27d7ef..00000000000 --- a/site_scons/site_tools/separate_debug.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -import SCons - - -def _update_builder(env, builder): - old_scanner = builder.target_scanner - old_path_function = old_scanner.path_function - - def new_scanner(node, env, path=()): - results = old_scanner.function(node, env, path) - origin = getattr(node.attributes, "debug_file_for", None) - if origin is not None: - origin_results = old_scanner(origin, env, path) - for origin_result in origin_results: - origin_result_debug_files = getattr( - origin_result.attributes, "separate_debug_files", None - ) - if origin_result_debug_files is not None: - results.extend(origin_result_debug_files) - return results - - builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, - path_function=old_path_function, - ) - - base_action = builder.action - if not isinstance(base_action, SCons.Action.ListAction): - base_action = SCons.Action.ListAction([base_action]) - - # TODO: Make variables for dsymutil and strip, and for the action - # strings. We should really be running these tools as found by - # xcrun by default. We should achieve that by upgrading the - # site_scons/site_tools/xcode.py tool to search for these for - # us. We could then also remove a lot of the compiler and sysroot - # setup from the etc/scons/xcode_*.vars files, which would be a - # win as well. 
- if env.TargetOSIs("darwin"): - base_action.list.extend( - [ - SCons.Action.Action( - "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}$SEPDBG_SUFFIX", - "$DSYMUTILCOMSTR", - ), - SCons.Action.Action( - "$STRIP -S ${TARGET}", - "$DEBUGSTRIPCOMSTR", - ), - ] - ) - elif env.TargetOSIs("posix"): - base_action.list.extend( - [ - SCons.Action.Action( - "$OBJCOPY --only-keep-debug $TARGET ${TARGET}$SEPDBG_SUFFIX", - "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR", - ), - SCons.Action.Action( - "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}$SEPDBG_SUFFIX ${TARGET}", - "$DEBUGSTRIPCOMSTR", - ), - ] - ) - else: - pass - - builder.action = base_action - - base_emitter = builder.emitter - - def new_emitter(target, source, env): - debug_files = [] - if env.TargetOSIs("darwin"): - # There isn't a lot of great documentation about the structure of dSYM bundles. - # For general bundles, see: - # - # https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html - # - # But we expect to find two files in the bundle. An - # Info.plist file under Contents, and a file with the same - # name as the target under Contents/Resources/DWARF. 
- - target0 = env.File(target[0]) - dsym_dir_name = str(target[0]) + ".dSYM" - dsym_dir = env.Dir(dsym_dir_name, directory=target0.get_dir()) - - plist_file = env.File("Contents/Info.plist", directory=dsym_dir) - setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - plist_file.attributes, - "aib_additional_directory", - "{}/Contents".format(dsym_dir_name), - ) - - dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir) - - dwarf_file = env.File(target0.name, directory=dwarf_dir) - setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM") - setattr( - dwarf_file.attributes, - "aib_additional_directory", - "{}/Contents/Resources/DWARF".format(dsym_dir_name), - ) - - debug_files.extend([plist_file, dwarf_file]) - - elif env.TargetOSIs("posix"): - debug_file = env.File(f"{target[0]}$SEPDBG_SUFFIX") - debug_files.append(debug_file) - elif env.TargetOSIs("windows"): - debug_file = env.File(env.subst("${PDB}", target=target)) - debug_files.append(debug_file) - else: - pass - - # Establish bidirectional linkages between the target and each debug file by setting - # attributes on th nodes. We use these in the scanner above to ensure that transitive - # dependencies among libraries are projected into transitive dependencies between - # debug files. - for debug_file in debug_files: - setattr(debug_file.attributes, "debug_file_for", target[0]) - setattr(target[0].attributes, "separate_debug_files", debug_files) - - # On Windows, we don't want to emit the PDB. The mslink.py - # SCons tool is already doing so, so otherwise it would be - # emitted twice. This tool only needed to adorn the node with - # the various attributes as needed, so we just return the - # original target, source tuple. 
- if env.TargetOSIs("windows"): - return target, source - - return (target + debug_files, source) - - new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter]) - builder.emitter = new_emitter - - -def generate(env): - if not exists(env): - return - - if env.TargetOSIs("darwin"): - env["SEPDBG_SUFFIX"] = ".dSYM" - - if env.get("DSYMUTIL", None) is None: - env["DSYMUTIL"] = env.WhereIs("dsymutil") - - if env.get("STRIP", None) is None: - env["STRIP"] = env.WhereIs("strip") - - if not env.Verbose(): - env.Append( - DSYMUTILCOMSTR="Generating debug info for $TARGET into ${TARGET}.dSYM", - DEBUGSTRIPCOMSTR="Stripping debug info from ${TARGET}", - ) - - elif env.TargetOSIs("posix"): - env["SEPDBG_SUFFIX"] = env.get("SEPDBG_SUFFIX", ".debug") - if env.get("OBJCOPY", None) is None: - env["OBJCOPY"] = env.Whereis("objcopy") - - if not env.Verbose(): - env.Append( - OBJCOPY_ONLY_KEEP_DEBUG_COMSTR="Generating debug info for $TARGET into ${TARGET}${SEPDBG_SUFFIX}", - DEBUGSTRIPCOMSTR="Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}${SEPDBG_SUFFIX}", - ) - - for builder in ["Program", "SharedLibrary", "LoadableModule"]: - _update_builder(env, env["BUILDERS"][builder]) - - -def exists(env): - if env.TargetOSIs("darwin"): - if env.get("DSYMUTIL", None) is None and env.WhereIs("dsymutil") is None: - return False - if env.get("STRIP", None) is None and env.WhereIs("strip") is None: - return False - elif env.TargetOSIs("posix"): - if env.get("OBJCOPY", None) is None and env.WhereIs("objcopy") is None: - return False - return True diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py deleted file mode 100644 index dc6f9ab5518..00000000000 --- a/site_scons/site_tools/split_dwarf.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import os - -import SCons - -_splitDwarfFlag = "-gsplit-dwarf" - -# Cribbed from Tool/cc.py and Tool/c++.py. It would be better if -# we could obtain this from SCons. 
-_CSuffixes = [".c"] -if not SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CSuffixes.append(".C") - -_CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] -if SCons.Util.case_sensitive_suffixes(".c", ".C"): - _CXXSuffixes.append(".C") - - -def _dwo_emitter(target, source, env): - new_targets = [] - for t in target: - base, ext = SCons.Util.splitext(str(t)) - if not any(ext == env[osuffix] for osuffix in ["OBJSUFFIX", "SHOBJSUFFIX"]): - continue - dwotarget = (t.builder.target_factory or env.File)(base + env["DWOSUFFIX"]) - new_targets.append(dwotarget) - targets = target + new_targets - return (targets, source) - - -def _dwp_emitter(target, source, env): - if "conftest" not in str(target[0]) and env.get("SPLIT_DWARF_DWP_FILES"): - # Check if the information regarding where the dwo files is located are stored in - # the binary or a separate debug file. - if len(target) > 1 and os.path.splitext(str(target[1]))[1] == env.get("SEPDBG_SUFFIX"): - target_file = target[1] - else: - target_file = target[0] - - dwp_file = env.DWP( - target=env.File(os.path.splitext(target_file.name)[0] + env["DWPSUFFIX"]), - source=target_file, - ) - env.NoCache(dwp_file) - - if hasattr(env, "AutoInstall") and env.get("AIB_COMPONENT"): - env.AutoInstall( - "$PREFIX_BINDIR", - dwp_file, - AIB_COMPONENT=env.get("AIB_COMPONENT"), - AIB_ROLE="debug", - AIB_EXTRA_COMPONENTS=env.get("AIB_EXTRA_COMPONENTS", []), - ) - - return target, source - - -def options(opts): - opts.AddVariables( - ("DWP", "Path to dwp binary."), - ("SPLIT_DWARF_DWP_FILES", "Set to enable DWP file creation from split dwarf."), - ) - - -def generate(env): - # these suffixes are not adjustable to the underlying tools (gcc) - # but we leave them adjustable in scons in case this changes in certain circumstances - env["DWOSUFFIX"] = env.get("DWOSUFFIX", ".dwo") - env["DWPSUFFIX"] = env.get("DWPSUFFIX", ".dwp") - - env.Append(CCFLAGS=[_splitDwarfFlag]) - - for object_builder in SCons.Tool.createObjBuilders(env): - 
emitterdict = object_builder.builder.emitter - for suffix in emitterdict.keys(): - if suffix not in _CSuffixes + _CXXSuffixes: - continue - base = emitterdict[suffix] - emitterdict[suffix] = SCons.Builder.ListEmitter( - [ - base, - _dwo_emitter, - ] - ) - - for builder in ["Program"]: - builder = env["BUILDERS"][builder] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([base_emitter, _dwp_emitter]) - builder.emitter = new_emitter - - env["DWP"] = env.get("DWP", "dwp") - env["BUILDERS"]["DWP"] = SCons.Builder.Builder( - action=SCons.Action.Action( - "$DWP -e $SOURCE -o $TARGET", - "Building dwp file from $SOURCE" if not env.Verbose() else "", - ) - ) - - -def exists(env): - return any(_splitDwarfFlag in env[f] for f in ["CCFLAGS", "CFLAGS", "CXXFLAGS"]) diff --git a/site_scons/site_tools/tapilink.py b/site_scons/site_tools/tapilink.py deleted file mode 100644 index d2e0854204b..00000000000 --- a/site_scons/site_tools/tapilink.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import SCons - -# TODO: DRY this with abilink.py by moving duplicated code out to a common -# support module. - - -def _detect(env): - try: - tapi = env["TAPI"] - if not tapi: - return None - return tapi - except KeyError: - pass - - return env.WhereIs("tapi") - - -def _add_emitter(builder): - base_emitter = builder.emitter - - def new_emitter(target, source, env): - new_targets = [] - for t in target: - base, ext = SCons.Util.splitext(str(t)) - if not ext == env["SHLIBSUFFIX"]: - continue - - tbd_target = (t.builder.target_factory or env.File)(base + ".tbd") - new_targets.append(tbd_target) - - tbd_no_uuid_target = (t.builder.target_factory or env.File)(base + ".tbd.no_uuid") - new_targets.append(tbd_no_uuid_target) - - setattr(t.attributes, "tbd", tbd_no_uuid_target) - targets = target + new_targets - return (targets, source) - - new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter]) - builder.emitter = new_emitter - - -def _add_scanner(builder): - old_scanner = builder.target_scanner - path_function = old_scanner.path_function - - def new_scanner(node, env, path): - return (getattr(env.Entry(o).attributes, "tbd", o) for o in old_scanner(node, env, path)) - - builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, - path_function=path_function, - ) - - -def _add_action(builder): - actions = builder.action - - # The first inbocation of TAPI is to make the tbd file that the - # linker will actually use when linking. This must contain the - # dylib UUID or the link will fail. The second creates a version - # that does not contain the UUID. We use that as the ABI file. 
If - # invoking TAPI proves to be expensive, we could address this by - # instead post-processing the "real" .tbd file to strip out the - # UUID, and then potentially even feed it into a hash algorithm. - builder.action = actions + SCons.Action.Action( - [ - "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}", - "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}", - ] - ) - - -def exists(env): - result = _detect(env) is not None - return result - - -def generate(env): - if not exists(env): - return - - builder = env["BUILDERS"]["SharedLibrary"] - _add_emitter(builder) - _add_action(builder) - _add_scanner(builder) - _add_scanner(env["BUILDERS"]["Program"]) - _add_scanner(env["BUILDERS"]["LoadableModule"]) diff --git a/site_scons/site_tools/task_limiter.py b/site_scons/site_tools/task_limiter.py deleted file mode 100644 index cc54cc7e741..00000000000 --- a/site_scons/site_tools/task_limiter.py +++ /dev/null @@ -1,97 +0,0 @@ -import re - -import SCons - -task_limiter_patterns = {} - - -def setup_task_limiter( - env, name, concurrency_ratio=0.75, builders=None, source_file_regex=".*", target_file_regex=".*" -): - global task_limiter_patterns - - task_limiter_patterns[name] = {} - task_limiter_patterns[name]["source"] = re.compile(source_file_regex) - task_limiter_patterns[name]["target"] = re.compile(target_file_regex) - - # We need to convert the ratio value into a int that corrlates to a specific - # number of concurrent jobs allowed - concurrency_ratio = float(concurrency_ratio) - if concurrency_ratio <= 0.0: - env.FatalError( - f"The concurrency ratio for {name} must be a positive, got {max_concurrency}" - ) - - if concurrency_ratio > 1.0: - concurrency_ratio = 1.0 - - max_concurrency = env.GetOption("num_jobs") * concurrency_ratio - max_concurrency = round(max_concurrency) - if max_concurrency < 1.0: - max_concurrency = 1.0 - - max_concurrency = int(max_concurrency) - - # A bound map of stream (as in stream of work) name to side-effect - # file. 
Since SCons will not allow tasks with a shared side-effect - # to execute concurrently, this gives us a way to limit link jobs - # independently of overall SCons concurrency. - concurrent_stream_map = dict() - - def task_limiter_emitter(target, source, env): - global task_limiter_patterns - nonlocal name - - matched = False - for s_file in source: - if re.search(task_limiter_patterns[name]["source"], s_file.path): - matched = True - break - - if not matched: - for t_file in target: - if re.search(task_limiter_patterns[name]["target"], t_file.path): - matched = True - break - if matched: - se_name = f"#{name}-stream{hash(str(target[0])) % max_concurrency}" - se_node = concurrent_stream_map.get(se_name, None) - if not se_node: - se_node = env.Entry(se_name) - # This may not be necessary, but why chance it - env.NoCache(se_node) - concurrent_stream_map[se_name] = se_node - env.SideEffect(se_node, target) - - return (target, source) - - if isinstance(builders, dict): - for target_builder, suffixes in builders.items(): - builder = env["BUILDERS"][target_builder] - emitterdict = builder.builder.emitter - for suffix in emitterdict.keys(): - if suffix not in suffixes: - continue - base = emitterdict[suffix] - emitterdict[suffix] = SCons.Builder.ListEmitter( - [ - base, - task_limiter_emitter, - ] - ) - else: - for target_builder in builders: - builder = env["BUILDERS"][target_builder] - base_emitter = builder.emitter - new_emitter = SCons.Builder.ListEmitter([base_emitter, task_limiter_emitter]) - builder.emitter = new_emitter - - return max_concurrency - - -def exists(env): - return True - - -def generate(env): - env.AddMethod(setup_task_limiter, "SetupTaskLimiter") diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py deleted file mode 100644 index 80759b5843b..00000000000 --- a/site_scons/site_tools/thin_archive.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2020 MongoDB Inc. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import re -import subprocess - -import SCons - - -def exists(env): - if "AR" not in env: - return False - - ar = env.subst(env["AR"]) - if not ar: - return False - - # If the user has done anything confusing with ARFLAGS, bail out. We want to find - # an item in ARFLAGS of the exact form 'rc'. 
- if "rc" not in env["ARFLAGS"]: - return False - - pipe = SCons.Action._subproc( - env, - SCons.Util.CLVar(ar) + ["--version"], - stdin="devnull", - stderr="devnull", - stdout=subprocess.PIPE, - ) - if pipe.wait() != 0: - return False - - found = False - for line in pipe.stdout: - if found: - continue # consume all data - found = re.search(r"^GNU ar|^LLVM", line.decode("utf-8")) - - return bool(found) - - -def _add_emitter(builder): - base_emitter = builder.emitter - - def new_emitter(target, source, env): - for t in target: - setattr(t.attributes, "thin_archive", True) - return (target, source) - - new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter]) - builder.emitter = new_emitter - - -def _add_scanner(builder): - old_scanner = builder.target_scanner - path_function = old_scanner.path_function - - def new_scanner(node, env, path): - old_results = old_scanner(node, env, path) - - # Ninja uses only timestamps for implicit dependencies so will - # always rebuild a program whose archive has been updated even - # if has the same content signature. 
- if env.get("GENERATING_NINJA", False): - return old_results - - new_results = [] - for base in old_results: - new_results.append(base) - if getattr(env.Entry(base).attributes, "thin_archive", None): - new_results.extend(base.children()) - - return new_results - - builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, - path_function=path_function, - ) - - -def generate(env): - if not exists(env): - return - - env["ARFLAGS"] = SCons.Util.CLVar( - [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]] - ) - - # Disable running ranlib, since we added 's' above - env["RANLIBCOM"] = "" - env["RANLIBCOMSTR"] = "Skipping ranlib for thin archive $TARGET" - - for builder in ["StaticLibrary", "SharedArchive"]: - _add_emitter(env["BUILDERS"][builder]) - - for builder in ["SharedLibrary", "LoadableModule", "Program"]: - _add_scanner(env["BUILDERS"][builder]) diff --git a/site_scons/site_tools/validate_cache_dir.py b/site_scons/site_tools/validate_cache_dir.py deleted file mode 100644 index 967b37261b8..00000000000 --- a/site_scons/site_tools/validate_cache_dir.py +++ /dev/null @@ -1,311 +0,0 @@ -# Copyright 2021 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import datetime -import json -import os -import pathlib -import shutil -import sys -import tempfile -import traceback -from timeit import default_timer as timer - -import SCons - -cache_debug_suffix = " (target: %s, cachefile: %s) " - - -class InvalidChecksum(SCons.Errors.BuildError): - def __init__(self, src, dst, reason, cache_csig="", computed_csig=""): - self.message = f"ERROR: md5 checksum {reason} for {src} ({dst})" - self.cache_csig = cache_csig - self.computed_csig = computed_csig - - def __str__(self): - return self.message - - -class CacheTransferFailed(SCons.Errors.BuildError): - def __init__(self, src, dst, reason): - self.message = f"ERROR: cachedir transfer {reason} while transfering {src} to {dst}" - - def __str__(self): - return self.message - - -class UnsupportedError(SCons.Errors.BuildError): - def __init__(self, class_name, feature): - self.message = f"{class_name} does not support {feature}" - - def __str__(self): - return self.message - - -class CacheDirValidate(SCons.CacheDir.CacheDir): - def __init__(self, path): - self.json_log = None - super().__init__(path) - - @staticmethod - def get_ext(): - # Cache prune script is allowing only directories with this extension - # if this is changed, cache prune script should also be updated. 
- return ".cksum" - - @staticmethod - def get_file_contents_path(default_cachefile_path): - return ( - pathlib.Path(default_cachefile_path) - / pathlib.Path(default_cachefile_path).name.split(".")[0] - ) - - @staticmethod - def get_bad_cachefile_path(cksum_cachefile_dir): - return pathlib.Path(cksum_cachefile_dir) / "bad_cache_file" - - @staticmethod - def get_hash_path(cksum_cachefile_path): - return pathlib.Path(cksum_cachefile_path).parent / "content_hash" - - @staticmethod - def get_cachedir_path(path): - return str(pathlib.Path(path + CacheDirValidate.get_ext())) - - @classmethod - def copy_from_cache(cls, env, src, dst): - if not str(pathlib.Path(src)).endswith(cls.get_ext()): - return super().copy_from_cache(env, src, dst) - - if env.cache_timestamp_newer: - raise UnsupportedError(cls.__name__, "timestamp-newer") - - src_file = cls.get_file_contents_path(src) - # using os.path.exists here because: https://bugs.python.org/issue35306 - if os.path.exists(str(cls.get_bad_cachefile_path(src))): - raise InvalidChecksum( - cls.get_hash_path(src_file), dst, "cachefile marked as bad checksum" - ) - - csig = None - try: - with open(cls.get_hash_path(src_file), "rb") as f_out: - csig = f_out.read().decode().strip() - except OSError as ex: - raise InvalidChecksum( - cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}" - ) from ex - else: - if not csig: - raise InvalidChecksum( - cls.get_hash_path(src_file), dst, "no content_hash data found" - ) - - with tempfile.TemporaryDirectory() as tmpdirname: - dst_tmp = pathlib.Path(tmpdirname) / os.path.basename(dst) - try: - shutil.copy2(src_file, dst_tmp) - except OSError as ex: - raise CacheTransferFailed(src_file, dst, f"failed to copy from cache: {ex}") from ex - else: - shutil.move(dst_tmp, dst) - - new_csig = SCons.Util.MD5filesignature( - dst, chunksize=SCons.Node.FS.File.md5_chunksize * 1024 - ) - - if csig != new_csig: - raise InvalidChecksum( - cls.get_hash_path(src_file), - dst, - f"checksums don't 
match {csig} != {new_csig}", - cache_csig=csig, - computed_csig=new_csig, - ) - - @classmethod - def copy_to_cache(cls, env, src, dst): - # dst is bsig/file from cachepath method, so - # we make sure to make the bsig dir first - dst = pathlib.Path(dst) - dst_file = dst / dst.name.split(".")[0] - - try: - os.makedirs(dst, exist_ok=True) - super().copy_to_cache(env, src, dst_file) - except OSError as ex: - raise CacheTransferFailed(src, dst_file, f"failed to copy to cache: {ex}") from ex - - try: - with open(cls.get_hash_path(dst_file), "w") as f_out: - f_out.write(env.File(src).get_content_hash()) - except OSError as ex: - raise CacheTransferFailed(src, dst_file, f"failed to create hash file: {ex}") from ex - - def log_json_cachedebug(self, node, pushing=False, duration=0): - if pushing and (node.nocache or SCons.CacheDir.cache_readonly or "conftest" in str(node)): - return - - cachefile = self.get_file_contents_path(self.cachepath(node)[1]) - if node.fs.exists(cachefile): - cache_event = "double_push" if pushing else "hit" - else: - cache_event = "push" if pushing else "miss" - - self.CacheDebugJson({"type": cache_event}, node, cachefile, duration) - - def retrieve(self, node): - if not self.is_enabled(): - return False - try: - start = timer() - result = super().retrieve(node) - self.log_json_cachedebug(node, duration=timer() - start) - return result - except InvalidChecksum as ex: - self.print_cache_issue(node, ex) - self.clean_bad_cachefile(node, ex.cache_csig, ex.computed_csig) - return False - except (UnsupportedError, CacheTransferFailed) as ex: - self.print_cache_issue(node, ex) - return False - - def push(self, node): - if self.is_readonly() or not self.is_enabled(): - return - try: - start = timer() - result = super().push(node) - self.log_json_cachedebug(node, pushing=True, duration=timer() - start) - return result - except CacheTransferFailed as ex: - self.print_cache_issue(node, ex) - return False - - def CacheDebugJson(self, json_data, target, 
cachefile, duration, size=None): - if ( - SCons.CacheDir.cache_debug - and SCons.CacheDir.cache_debug != "-" - and self.json_log is None - ): - self.json_log = open(SCons.CacheDir.cache_debug + ".json", "a") - - if self.json_log is not None: - if size is None: - try: - size = os.path.getsize(cachefile) - except FileNotFoundError: - size = "FileNotFoundError" - except NotADirectoryError: - size = "NotADirectoryError" - - cksum_cachefile = str(pathlib.Path(cachefile).parent) - if cksum_cachefile.endswith(self.get_ext()): - cachefile = cksum_cachefile - - json_data.update( - { - "timestamp": str(datetime.datetime.now(datetime.timezone.utc)), - "duration": duration, - "size": size, - "realfile": str(target), - "cachefile": pathlib.Path(cachefile).name, - "cache_dir": str(pathlib.Path(cachefile).parent.parent), - } - ) - - # capture exception information - if sys.exc_info()[1]: - json_data.update({"error": self._format_exception_msg()}) - - self.json_log.write(json.dumps(json_data) + "\n") - - def CacheDebug(self, fmt, target, cachefile): - super().CacheDebug(fmt, target, cachefile) - # Capture exception information into the cache debug log - if sys.exc_info()[1] and self.debugFP: - self.debugFP.write(self._format_exception_msg()) - - def _format_exception_msg(self): - return ( - "An exception was detected while using the cache:\n" - + " " - + "\n ".join("".join(traceback.format_exc()).split("\n")) - ) + "\n" - - def _log(self, log_msg, json_info, realnode, cachefile): - self.CacheDebug(log_msg + cache_debug_suffix, realnode, cachefile) - self.CacheDebugJson(json_info, realnode, cachefile, 0) - - def print_cache_issue(self, node, ex): - cksum_dir = pathlib.Path(self.cachepath(node)[1]) - self._log(str(ex), {"type": "error"}, node, cksum_dir) - - def clean_bad_cachefile(self, node, cache_csig, computed_csig): - cksum_dir = pathlib.Path(self.cachepath(node)[1]) - rm_path = f"{cksum_dir}.{SCons.CacheDir.cache_tmp_uuid}.del" - try: - try: - 
pathlib.Path(self.get_bad_cachefile_path(cksum_dir)).touch() - except FileExistsError: - pass - cksum_dir.replace(rm_path) - except OSError as ex: - msg = f"Failed to rename {cksum_dir} to {rm_path}: {ex}" - self._log(msg, {"type": "error"}, node, cksum_dir) - return - - msg = f"Removed bad cachefile {cksum_dir} found in cache." - self._log( - msg, - { - "type": "invalid_checksum", - "cache_csig": cache_csig, - "computed_csig": computed_csig, - }, - node, - cksum_dir, - ) - - def get_cachedir_csig(self, node): - cachedir, cachefile = self.cachepath(node) - if cachefile and os.path.exists(cachefile): - with open(self.get_hash_path(self.get_file_contents_path(cachefile)), "rb") as f_out: - return f_out.read().decode() - - def cachepath(self, node): - if not self.is_enabled(): - return None, None - - dir, path = super().cachepath(node) - if node.fs.exists(path): - return dir, path - return dir, str(self.get_cachedir_path(path)) - - -def exists(env): - return True - - -def generate(env): - if not env.get("CACHEDIR_CLASS"): - env["CACHEDIR_CLASS"] = CacheDirValidate diff --git a/site_scons/site_tools/vcredist.py b/site_scons/site_tools/vcredist.py deleted file mode 100644 index 54057c35cf5..00000000000 --- a/site_scons/site_tools/vcredist.py +++ /dev/null @@ -1,234 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -import os -import re -import subprocess -import winreg - - -def exists(env): - result = "msvc" in env["TOOLS"] - return result - - -# How to locate the Merge Modules path is described in: -# -# - VS2019: https://docs.microsoft.com/en-us/visualstudio/releases/2019/redistribution#visual-c-runtime-files -# - VS2017: https://docs.microsoft.com/en-us/visualstudio/productinfo/2017-redistribution-vs#visual-c-runtime-files -# - VS2015: https://docs.microsoft.com/en-us/visualstudio/productinfo/2015-redistribution-vs#visual-c-runtime -# -# However, please note that for VS2017 an VS2019, the documented paths are incorrect, per this -# discussion: -# -# - https://developercommunity.visualstudio.com/content/problem/828060/what-are-the-correct-location-to-search-for-vc-crt.html -# -# This tool uses the currently undocumented but correct paths. - -# The keys are the values SCons accepts for TARGET_ARCH to name -# different windows targets, the values are the tag that VS uses -# for the associated redistributable for that platform. -# -# TODO: Expand this map as needed. 
-target_arch_expansion_map = { - "amd64": "x64", - "arm": None, - "arm64": "arm64", - "emt64": "x64", - "i386": "x86", - "x86": "x86", - "x86_64": "x64", -} - - -def _get_programfiles(): - result = os.getenv("ProgramFiles(x86)") - # When we run this under cygwin, the environment is broken, fall - # back to hard coded C:\Program Files (x86) - if result is None: - result = "C:\\Program Files (x86)" - if not os.path.isdir(result): - return None - return result - - -def _get_merge_module_name_for_feature(env, feature): - version_components = env["MSVC_VERSION"].split(".") - return "Microsoft_VC{msvc_major}{msvc_minor}_{feature}_{target_arch}.msm".format( - msvc_major=version_components[0], - msvc_minor=version_components[1], - feature=feature, - target_arch=target_arch_expansion_map[env.subst("$TARGET_ARCH")], - ) - - -def generate(env): - if not exists(env): - return - - env.Tool("msvc") - - env.AddMethod(_get_merge_module_name_for_feature, "GetMergeModuleNameForFeature") - - # Obtain the major and minor versions of the curently configured MSVC - # and ensure that we are using a VC 14 based toolchain. - # - # Please see - # https://en.wikipedia.org/wiki/Microsoft_Visual_C%2B%2B#Internal_version_numbering - # for details on the various version numbers in play for - # the Microsoft toolchain. - msvc_major, msvc_minor = env["MSVC_VERSION"].split(".") - if msvc_major != "14": - return - - # We may or may not need to figure out the path to Program files, - # depending on the various paths we take throught this logic. - programfilesx86 = None - - # TODO: Getting this path is a start, but we should later provide - # an abstraction over the names of the merge modules - # themselves. They seem to have the form - # Microsoft_VC{msvc_major}{msvc_minor}_{Feature}_{target_arch}.msm. It - # would be useful to provide an env.MergeModuleNameFor('feature') - # that consulted the values we have found here and used - # TARGET_ARCH (normalized somehow) to select the right one. 
- mergemodulepath = None - - # On VS2015 the merge modules are in the program files directory, - # not under the VS install dir. - if msvc_minor == "0": - if not programfilesx86: - programfilesx86 = _get_programfiles() - if not programfilesx86: - return - - mergemodulepath = os.path.join(programfilesx86, "Common Files", "Merge Modules") - if os.path.isdir(mergemodulepath): - env["MSVS"]["VCREDISTMERGEMODULEPATH"] = mergemodulepath - - if "VSINSTALLDIR" not in env["MSVS"]: - # Compute a VS version based on the VC version. VC 14.0 is VS 2015, VC - # 14.1 is VS 2017. Also compute the next theoretical version by - # incrementing the major version by 1. Then form a range from this - # that we can use as an argument to the -version flag to vswhere. - vs_version = int(msvc_major) + int(msvc_minor) - vs_version_next = vs_version + 1 - vs_version_range = "[{vs_version}.0, {vs_version_next}.0)".format( - vs_version=vs_version, vs_version_next=vs_version_next - ) - - if not programfilesx86: - programfilesx86 = _get_programfiles() - if not programfilesx86: - return - - # Use vswhere (it has a fixed stable path) to query where Visual Studio is installed. - env["MSVS"]["VSINSTALLDIR"] = ( - subprocess.check_output( - [ - os.path.join( - programfilesx86, - "Microsoft Visual Studio", - "Installer", - "vswhere.exe", - ), - "-version", - vs_version_range, - "-property", - "installationPath", - "-nologo", - ] - ) - .decode("utf-8") - .strip() - ) - - vsinstall_dir = env["MSVS"]["VSINSTALLDIR"] - - # Combine and set the full merge module path - redist_root = os.path.join(vsinstall_dir, "VC", "Redist", "MSVC") - if not os.path.isdir(redist_root): - return - env["MSVS"]["VCREDISTROOT"] = redist_root - - # Check the registry key that has the runtime lib version - try: - # TOOO: This x64 needs to be abstracted away. Is it the host - # arch, or the target arch? My guess is host. 
- vsruntime_key_name = ( - "SOFTWARE\\Microsoft\\VisualStudio\\{msvc_major}.0\\VC\\Runtimes\\x64".format( - msvc_major=msvc_major - ) - ) - vsruntime_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, vsruntime_key_name) - vslib_version, vslib_version_type = winreg.QueryValueEx(vsruntime_key, "Version") - except WindowsError: - return - - # Fallback to directory search if we don't find the expected version - redist_path = os.path.join( - redist_root, re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1) - ) - if not os.path.isdir(redist_path): - redist_path = None - dirs = os.listdir(redist_root) - dirs.sort() - for dir in reversed(dirs): - candidate = os.path.join(redist_root, dir) - if os.path.isdir(candidate): - redist_path = candidate - break - else: - return - env["MSVS"]["VCREDISTPATH"] = redist_path - - if mergemodulepath is None and msvc_minor != "0": - mergemodulepath = os.path.join(redist_path, "MergeModules") - if os.path.isdir(mergemodulepath): - env["MSVS"]["VCREDISTMERGEMODULEPATH"] = mergemodulepath - - # Keep these in preference order. The way with the {} in between - # the dots appears to be the more modern form, but we select the - # older one when available to minimize disruption to existing - # automation that expects the redist executable embedded in our - # packages to have this shape. Some architectures, like arm64, - # don't appear to be provided under that syntax though, so we - # include the newer form for that purpose. If Microsoft ever stops - # providing the old form, we will automatically roll forward to - # the new form. 
- vcredist_search_template_sequence = [ - "vcredist_{}.exe", - "vc_redist.{}.exe", - ] - - expansion = target_arch_expansion_map.get(env.subst("$TARGET_ARCH"), None) - if not expansion: - return - - vcredist_candidates = [c.format(expansion) for c in vcredist_search_template_sequence] - for candidate in vcredist_candidates: - candidate = os.path.join(redist_path, candidate) - if os.path.isfile(candidate): - break - else: - return - env["MSVS"]["VCREDISTEXE"] = candidate diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py deleted file mode 100644 index d40528d3a54..00000000000 --- a/site_scons/site_tools/xcode.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2020 MongoDB Inc. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -import os - - -def exists(env): - return env.Detect("xcrun") - - -def generate(env): - if not exists(env): - return - - if "DEVELOPER_DIR" in os.environ: - env["ENV"]["DEVELOPER_DIR"] = os.environ["DEVELOPER_DIR"] - print( - "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands" - ) diff --git a/site_scons/third_party/ninja_syntax.py b/site_scons/third_party/ninja_syntax.py deleted file mode 100644 index ebe6490d8df..00000000000 --- a/site_scons/third_party/ninja_syntax.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/python - -"""Python module for generating .ninja files. - -Note that this is emphatically not a required piece of Ninja; it's -just a helpful utility for build-file-generation systems that already -use Python. -""" - -import re -import textwrap - -def escape_path(word): - return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:') - -class Writer(object): - def __init__(self, output, width=78): - self.output = output - self.width = width - - def newline(self): - self.output.write('\n') - - def comment(self, text): - for line in textwrap.wrap(text, self.width - 2, break_long_words=False, - break_on_hyphens=False): - self.output.write('# ' + line + '\n') - - def variable(self, key, value, indent=0): - if value is None: - return - if isinstance(value, list): - value = ' '.join(filter(None, value)) # Filter out empty strings. 
- self._line('%s = %s' % (key, value), indent) - - def pool(self, name, depth): - self._line('pool %s' % name) - self.variable('depth', depth, indent=1) - - def rule(self, name, command, description=None, depfile=None, - generator=False, pool=None, restat=False, rspfile=None, - rspfile_content=None, deps=None): - self._line('rule %s' % name) - self.variable('command', command, indent=1) - if description: - self.variable('description', description, indent=1) - if depfile: - self.variable('depfile', depfile, indent=1) - if generator: - self.variable('generator', '1', indent=1) - if pool: - self.variable('pool', pool, indent=1) - if restat: - self.variable('restat', '1', indent=1) - if rspfile: - self.variable('rspfile', rspfile, indent=1) - if rspfile_content: - self.variable('rspfile_content', rspfile_content, indent=1) - if deps: - self.variable('deps', deps, indent=1) - - def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, - variables=None, implicit_outputs=None, pool=None): - outputs = as_list(outputs) - out_outputs = [escape_path(x) for x in outputs] - all_inputs = [escape_path(x) for x in as_list(inputs)] - - if implicit: - implicit = [escape_path(x) for x in as_list(implicit)] - all_inputs.append('|') - all_inputs.extend(implicit) - if order_only: - order_only = [escape_path(x) for x in as_list(order_only)] - all_inputs.append('||') - all_inputs.extend(order_only) - if implicit_outputs: - implicit_outputs = [escape_path(x) - for x in as_list(implicit_outputs)] - out_outputs.append('|') - out_outputs.extend(implicit_outputs) - - self._line('build %s: %s' % (' '.join(out_outputs), - ' '.join([rule] + all_inputs))) - if pool is not None: - self._line(' pool = %s' % pool) - - if variables: - if isinstance(variables, dict): - iterator = iter(variables.items()) - else: - iterator = iter(variables) - - for key, val in iterator: - self.variable(key, val, indent=1) - - return outputs - - def include(self, path): - self._line('include %s' % path) 
- - def subninja(self, path): - self._line('subninja %s' % path) - - def default(self, paths): - self._line('default %s' % ' '.join(as_list(paths))) - - def _count_dollars_before_index(self, s, i): - """Returns the number of '$' characters right in front of s[i].""" - dollar_count = 0 - dollar_index = i - 1 - while dollar_index > 0 and s[dollar_index] == '$': - dollar_count += 1 - dollar_index -= 1 - return dollar_count - - def _line(self, text, indent=0): - """Write 'text' word-wrapped at self.width characters.""" - leading_space = ' ' * indent - while len(leading_space) + len(text) > self.width: - # The text is too wide; wrap if possible. - - # Find the rightmost space that would obey our width constraint and - # that's not an escaped space. - available_space = self.width - len(leading_space) - len(' $') - space = available_space - while True: - space = text.rfind(' ', 0, space) - if (space < 0 or - self._count_dollars_before_index(text, space) % 2 == 0): - break - - if space < 0: - # No such space; just use the first unescaped space we can find. - space = available_space - 1 - while True: - space = text.find(' ', space + 1) - if (space < 0 or - self._count_dollars_before_index(text, space) % 2 == 0): - break - if space < 0: - # Give up on breaking. - break - - self.output.write(leading_space + text[0:space] + ' $\n') - text = text[space+1:] - - # Subsequent lines are continuations, so indent them. - leading_space = ' ' * (indent+2) - - self.output.write(leading_space + text + '\n') - - def close(self): - self.output.close() - - -def as_list(input): - if input is None: - return [] - if isinstance(input, list): - return input - return [input] - - -def escape(string): - """Escape a string such that it can be embedded into a Ninja file without - further interpretation.""" - assert '\n' not in string, 'Ninja syntax does not allow newlines' - # We only have one special metacharacter: '$'. 
- return string.replace('$', '$$') - - -def expand(string, vars, local_vars={}): - """Expand a string containing $vars as Ninja would. - - Note: doesn't handle the full Ninja variable syntax, but it's enough - to make configure.py's use of it work. - """ - def exp(m): - var = m.group(1) - if var == '$': - return '$' - return local_vars.get(var, vars.get(var, '')) - return re.sub(r'\$(\$|\w*)', exp, string) diff --git a/src/SConscript b/src/SConscript deleted file mode 100644 index c6f8b693d93..00000000000 --- a/src/SConscript +++ /dev/null @@ -1,73 +0,0 @@ -# -*- mode: python; -*- -# -# This is the principle SConscript file, invoked by the SConstruct. Its job is -# to delegate to any and all per-module SConscript files. - - -Import("env") -Import("module_sconscripts") -Import("use_libunwind") - -env = env.Clone() - -# Add any "global" dependencies here. This is where we make every build node -# depend on a list of other build nodes, such as an allocator or libunwind -# or libstdx or similar. -env.AppendUnique( - LIBDEPS_GLOBAL=[ - "$BUILD_DIR/third_party/gperftools/tcmalloc_minimal" - if env["MONGO_ALLOCATOR"] in ["tcmalloc-gperftools"] - else [], - "$BUILD_DIR/third_party/tcmalloc/tcmalloc" - if env["MONGO_ALLOCATOR"] in ["tcmalloc-google"] - else [], - "$BUILD_DIR/third_party/unwind/unwind" if use_libunwind else [], - ], -) - -# NOTE: We must do third_party first as it adds methods to the environment -# that we need in the mongo sconscript -env.SConscript("third_party/SConscript", must_exist=1, exports=["env"]) - -# Ensure our subsequent modifications are not shared with the -# third_party env. Normally that SConscript would have done a clone, -# so handled that isolation itself, but it doesn't since it needs to -# alter the env in ways that we need to use up here. -env = env.Clone() - -# Inject common dependencies from third_party globally for all core mongo code -# and modules. 
-env.InjectThirdParty( - libraries=[ - "abseil-cpp", - "boost", - "croaring", - "fmt", - "immer", - "tomcrypt_md5", - "murmurhash3", - "safeint", - "stemmer", - "variant", - ] -) - -# It would be somewhat better if this could be applied down in -# `src/mongo/SConscript`, since the goal of doing it here rather than -# up in SConstruct is to only enforce this rule for code that we wrote -# and exclude third party sources. However, doing it in -# `src/mongo/SConscript`` would also exclude applying it for modules, -# and we do want this enabled for enterprise. - -if env.ToolchainIs("gcc"): - # With GCC, use the implicit fallthrough flag variant that doesn't - # care about your feeble attempts to use comments to explain yourself. - env.AddToCCFLAGSIfSupported("-Wimplicit-fallthrough=5") -elif env.ToolchainIs("clang"): - env.AddToCCFLAGSIfSupported("-Wimplicit-fallthrough") - -# Run the core mongodb SConscript. -env.SConscript("mongo/SConscript", must_exist=1, exports=["env"]) - -# Run SConscripts for any modules in play -env.SConscript(module_sconscripts, must_exist=1, exports=["env"]) diff --git a/src/mongo/SConscript b/src/mongo/SConscript deleted file mode 100644 index 281ca6d1751..00000000000 --- a/src/mongo/SConscript +++ /dev/null @@ -1,116 +0,0 @@ -# -*- mode: python; -*- - -import sys - -Import( - [ - "env", - "use_libunwind", - "version_extra", - "version_parts", - ] -) - -env = env.Clone() - -env.InjectMongoIncludePaths() - -env.AppendUnique( - FORCEINCLUDES=[ - "mongo/platform/basic.h", - ], -) - -env.SConscript( - must_exist=1, - dirs=[ - "base", - "bson", - "client", - "crypto", - "db", - "dbtests", - "embedded", - "executor", - "idl", - "installer", - "logv2", - "platform", - "resmoke", - "rpc", - "s", - "scripting", - "shell", - "stdx", - "tools", - "transport", - "unittest", - "util", - "watchdog", - ], - exports=[ - "env", - ], -) - -sys.path.append("src/mongo") - -# NOTE: The 'base' library does not really belong here. 
Its presence -# here is temporary. Do not add to this library, do not remove from -# it, and do not declare other libraries in this file. -baseEnv = env.Clone() - -if use_libunwind: - baseEnv.InjectThirdParty("unwind") -baseEnv.InjectThirdParty("intel_decimal128") - -# Stage the top-level mongodb banners - -distsrc = env.Dir("#distsrc") -env.AutoInstall( - target="$PREFIX", - source=[ - distsrc.File("README"), - # TODO: we need figure out what to do when we use a different - # THIRD-PARTY-NOTICES for example, with Embedded - distsrc.File("THIRD-PARTY-NOTICES"), - distsrc.File("MPL-2"), - ], - AIB_COMPONENT="common", - AIB_COMPONENTS_EXTRA=["dist", "dist-test"], - AIB_ROLE="base", -) - -# If no module has introduced a file named LICENSE-Enterprise.txt then this -# is a Community build, so inject the AGPL and the Community license -enterprise_license = [ - banner for banner in env["MODULE_BANNERS"] if banner.name == "LICENSE-Enterprise.txt" -] -if not enterprise_license: - env.Append(MODULE_BANNERS=[distsrc.File("LICENSE-Community.txt")]) - -# All module banners get staged to the top level of the tarfile, so we -# need to fail if we are going to have a name collision. -module_banner_filenames = set([f.name for f in env["MODULE_BANNERS"]]) -if not len(module_banner_filenames) == len(env["MODULE_BANNERS"]): - # TODO: Be nice and identify conflicts in error. 
- env.FatalError("ERROR: Filename conflicts exist in module banners.") - -env.AutoInstall( - target="$PREFIX", - source=env.get("MODULE_BANNERS", []), - AIB_COMPONENT="common", - AIB_COMPONENTS_EXTRA=["dist", "dist-test"], - AIB_ROLE="base", -) - -if env.TargetOSIs("darwin", "macOS"): - env.AutoInstall( - target="$PREFIX", - source=[ - env.File("#/etc/macos_mongodb.plist"), - ], - AIB_COMPONENT="common", - AIB_COMPONENTS_EXTRA=["dist", "dist-test"], - AIB_ROLE="base", - ) diff --git a/src/mongo/base/SConscript b/src/mongo/base/SConscript deleted file mode 100644 index f811df9d2ae..00000000000 --- a/src/mongo/base/SConscript +++ /dev/null @@ -1,16 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="base_test", - source=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="status_bm", - source=[], -) diff --git a/src/mongo/bson/SConscript b/src/mongo/bson/SConscript deleted file mode 100644 index 37713e02c61..00000000000 --- a/src/mongo/bson/SConscript +++ /dev/null @@ -1,43 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "column", - "util", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="bson_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="bson_bm", - source=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.CppLibfuzzerTest( - target="bson_validate_fuzzer", - source=[], -) - -env.CppLibfuzzerTest( - target="fromjson_fuzzer", - source=[], -) - -env.CppIntegrationTest( - target="bson_integration_test", - source=[], -) diff --git a/src/mongo/bson/column/SConscript b/src/mongo/bson/column/SConscript deleted file mode 100644 index 016fc070e78..00000000000 --- a/src/mongo/bson/column/SConscript +++ /dev/null @@ -1,57 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.BazelProgram( - target="bson_column_encoder", - source=[], -) - -env.Benchmark( - target="simple8b_bm", - 
source=[], - BAZEL_BENCHMARK_TAG="bsoncolumn_bm", -) - -env.CppUnitTest( - target="bsoncolumn_test", - source=[], -) - -env.Benchmark( - target="bson_column_bm", - source=[], - BAZEL_BENCHMARK_TAG="bsoncolumn_bm", -) - -env.CppLibfuzzerTest( - target="simple8b_fuzzer", - source=[], -) - -env.CppLibfuzzerTest( - target="bson_column_validate_fuzzer", - source=[], -) - -env.CppLibfuzzerTest( - target="bsoncolumn_decompress_fuzzer", - source=[], -) - -env.CppLibfuzzerTest( - target="bsoncolumn_builder_fuzzer", - source=[], -) - -env.CppLibfuzzerTest( - target="bsoncolumn_decompress_paths_fuzzer", - source=[], -) - -env.CppLibfuzzerTest( - target="bsoncolumnbuilder_reopen_fuzzer", - source=[], -) diff --git a/src/mongo/bson/util/SConscript b/src/mongo/bson/util/SConscript deleted file mode 100644 index fc14df9f8fe..00000000000 --- a/src/mongo/bson/util/SConscript +++ /dev/null @@ -1,19 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.BazelProgram( - target="bson_corpus_gen", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="bson_util_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark(target="bufbuilder_bm", source=[]) diff --git a/src/mongo/client/SConscript b/src/mongo/client/SConscript deleted file mode 100644 index 91c66e5d9e9..00000000000 --- a/src/mongo/client/SConscript +++ /dev/null @@ -1,76 +0,0 @@ -# -*- mode: python -*- - -Import("env") -Import("get_option") -Import("wiredtiger") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=["sdam"], - exports=["env"], -) - - -def shouldBuildGRPC(myEnv): - return myEnv.TargetOSIs("linux") and get_option("ssl") == "on" - - -clientDriverEnv = env.Clone() -clientDriverEnv.InjectThirdParty("asio") -if shouldBuildGRPC(env): - clientDriverEnv.InjectThirdParty(libraries=["grpc"]) - -if wiredtiger: - env.CppUnitTest( - target="client_test", - source=[], - LIBDEPS=[], - ) - -env.CppIntegrationTest( - target="replica_set_monitor_integration_test", - source=[], - LIBDEPS=[], -) 
- -env.CppUnitTest( - target="client_rs_test", - source=[], - LIBDEPS=[], -) - -if shouldBuildGRPC(env): - clientDriverEnv.CppUnitTest( - target="dbclient_grpc_stream_test", - source=[], - LIBDEPS=[], - ) - -env.CppUnitTest( - target="scoped_db_connection_pool_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="dbclient_connection_test", - source=[], - LIBDEPS=[], -) - -# The following two tests cannot be combined because the second one -# needs to be filtered out for the repl and sharding variants of the -# integration tests. -env.CppIntegrationTest( - target="client_connpool_integration_test", - source=[], - LIBDEPS=[], -) - -env.CppIntegrationTest( - target="client_dbclient_connection_integration_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/client/sdam/SConscript b/src/mongo/client/sdam/SConscript deleted file mode 100644 index adf04a1261f..00000000000 --- a/src/mongo/client/sdam/SConscript +++ /dev/null @@ -1,23 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -sdam_json_test = env.BazelProgram( - target="sdam_json_test", - source=[], - AIB_COMPONENT="sdam-json-test", -)[0] - -server_selection_json_test = env.BazelProgram( - target="server_selection_json_test", - source=[], - AIB_COMPONENT="server-selection-json-test", -)[0] - -env.CppUnitTest( - target="sdam_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/crypto/SConscript b/src/mongo/crypto/SConscript deleted file mode 100644 index 54108ddb312..00000000000 --- a/src/mongo/crypto/SConscript +++ /dev/null @@ -1,30 +0,0 @@ -# -*- mode: python -*- - -Import( - [ - "env", - "http_client", - "ssl_provider", - ] -) - -env = env.Clone() -env.CppUnitTest( - target="crypto_test", - source=[], - LIBDEPS=[], -) - -if ssl_provider == "openssl": - env.CppUnitTest( - target="jws_validator_test", - source=[], - LIBDEPS=[], - ) - -env.Benchmark( - target="crypto_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) diff --git 
a/src/mongo/db/SConscript b/src/mongo/db/SConscript deleted file mode 100644 index 98e23f7544e..00000000000 --- a/src/mongo/db/SConscript +++ /dev/null @@ -1,324 +0,0 @@ -# -*- mode: python -*- - -import platform - -Import( - [ - "env", - "have_sasl_lib", - "http_client", - "version_extra", - "version_parts", - "wiredtiger", - ] -) - -env = env.Clone() - -# Ideally 's2' would be scoped narrowly but it is spammed in all over the place by -# db/geo unfortunately. -env.InjectThirdParty( - libraries=[ - "s2", - ] -) - - -env.SConscript( - must_exist=1, - dirs=[ - "admission", - "auth", - "catalog", - "collection_crud", - "commands", - "concurrency", - "exec", - "fts", - "ftdc", - "geo", - "process_health", - "index", - "matcher", - "memory_tracking", - "op_observer", - "pipeline", - "query", - "repl", - "s", - "session", - "sorter", - "stats", - "storage", - "timeseries", - "transaction", - "ttl", - "update", - "views", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="diagnostic_printers_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="server_base_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="profile_filter_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.CppUnitTest( - target="profile_filter_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="profile_settings_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="change_stream_pre_images_manager_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="default_baton_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.CppUnitTest( - target="prepare_conflict_tracker_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="client_out_of_line_executor_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.CppUnitTest( - target="mongod_options_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - 
-env.CppUnitTest( - target="replica_set_endpoint_test", - source=[], - LIBDEPS=[], -) - -asioEnv = env.Clone() -asioEnv.InjectThirdParty("asio") - -env.BazelProgram( - target="mongotrafficreader", - source=[], - LIBDEPS=[], -) - -if env.TargetOSIs("windows"): - generatedDbManifest = env.Substfile( - "mongod.manifest.in", - SUBST_DICT=[ - ("@mongo_version_major@", version_parts[0]), - ("@mongo_version_minor@", version_parts[1]), - ("@mongo_version_patch@", version_parts[2]), - ("@mongo_version_extra@", version_parts[3]), - ("@mongo_version_extra_str@", version_extra), - ], - ) - - env.Alias("generated-sources", generatedDbManifest) - env.Depends("mongod.res", generatedDbManifest) - -env.BazelProgram( - target="mongod", - source=[], - LIBDEPS=[], - AIB_COMPONENT="mongod", - AIB_COMPONENTS_EXTRA=[ - "core", - "default", - "devcore", - "dist", - "dist-test", - "integration-tests", - "serverless", - "serverless-test", - "servers", - ], -) - -envWithAsio = env.Clone() -envWithAsio.InjectThirdParty(libraries=["asio"]) - -if wiredtiger: - envWithAsio.CppUnitTest( - target="db_base_test", - source=[], - LIBDEPS=[], - ) - - envWithAsio.CppUnitTest( - target="change_collection_test", - source=[], - LIBDEPS=[], - ) - - envWithAsio.CppUnitTest( - target="command_test", - source=[], - LIBDEPS=[], - ) - - envWithAsio.CppUnitTest( - target="fle_test", - source=[], - LIBDEPS=[], - ) - - envWithAsio.CppUnitTest( - target="operation_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], - ) - - envWithAsio.CppUnitTest( - target="query_test", - source=[], - LIBDEPS=[], - ) - - envWithAsio.CppUnitTest( - target="read_write_concern_test", - source=[], - LIBDEPS=[], - ) - - envWithAsio.CppUnitTest( - target="session_test", - source=[], - LIBDEPS=[], - ) - -env.CppUnitTest( - target="shard_role_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="multiple_collection_accessor_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - 
target="vector_clock_test", - source=[], - LIBDEPS=[], -) - -envWithAsio.CppUnitTest( - target="db_unittest_with_config_server_test_fixture_test", - source=[], - LIBDEPS=[], -) - -asioEnv.CppIntegrationTest( - target="db_integration_test", - source=[], - LIBDEPS=[], -) - -env.CppLibfuzzerTest( - target="op_msg_fuzzer", - source=[], -) - -env.Benchmark( - target="commands_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.Benchmark( - target="operation_cpu_timer_bm", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="d_concurrency_bm", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="service_entry_point_shard_role_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="service_entry_point_shard_role_bm", source=[], LIBDEPS=[], BAZEL_BENCHMARK_TAG="sep_bm" -) - -env.Benchmark( - target="namespace_string_bm", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="collection_acquisition_bm", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="server_lifecycle_monitor_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="op_msg_fuzzer_fixture_test", - source=[], - LIBDEPS=[], -) - -if platform.system() == "Linux": - env.CppUnitTest( - target="throw_hook_impl_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], - ) diff --git a/src/mongo/db/admission/SConscript b/src/mongo/db/admission/SConscript deleted file mode 100644 index 5693548af2c..00000000000 --- a/src/mongo/db/admission/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="ingress_admission_control_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="throughput_probing_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/auth/SConscript b/src/mongo/db/auth/SConscript deleted file mode 100644 index c502e4e802a..00000000000 --- a/src/mongo/db/auth/SConscript +++ 
/dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() -env.CppUnitTest( - target="db_auth_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/catalog/SConscript b/src/mongo/db/catalog/SConscript deleted file mode 100644 index 22224ef8fe2..00000000000 --- a/src/mongo/db/catalog/SConscript +++ /dev/null @@ -1,29 +0,0 @@ -# -*- mode: python; -*- - -Import("env") -Import("wiredtiger") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "util", - ], - exports=[ - "env", - ], -) - -env.Benchmark( - target="collection_catalog_bm", - source=[], - LIBDEPS=[], -) - -if wiredtiger: - env.CppUnitTest( - target="db_catalog_test", - source=[], - LIBDEPS=[], - ) diff --git a/src/mongo/db/catalog/util/SConscript b/src/mongo/db/catalog/util/SConscript deleted file mode 100644 index 87f771143fa..00000000000 --- a/src/mongo/db/catalog/util/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_catalog_util_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/collection_crud/SConscript b/src/mongo/db/collection_crud/SConscript deleted file mode 100644 index bd84e57e40c..00000000000 --- a/src/mongo/db/collection_crud/SConscript +++ /dev/null @@ -1,9 +0,0 @@ -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_capped_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/commands/SConscript b/src/mongo/db/commands/SConscript deleted file mode 100644 index 684da890756..00000000000 --- a/src/mongo/db/commands/SConscript +++ /dev/null @@ -1,33 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "query_cmd", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="command_mirroring_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="async_command_execution_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - 
target="db_commands_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/commands/query_cmd/SConscript b/src/mongo/db/commands/query_cmd/SConscript deleted file mode 100644 index 4d8cc0dc925..00000000000 --- a/src/mongo/db/commands/query_cmd/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="map_reduce_agg_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/concurrency/SConscript b/src/mongo/db/concurrency/SConscript deleted file mode 100644 index 4780bd0aee3..00000000000 --- a/src/mongo/db/concurrency/SConscript +++ /dev/null @@ -1,27 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="lock_manager_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="exception_util_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="lock_manager_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.PrettyPrinterTest(target="lock_gdb_test.py") diff --git a/src/mongo/db/exec/SConscript b/src/mongo/db/exec/SConscript deleted file mode 100644 index d9fc84131cc..00000000000 --- a/src/mongo/db/exec/SConscript +++ /dev/null @@ -1,35 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "document_value", - "mutable_bson", - "sbe", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="db_exec_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="distinct_scan_bm", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="expression_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/exec/document_value/SConscript b/src/mongo/db/exec/document_value/SConscript deleted file mode 100644 index 27a65902a51..00000000000 --- a/src/mongo/db/exec/document_value/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env.Benchmark( - 
target="document_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) diff --git a/src/mongo/db/exec/mutable_bson/SConscript b/src/mongo/db/exec/mutable_bson/SConscript deleted file mode 100644 index 952e5ac1910..00000000000 --- a/src/mongo/db/exec/mutable_bson/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="bson_mutable_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/exec/sbe/SConscript b/src/mongo/db/exec/sbe/SConscript deleted file mode 100644 index ce6d3c279a3..00000000000 --- a/src/mongo/db/exec/sbe/SConscript +++ /dev/null @@ -1,28 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env.CppUnitTest( - target="db_sbe_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="makeobj_spec_hash_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="sbe_vm_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.Benchmark( - target="sbe_expression_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/ftdc/SConscript b/src/mongo/db/ftdc/SConscript deleted file mode 100644 index a3d04e62099..00000000000 --- a/src/mongo/db/ftdc/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() -env.CppUnitTest( - target="db_ftdc_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/fts/SConscript b/src/mongo/db/fts/SConscript deleted file mode 100644 index e54c64f90ef..00000000000 --- a/src/mongo/db/fts/SConscript +++ /dev/null @@ -1,21 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "unicode", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="db_fts_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/fts/unicode/SConscript b/src/mongo/db/fts/unicode/SConscript deleted file mode 100644 index 083a60e9d48..00000000000 --- a/src/mongo/db/fts/unicode/SConscript +++ 
/dev/null @@ -1,12 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - - -env.CppUnitTest( - target="db_fts_unicode_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/geo/SConscript b/src/mongo/db/geo/SConscript deleted file mode 100644 index bf5db245760..00000000000 --- a/src/mongo/db/geo/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_geo_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="hash_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/index/SConscript b/src/mongo/db/index/SConscript deleted file mode 100644 index b8befe2fa70..00000000000 --- a/src/mongo/db/index/SConscript +++ /dev/null @@ -1,18 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.Benchmark( - target="key_gen_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="second_half_bm", -) - -env.CppUnitTest( - target="db_index_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/index_builds/SConscript b/src/mongo/db/index_builds/SConscript deleted file mode 100644 index 6cc75fc60bf..00000000000 --- a/src/mongo/db/index_builds/SConscript +++ /dev/null @@ -1,13 +0,0 @@ -# -*- mode: python; -*- - -Import("env") -Import("wiredtiger") - -env = env.Clone() - -if wiredtiger: - env.CppUnitTest( - target="index_builds_test", - source=[], - LIBDEPS=[], - ) diff --git a/src/mongo/db/matcher/SConscript b/src/mongo/db/matcher/SConscript deleted file mode 100644 index 5fca82bfb5e..00000000000 --- a/src/mongo/db/matcher/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_matcher_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/memory_tracking/SConscript b/src/mongo/db/memory_tracking/SConscript deleted file mode 100644 index 231d4da52ec..00000000000 --- a/src/mongo/db/memory_tracking/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- 
mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="memory_tracking_test", - source=[], -) diff --git a/src/mongo/db/op_observer/SConscript b/src/mongo/db/op_observer/SConscript deleted file mode 100644 index 014be514ca5..00000000000 --- a/src/mongo/db/op_observer/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.Benchmark( - target="op_observer_bm", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_op_observer_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/pipeline/SConscript b/src/mongo/db/pipeline/SConscript deleted file mode 100644 index a39dba09af0..00000000000 --- a/src/mongo/db/pipeline/SConscript +++ /dev/null @@ -1,68 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "process_interface", - "spilling", - ], - exports=[ - "env", - ], -) - -env.Benchmark( - target="document_source_group_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="second_half_bm", -) - -env.CppUnitTest( - target="db_pipeline_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_percentile_algo_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="percentile_algo_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.CppUnitTest( - target="common_sort_key_optimization_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="group_with_acc_n_optimization_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="window_function_percentile_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.Benchmark( - target="window_function_concat_arrays_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="query_bm", -) diff --git a/src/mongo/db/pipeline/process_interface/SConscript b/src/mongo/db/pipeline/process_interface/SConscript deleted file mode 100644 index 3975d6b55ef..00000000000 --- 
a/src/mongo/db/pipeline/process_interface/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="process_interface_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/pipeline/spilling/SConscript b/src/mongo/db/pipeline/spilling/SConscript deleted file mode 100644 index 5b3ccfca37f..00000000000 --- a/src/mongo/db/pipeline/spilling/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_spilling_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/process_health/SConscript b/src/mongo/db/process_health/SConscript deleted file mode 100644 index 59f264dd78b..00000000000 --- a/src/mongo/db/process_health/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="fault_base_classes_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/SConscript b/src/mongo/db/query/SConscript deleted file mode 100644 index 3a4d3b3d333..00000000000 --- a/src/mongo/db/query/SConscript +++ /dev/null @@ -1,72 +0,0 @@ -# -*- mode: python -*- - -Import( - [ - "env", - ] -) - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "boolean_simplification", - "bson", - "ce", - "client_cursor", - "collation", - "cost_based_ranker", - "datetime", - "optimizer", - "plan_cache", - "query_settings", - "query_stats", - "query_shape", - "query_tester", - "search", - "stage_builder/sbe", - "stats", - "virtual_collection", - "write_ops", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="express_execution_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_query_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="canonical_query_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.Benchmark( - target="query_planner_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - 
-env.Benchmark( - target="point_query_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.Benchmark( - target="complex_query_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) diff --git a/src/mongo/db/query/boolean_simplification/SConscript b/src/mongo/db/query/boolean_simplification/SConscript deleted file mode 100644 index c445f73dcd1..00000000000 --- a/src/mongo/db/query/boolean_simplification/SConscript +++ /dev/null @@ -1,16 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env.CppUnitTest( - target="boolean_simplification_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="quine_mccluskey_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) diff --git a/src/mongo/db/query/bson/SConscript b/src/mongo/db/query/bson/SConscript deleted file mode 100644 index ebf58b0c943..00000000000 --- a/src/mongo/db/query/bson/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_bson_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/ce/SConscript b/src/mongo/db/query/ce/SConscript deleted file mode 100644 index e16e1e21a6e..00000000000 --- a/src/mongo/db/query/ce/SConscript +++ /dev/null @@ -1,40 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="histogram_estimation_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="maxdiff_histogram_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="generated_histograms_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="sampling_estimator_test", - source=[], -) - -env.Benchmark( - target="histogram_bm", - source=[], - BAZEL_BENCHMARK_TAG="second_half_bm", -) - -env.BazelProgram( - target="histogram_estimation_accuracy_analysis_program", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/client_cursor/SConscript b/src/mongo/db/query/client_cursor/SConscript deleted file mode 100644 
index a0e36537a5a..00000000000 --- a/src/mongo/db/query/client_cursor/SConscript +++ /dev/null @@ -1,15 +0,0 @@ -# -*- mode: python -*- - -Import( - [ - "env", - ] -) - -env = env.Clone() - -env.CppUnitTest( - target="cursor_manager_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/collation/SConscript b/src/mongo/db/query/collation/SConscript deleted file mode 100644 index 1901a0b77fd..00000000000 --- a/src/mongo/db/query/collation/SConscript +++ /dev/null @@ -1,33 +0,0 @@ -# -*- mode: python -*- - -Import("env") -Import("use_system_version_of_library") - -env = env.Clone() - -# The collator_icu library and the collator_interface_icu_test unit tests need an environment which -# has access to the third-party ICU headers. -icuEnv = env.Clone() - -if not use_system_version_of_library("icu"): - icuEnv.InjectThirdParty("icu") - # Since we are injecting the third-party ICU headers, we must also copy the same defines that we - # use to configure ICU when building ICU sources. See comment in - # src/third_party/icu4c-57.1/source/SConscript. 
- icuEnv.Append( - CPPDEFINES=[ - ("UCONFIG_NO_BREAK_ITERATION", 1), - ("UCONFIG_NO_FORMATTING", 1), - ("UCONFIG_NO_TRANSLITERATION", 1), - ("UCONFIG_NO_REGULAR_EXPRESSIONS", 1), - ("U_CHARSET_IS_UTF8", 1), - ("U_STATIC_IMPLEMENTATION", 1), - ("U_USING_ICU_NAMESPACE", 0), - ], - ) - -icuEnv.CppUnitTest( - target="db_query_collation_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/cost_based_ranker/SConscript b/src/mongo/db/query/cost_based_ranker/SConscript deleted file mode 100644 index 58081ff469d..00000000000 --- a/src/mongo/db/query/cost_based_ranker/SConscript +++ /dev/null @@ -1,9 +0,0 @@ -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="cost_based_ranker_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/datetime/SConscript b/src/mongo/db/query/datetime/SConscript deleted file mode 100644 index 8446f2f4010..00000000000 --- a/src/mongo/db/query/datetime/SConscript +++ /dev/null @@ -1,16 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="date_time_support_test", - source=[], - LIBDEPS=[], -) - -env.CppLibfuzzerTest( - target="date_time_support_fuzzer", - source=[], -) diff --git a/src/mongo/db/query/optimizer/SConscript b/src/mongo/db/query/optimizer/SConscript deleted file mode 100644 index 93a2a45fe22..00000000000 --- a/src/mongo/db/query/optimizer/SConscript +++ /dev/null @@ -1,31 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "algebra", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="optimizer_test", - source=[], - LIBDEPS=[], -) - -optimizer_gdb_test_program = env.BazelProgram( - target="optimizer_gdb_test_program", - source=[], - AIB_COMPONENT="pretty-printer-tests", - AIB_COMPONENTS_EXTRA=["dist-test"], -) -optimizer_gdb_test_program_installed = env.GetAutoInstalledFiles(optimizer_gdb_test_program[0]) - -env.PrettyPrinterTest("optimizer_gdb_test.py", 
TEST_PROGRAM=optimizer_gdb_test_program_installed) diff --git a/src/mongo/db/query/optimizer/algebra/SConscript b/src/mongo/db/query/optimizer/algebra/SConscript deleted file mode 100644 index 5b4b5c371de..00000000000 --- a/src/mongo/db/query/optimizer/algebra/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="algebra_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/plan_cache/SConscript b/src/mongo/db/query/plan_cache/SConscript deleted file mode 100644 index a1c9a76d567..00000000000 --- a/src/mongo/db/query/plan_cache/SConscript +++ /dev/null @@ -1,19 +0,0 @@ -Import( - [ - "env", - ] -) - -env = env.Clone() - -env.Benchmark( - target="plan_cache_key_encoding_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.Benchmark( - target="plan_cache_classic_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) diff --git a/src/mongo/db/query/query_settings/SConscript b/src/mongo/db/query/query_settings/SConscript deleted file mode 100644 index c5ad11faae5..00000000000 --- a/src/mongo/db/query/query_settings/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="query_settings_serialization_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="query_settings_lookup_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) diff --git a/src/mongo/db/query/query_shape/SConscript b/src/mongo/db/query/query_shape/SConscript deleted file mode 100644 index 1c26dd40991..00000000000 --- a/src/mongo/db/query/query_shape/SConscript +++ /dev/null @@ -1,15 +0,0 @@ -# -*- mode: python -*- - -Import( - [ - "env", - ] -) - -env = env.Clone() - -env.CppUnitTest( - target="db_query_query_shape_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/query_stats/SConscript b/src/mongo/db/query/query_stats/SConscript deleted file mode 100644 index 9658c6eecbb..00000000000 --- 
a/src/mongo/db/query/query_stats/SConscript +++ /dev/null @@ -1,27 +0,0 @@ -# -*- mode: python -*- - -Import( - [ - "env", - ] -) - -env = env.Clone() - -env.CppUnitTest( - target="db_query_query_stats_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="rate_limiting_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.Benchmark( - target="shapifying_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) diff --git a/src/mongo/db/query/query_tester/SConscript b/src/mongo/db/query/query_tester/SConscript deleted file mode 100644 index 8946402a74c..00000000000 --- a/src/mongo/db/query/query_tester/SConscript +++ /dev/null @@ -1,21 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.AppendUnique( - FORCEINCLUDES=[ - "mongo/platform/basic.h", - ], -) - -mongoTest = env.BazelProgram( - target="mongotest", - source=[], - AIB_COMPONENT="mongotest", - AIB_COMPONENTS_EXTRA=[ - "devcore", - "dist-test", - ], -) diff --git a/src/mongo/db/query/search/SConscript b/src/mongo/db/query/search/SConscript deleted file mode 100644 index ca113486155..00000000000 --- a/src/mongo/db/query/search/SConscript +++ /dev/null @@ -1,25 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript(must_exist=1, dirs=["mongotmock"], exports=["env"]) - -env.CppUnitTest( - target="mongot_cursor_test", - source=[], - LIBDEPS=[], -) - -env.CppIntegrationTest( - target="search_task_executors_integration_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="auth_search_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/search/mongotmock/SConscript b/src/mongo/db/query/search/mongotmock/SConscript deleted file mode 100644 index 8ff663944d0..00000000000 --- a/src/mongo/db/query/search/mongotmock/SConscript +++ /dev/null @@ -1,12 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -mongotmock = env.BazelProgram( - target="mongotmock", - source=[], - AIB_COMPONENT="mongotmock", - 
AIB_COMPONENTS_EXTRA=["dist-test"], -) diff --git a/src/mongo/db/query/stage_builder/sbe/SConscript b/src/mongo/db/query/stage_builder/sbe/SConscript deleted file mode 100644 index af4d8755ed9..00000000000 --- a/src/mongo/db/query/stage_builder/sbe/SConscript +++ /dev/null @@ -1,35 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.Benchmark( - target="sbe_builder_bm", - source=[], - BAZEL_BENCHMARK_TAG="query_bm", -) - -env.CppUnitTest( - target="abt_unit_test_infra_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="sbe_abt_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="abt_lower_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="abt_lower_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/stats/SConscript b/src/mongo/db/query/stats/SConscript deleted file mode 100644 index eda35127acb..00000000000 --- a/src/mongo/db/query/stats/SConscript +++ /dev/null @@ -1,49 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="stats_cache_loader_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="stats_cache_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="stats_path_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="type_collision_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="type_count_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="ce_histogram_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="value_utils_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark(target="value_utils_bm", source=[]) diff --git a/src/mongo/db/query/timeseries/SConscript b/src/mongo/db/query/timeseries/SConscript deleted file mode 100644 index c303005fd30..00000000000 --- a/src/mongo/db/query/timeseries/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - 
target="db_query_timeseries_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/virtual_collection/SConscript b/src/mongo/db/query/virtual_collection/SConscript deleted file mode 100644 index d7464a4a9fc..00000000000 --- a/src/mongo/db/query/virtual_collection/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="virtual_collection_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/query/write_ops/SConscript b/src/mongo/db/query/write_ops/SConscript deleted file mode 100644 index af342b0ce87..00000000000 --- a/src/mongo/db/query/write_ops/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_ops_test", - source=[], - LIBDEPS=[], -) - -env.CppIntegrationTest( - target="db_ops_integration_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/repl/SConscript b/src/mongo/db/repl/SConscript deleted file mode 100644 index 4ae37ff7fca..00000000000 --- a/src/mongo/db/repl/SConscript +++ /dev/null @@ -1,127 +0,0 @@ -# -*- mode: python -*- - -Import("env") -Import("wiredtiger") - -env = env.Clone() - -env.Benchmark( - target="oplog_entry_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="repl_bm", -) - -if wiredtiger: - env.CppUnitTest( - target="db_repl_idempotency_test", - source=[], - LIBDEPS=[], - ) - - env.CppUnitTest( - target="db_repl_misc_test", - source=[], - LIBDEPS=[], - ) - -env.CppUnitTest( - target="db_repl_coordinator_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="oplog_application_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="rollback_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="initial_syncer_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="storage_timestamp_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="repl_set_config_and_heartbeat_test", - source=[], - 
LIBDEPS=[], -) - -env.CppUnitTest( - target="replication_recovery_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="storage_interface_impl_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="topology_version_observer_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_repl_cloners_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_repl_set_aware_service_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="oplog_application_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="repl_bm", -) - -env.Benchmark( - target="oplog_applier_utils_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="repl_bm", -) - -env.Benchmark( - target="replication_consistency_markers_impl_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="repl_bm", -) - -env.Benchmark( - target="oplog_write_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="repl_bm", -) - -env.Benchmark( - target="replication_waiter_list_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="repl_bm", -) diff --git a/src/mongo/db/s/SConscript b/src/mongo/db/s/SConscript deleted file mode 100644 index 987805c1bcd..00000000000 --- a/src/mongo/db/s/SConscript +++ /dev/null @@ -1,50 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="shard_server_op_observer_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_s_shard_server_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="db_s_config_server_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="chunk_manager_refresh_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="sharding_bm", -) - -env.Benchmark( - target="migration_chunk_cloner_source_bm", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="sharding_write_router_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="sharding_bm", -) - -env.Benchmark( - target="placement_history_bm", - source=[], - LIBDEPS=[], - 
BAZEL_BENCHMARK_TAG="first_half_bm", -) diff --git a/src/mongo/db/session/SConscript b/src/mongo/db/session/SConscript deleted file mode 100644 index 2ce9b8389e2..00000000000 --- a/src/mongo/db/session/SConscript +++ /dev/null @@ -1 +0,0 @@ -# -*- mode: python -*- diff --git a/src/mongo/db/sorter/SConscript b/src/mongo/db/sorter/SConscript deleted file mode 100644 index 44afebbf94b..00000000000 --- a/src/mongo/db/sorter/SConscript +++ /dev/null @@ -1,13 +0,0 @@ -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_sorter_test", - source=[], -) - -env.Benchmark( - target="sorter_checksum_calculator_bm", - source=[], -) diff --git a/src/mongo/db/stats/SConscript b/src/mongo/db/stats/SConscript deleted file mode 100644 index e112c0a28d2..00000000000 --- a/src/mongo/db/stats/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_stats_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/storage/SConscript b/src/mongo/db/storage/SConscript deleted file mode 100644 index 852fbe0de4f..00000000000 --- a/src/mongo/db/storage/SConscript +++ /dev/null @@ -1,26 +0,0 @@ -# -*- mode: python -*- -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "key_string", - "wiredtiger", - ], - exports=[ - "env", - ], -) - -env.Benchmark( - target="storage_record_id_bm", - source=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.CppUnitTest( - target="db_storage_test", - source=[], -) diff --git a/src/mongo/db/storage/key_string/SConscript b/src/mongo/db/storage/key_string/SConscript deleted file mode 100644 index 87be2bcea6a..00000000000 --- a/src/mongo/db/storage/key_string/SConscript +++ /dev/null @@ -1,28 +0,0 @@ -Import("env") - -env = env.Clone() - -env.Benchmark( - target="storage_key_string_bm", - source=[], - BAZEL_BENCHMARK_TAG="storage_bm", -) - -ksdecode = env.BazelProgram( - target="ksdecode", - source=[], - AIB_COMPONENT="ksdecode", - 
AIB_COMPONENTS_EXTRA=[ - "dist-test", - ], -) - -env.CppUnitTest( - target="key_string_test", - source=[], -) - -env.CppLibfuzzerTest( - target="key_string_to_bson_fuzzer", - source=[], -) diff --git a/src/mongo/db/storage/wiredtiger/SConscript b/src/mongo/db/storage/wiredtiger/SConscript deleted file mode 100644 index 8cf3bf9b682..00000000000 --- a/src/mongo/db/storage/wiredtiger/SConscript +++ /dev/null @@ -1,36 +0,0 @@ -# -*- mode: python -*- -Import("env") -Import("wiredtiger") - -if not wiredtiger: - Return() - -env = env.Clone() - -env.CppUnitTest( - target="storage_wiredtiger_test", - source=[], -) - -env.CppUnitTest( - target="storage_wiredtiger_record_store_and_index_test", - source=[], -) - -env.Benchmark( - target="storage_wiredtiger_record_store_and_index_bm", - source=[], - BAZEL_BENCHMARK_TAG="storage_bm", -) - -env.Benchmark( - target="storage_wiredtiger_begin_transaction_block_bm", - source=[], - BAZEL_BENCHMARK_TAG="storage_bm", -) - -env.Benchmark( - target="write_conflict_retry_bm", - source=[], - BAZEL_BENCHMARK_TAG="storage_bm", -) diff --git a/src/mongo/db/timeseries/SConscript b/src/mongo/db/timeseries/SConscript deleted file mode 100644 index dc6fa789860..00000000000 --- a/src/mongo/db/timeseries/SConscript +++ /dev/null @@ -1,22 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "bucket_catalog", - "write_ops", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="db_timeseries_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/timeseries/bucket_catalog/SConscript b/src/mongo/db/timeseries/bucket_catalog/SConscript deleted file mode 100644 index c8fd81d95f5..00000000000 --- a/src/mongo/db/timeseries/bucket_catalog/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_bucket_catalog_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="timeseries_sizing_test", - 
source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/timeseries/write_ops/SConscript b/src/mongo/db/timeseries/write_ops/SConscript deleted file mode 100644 index 3d96281746b..00000000000 --- a/src/mongo/db/timeseries/write_ops/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_timeseries_write_ops_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/timeseries/write_ops/internal/SConscript b/src/mongo/db/timeseries/write_ops/internal/SConscript deleted file mode 100644 index f3659872b06..00000000000 --- a/src/mongo/db/timeseries/write_ops/internal/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_timeseries_write_ops_internal_test", - source=[], -) diff --git a/src/mongo/db/transaction/SConscript b/src/mongo/db/transaction/SConscript deleted file mode 100644 index 5534a3f67bc..00000000000 --- a/src/mongo/db/transaction/SConscript +++ /dev/null @@ -1,15 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_transaction_test", - source=[], -) - -env.Benchmark( - target="transaction_operations_bm", - source=[], -) diff --git a/src/mongo/db/ttl/SConscript b/src/mongo/db/ttl/SConscript deleted file mode 100644 index 878bda00d66..00000000000 --- a/src/mongo/db/ttl/SConscript +++ /dev/null @@ -1,9 +0,0 @@ -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="ttl_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/update/SConscript b/src/mongo/db/update/SConscript deleted file mode 100644 index 66ee3a2d223..00000000000 --- a/src/mongo/db/update/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_update_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/db/views/SConscript b/src/mongo/db/views/SConscript deleted file mode 100644 
index fd1c466bf40..00000000000 --- a/src/mongo/db/views/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="db_views_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/dbtests/SConscript b/src/mongo/dbtests/SConscript deleted file mode 100644 index 1aebe9968e1..00000000000 --- a/src/mongo/dbtests/SConscript +++ /dev/null @@ -1,18 +0,0 @@ -# -*- mode: python; -*- - -Import("env") -Import("get_option") -Import("has_option") -Import("wiredtiger") - -env = env.Clone() - -env.BazelProgram( - target="dbtest", - source=[], - LIBDEPS=[], - AIB_COMPONENT="dbtest", - AIB_COMPONENTS_EXTRA=[ - "tests", - ], -) diff --git a/src/mongo/embedded/SConscript b/src/mongo/embedded/SConscript deleted file mode 100644 index 8a636f23306..00000000000 --- a/src/mongo/embedded/SConscript +++ /dev/null @@ -1,52 +0,0 @@ -# -*- mode: python; -*- - - -Import("env") -Import("get_option") -Import("wiredtiger") - -env = env.Clone() - -env.AppendUnique( - CPPPATH=["$BUILD_DIR/mongo/embedded"], -) - -# Inject this before we call the SDK directory SConscripts so that -# they can both use it. - -sdkEnv = env.Clone() - - -def mongo_export_file_generator(target, source, env, for_signature): - if env.ToolchainIs("msvc"): - script = env.File(env.subst("${TARGET.base}.def", target=target)) - return script.get_csig() if for_signature else "/DEF:" + str(script) - elif env.TargetOSIs("darwin"): - script = env.File(env.subst("${TARGET.base}.exported_symbols_list", target=target)) - return script.get_csig() if for_signature else "-Wl,-exported_symbols_list," + str(script) - elif env.TargetOSIs("posix"): - script = env.File(env.subst("${TARGET.base}.version_script", target=target)) - return script.get_csig() if for_signature else "-Wl,--version-script," + str(script) - else: - pass - - -# We really only want to use the mapfile if we are doing an SDK build. 
In an ordinary -# dynamic build, we would end up building the normal library with an export map -# but many of its symbols should in fact be coming from other libraries, and we -# get odd ODR-esque violations. UBSAN caught this. Thanks UBSAN! -if get_option("link-model") == "dynamic-sdk": - sdkEnv["MONGO_EXPORT_FILE_SHLINKFLAGS"] = mongo_export_file_generator - -env.SConscript( - must_exist=1, - dirs=[ - "stitch_support", - ], - exports={ - "env": sdkEnv, - }, -) - -yamlEnv = env.Clone() -yamlEnv.InjectThirdParty(libraries=["yaml"]) diff --git a/src/mongo/embedded/stitch_support/SConscript b/src/mongo/embedded/stitch_support/SConscript deleted file mode 100644 index 3f31c8039ef..00000000000 --- a/src/mongo/embedded/stitch_support/SConscript +++ /dev/null @@ -1,23 +0,0 @@ -# -*- mode: python; -*- - - -Import("env") -Import("get_option") - -env = env.Clone() - -env.AutoInstall( - "$PREFIX_INCLUDEDIR/stitch_support/v1/stitch_support", - source=["stitch_support.h"], - AIB_COMPONENT="stitch-support", - AIB_ROLE="dev", -) - -if get_option("link-model") != "dynamic-sdk": - stitchSupportTestEnv = env.Clone() - unitTest = stitchSupportTestEnv.CppUnitTest( - target="stitch_support_test", - source=[], - UNITTEST_HAS_CUSTOM_MAINLINE=True, - AIB_COMPONENT="stitch-support-test", - ) diff --git a/src/mongo/executor/SConscript b/src/mongo/executor/SConscript deleted file mode 100644 index fe8fc6a2d9f..00000000000 --- a/src/mongo/executor/SConscript +++ /dev/null @@ -1,21 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="network_executor_test", - source=[], -) - -env.CppUnitTest( - target="task_executor_test", - source=[], - PROVE_ALIASES=["server-programmability"], -) - -env.CppIntegrationTest( - target="executor_integration_test", - source=[], -) diff --git a/src/mongo/idl/README.md b/src/mongo/idl/README.md index 3aba8502540..06ee976aa90 100644 --- a/src/mongo/idl/README.md +++ b/src/mongo/idl/README.md @@ -69,7 +69,7 @@ 
instance, python buildscripts/idl/idlc.py src/mongo/idl/unittest.idl ``` -generates two files when invoked from SCons. +generates two files when invoked: ```sh build/opt/mongo/idl/unittest_gen.h @@ -124,24 +124,24 @@ structs: ``` The next step is to actually generate code from the YAML description. To do that, add the following -to a `SConscript` file: +to a `BUILD.bazel` file: -`src\mongo\example\SConscript`: +`src\mongo\example\BUILD.bazel`: ```python -env.Library( - target='example', - source=[ +mongo_idl_library( + name='example', + src=[ 'example.idl', ], - LIBDEPS=[ - '$BUILD_DIR/mongo/idl/idl_parser', + deps=[ + '//src/mongo/idl:idl_parser', ], ) ``` -SCons knows how to invoke the IDL compiler and generate files in the build directory with the C++ -code. This code can also be generated by `generated-sources` target in SCons which is useful for +Bazel knows how to invoke the IDL compiler and generate files in the build directory with the C++ +code. This code can also be generated by `--build_tag_filters=gen_source` tag in bazel which is useful for code navigation. The generated IDL code looks something like the simplified code below. @@ -264,7 +264,7 @@ commands: type: string ``` -To see how to integrate a command IDL file in SCons, see the example above for structs. +To see how to integrate a command IDL file in Bazel, see the example above for structs. 
## The IDL file diff --git a/src/mongo/idl/SConscript b/src/mongo/idl/SConscript deleted file mode 100644 index b78813c7936..00000000000 --- a/src/mongo/idl/SConscript +++ /dev/null @@ -1,23 +0,0 @@ -# -*- mode: python -*- -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="cluster_server_parameter_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="idl_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="idl_parser_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/installer/SConscript b/src/mongo/installer/SConscript deleted file mode 100644 index 76547c59f73..00000000000 --- a/src/mongo/installer/SConscript +++ /dev/null @@ -1,33 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "compass", - "msi", - ], - exports=[ - "env", - ], -) - -if env.TargetOSIs("windows"): - # This ensures that the VC++ redistributable is always included in the - # community server ZIP. 
- - redist_file = env["MSVS"].get("VCREDISTEXE", None) - if not redist_file: - env.FatalError("Required CRT redistributable not found; cannot build distribution package") - - env.AutoInstall( - target="$PREFIX_BINDIR", - source=[ - "${MSVS['VCREDISTEXE']}", - ], - AIB_COMPONENT="dist", - AIB_ROLE="runtime", - ) diff --git a/src/mongo/installer/compass/SConscript b/src/mongo/installer/compass/SConscript deleted file mode 100644 index 0dda42b6a94..00000000000 --- a/src/mongo/installer/compass/SConscript +++ /dev/null @@ -1,25 +0,0 @@ -# -*- mode: python; -*- - -Import( - [ - "env", - ] -) - -env = env.Clone() - -compass_installer = "install_compass" -if env.TargetOSIs("windows"): - compass_installer = "Install-Compass.ps1" - -if env.TargetOSIs("posix"): - env.AddPostAction(compass_installer, Chmod("$TARGET", 0o755)) - -env.AutoInstall( - target="$PREFIX_BINDIR", - source=[ - compass_installer, - ], - AIB_COMPONENT="dist", - AIB_ROLE="runtime", -) diff --git a/src/mongo/installer/msi/SConscript b/src/mongo/installer/msi/SConscript deleted file mode 100644 index 9bdc3a01536..00000000000 --- a/src/mongo/installer/msi/SConscript +++ /dev/null @@ -1,191 +0,0 @@ -# -*- mode: python; -*- - -import hashlib -import os -import uuid - -Import("env") -Import("get_option") -Import("has_option") - -if not env.TargetOSIs("windows"): - Return() - - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "ca", - ], - exports=[ - "env", - ], -) - -env["WIX"] = os.environ.get("WIX") -env["WIXPATH"] = r"$WIX\bin" -env["WIXHEAT"] = r"$WIXPATH\heat.exe" -env["WIXCANDLE"] = r"$WIXPATH\candle.exe" -env["WIXLIGHT"] = r"$WIXPATH\light.exe" -env["WIXUIEXT"] = r"$WIXPATH\WixUIExtension.dll" -env["WIXUTILEXT"] = r"$WIXPATH\WixUtilExtension.dll" - -if "VCREDISTMERGEMODULEPATH" not in env["MSVS"]: - print( - "SCons tool setup did not configure the path to the vcredist merge modules, disabling MSI installer" - ) - Return() - -sourcesList = [ - "BinaryFragment.wxs", - "FeatureFragment.wxs", 
- "LicensingFragment.wxs", - "UIFragment.wxs", -] - -# Need to do this in order to get scons to translate path separators into native format -buildDir = env.Dir("$BUILD_DIR").path -buildRoot = env.Dir("$BUILD_ROOT").path -toolBuildDir = buildDir + r"\mongo" - -enterpriselicensesource = "src\mongo\db\modules\enterprise\distsrc" -enterpriseToolBuildDir = buildDir + r"\mongo\db\modules\enterprise" - -# Set up parameters to pass to wix - -# -# msi_edition - "Enterprise" or "Standard" -# msi_platform - "x64" or "x86" -# msi_flavor - "2008R2Plus" or "" -# - -msi_flavor = "2008R2Plus" -msi_platform = "x64" - -if "enterprise" in env["MONGO_MODULES"]: # Enterprise - msi_edition = "Enterprise" -else: # Community - if get_option("ssl") == "on": - msi_edition = "SSL" - else: - msi_edition = "Standard" - -full_version = env["MONGO_VERSION"].partition("-")[0] - -# major version is the x.y, not the x.y.z -major_version = full_version -mv = major_version.split(".") -major_version = "%s.%s" % (mv[0], mv[1]) - -# We must regenerate upgrade codes for each major release. These upgrade codes must also be -# different for each MSI edition. 
-m = hashlib.sha256() -hash_str = "{}_{}".format(major_version, msi_edition) -m.update(hash_str.encode()) -upgrade_code = str(uuid.UUID(bytes=m.digest()[0:16])) - -sourcesList.append("Installer_64.wxs") - -sources = ["wxs/" + file for file in sourcesList] -objects = ["$BUILD_DIR/msi/" + file.replace(".wxs", ".wixobj") for file in sourcesList] - -# Currently, we are planning to key the same upgrade code for each -# (msi_edition, msi_platform, msi_flavor) combination -# and change MSI ProductId on minor updates, 2.6.0 -> 2.6.1, we let Wix do automatic -# GUID generation for us rather then build a database of GUIDs in our build system -# For major updates, we are going to create a new directory/productid/upgrade_code ie, 2.6 -> 3.0 - -# candle: compile .wxs files into .wixobjs -candle_targets = env.Command( - target=objects, - source=sources, - action=[ - '"$WIXCANDLE" -wx' - # cannot have anything other than x.x.x.x in version string. - # we should choose a fourth version number that reflects pre-ness. 
- " -dMongoDBMajorVersion=" - + major_version - + " -dMongoDBVersion=" - + full_version - + " -dLicenseSource=distsrc" - r" -dEnterpriseLicenseSource=" - + enterpriselicensesource - + " -dBinarySource=" - + '"$DESTDIR\\$PREFIX_BINDIR"' - + " -dMergeModulesBasePath=" - + "\"${MSVS['VCREDISTMERGEMODULEPATH']}\"" - + " -dMergeModuleFileCRT=" - + env.GetMergeModuleNameForFeature("CRT") - + " -dEdition=" - + msi_edition - + ' -d"ProductId=*"' - " -dUpgradeCode=" - + upgrade_code - + " -dCustomActionDll=" - + '"$DESTDIR\\$PREFIX_BINDIR\\mongoca.dll"' - + " -dConfiguration=Release" - " -dOutDir=" + buildDir + r"\msi" - " -dPlatform=" - + msi_platform - + " -dFlavor=" - + msi_flavor - + r" -dProjectDir=buildscripts\packaging\msi\\" - " -dProjectName=MongoDB" - " -dTargetDir=" + buildDir + r"\msi" - " -dTargetExt=.msi" - " -dTargetFileName=${SERVER_ARCHIVE}" - r" -dSaslSource=" + buildRoot + r"\sasl_2_1_28\bin" - r" -dSslSource=" + env["WINDOWS_OPENSSL_BIN"] + " -out " + buildDir + r"\msi\\" - " -arch " + msi_platform + ' -ext "$WIXUIEXT"' - ' -ext "$WIXUTILEXT"' - " $SOURCES" - ], -) - -pre_msi = env.Command( - target="$BUILD_DIR/msi/${SERVER_DIST_BASENAME}.pre.msi", - source=candle_targets, - action=[ - '"$WIXLIGHT" -out ${TARGET} -wx -cultures:null' - # Suppress VC140_CRT_CRT.MSM Internal Consistency Errors - # ICE82 - Suppress "duplicate sequence number" - # -- https://msdn.microsoft.com/en-us/library/windows/desktop/aa368798(v=vs.85).aspx - " -sice:ICE82" - # ICE03 - Supress "String overflow" - # -- https://msdn.microsoft.com/en-us/library/windows/desktop/aa369037(v=vs.85).aspx - " -sice:ICE03" - # ICE30 - Suppress "different components install same file" - # -- mongod.exe is installed in two different components but only one is ever used during an install - # so this consistency check can be ignored. 
- # -- https://msdn.microsoft.com/en-us/library/windows/desktop/aa368954(v=vs.85).aspx - " -sice:ICE30" - ' -ext "$WIXUIEXT"' - ' -ext "$WIXUTILEXT"' - " ${SOURCES}" - ], -) - -env.Depends( - pre_msi, - [ - "#/buildscripts/packaging/msi/mongod.yaml", - # This could potentially be a superset of what we actually - # require to build the MSI, but it should never be a subset. - env.Alias("install-dist"), - # We also need the mongoca DLL. - env.Alias("install-msi-util"), - ], -) - -env.NoCache(pre_msi) - -msi = env.Command( - target="$BUILD_DIR/msi/${SERVER_DIST_BASENAME}.msi", - source=pre_msi, - action=[r"$PYTHON buildscripts\msitrim.py ${SOURCES} ${TARGET}"], -) -env.AlwaysBuild(msi) -env.NoCache(msi) - -env.Alias("msi", msi) diff --git a/src/mongo/installer/msi/ca/SConscript b/src/mongo/installer/msi/ca/SConscript deleted file mode 100644 index bc214f59cd6..00000000000 --- a/src/mongo/installer/msi/ca/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- -Import("env") - -env = env.Clone() - -env.BazelSharedLibrary( - target="mongoca", - source=[], - AIB_COMPONENT="msi-util", - AIB_ROLE="runtime", -) diff --git a/src/mongo/logv2/SConscript b/src/mongo/logv2/SConscript deleted file mode 100644 index a4d002f8c9d..00000000000 --- a/src/mongo/logv2/SConscript +++ /dev/null @@ -1,18 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="logv2_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="logv2_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) diff --git a/src/mongo/mongo_config_header.py b/src/mongo/mongo_config_header.py index 8ca89b73661..f1b245ea073 100644 --- a/src/mongo/mongo_config_header.py +++ b/src/mongo/mongo_config_header.py @@ -124,7 +124,6 @@ def memset_s_present_flag() -> list[HeaderDefinition]: def strnlen_present_flag() -> list[HeaderDefinition]: if platform.system() == "Windows": - # Match SCons behavior return [] log_check("[MONGO_CONFIG_HAVE_STRNLEN] 
Checking for strnlen...") @@ -384,7 +383,6 @@ def altivec_vbpermq_output_flag() -> list[HeaderDefinition]: def usdt_provider_flags() -> list[HeaderDefinition]: if platform.system() == "Darwin": - # Match SCons behavior return [] log_check("[MONGO_CONFIG_USDT_PROVIDER] Checking if SDT usdt provider is available...") diff --git a/src/mongo/platform/SConscript b/src/mongo/platform/SConscript deleted file mode 100644 index 2c5b6d6d136..00000000000 --- a/src/mongo/platform/SConscript +++ /dev/null @@ -1,60 +0,0 @@ -# -*- mode: python -*- - -import platform - -Import( - [ - "env", - "get_option", - ] -) - -env = env.Clone() -env.CppUnitTest( - target="platform_test", - source=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="endian_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -if get_option("link-model") == "dynamic": - env.CppUnitTest( - target="visibility1_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], - ) - - env.CppUnitTest( - target="visibility2_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], - ) - -env.CppUnitTest( - target="rwmutex_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="rwmutex_bm", - source=[], - LIBDEPS=[], -) - -if platform.system() == "Linux": - env.CppUnitTest( - target="throw_hook_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], - ) diff --git a/src/mongo/resmoke/SConscript b/src/mongo/resmoke/SConscript deleted file mode 100644 index a43f2334762..00000000000 --- a/src/mongo/resmoke/SConscript +++ /dev/null @@ -1,26 +0,0 @@ -# -*- mode: python -*- -import SCons - -Import("env") - -env = env.Clone() - -install_dir = env.Dir("$DESTDIR/$PREFIX_BINDIR").path.replace("\\", r"\\") -resmoke_py = env.Substfile( - target="resmoke.py", - source="resmoke.py.in", - SUBST_DICT={ - "@install_dir@": install_dir, - }, -) -resmoke_py_install = 
env.AutoInstall( - "$PREFIX_BINDIR", - source=resmoke_py, - AIB_COMPONENT="common", - AIB_ROLE="runtime", -) -setattr(resmoke_py_install[0].attributes, "AIB_NO_ARCHIVE", True) -env.AddPostAction( - resmoke_py_install, - action=SCons.Defaults.Chmod("$TARGET", "u+x"), -) diff --git a/src/mongo/rpc/SConscript b/src/mongo/rpc/SConscript deleted file mode 100644 index 96690ddbdc7..00000000000 --- a/src/mongo/rpc/SConscript +++ /dev/null @@ -1,29 +0,0 @@ -# -*- mode: python -*- - -Import("env") -Import("wiredtiger") - -env = env.Clone() - -env.CppLibfuzzerTest( - target="protocol_fuzzer", - source=[], -) - -if wiredtiger: - env.CppUnitTest( - target="rpc_test", - source=[], - LIBDEPS=[], - ) - - env.CppUnitTest( - target="rpc_d_test", - source=[], - LIBDEPS=[], - ) - -env.CppIntegrationTest( - target="rpc_integration_test", - source=[], -) diff --git a/src/mongo/s/SConscript b/src/mongo/s/SConscript deleted file mode 100644 index 18fede74685..00000000000 --- a/src/mongo/s/SConscript +++ /dev/null @@ -1,82 +0,0 @@ -# -*- mode: python -*- - -Import( - [ - "env", - "have_sasl_lib", - "http_client", - "version_extra", - "version_parts", - ] -) - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "client", - "commands", - "query", - ], - exports=[ - "env", - ], -) - - -if env.TargetOSIs("windows"): - generatedServerManifest = env.Substfile( - "mongos.manifest.in", - SUBST_DICT=[ - ("@mongo_version_major@", version_parts[0]), - ("@mongo_version_minor@", version_parts[1]), - ("@mongo_version_patch@", version_parts[2]), - ("@mongo_version_extra@", version_parts[3]), - ("@mongo_version_extra_str@", version_extra), - ], - ) - - env.Alias("generated-sources", generatedServerManifest) - env.Depends("mongos.res", generatedServerManifest) - -env.BazelProgram( - target="mongos", - source=[], - LIBDEPS=[], - AIB_COMPONENT="mongos", - AIB_COMPONENTS_EXTRA=[ - "core", - "default", - "devcore", - "dist", - "dist-test", - "integration-tests", - "servers", - ], -) - 
-env.CppUnitTest( - target="multi_statement_transaction_requests_sender_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="s_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="service_entry_point_router_role_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="sep_bm", -) - -env.CppUnitTest( - target="service_entry_point_router_role_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/s/client/SConscript b/src/mongo/s/client/SConscript deleted file mode 100644 index de0c05c5e5e..00000000000 --- a/src/mongo/s/client/SConscript +++ /dev/null @@ -1,5 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() diff --git a/src/mongo/s/commands/SConscript b/src/mongo/s/commands/SConscript deleted file mode 100644 index 596178be542..00000000000 --- a/src/mongo/s/commands/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="s_commands_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/s/query/SConscript b/src/mongo/s/query/SConscript deleted file mode 100644 index 4b28e8babf4..00000000000 --- a/src/mongo/s/query/SConscript +++ /dev/null @@ -1,16 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "exec", - "planner", - ], - exports=[ - "env", - ], -) diff --git a/src/mongo/s/query/exec/SConscript b/src/mongo/s/query/exec/SConscript deleted file mode 100644 index abfe343212c..00000000000 --- a/src/mongo/s/query/exec/SConscript +++ /dev/null @@ -1,11 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="s_query_exec_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/s/query/planner/SConscript b/src/mongo/s/query/planner/SConscript deleted file mode 100644 index de0c05c5e5e..00000000000 --- a/src/mongo/s/query/planner/SConscript +++ /dev/null @@ -1,5 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() 
diff --git a/src/mongo/scripting/BUILD.bazel b/src/mongo/scripting/BUILD.bazel index b55cea2cc61..e026e1204fc 100644 --- a/src/mongo/scripting/BUILD.bazel +++ b/src/mongo/scripting/BUILD.bazel @@ -254,7 +254,7 @@ mongo_cc_unit_test( ], copts = select({ "@platforms//os:windows": [ - # See the /Zc:preprocessor comment in third_party/mozjs/SConscript + # See the /Zc:preprocessor comment in third_party/mozjs/BUILD.bazel "/Zc:preprocessor", "/wd5104", "/wd5105", diff --git a/src/mongo/scripting/SConscript b/src/mongo/scripting/SConscript deleted file mode 100644 index a12142878ab..00000000000 --- a/src/mongo/scripting/SConscript +++ /dev/null @@ -1,16 +0,0 @@ -# -*- mode: python -*- - -Import("env", "jsEngine") - -if jsEngine != "none": - env.CppUnitTest( - target="scripting_mozjs_test", - source=[], - LIBDEPS=[], - ) - -env.CppUnitTest( - target="scripting_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/scripting/mozjs/BUILD.bazel b/src/mongo/scripting/mozjs/BUILD.bazel index f263a882052..62ebb7b743d 100644 --- a/src/mongo/scripting/mozjs/BUILD.bazel +++ b/src/mongo/scripting/mozjs/BUILD.bazel @@ -28,10 +28,10 @@ render_template( name = "mongohelpers_js_cpp", srcs = [ "mongohelpers.js", - "//site_scons/site_tools:jstoh.py", + "//buildscripts:jstoh.py", ], cmd = [ - "$(location //site_scons/site_tools:jstoh.py)", + "$(location //buildscripts:jstoh.py)", "$(location mongohelpers_js.cpp)", "$(location mongohelpers.js)", ], diff --git a/src/mongo/shell/BUILD.bazel b/src/mongo/shell/BUILD.bazel index 354a51ead51..849618a1ad6 100644 --- a/src/mongo/shell/BUILD.bazel +++ b/src/mongo/shell/BUILD.bazel @@ -68,7 +68,7 @@ mongo_cc_library( ], copts = select({ "@platforms//os:windows": [ - # See the /Zc:preprocessor comment in third_party/mozjs/SConscript + # See the /Zc:preprocessor comment in third_party/mozjs/BUILD.bazel "/Zc:preprocessor", "/wd5104", "/wd5105", @@ -292,10 +292,10 @@ MONGOJS_CPP_JSFILES = [ render_template( name = "mongojs_cpp", srcs = [ - 
"//site_scons/site_tools:jstoh.py", + "//buildscripts:jstoh.py", ] + MONGOJS_CPP_JSFILES, cmd = [ - "$(location //site_scons/site_tools:jstoh.py)", + "$(location //buildscripts:jstoh.py)", "$(location mongojs.cpp)", ] + [ "$(location {})".format(file) @@ -316,10 +316,10 @@ MONGO_SERVER_CPP_JS_FILES = [ render_template( name = "mongo_server_cpp", srcs = [ - "//site_scons/site_tools:jstoh.py", + "//buildscripts:jstoh.py", ] + MONGO_SERVER_CPP_JS_FILES, cmd = [ - "$(location //site_scons/site_tools:jstoh.py)", + "$(location //buildscripts:jstoh.py)", "$(location mongo-server.cpp)", ] + [ "$(location {})".format(file) diff --git a/src/mongo/shell/SConscript b/src/mongo/shell/SConscript deleted file mode 100644 index 3c7bbd090dc..00000000000 --- a/src/mongo/shell/SConscript +++ /dev/null @@ -1,62 +0,0 @@ -# -*- mode: python; -*- - -Import( - [ - "env", - "get_option", - "has_option", - "http_client", - "jsEngine", - "version_parts", - "version_extra", - ] -) - -env = env.Clone() - - -def shouldBuildGRPC(myEnv): - return myEnv.TargetOSIs("linux") and get_option("ssl") == "on" - - -if shouldBuildGRPC(env): - env.InjectThirdParty(libraries=["grpc"]) - - -if not has_option("noshell") and jsEngine != "none": - shellEnv = env.Clone() - if env.TargetOSIs("windows"): - shellEnv.Append(LIBS=["winmm"]) - - generatedMongoManifest = shellEnv.Substfile( - "mongo.manifest.in", - SUBST_DICT=[ - ("@mongo_version_major@", version_parts[0]), - ("@mongo_version_minor@", version_parts[1]), - ("@mongo_version_patch@", version_parts[2]), - ("@mongo_version_extra@", version_parts[3]), - ("@mongo_version_extra_str@", version_extra), - ], - ) - shellEnv.Alias("generated-sources", generatedMongoManifest) - shellEnv.Depends("mongo.res", generatedMongoManifest) - - mongo_shell = shellEnv.BazelProgram( - target="mongo", - source=[], - LIBDEPS=[], - AIB_COMPONENT="jstestshell", - AIB_COMPONENTS_EXTRA=[ - "devcore", - "dist-test", - "integration-tests", - "mongo", - "serverless-test", - ], - ) - 
-env.CppUnitTest( - target="shell_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/stdx/SConscript b/src/mongo/stdx/SConscript deleted file mode 100644 index f8131402d41..00000000000 --- a/src/mongo/stdx/SConscript +++ /dev/null @@ -1,73 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.Benchmark( - target="condition_variable_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.CppUnitTest( - target="stdx_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -# Specify UNITTEST_HAS_CUSTOM_MAINLINE because it needs low-level control of -# thread creation and signals, so it shouldn't use unittest_main and typical -# mongo startup routines. -env.CppUnitTest( - target="sigaltstack_location_test", - source=[], - LIBDEPS=[], - UNITTEST_HAS_CUSTOM_MAINLINE=True, - PROVE_ALIASES=["server-programmability"], -) - -# The tests for `stdx::set_terminate` need to run outside of the mongo unittest harneses. -# The tests require altering the global `set_terminate` handler, which our unittest framework -# doesn't expect to have happen. Further, the tests have to return successfully from a -# terminate condition which interacts poorly with the unittest framework. -# -# A set of dedicated binaries to each test case is actually the simplest way to accomplish -# robust testing of this mechanism. - -# Needs to be a different test -- It has to have direct control over the `main()` entry point. -env.CppUnitTest( - target="set_terminate_dispatch_test", - source=[], - LIBDEPS=[], - UNITTEST_HAS_CUSTOM_MAINLINE=True, - PROVE_ALIASES=["server-programmability"], -) - -# Needs to be a different test -- It has to have direct control over the `main()` entry point. 
-env.CppUnitTest( - target="set_terminate_from_main_die_in_thread_test", - source=[], - LIBDEPS=[], - UNITTEST_HAS_CUSTOM_MAINLINE=True, - PROVE_ALIASES=["server-programmability"], -) - -# Needs to be a different test -- It has to have direct control over the `main()` entry point. -env.CppUnitTest( - target="set_terminate_from_thread_die_in_main_test", - source=[], - LIBDEPS=[], - UNITTEST_HAS_CUSTOM_MAINLINE=True, -) - -# Needs to be a different test -- It has to have direct control over the `main()` entry point. -env.CppUnitTest( - target="set_terminate_from_thread_die_in_thread_test", - source=[], - LIBDEPS=[], - UNITTEST_HAS_CUSTOM_MAINLINE=True, - PROVE_ALIASES=["server-programmability"], -) diff --git a/src/mongo/tools/SConscript b/src/mongo/tools/SConscript deleted file mode 100644 index 8c6f3b45a2d..00000000000 --- a/src/mongo/tools/SConscript +++ /dev/null @@ -1,15 +0,0 @@ -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "mongobridge_tool", - "mongo_tidy_checks", - "workload_simulation", - ], - exports=[ - "env", - ], -) diff --git a/src/mongo/tools/mongo_tidy_checks/BUILD.bazel b/src/mongo/tools/mongo_tidy_checks/BUILD.bazel index d370b041b9c..9c4c108ef40 100644 --- a/src/mongo/tools/mongo_tidy_checks/BUILD.bazel +++ b/src/mongo/tools/mongo_tidy_checks/BUILD.bazel @@ -60,7 +60,7 @@ cc_library( "MongoVolatileCheck.h", ], copts = [ - "-Isrc/mongo/tools/mongo_tidy_checks/", # Ensures that `#include "Mongo..."` calls continue to work in Bazel AND in SCons + "-Isrc/mongo/tools/mongo_tidy_checks/", # Ensures that `#include "Mongo..."` calls continue to work in Bazel "-DNDEBUG", "-Wall", "-fdiagnostics-color", diff --git a/src/mongo/tools/mongo_tidy_checks/README.md b/src/mongo/tools/mongo_tidy_checks/README.md index b58fed262e3..6f1f7f5d41a 100644 --- a/src/mongo/tools/mongo_tidy_checks/README.md +++ b/src/mongo/tools/mongo_tidy_checks/README.md @@ -8,19 +8,17 @@ The basics of implementing a check are in the [clang 
docs](https://releases.llvm #### Basic usage of the custom checks -The current directory contains the individual check source files, the main `MongoTidyModule.cpp` source file which registers the checks, and the SConscript responsible for building the check library module. The module will be installed into the DESTDIR, by default `build/install/lib/libmongo_tidy_checks.so`. +The current directory contains the individual check source files, the main `MongoTidyModule.cpp` source file which registers the checks, and the BUILD.bazel file responsible for building the check library module. To build the custom checks use this command: -Our internal `buildscripts/clang_tidy.py` will automatically check this location and attempt to load the module if it exists. If it is installed to a non-default location you will need to supply the `--check-module` argument with the location to the module. +The bazel clang-tidy config will automatically build and use the custom checks module. To run clang tidy with the checks module use: -The check will only be run if you add the name of the check to the `.clang-tidy.in` configuration file. Note you can also customized options for the specific check in this configuration file. Please reference some of the other checks and clang docs on how to add check specific options. - -Each check should be contained in its own `cpp` and `h` file, and have one or more unit tests. The h file must be `#include`'d to the `MongoTidyModule.cpp` file where the check class will be registered with a given check name. + bazel build --config=clang-tidy //src/mongo/... #### Adding check unittests A simple unittest framework is included with the checks so that they will automatically be run in quick, isolated, and minimal fashion. This allows for faster development and ensures the checks continue working. -The `test` directory contains the python unittest script, the test source files, and the SConscript which builds and runs the tests. 
NOTE: The python unittest script requires arguments to function correctly, you must supply compile_commands.json files matching the correct location and filename to the corresponding tests. For this reason, you should use the scons build as the interface for running the tests as it will create the compile_commands files, and run the unittest script automatically with the correct arguments. To build and test the checks use the scons command `python buildscripts/scons.py --build-profile=compiledb VERBOSE=1 +mongo-tidy-tests`. Note that currently the `--ninja` option does not support running the mongo tidy unittests. +The `test` directory contains the python unittest script, the test source files, and the BUILD.bazel which builds and runs the tests. NOTE: The python unittest script requires arguments to function correctly, you must supply compile_commands.json files matching the correct location and filename to the corresponding tests. For this reason, you should use the bazel build as the interface for running the tests as it will create the compile_commands files, and run the unittest script automatically with the correct arguments. To build and test the checks use the bazel command `bazel test //src/mongo/tools/mongo_tidy_checks/test:mongo_tidy_checks`. #### Writing your own check checklist @@ -29,12 +27,12 @@ Below is a checklist of all the steps to make sure to perform when writing a new 1. Implement the check in the respectively named `.h` and `.cpp` files. 2. Add the check's `#include` to the `MongoTidyModule.cpp`. 3. Register the check class with a check name in the `MongoTidyModule.cpp`. -4. Add the `.cpp` file to the source list in the `SConscript` file. +4. Add the `.cpp` file to the source list in the `BUILD.bazel` file. 5. Write a unittest file named `tests/test_{CHECK_NAME}.cpp` which minimally reproduces the issue. -6. Add the test file to the list of test sources in `tests/SConscript`. +6. 
Add the test file to the list of test sources in `tests/BUILD.bazel`. 7. Add a `def test_{CHECK_NAME}():` function to the `MongoTidyCheck_unittest.py` file which writes the config file, and finds the expected error output in the stdout. Reference the other check funcions for details. -8. Run the scons build with `python buildscripts/scons.py --build-profile=compiledb VERBOSE=1 +mongo-tidy-tests` to run the tests and see the detailed output of each test. +8. Run the bazel build with `bazel test //src/mongo/tools/mongo_tidy_checks/test:mongo_tidy_checks` to run the tests and see the detailed output of each test. #### Questions and Troubleshooting -If you have any questions please reach out to the `#server-build-help` slack channel. +If you have any questions please reach out to the `#ask-devprod-build` slack channel. diff --git a/src/mongo/tools/mongo_tidy_checks/SConscript b/src/mongo/tools/mongo_tidy_checks/SConscript deleted file mode 100644 index b496128c437..00000000000 --- a/src/mongo/tools/mongo_tidy_checks/SConscript +++ /dev/null @@ -1,121 +0,0 @@ -Import("env") -Import("use_libunwind") - -from pathlib import Path - -from buildscripts.mongo_toolchain import try_get_mongo_toolchain - -toolchain_clang_tidy_dev_found = False -toolchain_found = False -base_toolchain_bin = None -toolchain = try_get_mongo_toolchain( - version=env.get("MONGO_TOOLCHAIN_VERSION", "v4"), from_bazel=False -) -toolchain_found = toolchain is not None -if not toolchain_found: - Return() -base_toolchain_bin = Path(toolchain.get_bin_dir()) - -tidy_include = Path(toolchain.get_include_dir()) -tidy_lib = toolchain.get_lib_dir() - -toolchain_clang_tidy_dev_found = (tidy_include / "clang-tidy" / "ClangTidy.h").exists() - - -if toolchain_found and not toolchain_clang_tidy_dev_found: - # If there was a toolchain but its not setup right, issue a warning about this. - print( - "Could not find not find clang-tidy headers in toolchain, not building mongo custom checks module." 
- ) - Return() - -env = env.Clone() - -# TODO SERVER-73731 -# Instead of hardcoding these flags, we should load the flags used by the toolchain here -env["CXX"] = [f"{base_toolchain_bin}/g++"] -env["SHCXX"] = [f"{base_toolchain_bin}/g++"] -env["LINK"] = [f"{base_toolchain_bin}/g++"] -env["SHLINK"] = [f"{base_toolchain_bin}/g++"] -env["CPPPATH"] = [str(tidy_include)] -env["LIBPATH"] = [] -env["CPPFLAGS"] = [] -env["CCFLAGS"] = [ - "-DGTEST_HAS_RTTI=0", - "-D_GNU_SOURCE", - "-D__STDC_CONSTANT_MACROS", - "-D__STDC_FORMAT_MACROS", - "-D__STDC_LIMIT_MACROS", - "-fPIC", - "-fvisibility-inlines-hidden", - "-Werror=date-time", - "-Wall", - "-Wextra", - "-Wno-unused-parameter", - "-Wwrite-strings", - "-Wcast-qual", - "-Wno-missing-field-initializers", - "-pedantic", - "-Wno-long-long", - "-Wimplicit-fallthrough", - "-Wno-maybe-uninitialized", - "-Wno-class-memaccess", - "-Wno-redundant-move", - "-Wno-noexcept-type", - "-Wdelete-non-virtual-dtor", - "-Wsuggest-override", - "-Wno-comment", - "-fdiagnostics-color", - "-ffunction-sections", - "-fdata-sections", - "-fno-common", - "-Woverloaded-virtual", - "-fno-strict-aliasing", - "-O3", - "-g2", - "-fPIC", - "-fno-exceptions", - "-fno-rtti", - "-std=c++17", -] -env["LINKFLAGS"] = [] -env["SHLINKFLAGS"] = [ - "-fPIC", - "-fvisibility-inlines-hidden", - "-Werror=date-time", - "-Wall", - "-Wextra", - "-Wno-unused-parameter", - "-Wwrite-strings", - "-Wcast-qual", - "-Wno-missing-field-initializers", - "-pedantic", - "-Wno-long-long", - "-Wimplicit-fallthrough", - "-Wno-maybe-uninitialized", - "-Wno-class-memaccess", - "-Wno-redundant-move", - "-Wno-noexcept-type", - "-Wdelete-non-virtual-dtor", - "-Wsuggest-override", - "-Wno-comment", - "-fdiagnostics-color", - "-ffunction-sections", - "-fdata-sections", - "-fno-common", - "-Woverloaded-virtual", - "-fno-strict-aliasing", - "-O3", - "-Wl,-z,nodelete", - f"-Wl,-rpath-link,{tidy_lib}", - "-Wl,-O3", - "-Wl,--gc-sections", - "-shared", - r'-Wl,-rpath,"\$$ORIGIN/../lib"', -] 
-env["CXXFLAGS"] = [] -env["CFLAGS"] = [] -env["FORCEINCLUDES"] = [] -env["LIBDEPS_TAG_EXPANSIONS"] = [] - -env.SConscript("tests/SConscript", must_exist=1, exports=["env", "base_toolchain_bin"]) diff --git a/src/mongo/tools/mongo_tidy_checks/tests/SConscript b/src/mongo/tools/mongo_tidy_checks/tests/SConscript deleted file mode 100644 index 1893f6c602f..00000000000 --- a/src/mongo/tools/mongo_tidy_checks/tests/SConscript +++ /dev/null @@ -1,109 +0,0 @@ -Import("env") -Import("base_toolchain_bin") -Import("use_libunwind") - -import os -import sys - -import SCons - -# multiple compilation databases is not supported by ninja -if env.GetOption("ninja") == "disabled": - mongo_tidy_test_env = env.Clone() - - mongo_tidy_test_env.Append( - CPPPATH=[ - ".", - "#src", - "#src/third_party/boost", - ], - ) - - # These test files will purposefully be error prone, so we can disable warnings any warnings we expect - # to see. - mongo_tidy_test_env.Append( - CCFLAGS=[ - "-Wno-unused-but-set-parameter", - ], - ) - - # This list represents the test source files, which should contain a single issue which will be flagged - # by a clang tidy check. The issue should be isolated in as minimal way as possible. 
- tests = [ - "test_MongoHeaderBracketCheck.cpp", - "test_MongoVolatileCheck.cpp", - "test_MongoUninterruptibleLockGuardCheck.cpp", - "test_MongoUninterruptibleLockGuardCheckForOpCtxMember.cpp", - "test_MongoCctypeCheck.cpp", - "test_MongoConfigHeaderCheck.cpp", - "test_MongoCxx20BannedIncludesCheck.cpp", - "test_MongoCxx20StdChronoCheck.cpp", - "test_MongoStdOptionalCheck.cpp", - "test_MongoTraceCheck.cpp", - "test_MongoStdAtomicCheck.cpp", - "test_MongoAssertCheck.cpp", - "test_MongoFCVConstantCheck.cpp", - "test_MongoUnstructuredLogCheck.cpp", - "test_MongoCollectionShardingRuntimeCheck.cpp", - "test_MongoMacroDefinitionLeaksCheck.cpp", - "test_MongoRandCheck.cpp", - "test_MongoRWMutexCheck.cpp", - "test_MongoPolyFillCheck.cpp", - "test_MongoNoUniqueAddressCheck.cpp", - "test_MongoStringDataConstRefCheck1.cpp", - "test_MongoStringDataConstRefCheck2.cpp", - "test_MongoStringDataConstRefCheck3.cpp", - "test_MongoInvariantStatusIsOKCheck.cpp", - ] - - # So that we can do fast runs, we will generate a separate compilation database file for each - # unittest. To keep things simple we will name the compilation database file after the test. - # We need to create separate environments here because compilation database acts on the current - # environment. - test_objs = [] - compilation_dbs = [] - for test in tests: - test_env = mongo_tidy_test_env.Clone(COMPDB_IGNORE_BAZEL=True) - compilation_dbs += test_env.CompilationDatabase( - os.path.splitext(os.path.basename(test))[0] + "/compile_commands.json" - ) - test_objs += test_env.Object(test) - - # Building a program binary may not be necessary but it does validate the code and tie it together. - test_prog = mongo_tidy_test_env.BazelProgram( - target="MongoTidyCheck_test", - source=[], - LIBDEPS_NO_INHERIT=[], - ) - - # Here we setup the test execution. The test will pythons built in unittest framework - # to execute clang tidy for each test source file from the list above. 
- test = mongo_tidy_test_env.Command( - target="run_MongoTidyCheck_test", - source=[ - "MongoTidyCheck_unittest.py", - str(base_toolchain_bin / "clang-tidy"), - "#$BAZEL_OUT_DIR/src/mongo/tools/mongo_tidy_checks/libmongo_tidy_checks.so", - ] - + compilation_dbs, - action=SCons.Action.Action( - " ".join( - [ - sys.executable, - "${SOURCES[0]}", - "--clang-tidy-path=${SOURCES[1]}", - "--mongo-tidy-module=${SOURCES[2]}", - '${["--test-compiledbs=%s" % src for src in SOURCES[3:]]}', - f"{'' if mongo_tidy_test_env.Verbose() else '2> /dev/null'}", - ] - ), - "" if mongo_tidy_test_env.Verbose() else "Runnning mongo tidy checks unittests.", - ), - ENV={ - "MONGO_TOOLCHAIN_VERSION": env["MONGO_TOOLCHAIN_VERSION"], - "MONGO_TOOLCHAIN_FROM_BAZEL": "false", - }, - ) - - mongo_tidy_test_env.Alias("+mongo-tidy-tests", test) - mongo_tidy_test_env.Depends(test, test_prog) diff --git a/src/mongo/tools/mongobridge_tool/SConscript b/src/mongo/tools/mongobridge_tool/SConscript deleted file mode 100644 index 7998440af94..00000000000 --- a/src/mongo/tools/mongobridge_tool/SConscript +++ /dev/null @@ -1,12 +0,0 @@ -Import("env") - -env = env.Clone() - -yamlEnv = env.Clone() -yamlEnv.InjectThirdParty(libraries=["yaml"]) - -mongobridge = env.BazelProgram( - target="mongobridge", - source=[], - AIB_COMPONENT="dist-test", -) diff --git a/src/mongo/tools/workload_simulation/README.md b/src/mongo/tools/workload_simulation/README.md index 9f387993d67..7b23cfd7d6a 100644 --- a/src/mongo/tools/workload_simulation/README.md +++ b/src/mongo/tools/workload_simulation/README.md @@ -31,9 +31,8 @@ a fixture class derived from `mongo::workload_simulation::Simulation`. The secon name of the workload, and any additional parameters will be passed as input to the constructor of the fixture class. -A C++ file that defines these macros can be compiled using the scons helper `WorkloadSimulator` to -link the relevant `main` implementation that runs all the workloads defined using the macro. 
The -name of the target _must_ end in `_simulator`. +A C++ file that defines these macros can be compiled using the `simulator_main` bazel target to +link the relevant `main` implementation that runs all the workloads defined using the macro. ## Generating Visualizations diff --git a/src/mongo/tools/workload_simulation/SConscript b/src/mongo/tools/workload_simulation/SConscript deleted file mode 100644 index 1c092683ef1..00000000000 --- a/src/mongo/tools/workload_simulation/SConscript +++ /dev/null @@ -1,19 +0,0 @@ -Import("env") - -env = env.Clone() - -env.SConscript( - must_exist=1, - dirs=[ - "throughput_probing", - ], - exports=[ - "env", - ], -) - -env.CppUnitTest( - target="tools_workload_characteristics_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/tools/workload_simulation/throughput_probing/SConscript b/src/mongo/tools/workload_simulation/throughput_probing/SConscript deleted file mode 100644 index 60f665e449a..00000000000 --- a/src/mongo/tools/workload_simulation/throughput_probing/SConscript +++ /dev/null @@ -1,8 +0,0 @@ -Import("env") - -env = env.Clone() - -env.WorkloadSimulator( - target="throughput_probing_simulator", - source=[], -) diff --git a/src/mongo/tracing/SConscript b/src/mongo/tracing/SConscript deleted file mode 100644 index 097cb38ed99..00000000000 --- a/src/mongo/tracing/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="tracing_test", - source=[], -) diff --git a/src/mongo/transport/SConscript b/src/mongo/transport/SConscript deleted file mode 100644 index 97544abdb1b..00000000000 --- a/src/mongo/transport/SConscript +++ /dev/null @@ -1,54 +0,0 @@ -# -*- mode: python -*- - -Import("env") -Import("get_option") - -env = env.Clone() - - -def shouldBuildGRPC(myEnv): - return myEnv.TargetOSIs("linux") and get_option("ssl") == "on" - - -if shouldBuildGRPC(env): - env.SConscript( - must_exist=1, - dirs=[ - "grpc", - ], - exports=[ - "env", - ], - ) 
- -env.CppUnitTest( - target="transport_test", - source=[], - LIBDEPS=[], -) - -env.CppIntegrationTest( - target="asio_transport_integration_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="service_executor_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="second_half_bm", -) - -env.Benchmark( - target="session_workflow_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="second_half_bm", -) - -env.Benchmark( - target="message_compressor_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/transport/grpc/BUILD.bazel b/src/mongo/transport/grpc/BUILD.bazel index 073119a0aa2..149a7796c45 100644 --- a/src/mongo/transport/grpc/BUILD.bazel +++ b/src/mongo/transport/grpc/BUILD.bazel @@ -139,7 +139,6 @@ mongo_cc_proto_library( deps = [":core_test_proto"], ) -# This is referenced in src/mongo/transport/grpc/SConscript mongo_cc_grpc_library( name = "core_test_cc_grpc", srcs = [":core_test_proto"], diff --git a/src/mongo/transport/grpc/SConscript b/src/mongo/transport/grpc/SConscript deleted file mode 100644 index 68badf06e4c..00000000000 --- a/src/mongo/transport/grpc/SConscript +++ /dev/null @@ -1,41 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() -env.InjectThirdParty(libraries=["grpc"]) - -env.CppUnitTest( - target="grpc_transport_layer_test", - source=[], - LIBDEPS=[], -) - -env.CppIntegrationTest( - target="grpc_transport_integration_test", - source=[], - LIBDEPS=[], -) - -protoEnv = env.Clone() -protoEnv.InjectThirdParty(libraries=["proto"]) -protoEnv.AppendUnique( - PROTOC_GEN_TYPES=[["cpp", protoEnv.Dir(".")]], - PROTOC_PLUGINS={ - "grpc": { - "plugin": "$PROTOC_GRPC_PLUGIN", - "options": ["generate_mock_code=true"], - "gen_out": protoEnv.Dir("."), - "exts": [".grpc.pb.cc", ".grpc.pb.h"], - }, - }, - CPPPATH=[protoEnv.Dir(".")], -) -protoEnv.Append( - CPPPATH=[ - "$BUILD_DIR/third_party/protobuf/dist/src", - "#/bazel-bin", - ] -) - -protoEnv.CppUnitTest(target="grpc_core_test", source=[]) diff --git 
a/src/mongo/unittest/SConscript b/src/mongo/unittest/SConscript deleted file mode 100644 index 6cea26ee5ba..00000000000 --- a/src/mongo/unittest/SConscript +++ /dev/null @@ -1,22 +0,0 @@ -# -*- mode: python; -*- - -Import("env") - -env = env.Clone() -env.InjectThirdParty(libraries=["yaml"]) - -utEnv = env.Clone() -utEnv.InjectThirdParty(libraries=["yaml"]) - -bmEnv = env.Clone() - -env.CppUnitTest( - target="unittest_test", - source=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="system_resource_canary_bm", - source=[], -) diff --git a/src/mongo/util/BUILD.bazel b/src/mongo/util/BUILD.bazel index 02de90a9561..faa6bfbe77f 100644 --- a/src/mongo/util/BUILD.bazel +++ b/src/mongo/util/BUILD.bazel @@ -1240,7 +1240,7 @@ mongo_cc_unit_test( "stacktrace_test.cpp", ], # deps=stacktrace_test_LIBDEPS, - # From SCons, unclear if necessasry: + # From SCons, unclear if necessary: # EXPORT_SYMBOLS=[ # "mongo_stacktrace_test_detail_testFunctionWithLinkage", # ], diff --git a/src/mongo/util/SConscript b/src/mongo/util/SConscript deleted file mode 100644 index db7e1e824a6..00000000000 --- a/src/mongo/util/SConscript +++ /dev/null @@ -1,241 +0,0 @@ -# -*- mode: python -*- - -from site_scons.mongo import insort_wrapper - -Import( - [ - "endian", - "env", - "use_libunwind", - "use_system_version_of_library", - "version_extra", - "version_parts", - ] -) - -env = env.Clone() - -env.InjectThirdParty("asio") - -if env.TargetOSIs("windows"): - enterpriseEnv = env.Clone().InjectModule("enterprise") - generatedResourceConstantFile = enterpriseEnv.Substfile( - "resource_constants.h.in", - SUBST_DICT=[ - ("@mongo_version@", env["MONGO_VERSION"]), - ("@mongo_version_major@", version_parts[0]), - ("@mongo_version_minor@", version_parts[1]), - ("@mongo_version_patch@", version_parts[2]), - ("@mongo_git_hash@", env["MONGO_GIT_HASH"]), - ], - ) - env.Alias("generated-sources", generatedResourceConstantFile) - -env.SConscript( - must_exist=1, - dirs=[ - "concurrency", - 
"immutable", - "net", - "options_parser", - "tracing_profiler", - "tracking", - ], - exports=[ - "env", - "version_extra", - "version_parts", - ], -) - -env.Benchmark( - target="fail_point_bm", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="concurrent_shared_values_map_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.CppUnitTest( - target="tracing_support_test", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="thread_safety_context_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.CppUnitTest( - target="versioned_value_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -if env["MONGO_ALLOCATOR"] in set(["tcmalloc-google", "tcmalloc-gperf"]): - env.CppUnitTest( - target="tcmalloc_set_parameters_test", - source=[], - ) - -env.Benchmark( - target="clock_source_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.CppUnitTest( - target="executor_stats_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - target="decimal_counter_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.Benchmark( - target="itoa_bm", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="future_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.Benchmark( - target="tick_source_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -pcre_env = env.Clone() -pcre_env.InjectThirdParty(libraries=["pcre2"]) - -env.Benchmark( - target="hash_table_bm", - source=[], - LIBDEPS=[], -) - -icuEnv = env.Clone() - -if not use_system_version_of_library("icu"): - icuEnv.InjectThirdParty("icu") - # Since we are injecting the third-party ICU headers, we must also copy the same defines that we - # use to configure ICU when building ICU sources. See comment in - # src/third_party/icu4c-57.1/source/SConscript. 
- icuEnv.Append( - CPPDEFINES=[ - ("UCONFIG_NO_BREAK_ITERATION", 1), - ("UCONFIG_NO_FORMATTING", 1), - ("UCONFIG_NO_TRANSLITERATION", 1), - ("UCONFIG_NO_REGULAR_EXPRESSIONS", 1), - ("U_CHARSET_IS_UTF8", 1), - ("U_STATIC_IMPLEMENTATION", 1), - ("U_USING_ICU_NAMESPACE", 0), - ], - ) - - -icuEnv.CppUnitTest( - target="util_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -if not env.TargetOSIs("windows"): - icuEnv.CppUnitTest( - target="signal_handlers_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], - ) - -if env.TargetOSIs("windows"): - env.CppUnitTest( - target="util_windows_test", - source=[], - LIBDEPS=[], - ) - -env.Benchmark(target="base64_bm", source=[], BAZEL_BENCHMARK_TAG="first_half_bm") - -stacktraceEnv = env.Clone() -if use_libunwind: - stacktraceEnv.InjectThirdParty(libraries=["unwind"]) - stacktraceEnv.CppUnitTest( - target="stacktrace_libunwind_test", - source=[], - PROVE_ALIASES=["server-programmability"], - ) - -stacktrace_test_LIBDEPS = stacktraceEnv.get("LIBDEPS", []).copy() -insort_wrapper(stacktrace_test_LIBDEPS, "pcre_wrapper") - -stacktraceEnv.CppUnitTest( - target="stacktrace_test", - source=[], - PROVE_ALIASES=["server-programmability"], -) - -stacktraceEnv.Benchmark( - target="stacktrace_bm", - source=[], - # See above for how to handle any future LIBDEPS additions here. - # LIBDEPS=... 
- BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.Benchmark( - target="string_bm", - source=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -env.Benchmark( - target="cancellation_bm", - source=[], -) - -env.Benchmark( - target="inline_memory_bm", - source=[], -) - -env.Benchmark( - target="uuid_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="first_half_bm", -) - -pretty_printer_test_program = env.BazelProgram( - target="pretty_printer_test_program", - source=[], - AIB_COMPONENT="pretty-printer-tests", - AIB_COMPONENTS_EXTRA=["dist-test"], -) -pretty_printer_test_program_installed = env.GetAutoInstalledFiles(pretty_printer_test_program[0]) - -env.PrettyPrinterTest("pretty_printer_test.py", TEST_PROGRAM=pretty_printer_test_program_installed) diff --git a/src/mongo/util/concurrency/SConscript b/src/mongo/util/concurrency/SConscript deleted file mode 100644 index 984671997df..00000000000 --- a/src/mongo/util/concurrency/SConscript +++ /dev/null @@ -1,34 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="util_concurrency_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.CppUnitTest(target="ticketholder_test", source=[]) - -env.Benchmark( - target="ticketholder_bm", - source=[], - LIBDEPS=[], - BAZEL_BENCHMARK_TAG="second_half_bm", -) - -env.Benchmark( - target="lock_free_read_list_bm", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="lock_free_read_list_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) diff --git a/src/mongo/util/immutable/SConscript b/src/mongo/util/immutable/SConscript deleted file mode 100644 index 5facd6ce346..00000000000 --- a/src/mongo/util/immutable/SConscript +++ /dev/null @@ -1,22 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="immutable_test", - source=[], - LIBDEPS=[], - PROVE_ALIASES=["server-programmability"], -) - -env.Benchmark( - 
target="immutable_absl_comparison_bm", - source=[], -) - -env.Benchmark( - target="immutable_std_comparison_bm", - source=[], -) diff --git a/src/mongo/util/net/SConscript b/src/mongo/util/net/SConscript deleted file mode 100644 index 2f3e4b241d6..00000000000 --- a/src/mongo/util/net/SConscript +++ /dev/null @@ -1,32 +0,0 @@ -# -*- mode: python; -*- - -Import("env") -Import("get_option") -Import("http_client") -Import("ssl_provider") - -env = env.Clone() - -env.CppUnitTest( - target="util_net_test", - source=[], - LIBDEPS=[], -) - -env.CppLibfuzzerTest( - target="asn1_parser_fuzzer", - source=[], -) - -if get_option("ssl") == "on": - env.CppUnitTest( - target="util_net_ssl_test", - source=[], - LIBDEPS=[], - ) - -if get_option("ssl") == "on": - env.CppIntegrationTest( - target="network_interface_ssl_test", - source=[], - ) diff --git a/src/mongo/util/options_parser/SConscript b/src/mongo/util/options_parser/SConscript deleted file mode 100644 index 2dc5a26d8f2..00000000000 --- a/src/mongo/util/options_parser/SConscript +++ /dev/null @@ -1,10 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.CppUnitTest( - target="options_parser_test", - source=[], -) diff --git a/src/mongo/util/pretty_printer_test_launcher.py.in b/src/mongo/util/pretty_printer_test_launcher.py.in index 821c6c128ce..44f68a2acbf 100644 --- a/src/mongo/util/pretty_printer_test_launcher.py.in +++ b/src/mongo/util/pretty_printer_test_launcher.py.in @@ -1,8 +1,7 @@ #! /usr/bin/env python3 """ -Wrapper python script to launch the pretty_printer_test_py program as interpolated -by scons with the standard input set to the test file readtest_input, also as -interpolated by scons. +Wrapper python script to launch the pretty_printer_test_py program +with the standard input set to the test file readtest_input. 
""" import subprocess diff --git a/src/mongo/util/tracing_profiler/README.md b/src/mongo/util/tracing_profiler/README.md index 57d87412003..12c3eb5c9b5 100644 --- a/src/mongo/util/tracing_profiler/README.md +++ b/src/mongo/util/tracing_profiler/README.md @@ -20,7 +20,7 @@ fast functions. ## Building To use the profiler, enable the profilerStats while building: -./buildscripts/scons.py ... --use-tracing-profiler=on +bazel build --use-tracing-profiler=on ... When using MONGO_PROFILER_SCOPE_XYZ macros without --use-tracing-profiler=on they will resolve to no-op empty definitions. diff --git a/src/mongo/util/tracing_profiler/SConscript b/src/mongo/util/tracing_profiler/SConscript deleted file mode 100644 index 5b0e45d97ef..00000000000 --- a/src/mongo/util/tracing_profiler/SConscript +++ /dev/null @@ -1,16 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() -env.CppUnitTest( - target="tracing_profiler_test", - source=[], - LIBDEPS=[], -) - -env.Benchmark( - target="tracing_profiler_bm", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/util/tracking/SConscript b/src/mongo/util/tracking/SConscript deleted file mode 100644 index a44402db550..00000000000 --- a/src/mongo/util/tracking/SConscript +++ /dev/null @@ -1,17 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env = env.Clone() - -env.Benchmark( - target="tracking_allocator_bm", - source=[], - LIBDEPS=[], -) - -env.CppUnitTest( - target="tracking_allocator_test", - source=[], - LIBDEPS=[], -) diff --git a/src/mongo/util/version_constants_gen.py b/src/mongo/util/version_constants_gen.py index 09c08f5c97a..8d097b2b2b4 100644 --- a/src/mongo/util/version_constants_gen.py +++ b/src/mongo/util/version_constants_gen.py @@ -94,8 +94,7 @@ def get_toolchain_ver(tool, compiler_path, env_vars): # value: # inBuildInfo: : should it be included in buildInfo output # inVersion: : should it be included in --version output -# The `value` field will be passed through env.subst, so you can use any SCons variables 
you -# want to define them. +# The `value` field will be passed to bazel def default_buildinfo_environment_data(compiler_path, extra_definitions, env_vars): data = ( ( diff --git a/src/mongo/watchdog/SConscript b/src/mongo/watchdog/SConscript deleted file mode 100644 index 18f2802e1cf..00000000000 --- a/src/mongo/watchdog/SConscript +++ /dev/null @@ -1,5 +0,0 @@ -# -*- mode: python -*- - -Import("env") - -env.CppUnitTest(target="watchdog_test", source=[]) diff --git a/src/third_party/IntelRDFPMathLib20U1/SConscript b/src/third_party/IntelRDFPMathLib20U1/SConscript deleted file mode 100644 index b8c61947cfa..00000000000 --- a/src/third_party/IntelRDFPMathLib20U1/SConscript +++ /dev/null @@ -1,180 +0,0 @@ -# -*- mode: python -*- - -import sys - -Import("env") -Import("get_option") - -env = env.Clone() - -# Define Intel decimal128 library build variables -cpp_defines = { - 'DECIMAL_CALL_BY_REFERENCE': '0', - 'DECIMAL_GLOBAL_ROUNDING': '0', - 'DECIMAL_GLOBAL_EXCEPTION_FLAGS': '0', - 'UNCHANGED_BINARY_STATUS_FLAGS': '0', - 'USE_COMPILER_F128_TYPE': '0', - 'USE_COMPILER_F80_TYPE': '0', - 'USE_NATIVE_QUAD_TYPE': '0', -} - -libs = [] - - -def removeIfPresent(lst, item): - try: - lst.remove(item) - except ValueError: - pass - - -# Set OS Defines -if env.TargetOSIs('darwin'): - cpp_defines['LINUX'] = '1' - cpp_defines['mach'] = '1' - -if env.TargetOSIs('linux') or env.TargetOSIs('solaris'): - cpp_defines['LINUX'] = '1' - cpp_defines['linux'] = '1' - libs.append('m') - -if env.TargetOSIs('freebsd') or env.TargetOSIs('openbsd'): - cpp_defines['LINUX'] = '1' - cpp_defines['freebsd'] = '1' - libs.append('m') - -# Set Architecture Defines -processor = env['TARGET_ARCH'] -# Using 32 bit -if processor == 'i386' or processor == 'emscripten': - cpp_defines['IA32'] = '1' - cpp_defines['ia32'] = '1' -elif processor == 'arm': - cpp_defines['IA32'] = '1' - cpp_defines['ia32'] = '1' -elif processor == "aarch64": - cpp_defines['efi2'] = '1' - cpp_defines['EFI2'] = '1' -# Using 64 bit 
little endian -elif processor == 'x86_64' or processor == 'ppc64le' or processor == 'riscv64': - cpp_defines['efi2'] = '1' - cpp_defines['EFI2'] = '1' -# Using 64 bit big endian -elif processor == 's390x': - cpp_defines['s390x'] = '1' - cpp_defines['BID_BIG_ENDIAN'] = '1' -else: - assert False, "Unsupported architecture: " + processor - -# Set Compiler Defines -if env.ToolchainIs('MSVC'): - cpp_defines['cl'] = '1' -else: - cpp_defines['gcc'] = '1' - -env.Append(CPPDEFINES=cpp_defines) - -if env.TargetOSIs('windows'): - extra_defines = ['WINDOWS', 'WNT', 'winnt'] - env.Append(CPPDEFINES=extra_defines) - # C4273: '...': inconsistent dll linkage - env.Append(CCFLAGS=['/wd4273']) - # C4477: incorrect scanf format string - env.Append(CCFLAGS=['/wd4477']) - -if env.ToolchainIs('msvc'): - removeIfPresent(env['CCFLAGS'], '/W3') -else: - env.Append(CCFLAGS='-w') - -if env["BUILDERS"].get("Ninja", None) is not None: - Return() - -readtestEnv = env.Clone() - -# Since the readtest program is fully C, smartlink was trying to link -# it with the C compiler driver. That is fine for dynamic builds, but -# in static builds falls afoul of -# https://github.com/SCons/scons/issues/3673 if the allocator library -# needs the C++ runtime. 
-if env["LINK"] == "$SMARTLINK" and get_option("link-model") != "dynamic": - readtestEnv["LINK"] = "$CXX" - -readtest = readtestEnv.Program( - target='intel_decimal128_readtest', - source=[ - 'TESTS/readtest.c', - ], - LIBDEPS=[ - 'intel_decimal128', - ], - LIBS=readtestEnv['LIBS'] + libs, - AIB_COMPONENT="intel-test", - AIB_COMPONENTS_EXTRA=[ - "unittests", - "tests", - ], -) - -readtest_input = env.AutoInstall( - target="$PREFIX_BINDIR", - source=["TESTS/readtest.in"], - AIB_ROLE="runtime", - AIB_COMPONENT="intel-test", - AIB_COMPONENTS_EXTRA=[ - "unittests", - "tests", - ], -) - -env.Depends(readtest_input, readtest) - -readtest_dict = { - '@readtest_python_interpreter@': sys.executable.replace('\\', r'\\'), - '@readtest_program@': readtest[0].name, - '@readtest_input@': readtest_input[0].name, -} - -readtest_wrapper = env.Substfile( - target='intel_decimal128_readtest_wrapper.py', - source=['intel_decimal128_readtest_wrapper.py.in'], - SUBST_DICT=readtest_dict, -) -env.Depends(readtest_wrapper, readtest_input) - -readtest_wrapper_install = env.AutoInstall( - target="$PREFIX_BINDIR", - source=readtest_wrapper, - AIB_ROLE="runtime", - AIB_COMPONENT="intel-test", - AIB_COMPONENTS_EXTRA=[ - "unittests", - "tests", - "first_group_unittests", - ], -) - -if env.TargetOSIs('windows'): - readtest_wrapper_bat = env.Substfile( - target='intel_decimal128_readtest_wrapper.bat', - source=['intel_decimal128_readtest_wrapper.bat.in'], - SUBST_DICT=readtest_dict, - ) - env.Depends(readtest_wrapper_bat, readtest_wrapper) - - readtest_wrapper_bat_install = env.AutoInstall( - target="$PREFIX_BINDIR", - source=readtest_wrapper_bat, - AIB_ROLE="runtime", - AIB_COMPONENT="intel-test", - AIB_COMPONENTS_EXTRA=[ - "unittests", - "tests", - "first_group_unittests", - ], - ) - env.RegisterTest("$UNITTEST_LIST", readtest_wrapper_bat_install[0]) - -else: - env.RegisterTest("$UNITTEST_LIST", readtest_wrapper_install[0]) - env.AddPostAction(readtest_wrapper[0], Chmod(readtest_wrapper[0], 
'oug+x')) diff --git a/src/third_party/IntelRDFPMathLib20U1/intel_decimal128_readtest_wrapper.py.in b/src/third_party/IntelRDFPMathLib20U1/intel_decimal128_readtest_wrapper.py.in index 24585cdd150..9a921df2f86 100755 --- a/src/third_party/IntelRDFPMathLib20U1/intel_decimal128_readtest_wrapper.py.in +++ b/src/third_party/IntelRDFPMathLib20U1/intel_decimal128_readtest_wrapper.py.in @@ -1,8 +1,6 @@ #!@readtest_python_interpreter@ """ -Wrapper python script to launch the readtest_program program as interpolated -by scons with the standard input set to the test file readtest_input, also as -interpolated by scons. +Wrapper python script to launch the readtest_program program with the standard input set to the test file readtest_input. """ import os diff --git a/src/third_party/OWNERS.yml b/src/third_party/OWNERS.yml index 7a30e99dd38..c36e46692bc 100644 --- a/src/third_party/OWNERS.yml +++ b/src/third_party/OWNERS.yml @@ -97,9 +97,6 @@ filters: - "schemastore.org": approvers: - 10gen/query-optimization - - "scons*": - approvers: - - 10gen/devprod-build - "snappy": approvers: - 10gen/server-networking-and-observability @@ -134,9 +131,6 @@ filters: - "zstandard": approvers: - 10gen/server-networking-and-observability - - "SConscript": - approvers: - - 10gen/devprod-build - "*.bazel": approvers: - 10gen/devprod-build diff --git a/src/third_party/SConscript b/src/third_party/SConscript deleted file mode 100644 index ae807e925e4..00000000000 --- a/src/third_party/SConscript +++ /dev/null @@ -1,432 +0,0 @@ -# -*- mode: python -*- -import SCons -import json - -Import([ - 'env', - 'get_option', - 'jsEngine', - 'use_libunwind', - 'use_system_version_of_library', - 'wiredtiger', - 'releaseBuild', - 'ssl_provider', - 'http_client', -]) - -icuSuffix = '-57.1' -tomcryptSuffix = '-1.18.2' - -thirdPartyEnvironmentModifications = { - 'abseil-cpp': {'CPPPATH': ['#/src/third_party/abseil-cpp/dist'], }, - 'cares': { - 'CPPPATH': [ - '#src/third_party/cares/dist/include', - 
'#src/third_party/cares/platform/${TARGET_OS}_${TARGET_ARCH}/install/include' - ], - }, - 'croaring': {'CPPPATH': ['#src/third_party/croaring/dist'], }, - 'fmt': {'CPPPATH': ['#src/third_party/fmt/dist/include'], }, - 'immer': {'CPPPATH': ['#src/third_party/immer/dist'], }, - 's2': {'CPPPATH': ['#src/third_party/s2'], }, - 'safeint': { - 'CPPPATH': ['#src/third_party/SafeInt'], - # SAFEINT_USE_INTRINSICS=0 for overflow-safe constexpr multiply. See comment in SafeInt.hpp. - 'CPPDEFINES': [('SAFEINT_USE_INTRINSICS', 0)], - }, - 'timelib': {'CPPPATH': ['#/src/third_party/timelib/dist'], }, - 'unwind': { - 'CPPPATH': [ - "#/src/third_party/unwind/platform/${TARGET_OS}_${TARGET_ARCH}/install/include", - ] - }, - 'mozjs': { - 'CPPPATH': [ - '#/src/third_party/mozjs/include', - '#/src/third_party/mozjs/mongo_sources', - '#/src/third_party/mozjs/platform/' + env["TARGET_ARCH"] + "/" + env["TARGET_OS"] + - "/include", - ], - 'FORCEINCLUDES': ['js-config.h', ], - }, - 'murmurhash3': {'CPPPATH': ['#src/third_party/murmurhash3', ], }, - 'librdkafka': { - 'CPPPATH': [ - '#/src/third_party/librdkafka/dist/src', - '#/src/third_party/librdkafka/dist/src-cpp', - ], - 'SYSLIBDEPS_PRIVATE': [ - 'curl', - ], - }, -} - -def injectMozJS(thisEnv): - thisEnv.InjectThirdParty(libraries=['mozjs']) - - if thisEnv.TargetOSIs('windows'): - thisEnv.Append(CPPDEFINES=[ - '_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING', - ], ) - else: - thisEnv.Append(CXXFLAGS=[ - '-Wno-non-virtual-dtor', - '-Wno-invalid-offsetof', - ], ) - - thisEnv.Prepend(CPPDEFINES=[ - 'JS_USE_CUSTOM_ALLOCATOR', - 'STATIC_JS_API=1', - ]) - - if get_option('spider-monkey-dbg') == "on": - thisEnv.Prepend(CPPDEFINES=[ - 'DEBUG', - 'JS_DEBUG', - 'JS_GC_ZEAL' - ]) - -env.AddMethod(injectMozJS, 'InjectMozJS') - -if http_client == "on": - if env.TargetOSIs("windows"): - env.Append(SYSLIBDEPS=["winhttp"]) - else: - env.Append(SYSLIBDEPS=["curl"]) - -ldap_lib = env.get("MONGO_LDAP_LIB", []) -if ldap_lib: - 
env.Append(SYSLIBDEPS=ldap_lib) - -gssapi_lib = env.get("MONGO_GSSAPI_LIB", []) -if gssapi_lib: - env.Append(SYSLIBDEPS=gssapi_lib) - -if "sasl" in env.get("MONGO_ENTERPRISE_FEATURES", []): - env.Append(SYSLIBDEPS=["sasl2"]) - -if env.ToolchainIs("msvc"): - env.Append(SYSLIBDEPS=["synchronization"]) - -if not use_system_version_of_library('tcmalloc-gperf'): - # GPerftools does this slightly differently than the others. - thirdPartyEnvironmentModifications['gperftools'] = { - 'CPPPATH': ["#/src/third_party/gperftools/platform/${TARGET_OS}_${TARGET_ARCH}/include"] - } - -if not use_system_version_of_library('tcmalloc-google'): - thirdPartyEnvironmentModifications['tcmalloc'] = { - 'CPPPATH': ['#/src/third_party/tcmalloc/dist'], - } - - if get_option('link-model') == 'dynamic': - thirdPartyEnvironmentModifications['tcmalloc']['CPPDEFINES'] = [ - ('MONGO_TCMALLOC_DYNAMIC_BUILD', 1) - ] - -if not use_system_version_of_library('pcre2'): - thirdPartyEnvironmentModifications['pcre2'] = { - 'CPPPATH': ['#/src/third_party/pcre2/src'], - } - -if not use_system_version_of_library('boost'): - - # On at least Apple clang, proto throws this error. - # - # See https://github.com/boostorg/proto/issues/30. - # - # We use a generator so we can filter out conf tests, where applying this - # flag could change their meaning. - def NoErrorForUnknownWarningOptionGenerator(target, source, env, for_signature): - if 'conftest' in str(target[0]): - return str() - return '-Wno-error=unknown-warning-option' - - thirdPartyEnvironmentModifications['boost'] = { - 'CPPPATH': ['#/src/third_party/boost'], - - # We could narror further to just clang on Darwin, but there is - # little harm in applying for all clang. 
- 'NOERROR_FOR_UNKNOWN_WARNING_OPTION_GEN': NoErrorForUnknownWarningOptionGenerator, - 'CCFLAGS': ['$NOERROR_FOR_UNKNOWN_WARNING_OPTION_GEN'] if env.ToolchainIs('clang') else [], - 'SYSLIBDEPS': ['synchronization'] if env.ToolchainIs('msvc') else [], - } - -if not use_system_version_of_library('snappy'): - platform_directory = "build_posix" - - if env.TargetOSIs('linux'): - platform_directory = "build_linux_" + env['TARGET_ARCH'] - elif env.TargetOSIs('windows'): - platform_directory = "build_windows" - elif env.TargetOSIs('darwin'): - platform_directory = "build_" + env["TARGET_OS"] + "_" + env["TARGET_ARCH"] - - thirdPartyEnvironmentModifications['snappy'] = { - 'CPPPATH': [ - '#/src/third_party/snappy/dist', - '#/src/third_party/snappy/platform/build_all', - '#/src/third_party/snappy/platform/' + platform_directory, - ], - } - -# Valgrind is a header only include as valgrind.h includes everything we need -if not use_system_version_of_library('valgrind'): - thirdPartyEnvironmentModifications['valgrind'] = { - 'CPPPATH': ['#/src/third_party/valgrind/include'], - } - -if not use_system_version_of_library('zlib'): - thirdPartyEnvironmentModifications['zlib'] = { - 'CPPPATH': ['#/src/third_party/zlib'], - } - -if not use_system_version_of_library('zstd'): - thirdPartyEnvironmentModifications['zstd'] = { - 'CPPPATH': ['#/src/third_party/zstandard/zstd/lib'], - } - -if not use_system_version_of_library('google-benchmark'): - thirdPartyEnvironmentModifications['benchmark'] = { - 'CPPPATH': ['#/src/third_party/benchmark/dist/include'], - } - -if "tom" in env["MONGO_CRYPTO"]: - thirdPartyEnvironmentModifications['tomcrypt'] = { - 'CPPPATH': ['#/src/third_party/tomcrypt' + tomcryptSuffix + '/src/headers'], - } - -if not use_system_version_of_library('tomcrypt'): - thirdPartyEnvironmentModifications['tomcrypt_md5'] = { - 'CPPPATH': ['#/src/third_party/tomcrypt' + tomcryptSuffix + '/src/headers'], - 'CPPDEFINES': ['LTC_NO_PROTOTYPES'] if env.TargetOSIs('windows') else [], - 
} - -if not use_system_version_of_library('stemmer'): - thirdPartyEnvironmentModifications['stemmer'] = { - 'CPPPATH': ['#/src/third_party/libstemmer_c/dist/include'], - } - -# Note that the wiredtiger.h header is generated, so -# we want to look for it in the build directory not -# the source directory. -if wiredtiger and not use_system_version_of_library('wiredtiger'): - thirdPartyEnvironmentModifications['wiredtiger'] = { - 'CPPPATH': ["#$BAZEL_OUT_DIR/src/third_party/wiredtiger"], - } - -if not use_system_version_of_library('yaml'): - thirdPartyEnvironmentModifications['yaml'] = { - 'CPPPATH': ['#/src/third_party/yaml-cpp/yaml-cpp/include'], - 'CPPDEFINES': ['_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING'] - if env.ToolchainIs('msvc') else [], - } - -if not use_system_version_of_library('asio'): - thirdPartyEnvironmentModifications['asio'] = { - 'CPPPATH': ['#/src/third_party/asio-master/asio/include'], - } - -if not use_system_version_of_library('intel_decimal128'): - thirdPartyEnvironmentModifications['intel_decimal128'] = { - 'CPPPATH': ['#/src/third_party/IntelRDFPMathLib20U1/LIBRARY/src'], - } - -if not use_system_version_of_library('icu'): - thirdPartyEnvironmentModifications['icu'] = { - 'CPPPATH': [ - '#/src/third_party/icu4c' + icuSuffix + '/source/common', - '#/src/third_party/icu4c' + icuSuffix + '/source/i18n', - ], - } - -if not use_system_version_of_library('bson'): - - platform_directory = "build_linux" - - if env['TARGET_ARCH'] == 's390x': - platform_directory = "build_linux_s390x" - - if env.TargetOSIs('windows'): - platform_directory = "build_windows" - elif env.TargetOSIs('darwin'): - platform_directory = "build_osx" - - thirdPartyEnvironmentModifications['bson'] = { - 'CPPPATH': [ - '#/src/third_party/libbson/dist/src/libbson/src', - '#/src/third_party/libbson/' + platform_directory, - '#/src/third_party/libbson/' + platform_directory + '/bson', - ], - 'CPPDEFINES': ['BSON_STATIC'], - } - -if not 
use_system_version_of_library('libmongocrypt'): - - platform_directory = "build_linux" - crypto_directory = "build_libcrypto" - - if env.TargetOSIs('windows'): - platform_directory = "build_windows" - crypto_directory = "build_cng" - elif env.TargetOSIs('darwin'): - platform_directory = "build_osx" - crypto_directory = "build_commoncrypto" - - if get_option('ssl') == 'off': - crypto_directory = "build_none" - - thirdPartyEnvironmentModifications['libmongocrypt'] = { - 'CPPPATH': [ - '#/src/third_party/libmongocrypt/dist/src', - '#/src/third_party/libmongocrypt/dist/kms-message/src', - '#/src/third_party/libmongocrypt/' + platform_directory, - '#/src/third_party/libmongocrypt/' + crypto_directory, - ], - 'CPPDEFINES': [ - 'KMS_MSG_STATIC', - 'MONGOCRYPT_STATIC_DEFINE', - ], - } - -if not use_system_version_of_library('protobuf'): - thirdPartyEnvironmentModifications['protobuf'] = { - 'CPPPATH': ['#src/third_party/protobuf/dist/src'], - } - -if not use_system_version_of_library('grpc'): - thirdPartyEnvironmentModifications['grpc'] = { - 'CPPPATH': ['#src/third_party/grpc/dist/include', '#src/third_party/grpc/dist/'], - } - thirdPartyEnvironmentModifications['re2'] = { - 'CPPPATH': ['#src/third_party/re2/dist'], - } - thirdPartyEnvironmentModifications['cares'] = { - 'CPPPATH': [ - '#src/third_party/cares/dist/include', - '#src/third_party/cares/platform/${TARGET_OS}_${TARGET_ARCH}/install/include' - ], - } - -# Vendored libunwind can be configured to use liblzma for decompressing -# minidebuginfo sections, but we disable that feature via -# `unwind/scripts/host-config.sh`. If vendored libunwind ever needs -# minidebuginfo support, we'd say so here, and we'd dynamically link with -# system lzma. Until then, it would be an unnecessary dynamic dependency. -# However, the system libunwind is assumed to need `lzma` unconditionally. 
-vendored_libunwind_needs_lzma = False - -if vendored_libunwind_needs_lzma: - thirdPartyEnvironmentModifications['unwind'] = { - 'SYSLIBDEPS_PRIVATE': [env['LIBDEPS_LZMA_SYSLIBDEP']], - } - - -def injectThirdParty(thisEnv, libraries=[], parts=[]): - libraries = thisEnv.Flatten([libraries]) - parts = thisEnv.Flatten([parts]) - for lib in libraries: - mods = thirdPartyEnvironmentModifications.get(lib, None) - if mods is None: - continue - if not parts: - thisEnv.PrependUnique(**mods) - else: - for part in parts: - thisEnv.PrependUnique({part: mods[part]}) - - -env.AddMethod(injectThirdParty, 'InjectThirdParty') - -env = env.Clone() - -# Construct an empty object file that we can use to produce the -# library for every shim. This avoids the need to create and name a -# different empty source file for every third-party library, as we did -# in the past. - -empty_source = env.Textfile( - target='third_party_shim.cpp', - source=str(), -) -env.Alias('generated-sources', empty_source) - -empty_object = env.LibraryObject(target='third_party_shim', source=empty_source, - NINJA_GENSOURCE_INDEPENDENT=True) - - -def shim_library(env, name, **kwargs): - # Add the 'virtual-libdep' tag, which will prevent shim libraries - # from actually being linked to. They don't provide any symbols, - # so there is no need to do so. Instead, they just act as a node - # in the library dependency graph to reach other libraries. - libdeps_tags = kwargs.get('LIBDEPS_TAGS', env.get('LIBDEPS_TAGS', [])).copy() - libdeps_tags.append('virtual-libdep') - kwargs['LIBDEPS_TAGS'] = libdeps_tags - return env.Library( - target=f'shim_{name}', - source=empty_object[0], - # Since nothing will link to this library per the - # `virtual-libdep` tag above, we can also skip installing it. 
- AIB_IGNORE=True, - **kwargs, - ) - - -env.AddMethod(shim_library, 'ShimLibrary') - -if jsEngine: - mozjsEnv = env.Clone() - -wiredtigerEnv = env.Clone() -if wiredtiger: - if use_system_version_of_library("wiredtiger"): - wiredtigerEnv = wiredtigerEnv.Clone(SYSLIBDEPS=[ - env['LIBDEPS_WIREDTIGER_SYSLIBDEP'], - ]) - else: - wiredtigerEnv = wiredtigerEnv.Clone() - wiredtigerEnv.InjectThirdParty(libraries=['wiredtiger']) - wiredtigerEnv.SConscript('wiredtiger/SConscript', must_exist=1, exports={'env': wiredtigerEnv}) - wiredtigerEnv = wiredtigerEnv.Clone(LIBDEPS_INTERFACE=[ - 'wiredtiger/wiredtiger', - ]) - -if env.TargetOSIs('linux') and get_option("ssl") == "on": - - protobufEnv = env.Clone(NINJA_GENSOURCE_INDEPENDENT=True) - if use_system_version_of_library("protobuf"): - protobufEnv = protobufEnv.Clone( - SYSLIBDEPS=[ - env['LIBDEPS_PROTOBUF_SYSLIBDEP'], - env['LIBDEPS_PROTOC_SYSLIBDEP'], - ], ) - else: - protobufEnv.SConscript(dirs=[ - 'protobuf', - ], duplicate=False, exports={'env': env}, must_exist=1,) - protobufEnv = protobufEnv.Clone(LIBDEPS_INTERFACE=[ - 'protobuf/protoc', - ]) - protobufEnv.ShimLibrary(name="protobuf", ) - - grpcEnv = env.Clone() - if use_system_version_of_library("grpc"): - grpcEnv = grpcEnv.Clone(SYSLIBDEPS=[ - env['LIBDEPS_GRPC_SYSLIBDEP'], - env['LIBDEPS_GRPCXX_SYSLIBDEP'], - env['LIBDEPS_GRPCXX_REFLECTION_SYSLIBDEP'], - ]) - else: - grpcEnv.SConscript( - dirs=[ - 'grpc', - ], duplicate=False, exports={'env': env}, must_exist=1,) - grpcEnv = grpcEnv.Clone(LIBDEPS_INTERFACE=[ - 'grpc/grpc++_reflection', - ]) - grpcEnv.ShimLibrary(name="grpc", ) - -if env.ShouldBuildStreams(): - libKafkaEnv = env.Clone() - libKafkaEnv.InjectThirdParty(libraries=['librdkafka']) diff --git a/src/third_party/abseil-cpp/scripts/README.md b/src/third_party/abseil-cpp/scripts/README.md index d79e8634efc..c89a91c350f 100644 --- a/src/third_party/abseil-cpp/scripts/README.md +++ b/src/third_party/abseil-cpp/scripts/README.md @@ -1,8 +1,8 @@ # Updating abseil 
-The SConscript file for the abseil build is parsed via the generated ninja +The BUILD.bazel file for the abseil build is parsed via the generated ninja file from the native abseil cmake build. The parse_libs_from_ninja.py will -perform the parsing and generation of the SConscript file. +perform the parsing and generation of the BUILD.bazel file. To update abseil you should: @@ -11,7 +11,7 @@ To update abseil you should: 3. run the import.sh script 4. run the parse_libs_from_ninja.py -# Updating the SConscript generated libraries +# Updating the BUILD.bazel generated libraries The parse_libs_from_ninja.py will extract specifically requested libraries from the native abseil build. This list of libraries should be a python list diff --git a/src/third_party/abseil-cpp/scripts/import.sh b/src/third_party/abseil-cpp/scripts/import.sh index 66b289f0969..3d69847f8d2 100755 --- a/src/third_party/abseil-cpp/scripts/import.sh +++ b/src/third_party/abseil-cpp/scripts/import.sh @@ -23,6 +23,6 @@ pushd $DEST_DIR/dist git apply $PATCH_DIR/*.patch find . 
-mindepth 1 -maxdepth 1 -name ".*" -exec rm -rf {} \; rm -rf ci -rm -rf scons_gen_build +rm -rf gen_build find absl -depth -type d -name "testdata" -exec rm -rf {} \; popd diff --git a/src/third_party/abseil-cpp/scripts/parse_libs_from_ninja.py b/src/third_party/abseil-cpp/scripts/parse_libs_from_ninja.py index a13b5c1d601..c7cdfc7e5a2 100644 --- a/src/third_party/abseil-cpp/scripts/parse_libs_from_ninja.py +++ b/src/third_party/abseil-cpp/scripts/parse_libs_from_ninja.py @@ -77,7 +77,7 @@ logging.basicConfig( original_target_libs = target_libs.copy() logging.info(f"Original list: {original_target_libs}") -ninja_build_dir = pathlib.Path(__file__).parent.parent / "dist" / "scons_gen_build" +ninja_build_dir = pathlib.Path(__file__).parent.parent / "dist" / "gen_build" if not os.path.exists(ninja_build_dir): os.mkdir(ninja_build_dir) environ = os.environ.copy() @@ -90,34 +90,7 @@ if not os.path.exists(ninja_build_dir): with open(ninja_build_dir / "build.ninja") as fninja: content = fninja.readlines() -with open(pathlib.Path(__file__).parent.parent / "SConscript", "w") as sconscript: with open(pathlib.Path(__file__).parent.parent / "BUILD.bazel", "w") as bazel: - sconscript.write("""\ -# AUTO-GENERATED FILE DO NOT MANUALLY EDIT -# generated from the parse_libs_from_ninja.py script in scripts directory via `python ./parse_libs_from_ninja.py` -Import("env") -env = env.Clone(NINJA_GENSOURCE_INDEPENDENT=True, LIBDEPS_NO_INHERIT=[ - # libunwind and tcmalloc are both added as global dependencies. Skip - # inheriting global dependencies to avoid a circular dependency. 
- '$BUILD_DIR/third_party/unwind/unwind', - "$BUILD_DIR/third_party/tcmalloc/tcmalloc", - "$BUILD_DIR/third_party/gperftools/tcmalloc_minimal", -] -env.InjectThirdParty(libraries=['abseil-cpp']) -if env.ToolchainIs('msvc'): - env.Append( - CPPDEFINES=[ - 'NOMINMAX', - ], - CCFLAGS=[], - ) - -if env.ToolchainIs('gcc'): - env.Append( - CCFLAGS=[ - '-Wno-error=ignored-attributes', - ], ) -""") abseil_headers = glob.glob( str(pathlib.Path(__file__).parent.parent / "dist/absl/**/*.h"), recursive=True @@ -231,20 +204,6 @@ ABSEIL_SKIP_GLOBAL_DEPS = [ logging.info(f"Found library {found_target_lib}") logging.info(f"Libbraries left to find: {target_libs.difference(written_libs)}") - sconscript.write(f"""\ -{f'# {found_target_lib} added as a dependency of other abseil libraries' -if found_target_lib not in original_target_libs -else f'# {found_target_lib} is an explicit dependency to the server build'} -env.BazelLibrary( - target='{found_target_lib}', - source=[ -{os.linesep.join([f" '{source}'," for source in source_files])} - ], - LIBDEPS=[ -{os.linesep.join([f" '{libdep}'," for libdep in sorted(libdeps)])} - ], -) -""") bazel.write(f"""\ {f'# {found_target_lib} added as a dependency of other abseil libraries' if found_target_lib not in original_target_libs diff --git a/src/third_party/asio-master/asio/include/asio/detail/config.hpp b/src/third_party/asio-master/asio/include/asio/detail/config.hpp index 2b8582e4077..6b83225a783 100644 --- a/src/third_party/asio-master/asio/include/asio/detail/config.hpp +++ b/src/third_party/asio-master/asio/include/asio/detail/config.hpp @@ -16,7 +16,7 @@ // We cannot set these outside of the ASIO drop, because a system copy of ASIO might not have been // built with ASIO_SEPARATE_COMPILATION. // TODO: Remove this once we have a mechanism for injecting third party includes and CPPDEFINES into -// individual scons objects. +// individual bazel objects. 
# define ASIO_STANDALONE // MongoDB # define ASIO_SEPARATE_COMPILATION // MongoDB diff --git a/src/third_party/gperftools/README.md b/src/third_party/gperftools/README.md index cbc5b003b99..ec43bfdf963 100644 --- a/src/third_party/gperftools/README.md +++ b/src/third_party/gperftools/README.md @@ -3,7 +3,7 @@ Don't make manual changes to files in the dist/ or platform/ directories. Source code changes should be made in the mongodb-labs/gperftools repo. Tweaks or reconfigurations of our third-party installation have to be -made in scripts/ or SConscript files, etc. +made in scripts/ or BUILD.bazel files, etc. == contents == diff --git a/src/third_party/gperftools/scripts/import.sh b/src/third_party/gperftools/scripts/import.sh index 302cd2bdc8f..fd1b1a66ced 100755 --- a/src/third_party/gperftools/scripts/import.sh +++ b/src/third_party/gperftools/scripts/import.sh @@ -1,12 +1,12 @@ #!/bin/bash # This script downloads and imports gperftools. # It can be run on Linux, Windows WSL or Mac OS X. -# The actual integration via SConscript is not done by this script +# The actual integration via BUILD.bazel is not done by this script # # NOTES # 1. Gperftools is autotools based except for Windows where it has a checked in config.h # 2. On Linux, we generate config.h on the oldest supported distribution for each architecture -# But to support newer distributions we must set some defines via SConscript instead of config.h +# But to support newer distributions we must set some defines via BUILD.bazel instead of config.h # 3. tcmalloc.h is configured by autotools for system installation purposes, but we modify it # to be used across platforms via an ifdef instead. This matches the corresponding logic used in # tcmalloc.cc to control functions that are guarded by HAVE_STRUCT_MALLINFO. 
diff --git a/src/third_party/grpc/SConscript b/src/third_party/grpc/SConscript deleted file mode 100644 index 2dcb2ae839f..00000000000 --- a/src/third_party/grpc/SConscript +++ /dev/null @@ -1,144 +0,0 @@ -Import('env') - -import os - -grpc_env = env.Clone() - -grpc_env.InjectThirdParty(libraries=['abseil-cpp', 're2', 'cares', 'protobuf', 'zlib']) - -if grpc_env.ToolchainIs('gcc', 'clang'): - - grpc_env.Append(CCFLAGS=[ - '-Wno-error=sign-compare', - '-Wno-error=comment', - ], ) - - if grpc_env.ToolchainIs('gcc'): - grpc_env.Append( - CCFLAGS=[ - '-Wno-stringop-overread', # false positive: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=98465#c30 - '-Wno-stringop-overflow', - '-Wno-error=attributes', - '-Wno-error=class-memaccess', - '-Wno-error=overloaded-virtual', - ], ) - -grpc_env.Append(CPPDEFINES=[ - '_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING', -], ) - -if grpc_env.ToolchainIs('msvc'): - grpc_env.Append( - CCFLAGS=[ - '/wd4334', # '<<': result of 32-bit shift implicitly converted to 64 bits (was 64-bit shift intended?) 
- '/wd4116', # unnamed type definition in parentheses - '/wd4146', # unary minus operator applied to unsigned type, result still unsigned - '/wd4715', # not all control paths return a value - '/wd4200', # nonstandard extension used: zero-sized array in struct/union - '/wd4312', # 'reinterpret_cast': conversion from 'unsigned int' to 'void *' of greater size - '/wd4090', # 'function': different 'const' qualifiers - ], - CPPDEFINES=[ - '_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING', - ], - ) - -grpc_root = grpc_env.Dir(".").srcnode() - -upb_env = grpc_env.Clone() -upb_env.Append( - CPPPATH=[ - grpc_root.Dir('dist/third_party/upb'), - grpc_root.Dir('dist/third_party/utf8_range'), - grpc_root.Dir('dist/src/core/ext/upb-generated'), - grpc_root.Dir('dist/src/core/ext/upbdefs-generated'), - ], ) - - -address_sorting_env = grpc_env.Clone() -address_sorting_env.Append(CPPPATH=[ - grpc_root.Dir('dist/third_party/address_sorting/include'), -], ) - - -grpc_env.Append( - CPPPATH=[ - grpc_root.Dir('dist'), - grpc_env.Dir('dist').get_path(), - grpc_root.Dir('dist/include'), - grpc_root.Dir('dist/third_party/upb'), - grpc_root.Dir('dist/third_party/xxhash'), - grpc_root.Dir('dist/src/core/ext/upb-generated'), - grpc_root.Dir('dist/src/core/ext/upbdefs-generated'), - grpc_root.Dir('dist/third_party/address_sorting/include'), - grpc_root.Dir('dist/third_party/address_sorting/include'), - grpc_root.Dir('dist/third_party/utf8_range'), - ], ) - -protobuf_plugin_env = grpc_env.Clone(NINJA_GENSOURCE_INDEPENDENT=True) - -gpr_env = grpc_env.Clone() - -# older rhel7 platforms with older glibc give a warning `__warn_memset_zero_len` which -# can not be selectively repressed, so we are disabling all warnings for this library. 
-if gpr_env['TARGET_ARCH'] in ['x86_64', 's390x', 'ppc64le']: - gpr_env.Append( - CCFLAGS=[ - '-Wno-error', - ], - LINKFLAGS=[ - '-Wl,--no-fatal-warnings', - ], - ) - -grpcxx_env = grpc_env.Clone() - -grpcxx_env.AppendUnique( - PROTOC_GEN_TYPES=['cpp'], PROTOC_PLUGINS={ - 'grpc': { - 'plugin': '$PROTOC_GRPC_PLUGIN', - 'options': ['generate_mock_code=true'], - 'exts': ['.grpc.pb.cc', '.grpc.pb.h'], - }, - }, CPPPATH=[grpcxx_env.Dir('.')]) - -grpc_unittest_env = grpc_env.Clone() -grpc_unittest_env.InjectThirdParty(libraries=['fmt', 'variant', 'boost', 'safeint']) - -# purposely changing the gen dir as an example and to excerise setting gen dirs -# this causes the helloworld to gen in the $BUILD_DIR/third_party/grpc dir -# instead of the dist/examples/protos dir (adjacent to the helloworkd.proto) -grpc_unittest_env.AppendUnique( - PROTOC_GEN_TYPES=[['cpp', grpc_unittest_env.Dir('.')]], PROTOC_PLUGINS={ - 'grpc': { - 'plugin': '$PROTOC_GRPC_PLUGIN', - 'options': ['generate_mock_code=true'], - 'gen_out': grpc_unittest_env.Dir('.'), - 'exts': ['.grpc.pb.cc', '.grpc.pb.h'], - }, - }, CPPPATH=[grpc_unittest_env.Dir('.')]) - -grpc_unittest_env.Append(CPPPATH=[ - '#src', - '$BUILD_DIR', - '#$BAZEL_OUT_DIR/src' - '#$BAZEL_OUT_DIR' - '#$BAZEL_OUT_DIR/src/' - '#$BAZEL_OUT_DIR/' - '#/bazel-bin', - '#/bazel-bin/src', - '#/bazel-bin/', - '#/bazel-bin/src/', - '#/bazel-bin/src/third_party/grpc/dist/examples/protos' -], ) - -grpc_unittest_env.CppUnitTest( - target="grpc_lib_test", - source=[], -) - -grpc_unittest_env.BazelProgram( - target="grpc_cpp_plugin", - source=[], - LIBDEPS=[], -) diff --git a/src/third_party/libstemmer_c/BUILD.bazel b/src/third_party/libstemmer_c/BUILD.bazel index e01572afea5..4664f68ec62 100644 --- a/src/third_party/libstemmer_c/BUILD.bazel +++ b/src/third_party/libstemmer_c/BUILD.bazel @@ -55,7 +55,6 @@ mongo_cc_library( "//conditions:default": [], }), includes = [ - # from https://github.com/10gen/mongo/blob/master/src/third_party/SConscript#L172-L175 
"dist/include", ], ) diff --git a/src/third_party/protobuf/SConscript b/src/third_party/protobuf/SConscript deleted file mode 100644 index c6408db8063..00000000000 --- a/src/third_party/protobuf/SConscript +++ /dev/null @@ -1,63 +0,0 @@ -Import('env') -Import('debugBuild') -protobuf_env = env.Clone(NINJA_GENSOURCE_INDEPENDENT=True) - -protobuf_env.InjectThirdParty(libraries=['abseil-cpp', 'zlib']) - -protobuf_env.Append(CPPDEFINES=[ - 'HAVE_CONFIG_H', - ('HAVE_ZLIB', '1'), - ('PIC', '1'), -], ) - -if protobuf_env.ToolchainIs('gcc', 'clang'): - protobuf_env.Append( - CPPDEFINES=[ - ('HAVE_PTHREAD', '1'), - ], - CCFLAGS=[ - '-Wno-sign-compare', - '-Wno-overloaded-virtual', - ], - ) - if debugBuild: - protobuf_env.Append(CCFLAGS=['-Wno-error']) - -if protobuf_env.ToolchainIs('gcc'): - protobuf_env.Append( - CPPDEFINES=[], - CCFLAGS=[ - '-Wno-stringop-overflow', - '-Wno-error=deprecated', - '-Wno-error=attributes', - ], - ) - -if protobuf_env.ToolchainIs('msvc'): - protobuf_env.Append( - CCFLAGS=[ - '/wd4018', # signed/unsigned mismatch - '/wd4065', # switch statement contains 'default' but no 'case' labels - '/wd4146', # unary minus operator applied to unsigned type, result still unsigned - ], - CPPDEFINES=[ - '_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING', - ], - ) - -protobuf_root = protobuf_env.Dir(".").srcnode() -protobuf_platform = protobuf_root.Dir("platform/${TARGET_OS}_${TARGET_ARCH}") -protobuf_env.Append(CPPPATH=[ - protobuf_root.Dir("dist"), - protobuf_root.Dir("dist/src"), -], ) - -protobuf_env.BazelProgram( - target="protobuf_compiler", - source=[], - LIBDEPS=[], -) - -# Passing this up to the main env -env['PROTOC_DESCRIPTOR_PROTO'] = protobuf_env.File( - "dist/src/google/protobuf/descriptor.proto").abspath diff --git a/src/third_party/scons-3.1.2/scons-3.1.2.bat b/src/third_party/scons-3.1.2/scons-3.1.2.bat deleted file mode 100755 index dc671889f63..00000000000 --- a/src/third_party/scons-3.1.2/scons-3.1.2.bat +++ /dev/null @@ -1,38 +0,0 @@ 
-@REM Copyright (c) 2001 - 2019 The SCons Foundation -@REM src/script/scons.bat bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan -@echo off -set SCONS_ERRORLEVEL= -if "%OS%" == "Windows_NT" goto WinNT - -@REM for 9x/Me you better not have more than 9 args -python -c "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-3.1.2'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons-3.1.2'), join(sys.prefix, 'scons')] + sys.path; import SCons.Script; SCons.Script.main()" %1 %2 %3 %4 %5 %6 %7 %8 %9 -@REM no way to set exit status of this script for 9x/Me -goto endscons - -@REM Credit where credit is due: we return the exit code despite our -@REM use of setlocal+endlocal using a technique from Bear's Journal: -@REM http://code-bear.com/bearlog/2007/06/01/getting-the-exit-code-from-a-batch-file-that-is-run-from-a-python-program/ - -:WinNT -setlocal -@REM ensure the script will be executed with the Python it was installed for -pushd %~dp0.. 
-set path=%~dp0;%CD%;%path% -popd -@REM try the script named as the .bat file in current dir, then in Scripts subdir -set scriptname=%~dp0%~n0.py -if not exist "%scriptname%" set scriptname=%~dp0Scripts\%~n0.py -@REM Handle when running from wheel where the script has no .py extension -if not exist "%scriptname%" set scriptname=%~dp0%~n0 -python "%scriptname%" %* -endlocal & set SCONS_ERRORLEVEL=%ERRORLEVEL% - -if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto returncode -if errorlevel 9009 echo you do not have python in your PATH -goto endscons - -:returncode -exit /B %SCONS_ERRORLEVEL% - -:endscons -call :returncode %SCONS_ERRORLEVEL% diff --git a/src/third_party/scons-3.1.2/scons-LICENSE b/src/third_party/scons-3.1.2/scons-LICENSE deleted file mode 100644 index 9f6ec757710..00000000000 --- a/src/third_party/scons-3.1.2/scons-LICENSE +++ /dev/null @@ -1,27 +0,0 @@ - Copyright and license for SCons - a software construction tool - - This copyright and license do not apply to any other software - with which this software may have been included. - -MIT License - -Copyright (c) 2001 - 2019 The SCons Foundation - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/third_party/scons-3.1.2/scons-README b/src/third_party/scons-3.1.2/scons-README deleted file mode 100644 index 301b50684d1..00000000000 --- a/src/third_party/scons-3.1.2/scons-README +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright (c) 2001 - 2019 The SCons Foundation - - SCons - a software construction tool - -This is the scons-README file for a version of SCons packaged for local -execution--that is, execution out of a specific local directory, without -having to install SCons as a system-wide utility. - -You are likely reading this file in one of the following two situations: - - 1) You have unpacked an scons-local-{version} package and are - examining the contents. - - In this case, you are presumably interested in using this - package to include a local copy of SCons with some other - software that you package, so that you can use SCons to build - your software without forcing all of your users to have it fully - installed. Instructions for this can be found below. - - If you are not looking to use SCons in this way, then please - use either the scons-{version} package to install SCons on your - system, or the scons-src-{version} package if you want the full - source to SCons, including its packaging code and underlying - tests and testing infrastructure. - - 2) This file was included in some other software package so that - the package could be built using SCons. - - In this case, follow the instructions provided with the - rest of the software package for how to use SCons to build - and/or install the software. The file containing build and - installation instructions will typically be named README or - INSTALL. 
- -LATEST VERSION -============== - -Before going further, you can check for the latest version of the -scons-local package, or any SCons package, at the SCons download page: - - http://www.scons.org/download.html - - -EXECUTION REQUIREMENTS -====================== - -Running SCons requires either Python version 2.7.* or Python 3.5 or higher. -There should be no other dependencies or requirements to run SCons. - -The default SCons configuration assumes use of the Microsoft Visual C++ -compiler suite on WIN32 systems (either through the Visual Studio -product, or through the separate Build Tools), and assumes a C compiler -named 'cc', a C++ compiler named 'c++', and a Fortran compiler named 'g77' -(such as found in the GNU Compiler Collection) on any other type of system. -You may, of course, override these default values by appropriate -configuration of Environment construction variables. - - -INSTALLATION -============ - -Installation of this package should be as simple as unpacking the -archive (either .tar.gz or .zip) in any directory (top-level or a -subdirectory) within the software package with which you want to ship -SCons. - -Once you have installed this package, you should write an SConstruct -file at the top level of your source tree to build your software as you -see fit. - -Then modify the build/install instructions for your package to instruct -your users to execute SCons as follows (if you installed this package in -your top-level directory): - - $ python scons.py - -Or (if, for example, you installed this package in a subdirectory named -"scons"): - - $ python scons/scons.py - -That should be all you have to do. (If it isn't that simple, please let -us know!) - - -CONTENTS OF THIS PACKAGE -======================== - -This scons-local package consists of the following: - -scons-LICENSE - A copy of the copyright and terms under which SCons is - distributed (the Open Source Initiative-approved MIT license). 
- - A disclaimer has been added to the beginning to make clear that - this license applies only to SCons, and not to any separate - software you've written with which you're planning to package - SCons. - -scons-README - What you're looking at right now. - -scons-local-{version}/ - The SCons build engine. This is structured as a Python - library. - -scons.py - The SCons script itself. The script sets up the Python - sys.path variable to use the build engine found in the - scons-local-{version}/ directory in preference to any other - SCons build engine installed on your system. - - -DOCUMENTATION -============= - -Because this package is intended to be included with other software by -experienced users, we have not included any SCons documentation in this -package (other than this scons-README file you're reading right now). - -If, however, you need documentation about SCons, then consult any of the -following from the corresponding scons-{version} or scons-src-{version} -package: - - The RELEASE.txt file (src/RELEASE.txt file in the - scons-src-{version} package), which contains notes about this - specific release, including known problems. - - The CHANGES.txt file (src/CHANGES.txt file in the - scons-src-{version} package), which contains a list of changes - since the previous release. - - The scons.1 man page (doc/man/scons.1 in the scons-src-{version} - package), which contains a section of small examples for getting - started using SCons. - -Additional documentation for SCons is available at: - - http://www.scons.org/doc.html - - -LICENSING -========= - -SCons is distributed under the MIT license, a full copy of which is -available in the scons-LICENSE file in this package. The MIT license is -an approved Open Source license, which means: - - This software is OSI Certified Open Source Software. OSI - Certified is a certification mark of the Open Source Initiative. 
- -More information about OSI certifications and Open Source software is -available at: - - http://www.opensource.org/ - - -REPORTING BUGS -============== - -The SCons project welcomes bug reports and feature requests. - -Please make sure you send email with the problem or feature request to -the SCons users mailing list, which you can join via the link below: - - http://two.pairlist.net/mailman/listinfo/scons-users - -Once you have discussed your issue on the users mailing list and the -community has confirmed that it is either a new bug or a duplicate of an -existing bug, then please follow the instructions the community provides -to file a new bug or to add yourself to the CC list for an existing bug - -You can explore the list of existing bugs, which may include workarounds -for the problem you've run into, on GitHub: - - https://github.com/SCons/scons/issues - - - -MAILING LISTS -============= - -A mailing list for users of SCons is available. You may send questions -or comments to the list at: - - scons-users@scons.org - -You may subscribe to the scons-users mailing list at: - - http://two.pairlist.net/mailman/listinfo/scons-users - -An active mailing list for developers of SCons is available. You may -send questions or comments to the list at: - - scons-dev@scons.org - -You may subscribe to the developer's mailing list using form on this page: - - http://two.pairlist.net/mailman/listinfo/scons-dev - -Subscription to the developer's mailing list is by approval. In practice, no -one is refused list membership, but we reserve the right to limit membership -in the future and/or weed out lurkers. - -There is also a low-volume mailing list available for announcements about -SCons. Subscribe by sending email to: - - announce-subscribe@scons.tigris.org - -There are other mailing lists available for SCons users, for notification of -SCons code changes, and for notification of updated bug reports and project -documents. Please see our mailing lists page for details. 
- - - -FOR MORE INFORMATION -==================== - -Check the SCons web site at: - - http://www.scons.org/ - - -AUTHOR INFO -=========== - -Steven Knight -knight at baldmt dot com -http://www.baldmt.com/~knight/ - -With plenty of help from the SCons Development team: - Chad Austin - Charles Crain - Steve Leblanc - Anthony Roach - Terrel Shumway - - diff --git a/src/third_party/scons-3.1.2/scons-configure-cache.py b/src/third_party/scons-3.1.2/scons-configure-cache.py deleted file mode 100755 index ee11943a0a8..00000000000 --- a/src/third_party/scons-3.1.2/scons-configure-cache.py +++ /dev/null @@ -1,178 +0,0 @@ -#! /usr/bin/env python -# -# SCons - a Software Constructor -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -'''Show or convert the configuration of an SCons cache directory. - -A cache of derived files is stored by file signature. 
-The files are split into directories named by the first few -digits of the signature. The prefix length used for directory -names can be changed by this script. -''' - -from __future__ import print_function -import argparse -import glob -import json -import os - -__revision__ = "src/script/scons-configure-cache.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__version__ = "3.1.2" - -__build__ = "bee7caf9defd6e108fc2998a2520ddb36a967691" - -__buildsys__ = "octodog" - -__date__ = "2019-12-17 02:07:09" - -__developer__ = "bdeegan" - - -def rearrange_cache_entries(current_prefix_len, new_prefix_len): - '''Move cache files if prefix length changed. - - Move the existing cache files to new directories of the - appropriate name length and clean up the old directories. - ''' - print('Changing prefix length from', current_prefix_len, - 'to', new_prefix_len) - dirs = set() - old_dirs = set() - for file in glob.iglob(os.path.join('*', '*')): - name = os.path.basename(file) - dname = name[:current_prefix_len].upper() - if dname not in old_dirs: - print('Migrating', dname) - old_dirs.add(dname) - dname = name[:new_prefix_len].upper() - if dname not in dirs: - os.mkdir(dname) - dirs.add(dname) - os.rename(file, os.path.join(dname, name)) - - # Now delete the original directories - for dname in old_dirs: - os.rmdir(dname) - - -# The configuration dictionary should have one entry per entry in the -# cache config. The value of each entry should include the following: -# implicit - (optional) This is to allow adding a new config entry and also -# changing the behaviour of the system at the same time. This -# indicates the value the config entry would have had if it had -# been specified. 
-# default - The value the config entry should have if it wasn't previously -# specified -# command-line - parameters to pass to ArgumentParser.add_argument -# converter - (optional) Function to call if conversion is required -# if this configuration entry changes -config_entries = { - 'prefix_len': { - 'implicit': 1, - 'default': 2, - 'command-line': { - 'help': 'Length of cache file name used as subdirectory prefix', - 'metavar': '', - 'type': int - }, - 'converter': rearrange_cache_entries - } -} - -parser = argparse.ArgumentParser( - description='Modify the configuration of an scons cache directory', - epilog=''' - Unspecified options will not be changed unless they are not - set at all, in which case they are set to an appropriate default. - ''') - -parser.add_argument('cache-dir', help='Path to scons cache directory') -for param in config_entries: - parser.add_argument('--' + param.replace('_', '-'), - **config_entries[param]['command-line']) -parser.add_argument('--version', - action='version', - version='%(prog)s 1.0') -parser.add_argument('--show', - action="store_true", - help="show current configuration") - -# Get the command line as a dict without any of the unspecified entries. -args = dict([x for x in vars(parser.parse_args()).items() if x[1]]) - -# It seems somewhat strange to me, but positional arguments don't get the - -# in the name changed to _, whereas optional arguments do... -cache = args['cache-dir'] -if not os.path.isdir(cache): - raise RuntimeError("There is no cache directory named %s" % cache) -os.chdir(cache) -del args['cache-dir'] - -if not os.path.exists('config'): - # old config dirs did not have a 'config' file. Try to update. 
- # Validate the only files in the directory are directories 0-9, a-f - expected = ['{:X}'.format(x) for x in range(0, 16)] - if not set(os.listdir('.')).issubset(expected): - raise RuntimeError( - "%s does not look like a valid version 1 cache directory" % cache) - config = dict() -else: - with open('config') as conf: - config = json.load(conf) - -if args.get('show', None): - print("Current configuration in '%s':" % cache) - print(json.dumps(config, sort_keys=True, - indent=4, separators=(',', ': '))) - # in case of the show argument, emit some stats as well - file_count = 0 - for _, _, files in os.walk('.'): - file_count += len(files) - if file_count: # skip config file if it exists - file_count -= 1 - print("Cache contains %s files" % file_count) - del args['show'] - -# Find any keys that are not currently set but should be -for key in config_entries: - if key not in config: - if 'implicit' in config_entries[key]: - config[key] = config_entries[key]['implicit'] - else: - config[key] = config_entries[key]['default'] - if key not in args: - args[key] = config_entries[key]['default'] - -# Now go through each entry in args to see if it changes an existing config -# setting. -for key in args: - if args[key] != config[key]: - if 'converter' in config_entries[key]: - config_entries[key]['converter'](config[key], args[key]) - config[key] = args[key] - -# and write the updated config file -with open('config', 'w') as conf: - json.dump(config, conf) diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Action.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Action.py deleted file mode 100644 index 2e4e742b4b7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Action.py +++ /dev/null @@ -1,1405 +0,0 @@ -"""SCons.Action - -This encapsulates information about executing any sort of action that -can build one or more target Nodes (typically files) from one or more -source Nodes (also typically files) given a specific Environment. 
- -The base class here is ActionBase. The base class supplies just a few -OO utility methods and some generic methods for displaying information -about an Action in response to the various commands that control printing. - -A second-level base class is _ActionAction. This extends ActionBase -by providing the methods that can be used to show and perform an -action. True Action objects will subclass _ActionAction; Action -factory class objects will subclass ActionBase. - -The heavy lifting is handled by subclasses for the different types of -actions we might execute: - - CommandAction - CommandGeneratorAction - FunctionAction - ListAction - -The subclasses supply the following public interface methods used by -other modules: - - __call__() - THE public interface, "calling" an Action object executes the - command or Python function. This also takes care of printing - a pre-substitution command for debugging purposes. - - get_contents() - Fetches the "contents" of an Action for signature calculation - plus the varlist. This is what gets MD5 checksummed to decide - if a target needs to be rebuilt because its action changed. - - genstring() - Returns a string representation of the Action *without* - command substitution, but allows a CommandGeneratorAction to - generate the right action based on the specified target, - source and env. This is used by the Signature subsystem - (through the Executor) to obtain an (imprecise) representation - of the Action operation for informative purposes. - - -Subclasses also supply the following methods for internal use within -this module: - - __str__() - Returns a string approximation of the Action; no variable - substitution is performed. - - execute() - The internal method that really, truly, actually handles the - execution of a command or Python function. 
This is used so - that the __call__() methods can take care of displaying any - pre-substitution representations, and *then* execute an action - without worrying about the specific Actions involved. - - get_presig() - Fetches the "contents" of a subclass for signature calculation. - The varlist is added to this to produce the Action's contents. - TODO(?): Change this to always return ascii/bytes and not unicode (or py3 strings) - - strfunction() - Returns a substituted string representation of the Action. - This is used by the _ActionAction.show() command to display the - command/function that will be executed to generate the target(s). - -There is a related independent ActionCaller class that looks like a -regular Action, and which serves as a wrapper for arbitrary functions -that we want to let the user specify the arguments to now, but actually -execute later (when an out-of-date check determines that it's needed to -be executed, for example). Objects of this class are returned by an -ActionFactory class that provides a __call__() method as a convenient -way for wrapping up the functions. - -""" - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Action.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import pickle -import re -import sys -import subprocess -import itertools -import inspect -from collections import OrderedDict - -import SCons.Debug -from SCons.Debug import logInstanceCreation -import SCons.Errors -import SCons.Util -import SCons.Subst - -# we use these a lot, so try to optimize them -from SCons.Util import is_String, is_List - -class _null(object): - pass - -print_actions = 1 -execute_actions = 1 -print_actions_presub = 0 - -# Use pickle protocol 1 when pickling functions for signature -# otherwise python3 and python2 will yield different pickles -# for the same object. -# This is due to default being 1 for python 2.7, and 3 for 3.x -# TODO: We can roll this forward to 2 (if it has value), but not -# before a deprecation cycle as the sconsigns will change -ACTION_SIGNATURE_PICKLE_PROTOCOL = 1 - - -def rfile(n): - try: - return n.rfile() - except AttributeError: - return n - - -def default_exitstatfunc(s): - return s - -strip_quotes = re.compile('^[\'"](.*)[\'"]$') - - -def _callable_contents(obj): - """Return the signature contents of a callable Python object. - """ - try: - # Test if obj is a method. - return _function_contents(obj.__func__) - - except AttributeError: - try: - # Test if obj is a callable object. - return _function_contents(obj.__call__.__func__) - - except AttributeError: - try: - # Test if obj is a code object. - return _code_contents(obj) - - except AttributeError: - # Test if obj is a function object. - return _function_contents(obj) - - -def _object_contents(obj): - """Return the signature contents of any Python object. 
- - We have to handle the case where object contains a code object - since it can be pickled directly. - """ - try: - # Test if obj is a method. - return _function_contents(obj.__func__) - - except AttributeError: - try: - # Test if obj is a callable object. - return _function_contents(obj.__call__.__func__) - - except AttributeError: - try: - # Test if obj is a code object. - return _code_contents(obj) - - except AttributeError: - try: - # Test if obj is a function object. - return _function_contents(obj) - - except AttributeError as ae: - # Should be a pickle-able Python object. - try: - return _object_instance_content(obj) - # pickling an Action instance or object doesn't yield a stable - # content as instance property may be dumped in different orders - # return pickle.dumps(obj, ACTION_SIGNATURE_PICKLE_PROTOCOL) - except (pickle.PicklingError, TypeError, AttributeError) as ex: - # This is weird, but it seems that nested classes - # are unpickable. The Python docs say it should - # always be a PicklingError, but some Python - # versions seem to return TypeError. Just do - # the best we can. - return bytearray(repr(obj), 'utf-8') - - -def _code_contents(code, docstring=None): - r"""Return the signature contents of a code object. - - By providing direct access to the code object of the - function, Python makes this extremely easy. Hooray! - - Unfortunately, older versions of Python include line - number indications in the compiled byte code. Boo! - So we remove the line number byte codes to prevent - recompilations from moving a Python function. 
- - See: - - https://docs.python.org/2/library/inspect.html - - http://python-reference.readthedocs.io/en/latest/docs/code/index.html - - For info on what each co\_ variable provides - - The signature is as follows (should be byte/chars): - co_argcount, len(co_varnames), len(co_cellvars), len(co_freevars), - ( comma separated signature for each object in co_consts ), - ( comma separated signature for each object in co_names ), - ( The bytecode with line number bytecodes removed from co_code ) - - co_argcount - Returns the number of positional arguments (including arguments with default values). - co_varnames - Returns a tuple containing the names of the local variables (starting with the argument names). - co_cellvars - Returns a tuple containing the names of local variables that are referenced by nested functions. - co_freevars - Returns a tuple containing the names of free variables. (?) - co_consts - Returns a tuple containing the literals used by the bytecode. - co_names - Returns a tuple containing the names used by the bytecode. - co_code - Returns a string representing the sequence of bytecode instructions. - - """ - - # contents = [] - - # The code contents depends on the number of local variables - # but not their actual names. - contents = bytearray("{}, {}".format(code.co_argcount, len(code.co_varnames)), 'utf-8') - - contents.extend(b", ") - contents.extend(bytearray(str(len(code.co_cellvars)), 'utf-8')) - contents.extend(b", ") - contents.extend(bytearray(str(len(code.co_freevars)), 'utf-8')) - - # The code contents depends on any constants accessed by the - # function. Note that we have to call _object_contents on each - # constants because the code object of nested functions can - # show-up among the constants. 
- z = [_object_contents(cc) for cc in code.co_consts if cc != docstring] - contents.extend(b',(') - contents.extend(bytearray(',', 'utf-8').join(z)) - contents.extend(b')') - - # The code contents depends on the variable names used to - # accessed global variable, as changing the variable name changes - # the variable actually accessed and therefore changes the - # function result. - z= [bytearray(_object_contents(cc)) for cc in code.co_names] - contents.extend(b',(') - contents.extend(bytearray(',','utf-8').join(z)) - contents.extend(b')') - - # The code contents depends on its actual code!!! - contents.extend(b',(') - contents.extend(code.co_code) - contents.extend(b')') - - return contents - - -def _function_contents(func): - """ - The signature is as follows (should be byte/chars): - < _code_contents (see above) from func.__code__ > - ,( comma separated _object_contents for function argument defaults) - ,( comma separated _object_contents for any closure contents ) - - - See also: https://docs.python.org/3/reference/datamodel.html - - func.__code__ - The code object representing the compiled function body. - - func.__defaults__ - A tuple containing default argument values for those arguments that have defaults, or None if no arguments have a default value - - func.__closure__ - None or a tuple of cells that contain bindings for the function's free variables. - - :Returns: - Signature contents of a function. (in bytes) - """ - - contents = [_code_contents(func.__code__, func.__doc__)] - - # The function contents depends on the value of defaults arguments - if func.__defaults__: - - function_defaults_contents = [_object_contents(cc) for cc in func.__defaults__] - - defaults = bytearray(b',(') - defaults.extend(bytearray(b',').join(function_defaults_contents)) - defaults.extend(b')') - - contents.append(defaults) - else: - contents.append(b',()') - - # The function contents depends on the closure captured cell values. 
- closure = func.__closure__ or [] - - try: - closure_contents = [_object_contents(x.cell_contents) for x in closure] - except AttributeError: - closure_contents = [] - - contents.append(b',(') - contents.append(bytearray(b',').join(closure_contents)) - contents.append(b')') - - retval = bytearray(b'').join(contents) - return retval - - -def _object_instance_content(obj): - """ - Returns consistant content for a action class or an instance thereof - - :Parameters: - - `obj` Should be either and action class or an instance thereof - - :Returns: - bytearray or bytes representing the obj suitable for generating a signature from. - """ - retval = bytearray() - - if obj is None: - return b'N.' - - if isinstance(obj, SCons.Util.BaseStringTypes): - return SCons.Util.to_bytes(obj) - - inst_class = obj.__class__ - inst_class_name = bytearray(obj.__class__.__name__,'utf-8') - inst_class_module = bytearray(obj.__class__.__module__,'utf-8') - inst_class_hierarchy = bytearray(repr(inspect.getclasstree([obj.__class__,])),'utf-8') - # print("ICH:%s : %s"%(inst_class_hierarchy, repr(obj))) - - properties = [(p, getattr(obj, p, "None")) for p in dir(obj) if not (p[:2] == '__' or inspect.ismethod(getattr(obj, p)) or inspect.isbuiltin(getattr(obj,p))) ] - properties.sort() - properties_str = ','.join(["%s=%s"%(p[0],p[1]) for p in properties]) - properties_bytes = bytearray(properties_str,'utf-8') - - methods = [p for p in dir(obj) if inspect.ismethod(getattr(obj, p))] - methods.sort() - - method_contents = [] - for m in methods: - # print("Method:%s"%m) - v = _function_contents(getattr(obj, m)) - # print("[%s->]V:%s [%s]"%(m,v,type(v))) - method_contents.append(v) - - retval = bytearray(b'{') - retval.extend(inst_class_name) - retval.extend(b":") - retval.extend(inst_class_module) - retval.extend(b'}[[') - retval.extend(inst_class_hierarchy) - retval.extend(b']]{{') - retval.extend(bytearray(b",").join(method_contents)) - retval.extend(b"}}{{{") - retval.extend(properties_bytes) - 
retval.extend(b'}}}') - return retval - - # print("class :%s"%inst_class) - # print("class_name :%s"%inst_class_name) - # print("class_module :%s"%inst_class_module) - # print("Class hier :\n%s"%pp.pformat(inst_class_hierarchy)) - # print("Inst Properties:\n%s"%pp.pformat(properties)) - # print("Inst Methods :\n%s"%pp.pformat(methods)) - -def _actionAppend(act1, act2): - # This function knows how to slap two actions together. - # Mainly, it handles ListActions by concatenating into - # a single ListAction. - a1 = Action(act1) - a2 = Action(act2) - if a1 is None: - return a2 - if a2 is None: - return a1 - if isinstance(a1, ListAction): - if isinstance(a2, ListAction): - return ListAction(a1.list + a2.list) - else: - return ListAction(a1.list + [ a2 ]) - else: - if isinstance(a2, ListAction): - return ListAction([ a1 ] + a2.list) - else: - return ListAction([ a1, a2 ]) - - -def _do_create_keywords(args, kw): - """This converts any arguments after the action argument into - their equivalent keywords and adds them to the kw argument. - """ - v = kw.get('varlist', ()) - # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O'] - if is_String(v): v = (v,) - kw['varlist'] = tuple(v) - if args: - # turn positional args into equivalent keywords - cmdstrfunc = args[0] - if cmdstrfunc is None or is_String(cmdstrfunc): - kw['cmdstr'] = cmdstrfunc - elif callable(cmdstrfunc): - kw['strfunction'] = cmdstrfunc - else: - raise SCons.Errors.UserError( - 'Invalid command display variable type. 
' - 'You must either pass a string or a callback which ' - 'accepts (target, source, env) as parameters.') - if len(args) > 1: - kw['varlist'] = tuple(SCons.Util.flatten(args[1:])) + kw['varlist'] - if kw.get('strfunction', _null) is not _null \ - and kw.get('cmdstr', _null) is not _null: - raise SCons.Errors.UserError( - 'Cannot have both strfunction and cmdstr args to Action()') - - -def _do_create_action(act, kw): - """This is the actual "implementation" for the - Action factory method, below. This handles the - fact that passing lists to Action() itself has - different semantics than passing lists as elements - of lists. - - The former will create a ListAction, the latter - will create a CommandAction by converting the inner - list elements to strings.""" - - if isinstance(act, ActionBase): - return act - - if is_String(act): - var=SCons.Util.get_environment_var(act) - if var: - # This looks like a string that is purely an Environment - # variable reference, like "$FOO" or "${FOO}". We do - # something special here...we lazily evaluate the contents - # of that Environment variable, so a user could put something - # like a function or a CommandGenerator in that variable - # instead of a string. - return LazyAction(var, kw) - commands = str(act).split('\n') - if len(commands) == 1: - return CommandAction(commands[0], **kw) - # The list of string commands may include a LazyAction, so we - # reprocess them via _do_create_list_action. - return _do_create_list_action(commands, kw) - - if is_List(act): - return CommandAction(act, **kw) - - if callable(act): - try: - gen = kw['generator'] - del kw['generator'] - except KeyError: - gen = 0 - if gen: - action_type = CommandGeneratorAction - else: - action_type = FunctionAction - return action_type(act, kw) - - # Catch a common error case with a nice message: - if isinstance(act, int) or isinstance(act, float): - raise TypeError("Don't know how to create an Action from a number (%s)"%act) - # Else fail silently (???) 
- return None - - -def _do_create_list_action(act, kw): - """A factory for list actions. Convert the input list into Actions - and then wrap them in a ListAction.""" - acts = [] - for a in act: - aa = _do_create_action(a, kw) - if aa is not None: acts.append(aa) - if not acts: - return ListAction([]) - elif len(acts) == 1: - return acts[0] - else: - return ListAction(acts) - - -def Action(act, *args, **kw): - """A factory for action objects.""" - # Really simple: the _do_create_* routines do the heavy lifting. - _do_create_keywords(args, kw) - if is_List(act): - return _do_create_list_action(act, kw) - return _do_create_action(act, kw) - - -class ActionBase(object): - """Base class for all types of action objects that can be held by - other objects (Builders, Executors, etc.) This provides the - common methods for manipulating and combining those actions.""" - - def __eq__(self, other): - return self.__dict__ == other - - def no_batch_key(self, env, target, source): - return None - - batch_key = no_batch_key - - def genstring(self, target, source, env): - return str(self) - - def get_contents(self, target, source, env): - result = self.get_presig(target, source, env) - - if not isinstance(result,(bytes, bytearray)): - result = bytearray(result, 'utf-8') - else: - # Make a copy and put in bytearray, without this the contents returned by get_presig - # can be changed by the logic below, appending with each call and causing very - # hard to track down issues... - result = bytearray(result) - - # At this point everything should be a bytearray - - # This should never happen, as the Action() factory should wrap - # the varlist, but just in case an action is created directly, - # we duplicate this check here. 
- vl = self.get_varlist(target, source, env) - if is_String(vl): vl = (vl,) - for v in vl: - # do the subst this way to ignore $(...$) parts: - if isinstance(result, bytearray): - result.extend(SCons.Util.to_bytes(env.subst_target_source('${'+v+'}', SCons.Subst.SUBST_SIG, target, source))) - else: - raise Exception("WE SHOULD NEVER GET HERE result should be bytearray not:%s"%type(result)) - # result.append(SCons.Util.to_bytes(env.subst_target_source('${'+v+'}', SCons.Subst.SUBST_SIG, target, source))) - - - if isinstance(result, (bytes,bytearray)): - return result - else: - raise Exception("WE SHOULD NEVER GET HERE - #2 result should be bytearray not:%s" % type(result)) - # return b''.join(result) - - def __add__(self, other): - return _actionAppend(self, other) - - def __radd__(self, other): - return _actionAppend(other, self) - - def presub_lines(self, env): - # CommandGeneratorAction needs a real environment - # in order to return the proper string here, since - # it may call LazyAction, which looks up a key - # in that env. So we temporarily remember the env here, - # and CommandGeneratorAction will use this env - # when it calls its _generate method. - self.presub_env = env - lines = str(self).split('\n') - self.presub_env = None # don't need this any more - return lines - - def get_varlist(self, target, source, env, executor=None): - return self.varlist - - def get_targets(self, env, executor): - """ - Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used - by this action. 
- """ - return self.targets - - -class _ActionAction(ActionBase): - """Base class for actions that create output objects.""" - def __init__(self, cmdstr=_null, strfunction=_null, varlist=(), - presub=_null, chdir=None, exitstatfunc=None, - batch_key=None, targets='$TARGETS', - **kw): - self.cmdstr = cmdstr - if strfunction is not _null: - if strfunction is None: - self.cmdstr = None - else: - self.strfunction = strfunction - self.varlist = varlist - self.presub = presub - self.chdir = chdir - if not exitstatfunc: - exitstatfunc = default_exitstatfunc - self.exitstatfunc = exitstatfunc - - self.targets = targets - - if batch_key: - if not callable(batch_key): - # They have set batch_key, but not to their own - # callable. The default behavior here will batch - # *all* targets+sources using this action, separated - # for each construction environment. - def default_batch_key(self, env, target, source): - return (id(self), id(env)) - batch_key = default_batch_key - SCons.Util.AddMethod(self, batch_key, 'batch_key') - - def print_cmd_line(self, s, target, source, env): - """ - In python 3, and in some of our tests, sys.stdout is - a String io object, and it takes unicode strings only - In other cases it's a regular Python 2.x file object - which takes strings (bytes), and if you pass those a - unicode object they try to decode with 'ascii' codec - which fails if the cmd line has any hi-bit-set chars. - This code assumes s is a regular string, but should - work if it's unicode too. 
- """ - try: - sys.stdout.write(s + u"\n") - except UnicodeDecodeError: - sys.stdout.write(s + "\n") - - def __call__(self, target, source, env, - exitstatfunc=_null, - presub=_null, - show=_null, - execute=_null, - chdir=_null, - executor=None): - if not is_List(target): - target = [target] - if not is_List(source): - source = [source] - - if presub is _null: - presub = self.presub - if presub is _null: - presub = print_actions_presub - if exitstatfunc is _null: exitstatfunc = self.exitstatfunc - if show is _null: show = print_actions - if execute is _null: execute = execute_actions - if chdir is _null: chdir = self.chdir - save_cwd = None - if chdir: - save_cwd = os.getcwd() - try: - chdir = str(chdir.get_abspath()) - except AttributeError: - if not is_String(chdir): - if executor: - chdir = str(executor.batches[0].targets[0].dir) - else: - chdir = str(target[0].dir) - if presub: - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - t = ' and '.join(map(str, target)) - l = '\n '.join(self.presub_lines(env)) - out = u"Building %s with action:\n %s\n" % (t, l) - sys.stdout.write(out) - cmd = None - if show and self.strfunction: - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - try: - cmd = self.strfunction(target, source, env, executor) - except TypeError: - cmd = self.strfunction(target, source, env) - if cmd: - if chdir: - cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd - try: - get = env.get - except AttributeError: - print_func = self.print_cmd_line - else: - print_func = get('PRINT_CMD_LINE_FUNC') - if not print_func: - print_func = self.print_cmd_line - print_func(cmd, target, source, env) - stat = 0 - if execute: - if chdir: - os.chdir(chdir) - try: - stat = self.execute(target, source, env, executor=executor) - if isinstance(stat, SCons.Errors.BuildError): - s = exitstatfunc(stat.status) - if s: - stat.status = s - else: - stat = s - else: - stat = exitstatfunc(stat) - finally: - 
if save_cwd: - os.chdir(save_cwd) - if cmd and save_cwd: - print_func('os.chdir(%s)' % repr(save_cwd), target, source, env) - - return stat - - -def _string_from_cmd_list(cmd_list): - """Takes a list of command line arguments and returns a pretty - representation for printing.""" - cl = [] - for arg in map(str, cmd_list): - if ' ' in arg or '\t' in arg: - arg = '"' + arg + '"' - cl.append(arg) - return ' '.join(cl) - -default_ENV = None - - -def get_default_ENV(env, target=None, source=None): - """ - A fiddlin' little function that has an 'import SCons.Environment' which - can't be moved to the top level without creating an import loop. Since - this import creates a local variable named 'SCons', it blocks access to - the global variable, so we move it here to prevent complaints about local - variables being used uninitialized. - """ - global default_ENV - try: - return env['ENV'] - except KeyError: - if not default_ENV: - import SCons.Environment - # This is a hideously expensive way to get a default shell - # environment. What it really should do is run the platform - # setup to get the default ENV. Fortunately, it's incredibly - # rare for an Environment not to have a shell environment, so - # we're not going to worry about it overmuch. - default_ENV = SCons.Environment.Environment()['ENV'] - return default_ENV - - -def _subproc(scons_env, cmd, error = 'ignore', **kw): - """Do common setup for a subprocess.Popen() call - - This function is still in draft mode. We're going to need something like - it in the long run as more and more places use subprocess, but I'm sure - it'll have to be tweaked to get the full desired functionality. - one special arg (so far?), 'error', to tell what to do with exceptions. - """ - # allow std{in,out,err} to be "'devnull'". This is like - # subprocess.DEVNULL, which does not exist for Py2. Use the - # subprocess one if possible. 
- # Clean this up when Py2 support is dropped - try: - from subprocess import DEVNULL - except ImportError: - DEVNULL = None - - for stream in 'stdin', 'stdout', 'stderr': - io = kw.get(stream) - if is_String(io) and io == 'devnull': - if DEVNULL: - kw[stream] = DEVNULL - else: - kw[stream] = open(os.devnull, "r+") - - # Figure out what shell environment to use - ENV = kw.get('env', None) - if ENV is None: ENV = get_default_ENV(scons_env) - - # Ensure that the ENV values are all strings: - new_env = {} - for key, value in ENV.items(): - if is_List(value): - # If the value is a list, then we assume it is a path list, - # because that's a pretty common list-like value to stick - # in an environment variable: - value = SCons.Util.flatten_sequence(value) - new_env[key] = os.pathsep.join(map(str, value)) - else: - # It's either a string or something else. If it's a string, - # we still want to call str() because it might be a *Unicode* - # string, which makes subprocess.Popen() gag. If it isn't a - # string or a list, then we just coerce it to a string, which - # is the proper way to handle Dir and File instances and will - # produce something reasonable for just about everything else: - new_env[key] = str(value) - kw['env'] = new_env - - try: - pobj = subprocess.Popen(cmd, **kw) - except EnvironmentError as e: - if error == 'raise': raise - # return a dummy Popen instance that only returns error - class dummyPopen(object): - def __init__(self, e): self.exception = e - def communicate(self, input=None): return ('', '') - def wait(self): return -self.exception.errno - stdin = None - class f(object): - def read(self): return '' - def readline(self): return '' - def __iter__(self): return iter(()) - stdout = stderr = f() - pobj = dummyPopen(e) - finally: - # clean up open file handles stored in parent's kw - for k, v in kw.items(): - if inspect.ismethod(getattr(v, 'close', None)): - v.close() - - return pobj - - -class CommandAction(_ActionAction): - """Class for 
command-execution actions.""" - def __init__(self, cmd, **kw): - # Cmd can actually be a list or a single item; if it's a - # single item it should be the command string to execute; if a - # list then it should be the words of the command string to - # execute. Only a single command should be executed by this - # object; lists of commands should be handled by embedding - # these objects in a ListAction object (which the Action() - # factory above does). cmd will be passed to - # Environment.subst_list() for substituting environment - # variables. - if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.CommandAction') - - _ActionAction.__init__(self, **kw) - if is_List(cmd): - if [c for c in cmd if is_List(c)]: - raise TypeError("CommandAction should be given only " - "a single command") - self.cmd_list = cmd - - def __str__(self): - if is_List(self.cmd_list): - return ' '.join(map(str, self.cmd_list)) - return str(self.cmd_list) - - def process(self, target, source, env, executor=None): - if executor: - result = env.subst_list(self.cmd_list, 0, executor=executor) - else: - result = env.subst_list(self.cmd_list, 0, target, source) - silent = None - ignore = None - while True: - try: c = result[0][0][0] - except IndexError: c = None - if c == '@': silent = 1 - elif c == '-': ignore = 1 - else: break - result[0][0] = result[0][0][1:] - try: - if not result[0][0]: - result[0] = result[0][1:] - except IndexError: - pass - return result, ignore, silent - - def strfunction(self, target, source, env, executor=None): - if self.cmdstr is None: - return None - if self.cmdstr is not _null: - from SCons.Subst import SUBST_RAW - if executor: - c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) - else: - c = env.subst(self.cmdstr, SUBST_RAW, target, source) - if c: - return c - cmd_list, ignore, silent = self.process(target, source, env, executor) - if silent: - return '' - return _string_from_cmd_list(cmd_list[0]) - - def execute(self, target, source, env, 
executor=None): - """Execute a command action. - - This will handle lists of commands as well as individual commands, - because construction variable substitution may turn a single - "command" into a list. This means that this class can actually - handle lists of commands, even though that's not how we use it - externally. - """ - escape_list = SCons.Subst.escape_list - flatten_sequence = SCons.Util.flatten_sequence - - try: - shell = env['SHELL'] - except KeyError: - raise SCons.Errors.UserError('Missing SHELL construction variable.') - - try: - spawn = env['SPAWN'] - except KeyError: - raise SCons.Errors.UserError('Missing SPAWN construction variable.') - else: - if is_String(spawn): - spawn = env.subst(spawn, raw=1, conv=lambda x: x) - - escape = env.get('ESCAPE', lambda x: x) - - ENV = env.get('SHELL_ENV_GENERATOR', get_default_ENV)(env, target, source) - - # Ensure that the ENV values are all strings: - for key, value in ENV.items(): - if not is_String(value): - if is_List(value): - # If the value is a list, then we assume it is a - # path list, because that's a pretty common list-like - # value to stick in an environment variable: - value = flatten_sequence(value) - ENV[key] = os.pathsep.join(map(str, value)) - else: - # If it isn't a string or a list, then we just coerce - # it to a string, which is the proper way to handle - # Dir and File instances and will produce something - # reasonable for just about everything else: - ENV[key] = str(value) - - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - cmd_list, ignore, silent = self.process(target, list(map(rfile, source)), env, executor) - - # Use len() to filter out any "command" that's zero-length. - for cmd_line in filter(len, cmd_list): - # Escape the command line for the interpreter we are using. 
- cmd_line = escape_list(cmd_line, escape) - result = spawn(shell, escape, cmd_line[0], cmd_line, ENV) - if not ignore and result: - msg = "Error %s" % result - return SCons.Errors.BuildError(errstr=msg, - status=result, - action=self, - command=cmd_line) - return 0 - - def get_presig(self, target, source, env, executor=None): - """Return the signature contents of this action's command line. - - This strips $(-$) and everything in between the string, - since those parts don't affect signatures. - """ - from SCons.Subst import SUBST_SIG - cmd = self.cmd_list - if is_List(cmd): - cmd = ' '.join(map(str, cmd)) - else: - cmd = str(cmd) - if executor: - return env.subst_target_source(cmd, SUBST_SIG, executor=executor) - else: - return env.subst_target_source(cmd, SUBST_SIG, target, source) - - def get_implicit_deps(self, target, source, env, executor=None): - icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True) - if is_String(icd) and icd[:1] == '$': - icd = env.subst(icd) - if not icd or icd in ('0', 'None'): - return [] - from SCons.Subst import SUBST_SIG - if executor: - cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor) - else: - cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source) - res = [] - for cmd_line in cmd_list: - if cmd_line: - d = str(cmd_line[0]) - m = strip_quotes.match(d) - if m: - d = m.group(1) - d = env.WhereIs(d) - if d: - res.append(env.fs.File(d)) - return res - - -class CommandGeneratorAction(ActionBase): - """Class for command-generator actions.""" - def __init__(self, generator, kw): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.CommandGeneratorAction') - self.generator = generator - self.gen_kw = kw - self.varlist = kw.get('varlist', ()) - self.targets = kw.get('targets', '$TARGETS') - - def _generate(self, target, source, env, for_signature, executor=None): - # ensure that target is a list, to make it easier to write - # generator functions: - if not is_List(target): - target = [target] 
- - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - ret = self.generator(target=target, - source=source, - env=env, - for_signature=for_signature) - gen_cmd = Action(ret, **self.gen_kw) - if not gen_cmd: - raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret)) - return gen_cmd - - def __str__(self): - try: - env = self.presub_env - except AttributeError: - env = None - if env is None: - env = SCons.Defaults.DefaultEnvironment() - act = self._generate([], [], env, 1) - return str(act) - - def batch_key(self, env, target, source): - return self._generate(target, source, env, 1).batch_key(env, target, source) - - def genstring(self, target, source, env, executor=None): - return self._generate(target, source, env, 1, executor).genstring(target, source, env) - - def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, - show=_null, execute=_null, chdir=_null, executor=None): - act = self._generate(target, source, env, 0, executor) - if act is None: - raise SCons.Errors.UserError("While building `%s': " - "Cannot deduce file extension from source files: %s" - % (repr(list(map(str, target))), repr(list(map(str, source))))) - return act(target, source, env, exitstatfunc, presub, - show, execute, chdir, executor) - - def get_presig(self, target, source, env, executor=None): - """Return the signature contents of this action's command line. - - This strips $(-$) and everything in between the string, - since those parts don't affect signatures. 
- """ - return self._generate(target, source, env, 1, executor).get_presig(target, source, env) - - def get_implicit_deps(self, target, source, env, executor=None): - return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env) - - def get_varlist(self, target, source, env, executor=None): - return self._generate(target, source, env, 1, executor).get_varlist(target, source, env, executor) - - def get_targets(self, env, executor): - return self._generate(None, None, env, 1, executor).get_targets(env, executor) - - -class LazyAction(CommandGeneratorAction, CommandAction): - """ - A LazyAction is a kind of hybrid generator and command action for - strings of the form "$VAR". These strings normally expand to other - strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also - want to be able to replace them with functions in the construction - environment. Consequently, we want lazy evaluation and creation of - an Action in the case of the function, but that's overkill in the more - normal case of expansion to other strings. - - So we do this with a subclass that's both a generator *and* - a command action. The overridden methods all do a quick check - of the construction variable, and if it's a string we just call - the corresponding CommandAction method to do the heavy lifting. - If not, then we call the same-named CommandGeneratorAction method. - The CommandGeneratorAction methods work by using the overridden - _generate() method, that is, our own way of handling "generation" of - an action based on what's in the construction variable. 
- """ - - def __init__(self, var, kw): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.LazyAction') - CommandAction.__init__(self, '${'+var+'}', **kw) - self.var = SCons.Util.to_String(var) - self.gen_kw = kw - - def get_parent_class(self, env): - c = env.get(self.var) - if is_String(c) and '\n' not in c: - return CommandAction - return CommandGeneratorAction - - def _generate_cache(self, env): - if env: - c = env.get(self.var, '') - else: - c = '' - gen_cmd = Action(c, **self.gen_kw) - if not gen_cmd: - raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c))) - return gen_cmd - - def _generate(self, target, source, env, for_signature, executor=None): - return self._generate_cache(env) - - def __call__(self, target, source, env, *args, **kw): - c = self.get_parent_class(env) - return c.__call__(self, target, source, env, *args, **kw) - - def get_presig(self, target, source, env): - c = self.get_parent_class(env) - return c.get_presig(self, target, source, env) - - def get_varlist(self, target, source, env, executor=None): - c = self.get_parent_class(env) - return c.get_varlist(self, target, source, env, executor) - - -class FunctionAction(_ActionAction): - """Class for Python function actions.""" - - def __init__(self, execfunction, kw): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.FunctionAction') - - self.execfunction = execfunction - try: - self.funccontents = _callable_contents(execfunction) - except AttributeError: - try: - # See if execfunction will do the heavy lifting for us. - self.gc = execfunction.get_contents - except AttributeError: - # This is weird, just do the best we can. 
- self.funccontents = _object_contents(execfunction) - - _ActionAction.__init__(self, **kw) - - def function_name(self): - try: - return self.execfunction.__name__ - except AttributeError: - try: - return self.execfunction.__class__.__name__ - except AttributeError: - return "unknown_python_function" - - def strfunction(self, target, source, env, executor=None): - if self.cmdstr is None: - return None - if self.cmdstr is not _null: - from SCons.Subst import SUBST_RAW - if executor: - c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) - else: - c = env.subst(self.cmdstr, SUBST_RAW, target, source) - if c: - return c - - def array(a): - def quote(s): - try: - str_for_display = s.str_for_display - except AttributeError: - s = repr(s) - else: - s = str_for_display() - return s - return '[' + ", ".join(map(quote, a)) + ']' - try: - strfunc = self.execfunction.strfunction - except AttributeError: - pass - else: - if strfunc is None: - return None - if callable(strfunc): - return strfunc(target, source, env) - name = self.function_name() - tstr = array(target) - sstr = array(source) - return "%s(%s, %s)" % (name, tstr, sstr) - - def __str__(self): - name = self.function_name() - if name == 'ActionCaller': - return str(self.execfunction) - return "%s(target, source, env)" % name - - def execute(self, target, source, env, executor=None): - exc_info = (None,None,None) - try: - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - rsources = list(map(rfile, source)) - try: - result = self.execfunction(target=target, source=rsources, env=env) - except KeyboardInterrupt as e: - raise - except SystemExit as e: - raise - except Exception as e: - result = e - exc_info = sys.exc_info() - - if result: - result = SCons.Errors.convert_to_BuildError(result, exc_info) - result.node=target - result.action=self - try: - result.command=self.strfunction(target, source, env, executor) - except TypeError: - result.command=self.strfunction(target, 
source, env) - - # FIXME: This maintains backward compatibility with respect to - # which type of exceptions were returned by raising an - # exception and which ones were returned by value. It would - # probably be best to always return them by value here, but - # some codes do not check the return value of Actions and I do - # not have the time to modify them at this point. - if (exc_info[1] and - not isinstance(exc_info[1],EnvironmentError)): - raise result - - return result - finally: - # Break the cycle between the traceback object and this - # function stack frame. See the sys.exc_info() doc info for - # more information about this issue. - del exc_info - - def get_presig(self, target, source, env): - """Return the signature contents of this callable action.""" - try: - return self.gc(target, source, env) - except AttributeError: - return self.funccontents - - def get_implicit_deps(self, target, source, env): - return [] - -class ListAction(ActionBase): - """Class for lists of other actions.""" - def __init__(self, actionlist): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.ListAction') - def list_of_actions(x): - if isinstance(x, ActionBase): - return x - return Action(x) - self.list = list(map(list_of_actions, actionlist)) - # our children will have had any varlist - # applied; we don't need to do it again - self.varlist = () - self.targets = '$TARGETS' - - def genstring(self, target, source, env): - return '\n'.join([a.genstring(target, source, env) for a in self.list]) - - def __str__(self): - return '\n'.join(map(str, self.list)) - - def presub_lines(self, env): - return SCons.Util.flatten_sequence( - [a.presub_lines(env) for a in self.list]) - - def get_presig(self, target, source, env): - """Return the signature contents of this action list. - - Simple concatenation of the signatures of the elements. 
- """ - return b"".join([bytes(x.get_contents(target, source, env)) for x in self.list]) - - def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, - show=_null, execute=_null, chdir=_null, executor=None): - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - for act in self.list: - stat = act(target, source, env, exitstatfunc, presub, - show, execute, chdir, executor) - if stat: - return stat - return 0 - - def get_implicit_deps(self, target, source, env): - result = [] - for act in self.list: - result.extend(act.get_implicit_deps(target, source, env)) - return result - - def get_varlist(self, target, source, env, executor=None): - result = OrderedDict() - for act in self.list: - for var in act.get_varlist(target, source, env, executor): - result[var] = True - return list(result.keys()) - - -class ActionCaller(object): - """A class for delaying calling an Action function with specific - (positional and keyword) arguments until the Action is actually - executed. - - This class looks to the rest of the world like a normal Action object, - but what it's really doing is hanging on to the arguments until we - have a target, source and env to use for the expansion. - """ - def __init__(self, parent, args, kw): - self.parent = parent - self.args = args - self.kw = kw - - def get_contents(self, target, source, env): - actfunc = self.parent.actfunc - try: - # "self.actfunc" is a function. - contents = actfunc.__code__.co_code - except AttributeError: - # "self.actfunc" is a callable object. - try: - contents = actfunc.__call__.__func__.__code__.co_code - except AttributeError: - # No __call__() method, so it might be a builtin - # or something like that. Do the best we can. 
- contents = repr(actfunc) - - return contents - - def subst(self, s, target, source, env): - # If s is a list, recursively apply subst() - # to every element in the list - if is_List(s): - result = [] - for elem in s: - result.append(self.subst(elem, target, source, env)) - return self.parent.convert(result) - - # Special-case hack: Let a custom function wrapped in an - # ActionCaller get at the environment through which the action - # was called by using this hard-coded value as a special return. - if s == '$__env__': - return env - elif is_String(s): - return env.subst(s, 1, target, source) - return self.parent.convert(s) - - def subst_args(self, target, source, env): - return [self.subst(x, target, source, env) for x in self.args] - - def subst_kw(self, target, source, env): - kw = {} - for key in list(self.kw.keys()): - kw[key] = self.subst(self.kw[key], target, source, env) - return kw - - def __call__(self, target, source, env, executor=None): - args = self.subst_args(target, source, env) - kw = self.subst_kw(target, source, env) - return self.parent.actfunc(*args, **kw) - - def strfunction(self, target, source, env): - args = self.subst_args(target, source, env) - kw = self.subst_kw(target, source, env) - return self.parent.strfunc(*args, **kw) - - def __str__(self): - return self.parent.strfunc(*self.args, **self.kw) - - -class ActionFactory(object): - """A factory class that will wrap up an arbitrary function - as an SCons-executable Action object. - - The real heavy lifting here is done by the ActionCaller class. - We just collect the (positional and keyword) arguments that we're - called with and give them to the ActionCaller object we create, - so it can hang onto them until it needs them. 
- """ - def __init__(self, actfunc, strfunc, convert=lambda x: x): - self.actfunc = actfunc - self.strfunc = strfunc - self.convert = convert - - def __call__(self, *args, **kw): - ac = ActionCaller(self, args, kw) - action = Action(ac, strfunction=ac.strfunction) - return action - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Builder.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Builder.py deleted file mode 100644 index 0ee614b6edd..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Builder.py +++ /dev/null @@ -1,896 +0,0 @@ -""" -SCons.Builder - -Builder object subsystem. - -A Builder object is a callable that encapsulates information about how -to execute actions to create a target Node (file) from source Nodes -(files), and how to create those dependencies for tracking. - -The main entry point here is the Builder() factory method. This provides -a procedural interface that creates the right underlying Builder object -based on the keyword arguments supplied and the types of the arguments. - -The goal is for this external interface to be simple enough that the -vast majority of users can create new Builders as necessary to support -building new types of files in their configurations, without having to -dive any deeper into this subsystem. - -The base class here is BuilderBase. This is a concrete base class which -does, in fact, represent the Builder objects that we (or users) create. - -There is also a proxy that looks like a Builder: - - CompositeBuilder - - This proxies for a Builder with an action that is actually a - dictionary that knows how to map file suffixes to a specific - action. This is so that we can invoke different actions - (compilers, compile options) for different flavors of source - files. 
- -Builders and their proxies have the following public interface methods -used by other modules: - - - __call__() - THE public interface. Calling a Builder object (with the - use of internal helper methods) sets up the target and source - dependencies, appropriate mapping to a specific action, and the - environment manipulation necessary for overridden construction - variable. This also takes care of warning about possible mistakes - in keyword arguments. - - - add_emitter() - Adds an emitter for a specific file suffix, used by some Tool - modules to specify that (for example) a yacc invocation on a .y - can create a .h *and* a .c file. - - - add_action() - Adds an action for a specific file suffix, heavily used by - Tool modules to add their specific action(s) for turning - a source file into an object file to the global static - and shared object file Builders. - -There are the following methods for internal use within this module: - - - _execute() - The internal method that handles the heavily lifting when a - Builder is called. This is used so that the __call__() methods - can set up warning about possible mistakes in keyword-argument - overrides, and *then* execute all of the steps necessary so that - the warnings only occur once. - - - get_name() - Returns the Builder's name within a specific Environment, - primarily used to try to return helpful information in error - messages. - - - adjust_suffix() - - get_prefix() - - get_suffix() - - get_src_suffix() - - set_src_suffix() - Miscellaneous stuff for handling the prefix and suffix - manipulation we use in turning source file names into target - file names. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Builder.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import collections - -import SCons.Action -import SCons.Debug -from SCons.Debug import logInstanceCreation -from SCons.Errors import InternalError, UserError -import SCons.Executor -import SCons.Memoize -import SCons.Util -import SCons.Warnings - -class _Null(object): - pass - -_null = _Null - -def match_splitext(path, suffixes = []): - if suffixes: - matchsuf = [S for S in suffixes if path[-len(S):] == S] - if matchsuf: - suf = max([(len(_f),_f) for _f in matchsuf])[1] - return [path[:-len(suf)], path[-len(suf):]] - return SCons.Util.splitext(path) - -class DictCmdGenerator(SCons.Util.Selector): - """This is a callable class that can be used as a - command generator function. 
It holds on to a dictionary - mapping file suffixes to Actions. It uses that dictionary - to return the proper action based on the file suffix of - the source file.""" - - def __init__(self, dict=None, source_ext_match=1): - SCons.Util.Selector.__init__(self, dict) - self.source_ext_match = source_ext_match - - def src_suffixes(self): - return list(self.keys()) - - def add_action(self, suffix, action): - """Add a suffix-action pair to the mapping. - """ - self[suffix] = action - - def __call__(self, target, source, env, for_signature): - if not source: - return [] - - if self.source_ext_match: - suffixes = self.src_suffixes() - ext = None - for src in map(str, source): - my_ext = match_splitext(src, suffixes)[1] - if ext and my_ext != ext: - raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" - % (repr(list(map(str, target))), src, ext, my_ext)) - ext = my_ext - else: - ext = match_splitext(str(source[0]), self.src_suffixes())[1] - - if not ext: - #return ext - raise UserError("While building `%s': " - "Cannot deduce file extension from source files: %s" - % (repr(list(map(str, target))), repr(list(map(str, source))))) - - try: - ret = SCons.Util.Selector.__call__(self, env, source, ext) - except KeyError as e: - raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e.args[0], e.args[1], e.args[2])) - if ret is None: - raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." 
% \ - (repr(list(map(str, target))), repr(list(map(str, source))), ext, repr(list(self.keys())))) - return ret - -class CallableSelector(SCons.Util.Selector): - """A callable dictionary that will, in turn, call the value it - finds if it can.""" - def __call__(self, env, source): - value = SCons.Util.Selector.__call__(self, env, source) - if callable(value): - value = value(env, source) - return value - -class DictEmitter(SCons.Util.Selector): - """A callable dictionary that maps file suffixes to emitters. - When called, it finds the right emitter in its dictionary for the - suffix of the first source file, and calls that emitter to get the - right lists of targets and sources to return. If there's no emitter - for the suffix in its dictionary, the original target and source are - returned. - """ - def __call__(self, target, source, env): - emitter = SCons.Util.Selector.__call__(self, env, source) - if emitter: - target, source = emitter(target, source, env) - return (target, source) - -class ListEmitter(collections.UserList): - """A callable list of emitters that calls each in sequence, - returning the result. - """ - def __call__(self, target, source, env): - for e in self.data: - target, source = e(target, source, env) - return (target, source) - -# These are a common errors when calling a Builder; -# they are similar to the 'target' and 'source' keyword args to builders, -# so we issue warnings when we see them. The warnings can, of course, -# be disabled. -misleading_keywords = { - 'targets' : 'target', - 'sources' : 'source', -} - -class OverrideWarner(collections.UserDict): - """A class for warning about keyword arguments that we use as - overrides in a Builder call. - - This class exists to handle the fact that a single Builder call - can actually invoke multiple builders. This class only emits the - warnings once, no matter how many Builders are invoked. 
- """ - def __init__(self, dict): - collections.UserDict.__init__(self, dict) - if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.OverrideWarner') - self.already_warned = None - def warn(self): - if self.already_warned: - return - for k in list(self.keys()): - if k in misleading_keywords: - alt = misleading_keywords[k] - msg = "Did you mean to use `%s' instead of `%s'?" % (alt, k) - SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg) - self.already_warned = 1 - -def Builder(**kw): - """A factory for builder objects.""" - composite = None - if 'generator' in kw: - if 'action' in kw: - raise UserError("You must not specify both an action and a generator.") - kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {}) - del kw['generator'] - elif 'action' in kw: - source_ext_match = kw.get('source_ext_match', 1) - if 'source_ext_match' in kw: - del kw['source_ext_match'] - if SCons.Util.is_Dict(kw['action']): - composite = DictCmdGenerator(kw['action'], source_ext_match) - kw['action'] = SCons.Action.CommandGeneratorAction(composite, {}) - kw['src_suffix'] = composite.src_suffixes() - else: - kw['action'] = SCons.Action.Action(kw['action']) - - if 'emitter' in kw: - emitter = kw['emitter'] - if SCons.Util.is_String(emitter): - # This allows users to pass in an Environment - # variable reference (like "$FOO") as an emitter. - # We will look in that Environment variable for - # a callable to use as the actual emitter. 
- var = SCons.Util.get_environment_var(emitter) - if not var: - raise UserError("Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter) - kw['emitter'] = EmitterProxy(var) - elif SCons.Util.is_Dict(emitter): - kw['emitter'] = DictEmitter(emitter) - elif SCons.Util.is_List(emitter): - kw['emitter'] = ListEmitter(emitter) - - result = BuilderBase(**kw) - - if composite is not None: - result = CompositeBuilder(result, composite) - - return result - -def _node_errors(builder, env, tlist, slist): - """Validate that the lists of target and source nodes are - legal for this builder and environment. Raise errors or - issue warnings as appropriate. - """ - - # First, figure out if there are any errors in the way the targets - # were specified. - for t in tlist: - if t.side_effect: - raise UserError("Multiple ways to build the same target were specified for: %s" % t) - if t.has_explicit_builder(): - # Check for errors when the environments are different - # No error if environments are the same Environment instance - if (t.env is not None and t.env is not env and - # Check OverrideEnvironment case - no error if wrapped Environments - # are the same instance, and overrides lists match - not (getattr(t.env, '__subject', 0) is getattr(env, '__subject', 1) and - getattr(t.env, 'overrides', 0) == getattr(env, 'overrides', 1) and - not builder.multi)): - action = t.builder.action - t_contents = t.builder.action.get_contents(tlist, slist, t.env) - contents = builder.action.get_contents(tlist, slist, env) - - if t_contents == contents: - msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env)) - SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg) - else: - try: - msg = "Two environments with different actions were specified for the same target: %s\n(action 1: %s)\n(action 2: %s)" % (t,t_contents.decode('utf-8'),contents.decode('utf-8')) 
- except UnicodeDecodeError: - msg = "Two environments with different actions were specified for the same target: %s"%t - raise UserError(msg) - if builder.multi: - if t.builder != builder: - msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t) - raise UserError(msg) - # TODO(batch): list constructed each time! - if t.get_executor().get_all_targets() != tlist: - msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, list(map(str, t.get_executor().get_all_targets())), list(map(str, tlist))) - raise UserError(msg) - elif t.sources != slist: - msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, list(map(str, t.sources)), list(map(str, slist))) - raise UserError(msg) - - if builder.single_source: - if len(slist) > 1: - raise UserError("More than one source given for single-source builder: targets=%s sources=%s" % (list(map(str,tlist)), list(map(str,slist)))) - -class EmitterProxy(object): - """This is a callable class that can act as a - Builder emitter. It holds on to a string that - is a key into an Environment dictionary, and will - look there at actual build time to see if it holds - a callable. If so, we will call that as the actual - emitter.""" - def __init__(self, var): - self.var = SCons.Util.to_String(var) - - def __call__(self, target, source, env): - emitter = self.var - - # Recursively substitute the variable. - # We can't use env.subst() because it deals only - # in strings. Maybe we should change that? 
- while SCons.Util.is_String(emitter) and emitter in env: - emitter = env[emitter] - if callable(emitter): - target, source = emitter(target, source, env) - elif SCons.Util.is_List(emitter): - for e in emitter: - target, source = e(target, source, env) - - return (target, source) - - - def __eq__(self, other): - return self.var == other.var - - def __lt__(self, other): - return self.var < other.var - -class BuilderBase(object): - """Base class for Builders, objects that create output - nodes (files) from input nodes (files). - """ - - def __init__(self, action = None, - prefix = '', - suffix = '', - src_suffix = '', - target_factory = None, - source_factory = None, - target_scanner = None, - source_scanner = None, - emitter = None, - multi = 0, - env = None, - single_source = 0, - name = None, - chdir = _null, - is_explicit = 1, - src_builder = None, - ensure_suffix = False, - **overrides): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.BuilderBase') - self._memo = {} - self.action = action - self.multi = multi - if SCons.Util.is_Dict(prefix): - prefix = CallableSelector(prefix) - self.prefix = prefix - if SCons.Util.is_Dict(suffix): - suffix = CallableSelector(suffix) - self.env = env - self.single_source = single_source - if 'overrides' in overrides: - msg = "The \"overrides\" keyword to Builder() creation has been removed;\n" +\ - "\tspecify the items as keyword arguments to the Builder() call instead." - raise TypeError(msg) - if 'scanner' in overrides: - msg = "The \"scanner\" keyword to Builder() creation has been removed;\n" +\ - "\tuse: source_scanner or target_scanner as appropriate." 
- raise TypeError(msg) - self.overrides = overrides - - self.set_suffix(suffix) - self.set_src_suffix(src_suffix) - self.ensure_suffix = ensure_suffix - - self.target_factory = target_factory - self.source_factory = source_factory - self.target_scanner = target_scanner - self.source_scanner = source_scanner - - self.emitter = emitter - - # Optional Builder name should only be used for Builders - # that don't get attached to construction environments. - if name: - self.name = name - self.executor_kw = {} - if chdir is not _null: - self.executor_kw['chdir'] = chdir - self.is_explicit = is_explicit - - if src_builder is None: - src_builder = [] - elif not SCons.Util.is_List(src_builder): - src_builder = [ src_builder ] - self.src_builder = src_builder - - def __nonzero__(self): - raise InternalError("Do not test for the Node.builder attribute directly; use Node.has_builder() instead") - - def __bool__(self): - return self.__nonzero__() - - def get_name(self, env): - """Attempts to get the name of the Builder. - - Look at the BUILDERS variable of env, expecting it to be a - dictionary containing this Builder, and return the key of the - dictionary. 
If there's no key, then return a directly-configured - name (if there is one) or the name of the class (by default).""" - - try: - index = list(env['BUILDERS'].values()).index(self) - return list(env['BUILDERS'].keys())[index] - except (AttributeError, KeyError, TypeError, ValueError): - try: - return self.name - except AttributeError: - return str(self.__class__) - - def __eq__(self, other): - return self.__dict__ == other.__dict__ - - def splitext(self, path, env=None): - if not env: - env = self.env - if env: - suffixes = self.src_suffixes(env) - else: - suffixes = [] - return match_splitext(path, suffixes) - - def _adjustixes(self, files, pre, suf, ensure_suffix=False): - if not files: - return [] - result = [] - if not SCons.Util.is_List(files): - files = [files] - - for f in files: - if SCons.Util.is_String(f): - f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix) - result.append(f) - return result - - def _create_nodes(self, env, target = None, source = None): - """Create and return lists of target and source nodes. 
- """ - src_suf = self.get_src_suffix(env) - - target_factory = env.get_factory(self.target_factory) - source_factory = env.get_factory(self.source_factory) - - source = self._adjustixes(source, None, src_suf) - slist = env.arg2nodes(source, source_factory) - - pre = self.get_prefix(env, slist) - suf = self.get_suffix(env, slist) - - if target is None: - try: - t_from_s = slist[0].target_from_source - except AttributeError: - raise UserError("Do not know how to create a target from source `%s'" % slist[0]) - except IndexError: - tlist = [] - else: - splitext = lambda S: self.splitext(S,env) - tlist = [ t_from_s(pre, suf, splitext) ] - else: - target = self._adjustixes(target, pre, suf, self.ensure_suffix) - tlist = env.arg2nodes(target, target_factory, target=target, source=source) - - if self.emitter: - # The emitter is going to do str(node), but because we're - # being called *from* a builder invocation, the new targets - # don't yet have a builder set on them and will look like - # source files. Fool the emitter's str() calls by setting - # up a temporary builder on the new targets. - new_targets = [] - for t in tlist: - if not t.is_derived(): - t.builder_set(self) - new_targets.append(t) - - orig_tlist = tlist[:] - orig_slist = slist[:] - - target, source = self.emitter(target=tlist, source=slist, env=env) - - # Now delete the temporary builders that we attached to any - # new targets, so that _node_errors() doesn't do weird stuff - # to them because it thinks they already have builders. - for t in new_targets: - if t.builder is self: - # Only delete the temporary builder if the emitter - # didn't change it on us. - t.builder_set(None) - - # Have to call arg2nodes yet again, since it is legal for - # emitters to spit out strings as well as Node instances. 
- tlist = env.arg2nodes(target, target_factory, - target=orig_tlist, source=orig_slist) - slist = env.arg2nodes(source, source_factory, - target=orig_tlist, source=orig_slist) - - return tlist, slist - - def _execute(self, env, target, source, overwarn={}, executor_kw={}): - # We now assume that target and source are lists or None. - if self.src_builder: - source = self.src_builder_sources(env, source, overwarn) - - if self.single_source and len(source) > 1 and target is None: - result = [] - if target is None: target = [None]*len(source) - for tgt, src in zip(target, source): - if tgt is not None: - tgt = [tgt] - if src is not None: - src = [src] - result.extend(self._execute(env, tgt, src, overwarn)) - return SCons.Node.NodeList(result) - - overwarn.warn() - - tlist, slist = self._create_nodes(env, target, source) - - # If there is more than one target ensure that if we need to reset - # the implicit list to new scan of dependency all targets implicit lists - # are cleared. (SCons GH Issue #2811 and MongoDB SERVER-33111) - if len(tlist) > 1: - for t in tlist: - t.target_peers = tlist - - # Check for errors with the specified target/source lists. - _node_errors(self, env, tlist, slist) - - # The targets are fine, so find or make the appropriate Executor to - # build this particular list of targets from this particular list of - # sources. - - executor = None - key = None - - if self.multi: - try: - executor = tlist[0].get_executor(create = 0) - except (AttributeError, IndexError): - pass - else: - executor.add_sources(slist) - - if executor is None: - if not self.action: - fmt = "Builder %s must have an action to build %s." 
- raise UserError(fmt % (self.get_name(env or self.env), - list(map(str,tlist)))) - key = self.action.batch_key(env or self.env, tlist, slist) - if key: - try: - executor = SCons.Executor.GetBatchExecutor(key) - except KeyError: - pass - else: - executor.add_batch(tlist, slist) - - if executor is None: - executor = SCons.Executor.Executor(self.action, env, [], - tlist, slist, executor_kw) - if key: - SCons.Executor.AddBatchExecutor(key, executor) - - # Now set up the relevant information in the target Nodes themselves. - for t in tlist: - t.cwd = env.fs.getcwd() - t.builder_set(self) - t.env_set(env) - t.add_source(slist) - t.set_executor(executor) - t.set_explicit(self.is_explicit) - - return SCons.Node.NodeList(tlist) - - def __call__(self, env, target=None, source=None, chdir=_null, **kw): - # We now assume that target and source are lists or None. - # The caller (typically Environment.BuilderWrapper) is - # responsible for converting any scalar values to lists. - if chdir is _null: - ekw = self.executor_kw - else: - ekw = self.executor_kw.copy() - ekw['chdir'] = chdir - if 'chdir' in ekw and SCons.Util.is_String(ekw['chdir']): - ekw['chdir'] = env.subst(ekw['chdir']) - if kw: - if 'srcdir' in kw: - def prependDirIfRelative(f, srcdir=kw['srcdir']): - import os.path - if SCons.Util.is_String(f) and not os.path.isabs(f): - f = os.path.join(srcdir, f) - return f - if not SCons.Util.is_List(source): - source = [source] - source = list(map(prependDirIfRelative, source)) - del kw['srcdir'] - if self.overrides: - env_kw = self.overrides.copy() - env_kw.update(kw) - else: - env_kw = kw - else: - env_kw = self.overrides - - # TODO if env_kw: then the following line. there's no purpose in calling if no overrides. - env = env.Override(env_kw) - return self._execute(env, target, source, OverrideWarner(kw), ekw) - - def adjust_suffix(self, suff): - if suff and not suff[0] in [ '.', '_', '$' ]: - return '.' 
+ suff - return suff - - def get_prefix(self, env, sources=[]): - prefix = self.prefix - if callable(prefix): - prefix = prefix(env, sources) - return env.subst(prefix) - - def set_suffix(self, suffix): - if not callable(suffix): - suffix = self.adjust_suffix(suffix) - self.suffix = suffix - - def get_suffix(self, env, sources=[]): - suffix = self.suffix - if callable(suffix): - suffix = suffix(env, sources) - return env.subst(suffix) - - def set_src_suffix(self, src_suffix): - if not src_suffix: - src_suffix = [] - elif not SCons.Util.is_List(src_suffix): - src_suffix = [ src_suffix ] - self.src_suffix = [callable(suf) and suf or self.adjust_suffix(suf) for suf in src_suffix] - - def get_src_suffix(self, env): - """Get the first src_suffix in the list of src_suffixes.""" - ret = self.src_suffixes(env) - if not ret: - return '' - return ret[0] - - def add_emitter(self, suffix, emitter): - """Add a suffix-emitter mapping to this Builder. - - This assumes that emitter has been initialized with an - appropriate dictionary type, and will throw a TypeError if - not, so the caller is responsible for knowing that this is an - appropriate method to call for the Builder in question. - """ - self.emitter[suffix] = emitter - - def add_src_builder(self, builder): - """ - Add a new Builder to the list of src_builders. - - This requires wiping out cached values so that the computed - lists of source suffixes get re-calculated. - """ - self._memo = {} - self.src_builder.append(builder) - - def _get_sdict(self, env): - """ - Returns a dictionary mapping all of the source suffixes of all - src_builders of this Builder to the underlying Builder that - should be called first. - - This dictionary is used for each target specified, so we save a - lot of extra computation by memoizing it for each construction - environment. 
- - Note that this is re-computed each time, not cached, because there - might be changes to one of our source Builders (or one of their - source Builders, and so on, and so on...) that we can't "see." - - The underlying methods we call cache their computed values, - though, so we hope repeatedly aggregating them into a dictionary - like this won't be too big a hit. We may need to look for a - better way to do this if performance data show this has turned - into a significant bottleneck. - """ - sdict = {} - for bld in self.get_src_builders(env): - for suf in bld.src_suffixes(env): - sdict[suf] = bld - return sdict - - def src_builder_sources(self, env, source, overwarn={}): - sdict = self._get_sdict(env) - - src_suffixes = self.src_suffixes(env) - - lengths = list(set(map(len, src_suffixes))) - - def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths): - node_suffixes = [name[-l:] for l in lengths] - for suf in src_suffixes: - if suf in node_suffixes: - return suf - return None - - result = [] - for s in SCons.Util.flatten(source): - if SCons.Util.is_String(s): - match_suffix = match_src_suffix(env.subst(s)) - if not match_suffix and '.' not in s: - src_suf = self.get_src_suffix(env) - s = self._adjustixes(s, None, src_suf)[0] - else: - match_suffix = match_src_suffix(s.name) - if match_suffix: - try: - bld = sdict[match_suffix] - except KeyError: - result.append(s) - else: - tlist = bld._execute(env, None, [s], overwarn) - # If the subsidiary Builder returned more than one - # target, then filter out any sources that this - # Builder isn't capable of building. 
- if len(tlist) > 1: - tlist = [t for t in tlist if match_src_suffix(t.name)] - result.extend(tlist) - else: - result.append(s) - - source_factory = env.get_factory(self.source_factory) - - return env.arg2nodes(result, source_factory) - - def _get_src_builders_key(self, env): - return id(env) - - @SCons.Memoize.CountDictCall(_get_src_builders_key) - def get_src_builders(self, env): - """ - Returns the list of source Builders for this Builder. - - This exists mainly to look up Builders referenced as - strings in the 'BUILDER' variable of the construction - environment and cache the result. - """ - memo_key = id(env) - try: - memo_dict = self._memo['get_src_builders'] - except KeyError: - memo_dict = {} - self._memo['get_src_builders'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - - builders = [] - for bld in self.src_builder: - if SCons.Util.is_String(bld): - try: - bld = env['BUILDERS'][bld] - except KeyError: - continue - builders.append(bld) - - memo_dict[memo_key] = builders - return builders - - def _subst_src_suffixes_key(self, env): - return id(env) - - @SCons.Memoize.CountDictCall(_subst_src_suffixes_key) - def subst_src_suffixes(self, env): - """ - The suffix list may contain construction variable expansions, - so we have to evaluate the individual strings. To avoid doing - this over and over, we memoize the results for each construction - environment. - """ - memo_key = id(env) - try: - memo_dict = self._memo['subst_src_suffixes'] - except KeyError: - memo_dict = {} - self._memo['subst_src_suffixes'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - suffixes = [env.subst(x) for x in self.src_suffix] - memo_dict[memo_key] = suffixes - return suffixes - - def src_suffixes(self, env): - """ - Returns the list of source suffixes for all src_builders of this - Builder. - - This is essentially a recursive descent of the src_builder "tree." 
- (This value isn't cached because there may be changes in a - src_builder many levels deep that we can't see.) - """ - sdict = {} - suffixes = self.subst_src_suffixes(env) - for s in suffixes: - sdict[s] = 1 - for builder in self.get_src_builders(env): - for s in builder.src_suffixes(env): - if s not in sdict: - sdict[s] = 1 - suffixes.append(s) - return suffixes - -class CompositeBuilder(SCons.Util.Proxy): - """A Builder Proxy whose main purpose is to always have - a DictCmdGenerator as its action, and to provide access - to the DictCmdGenerator's add_action() method. - """ - - def __init__(self, builder, cmdgen): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.CompositeBuilder') - SCons.Util.Proxy.__init__(self, builder) - - # cmdgen should always be an instance of DictCmdGenerator. - self.cmdgen = cmdgen - self.builder = builder - - __call__ = SCons.Util.Delegate('__call__') - - def add_action(self, suffix, action): - self.cmdgen.add_action(suffix, action) - self.set_src_suffix(self.cmdgen.src_suffixes()) - -def is_a_Builder(obj): - """"Returns True if the specified obj is one of our Builder classes. - - The test is complicated a bit by the fact that CompositeBuilder - is a proxy, not a subclass of BuilderBase. 
- """ - return (isinstance(obj, BuilderBase) - or isinstance(obj, CompositeBuilder) - or callable(obj)) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/CacheDir.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/CacheDir.py deleted file mode 100644 index 26f04a1bc9d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/CacheDir.py +++ /dev/null @@ -1,384 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/CacheDir.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """ -CacheDir support -""" - -import hashlib -import json -import os -import stat -import sys -import uuid - -import SCons -import SCons.Action -import SCons.Warnings -from SCons.Util import PY3 - -cache_enabled = True -cache_debug = False -cache_force = False -cache_show = False -cache_readonly = False -cache_tmp_uuid = uuid.uuid4().hex - -def CacheRetrieveFunc(target, source, env): - t = target[0] - fs = t.fs - cd = env.get_CacheDir() - cd.requests += 1 - cachedir, cachefile = cd.cachepath(t) - if not fs.exists(cachefile): - cd.CacheDebug('CacheRetrieve(%s): %s not in cache\n', t, cachefile) - return 1 - cd.hits += 1 - cd.CacheDebug('CacheRetrieve(%s): retrieving from %s\n', t, cachefile) - if SCons.Action.execute_actions: - if fs.islink(cachefile): - fs.symlink(fs.readlink(cachefile), t.get_internal_path()) - else: - cd.copy_from_cache(env, cachefile, t.get_internal_path()) - try: - os.utime(cachefile, None) - except OSError: - pass - st = fs.stat(cachefile) - fs.chmod(t.get_internal_path(), stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - return 0 - -def CacheRetrieveString(target, source, env): - t = target[0] - fs = t.fs - cd = env.get_CacheDir() - cachedir, cachefile = cd.cachepath(t) - if t.fs.exists(cachefile): - return "Retrieved `%s' from cache" % t.get_internal_path() - return None - -CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString) - -CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None) - -def CachePushFunc(target, source, env): - if cache_readonly: - return - - t = target[0] - if t.nocache: - return - fs = t.fs - cd = env.get_CacheDir() - cachedir, cachefile = cd.cachepath(t) - if fs.exists(cachefile): - # Don't bother copying it if it's already there. 
Note that - # usually this "shouldn't happen" because if the file already - # existed in cache, we'd have retrieved the file from there, - # not built it. This can happen, though, in a race, if some - # other person running the same build pushes their copy to - # the cache after we decide we need to build it but before our - # build completes. - cd.CacheDebug('CachePush(%s): %s already exists in cache\n', t, cachefile) - return - - cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile) - - tempfile = f"{cachefile}.tmp{cache_tmp_uuid}" - errfmt = "Unable to copy %s to cache. Cache file is %s" - - if not fs.isdir(cachedir): - try: - fs.makedirs(cachedir) - except EnvironmentError: - # We may have received an exception because another process - # has beaten us creating the directory. - if not fs.isdir(cachedir): - msg = errfmt % (str(target), cachefile) - raise SCons.Errors.SConsEnvironmentError(msg) - - try: - if fs.islink(t.get_internal_path()): - fs.symlink(fs.readlink(t.get_internal_path()), tempfile) - else: - cd.copy_to_cache(env, t.get_internal_path(), tempfile) - fs.rename(tempfile, cachefile) - except EnvironmentError: - # It's possible someone else tried writing the file at the - # same time we did, or else that there was some problem like - # the CacheDir being on a separate file system that's full. - # In any case, inability to push a file to cache doesn't affect - # the correctness of the build, so just print a warning. - msg = errfmt % (str(t), cachefile) - cd.CacheDebug(errfmt + '\n', str(t), cachefile) - SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning, msg) - -CachePush = SCons.Action.Action(CachePushFunc, None) - -# Nasty hack to cut down to one warning for each cachedir path that needs -# upgrading. -warned = dict() - -class CacheDir(object): - - def __init__(self, path): - """ - Initialize a CacheDir object. - - The cache configuration is stored in the object. 
It - is read from the config file in the supplied path if - one exists, if not the config file is created and - the default config is written, as well as saved in the object. - """ - self.requests = 0 - self.hits = 0 - self.path = path - self.current_cache_debug = None - self.debugFP = None - self.config = dict() - if path is None: - return - - if PY3: - self._readconfig3(path) - else: - self._readconfig2(path) - - - def _readconfig3(self, path): - """ - Python3 version of reading the cache config. - - If directory or config file do not exist, create. Take advantage - of Py3 capability in os.makedirs() and in file open(): just try - the operation and handle failure appropriately. - - Omit the check for old cache format, assume that's old enough - there will be none of those left to worry about. - - :param path: path to the cache directory - """ - config_file = os.path.join(path, 'config') - try: - os.makedirs(path, exist_ok=True) - except FileExistsError: - pass - except OSError: - msg = "Failed to create cache directory " + path - raise SCons.Errors.SConsEnvironmentError(msg) - - try: - with open(config_file, 'x') as config: - self.config['prefix_len'] = 2 - try: - json.dump(self.config, config) - except Exception: - msg = "Failed to write cache configuration for " + path - raise SCons.Errors.SConsEnvironmentError(msg) - except FileExistsError: - try: - with open(config_file) as config: - self.config = json.load(config) - except ValueError: - msg = "Failed to read cache configuration for " + path - raise SCons.Errors.SConsEnvironmentError(msg) - - - def _readconfig2(self, path): - """ - Python2 version of reading cache config. - - See if there is a config file in the cache directory. If there is, - use it. If there isn't, and the directory exists and isn't empty, - produce a warning. If the directory does not exist or is empty, - write a config file. 
- - :param path: path to the cache directory - """ - config_file = os.path.join(path, 'config') - if not os.path.exists(config_file): - # A note: There is a race hazard here if two processes start and - # attempt to create the cache directory at the same time. However, - # Python 2.x does not give you the option to do exclusive file - # creation (not even the option to error on opening an existing - # file for writing...). The ordering of events here is an attempt - # to alleviate this, on the basis that it's a pretty unlikely - # occurrence (would require two builds with a brand new cache - # directory) - if os.path.isdir(path) and any(f != "config" for f in os.listdir(path)): - self.config['prefix_len'] = 1 - # When building the project I was testing this on, the warning - # was output over 20 times. That seems excessive - global warned - if self.path not in warned: - msg = "Please upgrade your cache by running " +\ - "scons-configure-cache.py " + self.path - SCons.Warnings.warn(SCons.Warnings.CacheVersionWarning, msg) - warned[self.path] = True - else: - if not os.path.isdir(path): - try: - os.makedirs(path) - except OSError: - # If someone else is trying to create the directory at - # the same time as me, bad things will happen - msg = "Failed to create cache directory " + path - raise SCons.Errors.SConsEnvironmentError(msg) - - self.config['prefix_len'] = 2 - if not os.path.exists(config_file): - try: - with open(config_file, 'w') as config: - json.dump(self.config, config) - except Exception: - msg = "Failed to write cache configuration for " + path - raise SCons.Errors.SConsEnvironmentError(msg) - else: - try: - with open(config_file) as config: - self.config = json.load(config) - except ValueError: - msg = "Failed to read cache configuration for " + path - raise SCons.Errors.SConsEnvironmentError(msg) - - - def CacheDebug(self, fmt, target, cachefile): - if cache_debug != self.current_cache_debug: - if cache_debug == '-': - self.debugFP = sys.stdout - elif 
cache_debug: - self.debugFP = open(cache_debug, 'w') - else: - self.debugFP = None - self.current_cache_debug = cache_debug - if self.debugFP: - self.debugFP.write(fmt % (target, os.path.split(cachefile)[1])) - self.debugFP.write("requests: %d, hits: %d, misses: %d, hit rate: %.2f%%\n" % - (self.requests, self.hits, self.misses, self.hit_ratio)) - - @classmethod - def copy_from_cache(cls, env, src, dst): - if env.cache_timestamp_newer: - return env.fs.copy(src, dst) - else: - return env.fs.copy2(src, dst) - - @classmethod - def copy_to_cache(cls, env, src, dst): - try: - result = env.fs.copy2(src, dst) - fs = env.File(src).fs - st = fs.stat(src) - fs.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - return result - except AttributeError as ex: - raise EnvironmentError from ex - - @property - def hit_ratio(self): - return (100.0 * self.hits / self.requests if self.requests > 0 else 100) - - @property - def misses(self): - return self.requests - self.hits - - def is_enabled(self): - return cache_enabled and self.path is not None - - def is_readonly(self): - return cache_readonly - - def get_cachedir_csig(self, node): - cachedir, cachefile = self.cachepath(node) - if cachefile and os.path.exists(cachefile): - return SCons.Util.MD5filesignature(cachefile, \ - SCons.Node.FS.File.md5_chunksize * 1024) - - def cachepath(self, node): - """ - """ - if not self.is_enabled(): - return None, None - - sig = node.get_cachedir_bsig() - - subdir = sig[:self.config['prefix_len']].upper() - - dir = os.path.join(self.path, subdir) - return dir, os.path.join(dir, sig) - - def retrieve(self, node): - """ - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). - - Note that there's a special trick here with the execute flag - (one that's not normally done for other actions). 
Basically - if the user requested a no_exec (-n) build, then - SCons.Action.execute_actions is set to 0 and when any action - is called, it does its showing but then just returns zero - instead of actually calling the action execution operation. - The problem for caching is that if the file does NOT exist in - cache then the CacheRetrieveString won't return anything to - show for the task, but the Action.__call__ won't call - CacheRetrieveFunc; instead it just returns zero, which makes - the code below think that the file *was* successfully - retrieved from the cache, therefore it doesn't do any - subsequent building. However, the CacheRetrieveString didn't - print anything because it didn't actually exist in the cache, - and no more build actions will be performed, so the user just - sees nothing. The fix is to tell Action.__call__ to always - execute the CacheRetrieveFunc and then have the latter - explicitly check SCons.Action.execute_actions itself. - """ - if not self.is_enabled(): - return False - - env = node.get_build_env() - if cache_show: - if CacheRetrieveSilent(node, [], env, execute=1) == 0: - node.build(presub=0, execute=0) - return True - else: - if CacheRetrieve(node, [], env, execute=1) == 0: - return True - - return False - - def push(self, node): - if self.is_readonly() or not self.is_enabled(): - return - return CachePush(node, [], node.get_build_env()) - - def push_if_forced(self, node): - if cache_force: - return self.push(node) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Conftest.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Conftest.py deleted file mode 100644 index 4491884a090..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Conftest.py +++ /dev/null @@ -1,816 +0,0 @@ -"""SCons.Conftest - -Autoconf-like configuration support; low level implementation of tests. 
-""" - -# -# Copyright (c) 2003 Stichting NLnet Labs -# Copyright (c) 2001, 2002, 2003 Steven Knight -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -# -# The purpose of this module is to define how a check is to be performed. -# Use one of the Check...() functions below. -# - -# -# A context class is used that defines functions for carrying out the tests, -# logging and messages. The following methods and members must be present: -# -# context.Display(msg) Function called to print messages that are normally -# displayed for the user. Newlines are explicitly used. -# The text should also be written to the logfile! -# -# context.Log(msg) Function called to write to a log file. -# -# context.BuildProg(text, ext) -# Function called to build a program, using "ext" for the -# file extention. Must return an empty string for -# success, an error message for failure. 
-# For reliable test results building should be done just -# like an actual program would be build, using the same -# command and arguments (including configure results so -# far). -# -# context.CompileProg(text, ext) -# Function called to compile a program, using "ext" for -# the file extention. Must return an empty string for -# success, an error message for failure. -# For reliable test results compiling should be done just -# like an actual source file would be compiled, using the -# same command and arguments (including configure results -# so far). -# -# context.AppendLIBS(lib_name_list) -# Append "lib_name_list" to the value of LIBS. -# "lib_namelist" is a list of strings. -# Return the value of LIBS before changing it (any type -# can be used, it is passed to SetLIBS() later.) -# -# context.PrependLIBS(lib_name_list) -# Prepend "lib_name_list" to the value of LIBS. -# "lib_namelist" is a list of strings. -# Return the value of LIBS before changing it (any type -# can be used, it is passed to SetLIBS() later.) -# -# context.SetLIBS(value) -# Set LIBS to "value". The type of "value" is what -# AppendLIBS() returned. -# Return the value of LIBS before changing it (any type -# can be used, it is passed to SetLIBS() later.) -# -# context.headerfilename -# Name of file to append configure results to, usually -# "confdefs.h". -# The file must not exist or be empty when starting. -# Empty or None to skip this (some tests will not work!). -# -# context.config_h (may be missing). If present, must be a string, which -# will be filled with the contents of a config_h file. -# -# context.vardict Dictionary holding variables used for the tests and -# stores results from the tests, used for the build -# commands. -# Normally contains "CC", "LIBS", "CPPFLAGS", etc. -# -# context.havedict Dictionary holding results from the tests that are to -# be used inside a program. -# Names often start with "HAVE_". These are zero -# (feature not present) or one (feature present). 
Other -# variables may have any value, e.g., "PERLVERSION" can -# be a number and "SYSTEMNAME" a string. -# - -import re - -# -# PUBLIC VARIABLES -# - -LogInputFiles = 1 # Set that to log the input files in case of a failed test -LogErrorMessages = 1 # Set that to log Conftest-generated error messages - -# -# PUBLIC FUNCTIONS -# - -# Generic remarks: -# - When a language is specified which is not supported the test fails. The -# message is a bit different, because not all the arguments for the normal -# message are available yet (chicken-egg problem). - - -def CheckBuilder(context, text = None, language = None): - """ - Configure check to see if the compiler works. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - "text" may be used to specify the code to be build. - Returns an empty string for success, an error message for failure. - """ - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("%s\n" % msg) - return msg - - if not text: - text = """ -int main(void) { - return 0; -} -""" - - context.Display("Checking if building a %s file works... " % lang) - ret = context.BuildProg(text, suffix) - _YesNoResult(context, ret, None, text) - return ret - -def CheckCC(context): - """ - Configure check for a working C compiler. - - This checks whether the C compiler, as defined in the $CC construction - variable, can compile a C source file. It uses the current $CCCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the C compiler works... ") - text = """ -int main(void) -{ - return 0; -} -""" - ret = _check_empty_program(context, 'CC', text, 'C') - _YesNoResult(context, ret, None, text) - return ret - -def CheckSHCC(context): - """ - Configure check for a working shared C compiler. 
- - This checks whether the C compiler, as defined in the $SHCC construction - variable, can compile a C source file. It uses the current $SHCCCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the (shared) C compiler works... ") - text = """ -int foo(void) -{ - return 0; -} -""" - ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True) - _YesNoResult(context, ret, None, text) - return ret - -def CheckCXX(context): - """ - Configure check for a working CXX compiler. - - This checks whether the CXX compiler, as defined in the $CXX construction - variable, can compile a CXX source file. It uses the current $CXXCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the C++ compiler works... ") - text = """ -int main(void) -{ - return 0; -} -""" - ret = _check_empty_program(context, 'CXX', text, 'C++') - _YesNoResult(context, ret, None, text) - return ret - -def CheckSHCXX(context): - """ - Configure check for a working shared CXX compiler. - - This checks whether the CXX compiler, as defined in the $SHCXX construction - variable, can compile a CXX source file. It uses the current $SHCXXCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the (shared) C++ compiler works... 
") - text = """ -int main(void) -{ - return 0; -} -""" - ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True) - _YesNoResult(context, ret, None, text) - return ret - -def _check_empty_program(context, comp, text, language, use_shared = False): - """Return 0 on success, 1 otherwise.""" - if comp not in context.env or not context.env[comp]: - # The compiler construction variable is not set or empty - return 1 - - lang, suffix, msg = _lang2suffix(language) - if msg: - return 1 - - if use_shared: - return context.CompileSharedObject(text, suffix) - else: - return context.CompileProg(text, suffix) - - -def CheckFunc(context, function_name, header = None, language = None): - """ - Configure check for a function "function_name". - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Optional "header" can be defined to define a function prototype, include a - header file or anything else that comes before main(). - Sets HAVE_function_name in context.havedict according to the result. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - Returns an empty string for success, an error message for failure. - """ - - # Remarks from autoconf: - # - Don't include because on OSF/1 3.0 it includes - # which includes which contains a prototype for select. - # Similarly for bzero. - # - assert.h is included to define __stub macros and hopefully few - # prototypes, which can conflict with char $1(); below. - # - Override any gcc2 internal prototype to avoid an error. - # - We use char for the function declaration because int might match the - # return type of a gcc2 builtin and then its argument prototype would - # still apply. - # - The GNU C library defines this for functions which it implements to - # always fail with ENOSYS. Some functions are actually named something - # starting with __ and the normal name is an alias. 
- - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - if not header: - header = """ -#ifdef __cplusplus -extern "C" -#endif -char %s();""" % function_name - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for %s(): %s\n" % (function_name, msg)) - return msg - - text = """ -%(include)s -#include -%(hdr)s - -#if _MSC_VER && !__INTEL_COMPILER - #pragma function(%(name)s) -#endif - -int main(void) { -#if defined (__stub_%(name)s) || defined (__stub___%(name)s) - fail fail fail -#else - %(name)s(); -#endif - - return 0; -} -""" % { 'name': function_name, - 'include': includetext, - 'hdr': header } - - context.Display("Checking for %s function %s()... " % (lang, function_name)) - ret = context.BuildProg(text, suffix) - _YesNoResult(context, ret, "HAVE_" + function_name, text, - "Define to 1 if the system has the function `%s'." %\ - function_name) - return ret - - -def CheckHeader(context, header_name, header=None, language=None, - include_quotes=None): - """ - Configure check for a C or C++ header file "header_name". - Optional "header" can be defined to do something before including the - header file (unusual, supported for consistency). - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Sets HAVE_header_name in context.havedict according to the result. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS and $CPPFLAGS are set correctly. - Returns an empty string for success, an error message for failure. - """ - # Why compile the program instead of just running the preprocessor? - # It is possible that the header file exists, but actually using it may - # fail (e.g., because it depends on other header files). Thus this test is - # more strict. It may require using the "header" argument. - # - # Use <> by default, because the check is normally used for system header - # files. 
SCons passes '""' to overrule this. - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"\n' % context.headerfilename - else: - includetext = '' - if not header: - header = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for header file %s: %s\n" - % (header_name, msg)) - return msg - - if not include_quotes: - include_quotes = "<>" - - text = "%s%s\n#include %s%s%s\n\n" % (includetext, header, - include_quotes[0], header_name, include_quotes[1]) - - context.Display("Checking for %s header file %s... " % (lang, header_name)) - ret = context.CompileProg(text, suffix) - _YesNoResult(context, ret, "HAVE_" + header_name, text, - "Define to 1 if you have the <%s> header file." % header_name) - return ret - - -def CheckType(context, type_name, fallback = None, - header = None, language = None): - """ - Configure check for a C or C++ type "type_name". - Optional "header" can be defined to include a header file. - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Sets HAVE_type_name in context.havedict according to the result. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - Returns an empty string for success, an error message for failure. - """ - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - if not header: - header = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) - return msg - - # Remarks from autoconf about this test: - # - Grepping for the type in include files is not reliable (grep isn't - # portable anyway). - # - Using "TYPE my_var;" doesn't work for const qualified types in C++. 
- # Adding an initializer is not valid for some C++ classes. - # - Using the type as parameter to a function either fails for K&$ C or for - # C++. - # - Using "TYPE *my_var;" is valid in C for some types that are not - # declared (struct something). - # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable. - # - Using the previous two together works reliably. - text = """ -%(include)s -%(header)s - -int main(void) { - if ((%(name)s *) 0) - return 0; - if (sizeof (%(name)s)) - return 0; -} -""" % { 'include': includetext, - 'header': header, - 'name': type_name } - - context.Display("Checking for %s type %s... " % (lang, type_name)) - ret = context.BuildProg(text, suffix) - _YesNoResult(context, ret, "HAVE_" + type_name, text, - "Define to 1 if the system has the type `%s'." % type_name) - if ret and fallback and context.headerfilename: - f = open(context.headerfilename, "a") - f.write("typedef %s %s;\n" % (fallback, type_name)) - f.close() - - return ret - -def CheckTypeSize(context, type_name, header = None, language = None, expect = None): - """This check can be used to get the size of a given type, or to check whether - the type is of expected size. - - Arguments: - - type : str - the type to check - - includes : sequence - list of headers to include in the test code before testing the type - - language : str - 'C' or 'C++' - - expect : int - if given, will test wether the type has the given number of bytes. - If not given, will automatically find the size. - - Returns: - status : int - 0 if the check failed, or the found size of the type if the check succeeded.""" - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
- if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - - if not header: - header = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) - return msg - - src = includetext + header - if expect is not None: - # Only check if the given size is the right one - context.Display('Checking %s is %d bytes... ' % (type_name, expect)) - - # test code taken from autoconf: this is a pretty clever hack to find that - # a type is of a given size using only compilation. This speeds things up - # quite a bit compared to straightforward code using TryRun - src = src + r""" -typedef %s scons_check_type; - -int main(void) -{ - static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)]; - test_array[0] = 0; - - return 0; -} -""" - - st = context.CompileProg(src % (type_name, expect), suffix) - if not st: - context.Display("yes\n") - _Have(context, "SIZEOF_%s" % type_name, expect, - "The size of `%s', as computed by sizeof." % type_name) - return expect - else: - context.Display("no\n") - _LogFailed(context, src, st) - return 0 - else: - # Only check if the given size is the right one - context.Message('Checking size of %s ... ' % type_name) - - # We have to be careful with the program we wish to test here since - # compilation will be attempted using the current environment's flags. - # So make sure that the program will compile without any warning. For - # example using: 'int main(int argc, char** argv)' will fail with the - # '-Wall -Werror' flags since the variables argc and argv would not be - # used in the program... 
- # - src = src + """ -#include -#include -int main(void) { - printf("%d", (int)sizeof(""" + type_name + """)); - return 0; -} - """ - st, out = context.RunProg(src, suffix) - try: - size = int(out) - except ValueError: - # If cannot convert output of test prog to an integer (the size), - # something went wront, so just fail - st = 1 - size = 0 - - if not st: - context.Display("yes\n") - _Have(context, "SIZEOF_%s" % type_name, size, - "The size of `%s', as computed by sizeof." % type_name) - return size - else: - context.Display("no\n") - _LogFailed(context, src, st) - return 0 - - return 0 - -def CheckDeclaration(context, symbol, includes = None, language = None): - """Checks whether symbol is declared. - - Use the same test as autoconf, that is test whether the symbol is defined - as a macro or can be used as an r-value. - - Arguments: - symbol : str - the symbol to check - includes : str - Optional "header" can be defined to include a header file. - language : str - only C and C++ supported. - - Returns: - status : bool - True if the check failed, False if succeeded.""" - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - - if not includes: - includes = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for declaration %s: %s\n" % (symbol, msg)) - return msg - - src = includetext + includes - context.Display('Checking whether %s is declared... ' % symbol) - - src = src + r""" -int main(void) -{ -#ifndef %s - (void) %s; -#endif - ; - return 0; -} -""" % (symbol, symbol) - - st = context.CompileProg(src, suffix) - _YesNoResult(context, st, "HAVE_DECL_" + symbol, src, - "Set to 1 if %s is defined." 
% symbol) - return st - -def CheckLib(context, libs, func_name = None, header = None, - extra_libs = None, call = None, language = None, autoadd = 1, - append = True): - """ - Configure check for a C or C++ libraries "libs". Searches through - the list of libraries, until one is found where the test succeeds. - Tests if "func_name" or "call" exists in the library. Note: if it exists - in another library the test succeeds anyway! - Optional "header" can be defined to include a header file. If not given a - default prototype for "func_name" is added. - Optional "extra_libs" is a list of library names to be added after - "lib_name" in the build command. To be used for libraries that "lib_name" - depends on. - Optional "call" replaces the call to "func_name" in the test code. It must - consist of complete C statements, including a trailing ";". - Both "func_name" and "call" arguments are optional, and in that case, just - linking against the libs is tested. - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - Returns an empty string for success, an error message for failure. - """ - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - if not header: - header = "" - - text = """ -%s -%s""" % (includetext, header) - - # Add a function declaration if needed. - if func_name and func_name != "main": - if not header: - text = text + """ -#ifdef __cplusplus -extern "C" -#endif -char %s(); -""" % func_name - - # The actual test code. - if not call: - call = "%s();" % func_name - - # if no function to test, leave main() blank - text = text + """ -int -main() { - %s -return 0; -} -""" % (call or "") - - if call: - i = call.find("\n") - if i > 0: - calltext = call[:i] + ".." 
- elif call[-1] == ';': - calltext = call[:-1] - else: - calltext = call - - for lib_name in libs: - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for library %s: %s\n" % (lib_name, msg)) - return msg - - # if a function was specified to run in main(), say it - if call: - context.Display("Checking for %s in %s library %s... " - % (calltext, lang, lib_name)) - # otherwise, just say the name of library and language - else: - context.Display("Checking for %s library %s... " - % (lang, lib_name)) - - if lib_name: - l = [ lib_name ] - if extra_libs: - l.extend(extra_libs) - if append: - oldLIBS = context.AppendLIBS(l) - else: - oldLIBS = context.PrependLIBS(l) - sym = "HAVE_LIB" + lib_name - else: - oldLIBS = -1 - sym = None - - ret = context.BuildProg(text, suffix) - - _YesNoResult(context, ret, sym, text, - "Define to 1 if you have the `%s' library." % lib_name) - if oldLIBS != -1 and (ret or not autoadd): - context.SetLIBS(oldLIBS) - - if not ret: - return ret - - return ret - -def CheckProg(context, prog_name): - """ - Configure check for a specific program. - - Check whether program prog_name exists in path. If it is found, - returns the path for it, otherwise returns None. - """ - context.Display("Checking whether %s program exists..." % prog_name) - path = context.env.WhereIs(prog_name) - if path: - context.Display(path + "\n") - else: - context.Display("no\n") - return path - - -# -# END OF PUBLIC FUNCTIONS -# - -def _YesNoResult(context, ret, key, text, comment = None): - r""" - Handle the result of a test with a "yes" or "no" result. - - :Parameters: - - `ret` is the return value: empty if OK, error message when not. - - `key` is the name of the symbol to be defined (HAVE_foo). - - `text` is the source code of the program used for testing. - - `comment` is the C comment to add above the line defining the symbol (the comment is automatically put inside a /\* \*/). If None, no comment is added. 
- """ - if key: - _Have(context, key, not ret, comment) - if ret: - context.Display("no\n") - _LogFailed(context, text, ret) - else: - context.Display("yes\n") - - -def _Have(context, key, have, comment = None): - r""" - Store result of a test in context.havedict and context.headerfilename. - - :Parameters: - - `key` - is a "HAVE_abc" name. It is turned into all CAPITALS and non-alphanumerics are replaced by an underscore. - - `have` - value as it should appear in the header file, include quotes when desired and escape special characters! - - `comment` is the C comment to add above the line defining the symbol (the comment is automatically put inside a /\* \*/). If None, no comment is added. - - - The value of "have" can be: - - 1 - Feature is defined, add "#define key". - - 0 - Feature is not defined, add "/\* #undef key \*/". Adding "undef" is what autoconf does. Not useful for the compiler, but it shows that the test was done. - - number - Feature is defined to this number "#define key have". Doesn't work for 0 or 1, use a string then. - - string - Feature is defined to this string "#define key have". - - - """ - key_up = key.upper() - key_up = re.sub('[^A-Z0-9_]', '_', key_up) - context.havedict[key_up] = have - if have == 1: - line = "#define %s 1\n" % key_up - elif have == 0: - line = "/* #undef %s */\n" % key_up - elif isinstance(have, int): - line = "#define %s %d\n" % (key_up, have) - else: - line = "#define %s %s\n" % (key_up, str(have)) - - if comment is not None: - lines = "\n/* %s */\n" % comment + line - else: - lines = "\n" + line - - if context.headerfilename: - f = open(context.headerfilename, "a") - f.write(lines) - f.close() - elif hasattr(context,'config_h'): - context.config_h = context.config_h + lines - - -def _LogFailed(context, text, msg): - """ - Write to the log about a failed program. - Add line numbers, so that error messages can be understood. 
- """ - if LogInputFiles: - context.Log("Failed program was:\n") - lines = text.split('\n') - if len(lines) and lines[-1] == '': - lines = lines[:-1] # remove trailing empty line - n = 1 - for line in lines: - context.Log("%d: %s\n" % (n, line)) - n = n + 1 - if LogErrorMessages: - context.Log("Error message: %s\n" % msg) - - -def _lang2suffix(lang): - """ - Convert a language name to a suffix. - When "lang" is empty or None C is assumed. - Returns a tuple (lang, suffix, None) when it works. - For an unrecognized language returns (None, None, msg). - - Where: - - lang = the unified language name - - suffix = the suffix, including the leading dot - - msg = an error message - """ - if not lang or lang in ["C", "c"]: - return ("C", ".c", None) - if lang in ["c++", "C++", "cpp", "CXX", "cxx"]: - return ("C++", ".cpp", None) - - return None, None, "Unsupported language: %s" % lang - - -# vim: set sw=4 et sts=4 tw=79 fo+=l: - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Debug.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Debug.py deleted file mode 100644 index ef7dfffe8fd..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Debug.py +++ /dev/null @@ -1,243 +0,0 @@ -"""SCons.Debug - -Code for debugging SCons internal things. Shouldn't be -needed by most users. 
Quick shortcuts: - -from SCons.Debug import caller_trace -caller_trace() - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Debug.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import sys -import time -import weakref -import inspect - -# Global variable that gets set to 'True' by the Main script, -# when the creation of class instances should get tracked. -track_instances = False -# List of currently tracked classes -tracked_classes = {} - -def logInstanceCreation(instance, name=None): - if name is None: - name = instance.__class__.__name__ - if name not in tracked_classes: - tracked_classes[name] = [] - if hasattr(instance, '__dict__'): - tracked_classes[name].append(weakref.ref(instance)) - else: - # weakref doesn't seem to work when the instance - # contains only slots... 
- tracked_classes[name].append(instance) - -def string_to_classes(s): - if s == '*': - return sorted(tracked_classes.keys()) - else: - return s.split() - -def fetchLoggedInstances(classes="*"): - classnames = string_to_classes(classes) - return [(cn, len(tracked_classes[cn])) for cn in classnames] - -def countLoggedInstances(classes, file=sys.stdout): - for classname in string_to_classes(classes): - file.write("%s: %d\n" % (classname, len(tracked_classes[classname]))) - -def listLoggedInstances(classes, file=sys.stdout): - for classname in string_to_classes(classes): - file.write('\n%s:\n' % classname) - for ref in tracked_classes[classname]: - if inspect.isclass(ref): - obj = ref() - else: - obj = ref - if obj is not None: - file.write(' %s\n' % repr(obj)) - -def dumpLoggedInstances(classes, file=sys.stdout): - for classname in string_to_classes(classes): - file.write('\n%s:\n' % classname) - for ref in tracked_classes[classname]: - obj = ref() - if obj is not None: - file.write(' %s:\n' % obj) - for key, value in obj.__dict__.items(): - file.write(' %20s : %s\n' % (key, value)) - - - -if sys.platform[:5] == "linux": - # Linux doesn't actually support memory usage stats from getrusage(). 
- def memory(): - with open('/proc/self/stat') as f: - mstr = f.read() - mstr = mstr.split()[22] - return int(mstr) -elif sys.platform[:6] == 'darwin': - #TODO really get memory stats for OS X - def memory(): - return 0 -else: - try: - import resource - except ImportError: - try: - import win32process - import win32api - except ImportError: - def memory(): - return 0 - else: - def memory(): - process_handle = win32api.GetCurrentProcess() - memory_info = win32process.GetProcessMemoryInfo( process_handle ) - return memory_info['PeakWorkingSetSize'] - else: - def memory(): - res = resource.getrusage(resource.RUSAGE_SELF) - return res[4] - -# returns caller's stack -def caller_stack(): - import traceback - tb = traceback.extract_stack() - # strip itself and the caller from the output - tb = tb[:-2] - result = [] - for back in tb: - # (filename, line number, function name, text) - key = back[:3] - result.append('%s:%d(%s)' % func_shorten(key)) - return result - -caller_bases = {} -caller_dicts = {} - -def caller_trace(back=0): - """ - Trace caller stack and save info into global dicts, which - are printed automatically at the end of SCons execution. 
- """ - global caller_bases, caller_dicts - import traceback - tb = traceback.extract_stack(limit=3+back) - tb.reverse() - callee = tb[1][:3] - caller_bases[callee] = caller_bases.get(callee, 0) + 1 - for caller in tb[2:]: - caller = callee + caller[:3] - try: - entry = caller_dicts[callee] - except KeyError: - caller_dicts[callee] = entry = {} - entry[caller] = entry.get(caller, 0) + 1 - callee = caller - -# print a single caller and its callers, if any -def _dump_one_caller(key, file, level=0): - leader = ' '*level - for v,c in sorted([(-v,c) for c,v in caller_dicts[key].items()]): - file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:]))) - if c in caller_dicts: - _dump_one_caller(c, file, level+1) - -# print each call tree -def dump_caller_counts(file=sys.stdout): - for k in sorted(caller_bases.keys()): - file.write("Callers of %s:%d(%s), %d calls:\n" - % (func_shorten(k) + (caller_bases[k],))) - _dump_one_caller(k, file) - -shorten_list = [ - ( '/scons/SCons/', 1), - ( '/src/engine/SCons/', 1), - ( '/usr/lib/python', 0), -] - -if os.sep != '/': - shorten_list = [(t[0].replace('/', os.sep), t[1]) for t in shorten_list] - -def func_shorten(func_tuple): - f = func_tuple[0] - for t in shorten_list: - i = f.find(t[0]) - if i >= 0: - if t[1]: - i = i + len(t[0]) - return (f[i:],)+func_tuple[1:] - return func_tuple - - -TraceFP = {} -if sys.platform == 'win32': - TraceDefault = 'con' -else: - TraceDefault = '/dev/tty' - -TimeStampDefault = None -StartTime = time.time() -PreviousTime = StartTime - -def Trace(msg, file=None, mode='w', tstamp=None): - """Write a trace message to a file. 
Whenever a file is specified, - it becomes the default for the next call to Trace().""" - global TraceDefault - global TimeStampDefault - global PreviousTime - if file is None: - file = TraceDefault - else: - TraceDefault = file - if tstamp is None: - tstamp = TimeStampDefault - else: - TimeStampDefault = tstamp - try: - fp = TraceFP[file] - except KeyError: - try: - fp = TraceFP[file] = open(file, mode) - except TypeError: - # Assume we were passed an open file pointer. - fp = file - if tstamp: - now = time.time() - fp.write('%8.4f %8.4f: ' % (now - StartTime, now - PreviousTime)) - PreviousTime = now - fp.write(msg) - fp.flush() - fp.close() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Defaults.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Defaults.py deleted file mode 100644 index 118356d1b54..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Defaults.py +++ /dev/null @@ -1,596 +0,0 @@ -"""SCons.Defaults - -Builders and other things for the local site. Here's where we'll -duplicate the functionality of autoconf until we move it into the -installation procedure or use something like qmconf. - -The code that reads the registry to find MSVC components was borrowed -from distutils.msvccompiler. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import division - -__revision__ = "src/engine/SCons/Defaults.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -import os -import errno -import shutil -import stat -import time -import sys - -import SCons.Action -import SCons.Builder -import SCons.CacheDir -import SCons.Environment -import SCons.PathList -import SCons.Subst -import SCons.Tool - -# A placeholder for a default Environment (for fetching source files -# from source code management systems and the like). This must be -# initialized later, after the top-level directory is set by the calling -# interface. -_default_env = None - -# Lazily instantiate the default environment so the overhead of creating -# it doesn't apply when it's not needed. 
-def _fetch_DefaultEnvironment(*args, **kw): - """ - Returns the already-created default construction environment. - """ - global _default_env - return _default_env - -def DefaultEnvironment(*args, **kw): - """ - Initial public entry point for creating the default construction - Environment. - - After creating the environment, we overwrite our name - (DefaultEnvironment) with the _fetch_DefaultEnvironment() function, - which more efficiently returns the initialized default construction - environment without checking for its existence. - - (This function still exists with its _default_check because someone - else (*cough* Script/__init__.py *cough*) may keep a reference - to this function. So we can't use the fully functional idiom of - having the name originally be a something that *only* creates the - construction environment and then overwrites the name.) - """ - global _default_env - if not _default_env: - import SCons.Util - _default_env = SCons.Environment.Environment(*args, **kw) - if SCons.Util.md5: - _default_env.Decider('MD5') - else: - _default_env.Decider('timestamp-match') - global DefaultEnvironment - DefaultEnvironment = _fetch_DefaultEnvironment - _default_env._CacheDir_path = None - return _default_env - -# Emitters for setting the shared attribute on object files, -# and an action for checking that all of the source files -# going into a shared library are, in fact, shared. 
-def StaticObjectEmitter(target, source, env): - for tgt in target: - tgt.attributes.shared = None - return (target, source) - -def SharedObjectEmitter(target, source, env): - for tgt in target: - tgt.attributes.shared = 1 - return (target, source) - -def SharedFlagChecker(source, target, env): - same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME') - if same == '0' or same == '' or same == 'False': - for src in source: - try: - shared = src.attributes.shared - except AttributeError: - shared = None - if not shared: - raise SCons.Errors.UserError("Source file: %s is static and is not compatible with shared target: %s" % (src, target[0])) - -SharedCheck = SCons.Action.Action(SharedFlagChecker, None) - -# Some people were using these variable name before we made -# SourceFileScanner part of the public interface. Don't break their -# SConscript files until we've given them some fair warning and a -# transition period. -CScan = SCons.Tool.CScanner -DScan = SCons.Tool.DScanner -LaTeXScan = SCons.Tool.LaTeXScanner -ObjSourceScan = SCons.Tool.SourceFileScanner -ProgScan = SCons.Tool.ProgramScanner - -# These aren't really tool scanners, so they don't quite belong with -# the rest of those in Tool/__init__.py, but I'm not sure where else -# they should go. Leave them here for now. -import SCons.Scanner.Dir -DirScanner = SCons.Scanner.Dir.DirScanner() -DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner() - -# Actions for common languages. 
-CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR") -ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR") -CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR") -ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR") - -DAction = SCons.Action.Action("$DCOM", "$DCOMSTR") -ShDAction = SCons.Action.Action("$SHDCOM", "$SHDCOMSTR") - -ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR") -ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR") - -LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR") -ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR") - -LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR") - -# Common tasks that we allow users to perform in platform-independent -# ways by creating ActionFactory instances. -ActionFactory = SCons.Action.ActionFactory - -def get_paths_str(dest): - # If dest is a list, we need to manually call str() on each element - if SCons.Util.is_List(dest): - elem_strs = [] - for element in dest: - elem_strs.append('"' + str(element) + '"') - return '[' + ', '.join(elem_strs) + ']' - else: - return '"' + str(dest) + '"' - -permission_dic = { - 'u':{ - 'r':stat.S_IRUSR, - 'w':stat.S_IWUSR, - 'x':stat.S_IXUSR - }, - 'g':{ - 'r':stat.S_IRGRP, - 'w':stat.S_IWGRP, - 'x':stat.S_IXGRP - }, - 'o':{ - 'r':stat.S_IROTH, - 'w':stat.S_IWOTH, - 'x':stat.S_IXOTH - } -} - -def chmod_func(dest, mode): - import SCons.Util - from string import digits - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - if SCons.Util.is_String(mode) and 0 not in [i in digits for i in mode]: - mode = int(mode, 8) - if not SCons.Util.is_String(mode): - for element in dest: - os.chmod(str(element), mode) - else: - mode = str(mode) - for operation in mode.split(","): - if "=" in operation: - operator = "=" - elif "+" in operation: - operator = "+" - elif "-" in operation: - operator = "-" - else: - raise SyntaxError("Could not find +, - or =") - operation_list = 
operation.split(operator) - if len(operation_list) != 2: - raise SyntaxError("More than one operator found") - user = operation_list[0].strip().replace("a", "ugo") - permission = operation_list[1].strip() - new_perm = 0 - for u in user: - for p in permission: - try: - new_perm = new_perm | permission_dic[u][p] - except KeyError: - raise SyntaxError("Unrecognized user or permission format") - for element in dest: - curr_perm = os.stat(str(element)).st_mode - if operator == "=": - os.chmod(str(element), new_perm) - elif operator == "+": - os.chmod(str(element), curr_perm | new_perm) - elif operator == "-": - os.chmod(str(element), curr_perm & ~new_perm) - -def chmod_strfunc(dest, mode): - import SCons.Util - if not SCons.Util.is_String(mode): - return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode) - else: - return 'Chmod(%s, "%s")' % (get_paths_str(dest), str(mode)) - -Chmod = ActionFactory(chmod_func, chmod_strfunc) - -def copy_func(dest, src, symlinks=True): - """ - If symlinks (is true), then a symbolic link will be - shallow copied and recreated as a symbolic link; otherwise, copying - a symbolic link will be equivalent to copying the symbolic link's - final target regardless of symbolic link depth. 
- """ - - dest = str(dest) - src = str(src) - - SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): - for file in src: - shutil.copy2(file, dest) - return 0 - elif os.path.islink(src): - if symlinks: - return os.symlink(os.readlink(src), dest) - else: - return copy_func(dest, os.path.realpath(src)) - elif os.path.isfile(src): - shutil.copy2(src, dest) - return 0 - else: - shutil.copytree(src, dest, symlinks) - # copytree returns None in python2 and destination string in python3 - # A error is raised in both cases, so we can just return 0 for success - return 0 - -Copy = ActionFactory( - copy_func, - lambda dest, src, symlinks=True: 'Copy("%s", "%s")' % (dest, src) -) - -def delete_func(dest, must_exist=0): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for entry in dest: - entry = str(entry) - # os.path.exists returns False with broken links that exist - entry_exists = os.path.exists(entry) or os.path.islink(entry) - if not entry_exists and not must_exist: - continue - # os.path.isdir returns True when entry is a link to a dir - if os.path.isdir(entry) and not os.path.islink(entry): - shutil.rmtree(entry, 1) - continue - os.unlink(entry) - -def delete_strfunc(dest, must_exist=0): - return 'Delete(%s)' % get_paths_str(dest) - -Delete = ActionFactory(delete_func, delete_strfunc) - -def mkdir_func(dest): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for entry in dest: - try: - os.makedirs(str(entry)) - except os.error as e: - p = str(entry) - if (e.args[0] == errno.EEXIST or - (sys.platform=='win32' and e.args[0]==183)) \ - and os.path.isdir(str(entry)): - pass # not an error if already exists - else: - raise - -Mkdir = ActionFactory(mkdir_func, - lambda dir: 'Mkdir(%s)' % get_paths_str(dir)) - -def move_func(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - SCons.Node.FS.invalidate_node_memos(src) - shutil.move(src, 
dest) - -Move = ActionFactory(move_func, - lambda dest, src: 'Move("%s", "%s")' % (dest, src), - convert=str) - -def touch_func(dest): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for file in dest: - file = str(file) - mtime = int(time.time()) - if os.path.exists(file): - atime = os.path.getatime(file) - else: - with open(file, 'w'): - atime = mtime - os.utime(file, (atime, mtime)) - -Touch = ActionFactory(touch_func, - lambda file: 'Touch(%s)' % get_paths_str(file)) - -# Internal utility functions - - -def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None): - """ - Creates a new list from 'list' by first interpolating each element - in the list using the 'env' dictionary and then calling f on the - list, and finally calling _concat_ixes to concatenate 'prefix' and - 'suffix' onto each element of the list. - """ - if not list: - return list - - l = f(SCons.PathList.PathList(list).subst_path(env, target, source)) - if l is not None: - list = l - - return _concat_ixes(prefix, list, suffix, env) - - -def _concat_ixes(prefix, list, suffix, env): - """ - Creates a new list from 'list' by concatenating the 'prefix' and - 'suffix' arguments onto each element of the list. A trailing space - on 'prefix' or leading space on 'suffix' will cause them to be put - into separate list elements rather than being concatenated. 
- """ - - result = [] - - # ensure that prefix and suffix are strings - prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW)) - suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW)) - - for x in list: - if isinstance(x, SCons.Node.FS.File): - result.append(x) - continue - x = str(x) - if x: - - if prefix: - if prefix[-1] == ' ': - result.append(prefix[:-1]) - elif x[:len(prefix)] != prefix: - x = prefix + x - - result.append(x) - - if suffix: - if suffix[0] == ' ': - result.append(suffix[1:]) - elif x[-len(suffix):] != suffix: - result[-1] = result[-1]+suffix - - return result - - -def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None): - """ - This is a wrapper around _concat()/_concat_ixes() that checks for - the existence of prefixes or suffixes on list items and strips them - where it finds them. This is used by tools (like the GNU linker) - that need to turn something like 'libfoo.a' into '-lfoo'. - """ - - if not itms: - return itms - - if not callable(c): - env_c = env['_concat'] - if env_c != _concat and callable(env_c): - # There's a custom _concat() method in the construction - # environment, and we've allowed people to set that in - # the past (see test/custom-concat.py), so preserve the - # backwards compatibility. 
- c = env_c - else: - c = _concat_ixes - - stripprefixes = list(map(env.subst, SCons.Util.flatten(stripprefixes))) - stripsuffixes = list(map(env.subst, SCons.Util.flatten(stripsuffixes))) - - stripped = [] - for l in SCons.PathList.PathList(itms).subst_path(env, None, None): - if isinstance(l, SCons.Node.FS.File): - stripped.append(l) - continue - - if not SCons.Util.is_String(l): - l = str(l) - - for stripprefix in stripprefixes: - lsp = len(stripprefix) - if l[:lsp] == stripprefix: - l = l[lsp:] - # Do not strip more than one prefix - break - - for stripsuffix in stripsuffixes: - lss = len(stripsuffix) - if l[-lss:] == stripsuffix: - l = l[:-lss] - # Do not strip more than one suffix - break - - stripped.append(l) - - return c(prefix, stripped, suffix, env) - -def processDefines(defs): - """process defines, resolving strings, lists, dictionaries, into a list of - strings - """ - if SCons.Util.is_List(defs): - l = [] - for d in defs: - if d is None: - continue - elif SCons.Util.is_List(d) or isinstance(d, tuple): - if len(d) >= 2: - l.append(str(d[0]) + '=' + str(d[1])) - else: - l.append(str(d[0])) - elif SCons.Util.is_Dict(d): - for macro,value in d.items(): - if value is not None: - l.append(str(macro) + '=' + str(value)) - else: - l.append(str(macro)) - elif SCons.Util.is_String(d): - l.append(str(d)) - else: - raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None."%repr(d)) - elif SCons.Util.is_Dict(defs): - # The items in a dictionary are stored in random order, but - # if the order of the command-line options changes from - # invocation to invocation, then the signature of the command - # line will change and we'll get random unnecessary rebuilds. - # Consequently, we have to sort the keys to ensure a - # consistent order... 
- l = [] - for k,v in sorted(defs.items()): - if v is None: - l.append(str(k)) - else: - l.append(str(k) + '=' + str(v)) - else: - l = [str(defs)] - return l - - -def _defines(prefix, defs, suffix, env, target, source, c=_concat_ixes): - """A wrapper around _concat_ixes that turns a list or string - into a list of C preprocessor command-line definitions. - """ - - return c(prefix, env.subst_path(processDefines(defs), target=target, source=source), suffix, env) - - -class NullCmdGenerator(object): - """This is a callable class that can be used in place of other - command generators if you don't want them to do anything. - - The __call__ method for this class simply returns the thing - you instantiated it with. - - Example usage: - env["DO_NOTHING"] = NullCmdGenerator - env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}" - """ - - def __init__(self, cmd): - self.cmd = cmd - - def __call__(self, target, source, env, for_signature=None): - return self.cmd - - -class Variable_Method_Caller(object): - """A class for finding a construction variable on the stack and - calling one of its methods. - - We use this to support "construction variables" in our string - eval()s that actually stand in for methods--specifically, use - of "RDirs" in call to _concat that should actually execute the - "TARGET.RDirs" method. (We used to support this by creating a little - "build dictionary" that mapped RDirs to the method, but this got in - the way of Memoizing construction environments, because we had to - create new environment objects to hold the variables.) - """ - def __init__(self, variable, method): - self.variable = variable - self.method = method - def __call__(self, *args, **kw): - try: 1//0 - except ZeroDivisionError: - # Don't start iterating with the current stack-frame to - # prevent creating reference cycles (f_back is safe). 
- frame = sys.exc_info()[2].tb_frame.f_back - variable = self.variable - while frame: - if variable in frame.f_locals: - v = frame.f_locals[variable] - if v: - method = getattr(v, self.method) - return method(*args, **kw) - frame = frame.f_back - return None - -# if $version_var is not empty, returns env[flags_var], otherwise returns None -def __libversionflags(env, version_var, flags_var): - try: - if env.subst('$'+version_var): - return env[flags_var] - except KeyError: - pass - return None - -ConstructionEnvironment = { - 'BUILDERS' : {}, - 'SCANNERS' : [ SCons.Tool.SourceFileScanner ], - 'CONFIGUREDIR' : '#/.sconf_temp', - 'CONFIGURELOG' : '#/config.log', - 'CPPSUFFIXES' : SCons.Tool.CSuffixes, - 'DSUFFIXES' : SCons.Tool.DSuffixes, - 'ENV' : {}, - 'IDLSUFFIXES' : SCons.Tool.IDLSuffixes, - '_concat' : _concat, - '_defines' : _defines, - '_stripixes' : _stripixes, - '_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}', - '_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', - '_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', - '_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__, TARGET, SOURCE)}', - - '__libversionflags' : __libversionflags, - '__SHLIBVERSIONFLAGS' : '${__libversionflags(__env__,"SHLIBVERSION","_SHLIBVERSIONFLAGS")}', - '__LDMODULEVERSIONFLAGS' : '${__libversionflags(__env__,"LDMODULEVERSION","_LDMODULEVERSIONFLAGS")}', - '__DSHLIBVERSIONFLAGS' : '${__libversionflags(__env__,"DSHLIBVERSION","_DSHLIBVERSIONFLAGS")}', - - 'TEMPFILE' : NullCmdGenerator, - 'TEMPFILEARGJOIN': ' ', - 'Dir' : Variable_Method_Caller('TARGET', 'Dir'), - 'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'), - 'File' : Variable_Method_Caller('TARGET', 'File'), - 'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'), -} - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: 
diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Environment.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Environment.py deleted file mode 100644 index 6c32dd0d456..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Environment.py +++ /dev/null @@ -1,2476 +0,0 @@ -"""SCons.Environment - -Base class for construction Environments. These are -the primary objects used to communicate dependency and -construction information to the build engine. - -Keyword arguments supplied when the construction Environment -is created are construction variables used to initialize the -Environment -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Environment.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -import copy -import os -import sys -import re -import shlex -from collections import UserDict - -import SCons.Action -import SCons.Builder -import SCons.Debug -from SCons.Debug import logInstanceCreation -import SCons.Defaults -from SCons.Errors import UserError, BuildError -import SCons.Memoize -import SCons.Node -import SCons.Node.Alias -import SCons.Node.FS -import SCons.Node.Python -import SCons.Platform -import SCons.SConf -import SCons.SConsign -import SCons.Subst -import SCons.Tool -import SCons.Util -import SCons.Warnings - -class _Null(object): - pass - -_null = _Null - -_warn_copy_deprecated = True -_warn_source_signatures_deprecated = True -_warn_target_signatures_deprecated = True - -CleanTargets = {} -CalculatorArgs = {} - -semi_deepcopy = SCons.Util.semi_deepcopy -semi_deepcopy_dict = SCons.Util.semi_deepcopy_dict - -def alias_builder(env, target, source): - pass - -AliasBuilder = SCons.Builder.Builder(action = alias_builder, - target_factory = SCons.Node.Alias.default_ans.Alias, - source_factory = SCons.Node.FS.Entry, - multi = 1, - is_explicit = None, - name='AliasBuilder') - -def apply_tools(env, tools, toolpath): - # Store the toolpath in the Environment. - if toolpath is not None: - env['toolpath'] = toolpath - - if not tools: - return - # Filter out null tools from the list. - for tool in [_f for _f in tools if _f]: - if SCons.Util.is_List(tool) or isinstance(tool, tuple): - toolname = tool[0] - toolargs = tool[1] # should be a dict of kw args - tool = env.Tool(toolname, **toolargs) - else: - env.Tool(tool) - -# These names are (or will be) controlled by SCons; users should never -# set or override them. This warning can optionally be turned off, -# but scons will still ignore the illegal variable names even if it's off. 
-reserved_construction_var_names = [ - 'CHANGED_SOURCES', - 'CHANGED_TARGETS', - 'SOURCE', - 'SOURCES', - 'TARGET', - 'TARGETS', - 'UNCHANGED_SOURCES', - 'UNCHANGED_TARGETS', -] - -future_reserved_construction_var_names = [ - #'HOST_OS', - #'HOST_ARCH', - #'HOST_CPU', - ] - -def copy_non_reserved_keywords(dict): - result = semi_deepcopy(dict) - for k in list(result.keys()): - if k in reserved_construction_var_names: - msg = "Ignoring attempt to set reserved variable `$%s'" - SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % k) - del result[k] - return result - -def _set_reserved(env, key, value): - msg = "Ignoring attempt to set reserved variable `$%s'" - SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key) - -def _set_future_reserved(env, key, value): - env._dict[key] = value - msg = "`$%s' will be reserved in a future release and setting it will become ignored" - SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key) - -def _set_BUILDERS(env, key, value): - try: - bd = env._dict[key] - for k in list(bd.keys()): - del bd[k] - except KeyError: - bd = BuilderDict(bd, env) - env._dict[key] = bd - for k, v in value.items(): - if not SCons.Builder.is_a_Builder(v): - raise UserError('%s is not a Builder.' % repr(v)) - bd.update(value) - -def _del_SCANNERS(env, key): - del env._dict[key] - env.scanner_map_delete() - -def _set_SCANNERS(env, key, value): - env._dict[key] = value - env.scanner_map_delete() - -def _delete_duplicates(l, keep_last): - """Delete duplicates from a sequence, keeping the first or last.""" - seen=set() - result=[] - if keep_last: # reverse in & out, then keep first - l.reverse() - for i in l: - try: - if i not in seen: - result.append(i) - seen.add(i) - except TypeError: - # probably unhashable. Just keep it. 
- result.append(i) - if keep_last: - result.reverse() - return result - - - -# The following is partly based on code in a comment added by Peter -# Shannon at the following page (there called the "transplant" class): -# -# ASPN : Python Cookbook : Dynamically added methods to a class -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732 -# -# We had independently been using the idiom as BuilderWrapper, but -# factoring out the common parts into this base class, and making -# BuilderWrapper a subclass that overrides __call__() to enforce specific -# Builder calling conventions, simplified some of our higher-layer code. - -class MethodWrapper(object): - """ - A generic Wrapper class that associates a method (which can - actually be any callable) with an object. As part of creating this - MethodWrapper object an attribute with the specified (by default, - the name of the supplied method) is added to the underlying object. - When that new "method" is called, our __call__() method adds the - object as the first argument, simulating the Python behavior of - supplying "self" on method calls. - - We hang on to the name by which the method was added to the underlying - base class so that we can provide a method to "clone" ourselves onto - a new underlying object being copied (without which we wouldn't need - to save that info). - """ - def __init__(self, object, method, name=None): - if name is None: - name = method.__name__ - self.object = object - self.method = method - self.name = name - setattr(self.object, name, self) - - def __call__(self, *args, **kwargs): - nargs = (self.object,) + args - return self.method(*nargs, **kwargs) - - def clone(self, new_object): - """ - Returns an object that re-binds the underlying "method" to - the specified new object. - """ - return self.__class__(new_object, self.method, self.name) - -class BuilderWrapper(MethodWrapper): - """ - A MethodWrapper subclass that that associates an environment with - a Builder. 
- - This mainly exists to wrap the __call__() function so that all calls - to Builders can have their argument lists massaged in the same way - (treat a lone argument as the source, treat two arguments as target - then source, make sure both target and source are lists) without - having to have cut-and-paste code to do it. - - As a bit of obsessive backwards compatibility, we also intercept - attempts to get or set the "env" or "builder" attributes, which were - the names we used before we put the common functionality into the - MethodWrapper base class. We'll keep this around for a while in case - people shipped Tool modules that reached into the wrapper (like the - Tool/qt.py module does, or did). There shouldn't be a lot attribute - fetching or setting on these, so a little extra work shouldn't hurt. - """ - def __call__(self, target=None, source=_null, *args, **kw): - if source is _null: - source = target - target = None - if target is not None and not SCons.Util.is_List(target): - target = [target] - if source is not None and not SCons.Util.is_List(source): - source = [source] - return MethodWrapper.__call__(self, target, source, *args, **kw) - - def __repr__(self): - return '' % repr(self.name) - - def __str__(self): - return self.__repr__() - - def __getattr__(self, name): - if name == 'env': - return self.object - elif name == 'builder': - return self.method - else: - raise AttributeError(name) - - def __setattr__(self, name, value): - if name == 'env': - self.object = value - elif name == 'builder': - self.method = value - else: - self.__dict__[name] = value - - # This allows a Builder to be executed directly - # through the Environment to which it's attached. - # In practice, we shouldn't need this, because - # builders actually get executed through a Node. - # But we do have a unit test for this, and can't - # yet rule out that it would be useful in the - # future, so leave it for now. 
- #def execute(self, **kw): - # kw['env'] = self.env - # self.builder.execute(**kw) - -class BuilderDict(UserDict): - """This is a dictionary-like class used by an Environment to hold - the Builders. We need to do this because every time someone changes - the Builders in the Environment's BUILDERS dictionary, we must - update the Environment's attributes.""" - def __init__(self, dict, env): - # Set self.env before calling the superclass initialization, - # because it will end up calling our other methods, which will - # need to point the values in this dictionary to self.env. - self.env = env - UserDict.__init__(self, dict) - - def __semi_deepcopy__(self): - # These cannot be copied since they would both modify the same builder object, and indeed - # just copying would modify the original builder - raise TypeError( 'cannot semi_deepcopy a BuilderDict' ) - - def __setitem__(self, item, val): - try: - method = getattr(self.env, item).method - except AttributeError: - pass - else: - self.env.RemoveMethod(method) - UserDict.__setitem__(self, item, val) - BuilderWrapper(self.env, val, item) - - def __delitem__(self, item): - UserDict.__delitem__(self, item) - delattr(self.env, item) - - def update(self, dict): - for i, v in dict.items(): - self.__setitem__(i, v) - - - -_is_valid_var = re.compile(r'[_a-zA-Z]\w*$') - -def is_valid_construction_var(varstr): - """Return if the specified string is a legitimate construction - variable. - """ - return _is_valid_var.match(varstr) - - - -class SubstitutionEnvironment(object): - """Base class for different flavors of construction environments. - - This class contains a minimal set of methods that handle construction - variable expansion and conversion of strings to Nodes, which may or - may not be actually useful as a stand-alone class. Which methods - ended up in this class is pretty arbitrary right now. 
They're - basically the ones which we've empirically determined are common to - the different construction environment subclasses, and most of the - others that use or touch the underlying dictionary of construction - variables. - - Eventually, this class should contain all the methods that we - determine are necessary for a "minimal" interface to the build engine. - A full "native Python" SCons environment has gotten pretty heavyweight - with all of the methods and Tools and construction variables we've - jammed in there, so it would be nice to have a lighter weight - alternative for interfaces that don't need all of the bells and - whistles. (At some point, we'll also probably rename this class - "Base," since that more reflects what we want this class to become, - but because we've released comments that tell people to subclass - Environment.Base to create their own flavors of construction - environment, we'll save that for a future refactoring when this - class actually becomes useful.) - """ - - def __init__(self, **kw): - """Initialization of an underlying SubstitutionEnvironment class. 
- """ - if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.SubstitutionEnvironment') - self.fs = SCons.Node.FS.get_default_fs() - self.ans = SCons.Node.Alias.default_ans - self.lookup_list = SCons.Node.arg2nodes_lookups - self._dict = kw.copy() - self._init_special() - self.added_methods = [] - #self._memo = {} - - def _init_special(self): - """Initial the dispatch tables for special handling of - special construction variables.""" - self._special_del = {} - self._special_del['SCANNERS'] = _del_SCANNERS - - self._special_set = {} - for key in reserved_construction_var_names: - self._special_set[key] = _set_reserved - for key in future_reserved_construction_var_names: - self._special_set[key] = _set_future_reserved - self._special_set['BUILDERS'] = _set_BUILDERS - self._special_set['SCANNERS'] = _set_SCANNERS - - # Freeze the keys of self._special_set in a list for use by - # methods that need to check. (Empirically, list scanning has - # gotten better than dict.has_key() in Python 2.5.) - self._special_set_keys = list(self._special_set.keys()) - - def __eq__(self, other): - return self._dict == other._dict - - def __delitem__(self, key): - special = self._special_del.get(key) - if special: - special(self, key) - else: - del self._dict[key] - - def __getitem__(self, key): - return self._dict[key] - - def __setitem__(self, key, value): - # This is heavily used. This implementation is the best we have - # according to the timings in bench/env.__setitem__.py. - # - # The "key in self._special_set_keys" test here seems to perform - # pretty well for the number of keys we have. A hard-coded - # list works a little better in Python 2.5, but that has the - # disadvantage of maybe getting out of sync if we ever add more - # variable names. Using self._special_set.has_key() works a - # little better in Python 2.4, but is worse than this test. 
- # So right now it seems like a good trade-off, but feel free to - # revisit this with bench/env.__setitem__.py as needed (and - # as newer versions of Python come out). - if key in self._special_set_keys: - self._special_set[key](self, key, value) - else: - # If we already have the entry, then it's obviously a valid - # key and we don't need to check. If we do check, using a - # global, pre-compiled regular expression directly is more - # efficient than calling another function or a method. - if key not in self._dict \ - and not _is_valid_var.match(key): - raise UserError("Illegal construction variable `%s'" % key) - self._dict[key] = value - - def get(self, key, default=None): - """Emulates the get() method of dictionaries.""" - return self._dict.get(key, default) - - def has_key(self, key): - return key in self._dict - - def __contains__(self, key): - return self._dict.__contains__(key) - - def items(self): - return list(self._dict.items()) - - def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw): - if node_factory is _null: - node_factory = self.fs.File - if lookup_list is _null: - lookup_list = self.lookup_list - - if not args: - return [] - - args = SCons.Util.flatten(args) - - nodes = [] - for v in args: - if SCons.Util.is_String(v): - n = None - for l in lookup_list: - n = l(v) - if n is not None: - break - if n is not None: - if SCons.Util.is_String(n): - # n = self.subst(n, raw=1, **kw) - kw['raw'] = 1 - n = self.subst(n, **kw) - if node_factory: - n = node_factory(n) - if SCons.Util.is_List(n): - nodes.extend(n) - else: - nodes.append(n) - elif node_factory: - # v = node_factory(self.subst(v, raw=1, **kw)) - kw['raw'] = 1 - v = node_factory(self.subst(v, **kw)) - if SCons.Util.is_List(v): - nodes.extend(v) - else: - nodes.append(v) - else: - nodes.append(v) - - return nodes - - def gvars(self): - return self._dict - - def lvars(self): - return {} - - def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None): - 
"""Recursively interpolates construction variables from the - Environment into the specified string, returning the expanded - result. Construction variables are specified by a $ prefix - in the string and begin with an initial underscore or - alphabetic character followed by any number of underscores - or alphanumeric characters. The construction variable names - may be surrounded by curly braces to separate the name from - trailing characters. - """ - gvars = self.gvars() - lvars = self.lvars() - lvars['__env__'] = self - if executor: - lvars.update(executor.get_lvars()) - return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv) - - def subst_kw(self, kw, raw=0, target=None, source=None): - nkw = {} - for k, v in kw.items(): - k = self.subst(k, raw, target, source) - if SCons.Util.is_String(v): - v = self.subst(v, raw, target, source) - nkw[k] = v - return nkw - - def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None): - """Calls through to SCons.Subst.scons_subst_list(). See - the documentation for that function.""" - gvars = self.gvars() - lvars = self.lvars() - lvars['__env__'] = self - if executor: - lvars.update(executor.get_lvars()) - return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv) - - def subst_path(self, path, target=None, source=None): - """Substitute a path list, turning EntryProxies into Nodes - and leaving Nodes (and other objects) as-is.""" - - if not SCons.Util.is_List(path): - path = [path] - - def s(obj): - """This is the "string conversion" routine that we have our - substitutions use to return Nodes, not strings. 
This relies - on the fact that an EntryProxy object has a get() method that - returns the underlying Node that it wraps, which is a bit of - architectural dependence that we might need to break or modify - in the future in response to additional requirements.""" - try: - get = obj.get - except AttributeError: - obj = SCons.Util.to_String_for_subst(obj) - else: - obj = get() - return obj - - r = [] - for p in path: - if SCons.Util.is_String(p): - p = self.subst(p, target=target, source=source, conv=s) - if SCons.Util.is_List(p): - if len(p) == 1: - p = p[0] - else: - # We have an object plus a string, or multiple - # objects that we need to smush together. No choice - # but to make them into a string. - p = ''.join(map(SCons.Util.to_String_for_subst, p)) - else: - p = s(p) - r.append(p) - return r - - subst_target_source = subst - - def backtick(self, command): - import subprocess - # common arguments - kw = { 'stdin' : 'devnull', - 'stdout' : subprocess.PIPE, - 'stderr' : subprocess.PIPE, - 'universal_newlines' : True, - } - # if the command is a list, assume it's been quoted - # othewise force a shell - if not SCons.Util.is_List(command): kw['shell'] = True - # run constructed command - p = SCons.Action._subproc(self, command, **kw) - out,err = p.communicate() - status = p.wait() - if err: - sys.stderr.write(u"" + err) - if status: - raise OSError("'%s' exited %d" % (command, status)) - return out - - def AddMethod(self, function, name=None): - """ - Adds the specified function as a method of this construction - environment with the specified name. If the name is omitted, - the default name is the name of the function itself. - """ - method = MethodWrapper(self, function, name) - self.added_methods.append(method) - - def RemoveMethod(self, function): - """ - Removes the specified function's MethodWrapper from the - added_methods list, so we don't re-bind it when making a clone. 
- """ - self.added_methods = [dm for dm in self.added_methods if dm.method is not function] - - def Override(self, overrides): - """ - Produce a modified environment whose variables are overridden by - the overrides dictionaries. "overrides" is a dictionary that - will override the variables of this environment. - - This function is much more efficient than Clone() or creating - a new Environment because it doesn't copy the construction - environment dictionary, it just wraps the underlying construction - environment, and doesn't even create a wrapper object if there - are no overrides. - """ - if not overrides: return self - o = copy_non_reserved_keywords(overrides) - if not o: return self - overrides = {} - merges = None - for key, value in o.items(): - if key == 'parse_flags': - merges = value - else: - overrides[key] = SCons.Subst.scons_subst_once(value, self, key) - env = OverrideEnvironment(self, overrides) - if merges: env.MergeFlags(merges) - return env - - def ParseFlags(self, *flags): - """ - Parse the set of flags and return a dict with the flags placed - in the appropriate entry. The flags are treated as a typical - set of command-line flags for a GNU-like toolchain and used to - populate the entries in the dict immediately below. If one of - the flag strings begins with a bang (exclamation mark), it is - assumed to be a command and the rest of the string is executed; - the result of that evaluation is then added to the dict. 
- """ - dict = { - 'ASFLAGS' : SCons.Util.CLVar(''), - 'CFLAGS' : SCons.Util.CLVar(''), - 'CCFLAGS' : SCons.Util.CLVar(''), - 'CXXFLAGS' : SCons.Util.CLVar(''), - 'CPPDEFINES' : [], - 'CPPFLAGS' : SCons.Util.CLVar(''), - 'CPPPATH' : [], - 'FRAMEWORKPATH' : SCons.Util.CLVar(''), - 'FRAMEWORKS' : SCons.Util.CLVar(''), - 'LIBPATH' : [], - 'LIBS' : [], - 'LINKFLAGS' : SCons.Util.CLVar(''), - 'RPATH' : [], - } - - def do_parse(arg): - # if arg is a sequence, recurse with each element - if not arg: - return - - if not SCons.Util.is_String(arg): - for t in arg: do_parse(t) - return - - # if arg is a command, execute it - if arg[0] == '!': - arg = self.backtick(arg[1:]) - - # utility function to deal with -D option - def append_define(name, dict = dict): - t = name.split('=') - if len(t) == 1: - dict['CPPDEFINES'].append(name) - else: - dict['CPPDEFINES'].append([t[0], '='.join(t[1:])]) - - # Loop through the flags and add them to the appropriate option. - # This tries to strike a balance between checking for all possible - # flags and keeping the logic to a finite size, so it doesn't - # check for some that don't occur often. It particular, if the - # flag is not known to occur in a config script and there's a way - # of passing the flag to the right place (by wrapping it in a -W - # flag, for example) we don't check for it. Note that most - # preprocessor options are not handled, since unhandled options - # are placed in CCFLAGS, so unless the preprocessor is invoked - # separately, these flags will still get to the preprocessor. 
- # Other options not currently handled: - # -iqoutedir (preprocessor search path) - # -u symbol (linker undefined symbol) - # -s (linker strip files) - # -static* (linker static binding) - # -shared* (linker dynamic binding) - # -symbolic (linker global binding) - # -R dir (deprecated linker rpath) - # IBM compilers may also accept -qframeworkdir=foo - - params = shlex.split(arg) - append_next_arg_to = None # for multi-word args - for arg in params: - if append_next_arg_to: - if append_next_arg_to == 'CPPDEFINES': - append_define(arg) - elif append_next_arg_to == '-include': - t = ('-include', self.fs.File(arg)) - dict['CCFLAGS'].append(t) - elif append_next_arg_to == '-imacros': - t = ('-imacros', self.fs.File(arg)) - dict['CCFLAGS'].append(t) - elif append_next_arg_to == '-isysroot': - t = ('-isysroot', arg) - dict['CCFLAGS'].append(t) - dict['LINKFLAGS'].append(t) - elif append_next_arg_to == '-isystem': - t = ('-isystem', arg) - dict['CCFLAGS'].append(t) - elif append_next_arg_to == '-iquote': - t = ('-iquote', arg) - dict['CCFLAGS'].append(t) - elif append_next_arg_to == '-idirafter': - t = ('-idirafter', arg) - dict['CCFLAGS'].append(t) - elif append_next_arg_to == '-arch': - t = ('-arch', arg) - dict['CCFLAGS'].append(t) - dict['LINKFLAGS'].append(t) - else: - dict[append_next_arg_to].append(arg) - append_next_arg_to = None - elif not arg[0] in ['-', '+']: - dict['LIBS'].append(self.fs.File(arg)) - elif arg == '-dylib_file': - dict['LINKFLAGS'].append(arg) - append_next_arg_to = 'LINKFLAGS' - elif arg[:2] == '-L': - if arg[2:]: - dict['LIBPATH'].append(arg[2:]) - else: - append_next_arg_to = 'LIBPATH' - elif arg[:2] == '-l': - if arg[2:]: - dict['LIBS'].append(arg[2:]) - else: - append_next_arg_to = 'LIBS' - elif arg[:2] == '-I': - if arg[2:]: - dict['CPPPATH'].append(arg[2:]) - else: - append_next_arg_to = 'CPPPATH' - elif arg[:4] == '-Wa,': - dict['ASFLAGS'].append(arg[4:]) - dict['CCFLAGS'].append(arg) - elif arg[:4] == '-Wl,': - if arg[:11] == 
'-Wl,-rpath=': - dict['RPATH'].append(arg[11:]) - elif arg[:7] == '-Wl,-R,': - dict['RPATH'].append(arg[7:]) - elif arg[:6] == '-Wl,-R': - dict['RPATH'].append(arg[6:]) - else: - dict['LINKFLAGS'].append(arg) - elif arg[:4] == '-Wp,': - dict['CPPFLAGS'].append(arg) - elif arg[:2] == '-D': - if arg[2:]: - append_define(arg[2:]) - else: - append_next_arg_to = 'CPPDEFINES' - elif arg == '-framework': - append_next_arg_to = 'FRAMEWORKS' - elif arg[:14] == '-frameworkdir=': - dict['FRAMEWORKPATH'].append(arg[14:]) - elif arg[:2] == '-F': - if arg[2:]: - dict['FRAMEWORKPATH'].append(arg[2:]) - else: - append_next_arg_to = 'FRAMEWORKPATH' - elif arg in ['-mno-cygwin', - '-pthread', - '-openmp', - '-fmerge-all-constants', - '-fopenmp']: - dict['CCFLAGS'].append(arg) - dict['LINKFLAGS'].append(arg) - elif arg == '-mwindows': - dict['LINKFLAGS'].append(arg) - elif arg[:5] == '-std=': - if '++' in arg[5:]: - key='CXXFLAGS' - else: - key='CFLAGS' - dict[key].append(arg) - elif arg[0] == '+': - dict['CCFLAGS'].append(arg) - dict['LINKFLAGS'].append(arg) - elif arg in ['-include', '-imacros', '-isysroot', '-isystem', '-iquote', '-idirafter', '-arch']: - append_next_arg_to = arg - else: - dict['CCFLAGS'].append(arg) - - for arg in flags: - do_parse(arg) - return dict - - def MergeFlags(self, args, unique=1, dict=None): - """ - Merge the dict in args into the construction variables of this - env, or the passed-in dict. If args is not a dict, it is - converted into a dict using ParseFlags. If unique is not set, - the flags are appended rather than merged. - """ - - if dict is None: - dict = self - if not SCons.Util.is_Dict(args): - args = self.ParseFlags(args) - if not unique: - self.Append(**args) - return self - for key, value in args.items(): - if not value: - continue - try: - orig = self[key] - except KeyError: - orig = value - else: - if not orig: - orig = value - elif value: - # Add orig and value. 
The logic here was lifted from - # part of env.Append() (see there for a lot of comments - # about the order in which things are tried) and is - # used mainly to handle coercion of strings to CLVar to - # "do the right thing" given (e.g.) an original CCFLAGS - # string variable like '-pipe -Wall'. - try: - orig = orig + value - except (KeyError, TypeError): - try: - add_to_orig = orig.append - except AttributeError: - value.insert(0, orig) - orig = value - else: - add_to_orig(value) - t = [] - if key[-4:] == 'PATH': - ### keep left-most occurence - for v in orig: - if v not in t: - t.append(v) - else: - ### keep right-most occurence - orig.reverse() - for v in orig: - if v not in t: - t.insert(0, v) - self[key] = t - return self - - -def default_decide_source(dependency, target, prev_ni, repo_node=None): - f = SCons.Defaults.DefaultEnvironment().decide_source - return f(dependency, target, prev_ni, repo_node) - - -def default_decide_target(dependency, target, prev_ni, repo_node=None): - f = SCons.Defaults.DefaultEnvironment().decide_target - return f(dependency, target, prev_ni, repo_node) - - -def default_copy_from_cache(env, src, dst): - return SCons.CacheDir.CacheDir.copy_from_cache(env, src, dst) - -def default_copy_to_cache(env, src, dst): - return SCons.CacheDir.CacheDir.copy_to_cache(env, src, dst) - - -class Base(SubstitutionEnvironment): - """Base class for "real" construction Environments. These are the - primary objects used to communicate dependency and construction - information to the build engine. - - Keyword arguments supplied when the construction Environment - is created are construction variables used to initialize the - Environment. - """ - - ####################################################################### - # This is THE class for interacting with the SCons build engine, - # and it contains a lot of stuff, so we're going to try to keep this - # a little organized by grouping the methods. 
- ####################################################################### - - ####################################################################### - # Methods that make an Environment act like a dictionary. These have - # the expected standard names for Python mapping objects. Note that - # we don't actually make an Environment a subclass of UserDict for - # performance reasons. Note also that we only supply methods for - # dictionary functionality that we actually need and use. - ####################################################################### - - def __init__(self, - platform=None, - tools=None, - toolpath=None, - variables=None, - parse_flags = None, - **kw): - """ - Initialization of a basic SCons construction environment, - including setting up special construction variables like BUILDER, - PLATFORM, etc., and searching for and applying available Tools. - - Note that we do *not* call the underlying base class - (SubsitutionEnvironment) initialization, because we need to - initialize things in a very specific order that doesn't work - with the much simpler base class initialization. - """ - if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.Base') - self._memo = {} - self.fs = SCons.Node.FS.get_default_fs() - self.ans = SCons.Node.Alias.default_ans - self.lookup_list = SCons.Node.arg2nodes_lookups - self._dict = semi_deepcopy(SCons.Defaults.ConstructionEnvironment) - self._init_special() - self.added_methods = [] - - # We don't use AddMethod, or define these as methods in this - # class, because we *don't* want these functions to be bound - # methods. They need to operate independently so that the - # settings will work properly regardless of whether a given - # target ends up being built with a Base environment or an - # OverrideEnvironment or what have you. 
- self.decide_target = default_decide_target - self.decide_source = default_decide_source - - self.cache_timestamp_newer = False - - self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self) - - if platform is None: - platform = self._dict.get('PLATFORM', None) - if platform is None: - platform = SCons.Platform.Platform() - if SCons.Util.is_String(platform): - platform = SCons.Platform.Platform(platform) - self._dict['PLATFORM'] = str(platform) - platform(self) - - self._dict['HOST_OS'] = self._dict.get('HOST_OS',None) - self._dict['HOST_ARCH'] = self._dict.get('HOST_ARCH',None) - - # Now set defaults for TARGET_{OS|ARCH} - self._dict['TARGET_OS'] = self._dict.get('TARGET_OS',None) - self._dict['TARGET_ARCH'] = self._dict.get('TARGET_ARCH',None) - - - # Apply the passed-in and customizable variables to the - # environment before calling the tools, because they may use - # some of them during initialization. - if 'options' in kw: - # Backwards compatibility: they may stll be using the - # old "options" keyword. - variables = kw['options'] - del kw['options'] - self.Replace(**kw) - keys = list(kw.keys()) - if variables: - keys = keys + list(variables.keys()) - variables.Update(self) - - save = {} - for k in keys: - try: - save[k] = self._dict[k] - except KeyError: - # No value may have been set if they tried to pass in a - # reserved variable name like TARGETS. - pass - - SCons.Tool.Initializers(self) - - if tools is None: - tools = self._dict.get('TOOLS', None) - if tools is None: - tools = ['default'] - apply_tools(self, tools, toolpath) - - # Now restore the passed-in and customized variables - # to the environment, since the values the user set explicitly - # should override any values set by the tools. 
- for key, val in save.items(): - self._dict[key] = val - - # Finally, apply any flags to be merged in - if parse_flags: self.MergeFlags(parse_flags) - - ####################################################################### - # Utility methods that are primarily for internal use by SCons. - # These begin with lower-case letters. - ####################################################################### - - def get_builder(self, name): - """Fetch the builder with the specified name from the environment. - """ - try: - return self._dict['BUILDERS'][name] - except KeyError: - return None - - def validate_CacheDir_class(self, custom_class=None): - """Validate the passed custom CacheDir class, or if no args are passed, - validate the custom CacheDir class from the environment. - """ - - if custom_class is None: - custom_class = self.get("CACHEDIR_CLASS", SCons.CacheDir.CacheDir) - if not issubclass(custom_class, SCons.CacheDir.CacheDir): - raise UserError("Custom CACHEDIR_CLASS %s not derived from CacheDir" % str(custom_class)) - return custom_class - - def get_CacheDir(self): - try: - path = self._CacheDir_path - except AttributeError: - path = SCons.Defaults.DefaultEnvironment()._CacheDir_path - - cachedir_class = self.validate_CacheDir_class() - try: - if (path == self._last_CacheDir_path - # this checks if the cachedir class type has changed from what the - # instantiated cache dir type is. If the are exactly the same we - # can just keep using the existing one, otherwise the user is requesting - # something new, so we will re-instantiate below. - and type(self._last_CacheDir) is cachedir_class): - return self._last_CacheDir - except AttributeError: - pass - - cd = cachedir_class(path) - self._last_CacheDir_path = path - self._last_CacheDir = cd - return cd - - def get_factory(self, factory, default='File'): - """Return a factory function for creating Nodes for this - construction environment. 
- """ - name = default - try: - is_node = issubclass(factory, SCons.Node.FS.Base) - except TypeError: - # The specified factory isn't a Node itself--it's - # most likely None, or possibly a callable. - pass - else: - if is_node: - # The specified factory is a Node (sub)class. Try to - # return the FS method that corresponds to the Node's - # name--that is, we return self.fs.Dir if they want a Dir, - # self.fs.File for a File, etc. - try: name = factory.__name__ - except AttributeError: pass - else: factory = None - if not factory: - # They passed us None, or we picked up a name from a specified - # class, so return the FS method. (Note that we *don't* - # use our own self.{Dir,File} methods because that would - # cause env.subst() to be called twice on the file name, - # interfering with files that have $$ in them.) - factory = getattr(self.fs, name) - return factory - - @SCons.Memoize.CountMethodCall - def _gsm(self): - try: - return self._memo['_gsm'] - except KeyError: - pass - - result = {} - - try: - scanners = self._dict['SCANNERS'] - except KeyError: - pass - else: - # Reverse the scanner list so that, if multiple scanners - # claim they can scan the same suffix, earlier scanners - # in the list will overwrite later scanners, so that - # the result looks like a "first match" to the user. - if not SCons.Util.is_List(scanners): - scanners = [scanners] - else: - scanners = scanners[:] # copy so reverse() doesn't mod original - scanners.reverse() - for scanner in scanners: - for k in scanner.get_skeys(self): - if k and self['PLATFORM'] == 'win32': - k = k.lower() - result[k] = scanner - - self._memo['_gsm'] = result - - return result - - def get_scanner(self, skey): - """Find the appropriate scanner given a key (usually a file suffix). - """ - if skey and self['PLATFORM'] == 'win32': - skey = skey.lower() - return self._gsm().get(skey) - - def scanner_map_delete(self, kw=None): - """Delete the cached scanner map (if we need to). 
- """ - try: - del self._memo['_gsm'] - except KeyError: - pass - - def _update(self, dict): - """Update an environment's values directly, bypassing the normal - checks that occur when users try to set items. - """ - self._dict.update(dict) - - def get_src_sig_type(self): - try: - return self.src_sig_type - except AttributeError: - t = SCons.Defaults.DefaultEnvironment().src_sig_type - self.src_sig_type = t - return t - - def get_tgt_sig_type(self): - try: - return self.tgt_sig_type - except AttributeError: - t = SCons.Defaults.DefaultEnvironment().tgt_sig_type - self.tgt_sig_type = t - return t - - ####################################################################### - # Public methods for manipulating an Environment. These begin with - # upper-case letters. The essential characteristic of methods in - # this section is that they do *not* have corresponding same-named - # global functions. For example, a stand-alone Append() function - # makes no sense, because Append() is all about appending values to - # an Environment's construction variables. - ####################################################################### - - def Append(self, **kw): - """Append values to existing construction variables - in an Environment. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - # It would be easier on the eyes to write this using - # "continue" statements whenever we finish processing an item, - # but Python 1.5.2 apparently doesn't let you use "continue" - # within try:-except: blocks, so we have to nest our code. - try: - if key == 'CPPDEFINES' and SCons.Util.is_String(self._dict[key]): - self._dict[key] = [self._dict[key]] - orig = self._dict[key] - except KeyError: - # No existing variable in the environment, so just set - # it to the new value. - if key == 'CPPDEFINES' and SCons.Util.is_String(val): - self._dict[key] = [val] - else: - self._dict[key] = val - else: - try: - # Check if the original looks like a dictionary. 
- # If it is, we can't just try adding the value because - # dictionaries don't have __add__() methods, and - # things like UserList will incorrectly coerce the - # original dict to a list (which we don't want). - update_dict = orig.update - except AttributeError: - try: - # Most straightforward: just try to add them - # together. This will work in most cases, when the - # original and new values are of compatible types. - self._dict[key] = orig + val - except (KeyError, TypeError): - try: - # Check if the original is a list. - add_to_orig = orig.append - except AttributeError: - # The original isn't a list, but the new - # value is (by process of elimination), - # so insert the original in the new value - # (if there's one to insert) and replace - # the variable with it. - if orig: - val.insert(0, orig) - self._dict[key] = val - else: - # The original is a list, so append the new - # value to it (if there's a value to append). - if val: - add_to_orig(val) - else: - # The original looks like a dictionary, so update it - # based on what we think the value looks like. - if SCons.Util.is_List(val): - if key == 'CPPDEFINES': - tmp = [] - for (k, v) in orig.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - orig = tmp - orig += val - self._dict[key] = orig - else: - for v in val: - orig[v] = None - else: - try: - update_dict(val) - except (AttributeError, TypeError, ValueError): - if SCons.Util.is_Dict(val): - for k, v in val.items(): - orig[k] = v - else: - orig[val] = None - self.scanner_map_delete(kw) - - # allow Dirs and strings beginning with # for top-relative - # Note this uses the current env's fs (in self). 
- def _canonicalize(self, path): - if not SCons.Util.is_String(path): # typically a Dir - path = str(path) - if path and path[0] == '#': - path = str(self.fs.Dir(path)) - return path - - def AppendENVPath(self, name, newpath, envname = 'ENV', - sep = os.pathsep, delete_existing=0): - """Append path elements to the path 'name' in the 'ENV' - dictionary for this environment. Will only add any particular - path once, and will normpath and normcase all paths to help - assure this. This can also handle the case where the env - variable is a list instead of a string. - - If delete_existing is 0, a newpath which is already in the path - will not be moved to the end (it will be left where it is). - """ - - orig = '' - if envname in self._dict and name in self._dict[envname]: - orig = self._dict[envname][name] - - nv = SCons.Util.AppendPath(orig, newpath, sep, delete_existing, - canonicalize=self._canonicalize) - - if envname not in self._dict: - self._dict[envname] = {} - - self._dict[envname][name] = nv - - def AppendUnique(self, delete_existing=0, **kw): - """Append values to existing construction variables - in an Environment, if they're not already there. - If delete_existing is 1, removes existing values first, so - values move to end. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - if SCons.Util.is_List(val): - val = _delete_duplicates(val, delete_existing) - if key not in self._dict or self._dict[key] in ('', None): - self._dict[key] = val - elif SCons.Util.is_Dict(self._dict[key]) and \ - SCons.Util.is_Dict(val): - self._dict[key].update(val) - elif SCons.Util.is_List(val): - dk = self._dict[key] - if key == 'CPPDEFINES': - tmp = [] - for i in val: - if SCons.Util.is_List(i): - if len(i) >= 2: - tmp.append((i[0], i[1])) - else: - tmp.append((i[0],)) - elif SCons.Util.is_Tuple(i): - tmp.append(i) - else: - tmp.append((i,)) - val = tmp - # Construct a list of (key, value) tuples. 
- if SCons.Util.is_Dict(dk): - tmp = [] - for (k, v) in dk.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - dk = tmp - elif SCons.Util.is_String(dk): - dk = [(dk,)] - else: - tmp = [] - for i in dk: - if SCons.Util.is_List(i): - if len(i) >= 2: - tmp.append((i[0], i[1])) - else: - tmp.append((i[0],)) - elif SCons.Util.is_Tuple(i): - tmp.append(i) - else: - tmp.append((i,)) - dk = tmp - else: - if not SCons.Util.is_List(dk): - dk = [dk] - if delete_existing: - dk = [x for x in dk if x not in val] - else: - val = [x for x in val if x not in dk] - self._dict[key] = dk + val - else: - dk = self._dict[key] - if SCons.Util.is_List(dk): - if key == 'CPPDEFINES': - tmp = [] - for i in dk: - if SCons.Util.is_List(i): - if len(i) >= 2: - tmp.append((i[0], i[1])) - else: - tmp.append((i[0],)) - elif SCons.Util.is_Tuple(i): - tmp.append(i) - else: - tmp.append((i,)) - dk = tmp - # Construct a list of (key, value) tuples. - if SCons.Util.is_Dict(val): - tmp = [] - for (k, v) in val.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - val = tmp - elif SCons.Util.is_String(val): - val = [(val,)] - if delete_existing: - dk = list(filter(lambda x, val=val: x not in val, dk)) - self._dict[key] = dk + val - else: - dk = [x for x in dk if x not in val] - self._dict[key] = dk + val - else: - # By elimination, val is not a list. Since dk is a - # list, wrap val in a list first. 
- if delete_existing: - dk = list(filter(lambda x, val=val: x not in val, dk)) - self._dict[key] = dk + [val] - else: - if val not in dk: - self._dict[key] = dk + [val] - else: - if key == 'CPPDEFINES': - if SCons.Util.is_String(dk): - dk = [dk] - elif SCons.Util.is_Dict(dk): - tmp = [] - for (k, v) in dk.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - dk = tmp - if SCons.Util.is_String(val): - if val in dk: - val = [] - else: - val = [val] - elif SCons.Util.is_Dict(val): - tmp = [] - for i,j in val.items(): - if j is not None: - tmp.append((i,j)) - else: - tmp.append(i) - val = tmp - if delete_existing: - dk = [x for x in dk if x not in val] - self._dict[key] = dk + val - self.scanner_map_delete(kw) - - def Clone(self, tools=[], toolpath=None, parse_flags = None, **kw): - """Return a copy of a construction Environment. The - copy is like a Python "deep copy"--that is, independent - copies are made recursively of each objects--except that - a reference is copied when an object is not deep-copyable - (like a function). There are no references to any mutable - objects in the original Environment. - """ - - builders = self._dict.get('BUILDERS', {}) - - clone = copy.copy(self) - # BUILDERS is not safe to do a simple copy - clone._dict = semi_deepcopy_dict(self._dict, ['BUILDERS']) - clone._dict['BUILDERS'] = BuilderDict(builders, clone) - - # Check the methods added via AddMethod() and re-bind them to - # the cloned environment. Only do this if the attribute hasn't - # been overwritten by the user explicitly and still points to - # the added method. 
- clone.added_methods = [] - for mw in self.added_methods: - if mw == getattr(self, mw.name): - clone.added_methods.append(mw.clone(clone)) - - clone._memo = {} - - # Apply passed-in variables before the tools - # so the tools can use the new variables - kw = copy_non_reserved_keywords(kw) - new = {} - for key, value in kw.items(): - new[key] = SCons.Subst.scons_subst_once(value, self, key) - clone.Replace(**new) - - apply_tools(clone, tools, toolpath) - - # apply them again in case the tools overwrote them - clone.Replace(**new) - - # Finally, apply any flags to be merged in - if parse_flags: clone.MergeFlags(parse_flags) - - if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.EnvironmentClone') - return clone - - def _changed_build(self, dependency, target, prev_ni, repo_node=None): - if dependency.changed_state(target, prev_ni, repo_node): - return 1 - return self.decide_source(dependency, target, prev_ni, repo_node) - - def _changed_content(self, dependency, target, prev_ni, repo_node=None): - return dependency.changed_content(target, prev_ni, repo_node) - - def _changed_source(self, dependency, target, prev_ni, repo_node=None): - target_env = dependency.get_build_env() - type = target_env.get_tgt_sig_type() - if type == 'source': - return target_env.decide_source(dependency, target, prev_ni, repo_node) - else: - return target_env.decide_target(dependency, target, prev_ni, repo_node) - - def _changed_timestamp_then_content(self, dependency, target, prev_ni, repo_node=None): - return dependency.changed_timestamp_then_content(target, prev_ni, repo_node) - - def _changed_timestamp_newer(self, dependency, target, prev_ni, repo_node=None): - return dependency.changed_timestamp_newer(target, prev_ni, repo_node) - - def _changed_timestamp_match(self, dependency, target, prev_ni, repo_node=None): - return dependency.changed_timestamp_match(target, prev_ni, repo_node) - - def Decider(self, function): - self.cache_timestamp_newer = False - if function 
in ('MD5', 'content'): - if not SCons.Util.md5: - raise UserError("MD5 signatures are not available in this version of Python.") - function = self._changed_content - elif function == 'MD5-timestamp': - function = self._changed_timestamp_then_content - elif function in ('timestamp-newer', 'make'): - function = self._changed_timestamp_newer - self.cache_timestamp_newer = True - elif function == 'timestamp-match': - function = self._changed_timestamp_match - elif not callable(function): - raise UserError("Unknown Decider value %s" % repr(function)) - - # We don't use AddMethod because we don't want to turn the - # function, which only expects three arguments, into a bound - # method, which would add self as an initial, fourth argument. - self.decide_target = function - self.decide_source = function - - - def Detect(self, progs): - """Return the first available program in progs. - - :param progs: one or more command names to check for - :type progs: str or list - :returns str: first name from progs that can be found. - - """ - if not SCons.Util.is_List(progs): - progs = [ progs ] - for prog in progs: - path = self.WhereIs(prog) - if path: return prog - return None - - - def Dictionary(self, *args): - """Return construction variables from an environment. - - :param *args: (optional) variable names to look up - :returns: if args omitted, the dictionary of all constr. vars. - If one arg, the corresponding value is returned. - If more than one arg, a list of values is returned. - :raises KeyError: if any of *args is not in the construction env. - - """ - if not args: - return self._dict - dlist = [self._dict[x] for x in args] - if len(dlist) == 1: - dlist = dlist[0] - return dlist - - - def Dump(self, key=None): - """ Return pretty-printed string of construction variables. - - :param key: if None, format the whole dict of variables. - Else look up and format just the value for key. 
- - """ - import pprint - pp = pprint.PrettyPrinter(indent=2) - if key: - cvars = self.Dictionary(key) - else: - cvars = self.Dictionary() - - # TODO: pprint doesn't do a nice job on path-style values - # if the paths contain spaces (i.e. Windows), because the - # algorithm tries to break lines on spaces, while breaking - # on the path-separator would be more "natural". Is there - # a better way to format those? - return pp.pformat(cvars) - - - def FindIxes(self, paths, prefix, suffix): - """ - Search a list of paths for something that matches the prefix and suffix. - - paths - the list of paths or nodes. - prefix - construction variable for the prefix. - suffix - construction variable for the suffix. - """ - - suffix = self.subst('$'+suffix) - prefix = self.subst('$'+prefix) - - for path in paths: - dir,name = os.path.split(str(path)) - if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix: - return path - - def ParseConfig(self, command, function=None, unique=1): - """ - Use the specified function to parse the output of the command - in order to modify the current environment. The 'command' can - be a string or a list of strings representing a command and - its arguments. 'Function' is an optional argument that takes - the environment, the output of the command, and the unique flag. - If no function is specified, MergeFlags, which treats the output - as the result of a typical 'X-config' command (i.e. gtk-config), - will merge the output into the appropriate variables. - """ - if function is None: - def parse_conf(env, cmd, unique=unique): - return env.MergeFlags(cmd, unique) - function = parse_conf - if SCons.Util.is_List(command): - command = ' '.join(command) - command = self.subst(command) - return function(self, self.backtick(command)) - - def ParseDepends(self, filename, must_exist=None, only_one=0): - """ - Parse a mkdep-style file for explicit dependencies. 
This is - completely abusable, and should be unnecessary in the "normal" - case of proper SCons configuration, but it may help make - the transition from a Make hierarchy easier for some people - to swallow. It can also be genuinely useful when using a tool - that can write a .d file, but for which writing a scanner would - be too complicated. - """ - filename = self.subst(filename) - try: - with open(filename, 'r') as fp: - lines = SCons.Util.LogicalLines(fp).readlines() - except IOError: - if must_exist: - raise - return - lines = [l for l in lines if l[0] != '#'] - tdlist = [] - for line in lines: - try: - target, depends = line.split(':', 1) - except (AttributeError, ValueError): - # Throws AttributeError if line isn't a string. Can throw - # ValueError if line doesn't split into two or more elements. - pass - else: - tdlist.append((target.split(), depends.split())) - if only_one: - targets = [] - for td in tdlist: - targets.extend(td[0]) - if len(targets) > 1: - raise UserError( - "More than one dependency target found in `%s': %s" - % (filename, targets)) - for target, depends in tdlist: - self.Depends(target, depends) - - def Platform(self, platform): - platform = self.subst(platform) - return SCons.Platform.Platform(platform)(self) - - def Prepend(self, **kw): - """Prepend values to existing construction variables - in an Environment. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - # It would be easier on the eyes to write this using - # "continue" statements whenever we finish processing an item, - # but Python 1.5.2 apparently doesn't let you use "continue" - # within try:-except: blocks, so we have to nest our code. - try: - orig = self._dict[key] - except KeyError: - # No existing variable in the environment, so just set - # it to the new value. - self._dict[key] = val - else: - try: - # Check if the original looks like a dictionary. 
- # If it is, we can't just try adding the value because - # dictionaries don't have __add__() methods, and - # things like UserList will incorrectly coerce the - # original dict to a list (which we don't want). - update_dict = orig.update - except AttributeError: - try: - # Most straightforward: just try to add them - # together. This will work in most cases, when the - # original and new values are of compatible types. - self._dict[key] = val + orig - except (KeyError, TypeError): - try: - # Check if the added value is a list. - add_to_val = val.append - except AttributeError: - # The added value isn't a list, but the - # original is (by process of elimination), - # so insert the the new value in the original - # (if there's one to insert). - if val: - orig.insert(0, val) - else: - # The added value is a list, so append - # the original to it (if there's a value - # to append). - if orig: - add_to_val(orig) - self._dict[key] = val - else: - # The original looks like a dictionary, so update it - # based on what we think the value looks like. - if SCons.Util.is_List(val): - for v in val: - orig[v] = None - else: - try: - update_dict(val) - except (AttributeError, TypeError, ValueError): - if SCons.Util.is_Dict(val): - for k, v in val.items(): - orig[k] = v - else: - orig[val] = None - self.scanner_map_delete(kw) - - def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep, - delete_existing=1): - """Prepend path elements to the path 'name' in the 'ENV' - dictionary for this environment. Will only add any particular - path once, and will normpath and normcase all paths to help - assure this. This can also handle the case where the env - variable is a list instead of a string. - - If delete_existing is 0, a newpath which is already in the path - will not be moved to the front (it will be left where it is). 
- """ - - orig = '' - if envname in self._dict and name in self._dict[envname]: - orig = self._dict[envname][name] - - nv = SCons.Util.PrependPath(orig, newpath, sep, delete_existing, - canonicalize=self._canonicalize) - - if envname not in self._dict: - self._dict[envname] = {} - - self._dict[envname][name] = nv - - def PrependUnique(self, delete_existing=0, **kw): - """Prepend values to existing construction variables - in an Environment, if they're not already there. - If delete_existing is 1, removes existing values first, so - values move to front. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - if SCons.Util.is_List(val): - val = _delete_duplicates(val, not delete_existing) - if key not in self._dict or self._dict[key] in ('', None): - self._dict[key] = val - elif SCons.Util.is_Dict(self._dict[key]) and \ - SCons.Util.is_Dict(val): - self._dict[key].update(val) - elif SCons.Util.is_List(val): - dk = self._dict[key] - if not SCons.Util.is_List(dk): - dk = [dk] - if delete_existing: - dk = [x for x in dk if x not in val] - else: - val = [x for x in val if x not in dk] - self._dict[key] = val + dk - else: - dk = self._dict[key] - if SCons.Util.is_List(dk): - # By elimination, val is not a list. Since dk is a - # list, wrap val in a list first. - if delete_existing: - dk = [x for x in dk if x not in val] - self._dict[key] = [val] + dk - else: - if val not in dk: - self._dict[key] = [val] + dk - else: - if delete_existing: - dk = [x for x in dk if x not in val] - self._dict[key] = val + dk - self.scanner_map_delete(kw) - - def Replace(self, **kw): - """Replace existing construction variables in an Environment - with new construction variables and/or values. 
- """ - try: - kwbd = kw['BUILDERS'] - except KeyError: - pass - else: - kwbd = BuilderDict(kwbd,self) - del kw['BUILDERS'] - self.__setitem__('BUILDERS', kwbd) - kw = copy_non_reserved_keywords(kw) - self._update(semi_deepcopy(kw)) - self.scanner_map_delete(kw) - - def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix): - """ - Replace old_prefix with new_prefix and old_suffix with new_suffix. - - env - Environment used to interpolate variables. - path - the path that will be modified. - old_prefix - construction variable for the old prefix. - old_suffix - construction variable for the old suffix. - new_prefix - construction variable for the new prefix. - new_suffix - construction variable for the new suffix. - """ - old_prefix = self.subst('$'+old_prefix) - old_suffix = self.subst('$'+old_suffix) - - new_prefix = self.subst('$'+new_prefix) - new_suffix = self.subst('$'+new_suffix) - - dir,name = os.path.split(str(path)) - if name[:len(old_prefix)] == old_prefix: - name = name[len(old_prefix):] - if name[-len(old_suffix):] == old_suffix: - name = name[:-len(old_suffix)] - return os.path.join(dir, new_prefix+name+new_suffix) - - def SetDefault(self, **kw): - for k in list(kw.keys()): - if k in self._dict: - del kw[k] - self.Replace(**kw) - - def _find_toolpath_dir(self, tp): - return self.fs.Dir(self.subst(tp)).srcnode().get_abspath() - - def Tool(self, tool, toolpath=None, **kw): - if SCons.Util.is_String(tool): - tool = self.subst(tool) - if toolpath is None: - toolpath = self.get('toolpath', []) - toolpath = list(map(self._find_toolpath_dir, toolpath)) - tool = SCons.Tool.Tool(tool, toolpath, **kw) - tool(self) - - def WhereIs(self, prog, path=None, pathext=None, reject=[]): - """Find prog in the path. 
- """ - if path is None: - try: - path = self['ENV']['PATH'] - except KeyError: - pass - elif SCons.Util.is_String(path): - path = self.subst(path) - if pathext is None: - try: - pathext = self['ENV']['PATHEXT'] - except KeyError: - pass - elif SCons.Util.is_String(pathext): - pathext = self.subst(pathext) - prog = SCons.Util.CLVar(self.subst(prog)) # support "program --with-args" - path = SCons.Util.WhereIs(prog[0], path, pathext, reject) - if path: return path - return None - - ####################################################################### - # Public methods for doing real "SCons stuff" (manipulating - # dependencies, setting attributes on targets, etc.). These begin - # with upper-case letters. The essential characteristic of methods - # in this section is that they all *should* have corresponding - # same-named global functions. - ####################################################################### - - def Action(self, *args, **kw): - def subst_string(a, self=self): - if SCons.Util.is_String(a): - a = self.subst(a) - return a - nargs = list(map(subst_string, args)) - nkw = self.subst_kw(kw) - return SCons.Action.Action(*nargs, **nkw) - - def AddPreAction(self, files, action): - nodes = self.arg2nodes(files, self.fs.Entry) - action = SCons.Action.Action(action) - uniq = {} - for executor in [n.get_executor() for n in nodes]: - uniq[executor] = 1 - for executor in list(uniq.keys()): - executor.add_pre_action(action) - return nodes - - def AddPostAction(self, files, action): - nodes = self.arg2nodes(files, self.fs.Entry) - action = SCons.Action.Action(action) - uniq = {} - for executor in [n.get_executor() for n in nodes]: - uniq[executor] = 1 - for executor in list(uniq.keys()): - executor.add_post_action(action) - return nodes - - def Alias(self, target, source=[], action=None, **kw): - tlist = self.arg2nodes(target, self.ans.Alias) - if not SCons.Util.is_List(source): - source = [source] - source = [_f for _f in source if _f] - - if not action: - if 
not source: - # There are no source files and no action, so just - # return a target list of classic Alias Nodes, without - # any builder. The externally visible effect is that - # this will make the wrapping Script.BuildTask class - # say that there's "Nothing to be done" for this Alias, - # instead of that it's "up to date." - return tlist - - # No action, but there are sources. Re-call all the target - # builders to add the sources to each target. - result = [] - for t in tlist: - bld = t.get_builder(AliasBuilder) - result.extend(bld(self, t, source)) - return result - - nkw = self.subst_kw(kw) - nkw.update({ - 'action' : SCons.Action.Action(action), - 'source_factory' : self.fs.Entry, - 'multi' : 1, - 'is_explicit' : None, - }) - bld = SCons.Builder.Builder(**nkw) - - # Apply the Builder separately to each target so that the Aliases - # stay separate. If we did one "normal" Builder call with the - # whole target list, then all of the target Aliases would be - # associated under a single Executor. - result = [] - for t in tlist: - # Calling the convert() method will cause a new Executor to be - # created from scratch, so we have to explicitly initialize - # it with the target's existing sources, plus our new ones, - # so nothing gets lost. 
- b = t.get_builder() - if b is None or b is AliasBuilder: - b = bld - else: - nkw['action'] = b.action + action - b = SCons.Builder.Builder(**nkw) - t.convert() - result.extend(b(self, t, t.sources + source)) - return result - - def AlwaysBuild(self, *targets): - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_always_build() - return tlist - - def Builder(self, **kw): - nkw = self.subst_kw(kw) - return SCons.Builder.Builder(**nkw) - - def CacheDir(self, path, custom_class=None): - import SCons.CacheDir - if path is not None: - path = self.subst(path) - - if custom_class: - self['CACHEDIR_CLASS'] = self.validate_CacheDir_class(custom_class) - - self._CacheDir_path = path - - def Clean(self, targets, files): - global CleanTargets - tlist = self.arg2nodes(targets, self.fs.Entry) - flist = self.arg2nodes(files, self.fs.Entry) - for t in tlist: - try: - CleanTargets[t].extend(flist) - except KeyError: - CleanTargets[t] = flist - - def Configure(self, *args, **kw): - nargs = [self] - if args: - nargs = nargs + self.subst_list(args)[0] - nkw = self.subst_kw(kw) - nkw['_depth'] = kw.get('_depth', 0) + 1 - try: - nkw['custom_tests'] = self.subst_kw(nkw['custom_tests']) - except KeyError: - pass - return SCons.SConf.SConf(*nargs, **nkw) - - def Command(self, target, source, action, **kw): - """Builds the supplied target files from the supplied - source files using the supplied action. 
Action may - be any type that the Builder constructor will accept - for an action.""" - bkw = { - 'action': action, - 'target_factory': self.fs.Entry, - 'source_factory': self.fs.Entry, - } - # source scanner - try: - bkw['source_scanner'] = kw['source_scanner'] - except KeyError: - pass - else: - del kw['source_scanner'] - - # target scanner - try: - bkw['target_scanner'] = kw['target_scanner'] - except KeyError: - pass - else: - del kw['target_scanner'] - - # source factory - try: - bkw['source_factory'] = kw['source_factory'] - except KeyError: - pass - else: - del kw['source_factory'] - - # target factory - try: - bkw['target_factory'] = kw['target_factory'] - except KeyError: - pass - else: - del kw['target_factory'] - - bld = SCons.Builder.Builder(**bkw) - return bld(self, target, source, **kw) - - def Depends(self, target, dependency): - """Explicity specify that 'target's depend on 'dependency'.""" - tlist = self.arg2nodes(target, self.fs.Entry) - dlist = self.arg2nodes(dependency, self.fs.Entry) - for t in tlist: - t.add_dependency(dlist) - return tlist - - def Dir(self, name, *args, **kw): - """ - """ - s = self.subst(name) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(self.fs.Dir(e, *args, **kw)) - return result - return self.fs.Dir(s, *args, **kw) - - def PyPackageDir(self, modulename): - s = self.subst(modulename) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(self.fs.PyPackageDir(e)) - return result - return self.fs.PyPackageDir(s) - - def NoClean(self, *targets): - """Tags a target so that it will not be cleaned by -c""" - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_noclean() - return tlist - - def NoCache(self, *targets): - """Tags a target so that it will not be cached""" - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_nocache() - return tlist - - def Entry(self, name, 
*args, **kw): - """ - """ - s = self.subst(name) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(self.fs.Entry(e, *args, **kw)) - return result - return self.fs.Entry(s, *args, **kw) - - def Environment(self, **kw): - return SCons.Environment.Environment(**self.subst_kw(kw)) - - def Execute(self, action, *args, **kw): - """Directly execute an action through an Environment - """ - action = self.Action(action, *args, **kw) - result = action([], [], self) - if isinstance(result, BuildError): - errstr = result.errstr - if result.filename: - errstr = result.filename + ': ' + errstr - sys.stderr.write("scons: *** %s\n" % errstr) - return result.status - else: - return result - - def File(self, name, *args, **kw): - """ - """ - s = self.subst(name) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(self.fs.File(e, *args, **kw)) - return result - return self.fs.File(s, *args, **kw) - - def FindFile(self, file, dirs): - file = self.subst(file) - nodes = self.arg2nodes(dirs, self.fs.Dir) - return SCons.Node.FS.find_file(file, tuple(nodes)) - - def Flatten(self, sequence): - return SCons.Util.flatten(sequence) - - def GetBuildPath(self, files): - result = list(map(str, self.arg2nodes(files, self.fs.Entry))) - if SCons.Util.is_List(files): - return result - else: - return result[0] - - def Glob(self, pattern, ondisk=True, source=False, strings=False, exclude=None): - return self.fs.Glob(self.subst(pattern), ondisk, source, strings, exclude) - - def Ignore(self, target, dependency): - """Ignore a dependency.""" - tlist = self.arg2nodes(target, self.fs.Entry) - dlist = self.arg2nodes(dependency, self.fs.Entry) - for t in tlist: - t.add_ignore(dlist) - return tlist - - def Literal(self, string): - return SCons.Subst.Literal(string) - - def Local(self, *targets): - ret = [] - for targ in targets: - if isinstance(targ, SCons.Node.Node): - targ.set_local() - ret.append(targ) - else: - for t in self.arg2nodes(targ, self.fs.Entry): - 
t.set_local() - ret.append(t) - return ret - - def Precious(self, *targets): - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_precious() - return tlist - - def Pseudo(self, *targets): - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_pseudo() - return tlist - - def Repository(self, *dirs, **kw): - dirs = self.arg2nodes(list(dirs), self.fs.Dir) - self.fs.Repository(*dirs, **kw) - - def Requires(self, target, prerequisite): - """Specify that 'prerequisite' must be built before 'target', - (but 'target' does not actually depend on 'prerequisite' - and need not be rebuilt if it changes).""" - tlist = self.arg2nodes(target, self.fs.Entry) - plist = self.arg2nodes(prerequisite, self.fs.Entry) - for t in tlist: - t.add_prerequisite(plist) - return tlist - - def Scanner(self, *args, **kw): - nargs = [] - for arg in args: - if SCons.Util.is_String(arg): - arg = self.subst(arg) - nargs.append(arg) - nkw = self.subst_kw(kw) - return SCons.Scanner.Base(*nargs, **nkw) - - def SConsignFile(self, name=".sconsign", dbm_module=None): - if name is not None: - name = self.subst(name) - if not os.path.isabs(name): - name = os.path.join(str(self.fs.SConstruct_dir), name) - if name: - name = os.path.normpath(name) - sconsign_dir = os.path.dirname(name) - if sconsign_dir and not os.path.exists(sconsign_dir): - self.Execute(SCons.Defaults.Mkdir(sconsign_dir)) - SCons.SConsign.File(name, dbm_module) - - def SideEffect(self, side_effect, target): - """Tell scons that side_effects are built as side - effects of building targets.""" - side_effects = self.arg2nodes(side_effect, self.fs.Entry) - targets = self.arg2nodes(target, self.fs.Entry) - - for side_effect in side_effects: - if side_effect.multiple_side_effect_has_builder(): - raise UserError("Multiple ways to build the same target were specified for: %s" % str(side_effect)) - side_effect.add_source(targets) - 
side_effect.side_effect = 1 - self.Precious(side_effect) - for target in targets: - target.side_effects.append(side_effect) - return side_effects - - def SourceCode(self, entry, builder): - """Arrange for a source code builder for (part of) a tree.""" - msg = """SourceCode() has been deprecated and there is no replacement. -\tIf you need this function, please contact scons-dev@scons.org""" - SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceCodeWarning, msg) - entries = self.arg2nodes(entry, self.fs.Entry) - for entry in entries: - entry.set_src_builder(builder) - return entries - - def Split(self, arg): - """This function converts a string or list into a list of strings - or Nodes. This makes things easier for users by allowing files to - be specified as a white-space separated list to be split. - - The input rules are: - - A single string containing names separated by spaces. These will be - split apart at the spaces. - - A single Node instance - - A list containing either strings or Node instances. Any strings - in the list are not split at spaces. - - In all cases, the function returns a list of Nodes and strings.""" - - if SCons.Util.is_List(arg): - return list(map(self.subst, arg)) - elif SCons.Util.is_String(arg): - return self.subst(arg).split() - else: - return [self.subst(arg)] - - def Value(self, value, built_value=None): - """ - """ - return SCons.Node.Python.Value(value, built_value) - - def VariantDir(self, variant_dir, src_dir, duplicate=1): - variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0] - src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0] - self.fs.VariantDir(variant_dir, src_dir, duplicate) - - def FindSourceFiles(self, node='.'): - """ returns a list of all source files. 
- """ - node = self.arg2nodes(node, self.fs.Entry)[0] - - sources = [] - def build_source(ss): - for s in ss: - if isinstance(s, SCons.Node.FS.Dir): - build_source(s.all_children()) - elif s.has_builder(): - build_source(s.sources) - elif isinstance(s.disambiguate(), SCons.Node.FS.File): - sources.append(s) - build_source(node.all_children()) - - def final_source(node): - while (node != node.srcnode()): - node = node.srcnode() - return node - sources = list(map(final_source, sources)) - # remove duplicates - return list(set(sources)) - - def FindInstalledFiles(self): - """ returns the list of all targets of the Install and InstallAs Builder. - """ - from SCons.Tool import install - if install._UNIQUE_INSTALLED_FILES is None: - install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(install._INSTALLED_FILES) - return install._UNIQUE_INSTALLED_FILES - - -class OverrideEnvironment(Base): - """A proxy that overrides variables in a wrapped construction - environment by returning values from an overrides dictionary in - preference to values from the underlying subject environment. - - This is a lightweight (I hope) proxy that passes through most use of - attributes to the underlying Environment.Base class, but has just - enough additional methods defined to act like a real construction - environment with overridden values. It can wrap either a Base - construction environment, or another OverrideEnvironment, which - can in turn nest arbitrary OverrideEnvironments... - - Note that we do *not* call the underlying base class - (SubsitutionEnvironment) initialization, because we get most of those - from proxying the attributes of the subject construction environment. - But because we subclass SubstitutionEnvironment, this class also - has inherited arg2nodes() and subst*() methods; those methods can't - be proxied because they need *this* object's methods to fetch the - values from the overrides dictionary. 
- """ - - def __init__(self, subject, overrides={}): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.OverrideEnvironment') - self.__dict__['__subject'] = subject - self.__dict__['overrides'] = overrides - - # Methods that make this class act like a proxy. - def __getattr__(self, name): - attr = getattr(self.__dict__['__subject'], name) - # Here we check if attr is one of the Wrapper classes. For - # example when a pseudo-builder is being called from an - # OverrideEnvironment. - # - # These wrappers when they're constructed capture the - # Environment they are being constructed with and so will not - # have access to overrided values. So we rebuild them with the - # OverrideEnvironment so they have access to overrided values. - if isinstance(attr, (MethodWrapper, BuilderWrapper)): - return attr.clone(self) - else: - return attr - - def __setattr__(self, name, value): - setattr(self.__dict__['__subject'], name, value) - - # Methods that make this class act like a dictionary. 
- def __getitem__(self, key): - try: - return self.__dict__['overrides'][key] - except KeyError: - return self.__dict__['__subject'].__getitem__(key) - def __setitem__(self, key, value): - if not is_valid_construction_var(key): - raise UserError("Illegal construction variable `%s'" % key) - self.__dict__['overrides'][key] = value - def __delitem__(self, key): - try: - del self.__dict__['overrides'][key] - except KeyError: - deleted = 0 - else: - deleted = 1 - try: - result = self.__dict__['__subject'].__delitem__(key) - except KeyError: - if not deleted: - raise - result = None - return result - def get(self, key, default=None): - """Emulates the get() method of dictionaries.""" - try: - return self.__dict__['overrides'][key] - except KeyError: - return self.__dict__['__subject'].get(key, default) - def has_key(self, key): - try: - self.__dict__['overrides'][key] - return 1 - except KeyError: - return key in self.__dict__['__subject'] - def __contains__(self, key): - if self.__dict__['overrides'].__contains__(key): - return 1 - return self.__dict__['__subject'].__contains__(key) - def Dictionary(self): - """Emulates the items() method of dictionaries.""" - d = self.__dict__['__subject'].Dictionary().copy() - d.update(self.__dict__['overrides']) - return d - def items(self): - """Emulates the items() method of dictionaries.""" - return list(self.Dictionary().items()) - - # Overridden private construction environment methods. - def _update(self, dict): - """Update an environment's values directly, bypassing the normal - checks that occur when users try to set items. - """ - self.__dict__['overrides'].update(dict) - - def gvars(self): - return self.__dict__['__subject'].gvars() - - def lvars(self): - lvars = self.__dict__['__subject'].lvars() - lvars.update(self.__dict__['overrides']) - return lvars - - # Overridden public construction environment methods. 
- def Replace(self, **kw): - kw = copy_non_reserved_keywords(kw) - self.__dict__['overrides'].update(semi_deepcopy(kw)) - - -# The entry point that will be used by the external world -# to refer to a construction environment. This allows the wrapper -# interface to extend a construction environment for its own purposes -# by subclassing SCons.Environment.Base and then assigning the -# class to SCons.Environment.Environment. - -Environment = Base - - -def NoSubstitutionProxy(subject): - """ - An entry point for returning a proxy subclass instance that overrides - the subst*() methods so they don't actually perform construction - variable substitution. This is specifically intended to be the shim - layer in between global function calls (which don't want construction - variable substitution) and the DefaultEnvironment() (which would - substitute variables if left to its own devices). - - We have to wrap this in a function that allows us to delay definition of - the class until it's necessary, so that when it subclasses Environment - it will pick up whatever Environment subclass the wrapper interface - might have assigned to SCons.Environment.Environment. 
- """ - class _NoSubstitutionProxy(Environment): - def __init__(self, subject): - self.__dict__['__subject'] = subject - def __getattr__(self, name): - return getattr(self.__dict__['__subject'], name) - def __setattr__(self, name, value): - return setattr(self.__dict__['__subject'], name, value) - def executor_to_lvars(self, kwdict): - if 'executor' in kwdict: - kwdict['lvars'] = kwdict['executor'].get_lvars() - del kwdict['executor'] - else: - kwdict['lvars'] = {} - def raw_to_mode(self, dict): - try: - raw = dict['raw'] - except KeyError: - pass - else: - del dict['raw'] - dict['mode'] = raw - def subst(self, string, *args, **kwargs): - return string - def subst_kw(self, kw, *args, **kwargs): - return kw - def subst_list(self, string, *args, **kwargs): - nargs = (string, self,) + args - nkw = kwargs.copy() - nkw['gvars'] = {} - self.executor_to_lvars(nkw) - self.raw_to_mode(nkw) - return SCons.Subst.scons_subst_list(*nargs, **nkw) - def subst_target_source(self, string, *args, **kwargs): - nargs = (string, self,) + args - nkw = kwargs.copy() - nkw['gvars'] = {} - self.executor_to_lvars(nkw) - self.raw_to_mode(nkw) - return SCons.Subst.scons_subst(*nargs, **nkw) - return _NoSubstitutionProxy(subject) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Errors.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Errors.py deleted file mode 100644 index 0bad46d151b..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Errors.py +++ /dev/null @@ -1,228 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, 
and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -"""SCons.Errors - -This file contains the exception classes used to handle internal -and user errors in SCons. - -""" - -__revision__ = "src/engine/SCons/Errors.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import shutil -import SCons.Util - - -class BuildError(Exception): - """ Errors occurring while building. - - BuildError have the following attributes: - ========================================= - - Information about the cause of the build error: - ----------------------------------------------- - - errstr : a description of the error message - - status : the return code of the action that caused the build error. - Must be set to a non-zero value even if the build error is not due - to an action returning a non-zero returned code. - - exitstatus : SCons exit status due to this build error. - Must be nonzero unless due to an explicit Exit() - call. Not always the same as status, since - actions return a status code that should be - respected, but SCons typically exits with 2 - irrespective of the return value of the failed - action. - - filename : The name of the file or directory that caused the - build error. Set to None if no files are associated with - this error. 
This might be different from the target - being built. For example, failure to create the - directory in which the target file will appear. It - can be None if the error is not due to a particular - filename. - - exc_info : Info about exception that caused the build - error. Set to (None, None, None) if this build - error is not due to an exception. - - - Information about the cause of the location of the error: - --------------------------------------------------------- - - node : the error occured while building this target node(s) - - executor : the executor that caused the build to fail (might - be None if the build failures is not due to the - executor failing) - - action : the action that caused the build to fail (might be - None if the build failures is not due to the an - action failure) - - command : the command line for the action that caused the - build to fail (might be None if the build failures - is not due to the an action failure) - """ - - def __init__(self, - node=None, errstr="Unknown error", status=2, exitstatus=2, - filename=None, executor=None, action=None, command=None, - exc_info=(None, None, None)): - - # py3: errstr should be string and not bytes. 
- - self.errstr = SCons.Util.to_String(errstr) - self.status = status - self.exitstatus = exitstatus - self.filename = filename - self.exc_info = exc_info - - self.node = node - self.executor = executor - self.action = action - self.command = command - - Exception.__init__(self, node, errstr, status, exitstatus, filename, - executor, action, command, exc_info) - - def __str__(self): - if self.filename: - return self.filename + ': ' + self.errstr - else: - return self.errstr - -class InternalError(Exception): - pass - -class UserError(Exception): - pass - -class StopError(Exception): - pass - -class SConsEnvironmentError(Exception): - pass - -class MSVCError(IOError): - pass - -class ExplicitExit(Exception): - def __init__(self, node=None, status=None, *args): - self.node = node - self.status = status - self.exitstatus = status - Exception.__init__(self, *args) - -def convert_to_BuildError(status, exc_info=None): - """ - Convert any return code a BuildError Exception. - - :Parameters: - - `status`: can either be a return code or an Exception. - - The buildError.status we set here will normally be - used as the exit status of the "scons" process. 
- """ - - if not exc_info and isinstance(status, Exception): - exc_info = (status.__class__, status, None) - - - if isinstance(status, BuildError): - buildError = status - buildError.exitstatus = 2 # always exit with 2 on build errors - elif isinstance(status, ExplicitExit): - status = status.status - errstr = 'Explicit exit, status %s' % status - buildError = BuildError( - errstr=errstr, - status=status, # might be 0, OK here - exitstatus=status, # might be 0, OK here - exc_info=exc_info) - elif isinstance(status, (StopError, UserError)): - buildError = BuildError( - errstr=str(status), - status=2, - exitstatus=2, - exc_info=exc_info) - elif isinstance(status, shutil.SameFileError): - # PY3 has a exception for when copying file to itself - # It's object provides info differently than below - try: - filename = status.filename - except AttributeError: - filename = None - - buildError = BuildError( - errstr=status.args[0], - status=status.errno, - exitstatus=2, - filename=filename, - exc_info=exc_info) - - elif isinstance(status, (SConsEnvironmentError, OSError, IOError)): - # If an IOError/OSError happens, raise a BuildError. - # Report the name of the file or directory that caused the - # error, which might be different from the target being built - # (for example, failure to create the directory in which the - # target file will appear). 
- filename = getattr(status, 'filename', None) - strerror = getattr(status, 'strerror', str(status)) - errno = getattr(status, 'errno', 2) - - buildError = BuildError( - errstr=strerror, - status=errno, - exitstatus=2, - filename=filename, - exc_info=exc_info) - elif isinstance(status, Exception): - buildError = BuildError( - errstr='%s : %s' % (status.__class__.__name__, status), - status=2, - exitstatus=2, - exc_info=exc_info) - elif SCons.Util.is_String(status): - buildError = BuildError( - errstr=status, - status=2, - exitstatus=2) - else: - buildError = BuildError( - errstr="Error %s" % status, - status=status, - exitstatus=2) - - #import sys - #sys.stderr.write("convert_to_BuildError: status %s => (errstr %s, status %s)\n"%(status,buildError.errstr, buildError.status)) - return buildError - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Executor.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Executor.py deleted file mode 100644 index c8e1850a314..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Executor.py +++ /dev/null @@ -1,673 +0,0 @@ -"""SCons.Executor - -A module for executing actions with specific lists of target and source -Nodes. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from __future__ import print_function - -__revision__ = "src/engine/SCons/Executor.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import collections - -import SCons.Debug -from SCons.Debug import logInstanceCreation -import SCons.Errors -import SCons.Memoize -import SCons.Util -from SCons.compat import with_metaclass, NoSlotsPyPy - -class Batch(object): - """Remembers exact association between targets - and sources of executor.""" - - __slots__ = ('targets', - 'sources') - - def __init__(self, targets=[], sources=[]): - self.targets = targets - self.sources = sources - - - -class TSList(collections.UserList): - """A class that implements $TARGETS or $SOURCES expansions by wrapping - an executor Method. This class is used in the Executor.lvars() - to delay creation of NodeList objects until they're needed. - - Note that we subclass collections.UserList purely so that the - is_Sequence() function will identify an object of this class as - a list during variable expansion. We're not really using any - collections.UserList methods in practice. 
- """ - def __init__(self, func): - self.func = func - def __getattr__(self, attr): - nl = self.func() - return getattr(nl, attr) - def __getitem__(self, i): - nl = self.func() - return nl[i] - def __getslice__(self, i, j): - nl = self.func() - i, j = max(i, 0), max(j, 0) - return nl[i:j] - def __str__(self): - nl = self.func() - return str(nl) - def __repr__(self): - nl = self.func() - return repr(nl) - -class TSObject(object): - """A class that implements $TARGET or $SOURCE expansions by wrapping - an Executor method. - """ - def __init__(self, func): - self.func = func - def __getattr__(self, attr): - n = self.func() - return getattr(n, attr) - def __str__(self): - n = self.func() - if n: - return str(n) - return '' - def __repr__(self): - n = self.func() - if n: - return repr(n) - return '' - -def rfile(node): - """ - A function to return the results of a Node's rfile() method, - if it exists, and the Node itself otherwise (if it's a Value - Node, e.g.). - """ - try: - rfile = node.rfile - except AttributeError: - return node - else: - return rfile() - - -def execute_nothing(obj, target, kw): - return 0 - -def execute_action_list(obj, target, kw): - """Actually execute the action list.""" - env = obj.get_build_env() - kw = obj.get_kw(kw) - status = 0 - for act in obj.get_action_list(): - args = ([], [], env) - status = act(*args, **kw) - if isinstance(status, SCons.Errors.BuildError): - status.executor = obj - raise status # TODO pylint E0702: raising int not allowed - elif status: - msg = "Error %s" % status - raise SCons.Errors.BuildError( - errstr=msg, - node=obj.batches[0].targets, - executor=obj, - action=act) - return status - -_do_execute_map = {0 : execute_nothing, - 1 : execute_action_list} - - -def execute_actions_str(obj): - env = obj.get_build_env() - return "\n".join([action.genstring(obj.get_all_targets(), - obj.get_all_sources(), - env) - for action in obj.get_action_list()]) - -def execute_null_str(obj): - return '' - -_execute_str_map = {0 : 
execute_null_str, - 1 : execute_actions_str} - - -class Executor(object, with_metaclass(NoSlotsPyPy)): - """A class for controlling instances of executing an action. - - This largely exists to hold a single association of an action, - environment, list of environment override dictionaries, targets - and sources for later processing as needed. - """ - - __slots__ = ('pre_actions', - 'post_actions', - 'env', - 'overridelist', - 'batches', - 'builder_kw', - '_memo', - 'lvars', - '_changed_sources_list', - '_changed_targets_list', - '_unchanged_sources_list', - '_unchanged_targets_list', - 'action_list', - '_do_execute', - '_execute_str') - - def __init__(self, action, env=None, overridelist=[{}], - targets=[], sources=[], builder_kw={}): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Executor.Executor') - self.set_action_list(action) - self.pre_actions = [] - self.post_actions = [] - self.env = env - self.overridelist = overridelist - if targets or sources: - self.batches = [Batch(targets[:], sources[:])] - else: - self.batches = [] - self.builder_kw = builder_kw - self._do_execute = 1 - self._execute_str = 1 - self._memo = {} - - def get_lvars(self): - try: - return self.lvars - except AttributeError: - self.lvars = { - 'CHANGED_SOURCES' : TSList(self._get_changed_sources), - 'CHANGED_TARGETS' : TSList(self._get_changed_targets), - 'SOURCE' : TSObject(self._get_source), - 'SOURCES' : TSList(self._get_sources), - 'TARGET' : TSObject(self._get_target), - 'TARGETS' : TSList(self._get_targets), - 'UNCHANGED_SOURCES' : TSList(self._get_unchanged_sources), - 'UNCHANGED_TARGETS' : TSList(self._get_unchanged_targets), - } - return self.lvars - - def _get_changes(self): - cs = [] - ct = [] - us = [] - ut = [] - for b in self.batches: - # don't add targets marked always build to unchanged lists - # add to changed list as they always need to build - if not b.targets[0].always_build and b.targets[0].is_up_to_date(): - us.extend(list(map(rfile, b.sources))) - 
ut.extend(b.targets) - else: - cs.extend(list(map(rfile, b.sources))) - ct.extend(b.targets) - self._changed_sources_list = SCons.Util.NodeList(cs) - self._changed_targets_list = SCons.Util.NodeList(ct) - self._unchanged_sources_list = SCons.Util.NodeList(us) - self._unchanged_targets_list = SCons.Util.NodeList(ut) - - def _get_changed_sources(self, *args, **kw): - try: - return self._changed_sources_list - except AttributeError: - self._get_changes() - return self._changed_sources_list - - def _get_changed_targets(self, *args, **kw): - try: - return self._changed_targets_list - except AttributeError: - self._get_changes() - return self._changed_targets_list - - def _get_source(self, *args, **kw): - return rfile(self.batches[0].sources[0]).get_subst_proxy() - - def _get_sources(self, *args, **kw): - return SCons.Util.NodeList([rfile(n).get_subst_proxy() for n in self.get_all_sources()]) - - def _get_target(self, *args, **kw): - return self.batches[0].targets[0].get_subst_proxy() - - def _get_targets(self, *args, **kw): - return SCons.Util.NodeList([n.get_subst_proxy() for n in self.get_all_targets()]) - - def _get_unchanged_sources(self, *args, **kw): - try: - return self._unchanged_sources_list - except AttributeError: - self._get_changes() - return self._unchanged_sources_list - - def _get_unchanged_targets(self, *args, **kw): - try: - return self._unchanged_targets_list - except AttributeError: - self._get_changes() - return self._unchanged_targets_list - - def get_action_targets(self): - if not self.action_list: - return [] - targets_string = self.action_list[0].get_targets(self.env, self) - if targets_string[0] == '$': - targets_string = targets_string[1:] - return self.get_lvars()[targets_string] - - def set_action_list(self, action): - import SCons.Util - if not SCons.Util.is_List(action): - if not action: - import SCons.Errors - raise SCons.Errors.UserError("Executor must have an action.") - action = [action] - self.action_list = action - - def 
get_action_list(self): - if self.action_list is None: - return [] - return self.pre_actions + self.action_list + self.post_actions - - def get_all_targets(self): - """Returns all targets for all batches of this Executor.""" - result = [] - for batch in self.batches: - result.extend(batch.targets) - return result - - def get_all_sources(self): - """Returns all sources for all batches of this Executor.""" - result = [] - for batch in self.batches: - result.extend(batch.sources) - return result - - def get_all_children(self): - """Returns all unique children (dependencies) for all batches - of this Executor. - - The Taskmaster can recognize when it's already evaluated a - Node, so we don't have to make this list unique for its intended - canonical use case, but we expect there to be a lot of redundancy - (long lists of batched .cc files #including the same .h files - over and over), so removing the duplicates once up front should - save the Taskmaster a lot of work. - """ - result = [] - for target in self.get_all_targets(): - result.extend(target.children()) - return SCons.Util.uniquer_hashables(result) - - def get_all_prerequisites(self): - """Returns all unique (order-only) prerequisites for all batches - of this Executor. - """ - result = [] - for target in self.get_all_targets(): - if target.prerequisites is not None: - result.extend(target.prerequisites) - return SCons.Util.uniquer_hashables(result) - - def get_action_side_effects(self): - - """Returns all side effects for all batches of this - Executor used by the underlying Action. - """ - result = [] - for target in self.get_action_targets(): - result.extend(target.side_effects) - return SCons.Util.uniquer_hashables(result) - - @SCons.Memoize.CountMethodCall - def get_build_env(self): - """Fetch or create the appropriate build Environment - for this Executor. - """ - try: - return self._memo['get_build_env'] - except KeyError: - pass - - # Create the build environment instance with appropriate - # overrides. 
These get evaluated against the current - # environment's construction variables so that users can - # add to existing values by referencing the variable in - # the expansion. - overrides = {} - for odict in self.overridelist: - overrides.update(odict) - - import SCons.Defaults - env = self.env or SCons.Defaults.DefaultEnvironment() - build_env = env.Override(overrides) - - self._memo['get_build_env'] = build_env - - return build_env - - def get_build_scanner_path(self, scanner): - """Fetch the scanner path for this executor's targets and sources. - """ - env = self.get_build_env() - try: - cwd = self.batches[0].targets[0].cwd - except (IndexError, AttributeError): - cwd = None - return scanner.path(env, cwd, - self.get_all_targets(), - self.get_all_sources()) - - def get_kw(self, kw={}): - result = self.builder_kw.copy() - result.update(kw) - result['executor'] = self - return result - - # use extra indirection because with new-style objects (Python 2.2 - # and above) we can't override special methods, and nullify() needs - # to be able to do this. - - def __call__(self, target, **kw): - return _do_execute_map[self._do_execute](self, target, kw) - - def cleanup(self): - self._memo = {} - - def add_sources(self, sources): - """Add source files to this Executor's list. This is necessary - for "multi" Builders that can be called repeatedly to build up - a source file list for a given target.""" - # TODO(batch): extend to multiple batches - assert (len(self.batches) == 1) - # TODO(batch): remove duplicates? - sources = [x for x in sources if x not in self.batches[0].sources] - self.batches[0].sources.extend(sources) - - def get_sources(self): - return self.batches[0].sources - - def add_batch(self, targets, sources): - """Add pair of associated target and source to this Executor's list. 
- This is necessary for "batch" Builders that can be called repeatedly - to build up a list of matching target and source files that will be - used in order to update multiple target files at once from multiple - corresponding source files, for tools like MSVC that support it.""" - self.batches.append(Batch(targets, sources)) - - def prepare(self): - """ - Preparatory checks for whether this Executor can go ahead - and (try to) build its targets. - """ - for s in self.get_all_sources(): - if s.missing(): - msg = "Source `%s' not found, needed by target `%s'." - raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0])) - - def add_pre_action(self, action): - self.pre_actions.append(action) - - def add_post_action(self, action): - self.post_actions.append(action) - - # another extra indirection for new-style objects and nullify... - - def __str__(self): - return _execute_str_map[self._execute_str](self) - - def nullify(self): - self.cleanup() - self._do_execute = 0 - self._execute_str = 0 - - @SCons.Memoize.CountMethodCall - def get_contents(self): - """Fetch the signature contents. This is the main reason this - class exists, so we can compute this once and cache it regardless - of how many target or source Nodes there are. - - Returns bytes - """ - try: - return self._memo['get_contents'] - except KeyError: - pass - env = self.get_build_env() - - action_list = self.get_action_list() - all_targets = self.get_all_targets() - all_sources = self.get_all_sources() - - result = bytearray("",'utf-8').join([action.get_contents(all_targets, - all_sources, - env) - for action in action_list]) - - self._memo['get_contents'] = result - return result - - def get_timestamp(self): - """Fetch a time stamp for this Executor. We don't have one, of - course (only files do), but this is the interface used by the - timestamp module. 
- """ - return 0 - - def scan_targets(self, scanner): - # TODO(batch): scan by batches - self.scan(scanner, self.get_all_targets()) - - def scan_sources(self, scanner): - # TODO(batch): scan by batches - if self.batches[0].sources: - self.scan(scanner, self.get_all_sources()) - - def scan(self, scanner, node_list): - """Scan a list of this Executor's files (targets or sources) for - implicit dependencies and update all of the targets with them. - This essentially short-circuits an N*M scan of the sources for - each individual target, which is a hell of a lot more efficient. - """ - env = self.get_build_env() - path = self.get_build_scanner_path - kw = self.get_kw() - - # TODO(batch): scan by batches) - deps = [] - - for node in node_list: - node.disambiguate() - deps.extend(node.get_implicit_deps(env, scanner, path, kw)) - - deps.extend(self.get_implicit_deps()) - - for tgt in self.get_all_targets(): - tgt.add_to_implicit(deps) - - def _get_unignored_sources_key(self, node, ignore=()): - return (node,) + tuple(ignore) - - @SCons.Memoize.CountDictCall(_get_unignored_sources_key) - def get_unignored_sources(self, node, ignore=()): - key = (node,) + tuple(ignore) - try: - memo_dict = self._memo['get_unignored_sources'] - except KeyError: - memo_dict = {} - self._memo['get_unignored_sources'] = memo_dict - else: - try: - return memo_dict[key] - except KeyError: - pass - - if node: - # TODO: better way to do this (it's a linear search, - # but it may not be critical path)? - sourcelist = [] - for b in self.batches: - if node in b.targets: - sourcelist = b.sources - break - else: - sourcelist = self.get_all_sources() - if ignore: - idict = {} - for i in ignore: - idict[i] = 1 - sourcelist = [s for s in sourcelist if s not in idict] - - memo_dict[key] = sourcelist - - return sourcelist - - def get_implicit_deps(self): - """Return the executor's implicit dependencies, i.e. 
the nodes of - the commands to be executed.""" - result = [] - build_env = self.get_build_env() - for act in self.get_action_list(): - deps = act.get_implicit_deps(self.get_all_targets(), - self.get_all_sources(), - build_env) - result.extend(deps) - return result - - - -_batch_executors = {} - -def GetBatchExecutor(key): - return _batch_executors[key] - -def AddBatchExecutor(key, executor): - assert key not in _batch_executors - _batch_executors[key] = executor - -nullenv = None - - -class NullEnvironment(SCons.Util.Null): - import SCons.CacheDir - _CacheDir_path = None - _CacheDir = SCons.CacheDir.CacheDir(None) - def get_CacheDir(self): - return self._CacheDir - - -def get_NullEnvironment(): - """Use singleton pattern for Null Environments.""" - global nullenv - - if nullenv is None: - nullenv = NullEnvironment() - return nullenv - -class Null(object, with_metaclass(NoSlotsPyPy)): - """A null Executor, with a null build Environment, that does - nothing when the rest of the methods call it. - - This might be able to disappear when we refactor things to - disassociate Builders from Nodes entirely, so we're not - going to worry about unit tests for this--at least for now. 
- """ - - __slots__ = ('pre_actions', - 'post_actions', - 'env', - 'overridelist', - 'batches', - 'builder_kw', - '_memo', - 'lvars', - '_changed_sources_list', - '_changed_targets_list', - '_unchanged_sources_list', - '_unchanged_targets_list', - 'action_list', - '_do_execute', - '_execute_str') - - def __init__(self, *args, **kw): - if SCons.Debug.track_instances: - logInstanceCreation(self, 'Executor.Null') - self.batches = [Batch(kw['targets'][:], [])] - def get_build_env(self): - return get_NullEnvironment() - def get_build_scanner_path(self): - return None - def cleanup(self): - pass - def prepare(self): - pass - def get_unignored_sources(self, *args, **kw): - return tuple(()) - def get_action_targets(self): - return [] - def get_action_list(self): - return [] - def get_all_targets(self): - return self.batches[0].targets - def get_all_sources(self): - return self.batches[0].targets[0].sources - def get_all_children(self): - return self.batches[0].targets[0].children() - def get_all_prerequisites(self): - return [] - def get_action_side_effects(self): - return [] - def __call__(self, *args, **kw): - return 0 - def get_contents(self): - return '' - def _morph(self): - """Morph this Null executor to a real Executor object.""" - batches = self.batches - self.__class__ = Executor - self.__init__([]) - self.batches = batches - - # The following methods require morphing this Null Executor to a - # real Executor object. 
- - def add_pre_action(self, action): - self._morph() - self.add_pre_action(action) - def add_post_action(self, action): - self._morph() - self.add_post_action(action) - def set_action_list(self, action): - self._morph() - self.set_action_list(action) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Job.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Job.py deleted file mode 100644 index 2a8189a1a1d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Job.py +++ /dev/null @@ -1,698 +0,0 @@ -"""SCons.Job - -This module defines the Serial and Parallel classes that execute tasks to -complete a build. The Jobs class provides a higher level interface to start, -stop, and wait on jobs. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Job.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.compat - -import os -import signal -import threading - -from enum import Enum - -import SCons.Errors - -# The default stack size (in kilobytes) of the threads used to execute -# jobs in parallel. -# -# We use a stack size of 256 kilobytes. The default on some platforms -# is too large and prevents us from creating enough threads to fully -# parallelized the build. For example, the default stack size on linux -# is 8 MBytes. - -explicit_stack_size = None -default_stack_size = 256 - -interrupt_msg = 'Build interrupted.' - - -class InterruptState(object): - def __init__(self): - self.interrupted = False - - def set(self): - self.interrupted = True - - def __call__(self): - return self.interrupted - - -class Jobs(object): - """An instance of this class initializes N jobs, and provides - methods for starting, stopping, and waiting on all N jobs. - """ - - def __init__(self, num, taskmaster): - """ - Create 'num' jobs using the given taskmaster. - - If 'num' is 1 or less, then a serial job will be used, - otherwise a parallel job with 'num' worker threads will - be used. - - The 'num_jobs' attribute will be set to the actual number of jobs - allocated. If more than one job is requested but the Parallel - class can't do it, it gets reset to 1. Wrapping interfaces that - care should check the value of 'num_jobs' after initialization. - """ - - self.job = None - if num > 1: - stack_size = explicit_stack_size - if stack_size is None: - stack_size = default_stack_size - - try: - self.job = Parallel(taskmaster, num, stack_size) - self.num_jobs = num - except NameError: - pass - if self.job is None: - self.job = Serial(taskmaster) - self.num_jobs = 1 - - def run(self, postfunc=lambda: None): - """Run the jobs. - - postfunc() will be invoked after the jobs has run. 
It will be - invoked even if the jobs are interrupted by a keyboard - interrupt (well, in fact by a signal such as either SIGINT, - SIGTERM or SIGHUP). The execution of postfunc() is protected - against keyboard interrupts and is guaranteed to run to - completion.""" - self._setup_sig_handler() - try: - self.job.start() - finally: - postfunc() - self._reset_sig_handler() - - def were_interrupted(self): - """Returns whether the jobs were interrupted by a signal.""" - return self.job.interrupted() - - def _setup_sig_handler(self): - """Setup an interrupt handler so that SCons can shutdown cleanly in - various conditions: - - a) SIGINT: Keyboard interrupt - b) SIGTERM: kill or system shutdown - c) SIGHUP: Controlling shell exiting - - We handle all of these cases by stopping the taskmaster. It - turns out that it's very difficult to stop the build process - by throwing asynchronously an exception such as - KeyboardInterrupt. For example, the python Condition - variables (threading.Condition) and queues do not seem to be - asynchronous-exception-safe. It would require adding a whole - bunch of try/finally block and except KeyboardInterrupt all - over the place. - - Note also that we have to be careful to handle the case when - SCons forks before executing another process. In that case, we - want the child to exit immediately. 
- """ - def handler(signum, stack, self=self, parentpid=os.getpid()): - if os.getpid() == parentpid: - self.job.taskmaster.stop() - self.job.interrupted.set() - else: - os._exit(2) - - self.old_sigint = signal.signal(signal.SIGINT, handler) - self.old_sigterm = signal.signal(signal.SIGTERM, handler) - try: - self.old_sighup = signal.signal(signal.SIGHUP, handler) - except AttributeError: - pass - - def _reset_sig_handler(self): - """Restore the signal handlers to their previous state (before the - call to _setup_sig_handler().""" - - signal.signal(signal.SIGINT, self.old_sigint) - signal.signal(signal.SIGTERM, self.old_sigterm) - try: - signal.signal(signal.SIGHUP, self.old_sighup) - except AttributeError: - pass - -class Serial(object): - """This class is used to execute tasks in series, and is more efficient - than Parallel, but is only appropriate for non-parallel builds. Only - one instance of this class should be in existence at a time. - - This class is not thread safe. - """ - - def __init__(self, taskmaster): - """Create a new serial job given a taskmaster. - - The taskmaster's next_task() method should return the next task - that needs to be executed, or None if there are no more tasks. The - taskmaster's executed() method will be called for each task when it - is successfully executed, or failed() will be called if it failed to - execute (e.g. execute() raised an exception).""" - - self.taskmaster = taskmaster - self.interrupted = InterruptState() - - def start(self): - """Start the job. This will begin pulling tasks from the taskmaster - and executing them, and return when there are no more tasks. If a task - fails to execute (i.e. 
execute() raises an exception), then the job will - stop.""" - - while True: - task = self.taskmaster.next_task() - - if task is None: - break - - try: - task.prepare() - if task.needs_execute(): - task.execute() - except Exception: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - except Exception: - task.exception_set() - else: - task.exception_set() - - # Let the failed() callback function arrange for the - # build to stop if that's appropriate. - task.failed() - else: - task.executed() - - task.postprocess() - self.taskmaster.cleanup() - - -# Trap import failure so that everything in the Job module but the -# Parallel class (and its dependent classes) will work if the interpreter -# doesn't support threads. -try: - import queue - import threading -except ImportError: - pass -else: - class Worker(threading.Thread): - """A worker thread waits on a task to be posted to its request queue, - dequeues the task, executes it, and posts a tuple including the task - and a boolean indicating whether the task executed successfully. """ - - def __init__(self, requestQueue, resultsQueue, interrupted): - threading.Thread.__init__(self) - self.setDaemon(1) - self.requestQueue = requestQueue - self.resultsQueue = resultsQueue - self.interrupted = interrupted - self.start() - - def run(self): - while True: - task = self.requestQueue.get() - - if task is None: - # The "None" value is used as a sentinel by - # ThreadPool.cleanup(). This indicates that there - # are no more tasks, so we should quit. 
- break - - try: - if self.interrupted(): - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - task.execute() - except Exception: - task.exception_set() - ok = False - else: - ok = True - - self.resultsQueue.put((task, ok)) - - class ThreadPool(object): - """This class is responsible for spawning and managing worker threads.""" - - def __init__(self, num, stack_size, interrupted): - """Create the request and reply queues, and 'num' worker threads. - - One must specify the stack size of the worker threads. The - stack size is specified in kilobytes. - """ - self.requestQueue = queue.Queue(0) - self.resultsQueue = queue.Queue(0) - - try: - prev_size = threading.stack_size(stack_size * 1024) - except AttributeError as e: - # Only print a warning if the stack size has been - # explicitly set. - if explicit_stack_size is not None: - msg = "Setting stack size is unsupported by this version of Python:\n " + \ - e.args[0] - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - except ValueError as e: - msg = "Setting stack size failed:\n " + str(e) - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - - # Create worker threads - self.workers = [] - for _ in range(num): - worker = Worker(self.requestQueue, self.resultsQueue, interrupted) - self.workers.append(worker) - - if 'prev_size' in locals(): - threading.stack_size(prev_size) - - def put(self, task): - """Put task into request queue.""" - self.requestQueue.put(task) - - def get(self): - """Remove and return a result tuple from the results queue.""" - return self.resultsQueue.get() - - def preparation_failed(self, task): - self.resultsQueue.put((task, False)) - - def cleanup(self): - """ - Shuts down the thread pool, giving each worker thread a - chance to shut down gracefully. - """ - # For each worker thread, put a sentinel "None" value - # on the requestQueue (indicating that there's no work - # to be done) so that each worker thread will get one and - # terminate gracefully. 
- for _ in self.workers: - self.requestQueue.put(None) - - # Wait for all of the workers to terminate. - # - # If we don't do this, later Python versions (2.4, 2.5) often - # seem to raise exceptions during shutdown. This happens - # in requestQueue.get(), as an assertion failure that - # requestQueue.not_full is notified while not acquired, - # seemingly because the main thread has shut down (or is - # in the process of doing so) while the workers are still - # trying to pull sentinels off the requestQueue. - # - # Normally these terminations should happen fairly quickly, - # but we'll stick a one-second timeout on here just in case - # someone gets hung. - for worker in self.workers: - worker.join(1.0) - self.workers = [] - - class Parallel(object): - """This class is used to execute tasks in parallel, and is somewhat - less efficient than Serial, but is appropriate for parallel builds. - - This class is thread safe. - """ - - def __init__(self, taskmaster, num, stack_size): - """Create a new parallel job given a taskmaster. - - The taskmaster's next_task() method should return the next - task that needs to be executed, or None if there are no more - tasks. The taskmaster's executed() method will be called - for each task when it is successfully executed, or failed() - will be called if the task failed to execute (i.e. execute() - raised an exception). - - Note: calls to taskmaster are serialized, but calls to - execute() on distinct tasks are not serialized, because - that is the whole point of parallel jobs: they can execute - multiple tasks simultaneously. """ - - self.taskmaster = taskmaster - self.interrupted = InterruptState() - self.tp = ThreadPool(num, stack_size, self.interrupted) - - self.maxjobs = num - - def start(self): - """Start the job. This will begin pulling tasks from the - taskmaster and executing them, and return when there are no - more tasks. If a task fails to execute (i.e. 
execute() raises - an exception), then the job will stop.""" - - jobs = 0 - - while True: - # Start up as many available tasks as we're - # allowed to. - while jobs < self.maxjobs: - task = self.taskmaster.next_task() - if task is None: - break - - try: - # prepare task for execution - task.prepare() - except Exception: - task.exception_set() - task.failed() - task.postprocess() - else: - if task.needs_execute(): - # dispatch task - self.tp.put(task) - jobs = jobs + 1 - else: - task.executed() - task.postprocess() - - if not task and not jobs: - break - - # Let any/all completed tasks finish up before we go - # back and put the next batch of tasks on the queue. - while True: - task, ok = self.tp.get() - jobs = jobs - 1 - - if ok: - task.executed() - else: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - except Exception: - task.exception_set() - - # Let the failed() callback function arrange - # for the build to stop if that's appropriate. - task.failed() - - task.postprocess() - - if self.tp.resultsQueue.empty(): - break - - self.tp.cleanup() - self.taskmaster.cleanup() - - # An experimental new parallel scheduler that uses a leaders/followers pattern. - class ExperimentalParallel: - - class State(Enum): - READY = 0 - SEARCHING = 1 - STALLED = 2 - COMPLETED = 3 - - class Worker(threading.Thread): - def __init__(self, owner): - super().__init__() - self.daemon = True - self.owner = owner - self.start() - - def run(self): - self.owner._work() - - def __init__(self, taskmaster, num, stack_size): - self.taskmaster = taskmaster - self.num_workers = num - self.stack_size = stack_size - self.interrupted = InterruptState() - self.workers = [] - - # The `tm_lock` is what ensures that we only have one - # thread interacting with the taskmaster at a time. It - # also protects access to our state that gets updated - # concurrently. The `can_search_cv` is associated with - # this mutex. 
- self.tm_lock = threading.Lock() - - # Guarded under `tm_lock`. - self.jobs = 0 - self.state = ExperimentalParallel.State.READY - - # The `can_search_cv` is used to manage a leader / - # follower pattern for access to the taskmaster, and to - # awaken from stalls. - self.can_search_cv = threading.Condition(self.tm_lock) - - # The queue of tasks that have completed execution. The - # next thread to obtain `tm_lock`` will retire them. - self.results_queue_lock = threading.Lock() - self.results_queue = [] - - def start(self): - self._start_workers() - for worker in self.workers: - worker.join() - self.workers = [] - self.taskmaster.cleanup() - - def _start_workers(self): - prev_size = self._adjust_stack_size() - for _ in range(self.num_workers): - self.workers.append(ExperimentalParallel.Worker(self)) - self._restore_stack_size(prev_size) - - def _adjust_stack_size(self): - try: - prev_size = threading.stack_size(self.stack_size * 1024) - return prev_size - except AttributeError as e: - # Only print a warning if the stack size has been - # explicitly set. - if explicit_stack_size is not None: - msg = "Setting stack size is unsupported by this version of Python:\n " + \ - e.args[0] - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - except ValueError as e: - msg = "Setting stack size failed:\n " + str(e) - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - - return None - - def _restore_stack_size(self, prev_size): - if prev_size is not None: - threading.stack_size(prev_size) - - def _work(self): - - task = None - - while True: - - # Obtain `tm_lock`, granting exclusive access to the taskmaster. - with self.can_search_cv: - - # print(f"XXX {threading.get_ident()} Gained exclusive access") - - # Capture whether we got here with `task` set, - # then drop our reference to the task as we are no - # longer interested in the actual object. 
- completed_task = (task is not None) - task = None - - # We will only have `completed_task` set here if - # we have looped back after executing a task. If - # we have completed a task and find that we are - # stalled, we should speculatively indicate that - # we are no longer stalled by transitioning to the - # 'ready' state which will bypass the condition - # wait so that we immediately process the results - # queue and hopefully light up new - # work. Otherwise, stay stalled, and we will wait - # in the condvar. Some other thread will come back - # here with a completed task. - if self.state == ExperimentalParallel.State.STALLED and completed_task: - # print(f"XXX {threading.get_ident()} Detected stall with completed task, bypassing wait") - self.state = ExperimentalParallel.State.READY - - # Wait until we are neither searching nor stalled. - while self.state == ExperimentalParallel.State.SEARCHING or self.state == ExperimentalParallel.State.STALLED: - # print(f"XXX {threading.get_ident()} Search already in progress, waiting") - self.can_search_cv.wait() - - # If someone set the completed flag, bail. - if self.state == ExperimentalParallel.State.COMPLETED: - # print(f"XXX {threading.get_ident()} Completion detected, breaking from main loop") - break - - # Set the searching flag to indicate that a thread - # is currently in the critical section for - # taskmaster work. - # - # print(f"XXX {threading.get_ident()} Starting search") - self.state = ExperimentalParallel.State.SEARCHING - - # Bulk acquire the tasks in the results queue - # under the result queue lock, then process them - # all outside that lock. We need to process the - # tasks in the results queue before looking for - # new work because we might be unable to find new - # work if we don't. 
- results_queue = [] - with self.results_queue_lock: - results_queue, self.results_queue = self.results_queue, results_queue - - # print(f"XXX {threading.get_ident()} Found {len(results_queue)} completed tasks to process") - for (rtask, rresult) in results_queue: - if rresult: - rtask.executed() - else: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - rtask.targets[0], errstr=interrupt_msg) - except Exception: - rtask.exception_set() - - # Let the failed() callback function arrange - # for the build to stop if that's appropriate. - rtask.failed() - - rtask.postprocess() - self.jobs -= 1 - - # We are done with any task objects that were in - # the results queue. - results_queue.clear() - - # Now, turn the crank on the taskmaster until we - # either run out of tasks, or find a task that - # needs execution. If we run out of tasks, go idle - # until results arrive if jobs are pending, or - # mark the walk as complete if not. - while self.state == ExperimentalParallel.State.SEARCHING: - # print(f"XXX {threading.get_ident()} Searching for new tasks") - task = self.taskmaster.next_task() - - if task: - # We found a task. Walk it through the - # task lifecycle. If it does not need - # execution, just complete the task and - # look for the next one. Otherwise, - # indicate that we are no longer searching - # so we can drop out of this loop, execute - # the task outside the lock, and allow - # another thread in to search. - try: - task.prepare() - except Exception: - task.exception_set() - task.failed() - task.postprocess() - else: - if not task.needs_execute(): - # print(f"XXX {threading.get_ident()} Found internal task") - task.executed() - task.postprocess() - else: - self.jobs += 1 - # print(f"XXX {threading.get_ident()} Found task requiring execution") - self.state = ExperimentalParallel.State.READY - self.can_search_cv.notify() - - else: - # We failed to find a task, so this thread - # cannot continue turning the taskmaster - # crank. 
We must exit the loop. - if self.jobs: - # No task was found, but there are - # outstanding jobs executing that - # might unblock new tasks when they - # complete. Transition to the stalled - # state. We do not need a notify, - # because we know there are threads - # outstanding that will re-enter the - # loop. - # - # print(f"XXX {threading.get_ident()} Found no task requiring execution, but have jobs: marking stalled") - self.state = ExperimentalParallel.State.STALLED - else: - # We didn't find a task and there are - # no jobs outstanding, so there is - # nothing that will ever return - # results which might unblock new - # tasks. We can conclude that the walk - # is complete. Update our state to - # note completion and awaken anyone - # sleeping on the condvar. - # - # print(f"XXX {threading.get_ident()} Found no task requiring execution, and have no jobs: marking complete") - self.state = ExperimentalParallel.State.COMPLETED - self.can_search_cv.notify_all() - - # We no longer hold `tm_lock` here. If we have a task, - # we can now execute it. If there are threads waiting - # to search, one of them can now begin turning the - # taskmaster crank in parallel. - if task: - # print(f"XXX {threading.get_ident()} Executing task") - ok = True - try: - if self.interrupted(): - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - task.execute() - except Exception: - ok = False - task.exception_set() - - # Grab the results queue lock and enqueue the - # executed task and state. The next thread into - # the searching loop will complete the - # postprocessing work under the taskmaster lock. - # - # print(f"XXX {threading.get_ident()} Enqueueing executed task results") - with self.results_queue_lock: - self.results_queue.append((task, ok)) - - # Tricky state "fallthrough" here. We are going back - # to the top of the loop, which behaves differently - # depending on whether `task` is set. 
Do not perturb - # the value of the `task` variable if you add new code - # after this comment. - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Memoize.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Memoize.py deleted file mode 100644 index 8f33a1be211..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Memoize.py +++ /dev/null @@ -1,245 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Memoize.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """Memoizer - -A decorator-based implementation to count hits and misses of the computed -values that various methods cache in memory. 
- -Use of this modules assumes that wrapped methods be coded to cache their -values in a consistent way. In particular, it requires that the class uses a -dictionary named "_memo" to store the cached values. - -Here is an example of wrapping a method that returns a computed value, -with no input parameters:: - - @SCons.Memoize.CountMethodCall - def foo(self): - - try: # Memoization - return self._memo['foo'] # Memoization - except KeyError: # Memoization - pass # Memoization - - result = self.compute_foo_value() - - self._memo['foo'] = result # Memoization - - return result - -Here is an example of wrapping a method that will return different values -based on one or more input arguments:: - - def _bar_key(self, argument): # Memoization - return argument # Memoization - - @SCons.Memoize.CountDictCall(_bar_key) - def bar(self, argument): - - memo_key = argument # Memoization - try: # Memoization - memo_dict = self._memo['bar'] # Memoization - except KeyError: # Memoization - memo_dict = {} # Memoization - self._memo['dict'] = memo_dict # Memoization - else: # Memoization - try: # Memoization - return memo_dict[memo_key] # Memoization - except KeyError: # Memoization - pass # Memoization - - result = self.compute_bar_value(argument) - - memo_dict[memo_key] = result # Memoization - - return result - -Deciding what to cache is tricky, because different configurations -can have radically different performance tradeoffs, and because the -tradeoffs involved are often so non-obvious. Consequently, deciding -whether or not to cache a given method will likely be more of an art than -a science, but should still be based on available data from this module. -Here are some VERY GENERAL guidelines about deciding whether or not to -cache return values from a method that's being called a lot: - - -- The first question to ask is, "Can we change the calling code - so this method isn't called so often?" Sometimes this can be - done by changing the algorithm. 
Sometimes the *caller* should - be memoized, not the method you're looking at. - - -- The memoized function should be timed with multiple configurations - to make sure it doesn't inadvertently slow down some other - configuration. - - -- When memoizing values based on a dictionary key composed of - input arguments, you don't need to use all of the arguments - if some of them don't affect the return values. - -""" - -# A flag controlling whether or not we actually use memoization. -use_memoizer = None - -# Global list of counter objects -CounterList = {} - -class Counter(object): - """ - Base class for counting memoization hits and misses. - - We expect that the initialization in a matching decorator will - fill in the correct class name and method name that represents - the name of the function being counted. - """ - def __init__(self, cls_name, method_name): - """ - """ - self.cls_name = cls_name - self.method_name = method_name - self.hit = 0 - self.miss = 0 - def key(self): - return self.cls_name+'.'+self.method_name - def display(self): - print(" {:7d} hits {:7d} misses {}()".format(self.hit, self.miss, self.key())) - def __eq__(self, other): - try: - return self.key() == other.key() - except AttributeError: - return True - -class CountValue(Counter): - """ - A counter class for simple, atomic memoized values. - - A CountValue object should be instantiated in a decorator for each of - the class's methods that memoizes its return value by simply storing - the return value in its _memo dictionary. - """ - def count(self, *args, **kw): - """ Counts whether the memoized value has already been - set (a hit) or not (a miss). - """ - obj = args[0] - if self.method_name in obj._memo: - self.hit = self.hit + 1 - else: - self.miss = self.miss + 1 - -class CountDict(Counter): - """ - A counter class for memoized values stored in a dictionary, with - keys based on the method's input arguments. 
- - A CountDict object is instantiated in a decorator for each of the - class's methods that memoizes its return value in a dictionary, - indexed by some key that can be computed from one or more of - its input arguments. - """ - def __init__(self, cls_name, method_name, keymaker): - """ - """ - Counter.__init__(self, cls_name, method_name) - self.keymaker = keymaker - def count(self, *args, **kw): - """ Counts whether the computed key value is already present - in the memoization dictionary (a hit) or not (a miss). - """ - obj = args[0] - try: - memo_dict = obj._memo[self.method_name] - except KeyError: - self.miss = self.miss + 1 - else: - key = self.keymaker(*args, **kw) - if key in memo_dict: - self.hit = self.hit + 1 - else: - self.miss = self.miss + 1 - -def Dump(title=None): - """ Dump the hit/miss count for all the counters - collected so far. - """ - if title: - print(title) - for counter in sorted(CounterList): - CounterList[counter].display() - -def EnableMemoization(): - global use_memoizer - use_memoizer = 1 - -def CountMethodCall(fn): - """ Decorator for counting memoizer hits/misses while retrieving - a simple value in a class method. It wraps the given method - fn and uses a CountValue object to keep track of the - caching statistics. - Wrapping gets enabled by calling EnableMemoization(). - """ - if use_memoizer: - def wrapper(self, *args, **kwargs): - global CounterList - key = self.__class__.__name__+'.'+fn.__name__ - if key not in CounterList: - CounterList[key] = CountValue(self.__class__.__name__, fn.__name__) - CounterList[key].count(self, *args, **kwargs) - return fn(self, *args, **kwargs) - wrapper.__name__= fn.__name__ - return wrapper - else: - return fn - -def CountDictCall(keyfunc): - """ Decorator for counting memoizer hits/misses while accessing - dictionary values with a key-generating function. Like - CountMethodCall above, it wraps the given method - fn and uses a CountDict object to keep track of the - caching statistics. 
The dict-key function keyfunc has to - get passed in the decorator call and gets stored in the - CountDict instance. - Wrapping gets enabled by calling EnableMemoization(). - """ - def decorator(fn): - if use_memoizer: - def wrapper(self, *args, **kwargs): - global CounterList - key = self.__class__.__name__+'.'+fn.__name__ - if key not in CounterList: - CounterList[key] = CountDict(self.__class__.__name__, fn.__name__, keyfunc) - CounterList[key].count(self, *args, **kwargs) - return fn(self, *args, **kwargs) - wrapper.__name__= fn.__name__ - return wrapper - else: - return fn - return decorator - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/Alias.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/Alias.py deleted file mode 100644 index 5c62677cda3..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/Alias.py +++ /dev/null @@ -1,181 +0,0 @@ - -"""scons.Node.Alias - -Alias nodes. - -This creates a hash of global Aliases (dummy targets). - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Node/Alias.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import collections - -import SCons.Errors -import SCons.Node -import SCons.Util - -class AliasNameSpace(collections.UserDict): - def Alias(self, name, **kw): - if isinstance(name, SCons.Node.Alias.Alias): - return name - try: - a = self[name] - except KeyError: - a = SCons.Node.Alias.Alias(name, **kw) - self[name] = a - return a - - def lookup(self, name, **kw): - try: - return self[name] - except KeyError: - return None - -class AliasNodeInfo(SCons.Node.NodeInfoBase): - __slots__ = ('csig',) - current_version_id = 2 - field_list = ['csig'] - def str_to_node(self, s): - return default_ans.Alias(s) - - def __getstate__(self): - """ - Return all fields that shall be pickled. Walk the slots in the class - hierarchy and add those to the state dictionary. If a '__dict__' slot is - available, copy all entries to the dictionary. Also include the version - id, which is fixed for all instances of a class. - """ - state = getattr(self, '__dict__', {}).copy() - for obj in type(self).mro(): - for name in getattr(obj,'__slots__',()): - if hasattr(self, name): - state[name] = getattr(self, name) - - state['_version_id'] = self.current_version_id - try: - del state['__weakref__'] - except KeyError: - pass - - return state - - def __setstate__(self, state): - """ - Restore the attributes from a pickled state. 
- """ - # TODO check or discard version - del state['_version_id'] - for key, value in state.items(): - if key not in ('__weakref__',): - setattr(self, key, value) - - -class AliasBuildInfo(SCons.Node.BuildInfoBase): - __slots__ = () - current_version_id = 2 - -class Alias(SCons.Node.Node): - - NodeInfo = AliasNodeInfo - BuildInfo = AliasBuildInfo - - def __init__(self, name): - SCons.Node.Node.__init__(self) - self.name = name - self.changed_since_last_build = 1 - self.store_info = 0 - - def str_for_display(self): - return '"' + self.__str__() + '"' - - def __str__(self): - return self.name - - def make_ready(self): - self.get_csig() - - really_build = SCons.Node.Node.build - is_up_to_date = SCons.Node.Node.children_are_up_to_date - - def is_under(self, dir): - # Make Alias nodes get built regardless of - # what directory scons was run from. Alias nodes - # are outside the filesystem: - return 1 - - def get_contents(self): - """The contents of an alias is the concatenation - of the content signatures of all its sources.""" - childsigs = [n.get_csig() for n in self.children()] - return ''.join(childsigs) - - def sconsign(self): - """An Alias is not recorded in .sconsign files""" - pass - - # - # - # - - def build(self): - """A "builder" for aliases.""" - pass - - def convert(self): - try: del self.builder - except AttributeError: pass - self.reset_executor() - self.build = self.really_build - - def get_csig(self): - """ - Generate a node's content signature, the digested signature - of its content. 
- - node - the node - cache - alternate node to use for the signature cache - returns - the content signature - """ - try: - return self.ninfo.csig - except AttributeError: - pass - - contents = self.get_contents() - csig = SCons.Util.MD5signature(contents) - self.get_ninfo().csig = csig - return csig - -default_ans = AliasNameSpace() - -SCons.Node.arg2nodes_lookups.append(default_ans.lookup) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py deleted file mode 100644 index 57b3644223f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py +++ /dev/null @@ -1,3820 +0,0 @@ -"""scons.Node.FS - -File system nodes. - -These Nodes represent the canonical external objects that people think -of when they think of building software: files and directories. - -This holds a "default_fs" variable that should be initialized with an FS -that can be used by scripts or modules looking for the canonical default. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from __future__ import print_function - -__revision__ = "src/engine/SCons/Node/FS.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import fnmatch -import os -import re -import shutil -import stat -import sys -import time -import codecs -from itertools import chain - -import SCons.Action -import SCons.Debug -from SCons.Debug import logInstanceCreation -import SCons.Errors -import SCons.Memoize -import SCons.Node -import SCons.Node.Alias -import SCons.Subst -import SCons.Util -import SCons.Warnings - -from SCons.Debug import Trace - -print_duplicate = 0 - -MD5_TIMESTAMP_DEBUG = False - - -def sconsign_none(node): - raise NotImplementedError - -def sconsign_dir(node): - """Return the .sconsign file info for this directory, - creating it first if necessary.""" - if not node._sconsign: - import SCons.SConsign - node._sconsign = SCons.SConsign.ForDirectory(node) - return node._sconsign - -_sconsign_map = {0 : sconsign_none, - 1 : sconsign_dir} - -class FileBuildInfoFileToCsigMappingError(Exception): - pass - -class EntryProxyAttributeError(AttributeError): - """ - An AttributeError subclass for recording and displaying the name - of the underlying Entry involved in an AttributeError exception. 
- """ - def __init__(self, entry_proxy, attribute): - AttributeError.__init__(self) - self.entry_proxy = entry_proxy - self.attribute = attribute - def __str__(self): - entry = self.entry_proxy.get() - fmt = "%s instance %s has no attribute %s" - return fmt % (entry.__class__.__name__, - repr(entry.name), - repr(self.attribute)) - -# The max_drift value: by default, use a cached signature value for -# any file that's been untouched for more than two days. -default_max_drift = 2*24*60*60 - -# -# We stringify these file system Nodes a lot. Turning a file system Node -# into a string is non-trivial, because the final string representation -# can depend on a lot of factors: whether it's a derived target or not, -# whether it's linked to a repository or source directory, and whether -# there's duplication going on. The normal technique for optimizing -# calculations like this is to memoize (cache) the string value, so you -# only have to do the calculation once. -# -# A number of the above factors, however, can be set after we've already -# been asked to return a string for a Node, because a Repository() or -# VariantDir() call or the like may not occur until later in SConscript -# files. So this variable controls whether we bother trying to save -# string values for Nodes. The wrapper interface can set this whenever -# they're done mucking with Repository and VariantDir and the other stuff, -# to let this module know it can start returning saved string values -# for Nodes. -# -Save_Strings = None - -def save_strings(val): - global Save_Strings - Save_Strings = val - -# -# Avoid unnecessary function calls by recording a Boolean value that -# tells us whether or not os.path.splitdrive() actually does anything -# on this system, and therefore whether we need to bother calling it -# when looking up path names in various methods below. 
-# - -do_splitdrive = None -_my_splitdrive =None - -def initialize_do_splitdrive(): - global do_splitdrive - global has_unc - drive, path = os.path.splitdrive('X:/foo') - # splitunc is removed from python 3.7 and newer - # so we can also just test if splitdrive works with UNC - has_unc = (hasattr(os.path, 'splitunc') - or os.path.splitdrive(r'\\split\drive\test')[0] == r'\\split\drive') - - do_splitdrive = not not drive or has_unc - - global _my_splitdrive - if has_unc: - def splitdrive(p): - if p[1:2] == ':': - return p[:2], p[2:] - if p[0:2] == '//': - # Note that we leave a leading slash in the path - # because UNC paths are always absolute. - return '//', p[1:] - return '', p - else: - def splitdrive(p): - if p[1:2] == ':': - return p[:2], p[2:] - return '', p - _my_splitdrive = splitdrive - - # Keep some commonly used values in global variables to skip to - # module look-up costs. - global OS_SEP - global UNC_PREFIX - global os_sep_is_slash - - OS_SEP = os.sep - UNC_PREFIX = OS_SEP + OS_SEP - os_sep_is_slash = OS_SEP == '/' - -initialize_do_splitdrive() - -# Used to avoid invoking os.path.normpath if not necessary. -needs_normpath_check = re.compile( - r''' - # We need to renormalize the path if it contains any consecutive - # '/' characters. - .*// | - - # We need to renormalize the path if it contains a '..' directory. - # Note that we check for all the following cases: - # - # a) The path is a single '..' - # b) The path starts with '..'. E.g. '../' or '../moredirs' - # but we not match '..abc/'. - # c) The path ends with '..'. E.g. '/..' or 'dirs/..' - # d) The path contains a '..' in the middle. - # E.g. dirs/../moredirs - - (.*/)?\.\.(?:/|$) | - - # We need to renormalize the path if it contains a '.' - # directory, but NOT if it is a single '.' '/' characters. We - # do not want to match a single '.' because this case is checked - # for explicitly since this is common enough case. 
- # - # Note that we check for all the following cases: - # - # a) We don't match a single '.' - # b) We match if the path starts with '.'. E.g. './' or - # './moredirs' but we not match '.abc/'. - # c) We match if the path ends with '.'. E.g. '/.' or - # 'dirs/.' - # d) We match if the path contains a '.' in the middle. - # E.g. dirs/./moredirs - - \./|.*/\.(?:/|$) - - ''', - re.VERBOSE - ) -needs_normpath_match = needs_normpath_check.match - -# -# SCons.Action objects for interacting with the outside world. -# -# The Node.FS methods in this module should use these actions to -# create and/or remove files and directories; they should *not* use -# os.{link,symlink,unlink,mkdir}(), etc., directly. -# -# Using these SCons.Action objects ensures that descriptions of these -# external activities are properly displayed, that the displays are -# suppressed when the -s (silent) option is used, and (most importantly) -# the actions are disabled when the the -n option is used, in which case -# there should be *no* changes to the external file system(s)... -# - -# For Now disable hard & softlinks for win32 -# PY3 supports them, but the rest of SCons is not ready for this -# in some cases user permissions may be required. -# TODO: See if theres a reasonable way to enable using links on win32/64 - -if hasattr(os, 'link') and sys.platform != 'win32': - def _hardlink_func(fs, src, dst): - # If the source is a symlink, we can't just hard-link to it - # because a relative symlink may point somewhere completely - # different. We must disambiguate the symlink and then - # hard-link the final destination file. 
- while fs.islink(src): - link = fs.readlink(src) - if not os.path.isabs(link): - src = link - else: - src = os.path.join(os.path.dirname(src), link) - fs.link(src, dst) -else: - _hardlink_func = None - -if hasattr(os, 'symlink') and sys.platform != 'win32': - def _softlink_func(fs, src, dst): - fs.symlink(src, dst) -else: - _softlink_func = None - -def _copy_func(fs, src, dest): - shutil.copy2(src, dest) - st = fs.stat(src) - fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - - -Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', - 'hard-copy', 'soft-copy', 'copy'] - -Link_Funcs = [] # contains the callables of the specified duplication style - -def set_duplicate(duplicate): - # Fill in the Link_Funcs list according to the argument - # (discarding those not available on the platform). - - # Set up the dictionary that maps the argument names to the - # underlying implementations. We do this inside this function, - # not in the top-level module code, so that we can remap os.link - # and os.symlink for testing purposes. - link_dict = { - 'hard' : _hardlink_func, - 'soft' : _softlink_func, - 'copy' : _copy_func - } - - if duplicate not in Valid_Duplicates: - raise SCons.Errors.InternalError("The argument of set_duplicate " - "should be in Valid_Duplicates") - global Link_Funcs - Link_Funcs = [] - for func in duplicate.split('-'): - if link_dict[func]: - Link_Funcs.append(link_dict[func]) - -def LinkFunc(target, source, env): - """ - Relative paths cause problems with symbolic links, so - we use absolute paths, which may be a problem for people - who want to move their soft-linked src-trees around. Those - people should use the 'hard-copy' mode, softlinks cannot be - used for that; at least I have no idea how ... - """ - src = source[0].get_abspath() - dest = target[0].get_abspath() - dir, file = os.path.split(dest) - if dir and not target[0].fs.isdir(dir): - os.makedirs(dir) - if not Link_Funcs: - # Set a default order of link functions. 
- set_duplicate('hard-soft-copy') - fs = source[0].fs - # Now link the files with the previously specified order. - for func in Link_Funcs: - try: - func(fs, src, dest) - break - except (IOError, OSError): - # An OSError indicates something happened like a permissions - # problem or an attempt to symlink across file-system - # boundaries. An IOError indicates something like the file - # not existing. In either case, keeping trying additional - # functions in the list and only raise an error if the last - # one failed. - if func == Link_Funcs[-1]: - # exception of the last link method (copy) are fatal - raise - return 0 - -Link = SCons.Action.Action(LinkFunc, None) -def LocalString(target, source, env): - return 'Local copy of %s from %s' % (target[0], source[0]) - -LocalCopy = SCons.Action.Action(LinkFunc, LocalString) - -def UnlinkFunc(target, source, env): - t = target[0] - t.fs.unlink(t.get_abspath()) - return 0 - -Unlink = SCons.Action.Action(UnlinkFunc, None) - -def MkdirFunc(target, source, env): - t = target[0] - # This os.path.exists test looks redundant, but it's possible - # when using Install() to install multiple dirs outside the - # source tree to get a case where t.exists() is true but - # the path does already exist, so this prevents spurious - # build failures in that case. See test/Install/multi-dir. - if not t.exists() and not os.path.exists(t.get_abspath()): - t.fs.mkdir(t.get_abspath()) - return 0 - -Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) - -MkdirBuilder = None - -def get_MkdirBuilder(): - global MkdirBuilder - if MkdirBuilder is None: - import SCons.Builder - import SCons.Defaults - # "env" will get filled in by Executor.get_build_env() - # calling SCons.Defaults.DefaultEnvironment() when necessary. 
- MkdirBuilder = SCons.Builder.Builder(action = Mkdir, - env = None, - explain = None, - is_explicit = None, - target_scanner = SCons.Defaults.DirEntryScanner, - name = "MkdirBuilder") - return MkdirBuilder - -class _Null(object): - pass - -_null = _Null() - -# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. -_is_cygwin = sys.platform == "cygwin" -if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin: - def _my_normcase(x): - return x -else: - def _my_normcase(x): - return x.upper() - - - -class DiskChecker(object): - def __init__(self, type, do, ignore): - self.type = type - self.do = do - self.ignore = ignore - self.func = do - def __call__(self, *args, **kw): - return self.func(*args, **kw) - def set(self, list): - if self.type in list: - self.func = self.do - else: - self.func = self.ignore - -def do_diskcheck_match(node, predicate, errorfmt): - result = predicate() - try: - # If calling the predicate() cached a None value from stat(), - # remove it so it doesn't interfere with later attempts to - # build this Node as we walk the DAG. (This isn't a great way - # to do this, we're reaching into an interface that doesn't - # really belong to us, but it's all about performance, so - # for now we'll just document the dependency...) - if node._memo['stat'] is None: - del node._memo['stat'] - except (AttributeError, KeyError): - pass - if result: - raise TypeError(errorfmt % node.get_abspath()) - -def ignore_diskcheck_match(node, predicate, errorfmt): - pass - - - -diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) - -diskcheckers = [ - diskcheck_match, -] - -def set_diskcheck(list): - for dc in diskcheckers: - dc.set(list) - -def diskcheck_types(): - return [dc.type for dc in diskcheckers] - - - -class EntryProxy(SCons.Util.Proxy): - - __str__ = SCons.Util.Delegate('__str__') - - # In PY3 if a class defines __eq__, then it must explicitly provide - # __hash__. 
Since SCons.Util.Proxy provides __eq__ we need the following - # see: https://docs.python.org/3.1/reference/datamodel.html#object.__hash__ - __hash__ = SCons.Util.Delegate('__hash__') - - def __get_abspath(self): - entry = self.get() - return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(), - entry.name + "_abspath") - - def __get_filebase(self): - name = self.get().name - return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0], - name + "_filebase") - - def __get_suffix(self): - name = self.get().name - return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1], - name + "_suffix") - - def __get_file(self): - name = self.get().name - return SCons.Subst.SpecialAttrWrapper(name, name + "_file") - - def __get_base_path(self): - """Return the file's directory and file name, with the - suffix stripped.""" - entry = self.get() - return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0], - entry.name + "_base") - - def __get_posix_path(self): - """Return the path with / as the path separator, - regardless of platform.""" - if os_sep_is_slash: - return self - else: - entry = self.get() - r = entry.get_path().replace(OS_SEP, '/') - return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix") - - def __get_windows_path(self): - r"""Return the path with \ as the path separator, - regardless of platform.""" - if OS_SEP == '\\': - return self - else: - entry = self.get() - r = entry.get_path().replace(OS_SEP, '\\') - return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows") - - def __get_srcnode(self): - return EntryProxy(self.get().srcnode()) - - def __get_srcdir(self): - """Returns the directory containing the source node linked to this - node via VariantDir(), or the directory of this node if not linked.""" - return EntryProxy(self.get().srcnode().dir) - - def __get_rsrcnode(self): - return EntryProxy(self.get().srcnode().rfile()) - - def __get_rsrcdir(self): - """Returns the directory containing the source node linked 
to this - node via VariantDir(), or the directory of this node if not linked.""" - return EntryProxy(self.get().srcnode().rfile().dir) - - def __get_dir(self): - return EntryProxy(self.get().dir) - - dictSpecialAttrs = { "base" : __get_base_path, - "posix" : __get_posix_path, - "windows" : __get_windows_path, - "win32" : __get_windows_path, - "srcpath" : __get_srcnode, - "srcdir" : __get_srcdir, - "dir" : __get_dir, - "abspath" : __get_abspath, - "filebase" : __get_filebase, - "suffix" : __get_suffix, - "file" : __get_file, - "rsrcpath" : __get_rsrcnode, - "rsrcdir" : __get_rsrcdir, - } - - def __getattr__(self, name): - # This is how we implement the "special" attributes - # such as base, posix, srcdir, etc. - try: - attr_function = self.dictSpecialAttrs[name] - except KeyError: - try: - attr = SCons.Util.Proxy.__getattr__(self, name) - except AttributeError: - # Raise our own AttributeError subclass with an - # overridden __str__() method that identifies the - # name of the entry that caused the exception. - raise EntryProxyAttributeError(self, name) - return attr - else: - return attr_function(self) - - -class Base(SCons.Node.Node): - """A generic class for file system entries. This class is for - when we don't know yet whether the entry being looked up is a file - or a directory. Instances of this class can morph into either - Dir or File objects by a later, more precise lookup. - - Note: this class does not define __cmp__ and __hash__ for - efficiency reasons. SCons does a lot of comparing of - Node.FS.{Base,Entry,File,Dir} objects, so those operations must be - as fast as possible, which means we want to use Python's built-in - object identity comparisons. - """ - - __slots__ = ['name', - 'fs', - '_abspath', - '_labspath', - '_path', - '_tpath', - '_path_elements', - 'dir', - 'cwd', - 'duplicate', - '_local', - 'sbuilder', - '_proxy', - '_func_sconsign'] - - def __init__(self, name, directory, fs): - """Initialize a generic Node.FS.Base object. 
- - Call the superclass initialization, take care of setting up - our relative and absolute paths, identify our parent - directory, and indicate that this node should use - signatures.""" - - if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Base') - SCons.Node.Node.__init__(self) - - # Filenames and paths are probably reused and are intern'ed to save some memory. - # Filename with extension as it was specified when the object was - # created; to obtain filesystem path, use Python str() function - self.name = SCons.Util.silent_intern(name) - self.fs = fs #: Reference to parent Node.FS object - - assert directory, "A directory must be provided" - - self._abspath = None - self._labspath = None - self._path = None - self._tpath = None - self._path_elements = None - - self.dir = directory - self.cwd = None # will hold the SConscript directory for target nodes - self.duplicate = directory.duplicate - self.changed_since_last_build = 2 - self._func_sconsign = 0 - self._func_exists = 2 - self._func_rexists = 2 - self._func_get_contents = 0 - self._func_target_from_source = 1 - self.store_info = 1 - - def str_for_display(self): - return '"' + self.__str__() + '"' - - def must_be_same(self, klass): - """ - This node, which already existed, is being looked up as the - specified klass. Raise an exception if it isn't. - """ - if isinstance(self, klass) or klass is Entry: - return - raise TypeError("Tried to lookup %s '%s' as a %s." %\ - (self.__class__.__name__, self.get_internal_path(), klass.__name__)) - - def get_dir(self): - return self.dir - - def get_suffix(self): - return SCons.Util.splitext(self.name)[1] - - def rfile(self): - return self - - def __getattr__(self, attr): - """ Together with the node_bwcomp dict defined below, - this method provides a simple backward compatibility - layer for the Node attributes 'abspath', 'labspath', - 'path', 'tpath', 'suffix' and 'path_elements'. 
These Node - attributes used to be directly available in v2.3 and earlier, but - have been replaced by getter methods that initialize the - single variables lazily when required, in order to save memory. - The redirection to the getters lets older Tools and - SConstruct continue to work without any additional changes, - fully transparent to the user. - Note, that __getattr__ is only called as fallback when the - requested attribute can't be found, so there should be no - speed performance penalty involved for standard builds. - """ - if attr in node_bwcomp: - return node_bwcomp[attr](self) - - raise AttributeError("%r object has no attribute %r" % - (self.__class__, attr)) - - def __str__(self): - """A Node.FS.Base object's string representation is its path - name.""" - global Save_Strings - if Save_Strings: - return self._save_str() - return self._get_str() - - def __lt__(self, other): - """ less than operator used by sorting on py3""" - return str(self) < str(other) - - @SCons.Memoize.CountMethodCall - def _save_str(self): - try: - return self._memo['_save_str'] - except KeyError: - pass - result = SCons.Util.silent_intern(self._get_str()) - self._memo['_save_str'] = result - return result - - def _get_str(self): - global Save_Strings - if self.duplicate or self.is_derived(): - return self.get_path() - srcnode = self.srcnode() - if srcnode.stat() is None and self.stat() is not None: - result = self.get_path() - else: - result = srcnode.get_path() - if not Save_Strings: - # We're not at the point where we're saving the string - # representations of FS Nodes (because we haven't finished - # reading the SConscript files and need to have str() return - # things relative to them). That also means we can't yet - # cache values returned (or not returned) by stat(), since - # Python code in the SConscript files might still create - # or otherwise affect the on-disk file. So get rid of the - # values that the underlying stat() method saved. 
- try: del self._memo['stat'] - except KeyError: pass - if self is not srcnode: - try: del srcnode._memo['stat'] - except KeyError: pass - return result - - rstr = __str__ - - @SCons.Memoize.CountMethodCall - def stat(self): - try: - return self._memo['stat'] - except KeyError: - pass - try: - result = self.fs.stat(self.get_abspath()) - except os.error: - result = None - - self._memo['stat'] = result - return result - - def exists(self): - return SCons.Node._exists_map[self._func_exists](self) - - def rexists(self): - return SCons.Node._rexists_map[self._func_rexists](self) - - def getmtime(self): - st = self.stat() - if st: - return st[stat.ST_MTIME] - else: - return None - - def getsize(self): - st = self.stat() - if st: - return st[stat.ST_SIZE] - else: - return None - - def isdir(self): - st = self.stat() - return st is not None and stat.S_ISDIR(st[stat.ST_MODE]) - - def isfile(self): - st = self.stat() - return st is not None and stat.S_ISREG(st[stat.ST_MODE]) - - if hasattr(os, 'symlink'): - def islink(self): - try: st = self.fs.lstat(self.get_abspath()) - except os.error: return 0 - return stat.S_ISLNK(st[stat.ST_MODE]) - else: - def islink(self): - return 0 # no symlinks - - def is_under(self, dir): - if self is dir: - return 1 - else: - return self.dir.is_under(dir) - - def set_local(self): - self._local = 1 - - def srcnode(self): - """If this node is in a build path, return the node - corresponding to its source file. Otherwise, return - ourself. - """ - srcdir_list = self.dir.srcdir_list() - if srcdir_list: - srcnode = srcdir_list[0].Entry(self.name) - srcnode.must_be_same(self.__class__) - return srcnode - return self - - def get_path(self, dir=None): - """Return path relative to the current working directory of the - Node.FS.Base object that owns us.""" - if not dir: - dir = self.fs.getcwd() - if self == dir: - return '.' 
- path_elems = self.get_path_elements() - pathname = '' - try: i = path_elems.index(dir) - except ValueError: - for p in path_elems[:-1]: - pathname += p.dirname - else: - for p in path_elems[i+1:-1]: - pathname += p.dirname - return pathname + path_elems[-1].name - - def set_src_builder(self, builder): - """Set the source code builder for this node.""" - self.sbuilder = builder - if not self.has_builder(): - self.builder_set(builder) - - def src_builder(self): - """Fetch the source code builder for this node. - - If there isn't one, we cache the source code builder specified - for the directory (which in turn will cache the value from its - parent directory, and so on up to the file system root). - """ - try: - scb = self.sbuilder - except AttributeError: - scb = self.dir.src_builder() - self.sbuilder = scb - return scb - - def get_abspath(self): - """Get the absolute path of the file.""" - return self.dir.entry_abspath(self.name) - - def get_labspath(self): - """Get the absolute path of the file.""" - return self.dir.entry_labspath(self.name) - - def get_internal_path(self): - if self.dir._path == '.': - return self.name - else: - return self.dir.entry_path(self.name) - - def get_tpath(self): - if self.dir._tpath == '.': - return self.name - else: - return self.dir.entry_tpath(self.name) - - def get_path_elements(self): - return self.dir._path_elements + [self] - - def for_signature(self): - # Return just our name. Even an absolute path would not work, - # because that can change thanks to symlinks or remapped network - # paths. - return self.name - - def get_subst_proxy(self): - try: - return self._proxy - except AttributeError: - ret = EntryProxy(self) - self._proxy = ret - return ret - - def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext): - """ - - Generates a target entry that corresponds to this entry (usually - a source file) with the specified prefix and suffix. 
- - Note that this method can be overridden dynamically for generated - files that need different behavior. See Tool/swig.py for - an example. - """ - return SCons.Node._target_from_source_map[self._func_target_from_source](self, prefix, suffix, splitext) - - def _Rfindalldirs_key(self, pathlist): - return pathlist - - @SCons.Memoize.CountDictCall(_Rfindalldirs_key) - def Rfindalldirs(self, pathlist): - """ - Return all of the directories for a given path list, including - corresponding "backing" directories in any repositories. - - The Node lookups are relative to this Node (typically a - directory), so memoizing result saves cycles from looking - up the same path for each target in a given directory. - """ - try: - memo_dict = self._memo['Rfindalldirs'] - except KeyError: - memo_dict = {} - self._memo['Rfindalldirs'] = memo_dict - else: - try: - return memo_dict[pathlist] - except KeyError: - pass - - create_dir_relative_to_self = self.Dir - result = [] - for path in pathlist: - if isinstance(path, SCons.Node.Node): - result.append(path) - else: - dir = create_dir_relative_to_self(path) - result.extend(dir.get_all_rdirs()) - - memo_dict[pathlist] = result - - return result - - def RDirs(self, pathlist): - """Search for a list of directories in the Repository list.""" - cwd = self.cwd or self.fs._cwd - return cwd.Rfindalldirs(pathlist) - - @SCons.Memoize.CountMethodCall - def rentry(self): - try: - return self._memo['rentry'] - except KeyError: - pass - result = self - if not self.exists(): - norm_name = _my_normcase(self.name) - for dir in self.dir.get_all_rdirs(): - try: - node = dir.entries[norm_name] - except KeyError: - if dir.entry_exists_on_disk(self.name): - result = dir.Entry(self.name) - break - self._memo['rentry'] = result - return result - - def _glob1(self, pattern, ondisk=True, source=False, strings=False): - return [] - -# Dict that provides a simple backward compatibility -# layer for the Node attributes 'abspath', 'labspath', -# 'path', 'tpath' 
and 'path_elements'. -# @see Base.__getattr__ above -node_bwcomp = {'abspath' : Base.get_abspath, - 'labspath' : Base.get_labspath, - 'path' : Base.get_internal_path, - 'tpath' : Base.get_tpath, - 'path_elements' : Base.get_path_elements, - 'suffix' : Base.get_suffix} - -class Entry(Base): - """This is the class for generic Node.FS entries--that is, things - that could be a File or a Dir, but we're just not sure yet. - Consequently, the methods in this class really exist just to - transform their associated object into the right class when the - time comes, and then call the same-named method in the transformed - class.""" - - __slots__ = ['scanner_paths', - 'cachedir_csig', - 'cachesig', - 'repositories', - 'srcdir', - 'entries', - 'searched', - '_sconsign', - 'variant_dirs', - 'root', - 'dirname', - 'on_disk_entries', - 'released_target_info', - 'contentsig'] - - def __init__(self, name, directory, fs): - Base.__init__(self, name, directory, fs) - self._func_exists = 3 - self._func_get_contents = 1 - - def diskcheck_match(self): - pass - - def disambiguate(self, must_exist=None): - """ - """ - if self.isfile(): - self.__class__ = File - self._morph() - self.clear() - elif self.isdir(): - self.__class__ = Dir - self._morph() - else: - # There was nothing on-disk at this location, so look in - # the src directory. - # - # We can't just use self.srcnode() straight away because - # that would create an actual Node for this file in the src - # directory, and there might not be one. Instead, use the - # dir_on_disk() method to see if there's something on-disk - # with that name, in which case we can go ahead and call - # self.srcnode() to create the right type of entry. 
- srcdir = self.dir.srcnode() - if srcdir != self.dir and \ - srcdir.entry_exists_on_disk(self.name) and \ - self.srcnode().isdir(): - self.__class__ = Dir - self._morph() - elif must_exist: - msg = "No such file or directory: '%s'" % self.get_abspath() - raise SCons.Errors.UserError(msg) - else: - self.__class__ = File - self._morph() - self.clear() - return self - - def rfile(self): - """We're a generic Entry, but the caller is actually looking for - a File at this point, so morph into one.""" - self.__class__ = File - self._morph() - self.clear() - return File.rfile(self) - - def scanner_key(self): - return self.get_suffix() - - def get_contents(self): - """Fetch the contents of the entry. Returns the exact binary - contents of the file.""" - return SCons.Node._get_contents_map[self._func_get_contents](self) - - def get_text_contents(self): - """Fetch the decoded text contents of a Unicode encoded Entry. - - Since this should return the text contents from the file - system, we check to see into what sort of subclass we should - morph this Entry.""" - try: - self = self.disambiguate(must_exist=1) - except SCons.Errors.UserError: - # There was nothing on disk with which to disambiguate - # this entry. Leave it as an Entry, but return a null - # string so calls to get_text_contents() in emitters and - # the like (e.g. in qt.py) don't have to disambiguate by - # hand or catch the exception. - return '' - else: - return self.get_text_contents() - - def must_be_same(self, klass): - """Called to make sure a Node is a Dir. Since we're an - Entry, we can morph into one.""" - if self.__class__ is not klass: - self.__class__ = klass - self._morph() - self.clear() - - # The following methods can get called before the Taskmaster has - # had a chance to call disambiguate() directly to see if this Entry - # should really be a Dir or a File. We therefore use these to call - # disambiguate() transparently (from our caller's point of view). 
- # - # Right now, this minimal set of methods has been derived by just - # looking at some of the methods that will obviously be called early - # in any of the various Taskmasters' calling sequences, and then - # empirically figuring out which additional methods are necessary - # to make various tests pass. - - def exists(self): - return SCons.Node._exists_map[self._func_exists](self) - - def rel_path(self, other): - d = self.disambiguate() - if d.__class__ is Entry: - raise Exception("rel_path() could not disambiguate File/Dir") - return d.rel_path(other) - - def new_ninfo(self): - return self.disambiguate().new_ninfo() - - def _glob1(self, pattern, ondisk=True, source=False, strings=False): - return self.disambiguate()._glob1(pattern, ondisk, source, strings) - - def get_subst_proxy(self): - return self.disambiguate().get_subst_proxy() - -# This is for later so we can differentiate between Entry the class and Entry -# the method of the FS class. -_classEntry = Entry - - -class LocalFS(object): - """ - This class implements an abstraction layer for operations involving - a local file system. Essentially, this wraps any function in - the os, os.path or shutil modules that we use to actually go do - anything with or to the local file system. - - Note that there's a very good chance we'll refactor this part of - the architecture in some way as we really implement the interface(s) - for remote file system Nodes. For example, the right architecture - might be to have this be a subclass instead of a base class. - Nevertheless, we're using this as a first step in that direction. - - We're not using chdir() yet because the calling subclass method - needs to use os.chdir() directly to avoid recursion. Will we - really need this one? 
- """ - #def chdir(self, path): - # return os.chdir(path) - def chmod(self, path, mode): - return os.chmod(path, mode) - def copy(self, src, dst): - return shutil.copy(src, dst) - def copy2(self, src, dst): - return shutil.copy2(src, dst) - def exists(self, path): - return os.path.exists(path) - def getmtime(self, path): - return os.path.getmtime(path) - def getsize(self, path): - return os.path.getsize(path) - def isdir(self, path): - return os.path.isdir(path) - def isfile(self, path): - return os.path.isfile(path) - def link(self, src, dst): - return os.link(src, dst) - def lstat(self, path): - return os.lstat(path) - def listdir(self, path): - return os.listdir(path) - def makedirs(self, path): - return os.makedirs(path) - def mkdir(self, path): - return os.mkdir(path) - def rename(self, old, new): - return os.rename(old, new) - def stat(self, path): - return os.stat(path) - def symlink(self, src, dst): - return os.symlink(src, dst) - def open(self, path): - return open(path) - def unlink(self, path): - return os.unlink(path) - - if hasattr(os, 'symlink'): - def islink(self, path): - return os.path.islink(path) - else: - def islink(self, path): - return 0 # no symlinks - - if hasattr(os, 'readlink'): - def readlink(self, file): - return os.readlink(file) - else: - def readlink(self, file): - return '' - - -class FS(LocalFS): - - def __init__(self, path = None): - """Initialize the Node.FS subsystem. - - The supplied path is the top of the source tree, where we - expect to find the top-level build file. If no path is - supplied, the current directory is the default. - - The path argument must be a valid absolute path. 
- """ - if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS') - - self._memo = {} - - self.Root = {} - self.SConstruct_dir = None - self.max_drift = default_max_drift - - self.Top = None - if path is None: - self.pathTop = os.getcwd() - else: - self.pathTop = path - self.defaultDrive = _my_normcase(_my_splitdrive(self.pathTop)[0]) - - self.Top = self.Dir(self.pathTop) - self.Top._path = '.' - self.Top._tpath = '.' - self._cwd = self.Top - - DirNodeInfo.fs = self - FileNodeInfo.fs = self - - def set_SConstruct_dir(self, dir): - self.SConstruct_dir = dir - - def get_max_drift(self): - return self.max_drift - - def set_max_drift(self, max_drift): - self.max_drift = max_drift - - def getcwd(self): - if hasattr(self, "_cwd"): - return self._cwd - else: - return "" - - def chdir(self, dir, change_os_dir=0): - """Change the current working directory for lookups. - If change_os_dir is true, we will also change the "real" cwd - to match. - """ - curr=self._cwd - try: - if dir is not None: - self._cwd = dir - if change_os_dir: - os.chdir(dir.get_abspath()) - except OSError: - self._cwd = curr - raise - - def get_root(self, drive): - """ - Returns the root directory for the specified drive, creating - it if necessary. - """ - drive = _my_normcase(drive) - try: - return self.Root[drive] - except KeyError: - root = RootDir(drive, self) - self.Root[drive] = root - if not drive: - self.Root[self.defaultDrive] = root - elif drive == self.defaultDrive: - self.Root[''] = root - return root - - def _lookup(self, p, directory, fsclass, create=1): - """ - The generic entry point for Node lookup with user-supplied data. - - This translates arbitrary input into a canonical Node.FS object - of the specified fsclass. The general approach for strings is - to turn it into a fully normalized absolute path and then call - the root directory's lookup_abs() method for the heavy lifting. 
- - If the path name begins with '#', it is unconditionally - interpreted relative to the top-level directory of this FS. '#' - is treated as a synonym for the top-level SConstruct directory, - much like '~' is treated as a synonym for the user's home - directory in a UNIX shell. So both '#foo' and '#/foo' refer - to the 'foo' subdirectory underneath the top-level SConstruct - directory. - - If the path name is relative, then the path is looked up relative - to the specified directory, or the current directory (self._cwd, - typically the SConscript directory) if the specified directory - is None. - """ - if isinstance(p, Base): - # It's already a Node.FS object. Make sure it's the right - # class and return. - p.must_be_same(fsclass) - return p - # str(p) in case it's something like a proxy object - p = str(p) - - if not os_sep_is_slash: - p = p.replace(OS_SEP, '/') - - if p[0:1] == '#': - # There was an initial '#', so we strip it and override - # whatever directory they may have specified with the - # top-level SConstruct directory. - p = p[1:] - directory = self.Top - - # There might be a drive letter following the - # '#'. Although it is not described in the SCons man page, - # the regression test suite explicitly tests for that - # syntax. It seems to mean the following thing: - # - # Assuming the the SCons top dir is in C:/xxx/yyy, - # '#X:/toto' means X:/xxx/yyy/toto. - # - # i.e. it assumes that the X: drive has a directory - # structure similar to the one found on drive C:. - if do_splitdrive: - drive, p = _my_splitdrive(p) - if drive: - root = self.get_root(drive) - else: - root = directory.root - else: - root = directory.root - - # We can only strip trailing after splitting the drive - # since the drive might the UNC '//' prefix. - p = p.strip('/') - - needs_normpath = needs_normpath_match(p) - - # The path is relative to the top-level SCons directory. 
- if p in ('', '.'): - p = directory.get_labspath() - else: - p = directory.get_labspath() + '/' + p - else: - if do_splitdrive: - drive, p = _my_splitdrive(p) - if drive and not p: - # This causes a naked drive letter to be treated - # as a synonym for the root directory on that - # drive. - p = '/' - else: - drive = '' - - # We can only strip trailing '/' since the drive might the - # UNC '//' prefix. - if p != '/': - p = p.rstrip('/') - - needs_normpath = needs_normpath_match(p) - - if p[0:1] == '/': - # Absolute path - root = self.get_root(drive) - else: - # This is a relative lookup or to the current directory - # (the path name is not absolute). Add the string to the - # appropriate directory lookup path, after which the whole - # thing gets normalized. - if directory: - if not isinstance(directory, Dir): - directory = self.Dir(directory) - else: - directory = self._cwd - - if p in ('', '.'): - p = directory.get_labspath() - else: - p = directory.get_labspath() + '/' + p - - if drive: - root = self.get_root(drive) - else: - root = directory.root - - if needs_normpath is not None: - # Normalize a pathname. Will return the same result for - # equivalent paths. - # - # We take advantage of the fact that we have an absolute - # path here for sure. In addition, we know that the - # components of lookup path are separated by slashes at - # this point. Because of this, this code is about 2X - # faster than calling os.path.normpath() followed by - # replacing os.sep with '/' again. - ins = p.split('/')[1:] - outs = [] - for d in ins: - if d == '..': - try: - outs.pop() - except IndexError: - pass - elif d not in ('', '.'): - outs.append(d) - p = '/' + '/'.join(outs) - - return root._lookup_abs(p, fsclass, create) - - def Entry(self, name, directory = None, create = 1): - """Look up or create a generic Entry node with the specified name. 
- If the name is a relative path (begins with ./, ../, or a file - name), then it is looked up relative to the supplied directory - node, or to the top level directory of the FS (supplied at - construction time) if no directory is supplied. - """ - return self._lookup(name, directory, Entry, create) - - def File(self, name, directory = None, create = 1): - """Look up or create a File node with the specified name. If - the name is a relative path (begins with ./, ../, or a file name), - then it is looked up relative to the supplied directory node, - or to the top level directory of the FS (supplied at construction - time) if no directory is supplied. - - This method will raise TypeError if a directory is found at the - specified path. - """ - return self._lookup(name, directory, File, create) - - def Dir(self, name, directory = None, create = True): - """Look up or create a Dir node with the specified name. If - the name is a relative path (begins with ./, ../, or a file name), - then it is looked up relative to the supplied directory node, - or to the top level directory of the FS (supplied at construction - time) if no directory is supplied. - - This method will raise TypeError if a normal file is found at the - specified path. - """ - return self._lookup(name, directory, Dir, create) - - def VariantDir(self, variant_dir, src_dir, duplicate=1): - """Link the supplied variant directory to the source directory - for purposes of building files.""" - - if not isinstance(src_dir, SCons.Node.Node): - src_dir = self.Dir(src_dir) - if not isinstance(variant_dir, SCons.Node.Node): - variant_dir = self.Dir(variant_dir) - if src_dir.is_under(variant_dir): - raise SCons.Errors.UserError("Source directory cannot be under variant directory.") - if variant_dir.srcdir: - if variant_dir.srcdir == src_dir: - return # We already did this. 
- raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)) - variant_dir.link(src_dir, duplicate) - - def Repository(self, *dirs): - """Specify Repository directories to search.""" - for d in dirs: - if not isinstance(d, SCons.Node.Node): - d = self.Dir(d) - self.Top.addRepository(d) - - def PyPackageDir(self, modulename): - r"""Locate the directory of a given python module name - - For example scons might resolve to - Windows: C:\Python27\Lib\site-packages\scons-2.5.1 - Linux: /usr/lib/scons - - This can be useful when we want to determine a toolpath based on a python module name""" - - dirpath = '' - if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] in (0,1,2,3,4)): - # Python2 Code - import imp - splitname = modulename.split('.') - srchpths = sys.path - for item in splitname: - file, path, desc = imp.find_module(item, srchpths) - if file is not None: - path = os.path.dirname(path) - srchpths = [path] - dirpath = path - else: - # Python3 Code - import importlib.util - modspec = importlib.util.find_spec(modulename) - dirpath = os.path.dirname(modspec.origin) - return self._lookup(dirpath, None, Dir, True) - - - def variant_dir_target_climb(self, orig, dir, tail): - """Create targets in corresponding variant directories - - Climb the directory tree, and look up path names - relative to any linked variant directories we find. - - Even though this loops and walks up the tree, we don't memoize - the return value because this is really only used to process - the command-line targets. 
- """ - targets = [] - message = None - fmt = "building associated VariantDir targets: %s" - start_dir = dir - while dir: - for bd in dir.variant_dirs: - if start_dir.is_under(bd): - # If already in the build-dir location, don't reflect - return [orig], fmt % str(orig) - p = os.path.join(bd._path, *tail) - targets.append(self.Entry(p)) - tail = [dir.name] + tail - dir = dir.up() - if targets: - message = fmt % ' '.join(map(str, targets)) - return targets, message - - def Glob(self, pathname, ondisk=True, source=True, strings=False, exclude=None, cwd=None): - """ - Globs - - This is mainly a shim layer - """ - if cwd is None: - cwd = self.getcwd() - return cwd.glob(pathname, ondisk, source, strings, exclude) - -class DirNodeInfo(SCons.Node.NodeInfoBase): - __slots__ = () - # This should get reset by the FS initialization. - current_version_id = 2 - - fs = None - - def str_to_node(self, s): - top = self.fs.Top - root = top.root - if do_splitdrive: - drive, s = _my_splitdrive(s) - if drive: - root = self.fs.get_root(drive) - if not os.path.isabs(s): - s = top.get_labspath() + '/' + s - return root._lookup_abs(s, Entry) - -class DirBuildInfo(SCons.Node.BuildInfoBase): - __slots__ = () - current_version_id = 2 - -glob_magic_check = re.compile('[*?[]') - -def has_glob_magic(s): - return glob_magic_check.search(s) is not None - -class Dir(Base): - """A class for directories in a file system. 
- """ - - __slots__ = ['scanner_paths', - 'cachedir_csig', - 'cachesig', - 'repositories', - 'srcdir', - 'entries', - 'searched', - '_sconsign', - 'variant_dirs', - 'root', - 'dirname', - 'on_disk_entries', - 'released_target_info', - 'contentsig'] - - NodeInfo = DirNodeInfo - BuildInfo = DirBuildInfo - - def __init__(self, name, directory, fs): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Dir') - Base.__init__(self, name, directory, fs) - self._morph() - - def _morph(self): - """Turn a file system Node (either a freshly initialized directory - object or a separate Entry object) into a proper directory object. - - Set up this directory's entries and hook it into the file - system tree. Specify that directories (this Node) don't use - signatures for calculating whether they're current. - """ - - self.repositories = [] - self.srcdir = None - - self.entries = {} - self.entries['.'] = self - self.entries['..'] = self.dir - self.cwd = self - self.searched = 0 - self._sconsign = None - self.variant_dirs = [] - self.root = self.dir.root - self.changed_since_last_build = 3 - self._func_sconsign = 1 - self._func_exists = 2 - self._func_get_contents = 2 - - self._abspath = SCons.Util.silent_intern(self.dir.entry_abspath(self.name)) - self._labspath = SCons.Util.silent_intern(self.dir.entry_labspath(self.name)) - if self.dir._path == '.': - self._path = SCons.Util.silent_intern(self.name) - else: - self._path = SCons.Util.silent_intern(self.dir.entry_path(self.name)) - if self.dir._tpath == '.': - self._tpath = SCons.Util.silent_intern(self.name) - else: - self._tpath = SCons.Util.silent_intern(self.dir.entry_tpath(self.name)) - self._path_elements = self.dir._path_elements + [self] - - # For directories, we make a difference between the directory - # 'name' and the directory 'dirname'. The 'name' attribute is - # used when we need to print the 'name' of the directory or - # when we it is used as the last part of a path. 
The 'dirname' - # is used when the directory is not the last element of the - # path. The main reason for making that distinction is that - # for RoorDir's the dirname can not be easily inferred from - # the name. For example, we have to add a '/' after a drive - # letter but not after a UNC path prefix ('//'). - self.dirname = self.name + OS_SEP - - # Don't just reset the executor, replace its action list, - # because it might have some pre-or post-actions that need to - # be preserved. - # - # But don't reset the executor if there is a non-null executor - # attached already. The existing executor might have other - # targets, in which case replacing the action list with a - # Mkdir action is a big mistake. - if not hasattr(self, 'executor'): - self.builder = get_MkdirBuilder() - self.get_executor().set_action_list(self.builder.action) - else: - # Prepend MkdirBuilder action to existing action list - l = self.get_executor().action_list - a = get_MkdirBuilder().action - l.insert(0, a) - self.get_executor().set_action_list(l) - - def diskcheck_match(self): - diskcheck_match(self, self.isfile, - "File %s found where directory expected.") - - def __clearRepositoryCache(self, duplicate=None): - """Called when we change the repository(ies) for a directory. - This clears any cached information that is invalidated by changing - the repository.""" - - for node in list(self.entries.values()): - if node != self.dir: - if node != self and isinstance(node, Dir): - node.__clearRepositoryCache(duplicate) - else: - node.clear() - try: - del node._srcreps - except AttributeError: - pass - if duplicate is not None: - node.duplicate=duplicate - - def __resetDuplicate(self, node): - if node != self: - node.duplicate = node.get_dir().duplicate - - def Entry(self, name): - """ - Looks up or creates an entry node named 'name' relative to - this directory. 
- """ - return self.fs.Entry(name, self) - - def Dir(self, name, create=True): - """ - Looks up or creates a directory node named 'name' relative to - this directory. - """ - return self.fs.Dir(name, self, create) - - def File(self, name): - """ - Looks up or creates a file node named 'name' relative to - this directory. - """ - return self.fs.File(name, self) - - def link(self, srcdir, duplicate): - """Set this directory as the variant directory for the - supplied source directory.""" - self.srcdir = srcdir - self.duplicate = duplicate - self.__clearRepositoryCache(duplicate) - srcdir.variant_dirs.append(self) - - def getRepositories(self): - """Returns a list of repositories for this directory. - """ - if self.srcdir and not self.duplicate: - return self.srcdir.get_all_rdirs() + self.repositories - return self.repositories - - @SCons.Memoize.CountMethodCall - def get_all_rdirs(self): - try: - return list(self._memo['get_all_rdirs']) - except KeyError: - pass - - result = [self] - fname = '.' - dir = self - while dir: - for rep in dir.getRepositories(): - result.append(rep.Dir(fname)) - if fname == '.': - fname = dir.name - else: - fname = dir.name + OS_SEP + fname - dir = dir.up() - - self._memo['get_all_rdirs'] = list(result) - - return result - - def addRepository(self, dir): - if dir != self and dir not in self.repositories: - self.repositories.append(dir) - dir._tpath = '.' - self.__clearRepositoryCache() - - def up(self): - return self.dir - - def _rel_path_key(self, other): - return str(other) - - @SCons.Memoize.CountDictCall(_rel_path_key) - def rel_path(self, other): - """Return a path to "other" relative to this directory. - """ - - # This complicated and expensive method, which constructs relative - # paths between arbitrary Node.FS objects, is no longer used - # by SCons itself. It was introduced to store dependency paths - # in .sconsign files relative to the target, but that ended up - # being significantly inefficient. 
- # - # We're continuing to support the method because some SConstruct - # files out there started using it when it was available, and - # we're all about backwards compatibility.. - - try: - memo_dict = self._memo['rel_path'] - except KeyError: - memo_dict = {} - self._memo['rel_path'] = memo_dict - else: - try: - return memo_dict[other] - except KeyError: - pass - - if self is other: - result = '.' - - elif other not in self._path_elements: - try: - other_dir = other.get_dir() - except AttributeError: - result = str(other) - else: - if other_dir is None: - result = other.name - else: - dir_rel_path = self.rel_path(other_dir) - if dir_rel_path == '.': - result = other.name - else: - result = dir_rel_path + OS_SEP + other.name - else: - i = self._path_elements.index(other) + 1 - - path_elems = ['..'] * (len(self._path_elements) - i) \ - + [n.name for n in other._path_elements[i:]] - - result = OS_SEP.join(path_elems) - - memo_dict[other] = result - - return result - - def get_env_scanner(self, env, kw={}): - import SCons.Defaults - return SCons.Defaults.DirEntryScanner - - def get_target_scanner(self): - import SCons.Defaults - return SCons.Defaults.DirEntryScanner - - def get_found_includes(self, env, scanner, path): - """Return this directory's implicit dependencies. - - We don't bother caching the results because the scan typically - shouldn't be requested more than once (as opposed to scanning - .h file contents, which can be requested as many times as the - files is #included by other files). - """ - if not scanner: - return [] - # Clear cached info for this Dir. If we already visited this - # directory on our walk down the tree (because we didn't know at - # that point it was being used as the source for another Node) - # then we may have calculated build signature before realizing - # we had to scan the disk. 
Now that we have to, though, we need - # to invalidate the old calculated signature so that any node - # dependent on our directory structure gets one that includes - # info about everything on disk. - self.clear() - return scanner(self, env, path) - - # - # Taskmaster interface subsystem - # - - def prepare(self): - pass - - def build(self, **kw): - """A null "builder" for directories.""" - global MkdirBuilder - if self.builder is not MkdirBuilder: - SCons.Node.Node.build(self, **kw) - - # - # - # - - def _create(self): - """Create this directory, silently and without worrying about - whether the builder is the default or not.""" - listDirs = [] - parent = self - while parent: - if parent.exists(): - break - listDirs.append(parent) - p = parent.up() - if p is None: - # Don't use while: - else: for this condition because - # if so, then parent is None and has no .path attribute. - raise SCons.Errors.StopError(parent._path) - parent = p - listDirs.reverse() - for dirnode in listDirs: - try: - # Don't call dirnode.build(), call the base Node method - # directly because we definitely *must* create this - # directory. The dirnode.build() method will suppress - # the build if it's the default builder. - SCons.Node.Node.build(dirnode) - dirnode.get_executor().nullify() - # The build() action may or may not have actually - # created the directory, depending on whether the -n - # option was used or not. Delete the _exists and - # _rexists attributes so they can be reevaluated. - dirnode.clear() - except OSError: - pass - - def multiple_side_effect_has_builder(self): - global MkdirBuilder - return self.builder is not MkdirBuilder and self.has_builder() - - def alter_targets(self): - """Return any corresponding targets in a variant directory. 
- """ - return self.fs.variant_dir_target_climb(self, self, []) - - def scanner_key(self): - """A directory does not get scanned.""" - return None - - def get_text_contents(self): - """We already emit things in text, so just return the binary - version.""" - return self.get_contents() - - def get_contents(self): - """Return content signatures and names of all our children - separated by new-lines. Ensure that the nodes are sorted.""" - return SCons.Node._get_contents_map[self._func_get_contents](self) - - def get_csig(self): - """Compute the content signature for Directory nodes. In - general, this is not needed and the content signature is not - stored in the DirNodeInfo. However, if get_contents on a Dir - node is called which has a child directory, the child - directory should return the hash of its contents.""" - contents = self.get_contents() - return SCons.Util.MD5signature(contents) - - def do_duplicate(self, src): - pass - - def is_up_to_date(self): - """If any child is not up-to-date, then this directory isn't, - either.""" - if self.builder is not MkdirBuilder and not self.exists(): - return 0 - up_to_date = SCons.Node.up_to_date - for kid in self.children(): - if kid.get_state() > up_to_date: - return 0 - return 1 - - def rdir(self): - if not self.exists(): - norm_name = _my_normcase(self.name) - for dir in self.dir.get_all_rdirs(): - try: node = dir.entries[norm_name] - except KeyError: node = dir.dir_on_disk(self.name) - if node and node.exists() and \ - (isinstance(dir, Dir) or isinstance(dir, Entry)): - return node - return self - - def sconsign(self): - """Return the .sconsign file info for this directory. 
""" - return _sconsign_map[self._func_sconsign](self) - - def srcnode(self): - """Dir has a special need for srcnode()...if we - have a srcdir attribute set, then that *is* our srcnode.""" - if self.srcdir: - return self.srcdir - return Base.srcnode(self) - - def get_timestamp(self): - """Return the latest timestamp from among our children""" - stamp = 0 - for kid in self.children(): - if kid.get_timestamp() > stamp: - stamp = kid.get_timestamp() - return stamp - - def get_abspath(self): - """Get the absolute path of the file.""" - return self._abspath - - def get_labspath(self): - """Get the absolute path of the file.""" - return self._labspath - - def get_internal_path(self): - return self._path - - def get_tpath(self): - return self._tpath - - def get_path_elements(self): - return self._path_elements - - def entry_abspath(self, name): - return self._abspath + OS_SEP + name - - def entry_labspath(self, name): - return self._labspath + '/' + name - - def entry_path(self, name): - return self._path + OS_SEP + name - - def entry_tpath(self, name): - return self._tpath + OS_SEP + name - - def entry_exists_on_disk(self, name): - """ Searches through the file/dir entries of the current - directory, and returns True if a physical entry with the given - name could be found. - - @see rentry_exists_on_disk - """ - try: - d = self.on_disk_entries - except AttributeError: - d = {} - try: - entries = os.listdir(self._abspath) - except OSError: - pass - else: - for entry in map(_my_normcase, entries): - d[entry] = True - self.on_disk_entries = d - if sys.platform == 'win32' or sys.platform == 'cygwin': - name = _my_normcase(name) - result = d.get(name) - if result is None: - # Belt-and-suspenders for Windows: check directly for - # 8.3 file names that don't show up in os.listdir(). 
- result = os.path.exists(self._abspath + OS_SEP + name) - d[name] = result - return result - else: - return name in d - - def rentry_exists_on_disk(self, name): - """ Searches through the file/dir entries of the current - *and* all its remote directories (repos), and returns - True if a physical entry with the given name could be found. - The local directory (self) gets searched first, so - repositories take a lower precedence regarding the - searching order. - - @see entry_exists_on_disk - """ - - rentry_exists = self.entry_exists_on_disk(name) - if not rentry_exists: - # Search through the repository folders - norm_name = _my_normcase(name) - for rdir in self.get_all_rdirs(): - try: - node = rdir.entries[norm_name] - if node: - rentry_exists = True - break - except KeyError: - if rdir.entry_exists_on_disk(name): - rentry_exists = True - break - return rentry_exists - - @SCons.Memoize.CountMethodCall - def srcdir_list(self): - try: - return self._memo['srcdir_list'] - except KeyError: - pass - - result = [] - - dirname = '.' - dir = self - while dir: - if dir.srcdir: - result.append(dir.srcdir.Dir(dirname)) - dirname = dir.name + OS_SEP + dirname - dir = dir.up() - - self._memo['srcdir_list'] = result - - return result - - def srcdir_duplicate(self, name): - for dir in self.srcdir_list(): - if self.is_under(dir): - # We shouldn't source from something in the build path; - # variant_dir is probably under src_dir, in which case - # we are reflecting. 
- break - if dir.entry_exists_on_disk(name): - srcnode = dir.Entry(name).disambiguate() - if self.duplicate: - node = self.Entry(name).disambiguate() - node.do_duplicate(srcnode) - return node - else: - return srcnode - return None - - def _srcdir_find_file_key(self, filename): - return filename - - @SCons.Memoize.CountDictCall(_srcdir_find_file_key) - def srcdir_find_file(self, filename): - try: - memo_dict = self._memo['srcdir_find_file'] - except KeyError: - memo_dict = {} - self._memo['srcdir_find_file'] = memo_dict - else: - try: - return memo_dict[filename] - except KeyError: - pass - - def func(node): - if (isinstance(node, File) or isinstance(node, Entry)) and \ - (node.is_derived() or node.exists()): - return node - return None - - norm_name = _my_normcase(filename) - - for rdir in self.get_all_rdirs(): - try: node = rdir.entries[norm_name] - except KeyError: node = rdir.file_on_disk(filename) - else: node = func(node) - if node: - result = (node, self) - memo_dict[filename] = result - return result - - for srcdir in self.srcdir_list(): - for rdir in srcdir.get_all_rdirs(): - try: node = rdir.entries[norm_name] - except KeyError: node = rdir.file_on_disk(filename) - else: node = func(node) - if node: - result = (File(filename, self, self.fs), srcdir) - memo_dict[filename] = result - return result - - result = (None, None) - memo_dict[filename] = result - return result - - def dir_on_disk(self, name): - if self.entry_exists_on_disk(name): - try: return self.Dir(name) - except TypeError: pass - node = self.srcdir_duplicate(name) - if isinstance(node, File): - return None - return node - - def file_on_disk(self, name): - if self.entry_exists_on_disk(name): - try: return self.File(name) - except TypeError: pass - node = self.srcdir_duplicate(name) - if isinstance(node, Dir): - return None - return node - - def walk(self, func, arg): - """ - Walk this directory tree by calling the specified function - for each directory in the tree. 
- - This behaves like the os.path.walk() function, but for in-memory - Node.FS.Dir objects. The function takes the same arguments as - the functions passed to os.path.walk(): - - func(arg, dirname, fnames) - - Except that "dirname" will actually be the directory *Node*, - not the string. The '.' and '..' entries are excluded from - fnames. The fnames list may be modified in-place to filter the - subdirectories visited or otherwise impose a specific order. - The "arg" argument is always passed to func() and may be used - in any way (or ignored, passing None is common). - """ - entries = self.entries - names = list(entries.keys()) - names.remove('.') - names.remove('..') - func(arg, self, names) - for dirname in [n for n in names if isinstance(entries[n], Dir)]: - entries[dirname].walk(func, arg) - - def glob(self, pathname, ondisk=True, source=False, strings=False, exclude=None): - """ - Returns a list of Nodes (or strings) matching a specified - pathname pattern. - - Pathname patterns follow UNIX shell semantics: * matches - any-length strings of any characters, ? matches any character, - and [] can enclose lists or ranges of characters. Matches do - not span directory separators. - - The matches take into account Repositories, returning local - Nodes if a corresponding entry exists in a Repository (either - an in-memory Node or something on disk). - - By defafult, the glob() function matches entries that exist - on-disk, in addition to in-memory Nodes. Setting the "ondisk" - argument to False (or some other non-true value) causes the glob() - function to only match in-memory Nodes. The default behavior is - to return both the on-disk and in-memory Nodes. - - The "source" argument, when true, specifies that corresponding - source Nodes must be returned if you're globbing in a build - directory (initialized with VariantDir()). The default behavior - is to return Nodes local to the VariantDir(). 
- - The "strings" argument, when true, returns the matches as strings, - not Nodes. The strings are path names relative to this directory. - - The "exclude" argument, if not None, must be a pattern or a list - of patterns following the same UNIX shell semantics. - Elements matching a least one pattern of this list will be excluded - from the result. - - The underlying algorithm is adapted from the glob.glob() function - in the Python library (but heavily modified), and uses fnmatch() - under the covers. - """ - dirname, basename = os.path.split(pathname) - if not dirname: - result = self._glob1(basename, ondisk, source, strings) - else: - if has_glob_magic(dirname): - list = self.glob(dirname, ondisk, source, False, exclude) - else: - list = [self.Dir(dirname, create=True)] - result = [] - for dir in list: - r = dir._glob1(basename, ondisk, source, strings) - if strings: - r = [os.path.join(str(dir), x) for x in r] - result.extend(r) - if exclude: - excludes = [] - excludeList = SCons.Util.flatten(exclude) - for x in excludeList: - r = self.glob(x, ondisk, source, strings) - excludes.extend(r) - result = [x for x in result if not any(fnmatch.fnmatch(str(x), str(e)) for e in SCons.Util.flatten(excludes))] - return sorted(result, key=lambda a: str(a)) - - def _glob1(self, pattern, ondisk=True, source=False, strings=False): - """ - Globs for and returns a list of entry names matching a single - pattern in this directory. - - This searches any repositories and source directories for - corresponding entries and returns a Node (or string) relative - to the current directory if an entry is found anywhere. 
- - TODO: handle pattern with no wildcard - """ - search_dir_list = self.get_all_rdirs() - for srcdir in self.srcdir_list(): - search_dir_list.extend(srcdir.get_all_rdirs()) - - selfEntry = self.Entry - names = [] - for dir in search_dir_list: - # We use the .name attribute from the Node because the keys of - # the dir.entries dictionary are normalized (that is, all upper - # case) on case-insensitive systems like Windows. - node_names = [ v.name for k, v in dir.entries.items() - if k not in ('.', '..') ] - names.extend(node_names) - if not strings: - # Make sure the working directory (self) actually has - # entries for all Nodes in repositories or variant dirs. - for name in node_names: selfEntry(name) - if ondisk: - try: - disk_names = os.listdir(dir._abspath) - except os.error: - continue - names.extend(disk_names) - if not strings: - # We're going to return corresponding Nodes in - # the local directory, so we need to make sure - # those Nodes exist. We only want to create - # Nodes for the entries that will match the - # specified pattern, though, which means we - # need to filter the list here, even though - # the overall list will also be filtered later, - # after we exit this loop. - if pattern[0] != '.': - disk_names = [x for x in disk_names if x[0] != '.'] - disk_names = fnmatch.filter(disk_names, pattern) - dirEntry = dir.Entry - for name in disk_names: - # Add './' before disk filename so that '#' at - # beginning of filename isn't interpreted. - name = './' + name - node = dirEntry(name).disambiguate() - n = selfEntry(name) - if n.__class__ != node.__class__: - n.__class__ = node.__class__ - n._morph() - - names = set(names) - if pattern[0] != '.': - names = [x for x in names if x[0] != '.'] - names = fnmatch.filter(names, pattern) - - if strings: - return names - - return [self.entries[_my_normcase(n)] for n in names] - -class RootDir(Dir): - """A class for the root directory of a file system. 
- - This is the same as a Dir class, except that the path separator - ('/' or '\\') is actually part of the name, so we don't need to - add a separator when creating the path names of entries within - this directory. - """ - - __slots__ = ('_lookupDict', ) - - def __init__(self, drive, fs): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.RootDir') - SCons.Node.Node.__init__(self) - - # Handle all the types of drives: - if drive == '': - # No drive, regular UNIX root or Windows default drive. - name = OS_SEP - dirname = OS_SEP - elif drive == '//': - # UNC path - name = UNC_PREFIX - dirname = UNC_PREFIX - else: - # Windows drive letter - name = drive - dirname = drive + OS_SEP - - # Filename with extension as it was specified when the object was - # created; to obtain filesystem path, use Python str() function - self.name = SCons.Util.silent_intern(name) - self.fs = fs #: Reference to parent Node.FS object - - self._path_elements = [self] - self.dir = self - self._func_rexists = 2 - self._func_target_from_source = 1 - self.store_info = 1 - - # Now set our paths to what we really want them to be. The - # name should already contain any necessary separators, such - # as the initial drive letter (the name) plus the directory - # separator, except for the "lookup abspath," which does not - # have the drive letter. - self._abspath = dirname - self._labspath = '' - self._path = dirname - self._tpath = dirname - self.dirname = dirname - - self._morph() - - self.duplicate = 0 - self._lookupDict = {} - - self._lookupDict[''] = self - self._lookupDict['/'] = self - self.root = self - # The // entry is necessary because os.path.normpath() - # preserves double slashes at the beginning of a path on Posix - # platforms. - if not has_unc: - self._lookupDict['//'] = self - - def _morph(self): - """Turn a file system Node (either a freshly initialized directory - object or a separate Entry object) into a proper directory object. 
- - Set up this directory's entries and hook it into the file - system tree. Specify that directories (this Node) don't use - signatures for calculating whether they're current. - """ - - self.repositories = [] - self.srcdir = None - - self.entries = {} - self.entries['.'] = self - self.entries['..'] = self.dir - self.cwd = self - self.searched = 0 - self._sconsign = None - self.variant_dirs = [] - self.changed_since_last_build = 3 - self._func_sconsign = 1 - self._func_exists = 2 - self._func_get_contents = 2 - - # Don't just reset the executor, replace its action list, - # because it might have some pre-or post-actions that need to - # be preserved. - # - # But don't reset the executor if there is a non-null executor - # attached already. The existing executor might have other - # targets, in which case replacing the action list with a - # Mkdir action is a big mistake. - if not hasattr(self, 'executor'): - self.builder = get_MkdirBuilder() - self.get_executor().set_action_list(self.builder.action) - else: - # Prepend MkdirBuilder action to existing action list - l = self.get_executor().action_list - a = get_MkdirBuilder().action - l.insert(0, a) - self.get_executor().set_action_list(l) - - - def must_be_same(self, klass): - if klass is Dir: - return - Base.must_be_same(self, klass) - - def _lookup_abs(self, p, klass, create=1): - """ - Fast (?) lookup of a *normalized* absolute path. - - This method is intended for use by internal lookups with - already-normalized path data. For general-purpose lookups, - use the FS.Entry(), FS.Dir() or FS.File() methods. - - The caller is responsible for making sure we're passed a - normalized absolute path; we merely let Python's dictionary look - up and return the One True Node.FS object for the path. - - If a Node for the specified "p" doesn't already exist, and - "create" is specified, the Node may be created after recursive - invocation to find or create the parent directory or directories. 
- """ - k = _my_normcase(p) - try: - result = self._lookupDict[k] - except KeyError: - if not create: - msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self)) - raise SCons.Errors.UserError(msg) - # There is no Node for this path name, and we're allowed - # to create it. - dir_name, file_name = p.rsplit('/',1) - dir_node = self._lookup_abs(dir_name, Dir) - result = klass(file_name, dir_node, self.fs) - - # Double-check on disk (as configured) that the Node we - # created matches whatever is out there in the real world. - result.diskcheck_match() - - self._lookupDict[k] = result - dir_node.entries[_my_normcase(file_name)] = result - dir_node.implicit = None - else: - # There is already a Node for this path name. Allow it to - # complain if we were looking for an inappropriate type. - result.must_be_same(klass) - return result - - def __str__(self): - return self._abspath - - def entry_abspath(self, name): - return self._abspath + name - - def entry_labspath(self, name): - return '/' + name - - def entry_path(self, name): - return self._path + name - - def entry_tpath(self, name): - return self._tpath + name - - def is_under(self, dir): - if self is dir: - return 1 - else: - return 0 - - def up(self): - return None - - def get_dir(self): - return None - - def src_builder(self): - return _null - - -class FileNodeInfo(SCons.Node.NodeInfoBase): - __slots__ = ('csig', 'timestamp', 'size') - current_version_id = 2 - - field_list = ['csig', 'timestamp', 'size'] - - # This should get reset by the FS initialization. - fs = None - - def str_to_node(self, s): - top = self.fs.Top - root = top.root - if do_splitdrive: - drive, s = _my_splitdrive(s) - if drive: - root = self.fs.get_root(drive) - if not os.path.isabs(s): - s = top.get_labspath() + '/' + s - return root._lookup_abs(s, Entry) - - def __getstate__(self): - """ - Return all fields that shall be pickled. Walk the slots in the class - hierarchy and add those to the state dictionary. 
If a '__dict__' slot is - available, copy all entries to the dictionary. Also include the version - id, which is fixed for all instances of a class. - """ - state = getattr(self, '__dict__', {}).copy() - for obj in type(self).mro(): - for name in getattr(obj, '__slots__', ()): - if hasattr(self, name): - state[name] = getattr(self, name) - - state['_version_id'] = self.current_version_id - try: - del state['__weakref__'] - except KeyError: - pass - - return state - - def __setstate__(self, state): - """ - Restore the attributes from a pickled state. - """ - # TODO check or discard version - del state['_version_id'] - for key, value in state.items(): - if key not in ('__weakref__',): - setattr(self, key, value) - - def __eq__(self, other): - return self.csig == other.csig and self.timestamp == other.timestamp and self.size == other.size - - def __ne__(self, other): - return not self.__eq__(other) - - -class FileBuildInfo(SCons.Node.BuildInfoBase): - """ - This is info loaded from sconsign. - - Attributes unique to FileBuildInfo: - dependency_map : Caches file->csig mapping - for all dependencies. Currently this is only used when using - MD5-timestamp decider. - It's used to ensure that we copy the correct - csig from previous build to be written to .sconsign when current build - is done. Previously the matching of csig to file was strictly by order - they appeared in bdepends, bsources, or bimplicit, and so a change in order - or count of any of these could yield writing wrong csig, and then false positive - rebuilds - """ - __slots__ = ['dependency_map', ] - current_version_id = 2 - - def __setattr__(self, key, value): - - # If any attributes are changed in FileBuildInfo, we need to - # invalidate the cached map of file name to content signature - # heald in dependency_map. 
Currently only used with - # MD5-timestamp decider - if key != 'dependency_map' and hasattr(self, 'dependency_map'): - del self.dependency_map - - return super(FileBuildInfo, self).__setattr__(key, value) - - def convert_to_sconsign(self): - """ - Converts this FileBuildInfo object for writing to a .sconsign file - - This replaces each Node in our various dependency lists with its - usual string representation: relative to the top-level SConstruct - directory, or an absolute path if it's outside. - """ - if os_sep_is_slash: - node_to_str = str - else: - def node_to_str(n): - try: - s = n.get_internal_path() - except AttributeError: - s = str(n) - else: - s = s.replace(OS_SEP, '/') - return s - for attr in ['bsources', 'bdepends', 'bimplicit']: - try: - val = getattr(self, attr) - except AttributeError: - pass - else: - setattr(self, attr, list(map(node_to_str, val))) - - def convert_from_sconsign(self, dir, name): - """ - Converts a newly-read FileBuildInfo object for in-SCons use - - For normal up-to-date checking, we don't have any conversion to - perform--but we're leaving this method here to make that clear. - """ - pass - - def prepare_dependencies(self): - """ - Prepares a FileBuildInfo object for explaining what changed - - The bsources, bdepends and bimplicit lists have all been - stored on disk as paths relative to the top-level SConstruct - directory. Convert the strings to actual Nodes (for use by the - --debug=explain code and --implicit-cache). 
- """ - attrs = [ - ('bsources', 'bsourcesigs'), - ('bdepends', 'bdependsigs'), - ('bimplicit', 'bimplicitsigs'), - ] - for (nattr, sattr) in attrs: - try: - strings = getattr(self, nattr) - nodeinfos = getattr(self, sattr) - except AttributeError: - continue - if strings is None or nodeinfos is None: - continue - nodes = [] - for s, ni in zip(strings, nodeinfos): - if not isinstance(s, SCons.Node.Node): - s = ni.str_to_node(s) - nodes.append(s) - setattr(self, nattr, nodes) - - def format(self, names=0): - result = [] - bkids = self.bsources + self.bdepends + self.bimplicit - bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs - for bkid, bkidsig in zip(bkids, bkidsigs): - result.append(str(bkid) + ': ' + - ' '.join(bkidsig.format(names=names))) - if not hasattr(self,'bact'): - self.bact = "none" - result.append('%s [%s]' % (self.bactsig, self.bact)) - return '\n'.join(result) - - -class File(Base): - """A class for files in a file system. - """ - - __slots__ = ['scanner_paths', - 'cachedir_csig', - 'cachesig', - 'repositories', - 'srcdir', - 'entries', - 'searched', - '_sconsign', - 'variant_dirs', - 'root', - 'dirname', - 'on_disk_entries', - 'released_target_info', - 'contentsig'] - - NodeInfo = FileNodeInfo - BuildInfo = FileBuildInfo - - md5_chunksize = 64 - - def diskcheck_match(self): - diskcheck_match(self, self.isdir, - "Directory %s found where file expected.") - - def __init__(self, name, directory, fs): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.File') - Base.__init__(self, name, directory, fs) - self._morph() - - def Entry(self, name): - """Create an entry node named 'name' relative to - the directory of this file.""" - return self.dir.Entry(name) - - def Dir(self, name, create=True): - """Create a directory node named 'name' relative to - the directory of this file.""" - return self.dir.Dir(name, create=create) - - def Dirs(self, pathlist): - """Create a list of directories relative to the SConscript - 
directory of this file.""" - return [self.Dir(p) for p in pathlist] - - def File(self, name): - """Create a file node named 'name' relative to - the directory of this file.""" - return self.dir.File(name) - - def _morph(self): - """Turn a file system node into a File object.""" - self.scanner_paths = {} - if not hasattr(self, '_local'): - self._local = 0 - if not hasattr(self, 'released_target_info'): - self.released_target_info = False - - self.store_info = 1 - self._func_exists = 4 - self._func_get_contents = 3 - - # Initialize this Node's decider function to decide_source() because - # every file is a source file until it has a Builder attached... - self.changed_since_last_build = 4 - - # If there was already a Builder set on this entry, then - # we need to make sure we call the target-decider function, - # not the source-decider. Reaching in and doing this by hand - # is a little bogus. We'd prefer to handle this by adding - # an Entry.builder_set() method that disambiguates like the - # other methods, but that starts running into problems with the - # fragile way we initialize Dir Nodes with their Mkdir builders, - # yet still allow them to be overridden by the user. Since it's - # not clear right now how to fix that, stick with what works - # until it becomes clear... - if self.has_builder(): - self.changed_since_last_build = 5 - - def scanner_key(self): - return self.get_suffix() - - def get_contents(self): - return SCons.Node._get_contents_map[self._func_get_contents](self) - - def get_text_contents(self): - """ - This attempts to figure out what the encoding of the text is - based upon the BOM bytes, and then decodes the contents so that - it's a valid python string. - """ - contents = self.get_contents() - # The behavior of various decode() methods and functions - # w.r.t. the initial BOM bytes is different for different - # encodings and/or Python versions. 
('utf-8' does not strip - # them, but has a 'utf-8-sig' which does; 'utf-16' seems to - # strip them; etc.) Just sidestep all the complication by - # explicitly stripping the BOM before we decode(). - if contents[:len(codecs.BOM_UTF8)] == codecs.BOM_UTF8: - return contents[len(codecs.BOM_UTF8):].decode('utf-8') - if contents[:len(codecs.BOM_UTF16_LE)] == codecs.BOM_UTF16_LE: - return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le') - if contents[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: - return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be') - try: - return contents.decode('utf-8') - except UnicodeDecodeError as e: - try: - return contents.decode('latin-1') - except UnicodeDecodeError as e: - return contents.decode('utf-8', error='backslashreplace') - - - def get_content_hash(self): - """ - Compute and return the MD5 hash for this file. - """ - if not self.rexists(): - return SCons.Util.MD5signature('') - fname = self.rfile().get_abspath() - try: - cs = SCons.Util.MD5filesignature(fname, - chunksize=SCons.Node.FS.File.md5_chunksize*1024) - except EnvironmentError as e: - if not e.filename: - e.filename = fname - raise - return cs - - @SCons.Memoize.CountMethodCall - def get_size(self): - try: - return self._memo['get_size'] - except KeyError: - pass - - if self.rexists(): - size = self.rfile().getsize() - else: - size = 0 - - self._memo['get_size'] = size - - return size - - @SCons.Memoize.CountMethodCall - def get_timestamp(self): - try: - return self._memo['get_timestamp'] - except KeyError: - pass - - if self.rexists(): - timestamp = self.rfile().getmtime() - else: - timestamp = 0 - - self._memo['get_timestamp'] = timestamp - - return timestamp - - convert_copy_attrs = [ - 'bsources', - 'bimplicit', - 'bdepends', - 'bact', - 'bactsig', - 'ninfo', - ] - - - convert_sig_attrs = [ - 'bsourcesigs', - 'bimplicitsigs', - 'bdependsigs', - ] - - def convert_old_entry(self, old_entry): - # Convert a .sconsign entry from before the Big Signature - 
# Refactoring, doing what we can to convert its information - # to the new .sconsign entry format. - # - # The old format looked essentially like this: - # - # BuildInfo - # .ninfo (NodeInfo) - # .bsig - # .csig - # .timestamp - # .size - # .bsources - # .bsourcesigs ("signature" list) - # .bdepends - # .bdependsigs ("signature" list) - # .bimplicit - # .bimplicitsigs ("signature" list) - # .bact - # .bactsig - # - # The new format looks like this: - # - # .ninfo (NodeInfo) - # .bsig - # .csig - # .timestamp - # .size - # .binfo (BuildInfo) - # .bsources - # .bsourcesigs (NodeInfo list) - # .bsig - # .csig - # .timestamp - # .size - # .bdepends - # .bdependsigs (NodeInfo list) - # .bsig - # .csig - # .timestamp - # .size - # .bimplicit - # .bimplicitsigs (NodeInfo list) - # .bsig - # .csig - # .timestamp - # .size - # .bact - # .bactsig - # - # The basic idea of the new structure is that a NodeInfo always - # holds all available information about the state of a given Node - # at a certain point in time. The various .b*sigs lists can just - # be a list of pointers to the .ninfo attributes of the different - # dependent nodes, without any copying of information until it's - # time to pickle it for writing out to a .sconsign file. - # - # The complicating issue is that the *old* format only stored one - # "signature" per dependency, based on however the *last* build - # was configured. We don't know from just looking at it whether - # it was a build signature, a content signature, or a timestamp - # "signature". Since we no longer use build signatures, the - # best we can do is look at the length and if it's thirty two, - # assume that it was (or might have been) a content signature. - # If it was actually a build signature, then it will cause a - # rebuild anyway when it doesn't match the new content signature, - # but that's probably the best we can do. 
- import SCons.SConsign - new_entry = SCons.SConsign.SConsignEntry() - new_entry.binfo = self.new_binfo() - binfo = new_entry.binfo - for attr in self.convert_copy_attrs: - try: - value = getattr(old_entry, attr) - except AttributeError: - continue - setattr(binfo, attr, value) - delattr(old_entry, attr) - for attr in self.convert_sig_attrs: - try: - sig_list = getattr(old_entry, attr) - except AttributeError: - continue - value = [] - for sig in sig_list: - ninfo = self.new_ninfo() - if len(sig) == 32: - ninfo.csig = sig - else: - ninfo.timestamp = sig - value.append(ninfo) - setattr(binfo, attr, value) - delattr(old_entry, attr) - return new_entry - - @SCons.Memoize.CountMethodCall - def get_stored_info(self): - try: - return self._memo['get_stored_info'] - except KeyError: - pass - - try: - sconsign_entry = self.dir.sconsign().get_entry(self.name) - except (KeyError, EnvironmentError): - import SCons.SConsign - sconsign_entry = SCons.SConsign.SConsignEntry() - sconsign_entry.binfo = self.new_binfo() - sconsign_entry.ninfo = self.new_ninfo() - else: - if isinstance(sconsign_entry, FileBuildInfo): - # This is a .sconsign file from before the Big Signature - # Refactoring; convert it as best we can. - sconsign_entry = self.convert_old_entry(sconsign_entry) - try: - delattr(sconsign_entry.ninfo, 'bsig') - except AttributeError: - pass - - self._memo['get_stored_info'] = sconsign_entry - - return sconsign_entry - - def get_stored_implicit(self): - binfo = self.get_stored_info().binfo - binfo.prepare_dependencies() - try: return binfo.bimplicit - except AttributeError: return None - - def rel_path(self, other): - return self.dir.rel_path(other) - - def _get_found_includes_key(self, env, scanner, path): - return (id(env), id(scanner), path) - - @SCons.Memoize.CountDictCall(_get_found_includes_key) - def get_found_includes(self, env, scanner, path): - """Return the included implicit dependencies in this file. 
- Cache results so we only scan the file once per path - regardless of how many times this information is requested. - """ - memo_key = (id(env), id(scanner), path) - try: - memo_dict = self._memo['get_found_includes'] - except KeyError: - memo_dict = {} - self._memo['get_found_includes'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - - if scanner: - result = [n.disambiguate() for n in scanner(self, env, path)] - else: - result = [] - - memo_dict[memo_key] = result - - return result - - def _createDir(self): - # ensure that the directories for this node are - # created. - self.dir._create() - - def push_to_cache(self): - """Try to push the node into a cache - """ - # This should get called before the Nodes' .built() method is - # called, which would clear the build signature if the file has - # a source scanner. - # - # We have to clear the local memoized values *before* we push - # the node to cache so that the memoization of the self.exists() - # return value doesn't interfere. - if self.nocache: - return - self.clear_memoized_values() - if self.exists(): - self.get_build_env().get_CacheDir().push(self) - - def retrieve_from_cache(self): - """Try to retrieve the node's content from a cache - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). - - Returns true if the node was successfully retrieved. 
- """ - if self.nocache: - return None - if not self.is_derived(): - return None - return self.get_build_env().get_CacheDir().retrieve(self) - - def visited(self): - if self.exists() and self.executor is not None: - self.get_build_env().get_CacheDir().push_if_forced(self) - - ninfo = self.get_ninfo() - - csig = self.get_max_drift_csig() - if csig: - ninfo.csig = csig - - ninfo.timestamp = self.get_timestamp() - ninfo.size = self.get_size() - - if not self.has_builder(): - # This is a source file, but it might have been a target file - # in another build that included more of the DAG. Copy - # any build information that's stored in the .sconsign file - # into our binfo object so it doesn't get lost. - old = self.get_stored_info() - self.get_binfo().merge(old.binfo) - - SCons.Node.store_info_map[self.store_info](self) - - def release_target_info(self): - """Called just after this node has been marked - up-to-date or was built completely. - - This is where we try to release as many target node infos - as possible for clean builds and update runs, in order - to minimize the overall memory consumption. - - We'd like to remove a lot more attributes like self.sources - and self.sources_set, but they might get used - in a next build step. For example, during configuration - the source files for a built E{*}.o file are used to figure out - which linker to use for the resulting Program (gcc vs. g++)! - That's why we check for the 'keep_targetinfo' attribute, - config Nodes and the Interactive mode just don't allow - an early release of most variables. - - In the same manner, we can't simply remove the self.attributes - here. The smart linking relies on the shared flag, and some - parts of the java Tool use it to transport information - about nodes... 
- - @see: built() and Node.release_target_info() - """ - if (self.released_target_info or SCons.Node.interactive): - return - - if not hasattr(self.attributes, 'keep_targetinfo'): - # Cache some required values, before releasing - # stuff like env, executor and builder... - self.changed(allowcache=True) - self.get_contents_sig() - self.get_build_env() - # Now purge unneeded stuff to free memory... - self.executor = None - self._memo.pop('rfile', None) - self.prerequisites = None - # Cleanup lists, but only if they're empty - if not len(self.ignore_set): - self.ignore_set = None - if not len(self.implicit_set): - self.implicit_set = None - if not len(self.depends_set): - self.depends_set = None - if not len(self.ignore): - self.ignore = None - if not len(self.depends): - self.depends = None - # Mark this node as done, we only have to release - # the memory once... - self.released_target_info = True - - def find_src_builder(self): - if self.rexists(): - return None - scb = self.dir.src_builder() - if scb is _null: - scb = None - if scb is not None: - try: - b = self.builder - except AttributeError: - b = None - if b is None: - self.builder_set(scb) - return scb - - def has_src_builder(self): - """Return whether this Node has a source builder or not. - - If this Node doesn't have an explicit source code builder, this - is where we figure out, on the fly, if there's a transparent - source code builder for it. - - Note that if we found a source builder, we also set the - self.builder attribute, so that all of the methods that actually - *build* this file don't have to do anything different. - """ - try: - scb = self.sbuilder - except AttributeError: - scb = self.sbuilder = self.find_src_builder() - return scb is not None - - def alter_targets(self): - """Return any corresponding targets in a variant directory. 
- """ - if self.is_derived(): - return [], None - return self.fs.variant_dir_target_climb(self, self.dir, [self.name]) - - def _rmv_existing(self): - self.clear_memoized_values() - if SCons.Node.print_duplicate: - print("dup: removing existing target {}".format(self)) - e = Unlink(self, [], None) - if isinstance(e, SCons.Errors.BuildError): - raise e - - # - # Taskmaster interface subsystem - # - - def make_ready(self): - self.has_src_builder() - self.get_binfo() - - def prepare(self): - """Prepare for this file to be created.""" - SCons.Node.Node.prepare(self) - - if self.get_state() != SCons.Node.up_to_date: - if self.exists(): - if self.is_derived() and not self.precious: - self._rmv_existing() - else: - try: - self._createDir() - except SCons.Errors.StopError as drive: - raise SCons.Errors.StopError("No drive `{}' for target `{}'.".format(drive, self)) - - # - # - # - - def remove(self): - """Remove this file.""" - if self.exists() or self.islink(): - self.fs.unlink(self.get_internal_path()) - return 1 - return None - - def do_duplicate(self, src): - self._createDir() - if SCons.Node.print_duplicate: - print("dup: relinking variant '{}' from '{}'".format(self, src)) - Unlink(self, None, None) - e = Link(self, src, None) - if isinstance(e, SCons.Errors.BuildError): - raise SCons.Errors.StopError("Cannot duplicate `{}' in `{}': {}.".format(src.get_internal_path(), self.dir._path, e.errstr)) - self.linked = 1 - # The Link() action may or may not have actually - # created the file, depending on whether the -n - # option was used or not. Delete the _exists and - # _rexists attributes so they can be reevaluated. 
- self.clear() - - @SCons.Memoize.CountMethodCall - def exists(self): - try: - return self._memo['exists'] - except KeyError: - pass - result = SCons.Node._exists_map[self._func_exists](self) - self._memo['exists'] = result - return result - - # - # SIGNATURE SUBSYSTEM - # - - def get_max_drift_csig(self): - """ - Returns the content signature currently stored for this node - if it's been unmodified longer than the max_drift value, or the - max_drift value is 0. Returns None otherwise. - """ - old = self.get_stored_info() - mtime = self.get_timestamp() - - max_drift = self.fs.max_drift - if max_drift > 0: - if (time.time() - mtime) > max_drift: - try: - n = old.ninfo - if n.timestamp and n.csig and n.timestamp == mtime: - return n.csig - except AttributeError: - pass - elif max_drift == 0: - try: - return old.ninfo.csig - except AttributeError: - pass - - return None - - def get_csig(self): - """ - Generate a node's content signature, the digested signature - of its content. - - node - the node - cache - alternate node to use for the signature cache - returns - the content signature - """ - ninfo = self.get_ninfo() - try: - return ninfo.csig - except AttributeError: - pass - - csig = self.get_max_drift_csig() - if csig is None: - - try: - if self.get_size() < SCons.Node.FS.File.md5_chunksize: - contents = self.get_contents() - else: - csig = self.get_content_hash() - except IOError: - # This can happen if there's actually a directory on-disk, - # which can be the case if they've disabled disk checks, - # or if an action with a File target actually happens to - # create a same-named directory by mistake. - csig = '' - else: - if not csig: - csig = SCons.Util.MD5signature(contents) - - ninfo.csig = csig - - return csig - - # - # DECISION SUBSYSTEM - # - - def builder_set(self, builder): - SCons.Node.Node.builder_set(self, builder) - self.changed_since_last_build = 5 - - def built(self): - """Called just after this File node is successfully built. 
- - Just like for 'release_target_info' we try to release - some more target node attributes in order to minimize the - overall memory consumption. - - @see: release_target_info - """ - - SCons.Node.Node.built(self) - - if (not SCons.Node.interactive and - not hasattr(self.attributes, 'keep_targetinfo')): - # Ensure that the build infos get computed and cached... - SCons.Node.store_info_map[self.store_info](self) - # ... then release some more variables. - self._specific_sources = False - self._labspath = None - self._save_str() - self.cwd = None - - self.scanner_paths = None - - def changed(self, node=None, allowcache=False): - """ - Returns if the node is up-to-date with respect to the BuildInfo - stored last time it was built. - - For File nodes this is basically a wrapper around Node.changed(), - but we allow the return value to get cached after the reference - to the Executor got released in release_target_info(). - - @see: Node.changed() - """ - if node is None: - try: - return self._memo['changed'] - except KeyError: - pass - - has_changed = SCons.Node.Node.changed(self, node) - if allowcache: - self._memo['changed'] = has_changed - return has_changed - - def changed_content(self, target, prev_ni, repo_node=None): - cur_csig = self.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - def changed_state(self, target, prev_ni, repo_node=None): - return self.state != SCons.Node.up_to_date - - - # Caching node -> string mapping for the below method - __dmap_cache = {} - __dmap_sig_cache = {} - - - def _build_dependency_map(self, binfo): - """ - Build mapping from file -> signature - - Args: - self - self - binfo - buildinfo from node being considered - - Returns: - dictionary of file->signature mappings - """ - - # For an "empty" binfo properties like bsources - # do not exist: check this to avoid exception. 
- if (len(binfo.bsourcesigs) + len(binfo.bdependsigs) + \ - len(binfo.bimplicitsigs)) == 0: - return {} - - binfo.dependency_map = { child:signature for child, signature in zip(chain(binfo.bsources, binfo.bdepends, binfo.bimplicit), - chain(binfo.bsourcesigs, binfo.bdependsigs, binfo.bimplicitsigs))} - - return binfo.dependency_map - - # @profile - def _add_strings_to_dependency_map(self, dmap): - """ - In the case comparing node objects isn't sufficient, we'll add the strings for the nodes to the dependency map - :return: - """ - - first_string = str(next(iter(dmap))) - - # print("DMAP:%s"%id(dmap)) - if first_string not in dmap: - string_dict = {str(child): signature for child, signature in dmap.items()} - dmap.update(string_dict) - return dmap - - def _get_previous_signatures(self, dmap): - """ - Return a list of corresponding csigs from previous - build in order of the node/files in children. - - Args: - self - self - dmap - Dictionary of file -> csig - - Returns: - List of csigs for provided list of children - """ - prev = [] - # MD5_TIMESTAMP_DEBUG = False - - if len(dmap) == 0: - if MD5_TIMESTAMP_DEBUG: print("Nothing dmap shortcutting") - return None - elif MD5_TIMESTAMP_DEBUG: print("len(dmap):%d"%len(dmap)) - - - # First try retrieving via Node - if MD5_TIMESTAMP_DEBUG: print("Checking if self is in map:%s id:%s type:%s"%(str(self), id(self), type(self))) - df = dmap.get(self, False) - if df: - return df - - # Now check if self's repository file is in map. - rf = self.rfile() - if MD5_TIMESTAMP_DEBUG: print("Checking if self.rfile is in map:%s id:%s type:%s"%(str(rf), id(rf), type(rf))) - rfm = dmap.get(rf, False) - if rfm: - return rfm - - # get default string for node and then also string swapping os.altsep for os.sep (/ for \) - c_strs = [str(self)] - - if os.altsep: - c_strs.append(c_strs[0].replace(os.sep, os.altsep)) - - # In some cases the dependency_maps' keys are already strings check. - # Check if either string is now in dmap. 
- for s in c_strs: - if MD5_TIMESTAMP_DEBUG: print("Checking if str(self) is in map :%s" % s) - df = dmap.get(s, False) - if df: - return df - - # Strings don't exist in map, add them and try again - # If there are no strings in this dmap, then add them. - # This may not be necessary, we could walk the nodes in the dmap and check each string - # rather than adding ALL the strings to dmap. In theory that would be n/2 vs 2n str() calls on node - # if not dmap.has_strings: - dmap = self._add_strings_to_dependency_map(dmap) - - # In some cases the dependency_maps' keys are already strings check. - # Check if either string is now in dmap. - for s in c_strs: - if MD5_TIMESTAMP_DEBUG: print("Checking if str(self) is in map (now with strings) :%s" % s) - df = dmap.get(s, False) - if df: - return df - - # Lastly use nodes get_path() to generate string and see if that's in dmap - if not df: - try: - # this should yield a path which matches what's in the sconsign - c_str = self.get_path() - if os.altsep: - c_str = c_str.replace(os.sep, os.altsep) - - if MD5_TIMESTAMP_DEBUG: print("Checking if self.get_path is in map (now with strings) :%s" % s) - - df = dmap.get(c_str, None) - - except AttributeError as e: - raise FileBuildInfoFileToCsigMappingError("No mapping from file name to content signature for :%s"%c_str) - - return df - - def changed_timestamp_then_content(self, target, prev_ni, node=None): - """ - Used when decider for file is Timestamp-MD5 - - NOTE: If the timestamp hasn't changed this will skip md5'ing the - file and just copy the prev_ni provided. If the prev_ni - is wrong. It will propagate it. - See: https://github.com/SCons/scons/issues/2980 - - Args: - self - dependency - target - target - prev_ni - The NodeInfo object loaded from previous builds .sconsign - node - Node instance. Check this node for file existence/timestamp - if specified. - - Returns: - Boolean - Indicates if node(File) has changed. 
- """ - - if node is None: - node = self - # Now get sconsign name -> csig map and then get proper prev_ni if possible - bi = node.get_stored_info().binfo - rebuilt = False - try: - dependency_map = bi.dependency_map - except AttributeError as e: - dependency_map = self._build_dependency_map(bi) - rebuilt = True - - if len(dependency_map) == 0: - # If there's no dependency map, there's no need to find the - # prev_ni as there aren't any - # shortcut the rest of the logic - if MD5_TIMESTAMP_DEBUG: print("Skipping checks len(dmap)=0") - - # We still need to get the current file's csig - # This should be slightly faster than calling self.changed_content(target, new_prev_ni) - self.get_csig() - return True - - new_prev_ni = self._get_previous_signatures(dependency_map) - new = self.changed_timestamp_match(target, new_prev_ni) - - if MD5_TIMESTAMP_DEBUG: - old = self.changed_timestamp_match(target, prev_ni) - - if old != new: - print("Mismatch self.changed_timestamp_match(%s, prev_ni) old:%s new:%s"%(str(target), old, new)) - new_prev_ni = self._get_previous_signatures(dependency_map) - - if not new: - try: - # NOTE: We're modifying the current node's csig in a query. 
- self.get_ninfo().csig = new_prev_ni.csig - except AttributeError: - pass - return False - return self.changed_content(target, new_prev_ni) - - def changed_timestamp_newer(self, target, prev_ni, repo_node=None): - try: - return self.get_timestamp() > target.get_timestamp() - except AttributeError: - return 1 - - def changed_timestamp_match(self, target, prev_ni, repo_node=None): - """ - Return True if the timestamps don't match or if there is no previous timestamp - :param target: - :param prev_ni: Information about the node from the previous build - :return: - """ - try: - return self.get_timestamp() != prev_ni.timestamp - except AttributeError: - return 1 - - def is_up_to_date(self): - """Check for whether the Node is current - In all cases self is the target we're checking to see if it's up to date - """ - - T = 0 - if T: Trace('is_up_to_date(%s):' % self) - if not self.exists(): - if T: Trace(' not self.exists():') - # The file (always a target) doesn't exist locally... - r = self.rfile() - if r != self: - # ...but there is one (always a target) in a Repository... - if not self.changed(r): - if T: Trace(' changed(%s):' % r) - # ...and it's even up-to-date... - if self._local: - # ...and they'd like a local copy. 
- e = LocalCopy(self, r, None) - if isinstance(e, SCons.Errors.BuildError): - # Likely this should be re-raising exception e - # (which would be BuildError) - raise e - SCons.Node.store_info_map[self.store_info](self) - if T: Trace(' 1\n') - return 1 - self.changed() - if T: Trace(' None\n') - return None - else: - r = self.changed() - if T: Trace(' self.exists(): %s\n' % r) - return not r - - @SCons.Memoize.CountMethodCall - def rfile(self): - try: - return self._memo['rfile'] - except KeyError: - pass - result = self - if not self.exists(): - norm_name = _my_normcase(self.name) - for repo_dir in self.dir.get_all_rdirs(): - try: - node = repo_dir.entries[norm_name] - except KeyError: - node = repo_dir.file_on_disk(self.name) - - if node and node.exists() and \ - (isinstance(node, File) or isinstance(node, Entry) - or not node.is_derived()): - result = node - # Copy over our local attributes to the repository - # Node so we identify shared object files in the - # repository and don't assume they're static. - # - # This isn't perfect; the attribute would ideally - # be attached to the object in the repository in - # case it was built statically in the repository - # and we changed it to shared locally, but that's - # rarely the case and would only occur if you - # intentionally used the same suffix for both - # shared and static objects anyway. So this - # should work well in practice. 
- result.attributes = self.attributes - break - self._memo['rfile'] = result - return result - - def find_repo_file(self): - """ - For this node, find if there exists a corresponding file in one or more repositories - :return: list of corresponding files in repositories - """ - retvals = [] - - norm_name = _my_normcase(self.name) - for repo_dir in self.dir.get_all_rdirs(): - try: - node = repo_dir.entries[norm_name] - except KeyError: - node = repo_dir.file_on_disk(self.name) - - if node and node.exists() and \ - (isinstance(node, File) or isinstance(node, Entry) \ - or not node.is_derived()): - retvals.append(node) - - return retvals - - - def rstr(self): - return str(self.rfile()) - - def get_cachedir_csig(self): - """ - Fetch a Node's content signature for purposes of computing - another Node's cachesig. - - This is a wrapper around the normal get_csig() method that handles - the somewhat obscure case of using CacheDir with the -n option. - Any files that don't exist would normally be "built" by fetching - them from the cache, but the normal get_csig() method will try - to open up the local file, which doesn't exist because the -n - option meant we didn't actually pull the file from cachedir. - But since the file *does* actually exist in the cachedir, we - can use its contents for the csig. - """ - try: - return self.cachedir_csig - except AttributeError: - pass - - cache = self.get_build_env().get_CacheDir() - cachedir, cachefile = cache.cachepath(self) - if not self.exists() and cachefile and os.path.exists(cachefile): - self.cachedir_csig = cache.get_cachedir_csig(self) - else: - self.cachedir_csig = self.get_csig() - return self.cachedir_csig - - def get_contents_sig(self): - """ - A helper method for get_cachedir_bsig. - - It computes and returns the signature for this - node's contents. 
- """ - - try: - return self.contentsig - except AttributeError: - pass - - executor = self.get_executor() - - result = self.contentsig = SCons.Util.MD5signature(executor.get_contents()) - return result - - def get_cachedir_bsig(self): - """ - Return the signature for a cached file, including - its children. - - It adds the path of the cached file to the cache signature, - because multiple targets built by the same action will all - have the same build signature, and we have to differentiate - them somehow. - - Signature should normally be string of hex digits. - """ - try: - return self.cachesig - except AttributeError: - pass - - # Collect signatures for all children - children = self.children() - sigs = [n.get_cachedir_csig() for n in children] - - # Append this node's signature... - sigs.append(self.get_contents_sig()) - - # ...and it's path - sigs.append(self.get_internal_path()) - - # Merge this all into a single signature - result = self.cachesig = SCons.Util.MD5collect(sigs) - return result - -default_fs = None - -def get_default_fs(): - global default_fs - if not default_fs: - default_fs = FS() - return default_fs - -class FileFinder(object): - """ - """ - - def __init__(self): - self._memo = {} - - def filedir_lookup(self, p, fd=None): - """ - A helper method for find_file() that looks up a directory for - a file we're trying to find. This only creates the Dir Node if - it exists on-disk, since if the directory doesn't exist we know - we won't find any files in it... :-) - - It would be more compact to just use this as a nested function - with a default keyword argument (see the commented-out version - below), but that doesn't work unless you have nested scopes, - so we define it here just so this work under Python 1.5.2. 
- """ - if fd is None: - fd = self.default_filedir - dir, name = os.path.split(fd) - drive, d = _my_splitdrive(dir) - if not name and d[:1] in ('/', OS_SEP): - #return p.fs.get_root(drive).dir_on_disk(name) - return p.fs.get_root(drive) - if dir: - p = self.filedir_lookup(p, dir) - if not p: - return None - norm_name = _my_normcase(name) - try: - node = p.entries[norm_name] - except KeyError: - return p.dir_on_disk(name) - if isinstance(node, Dir): - return node - if isinstance(node, Entry): - node.must_be_same(Dir) - return node - return None - - def _find_file_key(self, filename, paths, verbose=None): - return (filename, paths) - - @SCons.Memoize.CountDictCall(_find_file_key) - def find_file(self, filename, paths, verbose=None): - """ - Find a node corresponding to either a derived file or a file that exists already. - - Only the first file found is returned, and none is returned if no file is found. - - filename: A filename to find - paths: A list of directory path *nodes* to search in. Can be represented as a list, a tuple, or a callable that is called with no arguments and returns the list or tuple. - - returns The node created from the found file. - - """ - memo_key = self._find_file_key(filename, paths) - try: - memo_dict = self._memo['find_file'] - except KeyError: - memo_dict = {} - self._memo['find_file'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - - if verbose and not callable(verbose): - if not SCons.Util.is_String(verbose): - verbose = "find_file" - _verbose = u' %s: ' % verbose - verbose = lambda s: sys.stdout.write(_verbose + s) - - filedir, filename = os.path.split(filename) - if filedir: - self.default_filedir = filedir - paths = [_f for _f in map(self.filedir_lookup, paths) if _f] - - result = None - for dir in paths: - if verbose: - verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) - node, d = dir.srcdir_find_file(filename) - if node: - if verbose: - verbose("... 
FOUND '%s' in '%s'\n" % (filename, d)) - result = node - break - - memo_dict[memo_key] = result - - return result - -find_file = FileFinder().find_file - - -def invalidate_node_memos(targets): - """ - Invalidate the memoized values of all Nodes (files or directories) - that are associated with the given entries. Has been added to - clear the cache of nodes affected by a direct execution of an - action (e.g. Delete/Copy/Chmod). Existing Node caches become - inconsistent if the action is run through Execute(). The argument - `targets` can be a single Node object or filename, or a sequence - of Nodes/filenames. - """ - from traceback import extract_stack - - # First check if the cache really needs to be flushed. Only - # actions run in the SConscript with Execute() seem to be - # affected. XXX The way to check if Execute() is in the stacktrace - # is a very dirty hack and should be replaced by a more sensible - # solution. - for f in extract_stack(): - if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': - break - else: - # Dont have to invalidate, so return - return - - if not SCons.Util.is_List(targets): - targets = [targets] - - for entry in targets: - # If the target is a Node object, clear the cache. If it is a - # filename, look up potentially existing Node object first. - try: - entry.clear_memoized_values() - except AttributeError: - # Not a Node object, try to look up Node by filename. XXX - # This creates Node objects even for those filenames which - # do not correspond to an existing Node object. 
- node = get_default_fs().Entry(entry) - if node: - node.clear_memoized_values() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/Python.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/Python.py deleted file mode 100644 index ec23b3fc186..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/Python.py +++ /dev/null @@ -1,180 +0,0 @@ -"""scons.Node.Python - -Python nodes. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Node/Python.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Node - -class ValueNodeInfo(SCons.Node.NodeInfoBase): - __slots__ = ('csig',) - current_version_id = 2 - - field_list = ['csig'] - - def str_to_node(self, s): - return Value(s) - - def __getstate__(self): - """ - Return all fields that shall be pickled. Walk the slots in the class - hierarchy and add those to the state dictionary. If a '__dict__' slot is - available, copy all entries to the dictionary. Also include the version - id, which is fixed for all instances of a class. - """ - state = getattr(self, '__dict__', {}).copy() - for obj in type(self).mro(): - for name in getattr(obj,'__slots__',()): - if hasattr(self, name): - state[name] = getattr(self, name) - - state['_version_id'] = self.current_version_id - try: - del state['__weakref__'] - except KeyError: - pass - - return state - - def __setstate__(self, state): - """ - Restore the attributes from a pickled state. - """ - # TODO check or discard version - del state['_version_id'] - for key, value in state.items(): - if key not in ('__weakref__',): - setattr(self, key, value) - - -class ValueBuildInfo(SCons.Node.BuildInfoBase): - __slots__ = () - current_version_id = 2 - -class Value(SCons.Node.Node): - """A class for Python variables, typically passed on the command line - or generated by a script, but not from a file or some other source. 
- """ - - NodeInfo = ValueNodeInfo - BuildInfo = ValueBuildInfo - - def __init__(self, value, built_value=None): - SCons.Node.Node.__init__(self) - self.value = value - self.changed_since_last_build = 6 - self.store_info = 0 - if built_value is not None: - self.built_value = built_value - - def str_for_display(self): - return repr(self.value) - - def __str__(self): - return str(self.value) - - def make_ready(self): - self.get_csig() - - def build(self, **kw): - if not hasattr(self, 'built_value'): - SCons.Node.Node.build(self, **kw) - - is_up_to_date = SCons.Node.Node.children_are_up_to_date - - def is_under(self, dir): - # Make Value nodes get built regardless of - # what directory scons was run from. Value nodes - # are outside the filesystem: - return 1 - - def write(self, built_value): - """Set the value of the node.""" - self.built_value = built_value - - def read(self): - """Return the value. If necessary, the value is built.""" - self.build() - if not hasattr(self, 'built_value'): - self.built_value = self.value - return self.built_value - - def get_text_contents(self): - """By the assumption that the node.built_value is a - deterministic product of the sources, the contents of a Value - are the concatenation of all the contents of its sources. As - the value need not be built when get_contents() is called, we - cannot use the actual node.built_value.""" - ###TODO: something reasonable about universal newlines - contents = str(self.value) - for kid in self.children(None): - contents = contents + kid.get_contents().decode() - return contents - - def get_contents(self): - """ - Get contents for signature calculations. 
- :return: bytes - """ - text_contents = self.get_text_contents() - try: - return text_contents.encode() - except UnicodeDecodeError: - # Already encoded as python2 str are bytes - return text_contents - - - def changed_since_last_build(self, target, prev_ni): - cur_csig = self.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - def get_csig(self, calc=None): - """Because we're a Python value node and don't have a real - timestamp, we get to ignore the calculator and just use the - value contents. - - Returns string. Ideally string of hex digits. (Not bytes) - """ - try: - return self.ninfo.csig - except AttributeError: - pass - - contents = self.get_text_contents() - - self.get_ninfo().csig = contents - return contents - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/__init__.py deleted file mode 100644 index 32f4bbaa778..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/__init__.py +++ /dev/null @@ -1,1780 +0,0 @@ -"""SCons.Node - -The Node package for the SCons software construction utility. - -This is, in many ways, the heart of SCons. - -A Node is where we encapsulate all of the dependency information about -any thing that SCons can build, or about any thing which SCons can use -to build some other thing. The canonical "thing," of course, is a file, -but a Node can also represent something remote (like a web page) or -something completely abstract (like an Alias). - -Each specific type of "thing" is specifically represented by a subclass -of the Node base class: Node.FS.File for files, Node.Alias for aliases, -etc. Dependency information is kept here in the base class, and -information specific to files/aliases/etc. is in the subclass. 
The -goal, if we've done this correctly, is that any type of "thing" should -be able to depend on any other type of "thing." - -""" - -from __future__ import print_function - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Node/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import collections -import copy -from itertools import chain - -try: - from itertools import zip_longest -except ImportError: - from itertools import izip_longest as zip_longest - -import SCons.Debug -from SCons.Debug import logInstanceCreation -import SCons.Executor -import SCons.Memoize -import SCons.Util - -from SCons.Debug import Trace - -from SCons.compat import with_metaclass, NoSlotsPyPy - -print_duplicate = 0 - -def classname(obj): - return str(obj.__class__).split('.')[-1] - -# Set to false if we're doing a dry run. 
There's more than one of these -# little treats -do_store_info = True - -# Node states -# -# These are in "priority" order, so that the maximum value for any -# child/dependency of a node represents the state of that node if -# it has no builder of its own. The canonical example is a file -# system directory, which is only up to date if all of its children -# were up to date. -no_state = 0 -pending = 1 -executing = 2 -up_to_date = 3 -executed = 4 -failed = 5 - -StateString = { - 0 : "no_state", - 1 : "pending", - 2 : "executing", - 3 : "up_to_date", - 4 : "executed", - 5 : "failed", -} - -# controls whether implicit dependencies are cached: -implicit_cache = 0 - -# controls whether implicit dep changes are ignored: -implicit_deps_unchanged = 0 - -# controls whether the cached implicit deps are ignored: -implicit_deps_changed = 0 - -# A variable that can be set to an interface-specific function be called -# to annotate a Node with information about its creation. -def do_nothing_node(node): pass - -Annotate = do_nothing_node - -# Gets set to 'True' if we're running in interactive mode. Is -# currently used to release parts of a target's info during -# clean builds and update runs (see release_target_info). -interactive = False - -def is_derived_none(node): - raise NotImplementedError - -def is_derived_node(node): - """ - Returns true if this node is derived (i.e. built). - """ - return node.has_builder() or node.side_effect - -_is_derived_map = {0 : is_derived_none, - 1 : is_derived_node} - -def exists_none(node): - raise NotImplementedError - -def exists_always(node): - return 1 - -def exists_base(node): - return node.stat() is not None - -def exists_entry(node): - """Return if the Entry exists. Check the file system to see - what we should turn into first. Assume a file if there's no - directory.""" - node.disambiguate() - return _exists_map[node._func_exists](node) - - -def exists_file(node): - # Duplicate from source path if we are set up to do this. 
- if node.duplicate and not node.is_derived() and not node.linked: - src = node.srcnode() - if src is not node: - # At this point, src is meant to be copied in a variant directory. - src = src.rfile() - if src.get_abspath() != node.get_abspath(): - if src.exists(): - node.do_duplicate(src) - # Can't return 1 here because the duplication might - # not actually occur if the -n option is being used. - else: - # The source file does not exist. Make sure no old - # copy remains in the variant directory. - if print_duplicate: - print("dup: no src for %s, unlinking old variant copy" % node) - if exists_base(node) or node.islink(): - node.fs.unlink(node.get_internal_path()) - # Return None explicitly because the Base.exists() call - # above will have cached its value if the file existed. - return None - return exists_base(node) - -_exists_map = {0 : exists_none, - 1 : exists_always, - 2 : exists_base, - 3 : exists_entry, - 4 : exists_file} - - -def rexists_none(node): - raise NotImplementedError - -def rexists_node(node): - return node.exists() - -def rexists_base(node): - return node.rfile().exists() - -_rexists_map = {0 : rexists_none, - 1 : rexists_node, - 2 : rexists_base} - -def get_contents_none(node): - raise NotImplementedError - -def get_contents_entry(node): - """Fetch the contents of the entry. Returns the exact binary - contents of the file.""" - try: - node = node.disambiguate(must_exist=1) - except SCons.Errors.UserError: - # There was nothing on disk with which to disambiguate - # this entry. Leave it as an Entry, but return a null - # string so calls to get_contents() in emitters and the - # like (e.g. in qt.py) don't have to disambiguate by hand - # or catch the exception. - return '' - else: - return _get_contents_map[node._func_get_contents](node) - -def get_contents_dir(node): - """Return content signatures and names of all our children - separated by new-lines. 
Ensure that the nodes are sorted.""" - contents = [] - for n in sorted(node.children(), key=lambda t: t.name): - contents.append('%s %s\n' % (n.get_csig(), n.name)) - return ''.join(contents) - -def get_contents_file(node): - if not node.rexists(): - return b'' - fname = node.rfile().get_abspath() - try: - with open(fname, "rb") as fp: - contents = fp.read() - except EnvironmentError as e: - if not e.filename: - e.filename = fname - raise - return contents - -_get_contents_map = {0 : get_contents_none, - 1 : get_contents_entry, - 2 : get_contents_dir, - 3 : get_contents_file} - -def target_from_source_none(node, prefix, suffix, splitext): - raise NotImplementedError - -def target_from_source_base(node, prefix, suffix, splitext): - return node.dir.Entry(prefix + splitext(node.name)[0] + suffix) - -_target_from_source_map = {0 : target_from_source_none, - 1 : target_from_source_base} - -# -# The new decider subsystem for Nodes -# -# We would set and overwrite the changed_since_last_build function -# before, but for being able to use slots (less memory!) we now have -# a dictionary of the different decider functions. Then in the Node -# subclasses we simply store the index to the decider that should be -# used by it. -# - -# -# First, the single decider functions -# -def changed_since_last_build_node(node, target, prev_ni, repo_node=None): - """ - - Must be overridden in a specific subclass to return True if this - Node (a dependency) has changed since the last time it was used - to build the specified target. prev_ni is this Node's state (for - example, its file timestamp, length, maybe content signature) - as of the last time the target was built. - - Note that this method is called through the dependency, not the - target, because a dependency Node must be able to use its own - logic to decide if it changed. For example, File Nodes need to - obey if we're configured to use timestamps, but Python Value Nodes - never use timestamps and always use the content. 
If this method - were called through the target, then each Node's implementation - of this method would have to have more complicated logic to - handle all the different Node types on which it might depend. - """ - raise NotImplementedError - - -def changed_since_last_build_alias(node, target, prev_ni, repo_node=None): - cur_csig = node.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - -def changed_since_last_build_entry(node, target, prev_ni, repo_node=None): - node.disambiguate() - return _decider_map[node.changed_since_last_build](node, target, prev_ni, repo_node) - - -def changed_since_last_build_state_changed(node, target, prev_ni, repo_node=None): - return node.state != SCons.Node.up_to_date - - -def decide_source(node, target, prev_ni, repo_node=None): - return target.get_build_env().decide_source(node, target, prev_ni, repo_node) - - -def decide_target(node, target, prev_ni, repo_node=None): - return target.get_build_env().decide_target(node, target, prev_ni, repo_node) - - -def changed_since_last_build_python(node, target, prev_ni, repo_node=None): - cur_csig = node.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - -# -# Now, the mapping from indices to decider functions -# -_decider_map = {0 : changed_since_last_build_node, - 1 : changed_since_last_build_alias, - 2 : changed_since_last_build_entry, - 3 : changed_since_last_build_state_changed, - 4 : decide_source, - 5 : decide_target, - 6 : changed_since_last_build_python} - -do_store_info = True - -# -# The new store_info subsystem for Nodes -# -# We would set and overwrite the store_info function -# before, but for being able to use slots (less memory!) we now have -# a dictionary of the different functions. Then in the Node -# subclasses we simply store the index to the info method that should be -# used by it. 
-# - -# -# First, the single info functions -# - -def store_info_pass(node): - pass - -def store_info_file(node): - # Merge our build information into the already-stored entry. - # This accommodates "chained builds" where a file that's a target - # in one build (SConstruct file) is a source in a different build. - # See test/chained-build.py for the use case. - if do_store_info: - node.dir.sconsign().store_info(node.name, node) - - -store_info_map = {0 : store_info_pass, - 1 : store_info_file} - -# Classes for signature info for Nodes. - -class NodeInfoBase(object): - """ - The generic base class for signature information for a Node. - - Node subclasses should subclass NodeInfoBase to provide their own - logic for dealing with their own Node-specific signature information. - """ - __slots__ = ('__weakref__',) - current_version_id = 2 - - def update(self, node): - try: - field_list = self.field_list - except AttributeError: - return - for f in field_list: - try: - delattr(self, f) - except AttributeError: - pass - try: - func = getattr(node, 'get_' + f) - except AttributeError: - pass - else: - setattr(self, f, func()) - - def convert(self, node, val): - pass - - def merge(self, other): - """ - Merge the fields of another object into this object. Already existing - information is overwritten by the other instance's data. - WARNING: If a '__dict__' slot is added, it should be updated instead of - replaced. 
- """ - state = other.__getstate__() - self.__setstate__(state) - - def format(self, field_list=None, names=0): - if field_list is None: - try: - field_list = self.field_list - except AttributeError: - field_list = list(getattr(self, '__dict__', {}).keys()) - for obj in type(self).mro(): - for slot in getattr(obj, '__slots__', ()): - if slot not in ('__weakref__', '__dict__'): - field_list.append(slot) - field_list.sort() - fields = [] - for field in field_list: - try: - f = getattr(self, field) - except AttributeError: - f = None - f = str(f) - if names: - f = field + ': ' + f - fields.append(f) - return fields - - def __getstate__(self): - """ - Return all fields that shall be pickled. Walk the slots in the class - hierarchy and add those to the state dictionary. If a '__dict__' slot is - available, copy all entries to the dictionary. Also include the version - id, which is fixed for all instances of a class. - """ - state = getattr(self, '__dict__', {}).copy() - for obj in type(self).mro(): - for name in getattr(obj,'__slots__',()): - if hasattr(self, name): - state[name] = getattr(self, name) - - state['_version_id'] = self.current_version_id - try: - del state['__weakref__'] - except KeyError: - pass - return state - - def __setstate__(self, state): - """ - Restore the attributes from a pickled state. The version is discarded. - """ - # TODO check or discard version - del state['_version_id'] - - for key, value in state.items(): - if key not in ('__weakref__',): - setattr(self, key, value) - - -class BuildInfoBase(object): - """ - The generic base class for build information for a Node. - - This is what gets stored in a .sconsign file for each target file. - It contains a NodeInfo instance for this node (signature information - that's specific to the type of Node) and direct attributes for the - generic build stuff we have to track: sources, explicit dependencies, - implicit dependencies, and action information. 
- """ - __slots__ = ("bsourcesigs", "bdependsigs", "bimplicitsigs", "bactsig", - "bsources", "bdepends", "bact", "bimplicit", "__weakref__") - current_version_id = 2 - - def __init__(self): - # Create an object attribute from the class attribute so it ends up - # in the pickled data in the .sconsign file. - self.bsourcesigs = [] - self.bdependsigs = [] - self.bimplicitsigs = [] - self.bactsig = None - - def merge(self, other): - """ - Merge the fields of another object into this object. Already existing - information is overwritten by the other instance's data. - WARNING: If a '__dict__' slot is added, it should be updated instead of - replaced. - """ - state = other.__getstate__() - self.__setstate__(state) - - def __getstate__(self): - """ - Return all fields that shall be pickled. Walk the slots in the class - hierarchy and add those to the state dictionary. If a '__dict__' slot is - available, copy all entries to the dictionary. Also include the version - id, which is fixed for all instances of a class. - """ - state = getattr(self, '__dict__', {}).copy() - for obj in type(self).mro(): - for name in getattr(obj,'__slots__',()): - if hasattr(self, name): - state[name] = getattr(self, name) - - state['_version_id'] = self.current_version_id - try: - del state['__weakref__'] - except KeyError: - pass - return state - - def __setstate__(self, state): - """ - Restore the attributes from a pickled state. - """ - # TODO check or discard version - del state['_version_id'] - for key, value in state.items(): - if key not in ('__weakref__',): - setattr(self, key, value) - - -class Node(object, with_metaclass(NoSlotsPyPy)): - """The base Node class, for entities that we know how to - build, or use to build other Nodes. 
- """ - - __slots__ = ['sources', - 'sources_set', - 'target_peers', - '_specific_sources', - 'depends', - 'depends_set', - 'ignore', - 'ignore_set', - 'prerequisites', - 'implicit', - 'waiting_parents', - 'waiting_s_e', - 'ref_count', - 'wkids', - 'env', - 'state', - 'precious', - 'noclean', - 'nocache', - 'cached', - 'always_build', - 'includes', - 'attributes', - 'side_effect', - 'side_effects', - 'linked', - '_memo', - 'executor', - 'binfo', - 'ninfo', - 'builder', - 'is_explicit', - 'implicit_set', - 'changed_since_last_build', - 'store_info', - 'pseudo', - '_tags', - '_func_is_derived', - '_func_exists', - '_func_rexists', - '_func_get_contents', - '_func_target_from_source'] - - class Attrs(object): - __slots__ = ('shared', '__dict__') - - - def __init__(self): - if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.Node') - # Note that we no longer explicitly initialize a self.builder - # attribute to None here. That's because the self.builder - # attribute may be created on-the-fly later by a subclass (the - # canonical example being a builder to fetch a file from a - # source code system like CVS or Subversion). - - # Each list of children that we maintain is accompanied by a - # dictionary used to look up quickly whether a node is already - # present in the list. Empirical tests showed that it was - # fastest to maintain them as side-by-side Node attributes in - # this way, instead of wrapping up each list+dictionary pair in - # a class. (Of course, we could always still do that in the - # future if we had a good reason to...). 
- self.sources = [] # source files used to build node - self.sources_set = set() - self._specific_sources = False - self.depends = [] # explicit dependencies (from Depends) - self.depends_set = set() - self.ignore = [] # dependencies to ignore - self.ignore_set = set() - self.prerequisites = None - self.implicit = None # implicit (scanned) dependencies (None means not scanned yet) - self.waiting_parents = set() - self.waiting_s_e = set() - self.ref_count = 0 - self.wkids = None # Kids yet to walk, when it's an array - - self.env = None - self.state = no_state - self.precious = None - self.pseudo = False - self.noclean = 0 - self.nocache = 0 - self.cached = 0 # is this node pulled from cache? - self.always_build = None - self.includes = None - self.attributes = self.Attrs() # Generic place to stick information about the Node. - self.side_effect = 0 # true iff this node is a side effect - self.side_effects = [] # the side effects of building this target - self.linked = 0 # is this node linked to the variant directory? - self.changed_since_last_build = 0 - self.store_info = 0 - self._tags = None - self._func_is_derived = 1 - self._func_exists = 1 - self._func_rexists = 1 - self._func_get_contents = 0 - self._func_target_from_source = 0 - self.ninfo = None - - self.clear_memoized_values() - - # Let the interface in which the build engine is embedded - # annotate this Node with its own info (like a description of - # what line in what file created the node, for example). - Annotate(self) - - def disambiguate(self, must_exist=None): - return self - - def get_suffix(self): - return '' - - @SCons.Memoize.CountMethodCall - def get_build_env(self): - """Fetch the appropriate Environment to build this node. 
- """ - try: - return self._memo['get_build_env'] - except KeyError: - pass - result = self.get_executor().get_build_env() - self._memo['get_build_env'] = result - return result - - def get_build_scanner_path(self, scanner): - """Fetch the appropriate scanner path for this node.""" - return self.get_executor().get_build_scanner_path(scanner) - - def set_executor(self, executor): - """Set the action executor for this node.""" - self.executor = executor - - def get_executor(self, create=1): - """Fetch the action executor for this node. Create one if - there isn't already one, and requested to do so.""" - try: - executor = self.executor - except AttributeError: - if not create: - raise - try: - act = self.builder.action - except AttributeError: - executor = SCons.Executor.Null(targets=[self]) - else: - executor = SCons.Executor.Executor(act, - self.env or self.builder.env, - [self.builder.overrides], - [self], - self.sources) - self.executor = executor - return executor - - def executor_cleanup(self): - """Let the executor clean up any cached information.""" - try: - executor = self.get_executor(create=None) - except AttributeError: - pass - else: - if executor is not None: - executor.cleanup() - - def reset_executor(self): - """Remove cached executor; forces recompute when needed.""" - try: - delattr(self, 'executor') - except AttributeError: - pass - - def push_to_cache(self): - """Try to push a node into a cache - """ - pass - - def retrieve_from_cache(self): - """Try to retrieve the node's content from a cache - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). - - Returns true if the node was successfully retrieved. - """ - return 0 - - # - # Taskmaster interface subsystem - # - - def make_ready(self): - """Get a Node ready for evaluation. - - This is called before the Taskmaster decides if the Node is - up-to-date or not. 
Overriding this method allows for a Node - subclass to be disambiguated if necessary, or for an implicit - source builder to be attached. - """ - pass - - def prepare(self): - """Prepare for this Node to be built. - - This is called after the Taskmaster has decided that the Node - is out-of-date and must be rebuilt, but before actually calling - the method to build the Node. - - This default implementation checks that explicit or implicit - dependencies either exist or are derived, and initializes the - BuildInfo structure that will hold the information about how - this node is, uh, built. - - (The existence of source files is checked separately by the - Executor, which aggregates checks for all of the targets built - by a specific action.) - - Overriding this method allows for for a Node subclass to remove - the underlying file from the file system. Note that subclass - methods should call this base class method to get the child - check and the BuildInfo structure. - """ - if self.depends is not None: - for d in self.depends: - if d.missing(): - msg = "Explicit dependency `%s' not found, needed by target `%s'." - raise SCons.Errors.StopError(msg % (d, self)) - if self.implicit is not None: - for i in self.implicit: - if i.missing(): - msg = "Implicit dependency `%s' not found, needed by target `%s'." - raise SCons.Errors.StopError(msg % (i, self)) - self.binfo = self.get_binfo() - - def build(self, **kw): - """Actually build the node. - - This is called by the Taskmaster after it's decided that the - Node is out-of-date and must be rebuilt, and after the prepare() - method has gotten everything, uh, prepared. - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff - in built(). 
- - """ - try: - self.get_executor()(self, **kw) - except SCons.Errors.BuildError as e: - e.node = self - raise - - def built(self): - """Called just after this node is successfully built.""" - - # Clear the implicit dependency caches of any Nodes - # waiting for this Node to be built. - for parent in self.waiting_parents: - parent.implicit = None - - # Handle issue where builder emits more than one target and - # the source file for the builder is generated. - # in that case only the first target was getting it's .implicit - # cleared when the source file is built (second scan). - # leaving only partial implicits from scan before source file is generated - # typically the compiler only. Then scanned files are appended - # This is persisted to sconsign and rebuild causes false rebuilds - # because the ordering of the implicit list then changes to what it - # should have been. - # This is at least the following bugs - # https://github.com/SCons/scons/issues/2811 - # https://jira.mongodb.org/browse/SERVER-33111 - try: - for peer in parent.target_peers: - peer.implicit = None - except AttributeError: - pass - - - self.clear() - - if self.pseudo: - if self.exists(): - raise SCons.Errors.UserError("Pseudo target " + str(self) + " must not exist") - else: - if not self.exists() and do_store_info: - SCons.Warnings.warn(SCons.Warnings.TargetNotBuiltWarning, - "Cannot find target " + str(self) + " after building") - self.ninfo.update(self) - - def visited(self): - """Called just after this node has been visited (with or - without a build).""" - try: - binfo = self.binfo - except AttributeError: - # Apparently this node doesn't need build info, so - # don't bother calculating or storing it. - pass - else: - self.ninfo.update(self) - SCons.Node.store_info_map[self.store_info](self) - - def release_target_info(self): - """Called just after this node has been marked - up-to-date or was built completely. 
- - This is where we try to release as many target node infos - as possible for clean builds and update runs, in order - to minimize the overall memory consumption. - - By purging attributes that aren't needed any longer after - a Node (=File) got built, we don't have to care that much how - many KBytes a Node actually requires...as long as we free - the memory shortly afterwards. - - @see: built() and File.release_target_info() - """ - pass - - # - # - # - - def add_to_waiting_s_e(self, node): - self.waiting_s_e.add(node) - - def add_to_waiting_parents(self, node): - """ - Returns the number of nodes added to our waiting parents list: - 1 if we add a unique waiting parent, 0 if not. (Note that the - returned values are intended to be used to increment a reference - count, so don't think you can "clean up" this function by using - True and False instead...) - """ - wp = self.waiting_parents - if node in wp: - return 0 - wp.add(node) - return 1 - - def postprocess(self): - """Clean up anything we don't need to hang onto after we've - been built.""" - self.executor_cleanup() - self.waiting_parents = set() - - def clear(self): - """Completely clear a Node of all its cached state (so that it - can be re-evaluated by interfaces that do continuous integration - builds). - """ - # The del_binfo() call here isn't necessary for normal execution, - # but is for interactive mode, where we might rebuild the same - # target and need to start from scratch. - self.del_binfo() - self.clear_memoized_values() - self.ninfo = self.new_ninfo() - self.executor_cleanup() - try: - delattr(self, '_calculated_sig') - except AttributeError: - pass - self.includes = None - - def clear_memoized_values(self): - self._memo = {} - - def builder_set(self, builder): - self.builder = builder - try: - del self.executor - except AttributeError: - pass - - def has_builder(self): - """Return whether this Node has a builder or not. 
- - In Boolean tests, this turns out to be a *lot* more efficient - than simply examining the builder attribute directly ("if - node.builder: ..."). When the builder attribute is examined - directly, it ends up calling __getattr__ for both the __len__ - and __nonzero__ attributes on instances of our Builder Proxy - class(es), generating a bazillion extra calls and slowing - things down immensely. - """ - try: - b = self.builder - except AttributeError: - # There was no explicit builder for this Node, so initialize - # the self.builder attribute to None now. - b = self.builder = None - return b is not None - - def set_explicit(self, is_explicit): - self.is_explicit = is_explicit - - def has_explicit_builder(self): - """Return whether this Node has an explicit builder - - This allows an internal Builder created by SCons to be marked - non-explicit, so that it can be overridden by an explicit - builder that the user supplies (the canonical example being - directories).""" - try: - return self.is_explicit - except AttributeError: - self.is_explicit = None - return self.is_explicit - - def get_builder(self, default_builder=None): - """Return the set builder, or a specified default value""" - try: - return self.builder - except AttributeError: - return default_builder - - multiple_side_effect_has_builder = has_builder - - def is_derived(self): - """ - Returns true if this node is derived (i.e. built). - - This should return true only for nodes whose path should be in - the variant directory when duplicate=0 and should contribute their build - signatures when they are used as source files to other derived files. For - example: source with source builders are not derived in this sense, - and hence should not return true. - """ - return _is_derived_map[self._func_is_derived](self) - - def alter_targets(self): - """Return a list of alternate targets for this Node. 
- """ - return [], None - - def get_found_includes(self, env, scanner, path): - """Return the scanned include lines (implicit dependencies) - found in this node. - - The default is no implicit dependencies. We expect this method - to be overridden by any subclass that can be scanned for - implicit dependencies. - """ - return [] - - def get_implicit_deps(self, env, initial_scanner, path_func, kw = {}): - """Return a list of implicit dependencies for this node. - - This method exists to handle recursive invocation of the scanner - on the implicit dependencies returned by the scanner, if the - scanner's recursive flag says that we should. - """ - nodes = [self] - seen = set(nodes) - dependencies = [] - path_memo = {} - - root_node_scanner = self._get_scanner(env, initial_scanner, None, kw) - - while nodes: - node = nodes.pop(0) - - scanner = node._get_scanner(env, initial_scanner, root_node_scanner, kw) - if not scanner: - continue - - try: - path = path_memo[scanner] - except KeyError: - path = path_func(scanner) - path_memo[scanner] = path - - included_deps = [x for x in node.get_found_includes(env, scanner, path) if x not in seen] - if included_deps: - dependencies.extend(included_deps) - seen.update(included_deps) - nodes.extend(scanner.recurse_nodes(included_deps)) - - return dependencies - - def _get_scanner(self, env, initial_scanner, root_node_scanner, kw): - if initial_scanner: - # handle explicit scanner case - scanner = initial_scanner.select(self) - else: - # handle implicit scanner case - scanner = self.get_env_scanner(env, kw) - if scanner: - scanner = scanner.select(self) - - if not scanner: - # no scanner could be found for the given node's scanner key; - # thus, make an attempt at using a default. 
- scanner = root_node_scanner - - return scanner - - def get_env_scanner(self, env, kw={}): - return env.get_scanner(self.scanner_key()) - - def get_target_scanner(self): - return self.builder.target_scanner - - def get_source_scanner(self, node): - """Fetch the source scanner for the specified node - - NOTE: "self" is the target being built, "node" is - the source file for which we want to fetch the scanner. - - Implies self.has_builder() is true; again, expect to only be - called from locations where this is already verified. - - This function may be called very often; it attempts to cache - the scanner found to improve performance. - """ - scanner = None - try: - scanner = self.builder.source_scanner - except AttributeError: - pass - if not scanner: - # The builder didn't have an explicit scanner, so go look up - # a scanner from env['SCANNERS'] based on the node's scanner - # key (usually the file extension). - scanner = self.get_env_scanner(self.get_build_env()) - if scanner: - scanner = scanner.select(node) - return scanner - - def add_to_implicit(self, deps): - if not hasattr(self, 'implicit') or self.implicit is None: - self.implicit = [] - self.implicit_set = set() - self._children_reset() - self._add_child(self.implicit, self.implicit_set, deps) - - def scan(self): - """Scan this node's dependents for implicit dependencies.""" - # Don't bother scanning non-derived files, because we don't - # care what their dependencies are. - # Don't scan again, if we already have scanned. - if self.implicit is not None: - return - self.implicit = [] - self.implicit_set = set() - self._children_reset() - if not self.has_builder(): - return - - build_env = self.get_build_env() - executor = self.get_executor() - - # Here's where we implement --implicit-cache. 
- if implicit_cache and not implicit_deps_changed: - implicit = self.get_stored_implicit() - if implicit is not None: - # We now add the implicit dependencies returned from the - # stored .sconsign entry to have already been converted - # to Nodes for us. (We used to run them through a - # source_factory function here.) - - # Update all of the targets with them. This - # essentially short-circuits an N*M scan of the - # sources for each individual target, which is a hell - # of a lot more efficient. - for tgt in executor.get_all_targets(): - tgt.add_to_implicit(implicit) - - if implicit_deps_unchanged or self.is_up_to_date(): - return - # one of this node's sources has changed, - # so we must recalculate the implicit deps for all targets - for tgt in executor.get_all_targets(): - tgt.implicit = [] - tgt.implicit_set = set() - - # Have the executor scan the sources. - executor.scan_sources(self.builder.source_scanner) - - # If there's a target scanner, have the executor scan the target - # node itself and associated targets that might be built. - scanner = self.get_target_scanner() - if scanner: - executor.scan_targets(scanner) - - def scanner_key(self): - return None - - def select_scanner(self, scanner): - """Selects a scanner for this Node. - - This is a separate method so it can be overridden by Node - subclasses (specifically, Node.FS.Dir) that *must* use their - own Scanner and don't select one the Scanner.Selector that's - configured for the target. 
- """ - return scanner.select(self) - - def env_set(self, env, safe=0): - if safe and self.env: - return - self.env = env - - # - # SIGNATURE SUBSYSTEM - # - - NodeInfo = NodeInfoBase - BuildInfo = BuildInfoBase - - def new_ninfo(self): - ninfo = self.NodeInfo() - return ninfo - - def get_ninfo(self): - if self.ninfo is not None: - return self.ninfo - self.ninfo = self.new_ninfo() - return self.ninfo - - def new_binfo(self): - binfo = self.BuildInfo() - return binfo - - def get_binfo(self): - """ - Fetch a node's build information. - - node - the node whose sources will be collected - cache - alternate node to use for the signature cache - returns - the build signature - - This no longer handles the recursive descent of the - node's children's signatures. We expect that they're - already built and updated by someone else, if that's - what's wanted. - """ - try: - return self.binfo - except AttributeError: - pass - - binfo = self.new_binfo() - self.binfo = binfo - - executor = self.get_executor() - ignore_set = self.ignore_set - - if self.has_builder(): - binfo.bact = str(executor) - binfo.bactsig = SCons.Util.MD5signature(executor.get_contents()) - - if self._specific_sources: - sources = [s for s in self.sources if s not in ignore_set] - - else: - sources = executor.get_unignored_sources(self, self.ignore) - - seen = set() - binfo.bsources = [s for s in sources if s not in seen and not seen.add(s)] - binfo.bsourcesigs = [s.get_ninfo() for s in binfo.bsources] - - binfo.bdepends = [d for d in self.depends if d not in ignore_set] - binfo.bdependsigs = [d.get_ninfo() for d in self.depends] - - # Because self.implicit is initialized to None (and not empty list []) - # we have to handle this case - if not self.implicit: - binfo.bimplicit = [] - binfo.bimplicitsigs = [] - else: - binfo.bimplicit = [i for i in self.implicit if i not in ignore_set] - binfo.bimplicitsigs = [i.get_ninfo() for i in binfo.bimplicit] - - return binfo - - def del_binfo(self): - """Delete the 
build info from this node.""" - try: - delattr(self, 'binfo') - except AttributeError: - pass - - def get_csig(self): - try: - return self.ninfo.csig - except AttributeError: - ninfo = self.get_ninfo() - ninfo.csig = SCons.Util.MD5signature(self.get_contents()) - return self.ninfo.csig - - def get_cachedir_csig(self): - return self.get_csig() - - def get_stored_info(self): - return None - - def get_stored_implicit(self): - """Fetch the stored implicit dependencies""" - return None - - # - # - # - - def set_precious(self, precious = 1): - """Set the Node's precious value.""" - self.precious = precious - - def set_pseudo(self, pseudo = True): - """Set the Node's precious value.""" - self.pseudo = pseudo - - def set_noclean(self, noclean = 1): - """Set the Node's noclean value.""" - # Make sure noclean is an integer so the --debug=stree - # output in Util.py can use it as an index. - self.noclean = noclean and 1 or 0 - - def set_nocache(self, nocache = 1): - """Set the Node's nocache value.""" - # Make sure nocache is an integer so the --debug=stree - # output in Util.py can use it as an index. 
- self.nocache = nocache and 1 or 0 - - def set_always_build(self, always_build = 1): - """Set the Node's always_build value.""" - self.always_build = always_build - - def exists(self): - """Does this node exists?""" - return _exists_map[self._func_exists](self) - - def rexists(self): - """Does this node exist locally or in a repository?""" - # There are no repositories by default: - return _rexists_map[self._func_rexists](self) - - def get_contents(self): - """Fetch the contents of the entry.""" - return _get_contents_map[self._func_get_contents](self) - - def missing(self): - return not self.is_derived() and \ - not self.linked and \ - not self.rexists() - - def remove(self): - """Remove this Node: no-op by default.""" - return None - - def add_dependency(self, depend): - """Adds dependencies.""" - try: - self._add_child(self.depends, self.depends_set, depend) - except TypeError as e: - e = e.args[0] - if SCons.Util.is_List(e): - s = list(map(str, e)) - else: - s = str(e) - raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) - - def add_prerequisite(self, prerequisite): - """Adds prerequisites""" - if self.prerequisites is None: - self.prerequisites = SCons.Util.UniqueList() - self.prerequisites.extend(prerequisite) - self._children_reset() - - def add_ignore(self, depend): - """Adds dependencies to ignore.""" - try: - self._add_child(self.ignore, self.ignore_set, depend) - except TypeError as e: - e = e.args[0] - if SCons.Util.is_List(e): - s = list(map(str, e)) - else: - s = str(e) - raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) - - def add_source(self, source): - """Adds sources.""" - if self._specific_sources: - return - try: - self._add_child(self.sources, self.sources_set, source) - except TypeError as e: - e = e.args[0] - if SCons.Util.is_List(e): - s = list(map(str, e)) - else: - s = str(e) - 
raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) - - def _add_child(self, collection, set, child): - """Adds 'child' to 'collection', first checking 'set' to see if it's - already present.""" - added = None - for c in child: - if c not in set: - set.add(c) - collection.append(c) - added = 1 - if added: - self._children_reset() - - def set_specific_source(self, source): - self.add_source(source) - self._specific_sources = True - - def add_wkid(self, wkid): - """Add a node to the list of kids waiting to be evaluated""" - if self.wkids is not None: - self.wkids.append(wkid) - - def _children_reset(self): - self.clear_memoized_values() - # We need to let the Executor clear out any calculated - # build info that it's cached so we can re-calculate it. - self.executor_cleanup() - - @SCons.Memoize.CountMethodCall - def _children_get(self): - try: - return self._memo['_children_get'] - except KeyError: - pass - - # The return list may contain duplicate Nodes, especially in - # source trees where there are a lot of repeated #includes - # of a tangle of .h files. Profiling shows, however, that - # eliminating the duplicates with a brute-force approach that - # preserves the order (that is, something like: - # - # u = [] - # for n in list: - # if n not in u: - # u.append(n)" - # - # takes more cycles than just letting the underlying methods - # hand back cached values if a Node's information is requested - # multiple times. (Other methods of removing duplicates, like - # using dictionary keys, lose the order, and the only ordered - # dictionary patterns I found all ended up using "not in" - # internally anyway...) 
- if self.ignore_set: - iter = chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f]) - - children = [] - for i in iter: - if i not in self.ignore_set: - children.append(i) - else: - children = self.all_children(scan=0) - - self._memo['_children_get'] = children - return children - - def all_children(self, scan=1): - """Return a list of all the node's direct children.""" - if scan: - self.scan() - - # The return list may contain duplicate Nodes, especially in - # source trees where there are a lot of repeated #includes - # of a tangle of .h files. Profiling shows, however, that - # eliminating the duplicates with a brute-force approach that - # preserves the order (that is, something like: - # - # u = [] - # for n in list: - # if n not in u: - # u.append(n)" - # - # takes more cycles than just letting the underlying methods - # hand back cached values if a Node's information is requested - # multiple times. (Other methods of removing duplicates, like - # using dictionary keys, lose the order, and the only ordered - # dictionary patterns I found all ended up using "not in" - # internally anyway...) - return list(chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f])) - - def children(self, scan=1): - """Return a list of the node's direct children, minus those - that are ignored by this node.""" - if scan: - self.scan() - return self._children_get() - - def set_state(self, state): - self.state = state - - def get_state(self): - return self.state - - def get_env(self): - env = self.env - if not env: - import SCons.Defaults - env = SCons.Defaults.DefaultEnvironment() - return env - - def Decider(self, function): - foundkey = None - for k, v in _decider_map.items(): - if v == function: - foundkey = k - break - if not foundkey: - foundkey = len(_decider_map) - _decider_map[foundkey] = function - self.changed_since_last_build = foundkey - - def Tag(self, key, value): - """ Add a user-defined tag. 
""" - if not self._tags: - self._tags = {} - self._tags[key] = value - - def GetTag(self, key): - """ Return a user-defined tag. """ - if not self._tags: - return None - return self._tags.get(key, None) - - def changed(self, node=None, allowcache=False): - """ - Returns if the node is up-to-date with respect to the BuildInfo - stored last time it was built. The default behavior is to compare - it against our own previously stored BuildInfo, but the stored - BuildInfo from another Node (typically one in a Repository) - can be used instead. - - Note that we now *always* check every dependency. We used to - short-circuit the check by returning as soon as we detected - any difference, but we now rely on checking every dependency - to make sure that any necessary Node information (for example, - the content signature of an #included .h file) is updated. - - The allowcache option was added for supporting the early - release of the executor/builder structures, right after - a File target was built. When set to true, the return - value of this changed method gets cached for File nodes. - Like this, the executor isn't needed any longer for subsequent - calls to changed(). - - @see: FS.File.changed(), FS.File.release_target_info() - """ - t = 0 - if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node)) - if node is None: - node = self - - result = False - - bi = node.get_stored_info().binfo - then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs - children = self.children() - - diff = len(children) - len(then) - if diff: - # The old and new dependency lists are different lengths. - # This always indicates that the Node must be rebuilt. - # We also extend the old dependency list with enough None - # entries to equal the new dependency list, for the benefit - # of the loop below that updates node information. 
- then.extend([None] * diff) - if t: Trace(': old %s new %s' % (len(then), len(children))) - result = True - - for child, prev_ni in zip(children, then): - if _decider_map[child.changed_since_last_build](child, self, prev_ni, node): - if t: Trace(': %s changed' % child) - result = True - - if self.has_builder(): - contents = self.get_executor().get_contents() - newsig = SCons.Util.MD5signature(contents) - if bi.bactsig != newsig: - if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig)) - result = True - - if not result: - if t: Trace(': up to date') - - if t: Trace('\n') - - return result - - def is_up_to_date(self): - """Default check for whether the Node is current: unknown Node - subtypes are always out of date, so they will always get built.""" - return None - - def children_are_up_to_date(self): - """Alternate check for whether the Node is current: If all of - our children were up-to-date, then this Node was up-to-date, too. - - The SCons.Node.Alias and SCons.Node.Python.Value subclasses - rebind their current() method to this method.""" - # Allow the children to calculate their signatures. - self.binfo = self.get_binfo() - if self.always_build: - return None - state = 0 - for kid in self.children(None): - s = kid.get_state() - if s and (not state or s > state): - state = s - return (state == 0 or state == SCons.Node.up_to_date) - - def is_literal(self): - """Always pass the string representation of a Node to - the command interpreter literally.""" - return 1 - - def render_include_tree(self): - """ - Return a text representation, suitable for displaying to the - user, of the include tree for the sources of this node. 
- """ - if self.is_derived(): - env = self.get_build_env() - if env: - for s in self.sources: - scanner = self.get_source_scanner(s) - if scanner: - path = self.get_build_scanner_path(scanner) - else: - path = None - def f(node, env=env, scanner=scanner, path=path): - return node.get_found_includes(env, scanner, path) - return SCons.Util.render_tree(s, f, 1) - else: - return None - - def get_abspath(self): - """ - Return an absolute path to the Node. This will return simply - str(Node) by default, but for Node types that have a concept of - relative path, this might return something different. - """ - return str(self) - - def for_signature(self): - """ - Return a string representation of the Node that will always - be the same for this particular Node, no matter what. This - is by contrast to the __str__() method, which might, for - instance, return a relative path for a file Node. The purpose - of this method is to generate a value to be used in signature - calculation for the command line used to build a target, and - we use this method instead of str() to avoid unnecessary - rebuilds. This method does not need to return something that - would actually work in a command line; it can return any kind of - nonsense, so long as it does not change. - """ - return str(self) - - def get_string(self, for_signature): - """This is a convenience function designed primarily to be - used in command generators (i.e., CommandGeneratorActions or - Environment variables that are callable), which are called - with a for_signature argument that is nonzero if the command - generator is being called to generate a signature for the - command line, which determines if we should rebuild or not. 
- - Such command generators should use this method in preference - to str(Node) when converting a Node to a string, passing - in the for_signature parameter, such that we will call - Node.for_signature() or str(Node) properly, depending on whether - we are calculating a signature or actually constructing a - command line.""" - if for_signature: - return self.for_signature() - return str(self) - - def get_subst_proxy(self): - """ - This method is expected to return an object that will function - exactly like this Node, except that it implements any additional - special features that we would like to be in effect for - Environment variable substitution. The principle use is that - some Nodes would like to implement a __getattr__() method, - but putting that in the Node type itself has a tendency to kill - performance. We instead put it in a proxy and return it from - this method. It is legal for this method to return self - if no new functionality is needed for Environment substitution. - """ - return self - - def explain(self): - if not self.exists(): - return "building `%s' because it doesn't exist\n" % self - - if self.always_build: - return "rebuilding `%s' because AlwaysBuild() is specified\n" % self - - old = self.get_stored_info() - if old is None: - return None - - old = old.binfo - old.prepare_dependencies() - - try: - old_bkids = old.bsources + old.bdepends + old.bimplicit - old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs - except AttributeError: - return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self - - new = self.get_binfo() - - new_bkids = new.bsources + new.bdepends + new.bimplicit - new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs - - osig = dict(list(zip(old_bkids, old_bkidsigs))) - nsig = dict(list(zip(new_bkids, new_bkidsigs))) - - # The sources and dependencies we'll want to report are all stored - # as relative paths to this target's directory, but we want to - # 
report them relative to the top-level SConstruct directory, - # so we only print them after running them through this lambda - # to turn them into the right relative Node and then return - # its string. - def stringify( s, E=self.dir.Entry): - if hasattr( s, 'dir' ) : - return str(E(s)) - return str(s) - - lines = [] - - removed = [x for x in old_bkids if x not in new_bkids] - if removed: - removed = [stringify(r) for r in removed] - fmt = "`%s' is no longer a dependency\n" - lines.extend([fmt % s for s in removed]) - - for k in new_bkids: - if k not in old_bkids: - lines.append("`%s' is a new dependency\n" % stringify(k)) - else: - changed = _decider_map[k.changed_since_last_build](k, self, osig[k]) - - if changed: - lines.append("`%s' changed\n" % stringify(k)) - - if len(lines) == 0 and old_bkids != new_bkids: - lines.append("the dependency order changed:\n") - lines.append("->Sources\n") - for (o,n) in zip_longest(old.bsources, new.bsources, fillvalue=None): - lines.append("Old:%s\tNew:%s\n"%(o,n)) - lines.append("->Depends\n") - for (o,n) in zip_longest(old.bdepends, new.bdepends, fillvalue=None): - lines.append("Old:%s\tNew:%s\n"%(o,n)) - lines.append("->Implicit\n") - for (o,n) in zip_longest(old.bimplicit, new.bimplicit, fillvalue=None): - lines.append("Old:%s\tNew:%s\n"%(o,n)) - - if len(lines) == 0: - def fmt_with_title(title, strlines): - lines = strlines.split('\n') - sep = '\n' + ' '*(15 + len(title)) - return ' '*15 + title + sep.join(lines) + '\n' - if old.bactsig != new.bactsig: - if old.bact == new.bact: - lines.append("the contents of the build action changed\n" + - fmt_with_title('action: ', new.bact)) - - # lines.append("the contents of the build action changed [%s] [%s]\n"%(old.bactsig,new.bactsig) + - # fmt_with_title('action: ', new.bact)) - else: - lines.append("the build action changed:\n" + - fmt_with_title('old: ', old.bact) + - fmt_with_title('new: ', new.bact)) - - if len(lines) == 0: - return "rebuilding `%s' for unknown reasons\n" % 
self - - preamble = "rebuilding `%s' because" % self - if len(lines) == 1: - return "%s %s" % (preamble, lines[0]) - else: - lines = ["%s:\n" % preamble] + lines - return ( ' '*11).join(lines) - -class NodeList(collections.UserList): - def __str__(self): - return str(list(map(str, self.data))) - -def get_children(node, parent): return node.children() -def ignore_cycle(node, stack): pass -def do_nothing(node, parent): pass - -class Walker(object): - """An iterator for walking a Node tree. - - This is depth-first, children are visited before the parent. - The Walker object can be initialized with any node, and - returns the next node on the descent with each get_next() call. - get the children of a node instead of calling 'children'. - 'cycle_func' is an optional function that will be called - when a cycle is detected. - - This class does not get caught in node cycles caused, for example, - by C header file include loops. - """ - def __init__(self, node, kids_func=get_children, - cycle_func=ignore_cycle, - eval_func=do_nothing): - self.kids_func = kids_func - self.cycle_func = cycle_func - self.eval_func = eval_func - node.wkids = copy.copy(kids_func(node, None)) - self.stack = [node] - self.history = {} # used to efficiently detect and avoid cycles - self.history[node] = None - - def get_next(self): - """Return the next node for this walk of the tree. - - This function is intentionally iterative, not recursive, - to sidestep any issues of stack size limitations. 
- """ - - while self.stack: - if self.stack[-1].wkids: - node = self.stack[-1].wkids.pop(0) - if not self.stack[-1].wkids: - self.stack[-1].wkids = None - if node in self.history: - self.cycle_func(node, self.stack) - else: - node.wkids = copy.copy(self.kids_func(node, self.stack[-1])) - self.stack.append(node) - self.history[node] = None - else: - node = self.stack.pop() - del self.history[node] - if node: - if self.stack: - parent = self.stack[-1] - else: - parent = None - self.eval_func(node, parent) - return node - return None - - def is_done(self): - return not self.stack - - -arg2nodes_lookups = [] - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/PathList.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/PathList.py deleted file mode 100644 index ad029369f36..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/PathList.py +++ /dev/null @@ -1,227 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/PathList.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """SCons.PathList - -A module for handling lists of directory paths (the sort of things -that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and -efficiency as we can, while still keeping the evaluation delayed so that we -Do the Right Thing (almost) regardless of how the variable is specified. - -""" - -import os - -import SCons.Memoize -import SCons.Node -import SCons.Util - -# -# Variables to specify the different types of entries in a PathList object: -# - -TYPE_STRING_NO_SUBST = 0 # string with no '$' -TYPE_STRING_SUBST = 1 # string containing '$' -TYPE_OBJECT = 2 # other object - -def node_conv(obj): - """ - This is the "string conversion" routine that we have our substitutions - use to return Nodes, not strings. This relies on the fact that an - EntryProxy object has a get() method that returns the underlying - Node that it wraps, which is a bit of architectural dependence - that we might need to break or modify in the future in response to - additional requirements. - """ - try: - get = obj.get - except AttributeError: - if isinstance(obj, SCons.Node.Node) or SCons.Util.is_Sequence( obj ): - result = obj - else: - result = str(obj) - else: - result = get() - return result - -class _PathList(object): - """ - An actual PathList object. - """ - def __init__(self, pathlist): - """ - Initializes a PathList object, canonicalizing the input and - pre-processing it for quicker substitution later. - - The stored representation of the PathList is a list of tuples - containing (type, value), where the "type" is one of the TYPE_* - variables defined above. 
We distinguish between: - - strings that contain no '$' and therefore need no - delayed-evaluation string substitution (we expect that there - will be many of these and that we therefore get a pretty - big win from avoiding string substitution) - - strings that contain '$' and therefore need substitution - (the hard case is things like '${TARGET.dir}/include', - which require re-evaluation for every target + source) - - other objects (which may be something like an EntryProxy - that needs a method called to return a Node) - - Pre-identifying the type of each element in the PathList up-front - and storing the type in the list of tuples is intended to reduce - the amount of calculation when we actually do the substitution - over and over for each target. - """ - if SCons.Util.is_String(pathlist): - pathlist = pathlist.split(os.pathsep) - elif not SCons.Util.is_Sequence(pathlist): - pathlist = [pathlist] - - pl = [] - for p in pathlist: - try: - found = '$' in p - except (AttributeError, TypeError): - type = TYPE_OBJECT - else: - if not found: - type = TYPE_STRING_NO_SUBST - else: - type = TYPE_STRING_SUBST - pl.append((type, p)) - - self.pathlist = tuple(pl) - - def __len__(self): return len(self.pathlist) - - def __getitem__(self, i): return self.pathlist[i] - - def subst_path(self, env, target, source): - """ - Performs construction variable substitution on a pre-digested - PathList for a specific target and source. - """ - result = [] - for type, value in self.pathlist: - if type == TYPE_STRING_SUBST: - value = env.subst(value, target=target, source=source, - conv=node_conv) - if SCons.Util.is_Sequence(value): - result.extend(SCons.Util.flatten(value)) - elif value: - result.append(value) - elif type == TYPE_OBJECT: - value = node_conv(value) - if value: - result.append(value) - elif value: - result.append(value) - return tuple(result) - - -class PathListCache(object): - """ - A class to handle caching of PathList lookups. 
- - This class gets instantiated once and then deleted from the namespace, - so it's used as a Singleton (although we don't enforce that in the - usual Pythonic ways). We could have just made the cache a dictionary - in the module namespace, but putting it in this class allows us to - use the same Memoizer pattern that we use elsewhere to count cache - hits and misses, which is very valuable. - - Lookup keys in the cache are computed by the _PathList_key() method. - Cache lookup should be quick, so we don't spend cycles canonicalizing - all forms of the same lookup key. For example, 'x:y' and ['x', - 'y'] logically represent the same list, but we don't bother to - split string representations and treat those two equivalently. - (Note, however, that we do, treat lists and tuples the same.) - - The main type of duplication we're trying to catch will come from - looking up the same path list from two different clones of the - same construction environment. That is, given - - env2 = env1.Clone() - - both env1 and env2 will have the same CPPPATH value, and we can - cheaply avoid re-parsing both values of CPPPATH by using the - common value from this cache. - """ - def __init__(self): - self._memo = {} - - def _PathList_key(self, pathlist): - """ - Returns the key for memoization of PathLists. - - Note that we want this to be pretty quick, so we don't completely - canonicalize all forms of the same list. For example, - 'dir1:$ROOT/dir2' and ['$ROOT/dir1', 'dir'] may logically - represent the same list if you're executing from $ROOT, but - we're not going to bother splitting strings into path elements, - or massaging strings into Nodes, to identify that equivalence. - We just want to eliminate obvious redundancy from the normal - case of re-using exactly the same cloned value for a path. 
- """ - if SCons.Util.is_Sequence(pathlist): - pathlist = tuple(SCons.Util.flatten(pathlist)) - return pathlist - - @SCons.Memoize.CountDictCall(_PathList_key) - def PathList(self, pathlist): - """ - Returns the cached _PathList object for the specified pathlist, - creating and caching a new object as necessary. - """ - pathlist = self._PathList_key(pathlist) - try: - memo_dict = self._memo['PathList'] - except KeyError: - memo_dict = {} - self._memo['PathList'] = memo_dict - else: - try: - return memo_dict[pathlist] - except KeyError: - pass - - result = _PathList(pathlist) - - memo_dict[pathlist] = result - - return result - -PathList = PathListCache().PathList - - -del PathListCache - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/__init__.py deleted file mode 100644 index 2e3d6cd9b3f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/__init__.py +++ /dev/null @@ -1,296 +0,0 @@ -"""SCons.Platform - -SCons platform selection. - -This looks for modules that define a callable object that can modify a -construction environment as appropriate for a given platform. - -Note that we take a more simplistic view of "platform" than Python does. -We're looking for a single string that determines a set of -tool-independent variables with which to initialize a construction -environment. Consequently, we'll examine both sys.platform and os.name -(and anything else that might come in to play) in order to return some -specification which is unique enough for our purposes. - -Note that because this subsystem just *selects* a callable that can -modify a construction environment, it's possible for people to define -their own "platform specification" in an arbitrary callable function. 
-No one needs to use or tie in to this subsystem in order to roll -their own platform definition. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Platform/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.compat - -import importlib -import os -import sys -import tempfile - -import SCons.Errors -import SCons.Subst -import SCons.Tool - - -def platform_default(): - """Return the platform string for our execution environment. - - The returned value should map to one of the SCons/Platform/*.py - files. Since we're architecture independent, though, we don't - care about the machine architecture. 
- """ - osname = os.name - if osname == 'java': - osname = os._osType - if osname == 'posix': - if sys.platform == 'cygwin': - return 'cygwin' - elif sys.platform.find('irix') != -1: - return 'irix' - elif sys.platform.find('sunos') != -1: - return 'sunos' - elif sys.platform.find('hp-ux') != -1: - return 'hpux' - elif sys.platform.find('aix') != -1: - return 'aix' - elif sys.platform.find('darwin') != -1: - return 'darwin' - else: - return 'posix' - elif os.name == 'os2': - return 'os2' - else: - return sys.platform - - -def platform_module(name = platform_default()): - """Return the imported module for the platform. - - This looks for a module name that matches the specified argument. - If the name is unspecified, we fetch the appropriate default for - our execution environment. - """ - full_name = 'SCons.Platform.' + name - if full_name not in sys.modules: - if os.name == 'java': - eval(full_name) - else: - try: - # the specific platform module is a relative import - mod = importlib.import_module("." + name, __name__) - except ImportError: - try: - import zipimport - importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] ) - mod = importer.load_module(full_name) - except ImportError: - raise SCons.Errors.UserError("No platform named '%s'" % name) - setattr(SCons.Platform, name, mod) - return sys.modules[full_name] - - -def DefaultToolList(platform, env): - """Select a default tool list for the specified platform. - """ - return SCons.Tool.tool_list(platform, env) - - -class PlatformSpec(object): - def __init__(self, name, generate): - self.name = name - self.generate = generate - - def __call__(self, *args, **kw): - return self.generate(*args, **kw) - - def __str__(self): - return self.name - - -class TempFileMunge(object): - """A callable class. You can set an Environment variable to this, - then call it with a string argument, then it will perform temporary - file substitution on it. 
This is used to circumvent the long command - line limitation. - - Example usage: - env["TEMPFILE"] = TempFileMunge - env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES','$LINKCOMSTR')}" - - By default, the name of the temporary file used begins with a - prefix of '@'. This may be configured for other tool chains by - setting '$TEMPFILEPREFIX': - env["TEMPFILEPREFIX"] = '-@' # diab compiler - env["TEMPFILEPREFIX"] = '-via' # arm tool chain - env["TEMPFILEPREFIX"] = '' # (the empty string) PC Lint - - You can configure the extension of the temporary file through the - TEMPFILESUFFIX variable, which defaults to '.lnk' (see comments - in the code below): - env["TEMPFILESUFFIX"] = '.lnt' # PC Lint - """ - def __init__(self, cmd, cmdstr = None): - self.cmd = cmd - self.cmdstr = cmdstr - - def __call__(self, target, source, env, for_signature): - if for_signature: - # If we're being called for signature calculation, it's - # because we're being called by the string expansion in - # Subst.py, which has the logic to strip any $( $) that - # may be in the command line we squirreled away. So we - # just return the raw command line and let the upper - # string substitution layers do their thing. - return self.cmd - - # Now we're actually being called because someone is actually - # going to try to execute the command, so we have to do our - # own expansion. 
- cmd = env.subst_list(self.cmd, SCons.Subst.SUBST_CMD, target, source)[0] - try: - maxline = int(env.subst('$MAXLINELENGTH')) - except ValueError: - maxline = 2048 - - length = 0 - for c in cmd: - length += len(c) - length += len(cmd) - 1 - if length <= maxline: - return self.cmd - - # Check if we already created the temporary file for this target - # It should have been previously done by Action.strfunction() call - node = target[0] if SCons.Util.is_List(target) else target - cmdlist = getattr(node.attributes, 'tempfile_cmdlist', None) \ - if node is not None else None - if cmdlist is not None: - return cmdlist - - # We do a normpath because mktemp() has what appears to be - # a bug in Windows that will use a forward slash as a path - # delimiter. Windows' link mistakes that for a command line - # switch and barfs. - # - # Default to the .lnk suffix for the benefit of the Phar Lap - # linkloc linker, which likes to append an .lnk suffix if - # none is given. - if env.has_key('TEMPFILESUFFIX'): - suffix = env.subst('$TEMPFILESUFFIX') - else: - suffix = '.lnk' - - fd, tmp = tempfile.mkstemp(suffix, text=True) - native_tmp = SCons.Util.get_native_path(os.path.normpath(tmp)) - - if env.get('SHELL', None) == 'sh': - # The sh shell will try to escape the backslashes in the - # path, so unescape them. - native_tmp = native_tmp.replace('\\', r'\\\\') - # In Cygwin, we want to use rm to delete the temporary - # file, because del does not exist in the sh shell. - rm = env.Detect('rm') or 'del' - else: - # Don't use 'rm' if the shell is not sh, because rm won't - # work with the Windows shells (cmd.exe or command.com) or - # Windows path names. 
- rm = 'del' - - prefix = env.subst('$TEMPFILEPREFIX') - if not prefix: - prefix = '@' - - args = list(map(SCons.Subst.quote_spaces, cmd[1:])) - join_char = env.get('TEMPFILEARGJOIN',' ') - os.write(fd, bytearray(join_char.join(args) + "\n",'utf-8')) - os.close(fd) - - # XXX Using the SCons.Action.print_actions value directly - # like this is bogus, but expedient. This class should - # really be rewritten as an Action that defines the - # __call__() and strfunction() methods and lets the - # normal action-execution logic handle whether or not to - # print/execute the action. The problem, though, is all - # of that is decided before we execute this method as - # part of expanding the $TEMPFILE construction variable. - # Consequently, refactoring this will have to wait until - # we get more flexible with allowing Actions to exist - # independently and get strung together arbitrarily like - # Ant tasks. In the meantime, it's going to be more - # user-friendly to not let obsession with architectural - # purity get in the way of just being helpful, so we'll - # reach into SCons.Action directly. - if SCons.Action.print_actions: - cmdstr = env.subst(self.cmdstr, SCons.Subst.SUBST_RAW, target, - source) if self.cmdstr is not None else '' - # Print our message only if XXXCOMSTR returns an empty string - if len(cmdstr) == 0 : - cmdstr = ("Using tempfile "+native_tmp+" for command line:\n"+ - str(cmd[0]) + " " + " ".join(args)) - self._print_cmd_str(target, source, env, cmdstr) - - # Store the temporary file command list into the target Node.attributes - # to avoid creating two temporary files one for print and one for execute. 
- cmdlist = [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ] - if node is not None: - try : - setattr(node.attributes, 'tempfile_cmdlist', cmdlist) - except AttributeError: - pass - return cmdlist - - def _print_cmd_str(self, target, source, env, cmdstr): - # check if the user has specified a cmd line print function - print_func = None - try: - get = env.get - except AttributeError: - pass - else: - print_func = get('PRINT_CMD_LINE_FUNC') - - # use the default action cmd line print if user did not supply one - if not print_func: - action = SCons.Action._ActionAction() - action.print_cmd_line(cmdstr, target, source, env) - else: - print_func(cmdstr, target, source, env) - - -def Platform(name = platform_default()): - """Select a canned Platform specification. - """ - module = platform_module(name) - spec = PlatformSpec(name, module.generate) - return spec - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/aix.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/aix.py deleted file mode 100644 index c5964b6458a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/aix.py +++ /dev/null @@ -1,85 +0,0 @@ -"""engine.SCons.Platform.aix - -Platform-specific initialization for IBM AIX systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/aix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import subprocess - -from . import posix - -import SCons.Util -import SCons.Action - -def get_xlc(env, xlc=None, packages=[]): - # Use the AIX package installer tool lslpp to figure out where a - # given xl* compiler is installed and what version it is. 
- xlcPath = None - xlcVersion = None - - if xlc is None: - xlc = env.get('CC', 'xlc') - if SCons.Util.is_List(xlc): - xlc = xlc[0] - for package in packages: - # find the installed filename, which may be a symlink as well - pipe = SCons.Action._subproc(env, ['lslpp', '-fc', package], - stdin = 'devnull', - stderr = 'devnull', - stdout = subprocess.PIPE) - # output of lslpp is something like this: - # #Path:Fileset:File - # /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/exe/xlCcpp - # /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/bin/xlc_r -> /usr/vac/bin/xlc - for line in pipe.stdout: - if xlcPath: - continue # read everything to let lslpp terminate - fileset, filename = line.split(':')[1:3] - filename = filename.split()[0] - if ('/' in xlc and filename == xlc) \ - or ('/' not in xlc and filename.endswith('/' + xlc)): - xlcVersion = fileset.split()[1] - xlcPath, sep, xlc = filename.rpartition('/') - pass - pass - return (xlcPath, xlc, xlcVersion) - -def generate(env): - posix.generate(env) - #Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion - env['MAXLINELENGTH'] = 21576 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/cygwin.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/cygwin.py deleted file mode 100644 index fe71ea91cc3..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/cygwin.py +++ /dev/null @@ -1,64 +0,0 @@ -"""SCons.Platform.cygwin - -Platform-specific initialization for Cygwin systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/cygwin.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import sys - -from . 
import posix -from SCons.Platform import TempFileMunge - -CYGWIN_DEFAULT_PATHS = [] -if sys.platform == 'win32': - CYGWIN_DEFAULT_PATHS = [ - r'C:\cygwin64\bin', - r'C:\cygwin\bin' - ] - -def generate(env): - posix.generate(env) - - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '.exe' - env['SHLIBPREFIX'] = '' - env['SHLIBSUFFIX'] = '.dll' - env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX', '$IMPLIBPREFIX' ] - env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX', '$IMPLIBSUFFIX' ] - env['TEMPFILE'] = TempFileMunge - env['TEMPFILEPREFIX'] = '@' - env['MAXLINELENGTH'] = 2048 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/darwin.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/darwin.py deleted file mode 100644 index ff0d50dee5e..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/darwin.py +++ /dev/null @@ -1,73 +0,0 @@ -"""engine.SCons.Platform.darwin - -Platform-specific initialization for Mac OS X systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/darwin.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . import posix -import os - -def generate(env): - posix.generate(env) - env['SHLIBSUFFIX'] = '.dylib' - # put macports paths at front to override Apple's versions, fink path is after - # For now let people who want Macports or Fink tools specify it! - # env['ENV']['PATH'] = '/opt/local/bin:/opt/local/sbin:' + env['ENV']['PATH'] + ':/sw/bin' - - # Store extra system paths in env['ENV']['PATHOSX'] - - filelist = ['/etc/paths',] - # make sure this works on Macs with Tiger or earlier - try: - dirlist = os.listdir('/etc/paths.d') - except: - dirlist = [] - - for file in dirlist: - filelist.append('/etc/paths.d/'+file) - - for file in filelist: - if os.path.isfile(file): - with open(file, 'r') as f: - lines = f.readlines() - for line in lines: - if line: - env.AppendENVPath('PATHOSX', line.strip('\n')) - - # Not sure why this wasn't the case all along? 
- if env['ENV'].get('PATHOSX', False) and os.environ.get('SCONS_USE_MAC_PATHS', False): - env.AppendENVPath('PATH',env['ENV']['PATHOSX']) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/hpux.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/hpux.py deleted file mode 100644 index 3158438ee39..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/hpux.py +++ /dev/null @@ -1,46 +0,0 @@ -"""engine.SCons.Platform.hpux - -Platform-specific initialization for HP-UX systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Platform/hpux.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . import posix - -def generate(env): - posix.generate(env) - #Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion - env['MAXLINELENGTH'] = 2045000 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/irix.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/irix.py deleted file mode 100644 index 8643b14a0a5..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/irix.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Platform.irix - -Platform-specific initialization for SGI IRIX systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/irix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . import posix - -def generate(env): - posix.generate(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/mingw.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/mingw.py deleted file mode 100644 index 1961606a133..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/mingw.py +++ /dev/null @@ -1,39 +0,0 @@ -"""SCons.Platform.mingw - -Platform-specific initialization for the MinGW system. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/mingw.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import sys - -MINGW_DEFAULT_PATHS = [] -if sys.platform == 'win32': - MINGW_DEFAULT_PATHS = [ - r'C:\msys64', - r'C:\msys' - ] \ No newline at end of file diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/os2.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/os2.py deleted file mode 100644 index eb2c6798bbb..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/os2.py +++ /dev/null @@ -1,58 +0,0 @@ -"""SCons.Platform.os2 - -Platform-specific initialization for OS/2 systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/os2.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" -from . import win32 - -def generate(env): - if 'ENV' not in env: - env['ENV'] = {} - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.obj' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '.exe' - env['LIBPREFIX'] = '' - env['LIBSUFFIX'] = '.lib' - env['SHLIBPREFIX'] = '' - env['SHLIBSUFFIX'] = '.dll' - env['LIBPREFIXES'] = '$LIBPREFIX' - env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] - env['HOST_OS'] = 'os2' - env['HOST_ARCH'] = win32.get_architecture().arch - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/posix.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/posix.py deleted file mode 100644 index c9f9a2d57d7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/posix.py +++ /dev/null @@ -1,131 +0,0 @@ -"""SCons.Platform.posix - -Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Platform/posix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import errno -import os -import os.path -import subprocess -import sys -import select - -import SCons.Util -from SCons.Platform import TempFileMunge -from SCons.Platform.virtualenv import ImportVirtualenv -from SCons.Platform.virtualenv import ignore_virtualenv, enable_virtualenv - -exitvalmap = { - 2 : 127, - 13 : 126, -} - -def escape(arg): - """escape shell special characters""" - slash = '\\' - special = '"$' - - arg = arg.replace(slash, slash+slash) - for c in special: - arg = arg.replace(c, slash+c) - - # print("ESCAPE RESULT: %s" % arg) - return '"' + arg + '"' - - -def exec_subprocess(l, env): - proc = subprocess.Popen(l, env = env, close_fds = True) - return proc.wait() - -def subprocess_spawn(sh, escape, cmd, args, env): - return exec_subprocess([sh, '-c', ' '.join(args)], env) - -def exec_popen3(l, env, stdout, stderr): - proc = subprocess.Popen(l, env = env, close_fds = True, - stdout = stdout, - stderr = stderr) - return proc.wait() - -def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr): - # spawn using Popen3 combined with the env command - # the command name and the command's stdout is written to stdout - # the command's stderr is written to stderr - return exec_popen3([sh, '-c', ' '.join(args)], - env, stdout, stderr) - - -def generate(env): - # Bearing in mind we have python 2.4 as a baseline, we can just do this: - spawn = subprocess_spawn - pspawn = piped_env_spawn - # Note that this means that 'escape' is no longer used - - if 'ENV' not in env: - env['ENV'] = {} - env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin' - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.o' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '' - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.a' - env['SHLIBPREFIX'] = '$LIBPREFIX' - env['SHLIBSUFFIX'] = 
'.so' - env['LIBPREFIXES'] = [ '$LIBPREFIX' ] - env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] - env['PSPAWN'] = pspawn - env['SPAWN'] = spawn - env['SHELL'] = 'sh' - env['ESCAPE'] = escape - env['TEMPFILE'] = TempFileMunge - env['TEMPFILEPREFIX'] = '@' - #Based on LINUX: ARG_MAX=ARG_MAX=131072 - 3000 for environment expansion - #Note: specific platforms might rise or lower this value - env['MAXLINELENGTH'] = 128072 - - # This platform supports RPATH specifications. - env['__RPATH'] = '$_RPATH' - - # GDC is GCC family, but DMD and LDC have different options. - # Must be able to have GCC and DMD work in the same build, so: - env['__DRPATH'] = '$_DRPATH' - - if enable_virtualenv and not ignore_virtualenv: - ImportVirtualenv(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/sunos.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/sunos.py deleted file mode 100644 index 90418d5ceae..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/sunos.py +++ /dev/null @@ -1,50 +0,0 @@ -"""engine.SCons.Platform.sunos - -Platform-specific initialization for Sun systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/sunos.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . 
import posix - -def generate(env): - posix.generate(env) - # Based on sunSparc 8:32bit - # ARG_MAX=1048320 - 3000 for environment expansion - env['MAXLINELENGTH'] = 1045320 - env['PKGINFO'] = 'pkginfo' - env['PKGCHK'] = '/usr/sbin/pkgchk' - env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin' - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/virtualenv.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/virtualenv.py deleted file mode 100644 index 3416b4153db..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/virtualenv.py +++ /dev/null @@ -1,120 +0,0 @@ -"""SCons.Platform.virtualenv - -Support for virtualenv. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Platform/virtualenv.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import sys -import SCons.Util - - -virtualenv_enabled_by_default = False - - -def _enable_virtualenv_default(): - return SCons.Util.get_os_env_bool('SCONS_ENABLE_VIRTUALENV', virtualenv_enabled_by_default) - - -def _ignore_virtualenv_default(): - return SCons.Util.get_os_env_bool('SCONS_IGNORE_VIRTUALENV', False) - - -enable_virtualenv = _enable_virtualenv_default() -ignore_virtualenv = _ignore_virtualenv_default() -virtualenv_variables = ['VIRTUAL_ENV', 'PIPENV_ACTIVE'] - - -def _running_in_virtualenv(): - """Returns True, if scons is executed within a virtualenv""" - # see https://stackoverflow.com/a/42580137 - return (hasattr(sys, 'real_prefix') or - (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix)) - - -def _is_path_in(path, base): - """Returns true, if **path** is located under the **base** directory.""" - if not path or not base: # empty path may happen, base too - return False - rp = os.path.relpath(path, base) - return ((not rp.startswith(os.path.pardir)) and (not rp == os.path.curdir)) - - -def _inject_venv_variables(env): - if 'ENV' not in env: - env['ENV'] = {} - ENV = env['ENV'] - for name in virtualenv_variables: - try: - ENV[name] = os.environ[name] - except KeyError: - pass - -def _inject_venv_path(env, path_list=None): - """Modify environment such that SCons will take into account its virtualenv - when running external tools.""" - if path_list is None: - path_list = os.getenv('PATH') - env.PrependENVPath('PATH', select_paths_in_venv(path_list)) - - -def select_paths_in_venv(path_list): - """Returns a list of paths from **path_list** which are under virtualenv's - home directory.""" - if SCons.Util.is_String(path_list): - path_list = path_list.split(os.path.pathsep) - # Find in path_list the paths under the virtualenv's home - return [path for path in path_list if IsInVirtualenv(path)] - - 
-def ImportVirtualenv(env): - """Copies virtualenv-related environment variables from OS environment - to ``env['ENV']`` and prepends virtualenv's PATH to ``env['ENV']['PATH']``. - """ - _inject_venv_variables(env) - _inject_venv_path(env) - - -def Virtualenv(): - """Returns path to the virtualenv home if scons is executing within a - virtualenv or None, if not.""" - if _running_in_virtualenv(): - return sys.prefix - return None - - -def IsInVirtualenv(path): - """Returns True, if **path** is under virtualenv's home directory. If not, - or if we don't use virtualenv, returns False.""" - return _is_path_in(path, Virtualenv()) - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/win32.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/win32.py deleted file mode 100644 index b386aface40..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Platform/win32.py +++ /dev/null @@ -1,485 +0,0 @@ -"""SCons.Platform.win32 - -Platform-specific initialization for Win32 systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/win32.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path -import sys -import tempfile - -from SCons.Platform.posix import exitvalmap -from SCons.Platform import TempFileMunge -from SCons.Platform.virtualenv import ImportVirtualenv -from SCons.Platform.virtualenv import ignore_virtualenv, enable_virtualenv -import SCons.Util - -CHOCO_DEFAULT_PATH = [ - r'C:\ProgramData\chocolatey\bin' -] - -try: - import msvcrt - import win32api - import win32con -except ImportError: - parallel_msg = \ - "you do not seem to have the pywin32 extensions installed;\n" + \ - "\tparallel (-j) builds may not work reliably with open Python files." -except AttributeError: - parallel_msg = \ - "your pywin32 extensions do not support file handle operations;\n" + \ - "\tparallel (-j) builds may not work reliably with open Python files." -else: - parallel_msg = None - - if sys.version_info.major == 2: - import __builtin__ - - _builtin_file = __builtin__.file - _builtin_open = __builtin__.open - - def _scons_fixup_mode(mode): - """Adjust 'mode' to mark handle as non-inheritable. - - SCons is multithreaded, so allowing handles to be inherited by - children opens us up to races, where (e.g.) processes spawned by - the Taskmaster may inherit and retain references to files opened - by other threads. This may lead to sharing violations and, - ultimately, build failures. 
- - By including 'N' as part of fopen's 'mode' parameter, all file - handles returned from these functions are atomically marked as - non-inheritable. - """ - if not mode: - # Python's default is 'r'. - # https://docs.python.org/2/library/functions.html#open - mode = 'rN' - elif 'N' not in mode: - mode += 'N' - return mode - - class _scons_file(_builtin_file): - def __init__(self, name, mode=None, *args, **kwargs): - _builtin_file.__init__(self, name, _scons_fixup_mode(mode), - *args, **kwargs) - - def _scons_open(name, mode=None, *args, **kwargs): - return _builtin_open(name, _scons_fixup_mode(mode), - *args, **kwargs) - - __builtin__.file = _scons_file - __builtin__.open = _scons_open - - - -if False: - # Now swap out shutil.filecopy and filecopy2 for win32 api native CopyFile - try: - from ctypes import windll - import shutil - - CopyFile = windll.kernel32.CopyFileA - SetFileTime = windll.kernel32.SetFileTime - - _shutil_copy = shutil.copy - _shutil_copy2 = shutil.copy2 - - shutil.copy2 = CopyFile - - def win_api_copyfile(src,dst): - CopyFile(src,dst) - os.utime(dst) - - shutil.copy = win_api_copyfile - - except AttributeError: - parallel_msg = \ - "Couldn't override shutil.copy or shutil.copy2 falling back to shutil defaults" - - - - - - - -try: - import threading - spawn_lock = threading.Lock() - - # This locked version of spawnve works around a Windows - # MSVCRT bug, because its spawnve is not thread-safe. - # Without this, python can randomly crash while using -jN. 
- # See the python bug at http://bugs.python.org/issue6476 - # and SCons issue at - # https://github.com/SCons/scons/issues/2449 - def spawnve(mode, file, args, env): - spawn_lock.acquire() - try: - if mode == os.P_WAIT: - ret = os.spawnve(os.P_NOWAIT, file, args, env) - else: - ret = os.spawnve(mode, file, args, env) - finally: - spawn_lock.release() - if mode == os.P_WAIT: - pid, status = os.waitpid(ret, 0) - ret = status >> 8 - return ret -except ImportError: - # Use the unsafe method of spawnve. - # Please, don't try to optimize this try-except block - # away by assuming that the threading module is always present. - # In the test test/option-j.py we intentionally call SCons with - # a fake threading.py that raises an import exception right away, - # simulating a non-existent package. - def spawnve(mode, file, args, env): - return os.spawnve(mode, file, args, env) - -# The upshot of all this is that, if you are using Python 1.5.2, -# you had better have cmd or command.com in your PATH when you run -# scons. - - -def piped_spawn(sh, escape, cmd, args, env, stdout, stderr): - # There is no direct way to do that in python. What we do - # here should work for most cases: - # In case stdout (stderr) is not redirected to a file, - # we redirect it into a temporary file tmpFileStdout - # (tmpFileStderr) and copy the contents of this file - # to stdout (stderr) given in the argument - if not sh: - sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") - return 127 - else: - # one temporary file for stdout and stderr - tmpFileStdout = os.path.normpath(tempfile.mktemp()) - tmpFileStderr = os.path.normpath(tempfile.mktemp()) - - # check if output is redirected - stdoutRedirected = 0 - stderrRedirected = 0 - for arg in args: - # are there more possibilities to redirect stdout ? - if arg.find( ">", 0, 1 ) != -1 or arg.find( "1>", 0, 2 ) != -1: - stdoutRedirected = 1 - # are there more possibilities to redirect stderr ? 
- if arg.find( "2>", 0, 2 ) != -1: - stderrRedirected = 1 - - # redirect output of non-redirected streams to our tempfiles - if stdoutRedirected == 0: - args.append(">" + str(tmpFileStdout)) - if stderrRedirected == 0: - args.append("2>" + str(tmpFileStderr)) - - # actually do the spawn - try: - args = [sh, '/C', escape(' '.join(args))] - ret = spawnve(os.P_WAIT, sh, args, env) - except OSError as e: - # catch any error - try: - ret = exitvalmap[e.errno] - except KeyError: - sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e.errno, cmd, e.strerror)) - if stderr is not None: - stderr.write("scons: %s: %s\n" % (cmd, e.strerror)) - # copy child output from tempfiles to our streams - # and do clean up stuff - if stdout is not None and stdoutRedirected == 0: - try: - with open(tmpFileStdout, "r" ) as tmp: - stdout.write(tmp.read()) - os.remove(tmpFileStdout) - except (IOError, OSError): - pass - - if stderr is not None and stderrRedirected == 0: - try: - with open(tmpFileStderr, "r" ) as tmp: - stderr.write(tmp.read()) - os.remove(tmpFileStderr) - except (IOError, OSError): - pass - return ret - - -def exec_spawn(l, env): - try: - result = spawnve(os.P_WAIT, l[0], l, env) - except (OSError, EnvironmentError) as e: - try: - result = exitvalmap[e.errno] - sys.stderr.write("scons: %s: %s\n" % (l[0], e.strerror)) - except KeyError: - result = 127 - if len(l) > 2: - if len(l[2]) < 1000: - command = ' '.join(l[0:3]) - else: - command = l[0] - else: - command = l[0] - sys.stderr.write("scons: unknown OSError exception code %d - '%s': %s\n" % (e.errno, command, e.strerror)) - return result - - -def spawn(sh, escape, cmd, args, env): - if not sh: - sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") - return 127 - return exec_spawn([sh, '/C', escape(' '.join(args))], env) - -# Windows does not allow special characters in file names anyway, so no -# need for a complex escape function, we will just quote the arg, except -# 
that "cmd /c" requires that if an argument ends with a backslash it -# needs to be escaped so as not to interfere with closing double quote -# that we add. -def escape(x): - if x[-1] == '\\': - x = x + '\\' - return '"' + x + '"' - -# Get the windows system directory name -_system_root = None - - -def get_system_root(): - global _system_root - if _system_root is not None: - return _system_root - - # A resonable default if we can't read the registry - val = os.environ.get('SystemRoot', "C:\\WINDOWS") - - if SCons.Util.can_read_reg: - try: - # Look for Windows NT system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows NT\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - except SCons.Util.RegError: - try: - # Okay, try the Windows 9x system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - except KeyboardInterrupt: - raise - except: - pass - - # Ensure system root is a string and not unicode - # (This only matters for py27 were unicode in env passed to POpen fails) - val = str(val) - _system_root = val - return val - - -def get_program_files_dir(): - """ - Get the location of the program files directory - Returns - ------- - - """ - # Now see if we can look in the registry... 
- val = '' - if SCons.Util.can_read_reg: - try: - # Look for Windows Program Files directory - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') - except SCons.Util.RegError: - val = '' - pass - - if val == '': - # A reasonable default if we can't read the registry - # (Actually, it's pretty reasonable even if we can :-) - val = os.path.join(os.path.dirname(get_system_root()),"Program Files") - - return val - - -class ArchDefinition(object): - """ - Determine which windows CPU were running on. - A class for defining architecture-specific settings and logic. - """ - def __init__(self, arch, synonyms=[]): - self.arch = arch - self.synonyms = synonyms - -SupportedArchitectureList = [ - ArchDefinition( - 'x86', - ['i386', 'i486', 'i586', 'i686'], - ), - - ArchDefinition( - 'x86_64', - ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'], - ), - - ArchDefinition( - 'ia64', - ['IA64'], - ), -] - -SupportedArchitectureMap = {} -for a in SupportedArchitectureList: - SupportedArchitectureMap[a.arch] = a - for s in a.synonyms: - SupportedArchitectureMap[s] = a - - -def get_architecture(arch=None): - """Returns the definition for the specified architecture string. - - If no string is specified, the system default is returned (as defined - by the PROCESSOR_ARCHITEW6432 or PROCESSOR_ARCHITECTURE environment - variables). - """ - if arch is None: - arch = os.environ.get('PROCESSOR_ARCHITEW6432') - if not arch: - arch = os.environ.get('PROCESSOR_ARCHITECTURE') - return SupportedArchitectureMap.get(arch, ArchDefinition('', [''])) - - -def generate(env): - # Attempt to find cmd.exe (for WinNT/2k/XP) or - # command.com for Win9x - cmd_interp = '' - # First see if we can look in the registry... 
- if SCons.Util.can_read_reg: - try: - # Look for Windows NT system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows NT\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - cmd_interp = os.path.join(val, 'System32\\cmd.exe') - except SCons.Util.RegError: - try: - # Okay, try the Windows 9x system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - cmd_interp = os.path.join(val, 'command.com') - except KeyboardInterrupt: - raise - except: - pass - - # For the special case of not having access to the registry, we - # use a temporary path and pathext to attempt to find the command - # interpreter. If we fail, we try to find the interpreter through - # the env's PATH. The problem with that is that it might not - # contain an ENV and a PATH. - if not cmd_interp: - systemroot = get_system_root() - tmp_path = systemroot + os.pathsep + \ - os.path.join(systemroot,'System32') - tmp_pathext = '.com;.exe;.bat;.cmd' - if 'PATHEXT' in os.environ: - tmp_pathext = os.environ['PATHEXT'] - cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext) - if not cmd_interp: - cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext) - - if not cmd_interp: - cmd_interp = env.Detect('cmd') - if not cmd_interp: - cmd_interp = env.Detect('command') - - if 'ENV' not in env: - env['ENV'] = {} - - # Import things from the external environment to the construction - # environment's ENV. This is a potential slippery slope, because we - # *don't* want to make builds dependent on the user's environment by - # default. We're doing this for SystemRoot, though, because it's - # needed for anything that uses sockets, and seldom changes, and - # for SystemDrive because it's related. - # - # Weigh the impact carefully before adding other variables to this list. 
- import_env = ['SystemDrive', 'SystemRoot', 'TEMP', 'TMP' ] - for var in import_env: - v = os.environ.get(var) - if v: - env['ENV'][var] = v - - if 'COMSPEC' not in env['ENV']: - v = os.environ.get("COMSPEC") - if v: - env['ENV']['COMSPEC'] = v - - env.AppendENVPath('PATH', get_system_root() + '\\System32') - - env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD' - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.obj' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '.exe' - env['LIBPREFIX'] = '' - env['LIBSUFFIX'] = '.lib' - env['SHLIBPREFIX'] = '' - env['SHLIBSUFFIX'] = '.dll' - env['LIBPREFIXES'] = [ '$LIBPREFIX' ] - env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ] - env['PSPAWN'] = piped_spawn - env['SPAWN'] = spawn - env['SHELL'] = cmd_interp - env['TEMPFILE'] = TempFileMunge - env['TEMPFILEPREFIX'] = '@' - env['MAXLINELENGTH'] = 2048 - env['ESCAPE'] = escape - - env['HOST_OS'] = 'win32' - env['HOST_ARCH'] = get_architecture().arch - - if enable_virtualenv and not ignore_virtualenv: - ImportVirtualenv(env) - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/SConf.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/SConf.py deleted file mode 100644 index 8f917a54f80..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/SConf.py +++ /dev/null @@ -1,1107 +0,0 @@ -"""SCons.SConf - -Autoconf-like configuration support. - -In other words, SConf allows to run tests on the build machine to detect -capabilities of system and do some things based on result: generate config -files, header files for C/C++, update variables in environment. - -Tests on the build system can detect if compiler sees header files, if -libraries are installed, if some command line options are supported etc. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -from __future__ import print_function - -__revision__ = "src/engine/SCons/SConf.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.compat - -import io -import os -import re -import sys -import traceback - -import SCons.Action -import SCons.Builder -import SCons.Errors -import SCons.Job -import SCons.Node.FS -import SCons.Taskmaster -import SCons.Util -import SCons.Warnings -import SCons.Conftest - -from SCons.Debug import Trace -from collections import defaultdict - -# Turn off the Conftest error logging -SCons.Conftest.LogInputFiles = 0 -SCons.Conftest.LogErrorMessages = 0 - -# Set -build_type = None -build_types = ['clean', 'help'] - -def SetBuildType(type): - global build_type - build_type = type - -# to be set, if we are in dry-run mode -dryrun = 0 - -AUTO=0 # use SCons dependency scanning for up-to-date checks -FORCE=1 # force all tests to be rebuilt -CACHE=2 # force all tests to be taken from cache (raise an error, if necessary) -cache_mode = AUTO - -def SetCacheMode(mode): - """Set the Configure cache mode. 
mode must be one of "auto", "force", - or "cache".""" - global cache_mode - if mode == "auto": - cache_mode = AUTO - elif mode == "force": - cache_mode = FORCE - elif mode == "cache": - cache_mode = CACHE - else: - raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode) - -progress_display = SCons.Util.display # will be overwritten by SCons.Script -def SetProgressDisplay(display): - """Set the progress display to use (called from SCons.Script)""" - global progress_display - progress_display = display - -SConfFS = None - -_ac_build_counter = defaultdict(int) -_ac_config_logs = {} # all config.log files created in this build -_ac_config_hs = {} # all config.h files created in this build -sconf_global = None # current sconf object - -def _createConfigH(target, source, env): - t = open(str(target[0]), "w") - defname = re.sub('[^A-Za-z0-9_]', '_', str(target[0]).upper()) - t.write("""#ifndef %(DEFNAME)s_SEEN -#define %(DEFNAME)s_SEEN - -""" % {'DEFNAME' : defname}) - t.write(source[0].get_contents().decode()) - t.write(""" -#endif /* %(DEFNAME)s_SEEN */ -""" % {'DEFNAME' : defname}) - t.close() - -def _stringConfigH(target, source, env): - return "scons: Configure: creating " + str(target[0]) - - -def NeedConfigHBuilder(): - if len(_ac_config_hs) == 0: - return False - else: - return True - -def CreateConfigHBuilder(env): - """Called if necessary just before the building targets phase begins.""" - action = SCons.Action.Action(_createConfigH, - _stringConfigH) - sconfigHBld = SCons.Builder.Builder(action=action) - env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} ) - for k in list(_ac_config_hs.keys()): - env.SConfigHBuilder(k, env.Value(_ac_config_hs[k])) - - -class SConfWarning(SCons.Warnings.Warning): - pass -SCons.Warnings.enableWarningClass(SConfWarning) - -# some error definitions -class SConfError(SCons.Errors.UserError): - def __init__(self,msg): - SCons.Errors.UserError.__init__(self,msg) - -class ConfigureDryRunError(SConfError): - """Raised when 
a file or directory needs to be updated during a Configure - process, but the user requested a dry-run""" - def __init__(self,target): - if not isinstance(target, SCons.Node.FS.File): - msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target) - else: - msg = 'Cannot update configure test "%s" within a dry-run.' % str(target) - SConfError.__init__(self,msg) - -class ConfigureCacheError(SConfError): - """Raised when a use explicitely requested the cache feature, but the test - is run the first time.""" - def __init__(self,target): - SConfError.__init__(self, '"%s" is not yet built and cache is forced.' % str(target)) - - -# define actions for building text files -def _createSource(target, source, env): - fd = open(str(target[0]), "w") - fd.write(source[0].get_contents().decode()) - fd.close() - - -def _stringSource( target, source, env ): - return (str(target[0]) + ' <-\n |' + - source[0].get_contents().decode().replace( '\n', "\n |" ) ) - -class SConfBuildInfo(SCons.Node.FS.FileBuildInfo): - """ - Special build info for targets of configure tests. Additional members - are result (did the builder succeed last time?) and string, which - contains messages of the original build phase. - """ - __slots__ = ('result', 'string') - - def __init__(self): - self.result = None # -> 0/None -> no error, != 0 error - self.string = None # the stdout / stderr output when building the target - - def set_build_result(self, result, string): - self.result = result - self.string = string - - -class Streamer(object): - """ - 'Sniffer' for a file-like writable object. Similar to the unix tool tee. 
- """ - def __init__(self, orig): - self.orig = orig - self.s = io.StringIO() - - def write(self, str): - if self.orig: - self.orig.write(str) - try: - self.s.write(str) - except TypeError as e: - # "unicode argument expected" bug in IOStream (python 2.x) - self.s.write(str.decode()) - - def writelines(self, lines): - for l in lines: - self.write(l + '\n') - - def getvalue(self): - """ - Return everything written to orig since the Streamer was created. - """ - return self.s.getvalue() - - def flush(self): - if self.orig: - self.orig.flush() - self.s.flush() - - -class SConfBuildTask(SCons.Taskmaster.AlwaysTask): - """ - This is almost the same as SCons.Script.BuildTask. Handles SConfErrors - correctly and knows about the current cache_mode. - """ - def display(self, message): - if sconf_global.logstream: - sconf_global.logstream.write("scons: Configure: " + message + "\n") - - def display_cached_string(self, bi): - """ - Logs the original builder messages, given the SConfBuildInfo instance - bi. - """ - if not isinstance(bi, SConfBuildInfo): - SCons.Warnings.warn(SConfWarning, - "The stored build information has an unexpected class: %s" % bi.__class__) - else: - self.display("The original builder output was:\n" + - (" |" + str(bi.string)).replace("\n", "\n |")) - - def failed(self): - # check, if the reason was a ConfigureDryRunError or a - # ConfigureCacheError and if yes, reraise the exception - exc_type = self.exc_info()[0] - if issubclass(exc_type, SConfError): - # TODO pylint E0704: bare raise not inside except - raise - elif issubclass(exc_type, SCons.Errors.BuildError): - # we ignore Build Errors (occurs, when a test doesn't pass) - # Clear the exception to prevent the contained traceback - # to build a reference cycle. 
- self.exc_clear() - else: - self.display('Caught exception while building "%s":\n' % - self.targets[0]) - sys.excepthook(*self.exc_info()) - return SCons.Taskmaster.Task.failed(self) - - def collect_node_states(self): - # returns (is_up_to_date, cached_error, cachable) - # where is_up_to_date is 1, if the node(s) are up_to_date - # cached_error is 1, if the node(s) are up_to_date, but the - # build will fail - # cachable is 0, if some nodes are not in our cache - T = 0 - changed = False - cached_error = False - cachable = True - for t in self.targets: - if T: Trace('%s' % (t)) - bi = t.get_stored_info().binfo - if isinstance(bi, SConfBuildInfo): - if T: Trace(': SConfBuildInfo') - if cache_mode == CACHE: - t.set_state(SCons.Node.up_to_date) - if T: Trace(': set_state(up_to-date)') - else: - if T: Trace(': get_state() %s' % t.get_state()) - if T: Trace(': changed() %s' % t.changed()) - if (t.get_state() != SCons.Node.up_to_date and t.changed()): - changed = True - if T: Trace(': changed %s' % changed) - cached_error = cached_error or bi.result - else: - if T: Trace(': else') - # the node hasn't been built in a SConf context or doesn't - # exist - cachable = False - changed = ( t.get_state() != SCons.Node.up_to_date ) - if T: Trace(': changed %s' % changed) - if T: Trace('\n') - return (not changed, cached_error, cachable) - - def execute(self): - if not self.targets[0].has_builder(): - return - - sconf = sconf_global - - is_up_to_date, cached_error, cachable = self.collect_node_states() - - if cache_mode == CACHE and not cachable: - raise ConfigureCacheError(self.targets[0]) - elif cache_mode == FORCE: - is_up_to_date = 0 - - if cached_error and is_up_to_date: - self.display("Building \"%s\" failed in a previous run and all " - "its sources are up to date." 
% str(self.targets[0])) - binfo = self.targets[0].get_stored_info().binfo - self.display_cached_string(binfo) - raise SCons.Errors.BuildError # will be 'caught' in self.failed - elif is_up_to_date: - self.display("\"%s\" is up to date." % str(self.targets[0])) - binfo = self.targets[0].get_stored_info().binfo - self.display_cached_string(binfo) - elif dryrun: - raise ConfigureDryRunError(self.targets[0]) - else: - # note stdout and stderr are the same here - s = sys.stdout = sys.stderr = Streamer(sys.stdout) - try: - env = self.targets[0].get_build_env() - env['PSTDOUT'] = env['PSTDERR'] = s - try: - sconf.cached = 0 - self.targets[0].build() - finally: - sys.stdout = sys.stderr = env['PSTDOUT'] = \ - env['PSTDERR'] = sconf.logstream - except KeyboardInterrupt: - raise - except SystemExit: - exc_value = sys.exc_info()[1] - raise SCons.Errors.ExplicitExit(self.targets[0],exc_value.code) - except Exception as e: - for t in self.targets: - binfo = SConfBuildInfo() - binfo.merge(t.get_binfo()) - binfo.set_build_result(1, s.getvalue()) - sconsign_entry = SCons.SConsign.SConsignEntry() - sconsign_entry.binfo = binfo - #sconsign_entry.ninfo = self.get_ninfo() - # We'd like to do this as follows: - # t.store_info(binfo) - # However, we need to store it as an SConfBuildInfo - # object, and store_info() will turn it into a - # regular FileNodeInfo if the target is itself a - # regular File. - sconsign = t.dir.sconsign() - sconsign.set_entry(t.name, sconsign_entry) - sconsign.merge() - raise e - else: - for t in self.targets: - binfo = SConfBuildInfo() - binfo.merge(t.get_binfo()) - binfo.set_build_result(0, s.getvalue()) - sconsign_entry = SCons.SConsign.SConsignEntry() - sconsign_entry.binfo = binfo - #sconsign_entry.ninfo = self.get_ninfo() - # We'd like to do this as follows: - # t.store_info(binfo) - # However, we need to store it as an SConfBuildInfo - # object, and store_info() will turn it into a - # regular FileNodeInfo if the target is itself a - # regular File. 
- sconsign = t.dir.sconsign() - sconsign.set_entry(t.name, sconsign_entry) - sconsign.merge() - -class SConfBase(object): - """This is simply a class to represent a configure context. After - creating a SConf object, you can call any tests. After finished with your - tests, be sure to call the Finish() method, which returns the modified - environment. - Some words about caching: In most cases, it is not necessary to cache - Test results explicitly. Instead, we use the scons dependency checking - mechanism. For example, if one wants to compile a test program - (SConf.TryLink), the compiler is only called, if the program dependencies - have changed. However, if the program could not be compiled in a former - SConf run, we need to explicitly cache this error. - """ - - def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR', - log_file='$CONFIGURELOG', config_h = None, _depth = 0): - """Constructor. Pass additional tests in the custom_tests-dictionary, - e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest - defines a custom test. - Note also the conf_dir and log_file arguments (you may want to - build tests in the VariantDir, not in the SourceDir) - """ - global SConfFS - - # Now create isolated override so setting source_decider doesn't affect parent Environment - if cache_mode == FORCE: - self.original_env = env - self.env = env.Clone() - - # Set up the Decider() to force rebuilds by saying - # that every source has changed. Note that we still - # call the environment's underlying source decider so - # that the correct .sconsign info will get calculated - # and keep the build state consistent. 
- def force_build(dependency, target, prev_ni, - repo_node=None, - env_decider=env.decide_source): - try: - env_decider(dependency, target, prev_ni, repo_node) - except Exception as e: - raise e - return True - - if self.env.decide_source.__code__ is not force_build.__code__: - self.env.Decider(force_build) - - else: - self.env = env - - # print("Override env:%s"%env) - - if not SConfFS: - SConfFS = SCons.Node.FS.default_fs or \ - SCons.Node.FS.FS(env.fs.pathTop) - if sconf_global is not None: - raise SCons.Errors.UserError - - if log_file is not None: - log_file = SConfFS.File(env.subst(log_file)) - self.logfile = log_file - self.logstream = None - self.lastTarget = None - self.depth = _depth - self.cached = 0 # will be set, if all test results are cached - - # add default tests - default_tests = { - 'CheckCC' : CheckCC, - 'CheckCXX' : CheckCXX, - 'CheckSHCC' : CheckSHCC, - 'CheckSHCXX' : CheckSHCXX, - 'CheckFunc' : CheckFunc, - 'CheckType' : CheckType, - 'CheckTypeSize' : CheckTypeSize, - 'CheckDeclaration' : CheckDeclaration, - 'CheckHeader' : CheckHeader, - 'CheckCHeader' : CheckCHeader, - 'CheckCXXHeader' : CheckCXXHeader, - 'CheckLib' : CheckLib, - 'CheckLibWithHeader' : CheckLibWithHeader, - 'CheckProg' : CheckProg, - } - self.AddTests(default_tests) - self.AddTests(custom_tests) - self.confdir = SConfFS.Dir(env.subst(conf_dir)) - if config_h is not None: - config_h = SConfFS.File(config_h) - self.config_h = config_h - self._startup() - - def Finish(self): - """Call this method after finished with your tests: - env = sconf.Finish() - """ - self._shutdown() - - return self.env - - def Define(self, name, value = None, comment = None): - """ - Define a pre processor symbol name, with the optional given value in the - current config header. - - If value is None (default), then #define name is written. If value is not - none, then #define name value is written. 
- - comment is a string which will be put as a C comment in the header, to explain the meaning of the value - (appropriate C comments will be added automatically). - """ - lines = [] - if comment: - comment_str = "/* %s */" % comment - lines.append(comment_str) - - if value is not None: - define_str = "#define %s %s" % (name, value) - else: - define_str = "#define %s" % name - lines.append(define_str) - lines.append('') - - self.config_h_text = self.config_h_text + '\n'.join(lines) - - def BuildNodes(self, nodes): - """ - Tries to build the given nodes immediately. Returns 1 on success, - 0 on error. - """ - if self.logstream is not None: - # override stdout / stderr to write in log file - oldStdout = sys.stdout - sys.stdout = self.logstream - oldStderr = sys.stderr - sys.stderr = self.logstream - - # the engine assumes the current path is the SConstruct directory ... - old_fs_dir = SConfFS.getcwd() - old_os_dir = os.getcwd() - SConfFS.chdir(SConfFS.Top, change_os_dir=1) - - # Because we take responsibility here for writing out our - # own .sconsign info (see SConfBuildTask.execute(), above), - # we override the store_info() method with a null place-holder - # so we really control how it gets written. - for n in nodes: - n.store_info = 0 - if not hasattr(n, 'attributes'): - n.attributes = SCons.Node.Node.Attrs() - n.attributes.keep_targetinfo = 1 - - if True: - # Some checkers have intermediate files (for example anything that compiles a c file into a program to run - # Those files need to be set to not release their target info, otherwise taskmaster will throw a - # Nonetype not callable - for c in n.children(scan=False): - # Keep debug code here. 
- # print("Checking [%s] for builders and then setting keep_targetinfo"%c) - if c.has_builder(): - n.store_info = 0 - if not hasattr(c, 'attributes'): - c.attributes = SCons.Node.Node.Attrs() - c.attributes.keep_targetinfo = 1 - # pass - - ret = 1 - - try: - # ToDo: use user options for calc - save_max_drift = SConfFS.get_max_drift() - SConfFS.set_max_drift(0) - tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask) - # we don't want to build tests in parallel - jobs = SCons.Job.Jobs(1, tm ) - jobs.run() - for n in nodes: - state = n.get_state() - if (state != SCons.Node.executed and - state != SCons.Node.up_to_date): - # the node could not be built. we return 0 in this case - ret = 0 - finally: - SConfFS.set_max_drift(save_max_drift) - os.chdir(old_os_dir) - SConfFS.chdir(old_fs_dir, change_os_dir=0) - if self.logstream is not None: - # restore stdout / stderr - sys.stdout = oldStdout - sys.stderr = oldStderr - return ret - - def pspawn_wrapper(self, sh, escape, cmd, args, env): - """Wrapper function for handling piped spawns. - - This looks to the calling interface (in Action.py) like a "normal" - spawn, but associates the call with the PSPAWN variable from - the construction environment and with the streams to which we - want the output logged. This gets slid into the construction - environment as the SPAWN variable so Action.py doesn't have to - know or care whether it's spawning a piped command or not. - """ - return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream) - - def TryBuild(self, builder, text=None, extension=""): - """Low level TryBuild implementation. Normally you don't need to - call that - you can use TryCompile / TryLink / TryRun instead - """ - global _ac_build_counter - - # Make sure we have a PSPAWN value, and save the current - # SPAWN value. 
- try: - self.pspawn = self.env['PSPAWN'] - except KeyError: - raise SCons.Errors.UserError('Missing PSPAWN construction variable.') - try: - save_spawn = self.env['SPAWN'] - except KeyError: - raise SCons.Errors.UserError('Missing SPAWN construction variable.') - - nodesToBeBuilt = [] - sourcetext = self.env.Value(text) - f = "conftest" - - if text is not None: - textSig = SCons.Util.MD5signature(sourcetext) - textSigCounter = str(_ac_build_counter[textSig]) - _ac_build_counter[textSig] += 1 - - f = "_".join([f, textSig, textSigCounter]) - textFile = self.confdir.File(f + extension) - textFileNode = self.env.SConfSourceBuilder(target=textFile, - source=sourcetext) - nodesToBeBuilt.extend(textFileNode) - - source = textFile - target = textFile.File(f + "SConfActionsContentDummyTarget") - else: - source = None - target = None - - action = builder.builder.action.get_contents(target=target, source=[source], env=self.env) - actionsig = SCons.Util.MD5signature(action) - f = "_".join([f, actionsig]) - - pref = self.env.subst( builder.builder.prefix ) - suff = self.env.subst( builder.builder.suffix ) - target = self.confdir.File(pref + f + suff) - - try: - # Slide our wrapper into the construction environment as - # the SPAWN function. - self.env['SPAWN'] = self.pspawn_wrapper - - nodes = builder(target = target, source = source) - if not SCons.Util.is_List(nodes): - nodes = [nodes] - nodesToBeBuilt.extend(nodes) - result = self.BuildNodes(nodesToBeBuilt) - - finally: - self.env['SPAWN'] = save_spawn - - if result: - self.lastTarget = nodes[0] - else: - self.lastTarget = None - - return result - - def TryAction(self, action, text = None, extension = ""): - """Tries to execute the given action with optional source file - contents and optional source file extension , - Returns the status (0 : failed, 1 : ok) and the contents of the - output file. 
- """ - builder = SCons.Builder.Builder(action=action) - self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} ) - ok = self.TryBuild(self.env.SConfActionBuilder, text, extension) - del self.env['BUILDERS']['SConfActionBuilder'] - if ok: - outputStr = self.lastTarget.get_text_contents() - return (1, outputStr) - return (0, "") - - def TryCompile( self, text, extension): - """Compiles the program given in text to an env.Object, using extension - as file extension (e.g. '.c'). Returns 1, if compilation was - successful, 0 otherwise. The target is saved in self.lastTarget (for - further processing). - """ - return self.TryBuild(self.env.Object, text, extension) - - def TryLink( self, text, extension ): - """Compiles the program given in text to an executable env.Program, - using extension as file extension (e.g. '.c'). Returns 1, if - compilation was successful, 0 otherwise. The target is saved in - self.lastTarget (for further processing). - """ - return self.TryBuild(self.env.Program, text, extension ) - - def TryRun(self, text, extension ): - """Compiles and runs the program given in text, using extension - as file extension (e.g. '.c'). Returns (1, outputStr) on success, - (0, '') otherwise. The target (a file containing the program's stdout) - is saved in self.lastTarget (for further processing). 
- """ - ok = self.TryLink(text, extension) - if( ok ): - prog = self.lastTarget - pname = prog.get_internal_path() - output = self.confdir.File(os.path.basename(pname)+'.out') - node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ]) - ok = self.BuildNodes(node) - if ok: - outputStr = SCons.Util.to_str(output.get_contents()) - return( 1, outputStr) - return (0, "") - - class TestWrapper(object): - """A wrapper around Tests (to ensure sanity)""" - def __init__(self, test, sconf): - self.test = test - self.sconf = sconf - def __call__(self, *args, **kw): - if not self.sconf.active: - raise SCons.Errors.UserError - context = CheckContext(self.sconf) - ret = self.test(context, *args, **kw) - if self.sconf.config_h is not None: - self.sconf.config_h_text = self.sconf.config_h_text + context.config_h - context.Result("error: no result") - return ret - - def AddTest(self, test_name, test_instance): - """Adds test_class to this SConf instance. It can be called with - self.test_name(...)""" - setattr(self, test_name, SConfBase.TestWrapper(test_instance, self)) - - def AddTests(self, tests): - """Adds all the tests given in the tests dictionary to this SConf - instance - """ - for name in list(tests.keys()): - self.AddTest(name, tests[name]) - - def _createDir( self, node ): - dirName = str(node) - if dryrun: - if not os.path.isdir( dirName ): - raise ConfigureDryRunError(dirName) - else: - if not os.path.isdir( dirName ): - os.makedirs( dirName ) - - def _startup(self): - """Private method. 
Set up logstream, and set the environment - variables necessary for a piped build - """ - global _ac_config_logs - global sconf_global - global SConfFS - - self.lastEnvFs = self.env.fs - self.env.fs = SConfFS - self._createDir(self.confdir) - self.confdir.up().add_ignore( [self.confdir] ) - - if self.logfile is not None and not dryrun: - # truncate logfile, if SConf.Configure is called for the first time - # in a build - if self.logfile in _ac_config_logs: - log_mode = "a" - else: - _ac_config_logs[self.logfile] = None - log_mode = "w" - fp = open(str(self.logfile), log_mode) - self.logstream = SCons.Util.Unbuffered(fp) - # logfile may stay in a build directory, so we tell - # the build system not to override it with a eventually - # existing file with the same name in the source directory - self.logfile.dir.add_ignore( [self.logfile] ) - - tb = traceback.extract_stack()[-3-self.depth] - old_fs_dir = SConfFS.getcwd() - SConfFS.chdir(SConfFS.Top, change_os_dir=0) - self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' % - (tb[0], tb[1], str(self.confdir)) ) - SConfFS.chdir(old_fs_dir) - else: - self.logstream = None - # we use a special builder to create source files from TEXT - action = SCons.Action.Action(_createSource, - _stringSource) - sconfSrcBld = SCons.Builder.Builder(action=action) - self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} ) - self.config_h_text = _ac_config_hs.get(self.config_h, "") - self.active = 1 - # only one SConf instance should be active at a time ... - sconf_global = self - - def _shutdown(self): - """Private method. 
Reset to non-piped spawn""" - global sconf_global, _ac_config_hs - - if not self.active: - raise SCons.Errors.UserError("Finish may be called only once!") - if self.logstream is not None and not dryrun: - self.logstream.write("\n") - self.logstream.close() - self.logstream = None - - # Now reset the decider if we changed it due to --config=force - # We saved original Environment passed in and cloned it to isolate - # it from being changed. - if cache_mode == FORCE: - self.env.Decider(self.original_env.decide_source) - - # remove the SConfSourceBuilder from the environment - blds = self.env['BUILDERS'] - del blds['SConfSourceBuilder'] - self.env.Replace( BUILDERS=blds ) - - self.active = 0 - sconf_global = None - if self.config_h is not None: - _ac_config_hs[self.config_h] = self.config_h_text - self.env.fs = self.lastEnvFs - -class CheckContext(object): - """Provides a context for configure tests. Defines how a test writes to the - screen and log file. - - A typical test is just a callable with an instance of CheckContext as - first argument: - - def CheckCustom(context, ...): - context.Message('Checking my weird test ... ') - ret = myWeirdTestFunction(...) - context.Result(ret) - - Often, myWeirdTestFunction will be one of - context.TryCompile/context.TryLink/context.TryRun. The results of - those are cached, for they are only rebuild, if the dependencies have - changed. - """ - - def __init__(self, sconf): - """Constructor. Pass the corresponding SConf instance.""" - self.sconf = sconf - self.did_show_result = 0 - - # for Conftest.py: - self.vardict = {} - self.havedict = {} - self.headerfilename = None - self.config_h = "" # config_h text will be stored here - # we don't regenerate the config.h file after each test. That means, - # that tests won't be able to include the config.h file, and so - # they can't do an #ifdef HAVE_XXX_H. This shouldn't be a major - # issue, though. 
If it turns out, that we need to include config.h - # in tests, we must ensure, that the dependencies are worked out - # correctly. Note that we can't use Conftest.py's support for config.h, - # cause we will need to specify a builder for the config.h file ... - - def Message(self, text): - """Inform about what we are doing right now, e.g. - 'Checking for SOMETHING ... ' - """ - self.Display(text) - self.sconf.cached = 1 - self.did_show_result = 0 - - def Result(self, res): - """Inform about the result of the test. If res is not a string, displays - 'yes' or 'no' depending on whether res is evaluated as true or false. - The result is only displayed when self.did_show_result is not set. - """ - if isinstance(res, str): - text = res - elif res: - text = "yes" - else: - text = "no" - - if self.did_show_result == 0: - # Didn't show result yet, do it now. - self.Display(text + "\n") - self.did_show_result = 1 - - def TryBuild(self, *args, **kw): - return self.sconf.TryBuild(*args, **kw) - - def TryAction(self, *args, **kw): - return self.sconf.TryAction(*args, **kw) - - def TryCompile(self, *args, **kw): - return self.sconf.TryCompile(*args, **kw) - - def TryLink(self, *args, **kw): - return self.sconf.TryLink(*args, **kw) - - def TryRun(self, *args, **kw): - return self.sconf.TryRun(*args, **kw) - - def __getattr__( self, attr ): - if( attr == 'env' ): - return self.sconf.env - elif( attr == 'lastTarget' ): - return self.sconf.lastTarget - else: - raise AttributeError("CheckContext instance has no attribute '%s'" % attr) - - #### Stuff used by Conftest.py (look there for explanations). - - def BuildProg(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. - return not self.TryBuild(self.env.Program, text, ext) - - def CompileProg(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. 
- return not self.TryBuild(self.env.Object, text, ext) - - def CompileSharedObject(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc. - return not self.TryBuild(self.env.SharedObject, text, ext) - - def RunProg(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. - st, out = self.TryRun(text, ext) - return not st, out - - def AppendLIBS(self, lib_name_list): - oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Append(LIBS = lib_name_list) - return oldLIBS - - def PrependLIBS(self, lib_name_list): - oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Prepend(LIBS = lib_name_list) - return oldLIBS - - def SetLIBS(self, val): - oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Replace(LIBS = val) - return oldLIBS - - def Display(self, msg): - if self.sconf.cached: - # We assume that Display is called twice for each test here - # once for the Checking for ... message and once for the result. - # The self.sconf.cached flag can only be set between those calls - msg = "(cached) " + msg - self.sconf.cached = 0 - progress_display(msg, append_newline=0) - self.Log("scons: Configure: " + msg + "\n") - - def Log(self, msg): - if self.sconf.logstream is not None: - self.sconf.logstream.write(msg) - - #### End of stuff used by Conftest.py. 
- - -def SConf(*args, **kw): - if kw.get(build_type, True): - kw['_depth'] = kw.get('_depth', 0) + 1 - for bt in build_types: - try: - del kw[bt] - except KeyError: - pass - return SConfBase(*args, **kw) - else: - return SCons.Util.Null() - - -def CheckFunc(context, function_name, header = None, language = None): - res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language) - context.did_show_result = 1 - return not res - -def CheckType(context, type_name, includes = "", language = None): - res = SCons.Conftest.CheckType(context, type_name, - header = includes, language = language) - context.did_show_result = 1 - return not res - -def CheckTypeSize(context, type_name, includes = "", language = None, expect = None): - res = SCons.Conftest.CheckTypeSize(context, type_name, - header = includes, language = language, - expect = expect) - context.did_show_result = 1 - return res - -def CheckDeclaration(context, declaration, includes = "", language = None): - res = SCons.Conftest.CheckDeclaration(context, declaration, - includes = includes, - language = language) - context.did_show_result = 1 - return not res - -def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'): - # used by CheckHeader and CheckLibWithHeader to produce C - #include - # statements from the specified header (list) - if not SCons.Util.is_List(headers): - headers = [headers] - l = [] - if leaveLast: - lastHeader = headers[-1] - headers = headers[:-1] - else: - lastHeader = None - for s in headers: - l.append("#include %s%s%s\n" - % (include_quotes[0], s, include_quotes[1])) - return ''.join(l), lastHeader - -def CheckHeader(context, header, include_quotes = '<>', language = None): - """ - A test for a C or C++ header file. 
- """ - prog_prefix, hdr_to_check = \ - createIncludesFromHeaders(header, 1, include_quotes) - res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix, - language = language, - include_quotes = include_quotes) - context.did_show_result = 1 - return not res - -def CheckCC(context): - res = SCons.Conftest.CheckCC(context) - context.did_show_result = 1 - return not res - -def CheckCXX(context): - res = SCons.Conftest.CheckCXX(context) - context.did_show_result = 1 - return not res - -def CheckSHCC(context): - res = SCons.Conftest.CheckSHCC(context) - context.did_show_result = 1 - return not res - -def CheckSHCXX(context): - res = SCons.Conftest.CheckSHCXX(context) - context.did_show_result = 1 - return not res - -# Bram: Make this function obsolete? CheckHeader() is more generic. - -def CheckCHeader(context, header, include_quotes = '""'): - """ - A test for a C header file. - """ - return CheckHeader(context, header, include_quotes, language = "C") - - -# Bram: Make this function obsolete? CheckHeader() is more generic. - -def CheckCXXHeader(context, header, include_quotes = '""'): - """ - A test for a C++ header file. - """ - return CheckHeader(context, header, include_quotes, language = "C++") - - -def CheckLib(context, library = None, symbol = "main", - header = None, language = None, autoadd = 1): - """ - A test for a library. See also CheckLibWithHeader. - Note that library may also be None to test whether the given symbol - compiles without flags. - """ - - if not library: - library = [None] - - if not SCons.Util.is_List(library): - library = [library] - - # ToDo: accept path for the library - res = SCons.Conftest.CheckLib(context, library, symbol, header = header, - language = language, autoadd = autoadd) - context.did_show_result = 1 - return not res - -# XXX -# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. 
- -def CheckLibWithHeader(context, libs, header, language, - call = None, autoadd = 1): - # ToDo: accept path for library. Support system header files. - """ - Another (more sophisticated) test for a library. - Checks, if library and header is available for language (may be 'C' - or 'CXX'). Call maybe be a valid expression _with_ a trailing ';'. - As in CheckLib, we support library=None, to test if the call compiles - without extra link flags. - """ - prog_prefix, dummy = \ - createIncludesFromHeaders(header, 0) - if libs == []: - libs = [None] - - if not SCons.Util.is_List(libs): - libs = [libs] - - res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix, - call = call, language = language, autoadd = autoadd) - context.did_show_result = 1 - return not res - -def CheckProg(context, prog_name): - """Simple check if a program exists in the path. Returns the path - for the application, or None if not found. - """ - res = SCons.Conftest.CheckProg(context, prog_name) - context.did_show_result = 1 - return res - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/SConsign.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/SConsign.py deleted file mode 100644 index 24bfd216b6e..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/SConsign.py +++ /dev/null @@ -1,433 +0,0 @@ -"""SCons.SConsign - -Writing and reading information to the .sconsign file or files. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -from __future__ import print_function - -__revision__ = "src/engine/SCons/SConsign.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.compat - -import os -import pickle - -import SCons.dblite -import SCons.Warnings - -from SCons.compat import PICKLE_PROTOCOL - - -def corrupt_dblite_warning(filename): - SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, - "Ignoring corrupt .sconsign file: %s"%filename) - -SCons.dblite.ignore_corrupt_dbfiles = 1 -SCons.dblite.corruption_warning = corrupt_dblite_warning - -# XXX Get rid of the global array so this becomes re-entrant. -sig_files = [] - -# Info for the database SConsign implementation (now the default): -# "DataBase" is a dictionary that maps top-level SConstruct directories -# to open database handles. 
-# "DB_Module" is the Python database module to create the handles. -# "DB_Name" is the base name of the database file (minus any -# extension the underlying DB module will add). -DataBase = {} -DB_Module = SCons.dblite -DB_Name = ".sconsign" -DB_sync_list = [] - - -def Get_DataBase(dir): - global DataBase, DB_Module, DB_Name - top = dir.fs.Top - if not os.path.isabs(DB_Name) and top.repositories: - mode = "c" - for d in [top] + top.repositories: - if dir.is_under(d): - try: - return DataBase[d], mode - except KeyError: - path = d.entry_abspath(DB_Name) - try: db = DataBase[d] = DB_Module.open(path, mode) - except (IOError, OSError): - pass - else: - if mode != "r": - DB_sync_list.append(db) - return db, mode - mode = "r" - try: - return DataBase[top], "c" - except KeyError: - db = DataBase[top] = DB_Module.open(DB_Name, "c") - DB_sync_list.append(db) - return db, "c" - except TypeError: - print("DataBase =", DataBase) - raise - - -def Reset(): - """Reset global state. Used by unit tests that end up using - SConsign multiple times to get a clean slate for each test.""" - global sig_files, DB_sync_list - sig_files = [] - DB_sync_list = [] - -normcase = os.path.normcase - - -def write(): - global sig_files - for sig_file in sig_files: - sig_file.write(sync=0) - for db in DB_sync_list: - try: - syncmethod = db.sync - except AttributeError: - pass # Not all dbm modules have sync() methods. - else: - syncmethod() - try: - closemethod = db.close - except AttributeError: - pass # Not all dbm modules have close() methods. - else: - closemethod() - - -class SConsignEntry(object): - """ - Wrapper class for the generic entry in a .sconsign file. - The Node subclass populates it with attributes as it pleases. - - XXX As coded below, we do expect a '.binfo' attribute to be added, - but we'll probably generalize this in the next refactorings. 
- """ - __slots__ = ("binfo", "ninfo", "__weakref__") - current_version_id = 2 - - def __init__(self): - # Create an object attribute from the class attribute so it ends up - # in the pickled data in the .sconsign file. - #_version_id = self.current_version_id - pass - - def convert_to_sconsign(self): - self.binfo.convert_to_sconsign() - - def convert_from_sconsign(self, dir, name): - self.binfo.convert_from_sconsign(dir, name) - - def __getstate__(self): - state = getattr(self, '__dict__', {}).copy() - for obj in type(self).mro(): - for name in getattr(obj,'__slots__',()): - if hasattr(self, name): - state[name] = getattr(self, name) - - state['_version_id'] = self.current_version_id - try: - del state['__weakref__'] - except KeyError: - pass - return state - - def __setstate__(self, state): - for key, value in state.items(): - if key not in ('_version_id','__weakref__'): - setattr(self, key, value) - - -class Base(object): - """ - This is the controlling class for the signatures for the collection of - entries associated with a specific directory. The actual directory - association will be maintained by a subclass that is specific to - the underlying storage method. This class provides a common set of - methods for fetching and storing the individual bits of information - that make up signature entry. - """ - def __init__(self): - self.entries = {} - self.dirty = False - self.to_be_merged = {} - - def get_entry(self, filename): - """ - Fetch the specified entry attribute. - """ - return self.entries[filename] - - def set_entry(self, filename, obj): - """ - Set the entry. 
- """ - self.entries[filename] = obj - self.dirty = True - - def do_not_set_entry(self, filename, obj): - pass - - def store_info(self, filename, node): - entry = node.get_stored_info() - entry.binfo.merge(node.get_binfo()) - self.to_be_merged[filename] = node - self.dirty = True - - def do_not_store_info(self, filename, node): - pass - - def merge(self): - for key, node in self.to_be_merged.items(): - entry = node.get_stored_info() - try: - ninfo = entry.ninfo - except AttributeError: - # This happens with SConf Nodes, because the configuration - # subsystem takes direct control over how the build decision - # is made and its information stored. - pass - else: - ninfo.merge(node.get_ninfo()) - self.entries[key] = entry - self.to_be_merged = {} - - -class DB(Base): - """ - A Base subclass that reads and writes signature information - from a global .sconsign.db* file--the actual file suffix is - determined by the database module. - """ - def __init__(self, dir): - Base.__init__(self) - - self.dir = dir - - db, mode = Get_DataBase(dir) - - # Read using the path relative to the top of the Repository - # (self.dir.tpath) from which we're fetching the signature - # information. - path = normcase(dir.get_tpath()) - try: - rawentries = db[path] - except KeyError: - pass - else: - try: - self.entries = pickle.loads(rawentries) - if not isinstance(self.entries, dict): - self.entries = {} - raise TypeError - except KeyboardInterrupt: - raise - except Exception as e: - SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, - "Ignoring corrupt sconsign entry : %s (%s)\n"%(self.dir.get_tpath(), e)) - for key, entry in self.entries.items(): - entry.convert_from_sconsign(dir, key) - - if mode == "r": - # This directory is actually under a repository, which means - # likely they're reaching in directly for a dependency on - # a file there. Don't actually set any entry info, so we - # won't try to write to that .sconsign.dblite file. 
- self.set_entry = self.do_not_set_entry - self.store_info = self.do_not_store_info - - global sig_files - sig_files.append(self) - - def write(self, sync=1): - if not self.dirty: - return - - self.merge() - - db, mode = Get_DataBase(self.dir) - - # Write using the path relative to the top of the SConstruct - # directory (self.dir.path), not relative to the top of - # the Repository; we only write to our own .sconsign file, - # not to .sconsign files in Repositories. - path = normcase(self.dir.get_internal_path()) - for key, entry in self.entries.items(): - entry.convert_to_sconsign() - db[path] = pickle.dumps(self.entries, PICKLE_PROTOCOL) - - if sync: - try: - syncmethod = db.sync - except AttributeError: - # Not all anydbm modules have sync() methods. - pass - else: - syncmethod() - - -class Dir(Base): - def __init__(self, fp=None, dir=None): - """ - fp - file pointer to read entries from - """ - Base.__init__(self) - - if not fp: - return - - self.entries = pickle.load(fp) - if not isinstance(self.entries, dict): - self.entries = {} - raise TypeError - - if dir: - for key, entry in self.entries.items(): - entry.convert_from_sconsign(dir, key) - - -class DirFile(Dir): - """ - Encapsulates reading and writing a per-directory .sconsign file. - """ - def __init__(self, dir): - """ - dir - the directory for the file - """ - - self.dir = dir - self.sconsign = os.path.join(dir.get_internal_path(), '.sconsign') - - try: - fp = open(self.sconsign, 'rb') - except IOError: - fp = None - - try: - Dir.__init__(self, fp, dir) - except KeyboardInterrupt: - raise - except Exception: - SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, - "Ignoring corrupt .sconsign file: %s"%self.sconsign) - - try: - fp.close() - except AttributeError: - pass - - global sig_files - sig_files.append(self) - - def write(self, sync=1): - """ - Write the .sconsign file to disk. - - Try to write to a temporary file first, and rename it if we - succeed. 
If we can't write to the temporary file, it's - probably because the directory isn't writable (and if so, - how did we build anything in this directory, anyway?), so - try to write directly to the .sconsign file as a backup. - If we can't rename, try to copy the temporary contents back - to the .sconsign file. Either way, always try to remove - the temporary file at the end. - """ - if not self.dirty: - return - - self.merge() - - temp = os.path.join(self.dir.get_internal_path(), '.scons%d' % os.getpid()) - try: - file = open(temp, 'wb') - fname = temp - except IOError: - try: - file = open(self.sconsign, 'wb') - fname = self.sconsign - except IOError: - return - for key, entry in self.entries.items(): - entry.convert_to_sconsign() - pickle.dump(self.entries, file, PICKLE_PROTOCOL) - file.close() - if fname != self.sconsign: - try: - mode = os.stat(self.sconsign)[0] - os.chmod(self.sconsign, 0o666) - os.unlink(self.sconsign) - except (IOError, OSError): - # Try to carry on in the face of either OSError - # (things like permission issues) or IOError (disk - # or network issues). If there's a really dangerous - # issue, it should get re-raised by the calls below. - pass - try: - os.rename(fname, self.sconsign) - except OSError: - # An OSError failure to rename may indicate something - # like the directory has no write permission, but - # the .sconsign file itself might still be writable, - # so try writing on top of it directly. An IOError - # here, or in any of the following calls, would get - # raised, indicating something like a potentially - # serious disk or network issue. - with open(self.sconsign, 'wb') as f, open(fname, 'rb') as f2: - f.write(f2.read()) - os.chmod(self.sconsign, mode) - try: - os.unlink(temp) - except (IOError, OSError): - pass - -ForDirectory = DB - - -def File(name, dbm_module=None): - """ - Arrange for all signatures to be stored in a global .sconsign.db* - file. 
- """ - global ForDirectory, DB_Name, DB_Module - if name is None: - ForDirectory = DirFile - DB_Module = None - else: - ForDirectory = DB - DB_Name = name - if dbm_module is not None: - DB_Module = dbm_module - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/C.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/C.py deleted file mode 100644 index 32e0499a7eb..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/C.py +++ /dev/null @@ -1,131 +0,0 @@ -"""SCons.Scanner.C - -This module implements the dependency scanner for C/C++ code. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Scanner/C.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Node.FS -import SCons.Scanner -import SCons.Util - -import SCons.cpp - -class SConsCPPScanner(SCons.cpp.PreProcessor): - """ - SCons-specific subclass of the cpp.py module's processing. - - We subclass this so that: 1) we can deal with files represented - by Nodes, not strings; 2) we can keep track of the files that are - missing. - """ - def __init__(self, *args, **kw): - SCons.cpp.PreProcessor.__init__(self, *args, **kw) - self.missing = [] - def initialize_result(self, fname): - self.result = SCons.Util.UniqueList([fname]) - def finalize_result(self, fname): - return self.result[1:] - def find_include_file(self, t): - keyword, quote, fname = t - result = SCons.Node.FS.find_file(fname, self.searchpath[quote]) - if not result: - self.missing.append((fname, self.current_file)) - return result - def read_file(self, file): - try: - with open(str(file.rfile())) as fp: - return fp.read() - except EnvironmentError as e: - self.missing.append((file, self.current_file)) - return '' - -def dictify_CPPDEFINES(env): - cppdefines = env.get('CPPDEFINES', {}) - if cppdefines is None: - return {} - if SCons.Util.is_Sequence(cppdefines): - result = {} - for c in cppdefines: - if SCons.Util.is_Sequence(c): - result[c[0]] = c[1] - else: - result[c] = None - return result - if not SCons.Util.is_Dict(cppdefines): - return {cppdefines : None} - return cppdefines - -class SConsCPPScannerWrapper(object): - """ - The SCons wrapper around a cpp.py scanner. - - This is the actual glue between the calling conventions of generic - SCons scanners, and the (subclass of) cpp.py class that knows how - to look for #include lines with reasonably real C-preprocessor-like - evaluation of #if/#ifdef/#else/#elif lines. 
- """ - def __init__(self, name, variable): - self.name = name - self.path = SCons.Scanner.FindPathDirs(variable) - def __call__(self, node, env, path = ()): - cpp = SConsCPPScanner(current = node.get_dir(), - cpppath = path, - dict = dictify_CPPDEFINES(env)) - result = cpp(node) - for included, includer in cpp.missing: - fmt = "No dependency generated for file: %s (included from: %s) -- file not found" - SCons.Warnings.warn(SCons.Warnings.DependencyWarning, - fmt % (included, includer)) - return result - - def recurse_nodes(self, nodes): - return nodes - def select(self, node): - return self - -def CScanner(): - """Return a prototype Scanner instance for scanning source files - that use the C pre-processor""" - - # Here's how we would (or might) use the CPP scanner code above that - # knows how to evaluate #if/#ifdef/#else/#elif lines when searching - # for #includes. This is commented out for now until we add the - # right configurability to let users pick between the scanners. - #return SConsCPPScannerWrapper("CScanner", "CPPPATH") - - cs = SCons.Scanner.ClassicCPP("CScanner", - "$CPPSUFFIXES", - "CPPPATH", - '^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")') - return cs - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/D.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/D.py deleted file mode 100644 index 427c596d459..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/D.py +++ /dev/null @@ -1,73 +0,0 @@ -"""SCons.Scanner.D - -Scanner for the Digital Mars "D" programming language. 
- -Coded by Andy Friesen -17 Nov 2003 - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Scanner/D.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Scanner - -def DScanner(): - """Return a prototype Scanner instance for scanning D source files""" - ds = D() - return ds - -class D(SCons.Scanner.Classic): - def __init__ (self): - SCons.Scanner.Classic.__init__ ( - self, - name = "DScanner", - suffixes = '$DSUFFIXES', - path_variable = 'DPATH', - regex = r'(?:import\s+)([\w\s=,.]+)(?:\s*:[\s\w,=]+)?(?:;)' - ) - - def find_include(self, include, source_dir, path): - # translate dots (package separators) to slashes - inc = include.replace('.', '/') - - i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path) - if i is None: - i = SCons.Node.FS.find_file (inc + '.di', (source_dir,) + path) - return i, include - - def find_include_names(self, node): - includes = [] - for iii in self.cre.findall(node.get_text_contents()): - for jjj in iii.split(','): - kkk = jjj.split('=')[-1] - includes.append(kkk.strip()) - return includes - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Dir.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Dir.py deleted file mode 100644 index c3e825c442e..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Dir.py +++ /dev/null @@ -1,109 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and 
this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Scanner/Dir.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Node.FS -import SCons.Scanner - -def only_dirs(nodes): - is_Dir = lambda n: isinstance(n.disambiguate(), SCons.Node.FS.Dir) - return [node for node in nodes if is_Dir(node)] - -def DirScanner(**kw): - """Return a prototype Scanner instance for scanning - directories for on-disk files""" - kw['node_factory'] = SCons.Node.FS.Entry - kw['recursive'] = only_dirs - return SCons.Scanner.Base(scan_on_disk, "DirScanner", **kw) - -def DirEntryScanner(**kw): - """Return a prototype Scanner instance for "scanning" - directory Nodes for their in-memory entries""" - kw['node_factory'] = SCons.Node.FS.Entry - kw['recursive'] = None - return SCons.Scanner.Base(scan_in_memory, "DirEntryScanner", **kw) - -skip_entry = {} - -skip_entry_list = [ - '.', - '..', - '.sconsign', - # Used by the native dblite.py module. - '.sconsign.dblite', - # Used by dbm and dumbdbm. - '.sconsign.dir', - # Used by dbm. - '.sconsign.pag', - # Used by dumbdbm. - '.sconsign.dat', - '.sconsign.bak', - # Used by some dbm emulations using Berkeley DB. 
- '.sconsign.db', -] - -for skip in skip_entry_list: - skip_entry[skip] = 1 - skip_entry[SCons.Node.FS._my_normcase(skip)] = 1 - -do_not_scan = lambda k: k not in skip_entry - -def scan_on_disk(node, env, path=()): - """ - Scans a directory for on-disk files and directories therein. - - Looking up the entries will add these to the in-memory Node tree - representation of the file system, so all we have to do is just - that and then call the in-memory scanning function. - """ - try: - flist = node.fs.listdir(node.get_abspath()) - except (IOError, OSError): - return [] - e = node.Entry - for f in filter(do_not_scan, flist): - # Add ./ to the beginning of the file name so if it begins with a - # '#' we don't look it up relative to the top-level directory. - e('./' + f) - return scan_in_memory(node, env, path) - -def scan_in_memory(node, env, path=()): - """ - "Scans" a Node.FS.Dir for its in-memory entries. - """ - try: - entries = node.entries - except AttributeError: - # It's not a Node.FS.Dir (or doesn't look enough like one for - # our purposes), which can happen if a target list containing - # mixed Node types (Dirs and Files, for example) has a Dir as - # the first entry. - return [] - entry_list = sorted(filter(do_not_scan, list(entries.keys()))) - return [entries[n] for n in entry_list] - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Fortran.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Fortran.py deleted file mode 100644 index 93be76c5ccf..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Fortran.py +++ /dev/null @@ -1,324 +0,0 @@ -"""SCons.Scanner.Fortran - -This module implements the dependency scanner for Fortran code. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Scanner/Fortran.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import re - -import SCons.Node -import SCons.Node.FS -import SCons.Scanner -import SCons.Util -import SCons.Warnings - -class F90Scanner(SCons.Scanner.Classic): - """ - A Classic Scanner subclass for Fortran source files which takes - into account both USE and INCLUDE statements. This scanner will - work for both F77 and F90 (and beyond) compilers. - - Currently, this scanner assumes that the include files do not contain - USE statements. To enable the ability to deal with USE statements - in include files, add logic right after the module names are found - to loop over each include file, search for and locate each USE - statement, and append each module name to the list of dependencies. 
- Caching the search results in a common dictionary somewhere so that - the same include file is not searched multiple times would be a - smart thing to do. - """ - - def __init__(self, name, suffixes, path_variable, - use_regex, incl_regex, def_regex, *args, **kw): - - self.cre_use = re.compile(use_regex, re.M) - self.cre_incl = re.compile(incl_regex, re.M) - self.cre_def = re.compile(def_regex, re.M) - - def _scan(node, env, path, self=self): - node = node.rfile() - - if not node.exists(): - return [] - - return self.scan(node, env, path) - - kw['function'] = _scan - kw['path_function'] = SCons.Scanner.FindPathDirs(path_variable) - kw['recursive'] = 1 - kw['skeys'] = suffixes - kw['name'] = name - - SCons.Scanner.Current.__init__(self, *args, **kw) - - def scan(self, node, env, path=()): - - # cache the includes list in node so we only scan it once: - if node.includes is not None: - mods_and_includes = node.includes - else: - # retrieve all included filenames - includes = self.cre_incl.findall(node.get_text_contents()) - # retrieve all USE'd module names - modules = self.cre_use.findall(node.get_text_contents()) - # retrieve all defined module names - defmodules = self.cre_def.findall(node.get_text_contents()) - - # Remove all USE'd module names that are defined in the same file - # (case-insensitively) - d = {} - for m in defmodules: - d[m.lower()] = 1 - modules = [m for m in modules if m.lower() not in d] - - # Convert module name to a .mod filename - suffix = env.subst('$FORTRANMODSUFFIX') - modules = [x.lower() + suffix for x in modules] - # Remove unique items from the list - mods_and_includes = SCons.Util.unique(includes+modules) - node.includes = mods_and_includes - - # This is a hand-coded DSU (decorate-sort-undecorate, or - # Schwartzian transform) pattern. 
The sort key is the raw name - # of the file as specifed on the USE or INCLUDE line, which lets - # us keep the sort order constant regardless of whether the file - # is actually found in a Repository or locally. - nodes = [] - source_dir = node.get_dir() - if callable(path): - path = path() - for dep in mods_and_includes: - n, i = self.find_include(dep, source_dir, path) - - if n is None: - SCons.Warnings.warn(SCons.Warnings.DependencyWarning, - "No dependency generated for file: %s (referenced by: %s) -- file not found" % (i, node)) - else: - sortkey = self.sort_key(dep) - nodes.append((sortkey, n)) - - return [pair[1] for pair in sorted(nodes)] - -def FortranScan(path_variable="FORTRANPATH"): - """Return a prototype Scanner instance for scanning source files - for Fortran USE & INCLUDE statements""" - -# The USE statement regex matches the following: -# -# USE module_name -# USE :: module_name -# USE, INTRINSIC :: module_name -# USE, NON_INTRINSIC :: module_name -# -# Limitations -# -# -- While the regex can handle multiple USE statements on one line, -# it cannot properly handle them if they are commented out. -# In either of the following cases: -# -# ! USE mod_a ; USE mod_b [entire line is commented out] -# USE mod_a ! ; USE mod_b [in-line comment of second USE statement] -# -# the second module name (mod_b) will be picked up as a dependency -# even though it should be ignored. The only way I can see -# to rectify this would be to modify the scanner to eliminate -# the call to re.findall, read in the contents of the file, -# treating the comment character as an end-of-line character -# in addition to the normal linefeed, loop over each line, -# weeding out the comments, and looking for the USE statements. -# One advantage to this is that the regex passed to the scanner -# would no longer need to match a semicolon. -# -# -- I question whether or not we need to detect dependencies to -# INTRINSIC modules because these are built-in to the compiler. 
-# If we consider them a dependency, will SCons look for them, not -# find them, and kill the build? Or will we there be standard -# compiler-specific directories we will need to point to so the -# compiler and SCons can locate the proper object and mod files? - -# Here is a breakdown of the regex: -# -# (?i) : regex is case insensitive -# ^ : start of line -# (?: : group a collection of regex symbols without saving the match as a "group" -# ^|; : matches either the start of the line or a semicolon - semicolon -# ) : end the unsaved grouping -# \s* : any amount of white space -# USE : match the string USE, case insensitive -# (?: : group a collection of regex symbols without saving the match as a "group" -# \s+| : match one or more whitespace OR .... (the next entire grouped set of regex symbols) -# (?: : group a collection of regex symbols without saving the match as a "group" -# (?: : establish another unsaved grouping of regex symbols -# \s* : any amount of white space -# , : match a comma -# \s* : any amount of white space -# (?:NON_)? : optionally match the prefix NON_, case insensitive -# INTRINSIC : match the string INTRINSIC, case insensitive -# )? : optionally match the ", INTRINSIC/NON_INTRINSIC" grouped expression -# \s* : any amount of white space -# :: : match a double colon that must appear after the INTRINSIC/NON_INTRINSIC attribute -# ) : end the unsaved grouping -# ) : end the unsaved grouping -# \s* : match any amount of white space -# (\w+) : match the module name that is being USE'd -# -# - use_regex = r"(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)" - - -# The INCLUDE statement regex matches the following: -# -# INCLUDE 'some_Text' -# INCLUDE "some_Text" -# INCLUDE "some_Text" ; INCLUDE "some_Text" -# INCLUDE kind_"some_Text" -# INCLUDE kind_'some_Text" -# -# where some_Text can include any alphanumeric and/or special character -# as defined by the Fortran 2003 standard. 
-# -# Limitations: -# -# -- The Fortran standard dictates that a " or ' in the INCLUDE'd -# string must be represented as a "" or '', if the quotes that wrap -# the entire string are either a ' or ", respectively. While the -# regular expression below can detect the ' or " characters just fine, -# the scanning logic, presently is unable to detect them and reduce -# them to a single instance. This probably isn't an issue since, -# in practice, ' or " are not generally used in filenames. -# -# -- This regex will not properly deal with multiple INCLUDE statements -# when the entire line has been commented out, ala -# -# ! INCLUDE 'some_file' ; INCLUDE 'some_file' -# -# In such cases, it will properly ignore the first INCLUDE file, -# but will actually still pick up the second. Interestingly enough, -# the regex will properly deal with these cases: -# -# INCLUDE 'some_file' -# INCLUDE 'some_file' !; INCLUDE 'some_file' -# -# To get around the above limitation, the FORTRAN programmer could -# simply comment each INCLUDE statement separately, like this -# -# ! INCLUDE 'some_file' !; INCLUDE 'some_file' -# -# The way I see it, the only way to get around this limitation would -# be to modify the scanning logic to replace the calls to re.findall -# with a custom loop that processes each line separately, throwing -# away fully commented out lines before attempting to match against -# the INCLUDE syntax. -# -# Here is a breakdown of the regex: -# -# (?i) : regex is case insensitive -# (?: : begin a non-saving group that matches the following: -# ^ : either the start of the line -# | : or -# ['">]\s*; : a semicolon that follows a single quote, -# double quote or greater than symbol (with any -# amount of whitespace in between). This will -# allow the regex to match multiple INCLUDE -# statements per line (although it also requires -# the positive lookahead assertion that is -# used below). It will even properly deal with -# (i.e. 
ignore) cases in which the additional -# INCLUDES are part of an in-line comment, ala -# " INCLUDE 'someFile' ! ; INCLUDE 'someFile2' " -# ) : end of non-saving group -# \s* : any amount of white space -# INCLUDE : match the string INCLUDE, case insensitive -# \s+ : match one or more white space characters -# (?\w+_)? : match the optional "kind-param _" prefix allowed by the standard -# [<"'] : match the include delimiter - an apostrophe, double quote, or less than symbol -# (.+?) : match one or more characters that make up -# the included path and file name and save it -# in a group. The Fortran standard allows for -# any non-control character to be used. The dot -# operator will pick up any character, including -# control codes, but I can't conceive of anyone -# putting control codes in their file names. -# The question mark indicates it is non-greedy so -# that regex will match only up to the next quote, -# double quote, or greater than symbol -# (?=["'>]) : positive lookahead assertion to match the include -# delimiter - an apostrophe, double quote, or -# greater than symbol. 
This level of complexity -# is required so that the include delimiter is -# not consumed by the match, thus allowing the -# sub-regex discussed above to uniquely match a -# set of semicolon-separated INCLUDE statements -# (as allowed by the F2003 standard) - - include_regex = r"""(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" - -# The MODULE statement regex finds module definitions by matching -# the following: -# -# MODULE module_name -# -# but *not* the following: -# -# MODULE PROCEDURE procedure_name -# MODULE SUBROUTINE subroutine_name -# MODULE FUNCTION function_name -# MODULE PURE SUBROUTINE|FUNCTION subroutine_name|function_name -# MODULE ELEMENTAL SUBROUTINE|FUNCTION subroutine_name|function_name -# -# Here is a breakdown of the regex: -# -# (?i) : regex is case insensitive -# ^\s* : any amount of white space -# MODULE : match the string MODULE, case -# insensitive -# \s+ : match one or more white space -# characters -# (?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL) -# : but *don't* match if the next word -# matches PROCEDURE, SUBROUTINE, -# FUNCTION, PURE or ELEMENTAL (negative -# lookahead assertion), case insensitive -# (\w+) : match one or more alphanumeric -# characters that make up the defined -# module name and save it in a group - - def_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL)(\w+)""" - - scanner = F90Scanner("FortranScan", - "$FORTRANSUFFIXES", - path_variable, - use_regex, - include_regex, - def_regex) - return scanner - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/IDL.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/IDL.py deleted file mode 100644 index 6569c1f9c35..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/IDL.py +++ /dev/null @@ -1,48 +0,0 @@ -"""SCons.Scanner.IDL - -This module implements the 
dependency scanner for IDL (Interface -Definition Language) files. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Scanner/IDL.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Node.FS -import SCons.Scanner - -def IDLScan(): - """Return a prototype Scanner instance for scanning IDL source files""" - cs = SCons.Scanner.ClassicCPP("IDLScan", - "$IDLSUFFIXES", - "CPPPATH", - '^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")') - return cs - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/LaTeX.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/LaTeX.py deleted file mode 100644 index 4e43eaa7822..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/LaTeX.py +++ /dev/null @@ -1,429 +0,0 @@ -"""SCons.Scanner.LaTeX - -This module implements the dependency scanner for LaTeX code. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Scanner/LaTeX.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import re - -import SCons.Scanner -import SCons.Util - -# list of graphics file extensions for TeX and LaTeX -TexGraphics = ['.eps', '.ps'] -#LatexGraphics = ['.pdf', '.png', '.jpg', '.gif', '.tif'] -LatexGraphics = [ '.png', '.jpg', '.gif', '.tif'] - - -# Used as a return value of modify_env_var if the variable is not set. -class _Null(object): - pass -_null = _Null - -# The user specifies the paths in env[variable], similar to other builders. -# They may be relative and must be converted to absolute, as expected -# by LaTeX and Co. The environment may already have some paths in -# env['ENV'][var]. These paths are honored, but the env[var] paths have -# higher precedence. All changes are un-done on exit. -def modify_env_var(env, var, abspath): - try: - save = env['ENV'][var] - except KeyError: - save = _null - env.PrependENVPath(var, abspath) - try: - if SCons.Util.is_List(env[var]): - env.PrependENVPath(var, [os.path.abspath(str(p)) for p in env[var]]) - else: - # Split at os.pathsep to convert into absolute path - env.PrependENVPath(var, [os.path.abspath(p) for p in str(env[var]).split(os.pathsep)]) - except KeyError: - pass - - # Convert into a string explicitly to append ":" (without which it won't search system - # paths as well). The problem is that env.AppendENVPath(var, ":") - # does not work, refuses to append ":" (os.pathsep). 
- - if SCons.Util.is_List(env['ENV'][var]): - env['ENV'][var] = os.pathsep.join(env['ENV'][var]) - # Append the trailing os.pathsep character here to catch the case with no env[var] - env['ENV'][var] = env['ENV'][var] + os.pathsep - - return save - -class FindENVPathDirs(object): - """ - A class to bind a specific E{*}PATH variable name to a function that - will return all of the E{*}path directories. - """ - def __init__(self, variable): - self.variable = variable - def __call__(self, env, dir=None, target=None, source=None, argument=None): - import SCons.PathList - try: - path = env['ENV'][self.variable] - except KeyError: - return () - - dir = dir or env.fs._cwd - path = SCons.PathList.PathList(path).subst_path(env, target, source) - return tuple(dir.Rfindalldirs(path)) - - - -def LaTeXScanner(): - """ - Return a prototype Scanner instance for scanning LaTeX source files - when built with latex. - """ - ds = LaTeX(name = "LaTeXScanner", - suffixes = '$LATEXSUFFIXES', - # in the search order, see below in LaTeX class docstring - graphics_extensions = TexGraphics, - recursive = 0) - return ds - -def PDFLaTeXScanner(): - """ - Return a prototype Scanner instance for scanning LaTeX source files - when built with pdflatex. - """ - ds = LaTeX(name = "PDFLaTeXScanner", - suffixes = '$LATEXSUFFIXES', - # in the search order, see below in LaTeX class docstring - graphics_extensions = LatexGraphics, - recursive = 0) - return ds - -class LaTeX(SCons.Scanner.Base): - """ - Class for scanning LaTeX files for included files. - - Unlike most scanners, which use regular expressions that just - return the included file name, this returns a tuple consisting - of the keyword for the inclusion ("include", "includegraphics", - "input", or "bibliography"), and then the file name itself. 
- Based on a quick look at LaTeX documentation, it seems that we - should append .tex suffix for the "include" keywords, append .tex if - there is no extension for the "input" keyword, and need to add .bib - for the "bibliography" keyword that does not accept extensions by itself. - - Finally, if there is no extension for an "includegraphics" keyword - latex will append .ps or .eps to find the file, while pdftex may use .pdf, - .jpg, .tif, .mps, or .png. - - The actual subset and search order may be altered by - DeclareGraphicsExtensions command. This complication is ignored. - The default order corresponds to experimentation with teTeX:: - - $ latex --version - pdfeTeX 3.141592-1.21a-2.2 (Web2C 7.5.4) - kpathsea version 3.5.4 - - The order is: - ['.eps', '.ps'] for latex - ['.png', '.pdf', '.jpg', '.tif']. - - Another difference is that the search path is determined by the type - of the file being searched: - env['TEXINPUTS'] for "input" and "include" keywords - env['TEXINPUTS'] for "includegraphics" keyword - env['TEXINPUTS'] for "lstinputlisting" keyword - env['BIBINPUTS'] for "bibliography" keyword - env['BSTINPUTS'] for "bibliographystyle" keyword - env['INDEXSTYLE'] for "makeindex" keyword, no scanning support needed just allows user to set it if needed. 
- - FIXME: also look for the class or style in document[class|style]{} - FIXME: also look for the argument of bibliographystyle{} - """ - keyword_paths = {'include': 'TEXINPUTS', - 'input': 'TEXINPUTS', - 'includegraphics': 'TEXINPUTS', - 'bibliography': 'BIBINPUTS', - 'bibliographystyle': 'BSTINPUTS', - 'addbibresource': 'BIBINPUTS', - 'addglobalbib': 'BIBINPUTS', - 'addsectionbib': 'BIBINPUTS', - 'makeindex': 'INDEXSTYLE', - 'usepackage': 'TEXINPUTS', - 'lstinputlisting': 'TEXINPUTS'} - env_variables = SCons.Util.unique(list(keyword_paths.values())) - two_arg_commands = ['import', 'subimport', - 'includefrom', 'subincludefrom', - 'inputfrom', 'subinputfrom'] - - def __init__(self, name, suffixes, graphics_extensions, *args, **kw): - regex = r''' - \\( - include - | includegraphics(?:\s*\[[^\]]+\])? - | lstinputlisting(?:\[[^\]]+\])? - | input - | import - | subimport - | includefrom - | subincludefrom - | inputfrom - | subinputfrom - | bibliography - | addbibresource - | addglobalbib - | addsectionbib - | usepackage - ) - \s*{([^}]*)} # first arg - (?: \s*{([^}]*)} )? # maybe another arg - ''' - self.cre = re.compile(regex, re.M | re.X) - self.comment_re = re.compile(r'^((?:(?:\\%)|[^%\n])*)(.*)$', re.M) - - self.graphics_extensions = graphics_extensions - - def _scan(node, env, path=(), self=self): - node = node.rfile() - if not node.exists(): - return [] - return self.scan_recurse(node, path) - - class FindMultiPathDirs(object): - """The stock FindPathDirs function has the wrong granularity: - it is called once per target, while we need the path that depends - on what kind of included files is being searched. This wrapper - hides multiple instances of FindPathDirs, one per the LaTeX path - variable in the environment. When invoked, the function calculates - and returns all the required paths as a dictionary (converted into - a tuple to become hashable). 
Then the scan function converts it - back and uses a dictionary of tuples rather than a single tuple - of paths. - """ - def __init__(self, dictionary): - self.dictionary = {} - for k,n in dictionary.items(): - self.dictionary[k] = ( SCons.Scanner.FindPathDirs(n), - FindENVPathDirs(n) ) - - def __call__(self, env, dir=None, target=None, source=None, - argument=None): - di = {} - for k,(c,cENV) in self.dictionary.items(): - di[k] = ( c(env, dir=None, target=None, source=None, - argument=None) , - cENV(env, dir=None, target=None, source=None, - argument=None) ) - # To prevent "dict is not hashable error" - return tuple(di.items()) - - class LaTeXScanCheck(object): - """Skip all but LaTeX source files, i.e., do not scan *.eps, - *.pdf, *.jpg, etc. - """ - def __init__(self, suffixes): - self.suffixes = suffixes - def __call__(self, node, env): - current = not node.has_builder() or node.is_up_to_date() - scannable = node.get_suffix() in env.subst_list(self.suffixes)[0] - # Returning false means that the file is not scanned. 
- return scannable and current - - kw['function'] = _scan - kw['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths) - kw['recursive'] = 0 - kw['skeys'] = suffixes - kw['scan_check'] = LaTeXScanCheck(suffixes) - kw['name'] = name - - SCons.Scanner.Base.__init__(self, *args, **kw) - - def _latex_names(self, include_type, filename): - if include_type == 'input': - base, ext = os.path.splitext( filename ) - if ext == "": - return [filename + '.tex'] - if include_type in ('include', 'import', 'subimport', - 'includefrom', 'subincludefrom', - 'inputfrom', 'subinputfrom'): - base, ext = os.path.splitext( filename ) - if ext == "": - return [filename + '.tex'] - if include_type == 'bibliography': - base, ext = os.path.splitext( filename ) - if ext == "": - return [filename + '.bib'] - if include_type == 'usepackage': - base, ext = os.path.splitext( filename ) - if ext == "": - return [filename + '.sty'] - if include_type == 'includegraphics': - base, ext = os.path.splitext( filename ) - if ext == "": - #return [filename+e for e in self.graphics_extensions + TexGraphics] - # use the line above to find dependencies for the PDF builder - # when only an .eps figure is present. Since it will be found - # if the user tells scons how to make the pdf figure, leave - # it out for now. - return [filename+e for e in self.graphics_extensions] - return [filename] - - def sort_key(self, include): - return SCons.Node.FS._my_normcase(str(include)) - - def find_include(self, include, source_dir, path): - inc_type, inc_subdir, inc_filename = include - try: - sub_paths = path[inc_type] - except (IndexError, KeyError): - sub_paths = ((), ()) - try_names = self._latex_names(inc_type, inc_filename) - - # There are three search paths to try: - # 1. current directory "source_dir" - # 2. env[var] - # 3. 
env['ENV'][var] - search_paths = [(source_dir,)] + list(sub_paths) - - for n in try_names: - for search_path in search_paths: - paths = tuple([d.Dir(inc_subdir) for d in search_path]) - i = SCons.Node.FS.find_file(n, paths) - if i: - return i, include - return None, include - - def canonical_text(self, text): - """Standardize an input TeX-file contents. - - Currently: - * removes comments, unwrapping comment-wrapped lines. - """ - out = [] - line_continues_a_comment = False - for line in text.splitlines(): - line,comment = self.comment_re.findall(line)[0] - if line_continues_a_comment: - out[-1] = out[-1] + line.lstrip() - else: - out.append(line) - line_continues_a_comment = len(comment) > 0 - return '\n'.join(out).rstrip()+'\n' - - def scan(self, node, subdir='.'): - # Modify the default scan function to allow for the regular - # expression to return a comma separated list of file names - # as can be the case with the bibliography keyword. - - # Cache the includes list in node so we only scan it once: - # path_dict = dict(list(path)) - # add option for whitespace (\s) before the '[' - noopt_cre = re.compile(r'\s*\[.*$') - if node.includes is not None: - includes = node.includes - else: - text = self.canonical_text(node.get_text_contents()) - includes = self.cre.findall(text) - # 1. Split comma-separated lines, e.g. - # ('bibliography', 'phys,comp') - # should become two entries - # ('bibliography', 'phys') - # ('bibliography', 'comp') - # 2. 
Remove the options, e.g., such as - # ('includegraphics[clip,width=0.7\\linewidth]', 'picture.eps') - # should become - # ('includegraphics', 'picture.eps') - split_includes = [] - for include in includes: - inc_type = noopt_cre.sub('', include[0]) - inc_subdir = subdir - if inc_type in self.two_arg_commands: - inc_subdir = os.path.join(subdir, include[1]) - inc_list = include[2].split(',') - else: - inc_list = include[1].split(',') - for inc in inc_list: - split_includes.append((inc_type, inc_subdir, inc)) - - includes = split_includes - node.includes = includes - - return includes - - def scan_recurse(self, node, path=()): - """ do a recursive scan of the top level target file - This lets us search for included files based on the - directory of the main file just as latex does""" - - path_dict = dict(list(path)) - - queue = [] - queue.extend( self.scan(node) ) - seen = {} - - # This is a hand-coded DSU (decorate-sort-undecorate, or - # Schwartzian transform) pattern. The sort key is the raw name - # of the file as specifed on the \include, \input, etc. line. 
- # TODO: what about the comment in the original Classic scanner: - # """which lets - # us keep the sort order constant regardless of whether the file - # is actually found in a Repository or locally.""" - nodes = [] - source_dir = node.get_dir() - #for include in includes: - while queue: - - include = queue.pop() - inc_type, inc_subdir, inc_filename = include - - try: - if seen[inc_filename] == 1: - continue - except KeyError: - seen[inc_filename] = 1 - - # - # Handle multiple filenames in include[1] - # - n, i = self.find_include(include, source_dir, path_dict) - if n is None: - # Do not bother with 'usepackage' warnings, as they most - # likely refer to system-level files - if inc_type != 'usepackage': - SCons.Warnings.warn(SCons.Warnings.DependencyWarning, - "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) - else: - sortkey = self.sort_key(n) - nodes.append((sortkey, n)) - # recurse down - queue.extend( self.scan(n, inc_subdir) ) - - return [pair[1] for pair in sorted(nodes)] - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Prog.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Prog.py deleted file mode 100644 index 5f9015d2e53..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/Prog.py +++ /dev/null @@ -1,116 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright 
notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Scanner/Prog.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Node -import SCons.Node.FS -import SCons.Scanner -import SCons.Util - -# global, set by --debug=findlibs -print_find_libs = None - -def ProgramScanner(**kw): - """Return a prototype Scanner instance for scanning executable - files for static-lib dependencies""" - kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH') - ps = SCons.Scanner.Base(scan, "ProgramScanner", **kw) - return ps - -def _subst_libs(env, libs): - """ - Substitute environment variables and split into list. - """ - if SCons.Util.is_String(libs): - libs = env.subst(libs) - if SCons.Util.is_String(libs): - libs = libs.split() - elif SCons.Util.is_Sequence(libs): - _libs = [] - for l in libs: - _libs += _subst_libs(env, l) - libs = _libs - else: - # libs is an object (Node, for example) - libs = [libs] - return libs - -def scan(node, env, libpath = ()): - """ - This scanner scans program files for static-library - dependencies. It will search the LIBPATH environment variable - for libraries specified in the LIBS variable, returning any - files it finds as dependencies. 
- """ - try: - libs = env['LIBS'] - except KeyError: - # There are no LIBS in this environment, so just return a null list: - return [] - - libs = _subst_libs(env, libs) - - try: - prefix = env['LIBPREFIXES'] - if not SCons.Util.is_List(prefix): - prefix = [ prefix ] - except KeyError: - prefix = [ '' ] - - try: - suffix = env['LIBSUFFIXES'] - if not SCons.Util.is_List(suffix): - suffix = [ suffix ] - except KeyError: - suffix = [ '' ] - - pairs = [] - for suf in map(env.subst, suffix): - for pref in map(env.subst, prefix): - pairs.append((pref, suf)) - - result = [] - - if callable(libpath): - libpath = libpath() - - find_file = SCons.Node.FS.find_file - adjustixes = SCons.Util.adjustixes - for lib in libs: - if SCons.Util.is_String(lib): - for pref, suf in pairs: - l = adjustixes(lib, pref, suf) - l = find_file(l, libpath, verbose=print_find_libs) - if l: - result.append(l) - else: - result.append(lib) - - return result - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/RC.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/RC.py deleted file mode 100644 index 47c6ca26ec5..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/RC.py +++ /dev/null @@ -1,66 +0,0 @@ -"""SCons.Scanner.RC - -This module implements the dependency scanner for RC (Interface -Definition Language) files. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Scanner/RC.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import re - -import SCons.Node.FS -import SCons.Scanner - - -def no_tlb(nodes): - """ - Filter out .tlb files as they are binary and shouldn't be scanned - """ - # print("Nodes:%s"%[str(n) for n in nodes]) - return [n for n in nodes if str(n)[-4:] != '.tlb'] - - -def RCScan(): - """Return a prototype Scanner instance for scanning RC source files""" - - res_re= r'^(?:\s*#\s*(?:include)|' \ - r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \ - r'\s*.*?)' \ - r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$' - resScanner = SCons.Scanner.ClassicCPP("ResourceScanner", - "$RCSUFFIXES", - "CPPPATH", - res_re, - recursive=no_tlb) - - return resScanner - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/SWIG.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/SWIG.py deleted file mode 100644 index 7ff78a684fa..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/SWIG.py +++ /dev/null @@ -1,45 +0,0 @@ -"""SCons.Scanner.SWIG - -This module implements the dependency scanner for SWIG code. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Scanner/SWIG.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Scanner - -SWIGSuffixes = [ '.i' ] - -def SWIGScanner(): - expr = r'^[ \t]*%[ \t]*(?:include|import|extern)[ \t]*(<|"?)([^>\s"]+)(?:>|"?)' - scanner = SCons.Scanner.ClassicCPP("SWIGScanner", ".i", "SWIGPATH", expr) - return scanner - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/__init__.py deleted file mode 100644 index 98845332e91..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Scanner/__init__.py +++ /dev/null @@ -1,421 +0,0 @@ -"""SCons.Scanner - -The Scanner package for the SCons software construction utility. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Scanner/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import re - -import SCons.Node.FS -import SCons.Util - - -class _Null(object): - pass - -# This is used instead of None as a default argument value so None can be -# used as an actual argument value. -_null = _Null - -def Scanner(function, *args, **kw): - """ - Public interface factory function for creating different types - of Scanners based on the different types of "functions" that may - be supplied. - - TODO: Deprecate this some day. We've moved the functionality - inside the Base class and really don't need this factory function - any more. It was, however, used by some of our Tool modules, so - the call probably ended up in various people's custom modules - patterned on SCons code. 
- """ - if SCons.Util.is_Dict(function): - return Selector(function, *args, **kw) - else: - return Base(function, *args, **kw) - - - -class FindPathDirs(object): - """ - A class to bind a specific E{*}PATH variable name to a function that - will return all of the E{*}path directories. - """ - def __init__(self, variable): - self.variable = variable - def __call__(self, env, dir=None, target=None, source=None, argument=None): - import SCons.PathList - try: - path = env[self.variable] - except KeyError: - return () - - dir = dir or env.fs._cwd - path = SCons.PathList.PathList(path).subst_path(env, target, source) - return tuple(dir.Rfindalldirs(path)) - - - -class Base(object): - """ - The base class for dependency scanners. This implements - straightforward, single-pass scanning of a single file. - """ - - def __init__(self, - function, - name = "NONE", - argument = _null, - skeys = _null, - path_function = None, - # Node.FS.Base so that, by default, it's okay for a - # scanner to return a Dir, File or Entry. - node_class = SCons.Node.FS.Base, - node_factory = None, - scan_check = None, - recursive = None): - """ - Construct a new scanner object given a scanner function. - - 'function' - a scanner function taking two or three - arguments and returning a list of strings. - - 'name' - a name for identifying this scanner object. - - 'argument' - an optional argument that, if specified, will be - passed to both the scanner function and the path_function. - - 'skeys' - an optional list argument that can be used to determine - which scanner should be used for a given Node. In the case of File - nodes, for example, the 'skeys' would be file suffixes. 
- - 'path_function' - a function that takes four or five arguments - (a construction environment, Node for the directory containing - the SConscript file that defined the primary target, list of - target nodes, list of source nodes, and optional argument for - this instance) and returns a tuple of the directories that can - be searched for implicit dependency files. May also return a - callable() which is called with no args and returns the tuple - (supporting Bindable class). - - 'node_class' - the class of Nodes which this scan will return. - If node_class is None, then this scanner will not enforce any - Node conversion and will return the raw results from the - underlying scanner function. - - 'node_factory' - the factory function to be called to translate - the raw results returned by the scanner function into the - expected node_class objects. - - 'scan_check' - a function to be called to first check whether - this node really needs to be scanned. - - 'recursive' - specifies that this scanner should be invoked - recursively on all of the implicit dependencies it returns - (the canonical example being #include lines in C source files). - May be a callable, which will be called to filter the list - of nodes found to select a subset for recursive scanning - (the canonical example being only recursively scanning - subdirectories within a directory). - - The scanner function's first argument will be a Node that should - be scanned for dependencies, the second argument will be an - Environment object, the third argument will be the tuple of paths - returned by the path_function, and the fourth argument will be - the value passed into 'argument', and the returned list should - contain the Nodes for all the direct dependencies of the file. 
- - Examples: - - s = Scanner(my_scanner_function) - - s = Scanner(function = my_scanner_function) - - s = Scanner(function = my_scanner_function, argument = 'foo') - - """ - - # Note: this class could easily work with scanner functions that take - # something other than a filename as an argument (e.g. a database - # node) and a dependencies list that aren't file names. All that - # would need to be changed is the documentation. - - self.function = function - self.path_function = path_function - self.name = name - self.argument = argument - - if skeys is _null: - if SCons.Util.is_Dict(function): - skeys = list(function.keys()) - else: - skeys = [] - self.skeys = skeys - - self.node_class = node_class - self.node_factory = node_factory - self.scan_check = scan_check - if callable(recursive): - self.recurse_nodes = recursive - elif recursive: - self.recurse_nodes = self._recurse_all_nodes - else: - self.recurse_nodes = self._recurse_no_nodes - - def path(self, env, dir=None, target=None, source=None): - if not self.path_function: - return () - if self.argument is not _null: - return self.path_function(env, dir, target, source, self.argument) - else: - return self.path_function(env, dir, target, source) - - def __call__(self, node, env, path=()): - """ - This method scans a single object. 'node' is the node - that will be passed to the scanner function, and 'env' is the - environment that will be passed to the scanner function. A list of - direct dependency nodes for the specified node will be returned. 
- """ - if self.scan_check and not self.scan_check(node, env): - return [] - - self = self.select(node) - - if self.argument is not _null: - node_list = self.function(node, env, path, self.argument) - else: - node_list = self.function(node, env, path) - - kw = {} - if hasattr(node, 'dir'): - kw['directory'] = node.dir - node_factory = env.get_factory(self.node_factory) - nodes = [] - for l in node_list: - if self.node_class and not isinstance(l, self.node_class): - l = node_factory(l, **kw) - nodes.append(l) - return nodes - - def __eq__(self, other): - try: - return self.__dict__ == other.__dict__ - except AttributeError: - # other probably doesn't have a __dict__ - return self.__dict__ == other - - def __hash__(self): - return id(self) - - def __str__(self): - return self.name - - def add_skey(self, skey): - """Add a skey to the list of skeys""" - self.skeys.append(skey) - - def get_skeys(self, env=None): - if env and SCons.Util.is_String(self.skeys): - return env.subst_list(self.skeys)[0] - return self.skeys - - def select(self, node): - if SCons.Util.is_Dict(self.function): - key = node.scanner_key() - try: - return self.function[key] - except KeyError: - return None - else: - return self - - def _recurse_all_nodes(self, nodes): - return nodes - - def _recurse_no_nodes(self, nodes): - return [] - - # recurse_nodes = _recurse_no_nodes - - def add_scanner(self, skey, scanner): - self.function[skey] = scanner - self.add_skey(skey) - - -class Selector(Base): - """ - A class for selecting a more specific scanner based on the - scanner_key() (suffix) for a specific Node. - - TODO: This functionality has been moved into the inner workings of - the Base class, and this class will be deprecated at some point. - (It was never exposed directly as part of the public interface, - although it is used by the Scanner() factory function that was - used by various Tool modules and therefore was likely a template - for custom modules that may be out there.) 
- """ - def __init__(self, dict, *args, **kw): - Base.__init__(self, None, *args, **kw) - self.dict = dict - self.skeys = list(dict.keys()) - - def __call__(self, node, env, path=()): - return self.select(node)(node, env, path) - - def select(self, node): - try: - return self.dict[node.scanner_key()] - except KeyError: - return None - - def add_scanner(self, skey, scanner): - self.dict[skey] = scanner - self.add_skey(skey) - - -class Current(Base): - """ - A class for scanning files that are source files (have no builder) - or are derived files and are current (which implies that they exist, - either locally or in a repository). - """ - - def __init__(self, *args, **kw): - def current_check(node, env): - return not node.has_builder() or node.is_up_to_date() - kw['scan_check'] = current_check - Base.__init__(self, *args, **kw) - -class Classic(Current): - """ - A Scanner subclass to contain the common logic for classic CPP-style - include scanning, but which can be customized to use different - regular expressions to find the includes. - - Note that in order for this to work "out of the box" (without - overriding the find_include() and sort_key() methods), the regular - expression passed to the constructor must return the name of the - include file in group 0. - """ - - def __init__(self, name, suffixes, path_variable, regex, *args, **kw): - - self.cre = re.compile(regex, re.M) - - def _scan(node, _, path=(), self=self): - node = node.rfile() - if not node.exists(): - return [] - return self.scan(node, path) - - kw['function'] = _scan - kw['path_function'] = FindPathDirs(path_variable) - - # Allow recursive to propagate if child class specifies. - # In this case resource scanner needs to specify a filter on which files - # get recursively processed. Previously was hardcoded to 1 instead of - # defaulted to 1. 
- kw['recursive'] = kw.get('recursive', 1) - kw['skeys'] = suffixes - kw['name'] = name - - Current.__init__(self, *args, **kw) - - def find_include(self, include, source_dir, path): - n = SCons.Node.FS.find_file(include, (source_dir,) + tuple(path)) - return n, include - - def sort_key(self, include): - return SCons.Node.FS._my_normcase(include) - - def find_include_names(self, node): - return self.cre.findall(node.get_text_contents()) - - def scan(self, node, path=()): - - # cache the includes list in node so we only scan it once: - if node.includes is not None: - includes = node.includes - else: - includes = self.find_include_names(node) - # Intern the names of the include files. Saves some memory - # if the same header is included many times. - node.includes = list(map(SCons.Util.silent_intern, includes)) - - # This is a hand-coded DSU (decorate-sort-undecorate, or - # Schwartzian transform) pattern. The sort key is the raw name - # of the file as specifed on the #include line (including the - # " or <, since that may affect what file is found), which lets - # us keep the sort order constant regardless of whether the file - # is actually found in a Repository or locally. - nodes = [] - source_dir = node.get_dir() - if callable(path): - path = path() - for include in includes: - n, i = self.find_include(include, source_dir, path) - - if n is None: - SCons.Warnings.warn(SCons.Warnings.DependencyWarning, - "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) - else: - nodes.append((self.sort_key(include), n)) - - return [pair[1] for pair in sorted(nodes)] - -class ClassicCPP(Classic): - """ - A Classic Scanner subclass which takes into account the type of - bracketing used to include the file, and uses classic CPP rules - for searching for the files based on the bracketing. 
- - Note that in order for this to work, the regular expression passed - to the constructor must return the leading bracket in group 0, and - the contained filename in group 1. - """ - def find_include(self, include, source_dir, path): - include = list(map(SCons.Util.to_str, include)) - if include[0] == '"': - paths = (source_dir,) + tuple(path) - else: - paths = tuple(path) + (source_dir,) - - n = SCons.Node.FS.find_file(include[1], paths) - - i = SCons.Util.silent_intern(include[1]) - return n, i - - def sort_key(self, include): - return SCons.Node.FS._my_normcase(' '.join(include)) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/Interactive.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/Interactive.py deleted file mode 100644 index b2c134c84ba..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/Interactive.py +++ /dev/null @@ -1,376 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from __future__ import print_function - -__revision__ = "src/engine/SCons/Script/Interactive.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """ -SCons interactive mode -""" - -# TODO: -# -# This has the potential to grow into something with a really big life -# of its own, which might or might not be a good thing. Nevertheless, -# here are some enhancements that will probably be requested some day -# and are worth keeping in mind (assuming this takes off): -# -# - A command to re-read / re-load the SConscript files. This may -# involve allowing people to specify command-line options (e.g. -f, -# -I, --no-site-dir) that affect how the SConscript files are read. -# -# - Additional command-line options on the "build" command. -# -# Of the supported options that seemed to make sense (after a quick -# pass through the list), the ones that seemed likely enough to be -# used are listed in the man page and have explicit test scripts. -# -# These had code changed in Script/Main.py to support them, but didn't -# seem likely to be used regularly, so had no test scripts added: -# -# build --diskcheck=* -# build --implicit-cache=* -# build --implicit-deps-changed=* -# build --implicit-deps-unchanged=* -# -# These look like they should "just work" with no changes to the -# existing code, but like those above, look unlikely to be used and -# therefore had no test scripts added: -# -# build --random -# -# These I'm not sure about. They might be useful for individual -# "build" commands, and may even work, but they seem unlikely enough -# that we'll wait until they're requested before spending any time on -# writing test scripts for them, or investigating whether they work. 
-# -# build -q [??? is there a useful analog to the exit status?] -# build --duplicate= -# build --profile= -# build --max-drift= -# build --warn=* -# build --Y -# -# - Most of the SCons command-line options that the "build" command -# supports should be settable as default options that apply to all -# subsequent "build" commands. Maybe a "set {option}" command that -# maps to "SetOption('{option}')". -# -# - Need something in the 'help' command that prints the -h output. -# -# - A command to run the configure subsystem separately (must see how -# this interacts with the new automake model). -# -# - Command-line completion of target names; maybe even of SCons options? -# Completion is something that's supported by the Python cmd module, -# so this should be doable without too much trouble. -# - -import cmd -import copy -import os -import re -import shlex -import sys - -try: - import readline -except ImportError: - pass - -class SConsInteractiveCmd(cmd.Cmd): - """\ - -build [TARGETS] Build the specified TARGETS and their dependencies. 'b' is a synonym. -clean [TARGETS] Clean (remove) the specified TARGETS and their dependencies. 'c' is a synonym. -exit Exit SCons interactive mode. -help [COMMAND] Prints help for the specified COMMAND. 'h' and '?' are synonyms. -shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and '!' are synonyms. -version Prints SCons version information. 
-""" - - synonyms = { - 'b' : 'build', - 'c' : 'clean', - 'h' : 'help', - 'scons' : 'build', - 'sh' : 'shell', - } - - def __init__(self, **kw): - cmd.Cmd.__init__(self) - for key, val in kw.items(): - setattr(self, key, val) - - if sys.platform == 'win32': - self.shell_variable = 'COMSPEC' - else: - self.shell_variable = 'SHELL' - - def default(self, argv): - print("*** Unknown command: %s" % argv[0]) - - def onecmd(self, line): - line = line.strip() - if not line: - print(self.lastcmd) - return self.emptyline() - self.lastcmd = line - if line[0] == '!': - line = 'shell ' + line[1:] - elif line[0] == '?': - line = 'help ' + line[1:] - if os.sep == '\\': - line = line.replace('\\', '\\\\') - argv = shlex.split(line) - argv[0] = self.synonyms.get(argv[0], argv[0]) - if not argv[0]: - return self.default(line) - else: - try: - func = getattr(self, 'do_' + argv[0]) - except AttributeError: - return self.default(argv) - return func(argv) - - def do_build(self, argv): - """\ - build [TARGETS] Build the specified TARGETS and their - dependencies. 'b' is a synonym. - """ - import SCons.Node - import SCons.SConsign - import SCons.Script.Main - - options = copy.deepcopy(self.options) - - options, targets = self.parser.parse_args(argv[1:], values=options) - - SCons.Script.COMMAND_LINE_TARGETS = targets - - if targets: - SCons.Script.BUILD_TARGETS = targets - else: - # If the user didn't specify any targets on the command line, - # use the list of default targets. - SCons.Script.BUILD_TARGETS = SCons.Script._build_plus_default - - nodes = SCons.Script.Main._build_targets(self.fs, - options, - targets, - self.target_top) - - if not nodes: - return - - # Call each of the Node's alter_targets() methods, which may - # provide additional targets that ended up as part of the build - # (the canonical example being a VariantDir() when we're building - # from a source directory) and which we therefore need their - # state cleared, too. 
- x = [] - for n in nodes: - x.extend(n.alter_targets()[0]) - nodes.extend(x) - - # Clean up so that we can perform the next build correctly. - # - # We do this by walking over all the children of the targets, - # and clearing their state. - # - # We currently have to re-scan each node to find their - # children, because built nodes have already been partially - # cleared and don't remember their children. (In scons - # 0.96.1 and earlier, this wasn't the case, and we didn't - # have to re-scan the nodes.) - # - # Because we have to re-scan each node, we can't clear the - # nodes as we walk over them, because we may end up rescanning - # a cleared node as we scan a later node. Therefore, only - # store the list of nodes that need to be cleared as we walk - # the tree, and clear them in a separate pass. - # - # XXX: Someone more familiar with the inner workings of scons - # may be able to point out a more efficient way to do this. - - SCons.Script.Main.progress_display("scons: Clearing cached node information ...") - - seen_nodes = {} - - def get_unseen_children(node, parent, seen_nodes=seen_nodes): - def is_unseen(node, seen_nodes=seen_nodes): - return node not in seen_nodes - return [child for child in node.children(scan=1) if is_unseen(child)] - - def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes): - seen_nodes[node] = 1 - - # If this file is in a VariantDir and has a - # corresponding source file in the source tree, remember the - # node in the source tree, too. This is needed in - # particular to clear cached implicit dependencies on the - # source file, since the scanner will scan it if the - # VariantDir was created with duplicate=0. 
- try: - rfile_method = node.rfile - except AttributeError: - return - else: - rfile = rfile_method() - if rfile != node: - seen_nodes[rfile] = 1 - - for node in nodes: - walker = SCons.Node.Walker(node, - kids_func=get_unseen_children, - eval_func=add_to_seen_nodes) - n = walker.get_next() - while n: - n = walker.get_next() - - for node in list(seen_nodes.keys()): - # Call node.clear() to clear most of the state - node.clear() - # node.clear() doesn't reset node.state, so call - # node.set_state() to reset it manually - node.set_state(SCons.Node.no_state) - node.implicit = None - - # Debug: Uncomment to verify that all Taskmaster reference - # counts have been reset to zero. - #if node.ref_count != 0: - # from SCons.Debug import Trace - # Trace('node %s, ref_count %s !!!\n' % (node, node.ref_count)) - - SCons.SConsign.Reset() - SCons.Script.Main.progress_display("scons: done clearing node information.") - - def do_clean(self, argv): - """\ - clean [TARGETS] Clean (remove) the specified TARGETS - and their dependencies. 'c' is a synonym. - """ - return self.do_build(['build', '--clean'] + argv[1:]) - - def do_EOF(self, argv): - print() - self.do_exit(argv) - - def _do_one_help(self, arg): - try: - # If help_() exists, then call it. 
- func = getattr(self, 'help_' + arg) - except AttributeError: - try: - func = getattr(self, 'do_' + arg) - except AttributeError: - doc = None - else: - doc = self._doc_to_help(func) - if doc: - sys.stdout.write(doc + '\n') - sys.stdout.flush() - else: - doc = self.strip_initial_spaces(func()) - if doc: - sys.stdout.write(doc + '\n') - sys.stdout.flush() - - def _doc_to_help(self, obj): - doc = obj.__doc__ - if doc is None: - return '' - return self._strip_initial_spaces(doc) - - def _strip_initial_spaces(self, s): - lines = s.split('\n') - spaces = re.match(' *', lines[0]).group(0) - def strip_spaces(l, spaces=spaces): - if l[:len(spaces)] == spaces: - l = l[len(spaces):] - return l - lines = list(map(strip_spaces, lines)) - return '\n'.join(lines) - - def do_exit(self, argv): - """\ - exit Exit SCons interactive mode. - """ - sys.exit(0) - - def do_help(self, argv): - """\ - help [COMMAND] Prints help for the specified COMMAND. 'h' - and '?' are synonyms. - """ - if argv[1:]: - for arg in argv[1:]: - if self._do_one_help(arg): - break - else: - # If bare 'help' is called, print this class's doc - # string (if it has one). - doc = self._doc_to_help(self.__class__) - if doc: - sys.stdout.write(doc + '\n') - sys.stdout.flush() - - def do_shell(self, argv): - """\ - shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and - '!' are synonyms. - """ - import subprocess - argv = argv[1:] - if not argv: - argv = os.environ[self.shell_variable] - try: - # Per "[Python-Dev] subprocess insufficiently platform-independent?" - # http://mail.python.org/pipermail/python-dev/2008-August/081979.html "+ - # Doing the right thing with an argument list currently - # requires different shell= values on Windows and Linux. - p = subprocess.Popen(argv, shell=(sys.platform=='win32')) - except EnvironmentError as e: - sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror)) - else: - p.wait() - - def do_version(self, argv): - """\ - version Prints SCons version information. 
- """ - sys.stdout.write(self.parser.version + '\n') - -def interact(fs, parser, options, targets, target_top): - c = SConsInteractiveCmd(prompt = 'scons>>> ', - fs = fs, - parser = parser, - options = options, - targets = targets, - target_top = target_top) - c.cmdloop() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/Main.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/Main.py deleted file mode 100644 index 880f77ba769..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/Main.py +++ /dev/null @@ -1,1447 +0,0 @@ -"""SCons.Script - -This file implements the main() function used by the scons script. - -Architecturally, this *is* the scons script, and will likely only be -called from the external "scons" wrapper. Consequently, anything here -should not be, or be considered, part of the build engine. If it's -something that we expect other software to want to use, it should go in -some other module. If it's specific to the "scons" script invocation, -it goes here. -""" - -from __future__ import print_function - - -unsupported_python_version = (2, 6, 0) -deprecated_python_version = (2, 7, 0) - - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Script/Main.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -import SCons.compat - -import os -import sys -import time -import traceback -import sysconfig -import platform - -import SCons.CacheDir -import SCons.Debug -import SCons.Defaults -import SCons.Environment -import SCons.Errors -import SCons.Job -import SCons.Node -import SCons.Node.FS -import SCons.Platform -import SCons.Platform.virtualenv -import SCons.SConf -import SCons.Script -import SCons.Taskmaster -import SCons.Util -import SCons.Warnings - -import SCons.Script.Interactive - -# Global variables -first_command_start = None -last_command_end = None -print_objects = 0 -print_memoizer = 0 -print_stacktrace = 0 -print_time = 0 -print_action_timestamps = 0 -sconscript_time = 0 -cumulative_command_time = 0 -exit_status = 0 # final exit status, assume success by default -this_build_status = 0 # "exit status" of an individual build -num_jobs = None -delayed_warnings = [] -jobs = None - -def fetch_win32_parallel_msg(): - # A subsidiary function that exists solely to isolate this import - # so we don't have to pull it in on all platforms, and so that an - # in-line "import" statement in the _main() function below doesn't - # cause warnings about local names shadowing use of the 'SCons' - # global in nest scopes and UnboundLocalErrors and the like in some - # versions (2.1) of Python. 
- import SCons.Platform.win32 - return SCons.Platform.win32.parallel_msg - - -def revert_io(): - # This call is added to revert stderr and stdout to the original - # ones just in case some build rule or something else in the system - # has redirected them elsewhere. - sys.stderr = sys.__stderr__ - sys.stdout = sys.__stdout__ - - -class SConsPrintHelpException(Exception): - pass - - -display = SCons.Util.display -progress_display = SCons.Util.DisplayEngine() - - -class Progressor(object): - prev = '' - count = 0 - target_string = '$TARGET' - - def __init__(self, obj, interval=1, file=None, overwrite=False): - if file is None: - file = sys.stdout - - self.obj = obj - self.file = file - self.interval = interval - self.overwrite = overwrite - - if callable(obj): - self.func = obj - elif SCons.Util.is_List(obj): - self.func = self.spinner - elif obj.find(self.target_string) != -1: - self.func = self.replace_string - else: - self.func = self.string - - def write(self, s): - self.file.write(s) - self.file.flush() - self.prev = s - - def erase_previous(self): - if self.prev: - length = len(self.prev) - if self.prev[-1] in ('\n', '\r'): - length = length - 1 - self.write(' ' * length + '\r') - self.prev = '' - - def spinner(self, node): - self.write(self.obj[self.count % len(self.obj)]) - - def string(self, node): - self.write(self.obj) - - def replace_string(self, node): - self.write(self.obj.replace(self.target_string, str(node))) - - def __call__(self, node): - self.count = self.count + 1 - if (self.count % self.interval) == 0: - if self.overwrite: - self.erase_previous() - self.func(node) - -ProgressObject = SCons.Util.Null() - -def Progress(*args, **kw): - global ProgressObject - ProgressObject = Progressor(*args, **kw) - -# Task control. 
-# - -_BuildFailures = [] - - -def GetBuildFailures(): - return _BuildFailures - - -class BuildTask(SCons.Taskmaster.OutOfDateTask): - """An SCons build task.""" - progress = ProgressObject - - def display(self, message): - display('scons: ' + message) - - def prepare(self): - self.progress(self.targets[0]) - return SCons.Taskmaster.OutOfDateTask.prepare(self) - - def needs_execute(self): - if SCons.Taskmaster.OutOfDateTask.needs_execute(self): - return True - if self.top and self.targets[0].has_builder(): - display("scons: `%s' is up to date." % str(self.node)) - return False - - def execute(self): - if print_time: - start_time = time.time() - global first_command_start - if first_command_start is None: - first_command_start = start_time - SCons.Taskmaster.OutOfDateTask.execute(self) - if print_time: - global cumulative_command_time - global last_command_end - finish_time = time.time() - last_command_end = finish_time - cumulative_command_time = cumulative_command_time+finish_time-start_time - if print_action_timestamps: - sys.stdout.write("Command execution start timestamp: %s: %f\n"%(str(self.node), start_time)) - sys.stdout.write("Command execution end timestamp: %s: %f\n"%(str(self.node), finish_time)) - sys.stdout.write("Command execution time: %s: %f seconds\n"%(str(self.node), finish_time-start_time)) - - def do_failed(self, status=2): - _BuildFailures.append(self.exception[1]) - global exit_status - global this_build_status - if self.options.ignore_errors: - SCons.Taskmaster.OutOfDateTask.executed(self) - elif self.options.keep_going: - SCons.Taskmaster.OutOfDateTask.fail_continue(self) - exit_status = status - this_build_status = status - else: - SCons.Taskmaster.OutOfDateTask.fail_stop(self) - exit_status = status - this_build_status = status - - def executed(self): - t = self.targets[0] - if self.top and not t.has_builder() and not t.side_effect: - if not t.exists(): - if t.__class__.__name__ in ('File', 'Dir', 'Entry'): - errstr="Do not know how to 
make %s target `%s' (%s)." % (t.__class__.__name__, t, t.get_abspath()) - else: # Alias or Python or ... - errstr="Do not know how to make %s target `%s'." % (t.__class__.__name__, t) - sys.stderr.write("scons: *** " + errstr) - if not self.options.keep_going: - sys.stderr.write(" Stop.") - sys.stderr.write("\n") - try: - raise SCons.Errors.BuildError(t, errstr) - except KeyboardInterrupt: - raise - except: - self.exception_set() - self.do_failed() - else: - print("scons: Nothing to be done for `%s'." % t) - SCons.Taskmaster.OutOfDateTask.executed(self) - else: - SCons.Taskmaster.OutOfDateTask.executed(self) - - def failed(self): - # Handle the failure of a build task. The primary purpose here - # is to display the various types of Errors and Exceptions - # appropriately. - exc_info = self.exc_info() - try: - t, e, tb = exc_info - except ValueError: - t, e = exc_info - tb = None - - if t is None: - # The Taskmaster didn't record an exception for this Task; - # see if the sys module has one. - try: - t, e, tb = sys.exc_info()[:] - except ValueError: - t, e = exc_info - tb = None - - # Deprecated string exceptions will have their string stored - # in the first entry of the tuple. 
- if e is None: - e = t - - buildError = SCons.Errors.convert_to_BuildError(e) - if not buildError.node: - buildError.node = self.node - - node = buildError.node - if not SCons.Util.is_List(node): - node = [ node ] - nodename = ', '.join(map(str, node)) - - errfmt = "scons: *** [%s] %s\n" - sys.stderr.write(errfmt % (nodename, buildError)) - - if (buildError.exc_info[2] and buildError.exc_info[1] and - not isinstance( - buildError.exc_info[1], - (EnvironmentError, SCons.Errors.StopError, - SCons.Errors.UserError))): - type, value, trace = buildError.exc_info - if tb and print_stacktrace: - sys.stderr.write("scons: internal stack trace:\n") - traceback.print_tb(tb, file=sys.stderr) - traceback.print_exception(type, value, trace) - elif tb and print_stacktrace: - sys.stderr.write("scons: internal stack trace:\n") - traceback.print_tb(tb, file=sys.stderr) - - self.exception = (e, buildError, tb) # type, value, traceback - self.do_failed(buildError.exitstatus) - - self.exc_clear() - - def postprocess(self): - if self.top: - t = self.targets[0] - for tp in self.options.tree_printers: - tp.display(t) - if self.options.debug_includes: - tree = t.render_include_tree() - if tree: - print() - print(tree) - SCons.Taskmaster.OutOfDateTask.postprocess(self) - - def make_ready(self): - """Make a task ready for execution""" - SCons.Taskmaster.OutOfDateTask.make_ready(self) - if self.out_of_date and self.options.debug_explain: - explanation = self.out_of_date[0].explain() - if explanation: - sys.stdout.write("scons: " + explanation) - - -class CleanTask(SCons.Taskmaster.AlwaysTask): - """An SCons clean task.""" - def fs_delete(self, path, pathstr, remove=True): - try: - if os.path.lexists(path): - if os.path.isfile(path) or os.path.islink(path): - if remove: os.unlink(path) - display("Removed " + pathstr) - elif os.path.isdir(path) and not os.path.islink(path): - # delete everything in the dir - for e in sorted(os.listdir(path)): - p = os.path.join(path, e) - s = 
os.path.join(pathstr, e) - if os.path.isfile(p): - if remove: os.unlink(p) - display("Removed " + s) - else: - self.fs_delete(p, s, remove) - # then delete dir itself - if remove: os.rmdir(path) - display("Removed directory " + pathstr) - else: - errstr = "Path '%s' exists but isn't a file or directory." - raise SCons.Errors.UserError(errstr % (pathstr)) - except SCons.Errors.UserError as e: - print(e) - except (IOError, OSError) as e: - print("scons: Could not remove '%s':" % pathstr, e.strerror) - - def _get_files_to_clean(self): - result = [] - target = self.targets[0] - if target.has_builder() or target.side_effect: - result = [t for t in self.targets if not t.noclean] - return result - - def _clean_targets(self, remove=True): - target = self.targets[0] - if target in SCons.Environment.CleanTargets: - files = SCons.Environment.CleanTargets[target] - for f in files: - self.fs_delete(f.get_abspath(), str(f), remove) - - def show(self): - for t in self._get_files_to_clean(): - if not t.isdir(): - display("Removed " + str(t)) - self._clean_targets(remove=False) - - def remove(self): - for t in self._get_files_to_clean(): - try: - removed = t.remove() - except OSError as e: - # An OSError may indicate something like a permissions - # issue, an IOError would indicate something like - # the file not existing. In either case, print a - # message and keep going to try to remove as many - # targets as possible. - print("scons: Could not remove '{0}'".format(str(t)), e.strerror) - else: - if removed: - display("Removed " + str(t)) - self._clean_targets(remove=True) - - execute = remove - - # We want the Taskmaster to update the Node states (and therefore - # handle reference counts, etc.), but we don't want to call - # back to the Node's post-build methods, which would do things - # we don't want, like store .sconsign information. 
- executed = SCons.Taskmaster.Task.executed_without_callbacks - - # Have the Taskmaster arrange to "execute" all of the targets, because - # we'll figure out ourselves (in remove() or show() above) whether - # anything really needs to be done. - make_ready = SCons.Taskmaster.Task.make_ready_all - - def prepare(self): - pass - -class QuestionTask(SCons.Taskmaster.AlwaysTask): - """An SCons task for the -q (question) option.""" - def prepare(self): - pass - - def execute(self): - if self.targets[0].get_state() != SCons.Node.up_to_date or \ - (self.top and not self.targets[0].exists()): - global exit_status - global this_build_status - exit_status = 1 - this_build_status = 1 - self.tm.stop() - - def executed(self): - pass - - -class TreePrinter(object): - def __init__(self, derived=False, prune=False, status=False): - self.derived = derived - self.prune = prune - self.status = status - def get_all_children(self, node): - return node.all_children() - def get_derived_children(self, node): - children = node.all_children(None) - return [x for x in children if x.has_builder()] - def display(self, t): - if self.derived: - func = self.get_derived_children - else: - func = self.get_all_children - s = self.status and 2 or 0 - SCons.Util.print_tree(t, func, prune=self.prune, showtags=s) - - -def python_version_string(): - return sys.version.split()[0] - -def python_version_unsupported(version=sys.version_info): - return version < unsupported_python_version - -def python_version_deprecated(version=sys.version_info): - return version < deprecated_python_version - - -class FakeOptionParser(object): - """ - A do-nothing option parser, used for the initial OptionsParser variable. - - During normal SCons operation, the OptionsParser is created right - away by the main() function. Certain tests scripts however, can - introspect on different Tool modules, the initialization of which - can try to add a new, local option to an otherwise uninitialized - OptionsParser object. 
This allows that introspection to happen - without blowing up. - - """ - class FakeOptionValues(object): - def __getattr__(self, attr): - return None - values = FakeOptionValues() - def add_local_option(self, *args, **kw): - pass - -OptionsParser = FakeOptionParser() - -def AddOption(*args, **kw): - if 'default' not in kw: - kw['default'] = None - result = OptionsParser.add_local_option(*args, **kw) - return result - -def GetOption(name): - return getattr(OptionsParser.values, name) - -def SetOption(name, value): - return OptionsParser.values.set_option(name, value) - -def PrintHelp(file=None): - OptionsParser.print_help(file=file) - -class Stats(object): - def __init__(self): - self.stats = [] - self.labels = [] - self.append = self.do_nothing - self.print_stats = self.do_nothing - def enable(self, outfp): - self.outfp = outfp - self.append = self.do_append - self.print_stats = self.do_print - def do_nothing(self, *args, **kw): - pass - -class CountStats(Stats): - def do_append(self, label): - self.labels.append(label) - self.stats.append(SCons.Debug.fetchLoggedInstances()) - def do_print(self): - stats_table = {} - for s in self.stats: - for n in [t[0] for t in s]: - stats_table[n] = [0, 0, 0, 0] - i = 0 - for s in self.stats: - for n, c in s: - stats_table[n][i] = c - i = i + 1 - self.outfp.write("Object counts:\n") - pre = [" "] - post = [" %s\n"] - l = len(self.stats) - fmt1 = ''.join(pre + [' %7s']*l + post) - fmt2 = ''.join(pre + [' %7d']*l + post) - labels = self.labels[:l] - labels.append(("", "Class")) - self.outfp.write(fmt1 % tuple([x[0] for x in labels])) - self.outfp.write(fmt1 % tuple([x[1] for x in labels])) - for k in sorted(stats_table.keys()): - r = stats_table[k][:l] + [k] - self.outfp.write(fmt2 % tuple(r)) - -count_stats = CountStats() - -class MemStats(Stats): - def do_append(self, label): - self.labels.append(label) - self.stats.append(SCons.Debug.memory()) - def do_print(self): - fmt = 'Memory %-32s %12d\n' - for label, stats in 
zip(self.labels, self.stats): - self.outfp.write(fmt % (label, stats)) - -memory_stats = MemStats() - -# utility functions - -def _scons_syntax_error(e): - """Handle syntax errors. Print out a message and show where the error - occurred. - """ - etype, value, tb = sys.exc_info() - lines = traceback.format_exception_only(etype, value) - for line in lines: - sys.stderr.write(line+'\n') - sys.exit(2) - -def find_deepest_user_frame(tb): - """ - Find the deepest stack frame that is not part of SCons. - - Input is a "pre-processed" stack trace in the form - returned by traceback.extract_tb() or traceback.extract_stack() - """ - - tb.reverse() - - # find the deepest traceback frame that is not part - # of SCons: - for frame in tb: - filename = frame[0] - if filename.find(os.sep+'SCons'+os.sep) == -1: - return frame - return tb[0] - -def _scons_user_error(e): - """Handle user errors. Print out a message and a description of the - error, along with the line number and routine where it occured. - The file and line number will be the deepest stack frame that is - not part of SCons itself. - """ - global print_stacktrace - etype, value, tb = sys.exc_info() - if print_stacktrace: - traceback.print_exception(etype, value, tb) - filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb)) - sys.stderr.write("\nscons: *** %s\n" % value) - sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) - sys.exit(2) - -def _scons_user_warning(e): - """Handle user warnings. Print out a message and a description of - the warning, along with the line number and routine where it occured. - The file and line number will be the deepest stack frame that is - not part of SCons itself. 
- """ - etype, value, tb = sys.exc_info() - filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb)) - sys.stderr.write("\nscons: warning: %s\n" % e) - sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) - -def _scons_internal_warning(e): - """Slightly different from _scons_user_warning in that we use the - *current call stack* rather than sys.exc_info() to get our stack trace. - This is used by the warnings framework to print warnings.""" - filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_stack()) - sys.stderr.write("\nscons: warning: %s\n" % e.args[0]) - sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) - -def _scons_internal_error(): - """Handle all errors but user errors. Print out a message telling - the user what to do in this case and print a normal trace. - """ - print('internal error') - traceback.print_exc() - sys.exit(2) - -def _SConstruct_exists(dirname='', repositories=[], filelist=None): - """This function checks that an SConstruct file exists in a directory. - If so, it returns the path of the file. By default, it checks the - current directory. - """ - if not filelist: - filelist = ['SConstruct', 'Sconstruct', 'sconstruct', 'SConstruct.py', 'Sconstruct.py', 'sconstruct.py'] - for file in filelist: - sfile = os.path.join(dirname, file) - if os.path.isfile(sfile): - return sfile - if not os.path.isabs(sfile): - for rep in repositories: - if os.path.isfile(os.path.join(rep, sfile)): - return sfile - return None - -def _set_debug_values(options): - global print_memoizer, print_objects, print_stacktrace, print_time, print_action_timestamps - - debug_values = options.debug - - if "count" in debug_values: - # All of the object counts are within "if track_instances:" blocks, - # which get stripped when running optimized (with python -O or - # from compiled *.pyo files). 
Provide a warning if __debug__ is - # stripped, so it doesn't just look like --debug=count is broken. - enable_count = False - if __debug__: enable_count = True - if enable_count: - count_stats.enable(sys.stdout) - SCons.Debug.track_instances = True - else: - msg = "--debug=count is not supported when running SCons\n" + \ - "\twith the python -O option or optimized (.pyo) modules." - SCons.Warnings.warn(SCons.Warnings.NoObjectCountWarning, msg) - if "dtree" in debug_values: - options.tree_printers.append(TreePrinter(derived=True)) - options.debug_explain = ("explain" in debug_values) - if "findlibs" in debug_values: - SCons.Scanner.Prog.print_find_libs = "findlibs" - options.debug_includes = ("includes" in debug_values) - print_memoizer = ("memoizer" in debug_values) - if "memory" in debug_values: - memory_stats.enable(sys.stdout) - print_objects = ("objects" in debug_values) - if print_objects: - SCons.Debug.track_instances = True - if "presub" in debug_values: - SCons.Action.print_actions_presub = 1 - if "stacktrace" in debug_values: - print_stacktrace = 1 - if "stree" in debug_values: - options.tree_printers.append(TreePrinter(status=True)) - if "time" in debug_values: - print_time = 1 - if "action-timestamps" in debug_values: - print_time = 1 - print_action_timestamps = 1 - if "tree" in debug_values: - options.tree_printers.append(TreePrinter()) - if "prepare" in debug_values: - SCons.Taskmaster.print_prepare = 1 - if "duplicate" in debug_values: - SCons.Node.print_duplicate = 1 - -def _create_path(plist): - path = '.' - for d in plist: - if os.path.isabs(d): - path = d - else: - path = path + '/' + d - return path - -def _load_site_scons_dir(topdir, site_dir_name=None): - """Load the site_scons dir under topdir. 
- Prepends site_scons to sys.path, imports site_scons/site_init.py, - and prepends site_scons/site_tools to default toolpath.""" - if site_dir_name: - err_if_not_found = True # user specified: err if missing - else: - site_dir_name = "site_scons" - err_if_not_found = False - - site_dir = os.path.join(topdir, site_dir_name) - if not os.path.exists(site_dir): - if err_if_not_found: - raise SCons.Errors.UserError("site dir %s not found."%site_dir) - return - - site_init_filename = "site_init.py" - site_init_modname = "site_init" - site_tools_dirname = "site_tools" - # prepend to sys.path - sys.path = [os.path.abspath(site_dir)] + sys.path - site_init_file = os.path.join(site_dir, site_init_filename) - site_tools_dir = os.path.join(site_dir, site_tools_dirname) - if os.path.exists(site_init_file): - import imp, re - try: - try: - fp, pathname, description = imp.find_module(site_init_modname, - [site_dir]) - # Load the file into SCons.Script namespace. This is - # opaque and clever; m is the module object for the - # SCons.Script module, and the exec ... in call executes a - # file (or string containing code) in the context of the - # module's dictionary, so anything that code defines ends - # up adding to that module. This is really short, but all - # the error checking makes it longer. - try: - m = sys.modules['SCons.Script'] - except Exception as e: - fmt = 'cannot import site_init.py: missing SCons.Script module %s' - raise SCons.Errors.InternalError(fmt % repr(e)) - try: - sfx = description[0] - modname = os.path.basename(pathname)[:-len(sfx)] - site_m = {"__file__": pathname, "__name__": modname, "__doc__": None} - re_special = re.compile("__[^_]+__") - for k in list(m.__dict__.keys()): - if not re_special.match(k): - site_m[k] = m.__dict__[k] - - # This is the magic. 
- exec(compile(fp.read(), fp.name, 'exec'), site_m) - except KeyboardInterrupt: - raise - except Exception as e: - fmt = '*** Error loading site_init file %s:\n' - sys.stderr.write(fmt % repr(site_init_file)) - raise - else: - for k in site_m: - if not re_special.match(k): - m.__dict__[k] = site_m[k] - except KeyboardInterrupt: - raise - except ImportError as e: - fmt = '*** cannot import site init file %s:\n' - sys.stderr.write(fmt % repr(site_init_file)) - raise - finally: - if fp: - fp.close() - if os.path.exists(site_tools_dir): - # prepend to DefaultToolpath - SCons.Tool.DefaultToolpath.insert(0, os.path.abspath(site_tools_dir)) - -def _load_all_site_scons_dirs(topdir, verbose=None): - """Load all of the predefined site_scons dir. - Order is significant; we load them in order from most generic - (machine-wide) to most specific (topdir). - The verbose argument is only for testing. - """ - platform = SCons.Platform.platform_default() - - def homedir(d): - return os.path.expanduser('~/'+d) - - if platform == 'win32' or platform == 'cygwin': - # Note we use $ here instead of %...% because older - # pythons (prior to 2.6?) didn't expand %...% on Windows. - # This set of dirs should work on XP, Vista, 7 and later. - sysdirs=[ - os.path.expandvars('$ALLUSERSPROFILE\\Application Data\\scons'), - os.path.expandvars('$USERPROFILE\\Local Settings\\Application Data\\scons')] - appdatadir = os.path.expandvars('$APPDATA\\scons') - if appdatadir not in sysdirs: - sysdirs.append(appdatadir) - sysdirs.append(homedir('.scons')) - - elif platform == 'darwin': # MacOS X - sysdirs=['/Library/Application Support/SCons', - '/opt/local/share/scons', # (for MacPorts) - '/sw/share/scons', # (for Fink) - homedir('Library/Application Support/SCons'), - homedir('.scons')] - elif platform == 'sunos': # Solaris - sysdirs=['/opt/sfw/scons', - '/usr/share/scons', - homedir('.scons')] - else: # Linux, HPUX, etc. - # assume posix-like, i.e. 
platform == 'posix' - sysdirs=['/usr/share/scons', - homedir('.scons')] - - dirs=sysdirs + [topdir] - for d in dirs: - if verbose: # this is used by unit tests. - print("Loading site dir ", d) - _load_site_scons_dir(d) - -def test_load_all_site_scons_dirs(d): - _load_all_site_scons_dirs(d, True) - -def version_string(label, module): - version = module.__version__ - build = module.__build__ - if build: - if build[0] != '.': - build = '.' + build - version = version + build - fmt = "\t%s: v%s, %s, by %s on %s\n" - return fmt % (label, - version, - module.__date__, - module.__developer__, - module.__buildsys__) - -def path_string(label, module): - path = module.__path__ - return "\t%s path: %s\n"%(label,path) - -def _main(parser): - global exit_status - global this_build_status - - options = parser.values - - # Here's where everything really happens. - - # First order of business: set up default warnings and then - # handle the user's warning options, so that we can issue (or - # suppress) appropriate warnings about anything that might happen, - # as configured by the user. - - default_warnings = [ SCons.Warnings.WarningOnByDefault, - SCons.Warnings.DeprecatedWarning, - ] - - for warning in default_warnings: - SCons.Warnings.enableWarningClass(warning) - SCons.Warnings._warningOut = _scons_internal_warning - SCons.Warnings.process_warn_strings(options.warn) - - # Now that we have the warnings configuration set up, we can actually - # issue (or suppress) any warnings about warning-worthy things that - # occurred while the command-line options were getting parsed. 
- try: - dw = options.delayed_warnings - except AttributeError: - pass - else: - delayed_warnings.extend(dw) - for warning_type, message in delayed_warnings: - SCons.Warnings.warn(warning_type, message) - - if not SCons.Platform.virtualenv.virtualenv_enabled_by_default: - if options.enable_virtualenv: - SCons.Platform.virtualenv.enable_virtualenv = True - - if options.ignore_virtualenv: - SCons.Platform.virtualenv.ignore_virtualenv = True - - if options.diskcheck: - SCons.Node.FS.set_diskcheck(options.diskcheck) - - # Next, we want to create the FS object that represents the outside - # world's file system, as that's central to a lot of initialization. - # To do this, however, we need to be in the directory from which we - # want to start everything, which means first handling any relevant - # options that might cause us to chdir somewhere (-C, -D, -U, -u). - if options.directory: - script_dir = os.path.abspath(_create_path(options.directory)) - else: - script_dir = os.getcwd() - - target_top = None - if options.climb_up: - target_top = '.' # directory to prepend to targets - while script_dir and not _SConstruct_exists(script_dir, - options.repository, - options.file): - script_dir, last_part = os.path.split(script_dir) - if last_part: - target_top = os.path.join(last_part, target_top) - else: - script_dir = '' - - if script_dir and script_dir != os.getcwd(): - if not options.silent: - display("scons: Entering directory `%s'" % script_dir) - try: - os.chdir(script_dir) - except OSError: - sys.stderr.write("Could not change directory to %s\n" % script_dir) - - # Now that we're in the top-level SConstruct directory, go ahead - # and initialize the FS object that represents the file system, - # and make it the build engine default. - fs = SCons.Node.FS.get_default_fs() - - for rep in options.repository: - fs.Repository(rep) - - # Now that we have the FS object, the next order of business is to - # check for an SConstruct file (or other specified config file). 
- # If there isn't one, we can bail before doing any more work. - scripts = [] - if options.file: - scripts.extend(options.file) - if not scripts: - sfile = _SConstruct_exists(repositories=options.repository, - filelist=options.file) - if sfile: - scripts.append(sfile) - - if not scripts: - if options.help: - # There's no SConstruct, but they specified -h. - # Give them the options usage now, before we fail - # trying to read a non-existent SConstruct file. - raise SConsPrintHelpException - raise SCons.Errors.UserError("No SConstruct file found.") - - if scripts[0] == "-": - d = fs.getcwd() - else: - d = fs.File(scripts[0]).dir - fs.set_SConstruct_dir(d) - - _set_debug_values(options) - SCons.Node.implicit_cache = options.implicit_cache - SCons.Node.implicit_deps_changed = options.implicit_deps_changed - SCons.Node.implicit_deps_unchanged = options.implicit_deps_unchanged - - if options.no_exec: - SCons.SConf.dryrun = 1 - SCons.Action.execute_actions = None - if options.question: - SCons.SConf.dryrun = 1 - if options.clean: - SCons.SConf.SetBuildType('clean') - if options.help: - SCons.SConf.SetBuildType('help') - SCons.SConf.SetCacheMode(options.config) - SCons.SConf.SetProgressDisplay(progress_display) - - if options.no_progress or options.silent: - progress_display.set_mode(0) - - if options.site_dir: - _load_site_scons_dir(d.get_internal_path(), options.site_dir) - elif not options.no_site_dir: - _load_all_site_scons_dirs(d.get_internal_path()) - - if options.include_dir: - sys.path = options.include_dir + sys.path - - # If we're about to start SCons in the interactive mode, - # inform the FS about this right here. Else, the release_target_info - # method could get called on some nodes, like the used "gcc" compiler, - # when using the Configure methods within the SConscripts. - # This would then cause subtle bugs, as already happened in #2971. - if options.interactive: - SCons.Node.interactive = True - - # That should cover (most of) the options. 
Next, set up the variables - # that hold command-line arguments, so the SConscript files that we - # read and execute have access to them. - targets = [] - xmit_args = [] - for a in parser.largs: - if a[:1] == '-': - continue - if '=' in a: - xmit_args.append(a) - else: - targets.append(a) - SCons.Script._Add_Targets(targets + parser.rargs) - SCons.Script._Add_Arguments(xmit_args) - - # If stdout is not a tty, replace it with a wrapper object to call flush - # after every write. - # - # Tty devices automatically flush after every newline, so the replacement - # isn't necessary. Furthermore, if we replace sys.stdout, the readline - # module will no longer work. This affects the behavior during - # --interactive mode. --interactive should only be used when stdin and - # stdout refer to a tty. - if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty(): - sys.stdout = SCons.Util.Unbuffered(sys.stdout) - if not hasattr(sys.stderr, 'isatty') or not sys.stderr.isatty(): - sys.stderr = SCons.Util.Unbuffered(sys.stderr) - - memory_stats.append('before reading SConscript files:') - count_stats.append(('pre-', 'read')) - - # And here's where we (finally) read the SConscript files. - - progress_display("scons: Reading SConscript files ...") - - start_time = time.time() - try: - for script in scripts: - SCons.Script._SConscript._SConscript(fs, script) - except SCons.Errors.StopError as e: - # We had problems reading an SConscript file, such as it - # couldn't be copied in to the VariantDir. Since we're just - # reading SConscript files and haven't started building - # things yet, stop regardless of whether they used -i or -k - # or anything else. 
- revert_io() - sys.stderr.write("scons: *** %s Stop.\n" % e) - sys.exit(2) - global sconscript_time - sconscript_time = time.time() - start_time - - progress_display("scons: done reading SConscript files.") - - memory_stats.append('after reading SConscript files:') - count_stats.append(('post-', 'read')) - - # Re-{enable,disable} warnings in case they disabled some in - # the SConscript file. - # - # We delay enabling the PythonVersionWarning class until here so that, - # if they explicitly disabled it in either in the command line or in - # $SCONSFLAGS, or in the SConscript file, then the search through - # the list of deprecated warning classes will find that disabling - # first and not issue the warning. - #SCons.Warnings.enableWarningClass(SCons.Warnings.PythonVersionWarning) - SCons.Warnings.process_warn_strings(options.warn) - - # Now that we've read the SConscript files, we can check for the - # warning about deprecated Python versions--delayed until here - # in case they disabled the warning in the SConscript files. - if python_version_deprecated(): - msg = "Support for pre-%s Python version (%s) is deprecated.\n" + \ - " If this will cause hardship, contact scons-dev@scons.org" - deprecated_version_string = ".".join(map(str, deprecated_python_version)) - SCons.Warnings.warn(SCons.Warnings.PythonVersionWarning, - msg % (deprecated_version_string, python_version_string())) - - if not options.help: - # [ ] Clarify why we need to create Builder here at all, and - # why it is created in DefaultEnvironment - # https://bitbucket.org/scons/scons/commits/d27a548aeee8ad5e67ea75c2d19a7d305f784e30 - if SCons.SConf.NeedConfigHBuilder(): - SCons.SConf.CreateConfigHBuilder(SCons.Defaults.DefaultEnvironment()) - - # Now re-parse the command-line options (any to the left of a '--' - # argument, that is) with any user-defined command-line options that - # the SConscript files may have added to the parser object. 
This will - # emit the appropriate error message and exit if any unknown option - # was specified on the command line. - - parser.preserve_unknown_options = False - parser.parse_args(parser.largs, options) - - if options.help: - help_text = SCons.Script.help_text - if help_text is None: - # They specified -h, but there was no Help() inside the - # SConscript files. Give them the options usage. - raise SConsPrintHelpException - else: - print(help_text) - print("Use scons -H for help about command-line options.") - exit_status = 0 - return - - # Change directory to the top-level SConstruct directory, then tell - # the Node.FS subsystem that we're all done reading the SConscript - # files and calling Repository() and VariantDir() and changing - # directories and the like, so it can go ahead and start memoizing - # the string values of file system nodes. - - fs.chdir(fs.Top) - - SCons.Node.FS.save_strings(1) - - # Now that we've read the SConscripts we can set the options - # that are SConscript settable: - SCons.Node.implicit_cache = options.implicit_cache - SCons.Node.FS.set_duplicate(options.duplicate) - fs.set_max_drift(options.max_drift) - - SCons.Job.explicit_stack_size = options.stack_size - - if options.md5_chunksize: - SCons.Node.FS.File.md5_chunksize = options.md5_chunksize - - platform = SCons.Platform.platform_module() - - if options.interactive: - SCons.Script.Interactive.interact(fs, OptionsParser, options, - targets, target_top) - - else: - - # Build the targets - nodes = _build_targets(fs, options, targets, target_top) - if not nodes: - revert_io() - print('Found nothing to build') - exit_status = 2 - -def _build_targets(fs, options, targets, target_top): - - global this_build_status - this_build_status = 0 - - progress_display.set_mode(not (options.no_progress or options.silent)) - display.set_mode(not options.silent) - SCons.Action.print_actions = not options.silent - SCons.Action.execute_actions = not options.no_exec - SCons.Node.do_store_info = not 
options.no_exec - SCons.SConf.dryrun = options.no_exec - - if options.diskcheck: - SCons.Node.FS.set_diskcheck(options.diskcheck) - - SCons.CacheDir.cache_enabled = not options.cache_disable - SCons.CacheDir.cache_readonly = options.cache_readonly - SCons.CacheDir.cache_debug = options.cache_debug - SCons.CacheDir.cache_force = options.cache_force - SCons.CacheDir.cache_show = options.cache_show - - if options.no_exec: - CleanTask.execute = CleanTask.show - else: - CleanTask.execute = CleanTask.remove - - lookup_top = None - if targets or SCons.Script.BUILD_TARGETS != SCons.Script._build_plus_default: - # They specified targets on the command line or modified - # BUILD_TARGETS in the SConscript file(s), so if they used -u, - # -U or -D, we have to look up targets relative to the top, - # but we build whatever they specified. - if target_top: - lookup_top = fs.Dir(target_top) - target_top = None - - targets = SCons.Script.BUILD_TARGETS - else: - # There are no targets specified on the command line, - # so if they used -u, -U or -D, we may have to restrict - # what actually gets built. 
- d = None - if target_top: - if options.climb_up == 1: - # -u, local directory and below - target_top = fs.Dir(target_top) - lookup_top = target_top - elif options.climb_up == 2: - # -D, all Default() targets - target_top = None - lookup_top = None - elif options.climb_up == 3: - # -U, local SConscript Default() targets - target_top = fs.Dir(target_top) - def check_dir(x, target_top=target_top): - if hasattr(x, 'cwd') and x.cwd is not None: - cwd = x.cwd.srcnode() - return cwd == target_top - else: - # x doesn't have a cwd, so it's either not a target, - # or not a file, so go ahead and keep it as a default - # target and let the engine sort it out: - return 1 - d = [tgt for tgt in SCons.Script.DEFAULT_TARGETS if check_dir(tgt)] - SCons.Script.DEFAULT_TARGETS[:] = d - target_top = None - lookup_top = None - - targets = SCons.Script._Get_Default_Targets(d, fs) - - if not targets: - sys.stderr.write("scons: *** No targets specified and no Default() targets found. Stop.\n") - return None - - def Entry(x, ltop=lookup_top, ttop=target_top, fs=fs): - if isinstance(x, SCons.Node.Node): - node = x - else: - node = None - # Why would ltop be None? Unfortunately this happens. - if ltop is None: ltop = '' - # Curdir becomes important when SCons is called with -u, -C, - # or similar option that changes directory, and so the paths - # of targets given on the command line need to be adjusted. - curdir = os.path.join(os.getcwd(), str(ltop)) - for lookup in SCons.Node.arg2nodes_lookups: - node = lookup(x, curdir=curdir) - if node is not None: - break - if node is None: - node = fs.Entry(x, directory=ltop, create=1) - if ttop and not node.is_under(ttop): - if isinstance(node, SCons.Node.FS.Dir) and ttop.is_under(node): - node = ttop - else: - node = None - return node - - nodes = [_f for _f in map(Entry, targets) if _f] - - task_class = BuildTask # default action is to build targets - opening_message = "Building targets ..." - closing_message = "done building targets." 
- if options.keep_going: - failure_message = "done building targets (errors occurred during build)." - else: - failure_message = "building terminated because of errors." - if options.question: - task_class = QuestionTask - try: - if options.clean: - task_class = CleanTask - opening_message = "Cleaning targets ..." - closing_message = "done cleaning targets." - if options.keep_going: - failure_message = "done cleaning targets (errors occurred during clean)." - else: - failure_message = "cleaning terminated because of errors." - except AttributeError: - pass - - task_class.progress = ProgressObject - - if options.random: - def order(dependencies): - """Randomize the dependencies.""" - import random - random.shuffle(dependencies) - return dependencies - else: - def order(dependencies): - """Leave the order of dependencies alone.""" - return dependencies - - if options.taskmastertrace_file == '-': - tmtrace = sys.stdout - elif options.taskmastertrace_file: - tmtrace = open(options.taskmastertrace_file, 'w') - else: - tmtrace = None - taskmaster = SCons.Taskmaster.Taskmaster(nodes, task_class, order, tmtrace) - - # Let the BuildTask objects get at the options to respond to the - # various print_* settings, tree_printer list, etc. - BuildTask.options = options - - - is_pypy = platform.python_implementation() == 'PyPy' - # As of 3.7, python removed support for threadless platforms. - # See https://www.python.org/dev/peps/pep-0011/ - is_37_or_later = sys.version_info >= (3, 7) - python_has_threads = sysconfig.get_config_var('WITH_THREAD') or is_pypy or is_37_or_later - # to check if python configured with threads. 
- global num_jobs, jobs - num_jobs = options.num_jobs - jobs = SCons.Job.Jobs(num_jobs, taskmaster) - if num_jobs > 1: - msg = None - if sys.platform == 'win32': - msg = fetch_win32_parallel_msg() - elif jobs.num_jobs == 1 or not python_has_threads: - msg = "parallel builds are unsupported by this version of Python;\n" + \ - "\tignoring -j or num_jobs option.\n" - if msg: - SCons.Warnings.warn(SCons.Warnings.NoParallelSupportWarning, msg) - - memory_stats.append('before building targets:') - count_stats.append(('pre-', 'build')) - - def jobs_postfunc( - jobs=jobs, - options=options, - closing_message=closing_message, - failure_message=failure_message - ): - if jobs.were_interrupted(): - if not options.no_progress and not options.silent: - sys.stderr.write("scons: Build interrupted.\n") - global exit_status - global this_build_status - exit_status = 2 - this_build_status = 2 - - if this_build_status: - progress_display("scons: " + failure_message) - else: - progress_display("scons: " + closing_message) - if not options.no_exec: - if jobs.were_interrupted(): - progress_display("scons: writing .sconsign file.") - SCons.SConsign.write() - - progress_display("scons: " + opening_message) - jobs.run(postfunc = jobs_postfunc) - - memory_stats.append('after building targets:') - count_stats.append(('post-', 'build')) - - return nodes - -def _exec_main(parser, values): - sconsflags = os.environ.get('SCONSFLAGS', '') - all_args = sconsflags.split() + sys.argv[1:] - - options, args = parser.parse_args(all_args, values) - - if isinstance(options.debug, list) and "pdb" in options.debug: - import pdb - pdb.Pdb().runcall(_main, parser) - elif options.profile_file: - # compat layer imports "cProfile" for us if it's available. 
- from profile import Profile - - prof = Profile() - try: - prof.runcall(_main, parser) - finally: - prof.dump_stats(options.profile_file) - else: - _main(parser) - -def main(): - global OptionsParser - global exit_status - global first_command_start - - # Check up front for a Python version we do not support. We - # delay the check for deprecated Python versions until later, - # after the SConscript files have been read, in case they - # disable that warning. - if python_version_unsupported(): - msg = "scons: *** SCons version %s does not run under Python version %s.\n" - sys.stderr.write(msg % (SCons.__version__, python_version_string())) - sys.exit(1) - - parts = ["SCons by Steven Knight et al.:\n"] - try: - import __main__ - parts.append(version_string("script", __main__)) - except (ImportError, AttributeError): - # On Windows there is no scons.py, so there is no - # __main__.__version__, hence there is no script version. - pass - parts.append(version_string("engine", SCons)) - parts.append(path_string("engine", SCons)) - parts.append("Copyright (c) 2001 - 2019 The SCons Foundation") - version = ''.join(parts) - - from . import SConsOptions - parser = SConsOptions.Parser(version) - values = SConsOptions.SConsValues(parser.get_default_values()) - - OptionsParser = parser - - try: - try: - _exec_main(parser, values) - finally: - revert_io() - except SystemExit as s: - if s: - exit_status = s.code - except KeyboardInterrupt: - print("scons: Build interrupted.") - sys.exit(2) - except SyntaxError as e: - _scons_syntax_error(e) - except SCons.Errors.InternalError: - _scons_internal_error() - except SCons.Errors.UserError as e: - _scons_user_error(e) - except SConsPrintHelpException: - parser.print_help() - exit_status = 0 - except SCons.Errors.BuildError as e: - print(e) - exit_status = e.exitstatus - except: - # An exception here is likely a builtin Python exception Python - # code in an SConscript file. 
Show them precisely what the - # problem was and where it happened. - SCons.Script._SConscript.SConscript_exception() - sys.exit(2) - - memory_stats.print_stats() - count_stats.print_stats() - - if print_objects: - SCons.Debug.listLoggedInstances('*') - #SCons.Debug.dumpLoggedInstances('*') - - if print_memoizer: - SCons.Memoize.Dump("Memoizer (memory cache) hits and misses:") - - # Dump any development debug info that may have been enabled. - # These are purely for internal debugging during development, so - # there's no need to control them with --debug= options; they're - # controlled by changing the source code. - SCons.Debug.dump_caller_counts() - SCons.Taskmaster.dump_stats() - - if print_time: - total_time = time.time() - SCons.Script.start_time - if num_jobs == 1: - ct = cumulative_command_time - else: - if last_command_end is None or first_command_start is None: - ct = 0.0 - else: - ct = last_command_end - first_command_start - scons_time = total_time - sconscript_time - ct - print("Total build time: %f seconds"%total_time) - print("Total SConscript file execution time: %f seconds"%sconscript_time) - print("Total SCons execution time: %f seconds"%scons_time) - print("Total command execution time: %f seconds"%ct) - - sys.exit(exit_status) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/SConsOptions.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/SConsOptions.py deleted file mode 100644 index 8b25cc43266..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/SConsOptions.py +++ /dev/null @@ -1,984 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without 
limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Script/SConsOptions.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import optparse -import re -import sys -import textwrap - -no_hyphen_re = re.compile(r'(\s+|(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') - -try: - from gettext import gettext -except ImportError: - def gettext(message): - return message -_ = gettext - -import SCons.Node.FS -import SCons.Platform.virtualenv -import SCons.Warnings - -OptionValueError = optparse.OptionValueError -SUPPRESS_HELP = optparse.SUPPRESS_HELP - -diskcheck_all = SCons.Node.FS.diskcheck_types() - -def diskcheck_convert(value): - if value is None: - return [] - if not SCons.Util.is_List(value): - value = value.split(',') - result = [] - for v in value: - v = v.lower() - if v == 'all': - result = diskcheck_all - elif v == 'none': - result = [] - elif v in diskcheck_all: - result.append(v) - else: - raise ValueError(v) - return result - - -class SConsValues(optparse.Values): - """ - Holder class for uniform access to SCons options, regardless - of whether or not they can be set on the command line or in the - SConscript files (using 
the SetOption() function). - - A SCons option value can originate three different ways: - - 1) set on the command line; - 2) set in an SConscript file; - 3) the default setting (from the the op.add_option() - calls in the Parser() function, below). - - The command line always overrides a value set in a SConscript file, - which in turn always overrides default settings. Because we want - to support user-specified options in the SConscript file itself, - though, we may not know about all of the options when the command - line is first parsed, so we can't make all the necessary precedence - decisions at the time the option is configured. - - The solution implemented in this class is to keep these different sets - of settings separate (command line, SConscript file, and default) - and to override the __getattr__() method to check them in turn. - This should allow the rest of the code to just fetch values as - attributes of an instance of this class, without having to worry - about where they came from. - - Note that not all command line options are settable from SConscript - files, and the ones that are must be explicitly added to the - "settable" list in this class, and optionally validated and coerced - in the set_option() method. - """ - - def __init__(self, defaults): - self.__dict__['__defaults__'] = defaults - self.__dict__['__SConscript_settings__'] = {} - - def __getattr__(self, attr): - """ - Fetches an options value, checking first for explicit settings - from the command line (which are direct attributes), then the - SConscript file settings, then the default values. 
- """ - try: - return self.__dict__[attr] - except KeyError: - try: - return self.__dict__['__SConscript_settings__'][attr] - except KeyError: - try: - return getattr(self.__dict__['__defaults__'], attr) - except KeyError: - # Added because with py3 this is a new class, - # not a classic class, and due to the way - # In that case it will create an object without - # __defaults__, and then query for __setstate__ - # which will throw an exception of KeyError - # deepcopy() is expecting AttributeError if __setstate__ - # is not available. - raise AttributeError(attr) - - - settable = [ - 'clean', - 'diskcheck', - 'duplicate', - 'help', - 'implicit_cache', - 'max_drift', - 'md5_chunksize', - 'no_exec', - 'num_jobs', - 'random', - 'stack_size', - 'warn', - 'silent' - ] - - def set_option(self, name, value): - """ - Sets an option from an SConscript file. - """ - if name not in self.settable: - raise SCons.Errors.UserError("This option is not settable from a SConscript file: %s"%name) - - if name == 'num_jobs': - try: - value = int(value) - if value < 1: - raise ValueError - except ValueError: - raise SCons.Errors.UserError("A positive integer is required: %s"%repr(value)) - elif name == 'max_drift': - try: - value = int(value) - except ValueError: - raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) - elif name == 'duplicate': - try: - value = str(value) - except ValueError: - raise SCons.Errors.UserError("A string is required: %s"%repr(value)) - if value not in SCons.Node.FS.Valid_Duplicates: - raise SCons.Errors.UserError("Not a valid duplication style: %s" % value) - # Set the duplicate style right away so it can affect linking - # of SConscript files. 
- SCons.Node.FS.set_duplicate(value) - elif name == 'diskcheck': - try: - value = diskcheck_convert(value) - except ValueError as v: - raise SCons.Errors.UserError("Not a valid diskcheck value: %s"%v) - if 'diskcheck' not in self.__dict__: - # No --diskcheck= option was specified on the command line. - # Set this right away so it can affect the rest of the - # file/Node lookups while processing the SConscript files. - SCons.Node.FS.set_diskcheck(value) - elif name == 'stack_size': - try: - value = int(value) - except ValueError: - raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) - elif name == 'md5_chunksize': - try: - value = int(value) - except ValueError: - raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) - elif name == 'warn': - if SCons.Util.is_String(value): - value = [value] - value = self.__SConscript_settings__.get(name, []) + value - SCons.Warnings.process_warn_strings(value) - - self.__SConscript_settings__[name] = value - - -class SConsOption(optparse.Option): - def convert_value(self, opt, value): - if value is not None: - if self.nargs in (1, '?'): - return self.check_value(opt, value) - else: - return tuple([self.check_value(opt, v) for v in value]) - - def process(self, opt, value, values, parser): - - # First, convert the value(s) to the right type. Howl if any - # value(s) are bogus. - value = self.convert_value(opt, value) - - # And then take whatever action is expected of us. - # This is a separate method to make life easier for - # subclasses to add new actions. - return self.take_action( - self.action, self.dest, opt, value, values, parser) - - def _check_nargs_optional(self): - if self.nargs == '?' and self._short_opts: - fmt = "option %s: nargs='?' 
is incompatible with short options" - raise SCons.Errors.UserError(fmt % self._short_opts[0]) - - CHECK_METHODS = optparse.Option.CHECK_METHODS + [_check_nargs_optional] - CONST_ACTIONS = optparse.Option.CONST_ACTIONS + optparse.Option.TYPED_ACTIONS - -class SConsOptionGroup(optparse.OptionGroup): - """ - A subclass for SCons-specific option groups. - - The only difference between this and the base class is that we print - the group's help text flush left, underneath their own title but - lined up with the normal "SCons Options". - """ - def format_help(self, formatter): - """ - Format an option group's help text, outdenting the title so it's - flush with the "SCons Options" title we print at the top. - """ - formatter.dedent() - result = formatter.format_heading(self.title) - formatter.indent() - result = result + optparse.OptionContainer.format_help(self, formatter) - return result - -class SConsOptionParser(optparse.OptionParser): - preserve_unknown_options = False - - def error(self, msg): - # overridden OptionValueError exception handler - self.print_usage(sys.stderr) - sys.stderr.write("SCons Error: %s\n" % msg) - sys.exit(2) - - def _process_long_opt(self, rargs, values): - """ - SCons-specific processing of long options. - - This is copied directly from the normal - optparse._process_long_opt() method, except that, if configured - to do so, we catch the exception thrown when an unknown option - is encountered and just stick it back on the "leftover" arguments - for later (re-)processing. - """ - arg = rargs.pop(0) - - # Value explicitly attached to arg? Pretend it's the next argument. - if "=" in arg: - (opt, next_arg) = arg.split("=", 1) - rargs.insert(0, next_arg) - had_explicit_value = True - else: - opt = arg - had_explicit_value = False - - try: - if opt != self._match_long_opt(opt): - raise optparse.BadOptionError( - "'%s'. Did you mean '%s'?" 
- % (opt, self._match_long_opt(opt)) - ) - except optparse.BadOptionError: - if self.preserve_unknown_options: - # SCons-specific: if requested, add unknown options to - # the "leftover arguments" list for later processing. - self.largs.append(arg) - if had_explicit_value: - # The unknown option will be re-processed later, - # so undo the insertion of the explicit value. - rargs.pop(0) - return - raise - - option = self._long_opt[opt] - if option.takes_value(): - nargs = option.nargs - if nargs == '?': - if had_explicit_value: - value = rargs.pop(0) - else: - value = option.const - elif len(rargs) < nargs: - if nargs == 1: - if not option.choices: - self.error(_("%s option requires an argument") % opt) - else: - msg = _("%s option requires an argument " % opt) - msg += _("(choose from %s)" - % ', '.join(option.choices)) - self.error(msg) - else: - self.error(_("%s option requires %d arguments") - % (opt, nargs)) - elif nargs == 1: - value = rargs.pop(0) - else: - value = tuple(rargs[0:nargs]) - del rargs[0:nargs] - - elif had_explicit_value: - self.error(_("%s option does not take a value") % opt) - - else: - value = None - - option.process(opt, value, values, self) - - def reparse_local_options(self): - """ - Re-parse the leftover command-line options stored - in self.largs, so that any value overridden on the - command line is immediately available if the user turns - around and does a GetOption() right away. - - We mimic the processing of the single args - in the original OptionParser._process_args(), but here we - allow exact matches for long-opts only (no partial - argument names!). - - Else, this would lead to problems in add_local_option() - below. When called from there, we try to reparse the - command-line arguments that - 1. haven't been processed so far (self.largs), but - 2. are possibly not added to the list of options yet. - - So, when we only have a value for "--myargument" yet, - a command-line argument of "--myarg=test" would set it. 
- Responsible for this behaviour is the method - _match_long_opt(), which allows for partial matches of - the option name, as long as the common prefix appears to - be unique. - This would lead to further confusion, because we might want - to add another option "--myarg" later on (see issue #2929). - - """ - rargs = [] - largs_restore = [] - # Loop over all remaining arguments - skip = False - for l in self.largs: - if skip: - # Accept all remaining arguments as they are - largs_restore.append(l) - else: - if len(l) > 2 and l[0:2] == "--": - # Check long option - lopt = (l,) - if "=" in l: - # Split into option and value - lopt = l.split("=", 1) - - if lopt[0] in self._long_opt: - # Argument is already known - rargs.append('='.join(lopt)) - else: - # Not known yet, so reject for now - largs_restore.append('='.join(lopt)) - else: - if l == "--" or l == "-": - # Stop normal processing and don't - # process the rest of the command-line opts - largs_restore.append(l) - skip = True - else: - rargs.append(l) - - # Parse the filtered list - self.parse_args(rargs, self.values) - # Restore the list of remaining arguments for the - # next call of AddOption/add_local_option... - self.largs = self.largs + largs_restore - - def add_local_option(self, *args, **kw): - """ - Adds a local option to the parser. - - This is initiated by an AddOption() call to add a user-defined - command-line option. We add the option to a separate option - group for the local options, creating the group if necessary. - """ - try: - group = self.local_option_group - except AttributeError: - group = SConsOptionGroup(self, 'Local Options') - group = self.add_option_group(group) - self.local_option_group = group - - result = group.add_option(*args, **kw) - - if result: - # The option was added successfully. 
We now have to add the - # default value to our object that holds the default values - # (so that an attempt to fetch the option's attribute will - # yield the default value when not overridden) and then - # we re-parse the leftover command-line options, so that - # any value overridden on the command line is immediately - # available if the user turns around and does a GetOption() - # right away. - setattr(self.values.__defaults__, result.dest, result.default) - self.reparse_local_options() - - return result - -class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter): - def format_usage(self, usage): - return "usage: %s\n" % usage - - def format_heading(self, heading): - """ - This translates any heading of "options" or "Options" into - "SCons Options." Unfortunately, we have to do this here, - because those titles are hard-coded in the optparse calls. - """ - if heading == 'Options': - heading = "SCons Options" - return optparse.IndentedHelpFormatter.format_heading(self, heading) - - def format_option(self, option): - """ - A copy of the normal optparse.IndentedHelpFormatter.format_option() - method. This has been snarfed so we can modify text wrapping to - out liking: - - -- add our own regular expression that doesn't break on hyphens - (so things like --no-print-directory don't get broken); - - -- wrap the list of options themselves when it's too long - (the wrapper.fill(opts) call below); - - -- set the subsequent_indent when wrapping the help_text. - """ - # The help for each option consists of two parts: - # * the opt strings and metavars - # eg. ("-x", or "-fFILENAME, --file=FILENAME") - # * the user-supplied help string - # eg. ("turn on expert mode", "read data from FILENAME") - # - # If possible, we write both of these on the same line: - # -x turn on expert mode - # - # But if the opt string list is too long, we put the help - # string on a second line, indented to the same column it would - # start in if it fit on the first line. 
- # -fFILENAME, --file=FILENAME - # read data from FILENAME - result = [] - - opts = self.option_strings[option] - opt_width = self.help_position - self.current_indent - 2 - if len(opts) > opt_width: - wrapper = textwrap.TextWrapper(width=self.width, - initial_indent = ' ', - subsequent_indent = ' ') - wrapper.wordsep_re = no_hyphen_re - opts = wrapper.fill(opts) + '\n' - indent_first = self.help_position - else: # start help on same line as opts - opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts) - indent_first = 0 - result.append(opts) - if option.help: - - help_text = self.expand_default(option) - - # SCons: indent every line of the help text but the first. - wrapper = textwrap.TextWrapper(width=self.help_width, - subsequent_indent = ' ') - wrapper.wordsep_re = no_hyphen_re - help_lines = wrapper.wrap(help_text) - result.append("%*s%s\n" % (indent_first, "", help_lines[0])) - for line in help_lines[1:]: - result.append("%*s%s\n" % (self.help_position, "", line)) - elif opts[-1] != "\n": - result.append("\n") - return "".join(result) - -def Parser(version): - """ - Returns an options parser object initialized with the standard - SCons options. - """ - - formatter = SConsIndentedHelpFormatter(max_help_position=30) - - op = SConsOptionParser(option_class=SConsOption, - add_help_option=False, - formatter=formatter, - usage="usage: scons [OPTION] [TARGET] ...",) - - op.preserve_unknown_options = True - op.version = version - - # Add the options to the parser we just created. - # - # These are in the order we want them to show up in the -H help - # text, basically alphabetical. 
Each op.add_option() call below - # should have a consistent format: - # - # op.add_option("-L", "--long-option-name", - # nargs=1, type="string", - # dest="long_option_name", default='foo', - # action="callback", callback=opt_long_option, - # help="help text goes here", - # metavar="VAR") - # - # Even though the optparse module constructs reasonable default - # destination names from the long option names, we're going to be - # explicit about each one for easier readability and so this code - # will at least show up when grepping the source for option attribute - # names, or otherwise browsing the source code. - - # options ignored for compatibility - def opt_ignore(option, opt, value, parser): - sys.stderr.write("Warning: ignoring %s option\n" % opt) - op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w", - "--environment-overrides", - "--no-keep-going", - "--no-print-directory", - "--print-directory", - "--stop", - "--touch", - action="callback", callback=opt_ignore, - help="Ignored for compatibility.") - - op.add_option('-c', '--clean', '--remove', - dest="clean", default=False, - action="store_true", - help="Remove specified targets and dependencies.") - - op.add_option('-C', '--directory', - nargs=1, type="string", - dest="directory", default=[], - action="append", - help="Change to DIR before doing anything.", - metavar="DIR") - - op.add_option('--cache-debug', - nargs=1, - dest="cache_debug", default=None, - action="store", - help="Print CacheDir debug info to FILE.", - metavar="FILE") - - op.add_option('--cache-disable', '--no-cache', - dest='cache_disable', default=False, - action="store_true", - help="Do not retrieve built targets from CacheDir.") - - op.add_option('--cache-force', '--cache-populate', - dest='cache_force', default=False, - action="store_true", - help="Copy already-built targets into the CacheDir.") - - op.add_option('--cache-readonly', - dest='cache_readonly', default=False, - action="store_true", - help="Do not update CacheDir with 
built targets.") - - op.add_option('--cache-show', - dest='cache_show', default=False, - action="store_true", - help="Print build actions for files from CacheDir.") - - def opt_invalid(group, value, options): - """report an invalid option from a group""" - errmsg = "`%s' is not a valid %s option type, try:\n" % (value, group) - return errmsg + " %s" % ", ".join(options) - - def opt_invalid_rm(group, value, msg): - """report an invalid option from a group: recognized but removed""" - errmsg = "`%s' is not a valid %s option type " % (value, group) - return errmsg + msg - - config_options = ["auto", "force" ,"cache"] - - opt_config_help = "Controls Configure subsystem: %s." \ - % ", ".join(config_options) - - op.add_option('--config', - nargs=1, choices=config_options, - dest="config", default="auto", - help = opt_config_help, - metavar="MODE") - - op.add_option('-D', - dest="climb_up", default=None, - action="store_const", const=2, - help="Search up directory tree for SConstruct, " - "build all Default() targets.") - - deprecated_debug_options = {} - - removed_debug_options = { - "dtree" : '; please use --tree=derived instead', - "nomemoizer" : '; there is no replacement', - "stree" : '; please use --tree=all,status instead', - "tree" : '; please use --tree=all instead', - } - - debug_options = ["count", "duplicate", "explain", "findlibs", - "includes", "memoizer", "memory", "objects", - "pdb", "prepare", "presub", "stacktrace", - "time", "action-timestamps"] - - def opt_debug(option, opt, value__, parser, - debug_options=debug_options, - deprecated_debug_options=deprecated_debug_options, - removed_debug_options=removed_debug_options): - for value in value__.split(','): - if value in debug_options: - parser.values.debug.append(value) - elif value in deprecated_debug_options: - parser.values.debug.append(value) - try: - parser.values.delayed_warnings - except AttributeError: - parser.values.delayed_warnings = [] - msg = deprecated_debug_options[value] - w = "The 
--debug=%s option is deprecated%s." % (value, msg) - t = (SCons.Warnings.DeprecatedDebugOptionsWarning, w) - parser.values.delayed_warnings.append(t) - elif value in removed_debug_options: - msg = removed_debug_options[value] - raise OptionValueError(opt_invalid_rm('debug', value, msg)) - else: - raise OptionValueError(opt_invalid('debug', value, debug_options)) - - opt_debug_help = "Print various types of debugging information: %s." \ - % ", ".join(debug_options) - op.add_option('--debug', - nargs=1, type="string", - dest="debug", default=[], - action="callback", callback=opt_debug, - help=opt_debug_help, - metavar="TYPE") - - def opt_diskcheck(option, opt, value, parser): - try: - diskcheck_value = diskcheck_convert(value) - except ValueError as e: - raise OptionValueError("`%s' is not a valid diskcheck type" % e) - setattr(parser.values, option.dest, diskcheck_value) - - op.add_option('--diskcheck', - nargs=1, type="string", - dest='diskcheck', default=None, - action="callback", callback=opt_diskcheck, - help="Enable specific on-disk checks.", - metavar="TYPE") - - def opt_duplicate(option, opt, value, parser): - if value not in SCons.Node.FS.Valid_Duplicates: - raise OptionValueError(opt_invalid('duplication', value, - SCons.Node.FS.Valid_Duplicates)) - setattr(parser.values, option.dest, value) - # Set the duplicate style right away so it can affect linking - # of SConscript files. - SCons.Node.FS.set_duplicate(value) - - opt_duplicate_help = "Set the preferred duplication methods. 
Must be one of " \ - + ", ".join(SCons.Node.FS.Valid_Duplicates) - - op.add_option('--duplicate', - nargs=1, type="string", - dest="duplicate", default='hard-soft-copy', - action="callback", callback=opt_duplicate, - help=opt_duplicate_help) - - if not SCons.Platform.virtualenv.virtualenv_enabled_by_default: - op.add_option('--enable-virtualenv', - dest="enable_virtualenv", - action="store_true", - help="Import certain virtualenv variables to SCons") - - op.add_option('-f', '--file', '--makefile', '--sconstruct', - nargs=1, type="string", - dest="file", default=[], - action="append", - help="Read FILE as the top-level SConstruct file.") - - op.add_option('-h', '--help', - dest="help", default=False, - action="store_true", - help="Print defined help message, or this one.") - - op.add_option("-H", "--help-options", - action="help", - help="Print this message and exit.") - - op.add_option('-i', '--ignore-errors', - dest='ignore_errors', default=False, - action="store_true", - help="Ignore errors from build actions.") - - op.add_option('-I', '--include-dir', - nargs=1, - dest='include_dir', default=[], - action="append", - help="Search DIR for imported Python modules.", - metavar="DIR") - - op.add_option('--ignore-virtualenv', - dest="ignore_virtualenv", - action="store_true", - help="Do not import virtualenv variables to SCons") - - op.add_option('--implicit-cache', - dest='implicit_cache', default=False, - action="store_true", - help="Cache implicit dependencies") - - def opt_implicit_deps(option, opt, value, parser): - setattr(parser.values, 'implicit_cache', True) - setattr(parser.values, option.dest, True) - - op.add_option('--implicit-deps-changed', - dest="implicit_deps_changed", default=False, - action="callback", callback=opt_implicit_deps, - help="Ignore cached implicit dependencies.") - - op.add_option('--implicit-deps-unchanged', - dest="implicit_deps_unchanged", default=False, - action="callback", callback=opt_implicit_deps, - help="Ignore changes in 
implicit dependencies.") - - op.add_option('--interact', '--interactive', - dest='interactive', default=False, - action="store_true", - help="Run in interactive mode.") - - op.add_option('-j', '--jobs', - nargs=1, type="int", - dest="num_jobs", default=1, - action="store", - help="Allow N jobs at once.", - metavar="N") - - op.add_option('-k', '--keep-going', - dest='keep_going', default=False, - action="store_true", - help="Keep going when a target can't be made.") - - op.add_option('--max-drift', - nargs=1, type="int", - dest='max_drift', default=SCons.Node.FS.default_max_drift, - action="store", - help="Set maximum system clock drift to N seconds.", - metavar="N") - - op.add_option('--md5-chunksize', - nargs=1, type="int", - dest='md5_chunksize', default=SCons.Node.FS.File.md5_chunksize, - action="store", - help="Set chunk-size for MD5 signature computation to N kilobytes.", - metavar="N") - - op.add_option('-n', '--no-exec', '--just-print', '--dry-run', '--recon', - dest='no_exec', default=False, - action="store_true", - help="Don't build; just print commands.") - - op.add_option('--no-site-dir', - dest='no_site_dir', default=False, - action="store_true", - help="Don't search or use the usual site_scons dir.") - - op.add_option('--profile', - nargs=1, - dest="profile_file", default=None, - action="store", - help="Profile SCons and put results in FILE.", - metavar="FILE") - - op.add_option('-q', '--question', - dest="question", default=False, - action="store_true", - help="Don't build; exit status says if up to date.") - - op.add_option('-Q', - dest='no_progress', default=False, - action="store_true", - help="Suppress \"Reading/Building\" progress messages.") - - op.add_option('--random', - dest="random", default=False, - action="store_true", - help="Build dependencies in random order.") - - op.add_option('-s', '--silent', '--quiet', - dest="silent", default=False, - action="store_true", - help="Don't print commands.") - - op.add_option('--site-dir', - nargs=1, - 
dest='site_dir', default=None, - action="store", - help="Use DIR instead of the usual site_scons dir.", - metavar="DIR") - - op.add_option('--stack-size', - nargs=1, type="int", - dest='stack_size', - action="store", - help="Set the stack size of the threads used to run jobs to N kilobytes.", - metavar="N") - - op.add_option('--taskmastertrace', - nargs=1, - dest="taskmastertrace_file", default=None, - action="store", - help="Trace Node evaluation to FILE.", - metavar="FILE") - - tree_options = ["all", "derived", "prune", "status"] - - def opt_tree(option, opt, value, parser, tree_options=tree_options): - from . import Main - tp = Main.TreePrinter() - for o in value.split(','): - if o == 'all': - tp.derived = False - elif o == 'derived': - tp.derived = True - elif o == 'prune': - tp.prune = True - elif o == 'status': - tp.status = True - else: - raise OptionValueError(opt_invalid('--tree', o, tree_options)) - parser.values.tree_printers.append(tp) - - opt_tree_help = "Print a dependency tree in various formats: %s." 
\ - % ", ".join(tree_options) - - op.add_option('--tree', - nargs=1, type="string", - dest="tree_printers", default=[], - action="callback", callback=opt_tree, - help=opt_tree_help, - metavar="OPTIONS") - - op.add_option('-u', '--up', '--search-up', - dest="climb_up", default=0, - action="store_const", const=1, - help="Search up directory tree for SConstruct, " - "build targets at or below current directory.") - - op.add_option('-U', - dest="climb_up", default=0, - action="store_const", const=3, - help="Search up directory tree for SConstruct, " - "build Default() targets from local SConscript.") - - def opt_version(option, opt, value, parser): - sys.stdout.write(parser.version + '\n') - sys.exit(0) - op.add_option("-v", "--version", - action="callback", callback=opt_version, - help="Print the SCons version number and exit.") - - def opt_warn(option, opt, value, parser, tree_options=tree_options): - if SCons.Util.is_String(value): - value = value.split(',') - parser.values.warn.extend(value) - - op.add_option('--warn', '--warning', - nargs=1, type="string", - dest="warn", default=[], - action="callback", callback=opt_warn, - help="Enable or disable warnings.", - metavar="WARNING-SPEC") - - op.add_option('-Y', '--repository', '--srcdir', - nargs=1, - dest="repository", default=[], - action="append", - help="Search REPOSITORY for source and target files.") - - - # Options from Make and Cons classic that we do not yet support, - # but which we may support someday and whose (potential) meanings - # we don't want to change. These all get a "the -X option is not - # yet implemented" message and don't show up in the help output. 
- - def opt_not_yet(option, opt, value, parser): - msg = "Warning: the %s option is not yet implemented\n" % opt - sys.stderr.write(msg) - - op.add_option('-l', '--load-average', '--max-load', - nargs=1, type="float", - dest="load_average", default=0, - action="callback", callback=opt_not_yet, - # action="store", - # help="Don't start multiple jobs unless load is below " - # "LOAD-AVERAGE." - help=SUPPRESS_HELP) - op.add_option('--list-actions', - dest="list_actions", - action="callback", callback=opt_not_yet, - # help="Don't build; list files and build actions." - help=SUPPRESS_HELP) - op.add_option('--list-derived', - dest="list_derived", - action="callback", callback=opt_not_yet, - # help="Don't build; list files that would be built." - help=SUPPRESS_HELP) - op.add_option('--list-where', - dest="list_where", - action="callback", callback=opt_not_yet, - # help="Don't build; list files and where defined." - help=SUPPRESS_HELP) - op.add_option('-o', '--old-file', '--assume-old', - nargs=1, type="string", - dest="old_file", default=[], - action="callback", callback=opt_not_yet, - # action="append", - # help = "Consider FILE to be old; don't rebuild it." - help=SUPPRESS_HELP) - op.add_option('--override', - nargs=1, type="string", - action="callback", callback=opt_not_yet, - dest="override", - # help="Override variables as specified in FILE." - help=SUPPRESS_HELP) - op.add_option('-p', - action="callback", callback=opt_not_yet, - dest="p", - # help="Print internal environments/objects." - help=SUPPRESS_HELP) - op.add_option('-r', '-R', '--no-builtin-rules', '--no-builtin-variables', - action="callback", callback=opt_not_yet, - dest="no_builtin_rules", - # help="Clear default environments and variables." - help=SUPPRESS_HELP) - op.add_option('--write-filenames', - nargs=1, type="string", - dest="write_filenames", - action="callback", callback=opt_not_yet, - # help="Write all filenames examined into FILE." 
- help=SUPPRESS_HELP) - op.add_option('-W', '--new-file', '--assume-new', '--what-if', - nargs=1, type="string", - dest="new_file", - action="callback", callback=opt_not_yet, - # help="Consider FILE to be changed." - help=SUPPRESS_HELP) - op.add_option('--warn-undefined-variables', - dest="warn_undefined_variables", - action="callback", callback=opt_not_yet, - # help="Warn when an undefined variable is referenced." - help=SUPPRESS_HELP) - return op - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/SConscript.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/SConscript.py deleted file mode 100644 index 97073ba4c24..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/SConscript.py +++ /dev/null @@ -1,686 +0,0 @@ -"""SCons.Script.SConscript - -This module defines the Python API provided to SConscript and SConstruct -files. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Script/SConscript.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Environment -import SCons.Errors -import SCons.Node -import SCons.Node.Alias -import SCons.Node.FS -import SCons.Platform -import SCons.SConf -import SCons.Script.Main -import SCons.Tool -from SCons.Util import is_List, is_String, is_Dict, flatten - -from . import Main - -import collections -import os -import os.path -import re -import sys -import traceback -import time - -class SConscriptReturn(Exception): - pass - -launch_dir = os.path.abspath(os.curdir) - -GlobalDict = None - -# global exports set by Export(): -global_exports = {} - -# chdir flag -sconscript_chdir = 1 - -def get_calling_namespaces(): - """Return the locals and globals for the function that called - into this module in the current call stack.""" - try: 1//0 - except ZeroDivisionError: - # Don't start iterating with the current stack-frame to - # prevent creating reference cycles (f_back is safe). - frame = sys.exc_info()[2].tb_frame.f_back - - # Find the first frame that *isn't* from this file. This means - # that we expect all of the SCons frames that implement an Export() - # or SConscript() call to be in this file, so that we can identify - # the first non-Script.SConscript frame as the user's local calling - # environment, and the locals and globals dictionaries from that - # frame as the calling namespaces. See the comment below preceding - # the DefaultEnvironmentCall block for even more explanation. 
- while frame.f_globals.get("__name__") == __name__: - frame = frame.f_back - - return frame.f_locals, frame.f_globals - - -def compute_exports(exports): - """Compute a dictionary of exports given one of the parameters - to the Export() function or the exports argument to SConscript().""" - - loc, glob = get_calling_namespaces() - - retval = {} - try: - for export in exports: - if is_Dict(export): - retval.update(export) - else: - try: - retval[export] = loc[export] - except KeyError: - retval[export] = glob[export] - except KeyError as x: - raise SCons.Errors.UserError("Export of non-existent variable '%s'"%x) - - return retval - -class Frame(object): - """A frame on the SConstruct/SConscript call stack""" - def __init__(self, fs, exports, sconscript): - self.globals = BuildDefaultGlobals() - self.retval = None - self.prev_dir = fs.getcwd() - self.exports = compute_exports(exports) # exports from the calling SConscript - # make sure the sconscript attr is a Node. - if isinstance(sconscript, SCons.Node.Node): - self.sconscript = sconscript - elif sconscript == '-': - self.sconscript = None - else: - self.sconscript = fs.File(str(sconscript)) - -# the SConstruct/SConscript call stack: -call_stack = [] - -# For documentation on the methods in this file, see the scons man-page - -def Return(*vars, **kw): - retval = [] - try: - fvars = flatten(vars) - for var in fvars: - for v in var.split(): - retval.append(call_stack[-1].globals[v]) - except KeyError as x: - raise SCons.Errors.UserError("Return of non-existent variable '%s'"%x) - - if len(retval) == 1: - call_stack[-1].retval = retval[0] - else: - call_stack[-1].retval = tuple(retval) - - stop = kw.get('stop', True) - - if stop: - raise SConscriptReturn - - -stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :) - -def handle_missing_SConscript(f, must_exist=None): - """Take appropriate action on missing file in SConscript() call. - - Print a warning or raise an exception on missing file. 
- On first warning, print a deprecation message. - - Args: - f (str): path of missing configuration file - must_exist (bool): raise exception if file does not exist - - Raises: - UserError if 'must_exist' is True or if global - SCons.Script._no_missing_sconscript is True. - """ - - if must_exist or (SCons.Script._no_missing_sconscript and must_exist is not False): - msg = "Fatal: missing SConscript '%s'" % f.get_internal_path() - raise SCons.Errors.UserError(msg) - - if SCons.Script._warn_missing_sconscript_deprecated: - msg = "Calling missing SConscript without error is deprecated.\n" + \ - "Transition by adding must_exist=0 to SConscript calls.\n" + \ - "Missing SConscript '%s'" % f.get_internal_path() - SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg) - SCons.Script._warn_missing_sconscript_deprecated = False - else: - msg = "Ignoring missing SConscript '%s'" % f.get_internal_path() - SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg) - -def _SConscript(fs, *files, **kw): - top = fs.Top - sd = fs.SConstruct_dir.rdir() - exports = kw.get('exports', []) - - # evaluate each SConscript file - results = [] - for fn in files: - call_stack.append(Frame(fs, exports, fn)) - old_sys_path = sys.path - try: - SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1 - if fn == "-": - exec(sys.stdin.read(), call_stack[-1].globals) - else: - if isinstance(fn, SCons.Node.Node): - f = fn - else: - f = fs.File(str(fn)) - _file_ = None - - # Change directory to the top of the source - # tree to make sure the os's cwd and the cwd of - # fs match so we can open the SConscript. - fs.chdir(top, change_os_dir=1) - if f.rexists(): - actual = f.rfile() - _file_ = open(actual.get_abspath(), "rb") - elif f.srcnode().rexists(): - actual = f.srcnode().rfile() - _file_ = open(actual.get_abspath(), "rb") - elif f.has_src_builder(): - # The SConscript file apparently exists in a source - # code management system. 
Build it, but then clear - # the builder so that it doesn't get built *again* - # during the actual build phase. - f.build() - f.built() - f.builder_set(None) - if f.exists(): - _file_ = open(f.get_abspath(), "rb") - if _file_: - # Chdir to the SConscript directory. Use a path - # name relative to the SConstruct file so that if - # we're using the -f option, we're essentially - # creating a parallel SConscript directory structure - # in our local directory tree. - # - # XXX This is broken for multiple-repository cases - # where the SConstruct and SConscript files might be - # in different Repositories. For now, cross that - # bridge when someone comes to it. - try: - src_dir = kw['src_dir'] - except KeyError: - ldir = fs.Dir(f.dir.get_path(sd)) - else: - ldir = fs.Dir(src_dir) - if not ldir.is_under(f.dir): - # They specified a source directory, but - # it's above the SConscript directory. - # Do the sensible thing and just use the - # SConcript directory. - ldir = fs.Dir(f.dir.get_path(sd)) - try: - fs.chdir(ldir, change_os_dir=sconscript_chdir) - except OSError: - # There was no local directory, so we should be - # able to chdir to the Repository directory. - # Note that we do this directly, not through - # fs.chdir(), because we still need to - # interpret the stuff within the SConscript file - # relative to where we are logically. - fs.chdir(ldir, change_os_dir=0) - os.chdir(actual.dir.get_abspath()) - - # Append the SConscript directory to the beginning - # of sys.path so Python modules in the SConscript - # directory can be easily imported. - sys.path = [ f.dir.get_abspath() ] + sys.path - - # This is the magic line that actually reads up - # and executes the stuff in the SConscript file. - # The locals for this frame contain the special - # bottom-of-the-stack marker so that any - # exceptions that occur when processing this - # SConscript can base the printed frames at this - # level and not show SCons internals as well. 
- call_stack[-1].globals.update({stack_bottom:1}) - old_file = call_stack[-1].globals.get('__file__') - try: - del call_stack[-1].globals['__file__'] - except KeyError: - pass - try: - try: - if Main.print_time: - time1 = time.time() - scriptdata = _file_.read() - scriptname = _file_.name - _file_.close() - exec(compile(scriptdata, scriptname, 'exec'), call_stack[-1].globals) - except SConscriptReturn: - pass - finally: - if Main.print_time: - time2 = time.time() - print('SConscript:%s took %0.3f ms' % (f.get_abspath(), (time2 - time1) * 1000.0)) - - if old_file is not None: - call_stack[-1].globals.update({__file__:old_file}) - else: - handle_missing_SConscript(f, kw.get('must_exist', None)) - - finally: - SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1 - sys.path = old_sys_path - frame = call_stack.pop() - try: - fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir) - except OSError: - # There was no local directory, so chdir to the - # Repository directory. Like above, we do this - # directly. - fs.chdir(frame.prev_dir, change_os_dir=0) - rdir = frame.prev_dir.rdir() - rdir._create() # Make sure there's a directory there. - try: - os.chdir(rdir.get_abspath()) - except OSError as e: - # We still couldn't chdir there, so raise the error, - # but only if actions are being executed. - # - # If the -n option was used, the directory would *not* - # have been created and we should just carry on and - # let things muddle through. This isn't guaranteed - # to work if the SConscript files are reading things - # from disk (for example), but it should work well - # enough for most configurations. - if SCons.Action.execute_actions: - raise e - - results.append(frame.retval) - - # if we only have one script, don't return a tuple - if len(results) == 1: - return results[0] - else: - return tuple(results) - -def SConscript_exception(file=sys.stderr): - """Print an exception stack trace just for the SConscript file(s). 
- This will show users who have Python errors where the problem is, - without cluttering the output with all of the internal calls leading - up to where we exec the SConscript.""" - exc_type, exc_value, exc_tb = sys.exc_info() - tb = exc_tb - while tb and stack_bottom not in tb.tb_frame.f_locals: - tb = tb.tb_next - if not tb: - # We did not find our exec statement, so this was actually a bug - # in SCons itself. Show the whole stack. - tb = exc_tb - stack = traceback.extract_tb(tb) - try: - type = exc_type.__name__ - except AttributeError: - type = str(exc_type) - if type[:11] == "exceptions.": - type = type[11:] - file.write('%s: %s:\n' % (type, exc_value)) - for fname, line, func, text in stack: - file.write(' File "%s", line %d:\n' % (fname, line)) - file.write(' %s\n' % text) - -def annotate(node): - """Annotate a node with the stack frame describing the - SConscript file and line number that created it.""" - tb = sys.exc_info()[2] - while tb and stack_bottom not in tb.tb_frame.f_locals: - tb = tb.tb_next - if not tb: - # We did not find any exec of an SConscript file: what?! - raise SCons.Errors.InternalError("could not find SConscript stack frame") - node.creator = traceback.extract_stack(tb)[0] - -# The following line would cause each Node to be annotated using the -# above function. Unfortunately, this is a *huge* performance hit, so -# leave this disabled until we find a more efficient mechanism. -#SCons.Node.Annotate = annotate - -class SConsEnvironment(SCons.Environment.Base): - """An Environment subclass that contains all of the methods that - are particular to the wrapper SCons interface and which aren't - (or shouldn't be) part of the build engine itself. - - Note that not all of the methods of this class have corresponding - global functions, there are some private methods. - """ - - # - # Private methods of an SConsEnvironment. 
- # - def _exceeds_version(self, major, minor, v_major, v_minor): - """Return 1 if 'major' and 'minor' are greater than the version - in 'v_major' and 'v_minor', and 0 otherwise.""" - return (major > v_major or (major == v_major and minor > v_minor)) - - def _get_major_minor_revision(self, version_string): - """Split a version string into major, minor and (optionally) - revision parts. - - This is complicated by the fact that a version string can be - something like 3.2b1.""" - version = version_string.split(' ')[0].split('.') - v_major = int(version[0]) - v_minor = int(re.match(r'\d+', version[1]).group()) - if len(version) >= 3: - v_revision = int(re.match(r'\d+', version[2]).group()) - else: - v_revision = 0 - return v_major, v_minor, v_revision - - def _get_SConscript_filenames(self, ls, kw): - """ - Convert the parameters passed to SConscript() calls into a list - of files and export variables. If the parameters are invalid, - throws SCons.Errors.UserError. Returns a tuple (l, e) where l - is a list of SConscript filenames and e is a list of exports. 
- """ - exports = [] - - if len(ls) == 0: - try: - dirs = kw["dirs"] - except KeyError: - raise SCons.Errors.UserError("Invalid SConscript usage - no parameters") - - if not is_List(dirs): - dirs = [ dirs ] - dirs = list(map(str, dirs)) - - name = kw.get('name', 'SConscript') - - files = [os.path.join(n, name) for n in dirs] - - elif len(ls) == 1: - - files = ls[0] - - elif len(ls) == 2: - - files = ls[0] - exports = self.Split(ls[1]) - - else: - - raise SCons.Errors.UserError("Invalid SConscript() usage - too many arguments") - - if not is_List(files): - files = [ files ] - - if kw.get('exports'): - exports.extend(self.Split(kw['exports'])) - - variant_dir = kw.get('variant_dir') - if variant_dir: - if len(files) != 1: - raise SCons.Errors.UserError("Invalid SConscript() usage - can only specify one SConscript with a variant_dir") - duplicate = kw.get('duplicate', 1) - src_dir = kw.get('src_dir') - if not src_dir: - src_dir, fname = os.path.split(str(files[0])) - files = [os.path.join(str(variant_dir), fname)] - else: - if not isinstance(src_dir, SCons.Node.Node): - src_dir = self.fs.Dir(src_dir) - fn = files[0] - if not isinstance(fn, SCons.Node.Node): - fn = self.fs.File(fn) - if fn.is_under(src_dir): - # Get path relative to the source directory. - fname = fn.get_path(src_dir) - files = [os.path.join(str(variant_dir), fname)] - else: - files = [fn.get_abspath()] - kw['src_dir'] = variant_dir - self.fs.VariantDir(variant_dir, src_dir, duplicate) - - return (files, exports) - - # - # Public methods of an SConsEnvironment. These get - # entry points in the global namespace so they can be called - # as global functions. 
- # - - def Configure(self, *args, **kw): - if not SCons.Script.sconscript_reading: - raise SCons.Errors.UserError("Calling Configure from Builders is not supported.") - kw['_depth'] = kw.get('_depth', 0) + 1 - return SCons.Environment.Base.Configure(self, *args, **kw) - - def Default(self, *targets): - SCons.Script._Set_Default_Targets(self, targets) - - def EnsureSConsVersion(self, major, minor, revision=0): - """Exit abnormally if the SCons version is not late enough.""" - # split string to avoid replacement during build process - if SCons.__version__ == '__' + 'VERSION__': - SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning, - "EnsureSConsVersion is ignored for development version") - return - scons_ver = self._get_major_minor_revision(SCons.__version__) - if scons_ver < (major, minor, revision): - if revision: - scons_ver_string = '%d.%d.%d' % (major, minor, revision) - else: - scons_ver_string = '%d.%d' % (major, minor) - print("SCons %s or greater required, but you have SCons %s" % \ - (scons_ver_string, SCons.__version__)) - sys.exit(2) - - def EnsurePythonVersion(self, major, minor): - """Exit abnormally if the Python version is not late enough.""" - if sys.version_info < (major, minor): - v = sys.version.split()[0] - print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v)) - sys.exit(2) - - def Exit(self, value=0): - sys.exit(value) - - def Export(self, *vars, **kw): - for var in vars: - global_exports.update(compute_exports(self.Split(var))) - global_exports.update(kw) - - def GetLaunchDir(self): - global launch_dir - return launch_dir - - def GetOption(self, name): - name = self.subst(name) - return SCons.Script.Main.GetOption(name) - - def Help(self, text, append=False): - text = self.subst(text, raw=1) - SCons.Script.HelpFunction(text, append=append) - - def Import(self, *vars): - try: - frame = call_stack[-1] - globals = frame.globals - exports = frame.exports - for var in vars: - var = self.Split(var) - for v in 
var: - if v == '*': - globals.update(global_exports) - globals.update(exports) - else: - if v in exports: - globals[v] = exports[v] - else: - globals[v] = global_exports[v] - except KeyError as x: - raise SCons.Errors.UserError("Import of non-existent variable '%s'"%x) - - def SConscript(self, *ls, **kw): - """Execute SCons configuration files. - - Parameters: - *ls (str or list): configuration file(s) to execute. - - Keyword arguments: - dirs (list): execute SConscript in each listed directory. - name (str): execute script 'name' (used only with 'dirs'). - exports (list or dict): locally export variables the - called script(s) can import. - variant_dir (str): mirror sources needed for the build in - a variant directory to allow building in it. - duplicate (bool): physically duplicate sources instead of just - adjusting paths of derived files (used only with 'variant_dir') - (default is True). - must_exist (bool): fail if a requested script is missing - (default is False, default is deprecated). - - Returns: - list of variables returned by the called script - - Raises: - UserError: a script is not found and such exceptions are enabled. 
- """ - - def subst_element(x, subst=self.subst): - if SCons.Util.is_List(x): - x = list(map(subst, x)) - else: - x = subst(x) - return x - ls = list(map(subst_element, ls)) - subst_kw = {} - for key, val in kw.items(): - if is_String(val): - val = self.subst(val) - elif SCons.Util.is_List(val): - val = [self.subst(v) if is_String(v) else v for v in val] - subst_kw[key] = val - - files, exports = self._get_SConscript_filenames(ls, subst_kw) - subst_kw['exports'] = exports - return _SConscript(self.fs, *files, **subst_kw) - - def SConscriptChdir(self, flag): - global sconscript_chdir - sconscript_chdir = flag - - def SetOption(self, name, value): - name = self.subst(name) - SCons.Script.Main.SetOption(name, value) - -# -# -# -SCons.Environment.Environment = SConsEnvironment - -def Configure(*args, **kw): - if not SCons.Script.sconscript_reading: - raise SCons.Errors.UserError("Calling Configure from Builders is not supported.") - kw['_depth'] = 1 - return SCons.SConf.SConf(*args, **kw) - -# It's very important that the DefaultEnvironmentCall() class stay in this -# file, with the get_calling_namespaces() function, the compute_exports() -# function, the Frame class and the SConsEnvironment.Export() method. -# These things make up the calling stack leading up to the actual global -# Export() or SConscript() call that the user issued. We want to allow -# users to export local variables that they define, like so: -# -# def func(): -# x = 1 -# Export('x') -# -# To support this, the get_calling_namespaces() function assumes that -# the *first* stack frame that's not from this file is the local frame -# for the Export() or SConscript() call. 
- -_DefaultEnvironmentProxy = None - -def get_DefaultEnvironmentProxy(): - global _DefaultEnvironmentProxy - if not _DefaultEnvironmentProxy: - default_env = SCons.Defaults.DefaultEnvironment() - _DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env) - return _DefaultEnvironmentProxy - -class DefaultEnvironmentCall(object): - """A class that implements "global function" calls of - Environment methods by fetching the specified method from the - DefaultEnvironment's class. Note that this uses an intermediate - proxy class instead of calling the DefaultEnvironment method - directly so that the proxy can override the subst() method and - thereby prevent expansion of construction variables (since from - the user's point of view this was called as a global function, - with no associated construction environment).""" - def __init__(self, method_name, subst=0): - self.method_name = method_name - if subst: - self.factory = SCons.Defaults.DefaultEnvironment - else: - self.factory = get_DefaultEnvironmentProxy - def __call__(self, *args, **kw): - env = self.factory() - method = getattr(env, self.method_name) - return method(*args, **kw) - - -def BuildDefaultGlobals(): - """ - Create a dictionary containing all the default globals for - SConstruct and SConscript files. 
- """ - - global GlobalDict - if GlobalDict is None: - GlobalDict = {} - - import SCons.Script - d = SCons.Script.__dict__ - def not_a_module(m, d=d, mtype=type(SCons.Script)): - return not isinstance(d[m], mtype) - for m in filter(not_a_module, dir(SCons.Script)): - GlobalDict[m] = d[m] - - return GlobalDict.copy() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/__init__.py deleted file mode 100644 index 5292c103f2c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Script/__init__.py +++ /dev/null @@ -1,431 +0,0 @@ -"""SCons.Script - -This file implements the main() function used by the scons script. - -Architecturally, this *is* the scons script, and will likely only be -called from the external "scons" wrapper. Consequently, anything here -should not be, or be considered, part of the build engine. If it's -something that we expect other software to want to use, it should go in -some other module. If it's specific to the "scons" script invocation, -it goes here. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Script/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import time -start_time = time.time() - -import collections -import os - -try: - from StringIO import StringIO -except ImportError: - from io import StringIO - -import sys - -# Special chicken-and-egg handling of the "--debug=memoizer" flag: -# -# SCons.Memoize contains a metaclass implementation that affects how -# the other classes are instantiated. The Memoizer may add shim methods -# to classes that have methods that cache computed values in order to -# count and report the hits and misses. -# -# If we wait to enable the Memoization until after we've parsed the -# command line options normally, it will be too late, because the Memoizer -# will have already analyzed the classes that it's Memoizing and decided -# to not add the shims. So we use a special-case, up-front check for -# the "--debug=memoizer" flag and enable Memoizer before we import any -# of the other modules that use it. - -_args = sys.argv + os.environ.get('SCONSFLAGS', '').split() -if "--debug=memoizer" in _args: - import SCons.Memoize - import SCons.Warnings - try: - SCons.Memoize.EnableMemoization() - except SCons.Warnings.Warning: - # Some warning was thrown. Arrange for it to be displayed - # or not after warnings are configured. - from . 
import Main - exc_type, exc_value, tb = sys.exc_info() - Main.delayed_warnings.append((exc_type, exc_value)) -del _args - -import SCons.Action -import SCons.Builder -import SCons.Environment -import SCons.Node.FS -import SCons.Platform -import SCons.Platform.virtualenv -import SCons.Scanner -import SCons.SConf -import SCons.Subst -import SCons.Tool -import SCons.Util -import SCons.Variables -import SCons.Defaults - -from . import Main - -main = Main.main - -# The following are global class definitions and variables that used to -# live directly in this module back before 0.96.90, when it contained -# a lot of code. Some SConscript files in widely-distributed packages -# (Blender is the specific example) actually reached into SCons.Script -# directly to use some of these. Rather than break those SConscript -# files, we're going to propagate these names into the SCons.Script -# namespace here. -# -# Some of these are commented out because it's *really* unlikely anyone -# used them, but we're going to leave the comment here to try to make -# it obvious what to do if the situation arises. -BuildTask = Main.BuildTask -CleanTask = Main.CleanTask -QuestionTask = Main.QuestionTask -#PrintHelp = Main.PrintHelp -#SConscriptSettableOptions = Main.SConscriptSettableOptions - -AddOption = Main.AddOption -PrintHelp = Main.PrintHelp -GetOption = Main.GetOption -SetOption = Main.SetOption -Progress = Main.Progress -GetBuildFailures = Main.GetBuildFailures - -#keep_going_on_error = Main.keep_going_on_error -#print_dtree = Main.print_dtree -#print_explanations = Main.print_explanations -#print_includes = Main.print_includes -#print_objects = Main.print_objects -#print_time = Main.print_time -#print_tree = Main.print_tree -#memory_stats = Main.memory_stats -#ignore_errors = Main.ignore_errors -#sconscript_time = Main.sconscript_time -#command_time = Main.command_time -#exit_status = Main.exit_status -#profiling = Main.profiling -#repositories = Main.repositories - -# -from . 
import SConscript -_SConscript = SConscript - -call_stack = _SConscript.call_stack - -# -Action = SCons.Action.Action -AddMethod = SCons.Util.AddMethod -AllowSubstExceptions = SCons.Subst.SetAllowableExceptions -Builder = SCons.Builder.Builder -Configure = _SConscript.Configure -Environment = SCons.Environment.Environment -#OptParser = SCons.SConsOptions.OptParser -FindPathDirs = SCons.Scanner.FindPathDirs -Platform = SCons.Platform.Platform -Virtualenv = SCons.Platform.virtualenv.Virtualenv -Return = _SConscript.Return -Scanner = SCons.Scanner.Base -Tool = SCons.Tool.Tool -WhereIs = SCons.Util.WhereIs - -# -BoolVariable = SCons.Variables.BoolVariable -EnumVariable = SCons.Variables.EnumVariable -ListVariable = SCons.Variables.ListVariable -PackageVariable = SCons.Variables.PackageVariable -PathVariable = SCons.Variables.PathVariable - - -# Action factories. -Chmod = SCons.Defaults.Chmod -Copy = SCons.Defaults.Copy -Delete = SCons.Defaults.Delete -Mkdir = SCons.Defaults.Mkdir -Move = SCons.Defaults.Move -Touch = SCons.Defaults.Touch - -# Pre-made, public scanners. -CScanner = SCons.Tool.CScanner -DScanner = SCons.Tool.DScanner -DirScanner = SCons.Defaults.DirScanner -ProgramScanner = SCons.Tool.ProgramScanner -SourceFileScanner = SCons.Tool.SourceFileScanner - -# Functions we might still convert to Environment methods. -CScan = SCons.Defaults.CScan -DefaultEnvironment = SCons.Defaults.DefaultEnvironment - -# Other variables we provide. -class TargetList(collections.UserList): - def _do_nothing(self, *args, **kw): - pass - def _add_Default(self, list): - self.extend(list) - def _clear(self): - del self[:] - -ARGUMENTS = {} -ARGLIST = [] -BUILD_TARGETS = TargetList() -COMMAND_LINE_TARGETS = [] -DEFAULT_TARGETS = [] - -# BUILD_TARGETS can be modified in the SConscript files. If so, we -# want to treat the modified BUILD_TARGETS list as if they specified -# targets on the command line. 
To do that, though, we need to know if -# BUILD_TARGETS was modified through "official" APIs or by hand. We do -# this by updating two lists in parallel, the documented BUILD_TARGETS -# list, above, and this internal _build_plus_default targets list which -# should only have "official" API changes. Then Script/Main.py can -# compare these two afterwards to figure out if the user added their -# own targets to BUILD_TARGETS. -_build_plus_default = TargetList() - -def _Add_Arguments(alist): - for arg in alist: - a, b = arg.split('=', 1) - ARGUMENTS[a] = b - ARGLIST.append((a, b)) - -def _Add_Targets(tlist): - if tlist: - COMMAND_LINE_TARGETS.extend(tlist) - BUILD_TARGETS.extend(tlist) - BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing - BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing - _build_plus_default.extend(tlist) - _build_plus_default._add_Default = _build_plus_default._do_nothing - _build_plus_default._clear = _build_plus_default._do_nothing - -def _Set_Default_Targets_Has_Been_Called(d, fs): - return DEFAULT_TARGETS - -def _Set_Default_Targets_Has_Not_Been_Called(d, fs): - if d is None: - d = [fs.Dir('.')] - return d - -_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called - -def _Set_Default_Targets(env, tlist): - global DEFAULT_TARGETS - global _Get_Default_Targets - _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called - for t in tlist: - if t is None: - # Delete the elements from the list in-place, don't - # reassign an empty list to DEFAULT_TARGETS, so that the - # variables will still point to the same object we point to. 
- del DEFAULT_TARGETS[:] - BUILD_TARGETS._clear() - _build_plus_default._clear() - elif isinstance(t, SCons.Node.Node): - DEFAULT_TARGETS.append(t) - BUILD_TARGETS._add_Default([t]) - _build_plus_default._add_Default([t]) - else: - nodes = env.arg2nodes(t, env.fs.Entry) - DEFAULT_TARGETS.extend(nodes) - BUILD_TARGETS._add_Default(nodes) - _build_plus_default._add_Default(nodes) - -# -help_text = None - -def HelpFunction(text, append=False): - global help_text - if help_text is None: - if append: - s = StringIO() - PrintHelp(s) - help_text = s.getvalue() - s.close() - else: - help_text = "" - - help_text= help_text + text - - -# -# Will be non-zero if we are reading an SConscript file. -sconscript_reading = 0 - -_no_missing_sconscript = False -_warn_missing_sconscript_deprecated = True - -def set_missing_sconscript_error(flag=1): - """Set behavior on missing file in SConscript() call. Returns previous value""" - global _no_missing_sconscript - old = _no_missing_sconscript - _no_missing_sconscript = flag - return old - -# -def Variables(files=[], args=ARGUMENTS): - return SCons.Variables.Variables(files, args) - - -# The list of global functions to add to the SConscript name space -# that end up calling corresponding methods or Builders in the -# DefaultEnvironment(). -GlobalDefaultEnvironmentFunctions = [ - # Methods from the SConsEnvironment class, above. - 'Default', - 'EnsurePythonVersion', - 'EnsureSConsVersion', - 'Exit', - 'Export', - 'GetLaunchDir', - 'Help', - 'Import', - #'SConscript', is handled separately, below. - 'SConscriptChdir', - - # Methods from the Environment.Base class. - 'AddPostAction', - 'AddPreAction', - 'Alias', - 'AlwaysBuild', - 'CacheDir', - 'Clean', - #The Command() method is handled separately, below. 
- 'Decider', - 'Depends', - 'Dir', - 'NoClean', - 'NoCache', - 'Entry', - 'Execute', - 'File', - 'FindFile', - 'FindInstalledFiles', - 'FindSourceFiles', - 'Flatten', - 'GetBuildPath', - 'Glob', - 'Ignore', - 'Install', - 'InstallAs', - 'InstallVersionedLib', - 'Literal', - 'Local', - 'ParseDepends', - 'Precious', - 'PyPackageDir', - 'Repository', - 'Requires', - 'SConsignFile', - 'SideEffect', - 'SourceCode', - 'Split', - 'Tag', - 'Value', - 'VariantDir', -] - -GlobalDefaultBuilders = [ - # Supported builders. - 'CFile', - 'CXXFile', - 'DVI', - 'Jar', - 'Java', - 'JavaH', - 'Library', - 'LoadableModule', - 'M4', - 'MSVSProject', - 'Object', - 'PCH', - 'PDF', - 'PostScript', - 'Program', - 'RES', - 'RMIC', - 'SharedLibrary', - 'SharedObject', - 'StaticLibrary', - 'StaticObject', - 'Substfile', - 'Tar', - 'Textfile', - 'TypeLibrary', - 'Zip', - 'Package', -] - -for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders: - exec ("%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name))) -del name - -# There are a handful of variables that used to live in the -# Script/SConscript.py module that some SConscript files out there were -# accessing directly as SCons.Script.SConscript.*. The problem is that -# "SConscript" in this namespace is no longer a module, it's a global -# function call--or more precisely, an object that implements a global -# function call through the default Environment. Nevertheless, we can -# maintain backwards compatibility for SConscripts that were reaching in -# this way by hanging some attributes off the "SConscript" object here. -SConscript = _SConscript.DefaultEnvironmentCall('SConscript') - -# Make SConscript look enough like the module it used to be so -# that pychecker doesn't barf. 
-SConscript.__name__ = 'SConscript' - -SConscript.Arguments = ARGUMENTS -SConscript.ArgList = ARGLIST -SConscript.BuildTargets = BUILD_TARGETS -SConscript.CommandLineTargets = COMMAND_LINE_TARGETS -SConscript.DefaultTargets = DEFAULT_TARGETS - -# The global Command() function must be handled differently than the -# global functions for other construction environment methods because -# we want people to be able to use Actions that must expand $TARGET -# and $SOURCE later, when (and if) the Action is invoked to build -# the target(s). We do this with the subst=1 argument, which creates -# a DefaultEnvironmentCall instance that wraps up a normal default -# construction environment that performs variable substitution, not a -# proxy that doesn't. -# -# There's a flaw here, though, because any other $-variables on a command -# line will *also* be expanded, each to a null string, but that should -# only be a problem in the unusual case where someone was passing a '$' -# on a command line and *expected* the $ to get through to the shell -# because they were calling Command() and not env.Command()... This is -# unlikely enough that we're going to leave this as is and cross that -# bridge if someone actually comes to it. -Command = _SConscript.DefaultEnvironmentCall('Command', subst=1) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Subst.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Subst.py deleted file mode 100644 index 04ee78f3444..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Subst.py +++ /dev/null @@ -1,969 +0,0 @@ -"""SCons.Subst - -SCons string substitution. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Subst.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import collections -import re - -import SCons.Errors - -from SCons.Util import is_String, is_Sequence - -# Indexed by the SUBST_* constants below. 
-_strconv = [SCons.Util.to_String_for_subst, - SCons.Util.to_String_for_subst, - SCons.Util.to_String_for_signature] - - - -AllowableExceptions = (IndexError, NameError) - -def SetAllowableExceptions(*excepts): - global AllowableExceptions - AllowableExceptions = [_f for _f in excepts if _f] - -def raise_exception(exception, target, s): - name = exception.__class__.__name__ - msg = "%s `%s' trying to evaluate `%s'" % (name, exception, s) - if target: - raise SCons.Errors.BuildError(target[0], msg) - else: - raise SCons.Errors.UserError(msg) - - - -class Literal(object): - """A wrapper for a string. If you use this object wrapped - around a string, then it will be interpreted as literal. - When passed to the command interpreter, all special - characters will be escaped.""" - def __init__(self, lstr): - self.lstr = lstr - - def __str__(self): - return self.lstr - - def escape(self, escape_func): - return escape_func(self.lstr) - - def for_signature(self): - return self.lstr - - def is_literal(self): - return 1 - - def __eq__(self, other): - if not isinstance(other, Literal): - return False - return self.lstr == other.lstr - - def __neq__(self, other): - return not self.__eq__(other) - - def __hash__(self): - return hash(self.lstr) - -class SpecialAttrWrapper(object): - """This is a wrapper for what we call a 'Node special attribute.' - This is any of the attributes of a Node that we can reference from - Environment variable substitution, such as $TARGET.abspath or - $SOURCES[1].filebase. We implement the same methods as Literal - so we can handle special characters, plus a for_signature method, - such that we can return some canonical string during signature - calculation to avoid unnecessary rebuilds.""" - - def __init__(self, lstr, for_signature=None): - """The for_signature parameter, if supplied, will be the - canonical string we return from for_signature(). 
Else - we will simply return lstr.""" - self.lstr = lstr - if for_signature: - self.forsig = for_signature - else: - self.forsig = lstr - - def __str__(self): - return self.lstr - - def escape(self, escape_func): - return escape_func(self.lstr) - - def for_signature(self): - return self.forsig - - def is_literal(self): - return 1 - -def quote_spaces(arg): - """Generic function for putting double quotes around any string that - has white space in it.""" - if ' ' in arg or '\t' in arg: - return '"%s"' % arg - else: - return str(arg) - -class CmdStringHolder(collections.UserString): - """This is a special class used to hold strings generated by - scons_subst() and scons_subst_list(). It defines a special method - escape(). When passed a function with an escape algorithm for a - particular platform, it will return the contained string with the - proper escape sequences inserted. - """ - def __init__(self, cmd, literal=None): - collections.UserString.__init__(self, cmd) - self.literal = literal - - def is_literal(self): - return self.literal - - def escape(self, escape_func, quote_func=quote_spaces): - """Escape the string with the supplied function. The - function is expected to take an arbitrary string, then - return it with all special characters escaped and ready - for passing to the command interpreter. - - After calling this function, the next call to str() will - return the escaped string. 
- """ - - if self.is_literal(): - return escape_func(self.data) - elif ' ' in self.data or '\t' in self.data: - return quote_func(self.data) - else: - return self.data - -def escape_list(mylist, escape_func): - """Escape a list of arguments by running the specified escape_func - on every object in the list that has an escape() method.""" - def escape(obj, escape_func=escape_func): - try: - e = obj.escape - except AttributeError: - return obj - else: - return e(escape_func) - return list(map(escape, mylist)) - -class NLWrapper(object): - """A wrapper class that delays turning a list of sources or targets - into a NodeList until it's needed. The specified function supplied - when the object is initialized is responsible for turning raw nodes - into proxies that implement the special attributes like .abspath, - .source, etc. This way, we avoid creating those proxies just - "in case" someone is going to use $TARGET or the like, and only - go through the trouble if we really have to. - - In practice, this might be a wash performance-wise, but it's a little - cleaner conceptually... - """ - - def __init__(self, list, func): - self.list = list - self.func = func - def _return_nodelist(self): - return self.nodelist - def _gen_nodelist(self): - mylist = self.list - if mylist is None: - mylist = [] - elif not is_Sequence(mylist): - mylist = [mylist] - # The map(self.func) call is what actually turns - # a list into appropriate proxies. - self.nodelist = SCons.Util.NodeList(list(map(self.func, mylist))) - self._create_nodelist = self._return_nodelist - return self.nodelist - _create_nodelist = _gen_nodelist - - -class Targets_or_Sources(collections.UserList): - """A class that implements $TARGETS or $SOURCES expansions by in turn - wrapping a NLWrapper. This class handles the different methods used - to access the list, calling the NLWrapper to create proxies on demand. 
- - Note that we subclass collections.UserList purely so that the - is_Sequence() function will identify an object of this class as - a list during variable expansion. We're not really using any - collections.UserList methods in practice. - """ - def __init__(self, nl): - self.nl = nl - def __getattr__(self, attr): - nl = self.nl._create_nodelist() - return getattr(nl, attr) - def __getitem__(self, i): - nl = self.nl._create_nodelist() - return nl[i] - def __getslice__(self, i, j): - nl = self.nl._create_nodelist() - i = max(i, 0); j = max(j, 0) - return nl[i:j] - def __str__(self): - nl = self.nl._create_nodelist() - return str(nl) - def __repr__(self): - nl = self.nl._create_nodelist() - return repr(nl) - -class Target_or_Source(object): - """A class that implements $TARGET or $SOURCE expansions by in turn - wrapping a NLWrapper. This class handles the different methods used - to access an individual proxy Node, calling the NLWrapper to create - a proxy on demand. - """ - def __init__(self, nl): - self.nl = nl - def __getattr__(self, attr): - nl = self.nl._create_nodelist() - try: - nl0 = nl[0] - except IndexError: - # If there is nothing in the list, then we have no attributes to - # pass through, so raise AttributeError for everything. - raise AttributeError("NodeList has no attribute: %s" % attr) - return getattr(nl0, attr) - def __str__(self): - nl = self.nl._create_nodelist() - if nl: - return str(nl[0]) - return '' - def __repr__(self): - nl = self.nl._create_nodelist() - if nl: - return repr(nl[0]) - return '' - -class NullNodeList(SCons.Util.NullSeq): - def __call__(self, *args, **kwargs): return '' - def __str__(self): return '' - -NullNodesList = NullNodeList() - -def subst_dict(target, source): - """Create a dictionary for substitution of special - construction variables. 
- - This translates the following special arguments: - - target - the target (object or array of objects), - used to generate the TARGET and TARGETS - construction variables - - source - the source (object or array of objects), - used to generate the SOURCES and SOURCE - construction variables - """ - dict = {} - - if target: - def get_tgt_subst_proxy(thing): - try: - subst_proxy = thing.get_subst_proxy() - except AttributeError: - subst_proxy = thing # probably a string, just return it - return subst_proxy - tnl = NLWrapper(target, get_tgt_subst_proxy) - dict['TARGETS'] = Targets_or_Sources(tnl) - dict['TARGET'] = Target_or_Source(tnl) - - # This is a total cheat, but hopefully this dictionary goes - # away soon anyway. We just let these expand to $TARGETS - # because that's "good enough" for the use of ToolSurrogates - # (see test/ToolSurrogate.py) to generate documentation. - dict['CHANGED_TARGETS'] = '$TARGETS' - dict['UNCHANGED_TARGETS'] = '$TARGETS' - else: - dict['TARGETS'] = NullNodesList - dict['TARGET'] = NullNodesList - - if source: - def get_src_subst_proxy(node): - try: - rfile = node.rfile - except AttributeError: - pass - else: - node = rfile() - try: - return node.get_subst_proxy() - except AttributeError: - return node # probably a String, just return it - snl = NLWrapper(source, get_src_subst_proxy) - dict['SOURCES'] = Targets_or_Sources(snl) - dict['SOURCE'] = Target_or_Source(snl) - - # This is a total cheat, but hopefully this dictionary goes - # away soon anyway. We just let these expand to $TARGETS - # because that's "good enough" for the use of ToolSurrogates - # (see test/ToolSurrogate.py) to generate documentation. - dict['CHANGED_SOURCES'] = '$SOURCES' - dict['UNCHANGED_SOURCES'] = '$SOURCES' - else: - dict['SOURCES'] = NullNodesList - dict['SOURCE'] = NullNodesList - - return dict - - -class StringSubber(object): - """A class to construct the results of a scons_subst() call. 
- - This binds a specific construction environment, mode, target and - source with two methods (substitute() and expand()) that handle - the expansion. - """ - def __init__(self, env, mode, conv, gvars): - self.env = env - self.mode = mode - self.conv = conv - self.gvars = gvars - - def expand(self, s, lvars): - """Expand a single "token" as necessary, returning an - appropriate string containing the expansion. - - This handles expanding different types of things (strings, - lists, callables) appropriately. It calls the wrapper - substitute() method to re-expand things as necessary, so that - the results of expansions of side-by-side strings still get - re-evaluated separately, not smushed together. - """ - if is_String(s): - try: - s0, s1 = s[:2] - except (IndexError, ValueError): - return s - if s0 != '$': - return s - if s1 == '$': - # In this case keep the double $'s which we'll later - # swap for a single dollar sign as we need to retain - # this information to properly avoid matching "$("" when - # the actual text was "$$("" (or "$)"" when "$$)"" ) - return '$$' - elif s1 in '()': - return s - else: - key = s[1:] - if key[0] == '{' or '.' in key: - if key[0] == '{': - key = key[1:-1] - - # Store for error messages if we fail to expand the - # value - old_s = s - s = None - if key in lvars: - s = lvars[key] - elif key in self.gvars: - s = self.gvars[key] - else: - try: - s = eval(key, self.gvars, lvars) - except KeyboardInterrupt: - raise - except Exception as e: - if e.__class__ in AllowableExceptions: - return '' - raise_exception(e, lvars['TARGETS'], old_s) - - if s is None and NameError not in AllowableExceptions: - raise_exception(NameError(key), lvars['TARGETS'], old_s) - elif s is None: - return '' - - # Before re-expanding the result, handle - # recursive expansion by copying the local - # variable dictionary and overwriting a null - # string for the value of the variable name - # we just expanded. 
- # - # This could potentially be optimized by only - # copying lvars when s contains more expansions, - # but lvars is usually supposed to be pretty - # small, and deeply nested variable expansions - # are probably more the exception than the norm, - # so it should be tolerable for now. - lv = lvars.copy() - var = key.split('.')[0] - lv[var] = '' - return self.substitute(s, lv) - elif is_Sequence(s): - def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars): - return conv(substitute(l, lvars)) - return list(map(func, s)) - elif callable(s): - try: - s = s(target=lvars['TARGETS'], - source=lvars['SOURCES'], - env=self.env, - for_signature=(self.mode != SUBST_CMD)) - except TypeError: - # This probably indicates that it's a callable - # object that doesn't match our calling arguments - # (like an Action). - if self.mode == SUBST_RAW: - return s - s = self.conv(s) - return self.substitute(s, lvars) - elif s is None: - return '' - else: - return s - - def substitute(self, args, lvars): - """Substitute expansions in an argument or list of arguments. - - This serves as a wrapper for splitting up a string into - separate tokens. - """ - if is_String(args) and not isinstance(args, CmdStringHolder): - args = str(args) # In case it's a UserString. - try: - def sub_match(match): - return self.conv(self.expand(match.group(1), lvars)) - result = _dollar_exps.sub(sub_match, args) - except TypeError: - # If the internal conversion routine doesn't return - # strings (it could be overridden to return Nodes, for - # example), then the 1.5.2 re module will throw this - # exception. Back off to a slower, general-purpose - # algorithm that works for all data types. 
- args = _separate_args.findall(args) - result = [] - for a in args: - result.append(self.conv(self.expand(a, lvars))) - if len(result) == 1: - result = result[0] - else: - result = ''.join(map(str, result)) - return result - else: - return self.expand(args, lvars) - - -class ListSubber(collections.UserList): - """A class to construct the results of a scons_subst_list() call. - - Like StringSubber, this class binds a specific construction - environment, mode, target and source with two methods - (substitute() and expand()) that handle the expansion. - - In addition, however, this class is used to track the state of - the result(s) we're gathering so we can do the appropriate thing - whenever we have to append another word to the result--start a new - line, start a new word, append to the current word, etc. We do - this by setting the "append" attribute to the right method so - that our wrapper methods only need ever call ListSubber.append(), - and the rest of the object takes care of doing the right thing - internally. - """ - def __init__(self, env, mode, conv, gvars): - collections.UserList.__init__(self, []) - self.env = env - self.mode = mode - self.conv = conv - self.gvars = gvars - - if self.mode == SUBST_RAW: - self.add_strip = lambda x: self.append(x) - else: - self.add_strip = lambda x: None - self.in_strip = None - self.next_line() - - def expanded(self, s): - """Determines if the string s requires further expansion. - - Due to the implementation of ListSubber expand will call - itself 2 additional times for an already expanded string. This - method is used to determine if a string is already fully - expanded and if so exit the loop early to prevent these - recursive calls. 
- """ - if not is_String(s) or isinstance(s, CmdStringHolder): - return False - - s = str(s) # in case it's a UserString - return _separate_args.findall(s) is None - - def expand(self, s, lvars, within_list): - """Expand a single "token" as necessary, appending the - expansion to the current result. - - This handles expanding different types of things (strings, - lists, callables) appropriately. It calls the wrapper - substitute() method to re-expand things as necessary, so that - the results of expansions of side-by-side strings still get - re-evaluated separately, not smushed together. - """ - - if is_String(s): - try: - s0, s1 = s[:2] - except (IndexError, ValueError): - self.append(s) - return - if s0 != '$': - self.append(s) - return - if s1 == '$': - self.append('$') - elif s1 == '(': - self.open_strip('$(') - elif s1 == ')': - self.close_strip('$)') - else: - key = s[1:] - if key[0] == '{' or key.find('.') >= 0: - if key[0] == '{': - key = key[1:-1] - - # Store for error messages if we fail to expand the - # value - old_s = s - s = None - if key in lvars: - s = lvars[key] - elif key in self.gvars: - s = self.gvars[key] - else: - try: - s = eval(key, self.gvars, lvars) - except KeyboardInterrupt: - raise - except Exception as e: - if e.__class__ in AllowableExceptions: - return - raise_exception(e, lvars['TARGETS'], old_s) - - if s is None and NameError not in AllowableExceptions: - raise_exception(NameError(), lvars['TARGETS'], old_s) - elif s is None: - return - - # If the string is already full expanded there's no - # need to continue recursion. - if self.expanded(s): - self.append(s) - return - - # Before re-expanding the result, handle - # recursive expansion by copying the local - # variable dictionary and overwriting a null - # string for the value of the variable name - # we just expanded. 
- lv = lvars.copy() - var = key.split('.')[0] - lv[var] = '' - self.substitute(s, lv, 0) - self.this_word() - elif is_Sequence(s): - for a in s: - self.substitute(a, lvars, 1) - self.next_word() - elif callable(s): - try: - s = s(target=lvars['TARGETS'], - source=lvars['SOURCES'], - env=self.env, - for_signature=(self.mode != SUBST_CMD)) - except TypeError: - # This probably indicates that it's a callable - # object that doesn't match our calling arguments - # (like an Action). - if self.mode == SUBST_RAW: - self.append(s) - return - s = self.conv(s) - self.substitute(s, lvars, within_list) - elif s is None: - self.this_word() - else: - self.append(s) - - def substitute(self, args, lvars, within_list): - """Substitute expansions in an argument or list of arguments. - - This serves as a wrapper for splitting up a string into - separate tokens. - """ - - if is_String(args) and not isinstance(args, CmdStringHolder): - args = str(args) # In case it's a UserString. - args = _separate_args.findall(args) - for a in args: - if a[0] in ' \t\n\r\f\v': - if '\n' in a: - self.next_line() - elif within_list: - self.append(a) - else: - self.next_word() - else: - self.expand(a, lvars, within_list) - else: - self.expand(args, lvars, within_list) - - def next_line(self): - """Arrange for the next word to start a new line. This - is like starting a new word, except that we have to append - another line to the result.""" - collections.UserList.append(self, []) - self.next_word() - - def this_word(self): - """Arrange for the next word to append to the end of the - current last word in the result.""" - self.append = self.add_to_current_word - - def next_word(self): - """Arrange for the next word to start a new word.""" - self.append = self.add_new_word - - def add_to_current_word(self, x): - """Append the string x to the end of the current last word - in the result. If that is not possible, then just add - it as a new word. 
Make sure the entire concatenated string - inherits the object attributes of x (in particular, the - escape function) by wrapping it as CmdStringHolder.""" - - if not self.in_strip or self.mode != SUBST_SIG: - try: - current_word = self[-1][-1] - except IndexError: - self.add_new_word(x) - else: - # All right, this is a hack and it should probably - # be refactored out of existence in the future. - # The issue is that we want to smoosh words together - # and make one file name that gets escaped if - # we're expanding something like foo$EXTENSION, - # but we don't want to smoosh them together if - # it's something like >$TARGET, because then we'll - # treat the '>' like it's part of the file name. - # So for now, just hard-code looking for the special - # command-line redirection characters... - try: - last_char = str(current_word)[-1] - except IndexError: - last_char = '\0' - if last_char in '<>|': - self.add_new_word(x) - else: - y = current_word + x - - # We used to treat a word appended to a literal - # as a literal itself, but this caused problems - # with interpreting quotes around space-separated - # targets on command lines. Removing this makes - # none of the "substantive" end-to-end tests fail, - # so we'll take this out but leave it commented - # for now in case there's a problem not covered - # by the test cases and we need to resurrect this. 
- #literal1 = self.literal(self[-1][-1]) - #literal2 = self.literal(x) - y = self.conv(y) - if is_String(y): - #y = CmdStringHolder(y, literal1 or literal2) - y = CmdStringHolder(y, None) - self[-1][-1] = y - - def add_new_word(self, x): - if not self.in_strip or self.mode != SUBST_SIG: - literal = self.literal(x) - x = self.conv(x) - if is_String(x): - x = CmdStringHolder(x, literal) - self[-1].append(x) - self.append = self.add_to_current_word - - def literal(self, x): - try: - l = x.is_literal - except AttributeError: - return None - else: - return l() - - def open_strip(self, x): - """Handle the "open strip" $( token.""" - self.add_strip(x) - self.in_strip = 1 - - def close_strip(self, x): - """Handle the "close strip" $) token.""" - self.add_strip(x) - self.in_strip = None - - -# Constants for the "mode" parameter to scons_subst_list() and -# scons_subst(). SUBST_RAW gives the raw command line. SUBST_CMD -# gives a command line suitable for passing to a shell. SUBST_SIG -# gives a command line appropriate for calculating the signature -# of a command line...if this changes, we should rebuild. -SUBST_CMD = 0 -SUBST_RAW = 1 -SUBST_SIG = 2 - -_rm = re.compile(r'\$[()]') - -# Note the pattern below only matches $( or $) when there is no -# preceeding $. (Thus the (? Node B(Pending) --> Node C (NoState) - ^ | - | | - +-------------------------------------+ - - Now, when the Taskmaster examines the Node C's child Node A, - it finds that Node A is in the "pending" state. Therefore, - Node A is a pending child of node C. - - Pending children indicate that the Taskmaster has potentially - loop back through a cycle. We say potentially because it could - also occur when a DAG is evaluated in parallel. For example, - consider the following graph:: - - Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... 
- | ^ - | | - +----------> Node D (NoState) --------+ - / - Next candidate / - - The Taskmaster first evaluates the nodes A, B, and C and - starts building some children of node C. Assuming, that the - maximum parallel level has not been reached, the Taskmaster - will examine Node D. It will find that Node C is a pending - child of Node D. - - In summary, evaluating a graph with a cycle will always - involve a pending child at one point. A pending child might - indicate either a cycle or a diamond-shaped DAG. Only a - fraction of the nodes ends-up being a "pending child" of - another node. This keeps the pending_children set small in - practice. - - We can differentiate between the two cases if we wait until - the end of the build. At this point, all the pending children - nodes due to a diamond-shaped DAG will have been properly - built (or will have failed to build). But, the pending - children involved in a cycle will still be in the pending - state. - - The taskmaster removes nodes from the pending_children set as - soon as a pending_children node moves out of the pending - state. This also helps to keep the pending_children set small. - """ - - for n in self.pending_children: - assert n.state in (NODE_PENDING, NODE_EXECUTING), \ - (str(n), StateString[n.state]) - assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) - for p in n.waiting_parents: - assert p.ref_count > 0, (str(n), str(p), p.ref_count) - - - def trace_message(self, message): - return 'Taskmaster: %s\n' % message - - def trace_node(self, node): - return '<%-10s %-3s %s>' % (StateString[node.get_state()], - node.ref_count, - repr(str(node))) - - def _find_next_ready_node(self): - """ - Finds the next node that is ready to be built. - - This is *the* main guts of the DAG walk. We loop through the - list of candidates, looking for something that has no un-built - children (i.e., that is a leaf Node or has dependencies that are - all leaf Nodes or up-to-date). 
Candidate Nodes are re-scanned - (both the target Node itself and its sources, which are always - scanned in the context of a given target) to discover implicit - dependencies. A Node that must wait for some children to be - built will be put back on the candidates list after the children - have finished building. A Node that has been put back on the - candidates list in this way may have itself (or its sources) - re-scanned, in order to handle generated header files (e.g.) and - the implicit dependencies therein. - - Note that this method does not do any signature calculation or - up-to-date check itself. All of that is handled by the Task - class. This is purely concerned with the dependency graph walk. - """ - - self.ready_exc = None - - T = self.trace - if T: T.write(SCons.Util.UnicodeType('\n') + self.trace_message('Looking for a node to evaluate')) - - while True: - node = self.next_candidate() - if node is None: - if T: T.write(self.trace_message('No candidate anymore.') + u'\n') - return None - - node = node.disambiguate() - state = node.get_state() - - # For debugging only: - # - # try: - # self._validate_pending_children() - # except: - # self.ready_exc = sys.exc_info() - # return node - - if CollectStats: - if not hasattr(node.attributes, 'stats'): - node.attributes.stats = Stats() - StatsNodes.append(node) - S = node.attributes.stats - S.considered = S.considered + 1 - else: - S = None - - if T: T.write(self.trace_message(u' Considering node %s and its children:' % self.trace_node(node))) - - if state == NODE_NO_STATE: - # Mark this node as being on the execution stack: - node.set_state(NODE_PENDING) - elif state > NODE_PENDING: - # Skip this node if it has already been evaluated: - if S: S.already_handled = S.already_handled + 1 - if T: T.write(self.trace_message(u' already handled (executed)')) - continue - - executor = node.get_executor() - - try: - children = executor.get_all_children() - except SystemExit: - exc_value = sys.exc_info()[1] - e = 
SCons.Errors.ExplicitExit(node, exc_value.code) - self.ready_exc = (SCons.Errors.ExplicitExit, e) - if T: T.write(self.trace_message(' SystemExit')) - return node - except Exception as e: - # We had a problem just trying to figure out the - # children (like a child couldn't be linked in to a - # VariantDir, or a Scanner threw something). Arrange to - # raise the exception when the Task is "executed." - self.ready_exc = sys.exc_info() - if S: S.problem = S.problem + 1 - if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e)) - return node - - children_not_visited = [] - children_pending = set() - children_not_ready = [] - children_failed = False - - for child in chain(executor.get_all_prerequisites(), children): - childstate = child.get_state() - - if T: T.write(self.trace_message(u' ' + self.trace_node(child))) - - if childstate == NODE_NO_STATE: - children_not_visited.append(child) - elif childstate == NODE_PENDING: - children_pending.add(child) - elif childstate == NODE_FAILED: - children_failed = True - - if childstate <= NODE_EXECUTING: - children_not_ready.append(child) - - # These nodes have not even been visited yet. Add - # them to the list so that on some next pass we can - # take a stab at evaluating them (or their children). - if children_not_visited: - if len(children_not_visited) > 1: - children_not_visited.reverse() - self.candidates.extend(self.order(children_not_visited)) - - # if T and children_not_visited: - # T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited))) - # T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates))) - - # Skip this node if any of its children have failed. - # - # This catches the case where we're descending a top-level - # target and one of our children failed while trying to be - # built by a *previous* descent of an earlier top-level - # target. - # - # It can also occur if a node is reused in multiple - # targets. 
One first descends though the one of the - # target, the next time occurs through the other target. - # - # Note that we can only have failed_children if the - # --keep-going flag was used, because without it the build - # will stop before diving in the other branch. - # - # Note that even if one of the children fails, we still - # added the other children to the list of candidate nodes - # to keep on building (--keep-going). - if children_failed: - for n in executor.get_action_targets(): - n.set_state(NODE_FAILED) - - if S: S.child_failed = S.child_failed + 1 - if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node))) - continue - - if children_not_ready: - for child in children_not_ready: - # We're waiting on one or more derived targets - # that have not yet finished building. - if S: S.not_built = S.not_built + 1 - - # Add this node to the waiting parents lists of - # anything we're waiting on, with a reference - # count so we can be put back on the list for - # re-evaluation when they've all finished. - node.ref_count = node.ref_count + child.add_to_waiting_parents(node) - if T: T.write(self.trace_message(u' adjusted ref count: %s, child %s' % - (self.trace_node(node), repr(str(child))))) - - if T: - for pc in children_pending: - T.write(self.trace_message(' adding %s to the pending children set\n' % - self.trace_node(pc))) - self.pending_children = self.pending_children | children_pending - - continue - - # Skip this node if it has side-effects that are - # currently being built: - wait_side_effects = False - for se in executor.get_action_side_effects(): - if se.get_state() == NODE_EXECUTING: - se.add_to_waiting_s_e(node) - wait_side_effects = True - - if wait_side_effects: - if S: S.side_effects = S.side_effects + 1 - continue - - # The default when we've gotten through all of the checks above: - # this node is ready to be built. 
- if S: S.build = S.build + 1 - if T: T.write(self.trace_message(u'Evaluating %s\n' % - self.trace_node(node))) - - # For debugging only: - # - # try: - # self._validate_pending_children() - # except: - # self.ready_exc = sys.exc_info() - # return node - - return node - - return None - - def next_task(self): - """ - Returns the next task to be executed. - - This simply asks for the next Node to be evaluated, and then wraps - it in the specific Task subclass with which we were initialized. - """ - node = self._find_next_ready_node() - - if node is None: - return None - - executor = node.get_executor() - if executor is None: - return None - - tlist = executor.get_all_targets() - - task = self.tasker(self, tlist, node in self.original_top, node) - try: - task.make_ready() - except Exception as e : - # We had a problem just trying to get this task ready (like - # a child couldn't be linked to a VariantDir when deciding - # whether this node is current). Arrange to raise the - # exception when the Task is "executed." - self.ready_exc = sys.exc_info() - - if self.ready_exc: - task.exception_set(self.ready_exc) - - self.ready_exc = None - - return task - - def will_not_build(self, nodes, node_func=lambda n: None): - """ - Perform clean-up about nodes that will never be built. Invokes - a user defined function on all of these nodes (including all - of their parents). - """ - - T = self.trace - - pending_children = self.pending_children - - to_visit = set(nodes) - pending_children = pending_children - to_visit - - if T: - for n in nodes: - T.write(self.trace_message(' removing node %s from the pending children set\n' % - self.trace_node(n))) - try: - while len(to_visit): - node = to_visit.pop() - node_func(node) - - # Prune recursion by flushing the waiting children - # list immediately. 
- parents = node.waiting_parents - node.waiting_parents = set() - - to_visit = to_visit | parents - pending_children = pending_children - parents - - for p in parents: - p.ref_count = p.ref_count - 1 - if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' % - self.trace_node(p))) - except KeyError: - # The container to_visit has been emptied. - pass - - # We have the stick back the pending_children list into the - # taskmaster because the python 1.5.2 compatibility does not - # allow us to use in-place updates - self.pending_children = pending_children - - def stop(self): - """ - Stops the current build completely. - """ - self.next_candidate = self.no_next_candidate - - def cleanup(self): - """ - Check for dependency cycles. - """ - if not self.pending_children: - return - - nclist = [(n, find_cycle([n], set())) for n in self.pending_children] - - genuine_cycles = [ - node for node,cycle in nclist - if cycle or node.get_state() != NODE_EXECUTED - ] - if not genuine_cycles: - # All of the "cycles" found were single nodes in EXECUTED state, - # which is to say, they really weren't cycles. Just return. 
- return - - desc = 'Found dependency cycle(s):\n' - for node, cycle in nclist: - if cycle: - desc = desc + " " + " -> ".join(map(str, cycle)) + "\n" - else: - desc = desc + \ - " Internal Error: no cycle found for node %s (%s) in state %s\n" % \ - (node, repr(node), StateString[node.get_state()]) - - raise SCons.Errors.UserError(desc) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/386asm.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/386asm.py deleted file mode 100644 index d398f07fc24..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/386asm.py +++ /dev/null @@ -1,61 +0,0 @@ -"""SCons.Tool.386asm - -Tool specification for the 386ASM assembler for the Phar Lap ETS embedded -operating system. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/386asm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.PharLapCommon import addPharLapPaths -import SCons.Util - -as_module = __import__('as', globals(), locals(), [], 1) - -def generate(env): - """Add Builders and construction variables for ar to an Environment.""" - as_module.generate(env) - - env['AS'] = '386asm' - env['ASFLAGS'] = SCons.Util.CLVar('') - env['ASPPFLAGS'] = '$ASFLAGS' - env['ASCOM'] = '$AS $ASFLAGS $SOURCES -o $TARGET' - env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $SOURCES -o $TARGET' - - addPharLapPaths(env) - -def exists(env): - return env.Detect('386asm') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/DCommon.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/DCommon.py deleted file mode 100644 index 0e0cf09cb8f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/DCommon.py +++ /dev/null @@ -1,69 +0,0 @@ -from __future__ import print_function - -"""SCons.Tool.DCommon - -Common code for the various D tools. 
- -Coded by Russel Winder (russel@winder.org.uk) -2012-09-06 -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/DCommon.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - - -def isD(env, source): - if not source: - return 0 - for s in source: - if s.sources: - ext = os.path.splitext(str(s.sources[0]))[1] - if ext == '.d': - return 1 - return 0 - - -def addDPATHToEnv(env, executable): - dPath = env.WhereIs(executable) - if dPath: - phobosDir = dPath[:dPath.rindex(executable)] + '/../src/phobos' - if os.path.isdir(phobosDir): - env.Append(DPATH=[phobosDir]) - - -def allAtOnceEmitter(target, source, env): - if env['DC'] in ('ldc2', 'dmd'): - env.SideEffect(str(target[0]) + '.o', target[0]) - env.Clean(target[0], str(target[0]) + '.o') - return target, source - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/FortranCommon.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/FortranCommon.py deleted file mode 100644 index af573fa0468..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/FortranCommon.py +++ /dev/null @@ -1,281 +0,0 @@ -"""SCons.Tool.FortranCommon - -Stuff for processing Fortran, common to all fortran dialects. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/FortranCommon.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import re -import os.path - -import SCons.Action -import SCons.Defaults -import SCons.Scanner.Fortran -import SCons.Tool -import SCons.Util - -def isfortran(env, source): - """Return 1 if any of code in source has fortran files in it, 0 - otherwise.""" - try: - fsuffixes = env['FORTRANSUFFIXES'] - except KeyError: - # If no FORTRANSUFFIXES, no fortran tool, so there is no need to look - # for fortran sources. - return 0 - - if not source: - # Source might be None for unusual cases like SConf. 
- return 0 - for s in source: - if s.sources: - ext = os.path.splitext(str(s.sources[0]))[1] - if ext in fsuffixes: - return 1 - return 0 - -def _fortranEmitter(target, source, env): - node = source[0].rfile() - if not node.exists() and not node.is_derived(): - print("Could not locate " + str(node.name)) - return ([], []) - # This has to match the def_regex in the Fortran scanner - mod_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL)(\w+)""" - cre = re.compile(mod_regex,re.M) - # Retrieve all USE'd module names - modules = cre.findall(node.get_text_contents()) - # Remove unique items from the list - modules = SCons.Util.unique(modules) - # Convert module name to a .mod filename - suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source) - moddir = env.subst('$FORTRANMODDIR', target=target, source=source) - modules = [x.lower() + suffix for x in modules] - for m in modules: - target.append(env.fs.File(m, moddir)) - return (target, source) - -def FortranEmitter(target, source, env): - target, source = _fortranEmitter(target, source, env) - return SCons.Defaults.StaticObjectEmitter(target, source, env) - -def ShFortranEmitter(target, source, env): - target, source = _fortranEmitter(target, source, env) - return SCons.Defaults.SharedObjectEmitter(target, source, env) - -def ComputeFortranSuffixes(suffixes, ppsuffixes): - """suffixes are fortran source files, and ppsuffixes the ones to be - pre-processed. 
Both should be sequences, not strings.""" - assert len(suffixes) > 0 - s = suffixes[0] - sup = s.upper() - upper_suffixes = [_.upper() for _ in suffixes] - if SCons.Util.case_sensitive_suffixes(s, sup): - ppsuffixes.extend(upper_suffixes) - else: - suffixes.extend(upper_suffixes) - -def CreateDialectActions(dialect): - """Create dialect specific actions.""" - CompAction = SCons.Action.Action('$%sCOM ' % dialect, '$%sCOMSTR' % dialect) - CompPPAction = SCons.Action.Action('$%sPPCOM ' % dialect, '$%sPPCOMSTR' % dialect) - ShCompAction = SCons.Action.Action('$SH%sCOM ' % dialect, '$SH%sCOMSTR' % dialect) - ShCompPPAction = SCons.Action.Action('$SH%sPPCOM ' % dialect, '$SH%sPPCOMSTR' % dialect) - - return CompAction, CompPPAction, ShCompAction, ShCompPPAction - -def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0): - """Add dialect specific construction variables.""" - ComputeFortranSuffixes(suffixes, ppsuffixes) - - fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect) - - for suffix in suffixes + ppsuffixes: - SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan) - - env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes) - - compaction, compppaction, shcompaction, shcompppaction = \ - CreateDialectActions(dialect) - - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in suffixes: - static_obj.add_action(suffix, compaction) - shared_obj.add_action(suffix, shcompaction) - static_obj.add_emitter(suffix, FortranEmitter) - shared_obj.add_emitter(suffix, ShFortranEmitter) - - for suffix in ppsuffixes: - static_obj.add_action(suffix, compppaction) - shared_obj.add_action(suffix, shcompppaction) - static_obj.add_emitter(suffix, FortranEmitter) - shared_obj.add_emitter(suffix, ShFortranEmitter) - - if '%sFLAGS' % dialect not in env: - env['%sFLAGS' % dialect] = SCons.Util.CLVar('') - - if 'SH%sFLAGS' % dialect not in env: - env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect) - - # If a tool does not 
define fortran prefix/suffix for include path, use C ones - if 'INC%sPREFIX' % dialect not in env: - env['INC%sPREFIX' % dialect] = '$INCPREFIX' - - if 'INC%sSUFFIX' % dialect not in env: - env['INC%sSUFFIX' % dialect] = '$INCSUFFIX' - - env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect) - - if support_module == 1: - env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) - env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) - env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) - env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) - else: - env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) - env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) - env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) - env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) - -def add_fortran_to_env(env): - """Add Builders and construction variables for Fortran to an Environment.""" - try: - FortranSuffixes = env['FORTRANFILESUFFIXES'] - except KeyError: - FortranSuffixes = ['.f', '.for', '.ftn'] - - #print("Adding %s to fortran suffixes" % FortranSuffixes) - try: - FortranPPSuffixes = env['FORTRANPPFILESUFFIXES'] - except KeyError: - FortranPPSuffixes = ['.fpp', '.FPP'] - - DialectAddToEnv(env, "FORTRAN", FortranSuffixes, - FortranPPSuffixes, support_module = 1) - - 
env['FORTRANMODPREFIX'] = '' # like $LIBPREFIX - env['FORTRANMODSUFFIX'] = '.mod' # like $LIBSUFFIX - - env['FORTRANMODDIR'] = '' # where the compiler should place .mod files - env['FORTRANMODDIRPREFIX'] = '' # some prefix to $FORTRANMODDIR - similar to $INCPREFIX - env['FORTRANMODDIRSUFFIX'] = '' # some suffix to $FORTRANMODDIR - similar to $INCSUFFIX - env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' - -def add_f77_to_env(env): - """Add Builders and construction variables for f77 to an Environment.""" - try: - F77Suffixes = env['F77FILESUFFIXES'] - except KeyError: - F77Suffixes = ['.f77'] - - #print("Adding %s to f77 suffixes" % F77Suffixes) - try: - F77PPSuffixes = env['F77PPFILESUFFIXES'] - except KeyError: - F77PPSuffixes = [] - - DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes) - -def add_f90_to_env(env): - """Add Builders and construction variables for f90 to an Environment.""" - try: - F90Suffixes = env['F90FILESUFFIXES'] - except KeyError: - F90Suffixes = ['.f90'] - - #print("Adding %s to f90 suffixes" % F90Suffixes) - try: - F90PPSuffixes = env['F90PPFILESUFFIXES'] - except KeyError: - F90PPSuffixes = [] - - DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes, - support_module = 1) - -def add_f95_to_env(env): - """Add Builders and construction variables for f95 to an Environment.""" - try: - F95Suffixes = env['F95FILESUFFIXES'] - except KeyError: - F95Suffixes = ['.f95'] - - #print("Adding %s to f95 suffixes" % F95Suffixes) - try: - F95PPSuffixes = env['F95PPFILESUFFIXES'] - except KeyError: - F95PPSuffixes = [] - - DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes, - support_module = 1) - -def add_f03_to_env(env): - """Add Builders and construction variables for f03 to an Environment.""" - try: - F03Suffixes = env['F03FILESUFFIXES'] - except KeyError: - F03Suffixes = ['.f03'] - - #print("Adding %s to f95 suffixes" % F95Suffixes) - try: - F03PPSuffixes = 
env['F03PPFILESUFFIXES'] - except KeyError: - F03PPSuffixes = [] - - DialectAddToEnv(env, "F03", F03Suffixes, F03PPSuffixes, - support_module = 1) - -def add_f08_to_env(env): - """Add Builders and construction variables for f08 to an Environment.""" - try: - F08Suffixes = env['F08FILESUFFIXES'] - except KeyError: - F08Suffixes = ['.f08'] - - try: - F08PPSuffixes = env['F08PPFILESUFFIXES'] - except KeyError: - F08PPSuffixes = [] - - DialectAddToEnv(env, "F08", F08Suffixes, F08PPSuffixes, - support_module = 1) - -def add_all_to_env(env): - """Add builders and construction variables for all supported fortran - dialects.""" - add_fortran_to_env(env) - add_f77_to_env(env) - add_f90_to_env(env) - add_f95_to_env(env) - add_f03_to_env(env) - add_f08_to_env(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/GettextCommon.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/GettextCommon.py deleted file mode 100644 index f03c256c9c1..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/GettextCommon.py +++ /dev/null @@ -1,469 +0,0 @@ -"""SCons.Tool.GettextCommon module - -Used by several tools of `gettext` toolset. -""" - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Tool/GettextCommon.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Warnings -import re - - -############################################################################# -class XgettextToolWarning(SCons.Warnings.Warning): pass - - -class XgettextNotFound(XgettextToolWarning): pass - - -class MsginitToolWarning(SCons.Warnings.Warning): pass - - -class MsginitNotFound(MsginitToolWarning): pass - - -class MsgmergeToolWarning(SCons.Warnings.Warning): pass - - -class MsgmergeNotFound(MsgmergeToolWarning): pass - - -class MsgfmtToolWarning(SCons.Warnings.Warning): pass - - -class MsgfmtNotFound(MsgfmtToolWarning): pass - - -############################################################################# -SCons.Warnings.enableWarningClass(XgettextToolWarning) -SCons.Warnings.enableWarningClass(XgettextNotFound) -SCons.Warnings.enableWarningClass(MsginitToolWarning) -SCons.Warnings.enableWarningClass(MsginitNotFound) -SCons.Warnings.enableWarningClass(MsgmergeToolWarning) -SCons.Warnings.enableWarningClass(MsgmergeNotFound) -SCons.Warnings.enableWarningClass(MsgfmtToolWarning) -SCons.Warnings.enableWarningClass(MsgfmtNotFound) - - -############################################################################# - -############################################################################# -class _POTargetFactory(object): - """ A factory of `PO` target files. - - Factory defaults differ from these of `SCons.Node.FS.FS`. 
We set `precious` - (this is required by builders and actions gettext) and `noclean` flags by - default for all produced nodes. - """ - - def __init__(self, env, nodefault=True, alias=None, precious=True - , noclean=True): - """ Object constructor. - - **Arguments** - - - *env* (`SCons.Environment.Environment`) - - *nodefault* (`boolean`) - if `True`, produced nodes will be ignored - from default target `'.'` - - *alias* (`string`) - if provided, produced nodes will be automatically - added to this alias, and alias will be set as `AlwaysBuild` - - *precious* (`boolean`) - if `True`, the produced nodes will be set as - `Precious`. - - *noclen* (`boolean`) - if `True`, the produced nodes will be excluded - from `Clean`. - """ - self.env = env - self.alias = alias - self.precious = precious - self.noclean = noclean - self.nodefault = nodefault - - def _create_node(self, name, factory, directory=None, create=1): - """ Create node, and set it up to factory settings. """ - import SCons.Util - node = factory(name, directory, create) - node.set_noclean(self.noclean) - node.set_precious(self.precious) - if self.nodefault: - self.env.Ignore('.', node) - if self.alias: - self.env.AlwaysBuild(self.env.Alias(self.alias, node)) - return node - - def Entry(self, name, directory=None, create=1): - """ Create `SCons.Node.FS.Entry` """ - return self._create_node(name, self.env.fs.Entry, directory, create) - - def File(self, name, directory=None, create=1): - """ Create `SCons.Node.FS.File` """ - return self._create_node(name, self.env.fs.File, directory, create) - - -############################################################################# - -############################################################################# -_re_comment = re.compile(r'(#[^\n\r]+)$', re.M) -_re_lang = re.compile(r'([a-zA-Z0-9_]+)', re.M) - - -############################################################################# -def _read_linguas_from_files(env, linguas_files=None): - """ Parse `LINGUAS` file 
and return list of extracted languages """ - import SCons.Util - import SCons.Environment - global _re_comment - global _re_lang - if not SCons.Util.is_List(linguas_files) \ - and not SCons.Util.is_String(linguas_files) \ - and not isinstance(linguas_files, SCons.Node.FS.Base) \ - and linguas_files: - # If, linguas_files==True or such, then read 'LINGUAS' file. - linguas_files = ['LINGUAS'] - if linguas_files is None: - return [] - fnodes = env.arg2nodes(linguas_files) - linguas = [] - for fnode in fnodes: - contents = _re_comment.sub("", fnode.get_text_contents()) - ls = [l for l in _re_lang.findall(contents) if l] - linguas.extend(ls) - return linguas - - -############################################################################# - -############################################################################# -from SCons.Builder import BuilderBase - - -############################################################################# -class _POFileBuilder(BuilderBase): - """ `PO` file builder. - - This is multi-target single-source builder. In typical situation the source - is single `POT` file, e.g. `messages.pot`, and there are multiple `PO` - targets to be updated from this `POT`. We must run - `SCons.Builder.BuilderBase._execute()` separatelly for each target to track - dependencies separatelly for each target file. - - **NOTE**: if we call `SCons.Builder.BuilderBase._execute(.., target, ...)` - with target being list of all targets, all targets would be rebuilt each time - one of the targets from this list is missing. This would happen, for example, - when new language `ll` enters `LINGUAS_FILE` (at this moment there is no - `ll.po` file yet). To avoid this, we override - `SCons.Builder.BuilerBase._execute()` and call it separatelly for each - target. Here we also append to the target list the languages read from - `LINGUAS_FILE`. 
- """ - - # - # * The argument for overriding _execute(): We must use environment with - # builder overrides applied (see BuilderBase.__init__(). Here it comes for - # free. - # * The argument against using 'emitter': The emitter is called too late - # by BuilderBase._execute(). If user calls, for example: - # - # env.POUpdate(LINGUAS_FILE = 'LINGUAS') - # - # the builder throws error, because it is called with target=None, - # source=None and is trying to "generate" sources or target list first. - # If user calls - # - # env.POUpdate(['foo', 'baz'], LINGUAS_FILE = 'LINGUAS') - # - # the env.BuilderWrapper() calls our builder with target=None, - # source=['foo', 'baz']. The BuilderBase._execute() then splits execution - # and execute iterativelly (recursion) self._execute(None, source[i]). - # After that it calls emitter (which is quite too late). The emitter is - # also called in each iteration, what makes things yet worse. - def __init__(self, env, **kw): - if 'suffix' not in kw: - kw['suffix'] = '$POSUFFIX' - if 'src_suffix' not in kw: - kw['src_suffix'] = '$POTSUFFIX' - if 'src_builder' not in kw: - kw['src_builder'] = '_POTUpdateBuilder' - if 'single_source' not in kw: - kw['single_source'] = True - alias = None - if 'target_alias' in kw: - alias = kw['target_alias'] - del kw['target_alias'] - if 'target_factory' not in kw: - kw['target_factory'] = _POTargetFactory(env, alias=alias).File - BuilderBase.__init__(self, **kw) - - def _execute(self, env, target, source, *args, **kw): - """ Execute builder's actions. - - Here we append to `target` the languages read from `$LINGUAS_FILE` and - apply `SCons.Builder.BuilderBase._execute()` separatelly to each target. - The arguments and return value are same as for - `SCons.Builder.BuilderBase._execute()`. 
- """ - import SCons.Util - import SCons.Node - linguas_files = None - if 'LINGUAS_FILE' in env and env['LINGUAS_FILE']: - linguas_files = env['LINGUAS_FILE'] - # This prevents endless recursion loop (we'll be invoked once for - # each target appended here, we must not extend the list again). - env['LINGUAS_FILE'] = None - linguas = _read_linguas_from_files(env, linguas_files) - if SCons.Util.is_List(target): - target.extend(linguas) - elif target is not None: - target = [target] + linguas - else: - target = linguas - if not target: - # Let the SCons.BuilderBase to handle this patologic situation - return BuilderBase._execute(self, env, target, source, *args, **kw) - # The rest is ours - if not SCons.Util.is_List(target): - target = [target] - result = [] - for tgt in target: - r = BuilderBase._execute(self, env, [tgt], source, *args, **kw) - result.extend(r) - if linguas_files is not None: - env['LINGUAS_FILE'] = linguas_files - return SCons.Node.NodeList(result) - - -############################################################################# - -import SCons.Environment - - -############################################################################# -def _translate(env, target=None, source=SCons.Environment._null, *args, **kw): - """ Function for `Translate()` pseudo-builder """ - if target is None: target = [] - pot = env.POTUpdate(None, source, *args, **kw) - po = env.POUpdate(target, pot, *args, **kw) - return po - - -############################################################################# - -############################################################################# -class RPaths(object): - """ Callable object, which returns pathnames relative to SCons current - working directory. - - It seems like `SCons.Node.FS.Base.get_path()` returns absolute paths - for nodes that are outside of current working directory (`env.fs.getcwd()`). - Here, we often have `SConscript`, `POT` and `PO` files within `po/` - directory and source files (e.g. 
`*.c`) outside of it. When generating `POT` - template file, references to source files are written to `POT` template, so - a translator may later quickly jump to appropriate source file and line from - its `PO` editor (e.g. `poedit`). Relative paths in `PO` file are usually - interpreted by `PO` editor as paths relative to the place, where `PO` file - lives. The absolute paths would make resultant `POT` file nonportable, as - the references would be correct only on the machine, where `POT` file was - recently re-created. For such reason, we need a function, which always - returns relative paths. This is the purpose of `RPaths` callable object. - - The `__call__` method returns paths relative to current working directory, but - we assume, that *xgettext(1)* is run from the directory, where target file is - going to be created. - - Note, that this may not work for files distributed over several hosts or - across different drives on windows. We assume here, that single local - filesystem holds both source files and target `POT` templates. - - Intended use of `RPaths` - in `xgettext.py`:: - - def generate(env): - from GettextCommon import RPaths - ... - sources = '$( ${_concat( "", SOURCES, "", __env__, XgettextRPaths, TARGET, SOURCES)} $)' - env.Append( - ... - XGETTEXTCOM = 'XGETTEXT ... ' + sources, - ... - XgettextRPaths = RPaths(env) - ) - """ - - # NOTE: This callable object returns pathnames of dirs/files relative to - # current working directory. The pathname remains relative also for entries - # that are outside of current working directory (node, that - # SCons.Node.FS.File and siblings return absolute path in such case). For - # simplicity we compute path relative to current working directory, this - # seems be enough for our purposes (don't need TARGET variable and - # SCons.Defaults.Variable_Caller stuff). - - def __init__(self, env): - """ Initialize `RPaths` callable object. 
- - **Arguments**: - - - *env* - a `SCons.Environment.Environment` object, defines *current - working dir*. - """ - self.env = env - - # FIXME: I'm not sure, how it should be implemented (what the *args are in - # general, what is **kw). - def __call__(self, nodes, *args, **kw): - """ Return nodes' paths (strings) relative to current working directory. - - **Arguments**: - - - *nodes* ([`SCons.Node.FS.Base`]) - list of nodes. - - *args* - currently unused. - - *kw* - currently unused. - - **Returns**: - - - Tuple of strings, which represent paths relative to current working - directory (for given environment). - """ - import os - import SCons.Node.FS - rpaths = () - cwd = self.env.fs.getcwd().get_abspath() - for node in nodes: - rpath = None - if isinstance(node, SCons.Node.FS.Base): - rpath = os.path.relpath(node.get_abspath(), cwd) - # FIXME: Other types possible here? - if rpath is not None: - rpaths += (rpath,) - return rpaths - - -############################################################################# - -############################################################################# -def _init_po_files(target, source, env): - """ Action function for `POInit` builder. """ - nop = lambda target, source, env: 0 - if 'POAUTOINIT' in env: - autoinit = env['POAUTOINIT'] - else: - autoinit = False - # Well, if everything outside works well, this loop should do single - # iteration. Otherwise we are rebuilding all the targets even, if just - # one has changed (but is this our fault?). - for tgt in target: - if not tgt.exists(): - if autoinit: - action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR') - else: - msg = 'File ' + repr(str(tgt)) + ' does not exist. 
' \ - + 'If you are a translator, you can create it through: \n' \ - + '$MSGINITCOM' - action = SCons.Action.Action(nop, msg) - status = action([tgt], source, env) - if status: return status - return 0 - - -############################################################################# - -############################################################################# -def _detect_xgettext(env): - """ Detects *xgettext(1)* binary """ - if 'XGETTEXT' in env: - return env['XGETTEXT'] - xgettext = env.Detect('xgettext') - if xgettext: - return xgettext - raise SCons.Errors.StopError(XgettextNotFound, "Could not detect xgettext") - return None - - -############################################################################# -def _xgettext_exists(env): - return _detect_xgettext(env) - - -############################################################################# - -############################################################################# -def _detect_msginit(env): - """ Detects *msginit(1)* program. """ - if 'MSGINIT' in env: - return env['MSGINIT'] - msginit = env.Detect('msginit') - if msginit: - return msginit - raise SCons.Errors.StopError(MsginitNotFound, "Could not detect msginit") - return None - - -############################################################################# -def _msginit_exists(env): - return _detect_msginit(env) - - -############################################################################# - -############################################################################# -def _detect_msgmerge(env): - """ Detects *msgmerge(1)* program. 
""" - if 'MSGMERGE' in env: - return env['MSGMERGE'] - msgmerge = env.Detect('msgmerge') - if msgmerge: - return msgmerge - raise SCons.Errors.StopError(MsgmergeNotFound, "Could not detect msgmerge") - return None - - -############################################################################# -def _msgmerge_exists(env): - return _detect_msgmerge(env) - - -############################################################################# - -############################################################################# -def _detect_msgfmt(env): - """ Detects *msgmfmt(1)* program. """ - if 'MSGFMT' in env: - return env['MSGFMT'] - msgfmt = env.Detect('msgfmt') - if msgfmt: - return msgfmt - raise SCons.Errors.StopError(MsgfmtNotFound, "Could not detect msgfmt") - return None - - -############################################################################# -def _msgfmt_exists(env): - return _detect_msgfmt(env) - - -############################################################################# - -############################################################################# -def tool_list(platform, env): - """ List tools that shall be generated by top-level `gettext` tool """ - return ['xgettext', 'msginit', 'msgmerge', 'msgfmt'] - -############################################################################# diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/JavaCommon.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/JavaCommon.py deleted file mode 100644 index f9e36849104..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/JavaCommon.py +++ /dev/null @@ -1,512 +0,0 @@ -"""SCons.Tool.JavaCommon - -Stuff for processing Java. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/JavaCommon.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path -import re -import glob - -java_parsing = 1 - -default_java_version = '1.4' - -# a switch for which jdk versions to use the Scope state for smarter -# anonymous inner class parsing. -scopeStateVersions = ('1.8') - -# Glob patterns for use in finding where the JDK is. -# These are pairs, *dir_glob used in the general case, -# *version_dir_glob if matching only a specific version. -# For now only used for Windows. -java_win32_dir_glob = 'C:/Program Files*/Java/jdk*/bin' -# On windows, since Java 9, there is a dash between 'jdk' and the version -# string that wasn't there before. this glob should catch either way. 
-java_win32_version_dir_glob = 'C:/Program Files*/Java/jdk*%s*/bin' - -# Glob patterns for use in finding where the JDK headers are. -# These are pairs, *dir_glob used in the general case, -# *version_dir_glob if matching only a specific version. -java_macos_include_dir_glob = '/System/Library/Frameworks/JavaVM.framework/Headers/' -java_macos_version_include_dir_glob = '/System/Library/Frameworks/JavaVM.framework/Versions/%s*/Headers/' - -java_linux_include_dirs_glob = [ - '/usr/lib/jvm/default-java/include', - '/usr/lib/jvm/java-*/include' -] -# Need to match path like below (from Centos 7) -# /usr/lib/jvm/java-1.8.0-openjdk-1.8.0.191.b12-0.el7_5.x86_64/include/ -java_linux_version_include_dirs_glob = [ - '/usr/lib/jvm/java-*-sun-%s*/include', - '/usr/lib/jvm/java-%s*-openjdk*/include', - '/usr/java/jdk%s*/include' -] - -if java_parsing: - # Parse Java files for class names. - # - # This is a really cool parser from Charles Crain - # that finds appropriate class names in Java source. - - # A regular expression that will find, in a java file: - # newlines; - # double-backslashes; - # a single-line comment "//"; - # single or double quotes preceeded by a backslash; - # single quotes, double quotes, open or close braces, semi-colons, - # periods, open or close parentheses; - # floating-point numbers; - # any alphanumeric token (keyword, class name, specifier); - # any alphanumeric token surrounded by angle brackets (generics); - # the multi-line comment begin and end tokens /* and */; - # array declarations "[]". 
- _reToken = re.compile(r'(\n|\\\\|//|\\[\'"]|[\'"\{\}\;\.\(\)]|' + - r'\d*\.\d*|[A-Za-z_][\w\$\.]*|<[A-Za-z_]\w+>|' + - r'/\*|\*/|\[\])') - - - class OuterState(object): - """The initial state for parsing a Java file for classes, - interfaces, and anonymous inner classes.""" - - def __init__(self, version=default_java_version): - - if version not in ('1.1', '1.2', '1.3', '1.4', '1.5', '1.6', '1.7', - '1.8', '5', '6', '9.0', '10.0', '11.0', '12.0'): - msg = "Java version %s not supported" % version - raise NotImplementedError(msg) - - self.version = version - self.listClasses = [] - self.listOutputs = [] - self.stackBrackets = [] - self.brackets = 0 - self.nextAnon = 1 - self.localClasses = [] - self.stackAnonClassBrackets = [] - self.anonStacksStack = [[0]] - self.package = None - - def trace(self): - pass - - def __getClassState(self): - try: - return self.classState - except AttributeError: - ret = ClassState(self) - self.classState = ret - return ret - - def __getPackageState(self): - try: - return self.packageState - except AttributeError: - ret = PackageState(self) - self.packageState = ret - return ret - - def __getAnonClassState(self): - try: - return self.anonState - except AttributeError: - self.outer_state = self - ret = SkipState(1, AnonClassState(self)) - self.anonState = ret - return ret - - def __getSkipState(self): - try: - return self.skipState - except AttributeError: - ret = SkipState(1, self) - self.skipState = ret - return ret - - def _getAnonStack(self): - return self.anonStacksStack[-1] - - def openBracket(self): - self.brackets = self.brackets + 1 - - def closeBracket(self): - self.brackets = self.brackets - 1 - if len(self.stackBrackets) and \ - self.brackets == self.stackBrackets[-1]: - self.listOutputs.append('$'.join(self.listClasses)) - self.localClasses.pop() - self.listClasses.pop() - self.anonStacksStack.pop() - self.stackBrackets.pop() - if len(self.stackAnonClassBrackets) and \ - self.brackets == self.stackAnonClassBrackets[-1] and 
\ - self.version not in scopeStateVersions: - self._getAnonStack().pop() - self.stackAnonClassBrackets.pop() - - def parseToken(self, token): - if token[:2] == '//': - return IgnoreState('\n', self) - elif token == '/*': - return IgnoreState('*/', self) - elif token == '{': - self.openBracket() - elif token == '}': - self.closeBracket() - elif token in ['"', "'"]: - return IgnoreState(token, self) - elif token == "new": - # anonymous inner class - if len(self.listClasses) > 0: - return self.__getAnonClassState() - return self.__getSkipState() # Skip the class name - elif token in ['class', 'interface', 'enum']: - if len(self.listClasses) == 0: - self.nextAnon = 1 - self.stackBrackets.append(self.brackets) - return self.__getClassState() - elif token == 'package': - return self.__getPackageState() - elif token == '.': - # Skip the attribute, it might be named "class", in which - # case we don't want to treat the following token as - # an inner class name... - return self.__getSkipState() - return self - - def addAnonClass(self): - """Add an anonymous inner class""" - if self.version in ('1.1', '1.2', '1.3', '1.4'): - clazz = self.listClasses[0] - self.listOutputs.append('%s$%d' % (clazz, self.nextAnon)) - elif self.version in ('1.5', '1.6', '1.7', '1.8', '5', '6', '9.0', '10.0', '11.0', '12.0'): - self.stackAnonClassBrackets.append(self.brackets) - className = [] - className.extend(self.listClasses) - self._getAnonStack()[-1] = self._getAnonStack()[-1] + 1 - for anon in self._getAnonStack(): - className.append(str(anon)) - self.listOutputs.append('$'.join(className)) - - self.nextAnon = self.nextAnon + 1 - self._getAnonStack().append(0) - - def setPackage(self, package): - self.package = package - - - class ScopeState(object): - """ - A state that parses code within a scope normally, - within the confines of a scope. 
- """ - - def __init__(self, old_state): - self.outer_state = old_state.outer_state - self.old_state = old_state - self.brackets = 0 - - def __getClassState(self): - try: - return self.classState - except AttributeError: - ret = ClassState(self) - self.classState = ret - return ret - - def __getAnonClassState(self): - try: - return self.anonState - except AttributeError: - ret = SkipState(1, AnonClassState(self)) - self.anonState = ret - return ret - - def __getSkipState(self): - try: - return self.skipState - except AttributeError: - ret = SkipState(1, self) - self.skipState = ret - return ret - - def openBracket(self): - self.brackets = self.brackets + 1 - - def closeBracket(self): - self.brackets = self.brackets - 1 - - def parseToken(self, token): - # if self.brackets == 0: - # return self.old_state.parseToken(token) - if token[:2] == '//': - return IgnoreState('\n', self) - elif token == '/*': - return IgnoreState('*/', self) - elif token == '{': - self.openBracket() - elif token == '}': - self.closeBracket() - if self.brackets == 0: - self.outer_state._getAnonStack().pop() - return self.old_state - elif token in ['"', "'"]: - return IgnoreState(token, self) - elif token == "new": - # anonymous inner class - return self.__getAnonClassState() - elif token == '.': - # Skip the attribute, it might be named "class", in which - # case we don't want to treat the following token as - # an inner class name... - return self.__getSkipState() - return self - - - class AnonClassState(object): - """A state that looks for anonymous inner classes.""" - - def __init__(self, old_state): - # outer_state is always an instance of OuterState - self.outer_state = old_state.outer_state - self.old_state = old_state - self.brace_level = 0 - - def parseToken(self, token): - # This is an anonymous class if and only if the next - # non-whitespace token is a bracket. Everything between - # braces should be parsed as normal java code. 
- if token[:2] == '//': - return IgnoreState('\n', self) - elif token == '/*': - return IgnoreState('*/', self) - elif token == '\n': - return self - elif token[0] == '<' and token[-1] == '>': - return self - elif token == '(': - self.brace_level = self.brace_level + 1 - return self - if self.brace_level > 0: - if token == 'new': - # look further for anonymous inner class - return SkipState(1, AnonClassState(self)) - elif token in ['"', "'"]: - return IgnoreState(token, self) - elif token == ')': - self.brace_level = self.brace_level - 1 - return self - if token == '{': - self.outer_state.addAnonClass() - if self.outer_state.version in scopeStateVersions: - return ScopeState(old_state=self.old_state).parseToken(token) - return self.old_state.parseToken(token) - - - class SkipState(object): - """A state that will skip a specified number of tokens before - reverting to the previous state.""" - - def __init__(self, tokens_to_skip, old_state): - self.tokens_to_skip = tokens_to_skip - self.old_state = old_state - - def parseToken(self, token): - self.tokens_to_skip = self.tokens_to_skip - 1 - if self.tokens_to_skip < 1: - return self.old_state - return self - - - class ClassState(object): - """A state we go into when we hit a class or interface keyword.""" - - def __init__(self, outer_state): - # outer_state is always an instance of OuterState - self.outer_state = outer_state - - def parseToken(self, token): - # the next non-whitespace token should be the name of the class - if token == '\n': - return self - # If that's an inner class which is declared in a method, it - # requires an index prepended to the class-name, e.g. 
- # 'Foo$1Inner' - # https://github.com/SCons/scons/issues/2087 - if self.outer_state.localClasses and \ - self.outer_state.stackBrackets[-1] > \ - self.outer_state.stackBrackets[-2] + 1: - locals = self.outer_state.localClasses[-1] - try: - idx = locals[token] - locals[token] = locals[token] + 1 - except KeyError: - locals[token] = 1 - token = str(locals[token]) + token - self.outer_state.localClasses.append({}) - self.outer_state.listClasses.append(token) - self.outer_state.anonStacksStack.append([0]) - return self.outer_state - - - class IgnoreState(object): - """A state that will ignore all tokens until it gets to a - specified token.""" - - def __init__(self, ignore_until, old_state): - self.ignore_until = ignore_until - self.old_state = old_state - - def parseToken(self, token): - if self.ignore_until == token: - return self.old_state - return self - - - class PackageState(object): - """The state we enter when we encounter the package keyword. - We assume the next token will be the package name.""" - - def __init__(self, outer_state): - # outer_state is always an instance of OuterState - self.outer_state = outer_state - - def parseToken(self, token): - self.outer_state.setPackage(token) - return self.outer_state - - - def parse_java_file(fn, version=default_java_version): - with open(fn, 'r') as f: - data = f.read() - return parse_java(data, version) - - - def parse_java(contents, version=default_java_version, trace=None): - """Parse a .java file and return a double of package directory, - plus a list of .class files that compiling that .java file will - produce""" - package = None - initial = OuterState(version) - currstate = initial - for token in _reToken.findall(contents): - # The regex produces a bunch of groups, but only one will - # have anything in it. 
- currstate = currstate.parseToken(token) - if trace: trace(token, currstate) - if initial.package: - package = initial.package.replace('.', os.sep) - return (package, initial.listOutputs) - -else: - # Don't actually parse Java files for class names. - # - # We might make this a configurable option in the future if - # Java-file parsing takes too long (although it shouldn't relative - # to how long the Java compiler itself seems to take...). - - def parse_java_file(fn): - """ "Parse" a .java file. - - This actually just splits the file name, so the assumption here - is that the file name matches the public class name, and that - the path to the file is the same as the package name. - """ - return os.path.split(fn) - - -def get_java_install_dirs(platform, version=None): - """ - Find the java jdk installation directories. - - This list is intended to supply as "default paths" for use when looking - up actual java binaries. - - :param platform: selector for search algorithm. - :param version: If specified, only look for java sdk's of this version - :return: list of default paths for java. - """ - - paths = [] - if platform == 'win32': - if version: - paths = glob.glob(java_win32_version_dir_glob % version) - else: - paths = glob.glob(java_win32_dir_glob) - else: - # other platforms, do nothing for now - pass - - return sorted(paths) - - -def get_java_include_paths(env, javac, version): - """ - Find java include paths for JNI building. - - :param env: construction environment, used to extract platform. - :param javac: path to detected javac. - :return: list of paths. - """ - - paths = [] - if not javac: - # there are no paths if we've not detected javac. 
- pass - elif env['PLATFORM'] == 'win32': - # on Windows, we have the right path to javac, so look locally - javac_bin_dir = os.path.dirname(javac) - java_inc_dir = os.path.normpath(os.path.join(javac_bin_dir, '..', 'include')) - paths = [java_inc_dir, os.path.join(java_inc_dir, 'win32')] - elif env['PLATFORM'] == 'darwin': - if not version: - paths = [java_macos_include_dir_glob] - else: - paths = sorted(glob.glob(java_macos_version_include_dir_glob % version)) - else: - base_paths = [] - if not version: - for p in java_linux_include_dirs_glob: - base_paths.extend(glob.glob(p)) - else: - for p in java_linux_version_include_dirs_glob: - base_paths.extend(glob.glob(p % version)) - - for p in base_paths: - paths.extend([p, os.path.join(p, 'linux')]) - - # print("PATHS:%s"%paths) - return paths - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/__init__.py deleted file mode 100644 index c184aad0496..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/MSCommon/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """ -Common functions for Microsoft Visual Studio and Visual C/C++. -""" - -import copy -import os -import re -import subprocess - -import SCons.Errors -import SCons.Platform.win32 -import SCons.Util - -from SCons.Tool.MSCommon.sdk import mssdk_exists, \ - mssdk_setup_env - -from SCons.Tool.MSCommon.vc import msvc_exists, \ - msvc_setup_env, \ - msvc_setup_env_once, \ - msvc_version_to_maj_min - -from SCons.Tool.MSCommon.vs import get_default_version, \ - get_vs_by_version, \ - merge_default_version, \ - msvs_exists, \ - query_versions - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/arch.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/arch.py deleted file mode 100644 index 75e72647d78..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/arch.py +++ /dev/null @@ -1,67 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell 
copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/MSCommon/arch.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """Module to define supported Windows chip architectures. -""" - -import os - -class ArchDefinition(object): - """ - A class for defining architecture-specific settings and logic. 
- """ - def __init__(self, arch, synonyms=[]): - self.arch = arch - self.synonyms = synonyms - -SupportedArchitectureList = [ - ArchDefinition( - 'x86', - ['i386', 'i486', 'i586', 'i686'], - ), - - ArchDefinition( - 'x86_64', - ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'], - ), - - ArchDefinition( - 'ia64', - ['IA64'], - ), - - ArchDefinition( - 'arm', - ['ARM'], - ), - -] - -SupportedArchitectureMap = {} -for a in SupportedArchitectureList: - SupportedArchitectureMap[a.arch] = a - for s in a.synonyms: - SupportedArchitectureMap[s] = a - diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/common.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/common.py deleted file mode 100644 index 6f979e65377..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/common.py +++ /dev/null @@ -1,284 +0,0 @@ -""" -Common helper functions for working with the Microsoft tool chain. -""" -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/MSCommon/common.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import copy -import json -import os -import re -import subprocess -import sys - -import SCons.Util - -# SCONS_MSCOMMON_DEBUG is internal-use so undocumented: -# set to '-' to print to console, else set to filename to log to -LOGFILE = os.environ.get('SCONS_MSCOMMON_DEBUG') -if LOGFILE == '-': - def debug(message): - print(message) -elif LOGFILE: - import logging - logging.basicConfig( - format='%(relativeCreated)05dms:pid%(process)05d:MSCommon/%(filename)s:%(message)s', - filename=LOGFILE, - level=logging.DEBUG) - debug = logging.getLogger(name=__name__).debug -else: - debug = lambda x: None - - -# SCONS_CACHE_MSVC_CONFIG is public, and is documented. 
-CONFIG_CACHE = os.environ.get('SCONS_CACHE_MSVC_CONFIG') -if CONFIG_CACHE in ('1', 'true', 'True'): - CONFIG_CACHE = os.path.join(os.path.expanduser('~'), '.scons_msvc_cache') - -def read_script_env_cache(): - """ fetch cached msvc env vars if requested, else return empty dict """ - envcache = {} - if CONFIG_CACHE: - try: - with open(CONFIG_CACHE, 'r') as f: - envcache = json.load(f) - #TODO can use more specific FileNotFoundError when py2 dropped - except IOError: - # don't fail if no cache file, just proceed without it - pass - return envcache - - -def write_script_env_cache(cache): - """ write out cache of msvc env vars if requested """ - if CONFIG_CACHE: - try: - with open(CONFIG_CACHE, 'w') as f: - json.dump(cache, f, indent=2) - except TypeError: - # data can't serialize to json, don't leave partial file - os.remove(CONFIG_CACHE) - except IOError: - # can't write the file, just skip - pass - - -_is_win64 = None - -def is_win64(): - """Return true if running on windows 64 bits. - - Works whether python itself runs in 64 bits or 32 bits.""" - # Unfortunately, python does not provide a useful way to determine - # if the underlying Windows OS is 32-bit or 64-bit. Worse, whether - # the Python itself is 32-bit or 64-bit affects what it returns, - # so nothing in sys.* or os.* help. - - # Apparently the best solution is to use env vars that Windows - # sets. If PROCESSOR_ARCHITECTURE is not x86, then the python - # process is running in 64 bit mode (on a 64-bit OS, 64-bit - # hardware, obviously). - # If this python is 32-bit but the OS is 64, Windows will set - # ProgramW6432 and PROCESSOR_ARCHITEW6432 to non-null. - # (Checking for HKLM\Software\Wow6432Node in the registry doesn't - # work, because some 32-bit installers create it.) - global _is_win64 - if _is_win64 is None: - # I structured these tests to make it easy to add new ones or - # add exceptions in the future, because this is a bit fragile. 
- _is_win64 = False - if os.environ.get('PROCESSOR_ARCHITECTURE', 'x86') != 'x86': - _is_win64 = True - if os.environ.get('PROCESSOR_ARCHITEW6432'): - _is_win64 = True - if os.environ.get('ProgramW6432'): - _is_win64 = True - return _is_win64 - - -def read_reg(value, hkroot=SCons.Util.HKEY_LOCAL_MACHINE): - return SCons.Util.RegGetValue(hkroot, value)[0] - -def has_reg(value): - """Return True if the given key exists in HKEY_LOCAL_MACHINE, False - otherwise.""" - try: - SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, value) - ret = True - except SCons.Util.WinError: - ret = False - return ret - -# Functions for fetching environment variable settings from batch files. - -def normalize_env(env, keys, force=False): - """Given a dictionary representing a shell environment, add the variables - from os.environ needed for the processing of .bat files; the keys are - controlled by the keys argument. - - It also makes sure the environment values are correctly encoded. - - If force=True, then all of the key values that exist are copied - into the returned dictionary. If force=false, values are only - copied if the key does not already exist in the copied dictionary. - - Note: the environment is copied.""" - normenv = {} - if env: - for k in list(env.keys()): - normenv[k] = copy.deepcopy(env[k]) - - for k in keys: - if k in os.environ and (force or k not in normenv): - normenv[k] = os.environ[k] - - # This shouldn't be necessary, since the default environment should include system32, - # but keep this here to be safe, since it's needed to find reg.exe which the MSVC - # bat scripts use. 
- sys32_dir = os.path.join(os.environ.get("SystemRoot", - os.environ.get("windir", r"C:\Windows\system32")), - "System32") - - if sys32_dir not in normenv['PATH']: - normenv['PATH'] = normenv['PATH'] + os.pathsep + sys32_dir - - # Without Wbem in PATH, vcvarsall.bat has a "'wmic' is not recognized" - # error starting with Visual Studio 2017, although the script still - # seems to work anyway. - sys32_wbem_dir = os.path.join(sys32_dir, 'Wbem') - if sys32_wbem_dir not in normenv['PATH']: - normenv['PATH'] = normenv['PATH'] + os.pathsep + sys32_wbem_dir - - debug("PATH: %s"%normenv['PATH']) - - return normenv - -def get_output(vcbat, args = None, env = None): - """Parse the output of given bat file, with given args.""" - - if env is None: - # Create a blank environment, for use in launching the tools - env = SCons.Environment.Environment(tools=[]) - - # TODO: This is a hard-coded list of the variables that (may) need - # to be imported from os.environ[] for v[sc]*vars*.bat file - # execution to work. This list should really be either directly - # controlled by vc.py, or else derived from the common_tools_var - # settings in vs.py. - vs_vc_vars = [ - 'COMSPEC', - # VS100 and VS110: Still set, but modern MSVC setup scripts will - # discard these if registry has values. However Intel compiler setup - # script still requires these as of 2013/2014. 
- 'VS140COMNTOOLS', - 'VS120COMNTOOLS', - 'VS110COMNTOOLS', - 'VS100COMNTOOLS', - 'VS90COMNTOOLS', - 'VS80COMNTOOLS', - 'VS71COMNTOOLS', - 'VS70COMNTOOLS', - 'VS60COMNTOOLS', - ] - env['ENV'] = normalize_env(env['ENV'], vs_vc_vars, force=False) - - if args: - debug("Calling '%s %s'" % (vcbat, args)) - popen = SCons.Action._subproc(env, - '"%s" %s & set' % (vcbat, args), - stdin='devnull', - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - else: - debug("Calling '%s'" % vcbat) - popen = SCons.Action._subproc(env, - '"%s" & set' % vcbat, - stdin='devnull', - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - # Use the .stdout and .stderr attributes directly because the - # .communicate() method uses the threading module on Windows - # and won't work under Pythons not built with threading. - with popen.stdout: - stdout = popen.stdout.read() - with popen.stderr: - stderr = popen.stderr.read() - - # Extra debug logic, uncomment if necessary -# debug('get_output():stdout:%s'%stdout) -# debug('get_output():stderr:%s'%stderr) - - if stderr: - # TODO: find something better to do with stderr; - # this at least prevents errors from getting swallowed. 
- sys.stderr.write(stderr) - if popen.wait() != 0: - raise IOError(stderr.decode("mbcs")) - - output = stdout.decode("mbcs") - return output - -KEEPLIST = ("INCLUDE", "LIB", "LIBPATH", "PATH", 'VSCMD_ARG_app_plat') -def parse_output(output, keep=KEEPLIST): - """ - Parse output from running visual c++/studios vcvarsall.bat and running set - To capture the values listed in keep - """ - - # dkeep is a dict associating key: path_list, where key is one item from - # keep, and path_list the associated list of paths - dkeep = dict([(i, []) for i in keep]) - - # rdk will keep the regex to match the .bat file output line starts - rdk = {} - for i in keep: - rdk[i] = re.compile('%s=(.*)' % i, re.I) - - def add_env(rmatch, key, dkeep=dkeep): - path_list = rmatch.group(1).split(os.pathsep) - for path in path_list: - # Do not add empty paths (when a var ends with ;) - if path: - # XXX: For some reason, VC98 .bat file adds "" around the PATH - # values, and it screws up the environment later, so we strip - # it. 
- path = path.strip('"') - dkeep[key].append(str(path)) - - for line in output.splitlines(): - for k, value in rdk.items(): - match = value.match(line) - if match: - add_env(match, k) - - return dkeep - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/netframework.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/netframework.py deleted file mode 100644 index c2d5d7cd1be..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/netframework.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/MSCommon/netframework.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """ -""" - -import os -import re -import SCons.Util - -from .common import read_reg, debug - -# Original value recorded by dcournapeau -_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot' -# On SGK's system -_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\Microsoft SDKs\.NETFramework\v2.0\InstallationFolder' - -def find_framework_root(): - # XXX: find it from environment (FrameworkDir) - try: - froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT) - debug("Found framework install root in registry: {}".format(froot)) - except SCons.Util.WinError as e: - debug("Could not read reg key {}".format(_FRAMEWORKDIR_HKEY_ROOT)) - return None - - if not os.path.exists(froot): - debug("{} not found on fs".format(froot)) - return None - - return froot - -def query_versions(): - froot = find_framework_root() - if froot: - contents = os.listdir(froot) - - l = re.compile('v[0-9]+.*') - versions = [e for e in contents if l.match(e)] - - def versrt(a,b): - # since version numbers aren't really floats... - aa = a[1:] - bb = b[1:] - aal = aa.split('.') - bbl = bb.split('.') - # sequence comparison in python is lexicographical - # which is exactly what we want. - # Note we sort backwards so the highest version is first. 
- return (aal > bbl) - (aal < bbl) - - versions.sort(versrt) - else: - versions = [] - - return versions - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/sdk.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/sdk.py deleted file mode 100644 index 811ee24d1c6..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/sdk.py +++ /dev/null @@ -1,413 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -__revision__ = "src/engine/SCons/Tool/MSCommon/sdk.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """Module to detect the Platform/Windows SDK - -PSDK 2003 R1 is the earliest version detected. -""" - -import os - -import SCons.Errors -import SCons.Util - -from . 
import common - -debug = common.debug - -# SDK Checks. This is of course a mess as everything else on MS platforms. Here -# is what we do to detect the SDK: -# -# For Windows SDK >= 6.0: just look into the registry entries: -# HKLM\Software\Microsoft\Microsoft SDKs\Windows -# All the keys in there are the available versions. -# -# For Platform SDK before 6.0 (2003 server R1 and R2, etc...), there does not -# seem to be any sane registry key, so the precise location is hardcoded. -# -# For versions below 2003R1, it seems the PSDK is included with Visual Studio? -# -# Also, per the following: -# http://benjamin.smedbergs.us/blog/tag/atl/ -# VC++ Professional comes with the SDK, VC++ Express does not. - -# Location of the SDK (checked for 6.1 only) -_CURINSTALLED_SDK_HKEY_ROOT = \ - r"Software\Microsoft\Microsoft SDKs\Windows\CurrentInstallFolder" - - -class SDKDefinition(object): - """ - An abstract base class for trying to find installed SDK directories. - """ - def __init__(self, version, **kw): - self.version = version - self.__dict__.update(kw) - - def find_sdk_dir(self): - """Try to find the MS SDK from the registry. - - Return None if failed or the directory does not exist. 
- """ - if not SCons.Util.can_read_reg: - debug('find_sdk_dir(): can not read registry') - return None - - hkey = self.HKEY_FMT % self.hkey_data - debug('find_sdk_dir(): checking registry:{}'.format(hkey)) - - try: - sdk_dir = common.read_reg(hkey) - except SCons.Util.WinError as e: - debug('find_sdk_dir(): no SDK registry key {}'.format(repr(hkey))) - return None - - debug('find_sdk_dir(): Trying SDK Dir: {}'.format(sdk_dir)) - - if not os.path.exists(sdk_dir): - debug('find_sdk_dir(): {} not on file system'.format(sdk_dir)) - return None - - ftc = os.path.join(sdk_dir, self.sanity_check_file) - if not os.path.exists(ftc): - debug("find_sdk_dir(): sanity check {} not found".format(ftc)) - return None - - return sdk_dir - - def get_sdk_dir(self): - """Return the MSSSDK given the version string.""" - try: - return self._sdk_dir - except AttributeError: - sdk_dir = self.find_sdk_dir() - self._sdk_dir = sdk_dir - return sdk_dir - - def get_sdk_vc_script(self,host_arch, target_arch): - """ Return the script to initialize the VC compiler installed by SDK - """ - - if (host_arch == 'amd64' and target_arch == 'x86'): - # No cross tools needed compiling 32 bits on 64 bit machine - host_arch=target_arch - - arch_string=target_arch - if (host_arch != target_arch): - arch_string='%s_%s'%(host_arch,target_arch) - - debug("get_sdk_vc_script():arch_string:%s host_arch:%s target_arch:%s"%(arch_string, - host_arch, - target_arch)) - file=self.vc_setup_scripts.get(arch_string,None) - debug("get_sdk_vc_script():file:%s"%file) - return file - -class WindowsSDK(SDKDefinition): - """ - A subclass for trying to find installed Windows SDK directories. - """ - HKEY_FMT = r'Software\Microsoft\Microsoft SDKs\Windows\v%s\InstallationFolder' - def __init__(self, *args, **kw): - SDKDefinition.__init__(self, *args, **kw) - self.hkey_data = self.version - -class PlatformSDK(SDKDefinition): - """ - A subclass for trying to find installed Platform SDK directories. 
- """ - HKEY_FMT = r'Software\Microsoft\MicrosoftSDK\InstalledSDKS\%s\Install Dir' - def __init__(self, *args, **kw): - SDKDefinition.__init__(self, *args, **kw) - self.hkey_data = self.uuid - -# -# The list of VC initialization scripts installed by the SDK -# These should be tried if the vcvarsall.bat TARGET_ARCH fails -preSDK61VCSetupScripts = { 'x86' : r'bin\vcvars32.bat', - 'amd64' : r'bin\vcvarsamd64.bat', - 'x86_amd64': r'bin\vcvarsx86_amd64.bat', - 'x86_ia64' : r'bin\vcvarsx86_ia64.bat', - 'ia64' : r'bin\vcvarsia64.bat'} - -SDK61VCSetupScripts = {'x86' : r'bin\vcvars32.bat', - 'amd64' : r'bin\amd64\vcvarsamd64.bat', - 'x86_amd64': r'bin\x86_amd64\vcvarsx86_amd64.bat', - 'x86_ia64' : r'bin\x86_ia64\vcvarsx86_ia64.bat', - 'ia64' : r'bin\ia64\vcvarsia64.bat'} - -SDK70VCSetupScripts = { 'x86' : r'bin\vcvars32.bat', - 'amd64' : r'bin\vcvars64.bat', - 'x86_amd64': r'bin\vcvarsx86_amd64.bat', - 'x86_ia64' : r'bin\vcvarsx86_ia64.bat', - 'ia64' : r'bin\vcvarsia64.bat'} - -SDK100VCSetupScripts = {'x86' : r'bin\vcvars32.bat', - 'amd64' : r'bin\vcvars64.bat', - 'x86_amd64': r'bin\x86_amd64\vcvarsx86_amd64.bat', - 'x86_arm' : r'bin\x86_arm\vcvarsx86_arm.bat'} - - -# The list of support SDKs which we know how to detect. -# -# The first SDK found in the list is the one used by default if there -# are multiple SDKs installed. Barring good reasons to the contrary, -# this means we should list SDKs from most recent to oldest. -# -# If you update this list, update the documentation in Tool/mssdk.xml. 
-SupportedSDKList = [ - WindowsSDK('10.0A', - sanity_check_file=r'bin\SetEnv.Cmd', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = SDK70VCSetupScripts, - ), - WindowsSDK('10.0', - sanity_check_file=r'bin\SetEnv.Cmd', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = SDK70VCSetupScripts, - ), - WindowsSDK('7.1', - sanity_check_file=r'bin\SetEnv.Cmd', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = SDK70VCSetupScripts, - ), - WindowsSDK('7.0A', - sanity_check_file=r'bin\SetEnv.Cmd', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = SDK70VCSetupScripts, - ), - WindowsSDK('7.0', - sanity_check_file=r'bin\SetEnv.Cmd', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = SDK70VCSetupScripts, - ), - WindowsSDK('6.1', - sanity_check_file=r'bin\SetEnv.Cmd', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = SDK61VCSetupScripts, - ), - - WindowsSDK('6.0A', - sanity_check_file=r'include\windows.h', - include_subdir='include', - lib_subdir={ - 'x86' : ['lib'], - 'x86_64' : [r'lib\x64'], - 'ia64' : [r'lib\ia64'], - }, - vc_setup_scripts = preSDK61VCSetupScripts, - ), - - WindowsSDK('6.0', - sanity_check_file=r'bin\gacutil.exe', - include_subdir='include', - lib_subdir='lib', - vc_setup_scripts = preSDK61VCSetupScripts, - ), - - PlatformSDK('2003R2', - sanity_check_file=r'SetEnv.Cmd', - uuid="D2FF9F89-8AA2-4373-8A31-C838BF4DBBE1", - vc_setup_scripts = preSDK61VCSetupScripts, - ), - - PlatformSDK('2003R1', - sanity_check_file=r'SetEnv.Cmd', - 
uuid="8F9E5EF3-A9A5-491B-A889-C58EFFECE8B3", - vc_setup_scripts = preSDK61VCSetupScripts, - ), -] - -SupportedSDKMap = {} -for sdk in SupportedSDKList: - SupportedSDKMap[sdk.version] = sdk - - -# Finding installed SDKs isn't cheap, because it goes not only to the -# registry but also to the disk to sanity-check that there is, in fact, -# an SDK installed there and that the registry entry isn't just stale. -# Find this information once, when requested, and cache it. - -InstalledSDKList = None -InstalledSDKMap = None - -def get_installed_sdks(): - global InstalledSDKList - global InstalledSDKMap - debug('get_installed_sdks()') - if InstalledSDKList is None: - InstalledSDKList = [] - InstalledSDKMap = {} - for sdk in SupportedSDKList: - debug('trying to find SDK %s' % sdk.version) - if sdk.get_sdk_dir(): - debug('found SDK %s' % sdk.version) - InstalledSDKList.append(sdk) - InstalledSDKMap[sdk.version] = sdk - return InstalledSDKList - - -# We may be asked to update multiple construction environments with -# SDK information. When doing this, we check on-disk for whether -# the SDK has 'mfc' and 'atl' subdirectories. Since going to disk -# is expensive, cache results by directory. 
- -SDKEnvironmentUpdates = {} - -def set_sdk_by_directory(env, sdk_dir): - global SDKEnvironmentUpdates - debug('set_sdk_by_directory: Using dir:%s'%sdk_dir) - try: - env_tuple_list = SDKEnvironmentUpdates[sdk_dir] - except KeyError: - env_tuple_list = [] - SDKEnvironmentUpdates[sdk_dir] = env_tuple_list - - include_path = os.path.join(sdk_dir, 'include') - mfc_path = os.path.join(include_path, 'mfc') - atl_path = os.path.join(include_path, 'atl') - - if os.path.exists(mfc_path): - env_tuple_list.append(('INCLUDE', mfc_path)) - if os.path.exists(atl_path): - env_tuple_list.append(('INCLUDE', atl_path)) - env_tuple_list.append(('INCLUDE', include_path)) - - env_tuple_list.append(('LIB', os.path.join(sdk_dir, 'lib'))) - env_tuple_list.append(('LIBPATH', os.path.join(sdk_dir, 'lib'))) - env_tuple_list.append(('PATH', os.path.join(sdk_dir, 'bin'))) - - for variable, directory in env_tuple_list: - env.PrependENVPath(variable, directory) - -def get_sdk_by_version(mssdk): - if mssdk not in SupportedSDKMap: - raise SCons.Errors.UserError("SDK version {} is not supported".format(repr(mssdk))) - get_installed_sdks() - return InstalledSDKMap.get(mssdk) - -def get_default_sdk(): - """Set up the default Platform/Windows SDK.""" - get_installed_sdks() - if not InstalledSDKList: - return None - return InstalledSDKList[0] - -def mssdk_setup_env(env): - debug('mssdk_setup_env()') - if 'MSSDK_DIR' in env: - sdk_dir = env['MSSDK_DIR'] - if sdk_dir is None: - return - sdk_dir = env.subst(sdk_dir) - debug('mssdk_setup_env: Using MSSDK_DIR:{}'.format(sdk_dir)) - elif 'MSSDK_VERSION' in env: - sdk_version = env['MSSDK_VERSION'] - if sdk_version is None: - msg = "SDK version is specified as None" - raise SCons.Errors.UserError(msg) - sdk_version = env.subst(sdk_version) - mssdk = get_sdk_by_version(sdk_version) - if mssdk is None: - msg = "SDK version %s is not installed" % sdk_version - raise SCons.Errors.UserError(msg) - sdk_dir = mssdk.get_sdk_dir() - debug('mssdk_setup_env: Using 
MSSDK_VERSION:%s'%sdk_dir) - elif 'MSVS_VERSION' in env: - msvs_version = env['MSVS_VERSION'] - debug('mssdk_setup_env:Getting MSVS_VERSION from env:%s'%msvs_version) - if msvs_version is None: - debug('mssdk_setup_env thinks msvs_version is None') - return - msvs_version = env.subst(msvs_version) - from . import vs - msvs = vs.get_vs_by_version(msvs_version) - debug('mssdk_setup_env:msvs is :%s'%msvs) - if not msvs: - debug('mssdk_setup_env: no VS version detected, bailingout:%s'%msvs) - return - sdk_version = msvs.sdk_version - debug('msvs.sdk_version is %s'%sdk_version) - if not sdk_version: - return - mssdk = get_sdk_by_version(sdk_version) - if not mssdk: - mssdk = get_default_sdk() - if not mssdk: - return - sdk_dir = mssdk.get_sdk_dir() - debug('mssdk_setup_env: Using MSVS_VERSION:%s'%sdk_dir) - else: - mssdk = get_default_sdk() - if not mssdk: - return - sdk_dir = mssdk.get_sdk_dir() - debug('mssdk_setup_env: not using any env values. sdk_dir:%s'%sdk_dir) - - set_sdk_by_directory(env, sdk_dir) - - #print "No MSVS_VERSION: this is likely to be a bug" - -def mssdk_exists(version=None): - sdks = get_installed_sdks() - if version is None: - return len(sdks) > 0 - return version in sdks - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/vc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/vc.py deleted file mode 100644 index 101bc52eaf2..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/vc.py +++ /dev/null @@ -1,842 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# 
distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -# TODO: -# * supported arch for versions: for old versions of batch file without -# argument, giving bogus argument cannot be detected, so we have to hardcode -# this here -# * print warning when msvc version specified but not found -# * find out why warning do not print -# * test on 64 bits XP + VS 2005 (and VS 6 if possible) -# * SDK -# * Assembly -__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """Module for Visual C/C++ detection and configuration. -""" -import SCons.compat -import SCons.Util - -import subprocess -import os -import platform -import sys -from string import digits as string_digits -if sys.version_info[0] == 2: - import collections - -import SCons.Warnings -from SCons.Tool import find_program_path - -from . import common - -debug = common.debug - -from . 
import sdk - -get_installed_sdks = sdk.get_installed_sdks - - -class VisualCException(Exception): - pass - -class UnsupportedVersion(VisualCException): - pass - -class MSVCUnsupportedHostArch(VisualCException): - pass - -class MSVCUnsupportedTargetArch(VisualCException): - pass - -class MissingConfiguration(VisualCException): - pass - -class NoVersionFound(VisualCException): - pass - -class BatchFileExecutionError(VisualCException): - pass - -# Dict to 'canonalize' the arch -_ARCH_TO_CANONICAL = { - "amd64" : "amd64", - "emt64" : "amd64", - "i386" : "x86", - "i486" : "x86", - "i586" : "x86", - "i686" : "x86", - "ia64" : "ia64", # deprecated - "itanium" : "ia64", # deprecated - "x86" : "x86", - "x86_64" : "amd64", - "arm" : "arm", - "arm64" : "arm64", - "aarch64" : "arm64", -} - -_HOST_TARGET_TO_CL_DIR_GREATER_THAN_14 = { - ("amd64","amd64") : ("Hostx64","x64"), - ("amd64","x86") : ("Hostx64","x86"), - ("amd64","arm") : ("Hostx64","arm"), - ("amd64","arm64") : ("Hostx64","arm64"), - ("x86","amd64") : ("Hostx86","x64"), - ("x86","x86") : ("Hostx86","x86"), - ("x86","arm") : ("Hostx86","arm"), - ("x86","arm64") : ("Hostx86","arm64"), -} - -# get path to the cl.exe dir for older VS versions -# based off a tuple of (host, target) platforms -_HOST_TARGET_TO_CL_DIR = { - ("amd64","amd64") : "amd64", - ("amd64","x86") : "amd64_x86", - ("amd64","arm") : "amd64_arm", - ("amd64","arm64") : "amd64_arm64", - ("x86","amd64") : "x86_amd64", - ("x86","x86") : "", - ("x86","arm") : "x86_arm", - ("x86","arm64") : "x86_arm64", -} - -# Given a (host, target) tuple, return the argument for the bat file. -# Both host and targets should be canonalized. 
-_HOST_TARGET_ARCH_TO_BAT_ARCH = { - ("x86", "x86"): "x86", - ("x86", "amd64"): "x86_amd64", - ("x86", "x86_amd64"): "x86_amd64", - ("amd64", "x86_amd64"): "x86_amd64", # This is present in (at least) VS2012 express - ("amd64", "amd64"): "amd64", - ("amd64", "x86"): "x86", - ("x86", "ia64"): "x86_ia64", # gone since 14.0 - ("arm", "arm"): "arm", # since 14.0, maybe gone 14.1? - ("x86", "arm"): "x86_arm", # since 14.0 - ("x86", "arm64"): "x86_arm64", # since 14.1 - ("amd64", "arm"): "amd64_arm", # since 14.0 - ("amd64", "arm64"): "amd64_arm64", # since 14.1 -} - -_CL_EXE_NAME = 'cl.exe' - -def get_msvc_version_numeric(msvc_version): - """Get the raw version numbers from a MSVC_VERSION string, so it - could be cast to float or other numeric values. For example, '14.0Exp' - would get converted to '14.0'. - - Args: - msvc_version: str - string representing the version number, could contain non - digit characters - - Returns: - str: the value converted to a numeric only string - - """ - return ''.join([x for x in msvc_version if x in string_digits + '.']) - -def get_host_target(env): - debug('get_host_target()') - - host_platform = env.get('HOST_ARCH') - if not host_platform: - host_platform = platform.machine() - - # Solaris returns i86pc for both 32 and 64 bit architectures - if host_platform == "i86pc": - if platform.architecture()[0] == "64bit": - host_platform = "amd64" - else: - host_platform = "x86" - - # Retain user requested TARGET_ARCH - req_target_platform = env.get('TARGET_ARCH') - debug('get_host_target() req_target_platform:%s'%req_target_platform) - - if req_target_platform: - # If user requested a specific platform then only try that one. 
- target_platform = req_target_platform - else: - target_platform = host_platform - - try: - host = _ARCH_TO_CANONICAL[host_platform.lower()] - except KeyError: - msg = "Unrecognized host architecture %s" - raise MSVCUnsupportedHostArch(msg % repr(host_platform)) - - try: - target = _ARCH_TO_CANONICAL[target_platform.lower()] - except KeyError: - all_archs = str(list(_ARCH_TO_CANONICAL.keys())) - raise MSVCUnsupportedTargetArch("Unrecognized target architecture %s\n\tValid architectures: %s" % (target_platform, all_archs)) - - return (host, target,req_target_platform) - -# If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the -# MSVC_VERSION documentation in Tool/msvc.xml. -_VCVER = ["14.3", "14.2", "14.1", "14.0", "14.0Exp", "12.0", "12.0Exp", "11.0", "11.0Exp", "10.0", "10.0Exp", "9.0", "9.0Exp","8.0", "8.0Exp","7.1", "7.0", "6.0"] - -# if using vswhere, a further mapping is needed -_VCVER_TO_VSWHERE_VER = { - '14.3': '[17.0, 18.0)', - '14.2' : '[16.0, 17.0)', - '14.1' : '[15.0, 16.0)', -} - -_VCVER_TO_PRODUCT_DIR = { - '14.3': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'')], # not set by this version - '14.2' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'')], # VS 2019 doesn't set this key - '14.1' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'')], # VS 2017 doesn't set this key - '14.0' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir')], - '14.0Exp' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\14.0\Setup\VC\ProductDir')], - '12.0' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'), - ], - '12.0Exp' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\12.0\Setup\VC\ProductDir'), - ], - '11.0': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\11.0\Setup\VC\ProductDir'), - ], - '11.0Exp' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\11.0\Setup\VC\ProductDir'), - ], - '10.0': [ - (SCons.Util.HKEY_LOCAL_MACHINE, 
r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'), - ], - '10.0Exp' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\10.0\Setup\VC\ProductDir'), - ], - '9.0': [ - (SCons.Util.HKEY_CURRENT_USER, r'Microsoft\DevDiv\VCForPython\9.0\installdir',), - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir',), - ], - '9.0Exp' : [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'), - ], - '8.0': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\8.0\Setup\VC\ProductDir'), - ], - '8.0Exp': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\8.0\Setup\VC\ProductDir'), - ], - '7.1': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\7.1\Setup\VC\ProductDir'), - ], - '7.0': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\7.0\Setup\VC\ProductDir'), - ], - '6.0': [ - (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual C++\ProductDir'), - ] -} - -def msvc_version_to_maj_min(msvc_version): - msvc_version_numeric = get_msvc_version_numeric(msvc_version) - - t = msvc_version_numeric.split(".") - if not len(t) == 2: - raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric)) - try: - maj = int(t[0]) - min = int(t[1]) - return maj, min - except ValueError as e: - raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric)) - -def is_host_target_supported(host_target, msvc_version): - """Check if (host, target) pair is supported for a VC version. - - :note: only checks whether a given version *may* support the given (host, - target), not that the toolchain is actually present on the machine. - :param tuple host_target: canonalized host-targets pair, e.g. - ("x86", "amd64") for cross compilation from 32 bit Windows to 64 bits. - :param str msvc_version: Visual C++ version (major.minor), e.g. 
"10.0" - :returns: True or False - """ - # We assume that any Visual Studio version supports x86 as a target - if host_target[1] != "x86": - maj, min = msvc_version_to_maj_min(msvc_version) - if maj < 8: - return False - return True - - -def find_vc_pdir_vswhere(msvc_version): - """ - Find the MSVC product directory using the vswhere program. - - :param msvc_version: MSVC version to search for - :return: MSVC install dir or None - :raises UnsupportedVersion: if the version is not known by this file - """ - - try: - vswhere_version = _VCVER_TO_VSWHERE_VER[msvc_version] - except KeyError: - debug("Unknown version of MSVC: %s" % msvc_version) - raise UnsupportedVersion("Unknown version %s" % msvc_version) - - # For bug 3333 - support default location of vswhere for both 64 and 32 bit windows - # installs. - for pf in ['Program Files (x86)', 'Program Files']: - vswhere_path = os.path.join( - 'C:\\', - pf, - 'Microsoft Visual Studio', - 'Installer', - 'vswhere.exe' - ) - if os.path.exists(vswhere_path): - # If we found vswhere, then use it. - break - else: - # No vswhere on system, no install info available - return None - - vswhere_cmd = [vswhere_path, - '-products', '*', - '-version', vswhere_version, - '-property', 'installationPath'] - - #TODO PY27 cannot use Popen as context manager - # try putting it back to the old way for now - sp = subprocess.Popen(vswhere_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - vsdir, err = sp.communicate() - if vsdir: - vsdir = vsdir.decode("mbcs").splitlines() - # vswhere could easily return multiple lines - # we could define a way to pick the one we prefer, but since - # this data is currently only used to make a check for existence, - # returning the first hit should be good enough for now. - vc_pdir = os.path.join(vsdir[0], 'VC') - return vc_pdir - else: - # No vswhere on system, no install info available - return None - - -def find_vc_pdir(msvc_version): - """Find the MSVC product directory for the given version. 
- - Tries to look up the path using a registry key from the table - _VCVER_TO_PRODUCT_DIR; if there is no key, calls find_vc_pdir_wshere - for help instead. - - Args: - msvc_version: str - msvc version (major.minor, e.g. 10.0) - - Returns: - str: Path found in registry, or None - - Raises: - UnsupportedVersion: if the version is not known by this file. - MissingConfiguration: found version but the directory is missing. - - Both exceptions inherit from VisualCException. - """ - root = 'Software\\' - try: - hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version] - except KeyError: - debug("Unknown version of MSVC: %s" % msvc_version) - raise UnsupportedVersion("Unknown version %s" % msvc_version) - - for hkroot, key in hkeys: - try: - comps = None - if not key: - comps = find_vc_pdir_vswhere(msvc_version) - if not comps: - debug('find_vc_pdir_vswhere(): no VC found for version {}'.format(repr(msvc_version))) - raise SCons.Util.WinError - debug('find_vc_pdir_vswhere(): VC found: {}'.format(repr(msvc_version))) - return comps - else: - if common.is_win64(): - try: - # ordinally at win64, try Wow6432Node first. - comps = common.read_reg(root + 'Wow6432Node\\' + key, hkroot) - except SCons.Util.WinError as e: - # at Microsoft Visual Studio for Python 2.7, value is not in Wow6432Node - pass - if not comps: - # not Win64, or Microsoft Visual Studio for Python 2.7 - comps = common.read_reg(root + key, hkroot) - except SCons.Util.WinError as e: - debug('find_vc_dir(): no VC registry key {}'.format(repr(key))) - else: - debug('find_vc_dir(): found VC in registry: {}'.format(comps)) - if os.path.exists(comps): - return comps - else: - debug('find_vc_dir(): reg says dir is {}, but it does not exist. 
(ignoring)'.format(comps)) - raise MissingConfiguration("registry dir {} not found on the filesystem".format(comps)) - return None - -def find_batch_file(env,msvc_version,host_arch,target_arch): - """ - Find the location of the batch script which should set up the compiler - for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress - """ - pdir = find_vc_pdir(msvc_version) - if pdir is None: - raise NoVersionFound("No version of Visual Studio found") - - debug('find_batch_file() in {}'.format(pdir)) - - # filter out e.g. "Exp" from the version name - msvc_ver_numeric = get_msvc_version_numeric(msvc_version) - vernum = float(msvc_ver_numeric) - if 7 <= vernum < 8: - pdir = os.path.join(pdir, os.pardir, "Common7", "Tools") - batfilename = os.path.join(pdir, "vsvars32.bat") - elif vernum < 7: - pdir = os.path.join(pdir, "Bin") - batfilename = os.path.join(pdir, "vcvars32.bat") - elif 8 <= vernum <= 14: - batfilename = os.path.join(pdir, "vcvarsall.bat") - else: # vernum >= 14.1 VS2017 and above - batfilename = os.path.join(pdir, "Auxiliary", "Build", "vcvarsall.bat") - - if not os.path.exists(batfilename): - debug("Not found: %s" % batfilename) - batfilename = None - - installed_sdks = get_installed_sdks() - for _sdk in installed_sdks: - sdk_bat_file = _sdk.get_sdk_vc_script(host_arch,target_arch) - if not sdk_bat_file: - debug("find_batch_file() not found:%s"%_sdk) - else: - sdk_bat_file_path = os.path.join(pdir,sdk_bat_file) - if os.path.exists(sdk_bat_file_path): - debug('find_batch_file() sdk_bat_file_path:%s'%sdk_bat_file_path) - return (batfilename, sdk_bat_file_path) - return (batfilename, None) - - -__INSTALLED_VCS_RUN = None -_VC_TOOLS_VERSION_FILE_PATH = ['Auxiliary', 'Build', 'Microsoft.VCToolsVersion.default.txt'] -_VC_TOOLS_VERSION_FILE = os.sep.join(_VC_TOOLS_VERSION_FILE_PATH) - -def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version): - """Find the cl.exe on the filesystem in the vc_dir depending on - TARGET_ARCH, HOST_ARCH and 
the msvc version. TARGET_ARCH and - HOST_ARCH can be extracted from the passed env, unless its None, - which then the native platform is assumed the host and target. - - Args: - env: Environment - a construction environment, usually if this is passed its - because there is a desired TARGET_ARCH to be used when searching - for a cl.exe - vc_dir: str - the path to the VC dir in the MSVC installation - msvc_version: str - msvc version (major.minor, e.g. 10.0) - - Returns: - bool: - - """ - - # determine if there is a specific target platform we want to build for and - # use that to find a list of valid VCs, default is host platform == target platform - # and same for if no env is specified to extract target platform from - if env: - (host_platform, target_platform, req_target_platform) = get_host_target(env) - else: - host_platform = platform.machine().lower() - target_platform = host_platform - - host_platform = _ARCH_TO_CANONICAL[host_platform] - target_platform = _ARCH_TO_CANONICAL[target_platform] - - debug('_check_cl_exists_in_vc_dir(): host platform %s, target platform %s for version %s' % (host_platform, target_platform, msvc_version)) - - ver_num = float(get_msvc_version_numeric(msvc_version)) - - # make sure the cl.exe exists meaning the tool is installed - if ver_num > 14: - # 2017 and newer allowed multiple versions of the VC toolset to be installed at the same time. 
- # Just get the default tool version for now - #TODO: support setting a specific minor VC version - default_toolset_file = os.path.join(vc_dir, _VC_TOOLS_VERSION_FILE) - try: - with open(default_toolset_file) as f: - vc_specific_version = f.readlines()[0].strip() - except IOError: - debug('_check_cl_exists_in_vc_dir(): failed to read ' + default_toolset_file) - return False - except IndexError: - debug('_check_cl_exists_in_vc_dir(): failed to find MSVC version in ' + default_toolset_file) - return False - - host_trgt_dir = _HOST_TARGET_TO_CL_DIR_GREATER_THAN_14.get((host_platform, target_platform), None) - if host_trgt_dir is None: - debug('_check_cl_exists_in_vc_dir(): unsupported host/target platform combo: (%s,%s)'%(host_platform, target_platform)) - return False - - cl_path = os.path.join(vc_dir, 'Tools','MSVC', vc_specific_version, 'bin', host_trgt_dir[0], host_trgt_dir[1], _CL_EXE_NAME) - debug('_check_cl_exists_in_vc_dir(): checking for ' + _CL_EXE_NAME + ' at ' + cl_path) - if os.path.exists(cl_path): - debug('_check_cl_exists_in_vc_dir(): found ' + _CL_EXE_NAME + '!') - return True - - elif ver_num <= 14 and ver_num >= 8: - - # Set default value to be -1 as "" which is the value for x86/x86 yields true when tested - # if not host_trgt_dir - host_trgt_dir = _HOST_TARGET_TO_CL_DIR.get((host_platform, target_platform), None) - if host_trgt_dir is None: - debug('_check_cl_exists_in_vc_dir(): unsupported host/target platform combo') - return False - - cl_path = os.path.join(vc_dir, 'bin', host_trgt_dir, _CL_EXE_NAME) - debug('_check_cl_exists_in_vc_dir(): checking for ' + _CL_EXE_NAME + ' at ' + cl_path) - - cl_path_exists = os.path.exists(cl_path) - if not cl_path_exists and host_platform == 'amd64': - # older versions of visual studio only had x86 binaries, - # so if the host platform is amd64, we need to check cross - # compile options (x86 binary compiles some other target on a 64 bit os) - - # Set default value to be -1 as "" which is the value for 
x86/x86 yields true when tested - # if not host_trgt_dir - host_trgt_dir = _HOST_TARGET_TO_CL_DIR.get(('x86', target_platform), None) - if host_trgt_dir is None: - return False - - cl_path = os.path.join(vc_dir, 'bin', host_trgt_dir, _CL_EXE_NAME) - debug('_check_cl_exists_in_vc_dir(): checking for ' + _CL_EXE_NAME + ' at ' + cl_path) - cl_path_exists = os.path.exists(cl_path) - - if cl_path_exists: - debug('_check_cl_exists_in_vc_dir(): found ' + _CL_EXE_NAME + '!') - return True - - elif ver_num < 8 and ver_num >= 6: - # not sure about these versions so if a walk the VC dir (could be slow) - for root, _, files in os.walk(vc_dir): - if _CL_EXE_NAME in files: - debug('get_installed_vcs ' + _CL_EXE_NAME + ' found %s' % os.path.join(root, _CL_EXE_NAME)) - return True - return False - else: - # version not support return false - debug('_check_cl_exists_in_vc_dir(): unsupported MSVC version: ' + str(ver_num)) - - return False - -def cached_get_installed_vcs(env=None): - global __INSTALLED_VCS_RUN - - if __INSTALLED_VCS_RUN is None: - ret = get_installed_vcs(env) - __INSTALLED_VCS_RUN = ret - - return __INSTALLED_VCS_RUN - -def get_installed_vcs(env=None): - installed_versions = [] - - for ver in _VCVER: - debug('trying to find VC %s' % ver) - try: - VC_DIR = find_vc_pdir(ver) - if VC_DIR: - debug('found VC %s' % ver) - if _check_cl_exists_in_vc_dir(env, VC_DIR, ver): - installed_versions.append(ver) - else: - debug('find_vc_pdir no compiler found %s' % ver) - else: - debug('find_vc_pdir return None for ver %s' % ver) - except (MSVCUnsupportedTargetArch, MSVCUnsupportedHostArch): - # Allow this exception to propagate further as it should cause - # SCons to exit with an error code - raise - except VisualCException as e: - debug('did not find VC %s: caught exception %s' % (ver, str(e))) - return installed_versions - -def reset_installed_vcs(): - """Make it try again to find VC. 
This is just for the tests.""" - __INSTALLED_VCS_RUN = None - -# Running these batch files isn't cheap: most of the time spent in -# msvs.generate() is due to vcvars*.bat. In a build that uses "tools='msvs'" -# in multiple environments, for example: -# env1 = Environment(tools='msvs') -# env2 = Environment(tools='msvs') -# we can greatly improve the speed of the second and subsequent Environment -# (or Clone) calls by memoizing the environment variables set by vcvars*.bat. -# -# Updated: by 2018, vcvarsall.bat had gotten so expensive (vs2017 era) -# it was breaking CI builds because the test suite starts scons so many -# times and the existing memo logic only helped with repeated calls -# within the same scons run. Windows builds on the CI system were split -# into chunks to get around single-build time limits. -# With VS2019 it got even slower and an optional persistent cache file -# was introduced. The cache now also stores only the parsed vars, -# not the entire output of running the batch file - saves a bit -# of time not parsing every time. 
- -script_env_cache = None - -def script_env(script, args=None): - global script_env_cache - - if script_env_cache is None: - script_env_cache = common.read_script_env_cache() - cache_key = "{}--{}".format(script, args) - cache_data = script_env_cache.get(cache_key, None) - if cache_data is None: - stdout = common.get_output(script, args) - - # Stupid batch files do not set return code: we take a look at the - # beginning of the output for an error message instead - olines = stdout.splitlines() - if olines[0].startswith("The specified configuration type is missing"): - raise BatchFileExecutionError("\n".join(olines[:2])) - - cache_data = common.parse_output(stdout) - script_env_cache[cache_key] = cache_data - # once we updated cache, give a chance to write out if user wanted - common.write_script_env_cache(script_env_cache) - else: - #TODO: Python 2 cleanup - # If we "hit" data from the json file, we have a Py2 problem: - # keys & values will be unicode. don't detect, just convert. - if sys.version_info[0] == 2: - def convert(data): - if isinstance(data, basestring): - return str(data) - elif isinstance(data, collections.Mapping): - return dict(map(convert, data.iteritems())) - elif isinstance(data, collections.Iterable): - return type(data)(map(convert, data)) - else: - return data - - cache_data = convert(cache_data) - - return cache_data - -def get_default_version(env): - debug('get_default_version()') - - msvc_version = env.get('MSVC_VERSION') - msvs_version = env.get('MSVS_VERSION') - - debug('get_default_version(): msvc_version:%s msvs_version:%s'%(msvc_version,msvs_version)) - - if msvs_version and not msvc_version: - SCons.Warnings.warn( - SCons.Warnings.DeprecatedWarning, - "MSVS_VERSION is deprecated: please use MSVC_VERSION instead ") - return msvs_version - elif msvc_version and msvs_version: - if not msvc_version == msvs_version: - SCons.Warnings.warn( - SCons.Warnings.VisualVersionMismatch, - "Requested msvc version (%s) and msvs version (%s) do " \ - 
"not match: please use MSVC_VERSION only to request a " \ - "visual studio version, MSVS_VERSION is deprecated" \ - % (msvc_version, msvs_version)) - return msvs_version - if not msvc_version: - installed_vcs = cached_get_installed_vcs(env) - debug('installed_vcs:%s' % installed_vcs) - if not installed_vcs: - #msg = 'No installed VCs' - #debug('msv %s' % repr(msg)) - #SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, msg) - debug('msvc_setup_env: No installed VCs') - return None - msvc_version = installed_vcs[0] - debug('msvc_setup_env: using default installed MSVC version %s' % repr(msvc_version)) - - return msvc_version - -def msvc_setup_env_once(env): - try: - has_run = env["MSVC_SETUP_RUN"] - except KeyError: - has_run = False - - if not has_run: - msvc_setup_env(env) - env["MSVC_SETUP_RUN"] = True - -def msvc_find_valid_batch_script(env, version): - debug('msvc_find_valid_batch_script()') - # Find the host platform, target platform, and if present the requested - # target platform - platforms = get_host_target(env) - debug(" msvs_find_valid_batch_script(): host_platform %s, target_platform %s req_target_platform:%s" % platforms) - - host_platform, target_platform, req_target_platform = platforms - try_target_archs = [target_platform] - - # VS2012 has a "cross compile" environment to build 64 bit - # with x86_amd64 as the argument to the batch setup script - if req_target_platform in ('amd64', 'x86_64'): - try_target_archs.append('x86_amd64') - elif not req_target_platform and target_platform in ['amd64', 'x86_64']: - # There may not be "native" amd64, but maybe "cross" x86_amd64 tools - try_target_archs.append('x86_amd64') - # If the user hasn't specifically requested a TARGET_ARCH, and - # The TARGET_ARCH is amd64 then also try 32 bits if there are no viable - # 64 bit tools installed - try_target_archs.append('x86') - - debug("msvs_find_valid_batch_script(): host_platform: %s try_target_archs:%s"%(host_platform, try_target_archs)) - - d = None - for 
tp in try_target_archs: - # Set to current arch. - env['TARGET_ARCH']=tp - - debug("msvc_find_valid_batch_script() trying target_platform:%s"%tp) - host_target = (host_platform, tp) - if not is_host_target_supported(host_target, version): - warn_msg = "host, target = %s not supported for MSVC version %s" % \ - (host_target, version) - SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) - arg = _HOST_TARGET_ARCH_TO_BAT_ARCH[host_target] - - # Get just version numbers - maj, min = msvc_version_to_maj_min(version) - # VS2015+ - if maj >= 14: - if env.get('MSVC_UWP_APP') == '1': - # Initialize environment variables with store/universal paths - arg += ' store' - - # Try to locate a batch file for this host/target platform combo - try: - (vc_script, sdk_script) = find_batch_file(env, version, host_platform, tp) - debug('msvc_find_valid_batch_script() vc_script:%s sdk_script:%s'%(vc_script,sdk_script)) - except VisualCException as e: - msg = str(e) - debug('Caught exception while looking for batch file (%s)' % msg) - warn_msg = "VC version %s not installed. 
" + \ - "C/C++ compilers are most likely not set correctly.\n" + \ - " Installed versions are: %s" - warn_msg = warn_msg % (version, cached_get_installed_vcs(env)) - SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) - continue - - # Try to use the located batch file for this host/target platform combo - debug('msvc_find_valid_batch_script() use_script 2 %s, args:%s' % (repr(vc_script), arg)) - found = None - if vc_script: - try: - d = script_env(vc_script, args=arg) - found = vc_script - except BatchFileExecutionError as e: - debug('msvc_find_valid_batch_script() use_script 3: failed running VC script %s: %s: Error:%s'%(repr(vc_script),arg,e)) - vc_script=None - continue - if not vc_script and sdk_script: - debug('msvc_find_valid_batch_script() use_script 4: trying sdk script: %s'%(sdk_script)) - try: - d = script_env(sdk_script) - found = sdk_script - except BatchFileExecutionError as e: - debug('msvc_find_valid_batch_script() use_script 5: failed running SDK script %s: Error:%s'%(repr(sdk_script),e)) - continue - elif not vc_script and not sdk_script: - debug('msvc_find_valid_batch_script() use_script 6: Neither VC script nor SDK script found') - continue - - debug("msvc_find_valid_batch_script() Found a working script/target: %s/%s"%(repr(found),arg)) - break # We've found a working target_platform, so stop looking - - # If we cannot find a viable installed compiler, reset the TARGET_ARCH - # To it's initial value - if not d: - env['TARGET_ARCH']=req_target_platform - - return d - - -def msvc_setup_env(env): - debug('msvc_setup_env()') - - version = get_default_version(env) - if version is None: - warn_msg = "No version of Visual Studio compiler found - C/C++ " \ - "compilers most likely not set correctly" - SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) - return None - debug('msvc_setup_env: using specified MSVC version %s' % repr(version)) - - # XXX: we set-up both MSVS version for backward - # compatibility with the msvs 
tool - env['MSVC_VERSION'] = version - env['MSVS_VERSION'] = version - env['MSVS'] = {} - - - use_script = env.get('MSVC_USE_SCRIPT', True) - if SCons.Util.is_String(use_script): - debug('msvc_setup_env() use_script 1 %s' % repr(use_script)) - d = script_env(use_script) - elif use_script: - d = msvc_find_valid_batch_script(env,version) - debug('msvc_setup_env() use_script 2 %s' % d) - if not d: - return d - else: - debug('MSVC_USE_SCRIPT set to False') - warn_msg = "MSVC_USE_SCRIPT set to False, assuming environment " \ - "set correctly." - SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) - return None - - for k, v in d.items(): - debug('msvc_setup_env() env:%s -> %s'%(k,v)) - env.PrependENVPath(k, v, delete_existing=True) - - # final check to issue a warning if the compiler is not present - msvc_cl = find_program_path(env, 'cl') - if not msvc_cl: - SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, - "Could not find MSVC compiler 'cl', it may need to be installed separately with Visual Studio") - -def msvc_exists(env=None, version=None): - vcs = cached_get_installed_vcs(env) - if version is None: - return len(vcs) > 0 - return version in vcs diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/vs.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/vs.py deleted file mode 100644 index 972c4f8d200..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/MSCommon/vs.py +++ /dev/null @@ -1,583 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the 
following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/MSCommon/vs.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """Module to detect Visual Studio and/or Visual C/C++ -""" - -import os - -import SCons.Errors -import SCons.Util - -from .common import debug, \ - get_output, \ - is_win64, \ - normalize_env, \ - parse_output, \ - read_reg - -import SCons.Tool.MSCommon.vc - -class VisualStudio(object): - """ - An abstract base class for trying to find installed versions of - Visual Studio. 
- """ - def __init__(self, version, **kw): - self.version = version - kw['vc_version'] = kw.get('vc_version', version) - kw['sdk_version'] = kw.get('sdk_version', version) - self.__dict__.update(kw) - self._cache = {} - - def find_batch_file(self): - vs_dir = self.get_vs_dir() - if not vs_dir: - debug('find_executable(): no vs_dir') - return None - batch_file = os.path.join(vs_dir, self.batch_file_path) - batch_file = os.path.normpath(batch_file) - if not os.path.isfile(batch_file): - debug('find_batch_file(): %s not on file system' % batch_file) - return None - return batch_file - - def find_vs_dir_by_vc(self): - SCons.Tool.MSCommon.vc.get_installed_vcs() - dir = SCons.Tool.MSCommon.vc.find_vc_pdir(self.vc_version) - if not dir: - debug('find_vs_dir_by_vc(): no installed VC %s' % self.vc_version) - return None - return os.path.abspath(os.path.join(dir, os.pardir)) - - def find_vs_dir_by_reg(self): - root = 'Software\\' - - if is_win64(): - root = root + 'Wow6432Node\\' - for key in self.hkeys: - if key=='use_dir': - return self.find_vs_dir_by_vc() - key = root + key - try: - comps = read_reg(key) - except SCons.Util.WinError as e: - debug('find_vs_dir_by_reg(): no VS registry key {}'.format(repr(key))) - else: - debug('find_vs_dir_by_reg(): found VS in registry: {}'.format(comps)) - return comps - return None - - def find_vs_dir(self): - """ Can use registry or location of VC to find vs dir - First try to find by registry, and if that fails find via VC dir - """ - - vs_dir=self.find_vs_dir_by_reg() - if not vs_dir: - vs_dir = self.find_vs_dir_by_vc() - debug('find_vs_dir(): found VS in ' + str(vs_dir )) - return vs_dir - - def find_executable(self): - vs_dir = self.get_vs_dir() - if not vs_dir: - debug('find_executable(): no vs_dir ({})'.format(vs_dir)) - return None - executable = os.path.join(vs_dir, self.executable_path) - executable = os.path.normpath(executable) - if not os.path.isfile(executable): - debug('find_executable(): {} not on file 
system'.format(executable)) - return None - return executable - - def get_batch_file(self): - try: - return self._cache['batch_file'] - except KeyError: - batch_file = self.find_batch_file() - self._cache['batch_file'] = batch_file - return batch_file - - def get_executable(self): - try: - debug('get_executable using cache:%s'%self._cache['executable']) - return self._cache['executable'] - except KeyError: - executable = self.find_executable() - self._cache['executable'] = executable - debug('get_executable not in cache:%s'%executable) - return executable - - def get_vs_dir(self): - try: - return self._cache['vs_dir'] - except KeyError: - vs_dir = self.find_vs_dir() - self._cache['vs_dir'] = vs_dir - return vs_dir - - def get_supported_arch(self): - try: - return self._cache['supported_arch'] - except KeyError: - # RDEVE: for the time being use hardcoded lists - # supported_arch = self.find_supported_arch() - self._cache['supported_arch'] = self.supported_arch - return self.supported_arch - - def reset(self): - self._cache = {} - -# The list of supported Visual Studio versions we know how to detect. -# -# How to look for .bat file ? -# - VS 2008 Express (x86): -# * from registry key productdir, gives the full path to vsvarsall.bat. In -# HKEY_LOCAL_MACHINE): -# Software\Microsoft\VCEpress\9.0\Setup\VC\productdir -# * from environmnent variable VS90COMNTOOLS: the path is then ..\..\VC -# relatively to the path given by the variable. -# -# - VS 2008 Express (WoW6432: 32 bits on windows x64): -# Software\Wow6432Node\Microsoft\VCEpress\9.0\Setup\VC\productdir -# -# - VS 2005 Express (x86): -# * from registry key productdir, gives the full path to vsvarsall.bat. In -# HKEY_LOCAL_MACHINE): -# Software\Microsoft\VCEpress\8.0\Setup\VC\productdir -# * from environmnent variable VS80COMNTOOLS: the path is then ..\..\VC -# relatively to the path given by the variable. -# -# - VS 2005 Express (WoW6432: 32 bits on windows x64): does not seem to have a -# productdir ? 
-# -# - VS 2003 .Net (pro edition ? x86): -# * from registry key productdir. The path is then ..\Common7\Tools\ -# relatively to the key. The key is in HKEY_LOCAL_MACHINE): -# Software\Microsoft\VisualStudio\7.1\Setup\VC\productdir -# * from environmnent variable VS71COMNTOOLS: the path is the full path to -# vsvars32.bat -# -# - VS 98 (VS 6): -# * from registry key productdir. The path is then Bin -# relatively to the key. The key is in HKEY_LOCAL_MACHINE): -# Software\Microsoft\VisualStudio\6.0\Setup\VC98\productdir -# -# The first version found in the list is the one used by default if -# there are multiple versions installed. Barring good reasons to -# the contrary, this means we should list versions from most recent -# to oldest. Pro versions get listed before Express versions on the -# assumption that, by default, you'd rather use the version you paid -# good money for in preference to whatever Microsoft makes available -# for free. -# -# If you update this list, update _VCVER and _VCVER_TO_PRODUCT_DIR in -# Tool/MSCommon/vc.py, and the MSVC_VERSION documentation in Tool/msvc.xml. 
- -SupportedVSList = [ - # Visual Studio 2019 - VisualStudio('14.2', - vc_version='14.2', - sdk_version='10.0A', - hkeys=[], - common_tools_var='VS160COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'VC\Auxiliary\Build\vsvars32.bat', - supported_arch=['x86', 'amd64', "arm"], - ), - - # Visual Studio 2017 - VisualStudio('14.1', - vc_version='14.1', - sdk_version='10.0A', - hkeys=[], - common_tools_var='VS150COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'VC\Auxiliary\Build\vsvars32.bat', - supported_arch=['x86', 'amd64', "arm"], - ), - - # Visual Studio 2015 - VisualStudio('14.0', - vc_version='14.0', - sdk_version='10.0', - hkeys=[r'Microsoft\VisualStudio\14.0\Setup\VS\ProductDir'], - common_tools_var='VS140COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64', "arm"], - ), - - # Visual C++ 2015 Express Edition (for Desktop) - VisualStudio('14.0Exp', - vc_version='14.0', - sdk_version='10.0A', - hkeys=[r'Microsoft\VisualStudio\14.0\Setup\VS\ProductDir'], - common_tools_var='VS140COMNTOOLS', - executable_path=r'Common7\IDE\WDExpress.exe', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64', "arm"], - ), - - # Visual Studio 2013 - VisualStudio('12.0', - vc_version='12.0', - sdk_version='8.1A', - hkeys=[r'Microsoft\VisualStudio\12.0\Setup\VS\ProductDir'], - common_tools_var='VS120COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64'], - ), - - # Visual C++ 2013 Express Edition (for Desktop) - VisualStudio('12.0Exp', - vc_version='12.0', - sdk_version='8.1A', - hkeys=[r'Microsoft\VisualStudio\12.0\Setup\VS\ProductDir'], - common_tools_var='VS120COMNTOOLS', - executable_path=r'Common7\IDE\WDExpress.exe', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64'], - ), - - # Visual Studio 2012 - 
VisualStudio('11.0', - sdk_version='8.0A', - hkeys=[r'Microsoft\VisualStudio\11.0\Setup\VS\ProductDir'], - common_tools_var='VS110COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64'], - ), - - # Visual C++ 2012 Express Edition (for Desktop) - VisualStudio('11.0Exp', - vc_version='11.0', - sdk_version='8.0A', - hkeys=[r'Microsoft\VisualStudio\11.0\Setup\VS\ProductDir'], - common_tools_var='VS110COMNTOOLS', - executable_path=r'Common7\IDE\WDExpress.exe', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64'], - ), - - # Visual Studio 2010 - VisualStudio('10.0', - sdk_version='7.0A', - hkeys=[r'Microsoft\VisualStudio\10.0\Setup\VS\ProductDir'], - common_tools_var='VS100COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64'], - ), - - # Visual C++ 2010 Express Edition - VisualStudio('10.0Exp', - vc_version='10.0', - sdk_version='7.0A', - hkeys=[r'Microsoft\VCExpress\10.0\Setup\VS\ProductDir'], - common_tools_var='VS100COMNTOOLS', - executable_path=r'Common7\IDE\VCExpress.exe', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86'], - ), - - # Visual Studio 2008 - VisualStudio('9.0', - sdk_version='6.0A', - hkeys=[r'Microsoft\VisualStudio\9.0\Setup\VS\ProductDir'], - common_tools_var='VS90COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86', 'amd64'], - ), - - # Visual C++ 2008 Express Edition - VisualStudio('9.0Exp', - vc_version='9.0', - sdk_version='6.0A', - hkeys=[r'Microsoft\VCExpress\9.0\Setup\VS\ProductDir'], - common_tools_var='VS90COMNTOOLS', - executable_path=r'Common7\IDE\VCExpress.exe', - batch_file_path=r'Common7\Tools\vsvars32.bat', - supported_arch=['x86'], - ), - - # Visual Studio 2005 - VisualStudio('8.0', - sdk_version='6.0A', - 
hkeys=[r'Microsoft\VisualStudio\8.0\Setup\VS\ProductDir'], - common_tools_var='VS80COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - default_dirname='Microsoft Visual Studio 8', - supported_arch=['x86', 'amd64'], - ), - - # Visual C++ 2005 Express Edition - VisualStudio('8.0Exp', - vc_version='8.0Exp', - sdk_version='6.0A', - hkeys=[r'Microsoft\VCExpress\8.0\Setup\VS\ProductDir'], - common_tools_var='VS80COMNTOOLS', - executable_path=r'Common7\IDE\VCExpress.exe', - batch_file_path=r'Common7\Tools\vsvars32.bat', - default_dirname='Microsoft Visual Studio 8', - supported_arch=['x86'], - ), - - # Visual Studio .NET 2003 - VisualStudio('7.1', - sdk_version='6.0', - hkeys=[r'Microsoft\VisualStudio\7.1\Setup\VS\ProductDir'], - common_tools_var='VS71COMNTOOLS', - executable_path=r'Common7\IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - default_dirname='Microsoft Visual Studio .NET 2003', - supported_arch=['x86'], - ), - - # Visual Studio .NET - VisualStudio('7.0', - sdk_version='2003R2', - hkeys=[r'Microsoft\VisualStudio\7.0\Setup\VS\ProductDir'], - common_tools_var='VS70COMNTOOLS', - executable_path=r'IDE\devenv.com', - batch_file_path=r'Common7\Tools\vsvars32.bat', - default_dirname='Microsoft Visual Studio .NET', - supported_arch=['x86'], - ), - - # Visual Studio 6.0 - VisualStudio('6.0', - sdk_version='2003R1', - hkeys=[r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual Studio\ProductDir', - 'use_dir'], - common_tools_var='VS60COMNTOOLS', - executable_path=r'Common\MSDev98\Bin\MSDEV.COM', - batch_file_path=r'Common7\Tools\vsvars32.bat', - default_dirname='Microsoft Visual Studio', - supported_arch=['x86'], - ), -] - -SupportedVSMap = {} -for vs in SupportedVSList: - SupportedVSMap[vs.version] = vs - - -# Finding installed versions of Visual Studio isn't cheap, because it -# goes not only to the registry but also to the disk to sanity-check -# that there is, in fact, a Visual Studio 
directory there and that the -# registry entry isn't just stale. Find this information once, when -# requested, and cache it. - -InstalledVSList = None -InstalledVSMap = None - -def get_installed_visual_studios(): - global InstalledVSList - global InstalledVSMap - if InstalledVSList is None: - InstalledVSList = [] - InstalledVSMap = {} - for vs in SupportedVSList: - debug('trying to find VS %s' % vs.version) - if vs.get_executable(): - debug('found VS %s' % vs.version) - InstalledVSList.append(vs) - InstalledVSMap[vs.version] = vs - return InstalledVSList - -def reset_installed_visual_studios(): - global InstalledVSList - global InstalledVSMap - InstalledVSList = None - InstalledVSMap = None - for vs in SupportedVSList: - vs.reset() - - # Need to clear installed VC's as well as they are used in finding - # installed VS's - SCons.Tool.MSCommon.vc.reset_installed_vcs() - - -# We may be asked to update multiple construction environments with -# SDK information. When doing this, we check on-disk for whether -# the SDK has 'mfc' and 'atl' subdirectories. Since going to disk -# is expensive, cache results by directory. 
- -#SDKEnvironmentUpdates = {} -# -#def set_sdk_by_directory(env, sdk_dir): -# global SDKEnvironmentUpdates -# try: -# env_tuple_list = SDKEnvironmentUpdates[sdk_dir] -# except KeyError: -# env_tuple_list = [] -# SDKEnvironmentUpdates[sdk_dir] = env_tuple_list -# -# include_path = os.path.join(sdk_dir, 'include') -# mfc_path = os.path.join(include_path, 'mfc') -# atl_path = os.path.join(include_path, 'atl') -# -# if os.path.exists(mfc_path): -# env_tuple_list.append(('INCLUDE', mfc_path)) -# if os.path.exists(atl_path): -# env_tuple_list.append(('INCLUDE', atl_path)) -# env_tuple_list.append(('INCLUDE', include_path)) -# -# env_tuple_list.append(('LIB', os.path.join(sdk_dir, 'lib'))) -# env_tuple_list.append(('LIBPATH', os.path.join(sdk_dir, 'lib'))) -# env_tuple_list.append(('PATH', os.path.join(sdk_dir, 'bin'))) -# -# for variable, directory in env_tuple_list: -# env.PrependENVPath(variable, directory) - -def msvs_exists(): - return (len(get_installed_visual_studios()) > 0) - -def get_vs_by_version(msvs): - global InstalledVSMap - global SupportedVSMap - - debug('get_vs_by_version()') - if msvs not in SupportedVSMap: - msg = "Visual Studio version %s is not supported" % repr(msvs) - raise SCons.Errors.UserError(msg) - get_installed_visual_studios() - vs = InstalledVSMap.get(msvs) - debug('InstalledVSMap:%s'%InstalledVSMap) - debug('get_vs_by_version: found vs:%s'%vs) - # Some check like this would let us provide a useful error message - # if they try to set a Visual Studio version that's not installed. - # However, we also want to be able to run tests (like the unit - # tests) on systems that don't, or won't ever, have it installed. - # It might be worth resurrecting this, with some configurable - # setting that the tests can use to bypass the check. 
- #if not vs: - # msg = "Visual Studio version %s is not installed" % repr(msvs) - # raise SCons.Errors.UserError, msg - return vs - -def get_default_version(env): - """Returns the default version string to use for MSVS. - - If no version was requested by the user through the MSVS environment - variable, query all the available visual studios through - get_installed_visual_studios, and take the highest one. - - Return - ------ - version: str - the default version. - """ - if 'MSVS' not in env or not SCons.Util.is_Dict(env['MSVS']): - # get all versions, and remember them for speed later - versions = [vs.version for vs in get_installed_visual_studios()] - env['MSVS'] = {'VERSIONS' : versions} - else: - versions = env['MSVS'].get('VERSIONS', []) - - if 'MSVS_VERSION' not in env: - if versions: - env['MSVS_VERSION'] = versions[0] #use highest version by default - else: - debug('get_default_version: WARNING: no installed versions found, ' - 'using first in SupportedVSList (%s)'%SupportedVSList[0].version) - env['MSVS_VERSION'] = SupportedVSList[0].version - - env['MSVS']['VERSION'] = env['MSVS_VERSION'] - - return env['MSVS_VERSION'] - -def get_default_arch(env): - """Return the default arch to use for MSVS - - if no version was requested by the user through the MSVS_ARCH environment - variable, select x86 - - Return - ------ - arch: str - """ - arch = env.get('MSVS_ARCH', 'x86') - - msvs = InstalledVSMap.get(env['MSVS_VERSION']) - - if not msvs: - arch = 'x86' - elif arch not in msvs.get_supported_arch(): - fmt = "Visual Studio version %s does not support architecture %s" - raise SCons.Errors.UserError(fmt % (env['MSVS_VERSION'], arch)) - - return arch - -def merge_default_version(env): - version = get_default_version(env) - arch = get_default_arch(env) - -def msvs_setup_env(env): - batfilename = msvs.get_batch_file() - msvs = get_vs_by_version(version) - if msvs is None: - return - - # XXX: I think this is broken. 
This will silently set a bogus tool instead - # of failing, but there is no other way with the current scons tool - # framework - if batfilename is not None: - - vars = ('LIB', 'LIBPATH', 'PATH', 'INCLUDE') - - msvs_list = get_installed_visual_studios() - vscommonvarnames = [vs.common_tools_var for vs in msvs_list] - save_ENV = env['ENV'] - nenv = normalize_env(env['ENV'], - ['COMSPEC'] + vscommonvarnames, - force=True) - try: - output = get_output(batfilename, arch, env=nenv) - finally: - env['ENV'] = save_ENV - vars = parse_output(output, vars) - - for k, v in vars.items(): - env.PrependENVPath(k, v, delete_existing=1) - -def query_versions(): - """Query the system to get available versions of VS. A version is - considered when a batfile is found.""" - msvs_list = get_installed_visual_studios() - versions = [msvs.version for msvs in msvs_list] - return versions - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/PharLapCommon.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/PharLapCommon.py deleted file mode 100644 index 09751d1763f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/PharLapCommon.py +++ /dev/null @@ -1,116 +0,0 @@ -"""SCons.Tool.PharLapCommon - -This module contains common code used by all Tools for the -Phar Lap ETS tool chain. Right now, this is linkloc and -386asm. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/PharLapCommon.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path -import SCons.Errors -import SCons.Util -import re - -def getPharLapPath(): - """Reads the registry to find the installed path of the Phar Lap ETS - development kit. - - Raises UserError if no installed version of Phar Lap can - be found.""" - - if not SCons.Util.can_read_reg: - raise SCons.Errors.InternalError("No Windows registry module was found") - try: - k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, - 'SOFTWARE\\Pharlap\\ETS') - val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir') - - # The following is a hack...there is (not surprisingly) - # an odd issue in the Phar Lap plug in that inserts - # a bunch of junk data after the phar lap path in the - # registry. 
We must trim it. - idx=val.find('\0') - if idx >= 0: - val = val[:idx] - - return os.path.normpath(val) - except SCons.Util.RegError: - raise SCons.Errors.UserError("Cannot find Phar Lap ETS path in the registry. Is it installed properly?") - -REGEX_ETS_VER = re.compile(r'#define\s+ETS_VER\s+([0-9]+)') - -def getPharLapVersion(): - """Returns the version of the installed ETS Tool Suite as a - decimal number. This version comes from the ETS_VER #define in - the embkern.h header. For example, '#define ETS_VER 1010' (which - is what Phar Lap 10.1 defines) would cause this method to return - 1010. Phar Lap 9.1 does not have such a #define, but this method - will return 910 as a default. - - Raises UserError if no installed version of Phar Lap can - be found.""" - - include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h")) - if not os.path.exists(include_path): - raise SCons.Errors.UserError("Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?") - with open(include_path, 'r') as f: - mo = REGEX_ETS_VER.search(f.read()) - if mo: - return int(mo.group(1)) - # Default return for Phar Lap 9.1 - return 910 - -def addPharLapPaths(env): - """This function adds the path to the Phar Lap binaries, includes, - and libraries, if they are not already there.""" - ph_path = getPharLapPath() - - try: - env_dict = env['ENV'] - except KeyError: - env_dict = {} - env['ENV'] = env_dict - SCons.Util.AddPathIfNotExists(env_dict, 'PATH', - os.path.join(ph_path, 'bin')) - SCons.Util.AddPathIfNotExists(env_dict, 'INCLUDE', - os.path.join(ph_path, 'include')) - SCons.Util.AddPathIfNotExists(env_dict, 'LIB', - os.path.join(ph_path, 'lib')) - SCons.Util.AddPathIfNotExists(env_dict, 'LIB', - os.path.join(ph_path, os.path.normpath('lib/vclib'))) - - env['PHARLAP_PATH'] = getPharLapPath() - env['PHARLAP_VERSION'] = str(getPharLapVersion()) - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 
shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/__init__.py deleted file mode 100644 index a75693f5dde..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/__init__.py +++ /dev/null @@ -1,1359 +0,0 @@ -"""SCons.Tool - -SCons tool selection. - -This looks for modules that define a callable object that can modify -a construction environment as appropriate for a given tool (or tool -chain). - -Note that because this subsystem just *selects* a callable that can -modify a construction environment, it's possible for people to define -their own "tool specification" in an arbitrary callable function. No -one needs to use or tie in to this subsystem in order to roll their own -tool definition. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import sys -import re -import os -import shutil - -import SCons.Builder -import SCons.Errors -import SCons.Node.FS -import SCons.Scanner -import SCons.Scanner.C -import SCons.Scanner.D -import SCons.Scanner.LaTeX -import SCons.Scanner.Prog -import SCons.Scanner.SWIG -try: - # Python 3 - from collections.abc import Callable -except ImportError: - # Python 2.7 - from collections import Callable - -DefaultToolpath = [] - -CScanner = SCons.Scanner.C.CScanner() -DScanner = SCons.Scanner.D.DScanner() -LaTeXScanner = SCons.Scanner.LaTeX.LaTeXScanner() -PDFLaTeXScanner = SCons.Scanner.LaTeX.PDFLaTeXScanner() -ProgramScanner = SCons.Scanner.Prog.ProgramScanner() -SourceFileScanner = SCons.Scanner.Base({}, name='SourceFileScanner') -SWIGScanner = SCons.Scanner.SWIG.SWIGScanner() - -CSuffixes = [".c", ".C", ".cxx", ".cpp", ".c++", ".cc", - ".h", ".H", ".hxx", ".hpp", ".hh", - ".F", ".fpp", ".FPP", - ".m", ".mm", - ".S", ".spp", ".SPP", ".sx"] - -DSuffixes = ['.d'] - -IDLSuffixes = [".idl", ".IDL"] - -LaTeXSuffixes = [".tex", ".ltx", ".latex"] - -SWIGSuffixes = ['.i'] - -for suffix in CSuffixes: - SourceFileScanner.add_scanner(suffix, CScanner) - -for suffix in DSuffixes: - SourceFileScanner.add_scanner(suffix, DScanner) - -for suffix in SWIGSuffixes: - SourceFileScanner.add_scanner(suffix, SWIGScanner) - -# FIXME: what should be done here? Two scanners scan the same extensions, -# but look for different files, e.g., "picture.eps" vs. "picture.pdf". -# The builders for DVI and PDF explicitly reference their scanners -# I think that means this is not needed??? -for suffix in LaTeXSuffixes: - SourceFileScanner.add_scanner(suffix, LaTeXScanner) - SourceFileScanner.add_scanner(suffix, PDFLaTeXScanner) - -# Tool aliases are needed for those tools whose module names also -# occur in the python standard library. 
This causes module shadowing and -# can break using python library functions under python3 -TOOL_ALIASES = { - 'gettext': 'gettext_tool', - 'clang++': 'clangxx', -} - - -class Tool(object): - def __init__(self, name, toolpath=None, **kw): - if toolpath is None: - toolpath = [] - - # Rename if there's a TOOL_ALIAS for this tool - self.name = TOOL_ALIASES.get(name, name) - self.toolpath = toolpath + DefaultToolpath - # remember these so we can merge them into the call - self.init_kw = kw - - module = self._tool_module() - self.generate = module.generate - self.exists = module.exists - if hasattr(module, 'options'): - self.options = module.options - - def _load_dotted_module_py2(self, short_name, full_name, searchpaths=None): - import imp - - splitname = short_name.split('.') - index = 0 - srchpths = searchpaths - for item in splitname: - file, path, desc = imp.find_module(item, srchpths) - mod = imp.load_module(full_name, file, path, desc) - srchpths = [path] - return mod, file - - def _tool_module(self): - oldpythonpath = sys.path - sys.path = self.toolpath + sys.path - # sys.stderr.write("Tool:%s\nPATH:%s\n"%(self.name,sys.path)) - - if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] in (0, 1, 2, 3, 4)): - # Py 2 code - try: - try: - file = None - try: - mod, file = self._load_dotted_module_py2(self.name, self.name, self.toolpath) - return mod - finally: - if file: - file.close() - except ImportError as e: - splitname = self.name.split('.') - if str(e) != "No module named %s" % splitname[0]: - raise SCons.Errors.SConsEnvironmentError(e) - try: - import zipimport - except ImportError: - pass - else: - for aPath in self.toolpath: - try: - importer = zipimport.zipimporter(aPath) - return importer.load_module(self.name) - except ImportError as e: - pass - finally: - sys.path = oldpythonpath - elif sys.version_info[1] > 4: - # From: http://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path/67692#67692 - # import 
importlib.util - # spec = importlib.util.spec_from_file_location("module.name", "/path/to/file.py") - # foo = importlib.util.module_from_spec(spec) - # spec.loader.exec_module(foo) - # foo.MyClass() - # Py 3 code - - # import pdb; pdb.set_trace() - import importlib.util - - # sys.stderr.write("toolpath:%s\n" % self.toolpath) - # sys.stderr.write("SCONS.TOOL path:%s\n" % sys.modules['SCons.Tool'].__path__) - debug = False - spec = None - found_name = self.name - add_to_scons_tools_namespace = False - for path in self.toolpath: - sepname = self.name.replace('.', os.path.sep) - file_path = os.path.join(path, "%s.py" % sepname) - file_package = os.path.join(path, sepname) - - if debug: sys.stderr.write("Trying:%s %s\n" % (file_path, file_package)) - - if os.path.isfile(file_path): - spec = importlib.util.spec_from_file_location(self.name, file_path) - if debug: print("file_Path:%s FOUND" % file_path) - break - elif os.path.isdir(file_package): - file_package = os.path.join(file_package, '__init__.py') - spec = importlib.util.spec_from_file_location(self.name, file_package) - if debug: print("PACKAGE:%s Found" % file_package) - break - - else: - continue - - if spec is None: - if debug: sys.stderr.write("NO SPEC :%s\n" % self.name) - spec = importlib.util.find_spec("." + self.name, package='SCons.Tool') - if spec: - found_name = 'SCons.Tool.' + self.name - add_to_scons_tools_namespace = True - if debug: sys.stderr.write("Spec Found? .%s :%s\n" % (self.name, spec)) - - if spec is None: - error_string = "No module named %s" % self.name - raise SCons.Errors.SConsEnvironmentError(error_string) - - module = importlib.util.module_from_spec(spec) - if module is None: - if debug: print("MODULE IS NONE:%s" % self.name) - error_string = "No module named %s" % self.name - raise SCons.Errors.SConsEnvironmentError(error_string) - - # Don't reload a tool we already loaded. 
- sys_modules_value = sys.modules.get(found_name, False) - - found_module = None - if sys_modules_value and sys_modules_value.__file__ == spec.origin: - found_module = sys.modules[found_name] - else: - # Not sure what to do in the case that there already - # exists sys.modules[self.name] but the source file is - # different.. ? - module = spec.loader.load_module(spec.name) - - sys.modules[found_name] = module - if add_to_scons_tools_namespace: - # If we found it in SCons.Tool, then add it to the module - setattr(SCons.Tool, self.name, module) - - found_module = module - - if found_module is not None: - sys.path = oldpythonpath - return found_module - - sys.path = oldpythonpath - - full_name = 'SCons.Tool.' + self.name - try: - return sys.modules[full_name] - except KeyError: - try: - smpath = sys.modules['SCons.Tool'].__path__ - try: - module, file = self._load_dotted_module_py2(self.name, full_name, smpath) - setattr(SCons.Tool, self.name, module) - if file: - file.close() - return module - except ImportError as e: - if str(e) != "No module named %s" % self.name: - raise SCons.Errors.SConsEnvironmentError(e) - try: - import zipimport - importer = zipimport.zipimporter(sys.modules['SCons.Tool'].__path__[0]) - module = importer.load_module(full_name) - setattr(SCons.Tool, self.name, module) - return module - except ImportError as e: - m = "No tool named '%s': %s" % (self.name, e) - raise SCons.Errors.SConsEnvironmentError(m) - except ImportError as e: - m = "No tool named '%s': %s" % (self.name, e) - raise SCons.Errors.SConsEnvironmentError(m) - - def __call__(self, env, *args, **kw): - if self.init_kw is not None: - # Merge call kws into init kws; - # but don't bash self.init_kw. 
- if kw is not None: - call_kw = kw - kw = self.init_kw.copy() - kw.update(call_kw) - else: - kw = self.init_kw - env.Append(TOOLS=[self.name]) - if hasattr(self, 'options'): - import SCons.Variables - if 'options' not in env: - from SCons.Script import ARGUMENTS - env['options'] = SCons.Variables.Variables(args=ARGUMENTS) - opts = env['options'] - - self.options(opts) - opts.Update(env) - - self.generate(env, *args, **kw) - - def __str__(self): - return self.name - - -########################################################################## -# Create common executable program / library / object builders - -def createProgBuilder(env): - """This is a utility function that creates the Program - Builder in an Environment if it is not there already. - - If it is already there, we return the existing one. - """ - - try: - program = env['BUILDERS']['Program'] - except KeyError: - import SCons.Defaults - program = SCons.Builder.Builder(action=SCons.Defaults.LinkAction, - emitter='$PROGEMITTER', - prefix='$PROGPREFIX', - suffix='$PROGSUFFIX', - src_suffix='$OBJSUFFIX', - src_builder='Object', - target_scanner=ProgramScanner) - env['BUILDERS']['Program'] = program - - return program - - -def createStaticLibBuilder(env): - """This is a utility function that creates the StaticLibrary - Builder in an Environment if it is not there already. - - If it is already there, we return the existing one. 
- """ - - try: - static_lib = env['BUILDERS']['StaticLibrary'] - except KeyError: - action_list = [SCons.Action.Action("$ARCOM", "$ARCOMSTR")] - if env.get('RANLIB', False) or env.Detect('ranlib'): - ranlib_action = SCons.Action.Action("$RANLIBCOM", "$RANLIBCOMSTR") - action_list.append(ranlib_action) - - static_lib = SCons.Builder.Builder(action=action_list, - emitter='$LIBEMITTER', - prefix='$LIBPREFIX', - suffix='$LIBSUFFIX', - src_suffix='$OBJSUFFIX', - src_builder='StaticObject') - env['BUILDERS']['StaticLibrary'] = static_lib - env['BUILDERS']['Library'] = static_lib - - return static_lib - - -def _call_linker_cb(env, callback, args, result=None): - """Returns the result of env['LINKCALLBACKS'][callback](*args) - if env['LINKCALLBACKS'] is a dictionary and env['LINKCALLBACKS'][callback] - is callable. If these conditions are not met, return the value provided as - the *result* argument. This function is mainly used for generating library - info such as versioned suffixes, symlink maps, sonames etc. 
by delegating - the core job to callbacks configured by current linker tool""" - - Verbose = False - - if Verbose: - print('_call_linker_cb: args=%r' % args) - print('_call_linker_cb: callback=%r' % callback) - - try: - cbfun = env['LINKCALLBACKS'][callback] - except (KeyError, TypeError): - if Verbose: - print('_call_linker_cb: env["LINKCALLBACKS"][%r] not found or can not be used' % callback) - pass - else: - if Verbose: - print('_call_linker_cb: env["LINKCALLBACKS"][%r] found' % callback) - print('_call_linker_cb: env["LINKCALLBACKS"][%r]=%r' % (callback, cbfun)) - if isinstance(cbfun, Callable): - if Verbose: - print('_call_linker_cb: env["LINKCALLBACKS"][%r] is callable' % callback) - result = cbfun(env, *args) - return result - - -def _call_env_subst(env, string, *args, **kw): - kw2 = {} - for k in ('raw', 'target', 'source', 'conv', 'executor'): - try: - kw2[k] = kw[k] - except KeyError: - pass - return env.subst(string, *args, **kw2) - - -class _ShLibInfoSupport(object): - @property - def libtype(self): - return 'ShLib' - - def get_lib_prefix(self, env, *args, **kw): - return _call_env_subst(env, '$SHLIBPREFIX', *args, **kw) - - def get_lib_suffix(self, env, *args, **kw): - return _call_env_subst(env, '$SHLIBSUFFIX', *args, **kw) - - def get_lib_version(self, env, *args, **kw): - return _call_env_subst(env, '$SHLIBVERSION', *args, **kw) - - def get_lib_noversionsymlinks(self, env, *args, **kw): - return _call_env_subst(env, '$SHLIBNOVERSIONSYMLINKS', *args, **kw) - - -class _LdModInfoSupport(object): - @property - def libtype(self): - return 'LdMod' - - def get_lib_prefix(self, env, *args, **kw): - return _call_env_subst(env, '$LDMODULEPREFIX', *args, **kw) - - def get_lib_suffix(self, env, *args, **kw): - return _call_env_subst(env, '$LDMODULESUFFIX', *args, **kw) - - def get_lib_version(self, env, *args, **kw): - return _call_env_subst(env, '$LDMODULEVERSION', *args, **kw) - - def get_lib_noversionsymlinks(self, env, *args, **kw): - return 
_call_env_subst(env, '$LDMODULENOVERSIONSYMLINKS', *args, **kw) - - -class _ImpLibInfoSupport(object): - @property - def libtype(self): - return 'ImpLib' - - def get_lib_prefix(self, env, *args, **kw): - return _call_env_subst(env, '$IMPLIBPREFIX', *args, **kw) - - def get_lib_suffix(self, env, *args, **kw): - return _call_env_subst(env, '$IMPLIBSUFFIX', *args, **kw) - - def get_lib_version(self, env, *args, **kw): - version = _call_env_subst(env, '$IMPLIBVERSION', *args, **kw) - if not version: - try: - lt = kw['implib_libtype'] - except KeyError: - pass - else: - if lt == 'ShLib': - version = _call_env_subst(env, '$SHLIBVERSION', *args, **kw) - elif lt == 'LdMod': - version = _call_env_subst(env, '$LDMODULEVERSION', *args, **kw) - return version - - def get_lib_noversionsymlinks(self, env, *args, **kw): - disable = None - try: - env['IMPLIBNOVERSIONSYMLINKS'] - except KeyError: - try: - lt = kw['implib_libtype'] - except KeyError: - pass - else: - if lt == 'ShLib': - disable = _call_env_subst(env, '$SHLIBNOVERSIONSYMLINKS', *args, **kw) - elif lt == 'LdMod': - disable = _call_env_subst(env, '$LDMODULENOVERSIONSYMLINKS', *args, **kw) - else: - disable = _call_env_subst(env, '$IMPLIBNOVERSIONSYMLINKS', *args, **kw) - return disable - - -class _LibInfoGeneratorBase(object): - """Generator base class for library-related info such as suffixes for - versioned libraries, symlink maps, sonames etc. 
It handles commonities - of SharedLibrary and LoadableModule - """ - _support_classes = {'ShLib': _ShLibInfoSupport, - 'LdMod': _LdModInfoSupport, - 'ImpLib': _ImpLibInfoSupport} - - def __init__(self, libtype, infoname): - self.libtype = libtype - self.infoname = infoname - - @property - def libtype(self): - return self._support.libtype - - @libtype.setter - def libtype(self, libtype): - try: - support_class = self._support_classes[libtype] - except KeyError: - raise ValueError('unsupported libtype %r' % libtype) - self._support = support_class() - - def get_lib_prefix(self, env, *args, **kw): - return self._support.get_lib_prefix(env, *args, **kw) - - def get_lib_suffix(self, env, *args, **kw): - return self._support.get_lib_suffix(env, *args, **kw) - - def get_lib_version(self, env, *args, **kw): - return self._support.get_lib_version(env, *args, **kw) - - def get_lib_noversionsymlinks(self, env, *args, **kw): - return self._support.get_lib_noversionsymlinks(env, *args, **kw) - - # Returns name of generator linker callback that shall be used to generate - # our info for a versioned library. For example, if our libtype is 'ShLib' - # and infoname is 'Prefix', it would return 'VersionedShLibPrefix'. 
- def get_versioned_lib_info_generator(self, **kw): - try: - libtype = kw['generator_libtype'] - except KeyError: - libtype = self.libtype - return 'Versioned%s%s' % (libtype, self.infoname) - - def generate_versioned_lib_info(self, env, args, result=None, **kw): - callback = self.get_versioned_lib_info_generator(**kw) - return _call_linker_cb(env, callback, args, result) - - -class _LibPrefixGenerator(_LibInfoGeneratorBase): - """Library prefix generator, used as target_prefix in SharedLibrary and - LoadableModule builders""" - - def __init__(self, libtype): - super(_LibPrefixGenerator, self).__init__(libtype, 'Prefix') - - def __call__(self, env, sources=None, **kw): - Verbose = False - - if sources and 'source' not in kw: - kw2 = kw.copy() - kw2['source'] = sources - else: - kw2 = kw - - prefix = self.get_lib_prefix(env, **kw2) - if Verbose: - print("_LibPrefixGenerator: input prefix=%r" % prefix) - - version = self.get_lib_version(env, **kw2) - if Verbose: - print("_LibPrefixGenerator: version=%r" % version) - - if version: - prefix = self.generate_versioned_lib_info(env, [prefix, version], prefix, **kw2) - - if Verbose: - print("_LibPrefixGenerator: return prefix=%r" % prefix) - return prefix - - -ShLibPrefixGenerator = _LibPrefixGenerator('ShLib') -LdModPrefixGenerator = _LibPrefixGenerator('LdMod') -ImpLibPrefixGenerator = _LibPrefixGenerator('ImpLib') - - -class _LibSuffixGenerator(_LibInfoGeneratorBase): - """Library suffix generator, used as target_suffix in SharedLibrary and - LoadableModule builders""" - - def __init__(self, libtype): - super(_LibSuffixGenerator, self).__init__(libtype, 'Suffix') - - def __call__(self, env, sources=None, **kw): - Verbose = False - - if sources and 'source' not in kw: - kw2 = kw.copy() - kw2['source'] = sources - else: - kw2 = kw - - suffix = self.get_lib_suffix(env, **kw2) - if Verbose: - print("_LibSuffixGenerator: input suffix=%r" % suffix) - - version = self.get_lib_version(env, **kw2) - if Verbose: - 
print("_LibSuffixGenerator: version=%r" % version) - - if version: - suffix = self.generate_versioned_lib_info(env, [suffix, version], suffix, **kw2) - - if Verbose: - print("_LibSuffixGenerator: return suffix=%r" % suffix) - return suffix - - -ShLibSuffixGenerator = _LibSuffixGenerator('ShLib') -LdModSuffixGenerator = _LibSuffixGenerator('LdMod') -ImpLibSuffixGenerator = _LibSuffixGenerator('ImpLib') - - -class _LibSymlinkGenerator(_LibInfoGeneratorBase): - """Library symlink map generator. It generates a list of symlinks that - should be created by SharedLibrary or LoadableModule builders""" - - def __init__(self, libtype): - super(_LibSymlinkGenerator, self).__init__(libtype, 'Symlinks') - - def __call__(self, env, libnode, **kw): - Verbose = False - - if libnode and 'target' not in kw: - kw2 = kw.copy() - kw2['target'] = libnode - else: - kw2 = kw - - if Verbose: - print("_LibSymLinkGenerator: libnode=%r" % libnode.get_path()) - - symlinks = None - - version = self.get_lib_version(env, **kw2) - disable = self.get_lib_noversionsymlinks(env, **kw2) - if Verbose: - print('_LibSymlinkGenerator: version=%r' % version) - print('_LibSymlinkGenerator: disable=%r' % disable) - - if version and not disable: - prefix = self.get_lib_prefix(env, **kw2) - suffix = self.get_lib_suffix(env, **kw2) - symlinks = self.generate_versioned_lib_info(env, [libnode, version, prefix, suffix], **kw2) - - if Verbose: - print('_LibSymlinkGenerator: return symlinks=%r' % StringizeLibSymlinks(symlinks)) - return symlinks - - -ShLibSymlinkGenerator = _LibSymlinkGenerator('ShLib') -LdModSymlinkGenerator = _LibSymlinkGenerator('LdMod') -ImpLibSymlinkGenerator = _LibSymlinkGenerator('ImpLib') - - -class _LibNameGenerator(_LibInfoGeneratorBase): - """Generates "unmangled" library name from a library file node. - - Generally, it's thought to revert modifications done by prefix/suffix - generators (_LibPrefixGenerator/_LibSuffixGenerator) used by a library - builder. 
For example, on gnulink the suffix generator used by SharedLibrary - builder appends $SHLIBVERSION to $SHLIBSUFFIX producing node name which - ends with "$SHLIBSUFFIX.$SHLIBVERSION". Correspondingly, the implementation - of _LibNameGenerator replaces "$SHLIBSUFFIX.$SHLIBVERSION" with - "$SHLIBSUFFIX" in the node's basename. So that, if $SHLIBSUFFIX is ".so", - $SHLIBVERSION is "0.1.2" and the node path is "/foo/bar/libfoo.so.0.1.2", - the _LibNameGenerator shall return "libfoo.so". Other link tools may - implement it's own way of library name unmangling. - """ - - def __init__(self, libtype): - super(_LibNameGenerator, self).__init__(libtype, 'Name') - - def __call__(self, env, libnode, **kw): - """Returns "demangled" library name""" - Verbose = False - - if libnode and 'target' not in kw: - kw2 = kw.copy() - kw2['target'] = libnode - else: - kw2 = kw - - if Verbose: - print("_LibNameGenerator: libnode=%r" % libnode.get_path()) - - version = self.get_lib_version(env, **kw2) - if Verbose: - print('_LibNameGenerator: version=%r' % version) - - name = None - if version: - prefix = self.get_lib_prefix(env, **kw2) - suffix = self.get_lib_suffix(env, **kw2) - name = self.generate_versioned_lib_info(env, [libnode, version, prefix, suffix], **kw2) - - if not name: - name = os.path.basename(libnode.get_path()) - - if Verbose: - print('_LibNameGenerator: return name=%r' % name) - - return name - - -ShLibNameGenerator = _LibNameGenerator('ShLib') -LdModNameGenerator = _LibNameGenerator('LdMod') -ImpLibNameGenerator = _LibNameGenerator('ImpLib') - - -class _LibSonameGenerator(_LibInfoGeneratorBase): - """Library soname generator. Returns library soname (e.g. libfoo.so.0) for - a given node (e.g. 
/foo/bar/libfoo.so.0.1.2)""" - - def __init__(self, libtype): - super(_LibSonameGenerator, self).__init__(libtype, 'Soname') - - def __call__(self, env, libnode, **kw): - """Returns a SONAME based on a shared library's node path""" - Verbose = False - - if libnode and 'target' not in kw: - kw2 = kw.copy() - kw2['target'] = libnode - else: - kw2 = kw - - if Verbose: - print("_LibSonameGenerator: libnode=%r" % libnode.get_path()) - - soname = _call_env_subst(env, '$SONAME', **kw2) - if not soname: - version = self.get_lib_version(env, **kw2) - if Verbose: - print("_LibSonameGenerator: version=%r" % version) - if version: - prefix = self.get_lib_prefix(env, **kw2) - suffix = self.get_lib_suffix(env, **kw2) - soname = self.generate_versioned_lib_info(env, [libnode, version, prefix, suffix], **kw2) - - if not soname: - # fallback to library name (as returned by appropriate _LibNameGenerator) - soname = _LibNameGenerator(self.libtype)(env, libnode) - if Verbose: - print("_LibSonameGenerator: FALLBACK: soname=%r" % soname) - - if Verbose: - print("_LibSonameGenerator: return soname=%r" % soname) - - return soname - - -ShLibSonameGenerator = _LibSonameGenerator('ShLib') -LdModSonameGenerator = _LibSonameGenerator('LdMod') - - -def StringizeLibSymlinks(symlinks): - """Converts list with pairs of nodes to list with pairs of node paths - (strings). Used mainly for debugging.""" - if SCons.Util.is_List(symlinks): - try: - return [(k.get_path(), v.get_path()) for k, v in symlinks] - except (TypeError, ValueError): - return symlinks - else: - return symlinks - - -def EmitLibSymlinks(env, symlinks, libnode, **kw): - """Used by emitters to handle (shared/versioned) library symlinks""" - Verbose = False - - # nodes involved in process... 
all symlinks + library - nodes = list(set([x for x, y in symlinks] + [libnode])) - - clean_targets = kw.get('clean_targets', []) - if not SCons.Util.is_List(clean_targets): - clean_targets = [clean_targets] - - for link, linktgt in symlinks: - env.SideEffect(link, linktgt) - if (Verbose): - print("EmitLibSymlinks: SideEffect(%r,%r)" % (link.get_path(), linktgt.get_path())) - clean_list = [x for x in nodes if x != linktgt] - env.Clean(list(set([linktgt] + clean_targets)), clean_list) - if (Verbose): - print("EmitLibSymlinks: Clean(%r,%r)" % (linktgt.get_path(), [x.get_path() for x in clean_list])) - - -def CreateLibSymlinks(env, symlinks): - """Physically creates symlinks. The symlinks argument must be a list in - form [ (link, linktarget), ... ], where link and linktarget are SCons - nodes. - """ - - Verbose = False - for link, linktgt in symlinks: - linktgt = link.get_dir().rel_path(linktgt) - link = link.get_path() - if (Verbose): - print("CreateLibSymlinks: preparing to add symlink %r -> %r" % (link, linktgt)) - # Delete the (previously created) symlink if exists. Let only symlinks - # to be deleted to prevent accidental deletion of source files... - if env.fs.islink(link): - env.fs.unlink(link) - if (Verbose): - print("CreateLibSymlinks: removed old symlink %r" % link) - # If a file or directory exists with the same name as link, an OSError - # will be thrown, which should be enough, I think. 
- env.fs.symlink(linktgt, link) - if (Verbose): - print("CreateLibSymlinks: add symlink %r -> %r" % (link, linktgt)) - return 0 - - -def LibSymlinksActionFunction(target, source, env): - for tgt in target: - symlinks = getattr(getattr(tgt, 'attributes', None), 'shliblinks', None) - if symlinks: - CreateLibSymlinks(env, symlinks) - return 0 - - -def LibSymlinksStrFun(target, source, env, *args): - cmd = None - for tgt in target: - symlinks = getattr(getattr(tgt, 'attributes', None), 'shliblinks', None) - if symlinks: - if cmd is None: cmd = "" - if cmd: cmd += "\n" - cmd += "Create symlinks for: %r" % tgt.get_path() - try: - linkstr = ', '.join(["%r->%r" % (k, v) for k, v in StringizeLibSymlinks(symlinks)]) - except (KeyError, ValueError): - pass - else: - cmd += ": %s" % linkstr - return cmd - - -LibSymlinksAction = SCons.Action.Action(LibSymlinksActionFunction, LibSymlinksStrFun) - - -def createSharedLibBuilder(env): - """This is a utility function that creates the SharedLibrary - Builder in an Environment if it is not there already. - - If it is already there, we return the existing one. - """ - - try: - shared_lib = env['BUILDERS']['SharedLibrary'] - except KeyError: - import SCons.Defaults - action_list = [SCons.Defaults.SharedCheck, - SCons.Defaults.ShLinkAction, - LibSymlinksAction] - shared_lib = SCons.Builder.Builder(action=action_list, - emitter="$SHLIBEMITTER", - prefix=ShLibPrefixGenerator, - suffix=ShLibSuffixGenerator, - target_scanner=ProgramScanner, - src_suffix='$SHOBJSUFFIX', - src_builder='SharedObject') - env['BUILDERS']['SharedLibrary'] = shared_lib - - return shared_lib - - -def createLoadableModuleBuilder(env): - """This is a utility function that creates the LoadableModule - Builder in an Environment if it is not there already. - - If it is already there, we return the existing one. 
- """ - - try: - ld_module = env['BUILDERS']['LoadableModule'] - except KeyError: - import SCons.Defaults - action_list = [SCons.Defaults.SharedCheck, - SCons.Defaults.LdModuleLinkAction, - LibSymlinksAction] - ld_module = SCons.Builder.Builder(action=action_list, - emitter="$LDMODULEEMITTER", - prefix=LdModPrefixGenerator, - suffix=LdModSuffixGenerator, - target_scanner=ProgramScanner, - src_suffix='$SHOBJSUFFIX', - src_builder='SharedObject') - env['BUILDERS']['LoadableModule'] = ld_module - - return ld_module - - -def createObjBuilders(env): - """This is a utility function that creates the StaticObject - and SharedObject Builders in an Environment if they - are not there already. - - If they are there already, we return the existing ones. - - This is a separate function because soooo many Tools - use this functionality. - - The return is a 2-tuple of (StaticObject, SharedObject) - """ - - try: - static_obj = env['BUILDERS']['StaticObject'] - except KeyError: - static_obj = SCons.Builder.Builder(action={}, - emitter={}, - prefix='$OBJPREFIX', - suffix='$OBJSUFFIX', - src_builder=['CFile', 'CXXFile'], - source_scanner=SourceFileScanner, - single_source=1) - env['BUILDERS']['StaticObject'] = static_obj - env['BUILDERS']['Object'] = static_obj - - try: - shared_obj = env['BUILDERS']['SharedObject'] - except KeyError: - shared_obj = SCons.Builder.Builder(action={}, - emitter={}, - prefix='$SHOBJPREFIX', - suffix='$SHOBJSUFFIX', - src_builder=['CFile', 'CXXFile'], - source_scanner=SourceFileScanner, - single_source=1) - env['BUILDERS']['SharedObject'] = shared_obj - - return (static_obj, shared_obj) - - -def createCFileBuilders(env): - """This is a utility function that creates the CFile/CXXFile - Builders in an Environment if they - are not there already. - - If they are there already, we return the existing ones. - - This is a separate function because soooo many Tools - use this functionality. 
- - The return is a 2-tuple of (CFile, CXXFile) - """ - - try: - c_file = env['BUILDERS']['CFile'] - except KeyError: - c_file = SCons.Builder.Builder(action={}, - emitter={}, - suffix={None: '$CFILESUFFIX'}) - env['BUILDERS']['CFile'] = c_file - - env.SetDefault(CFILESUFFIX='.c') - - try: - cxx_file = env['BUILDERS']['CXXFile'] - except KeyError: - cxx_file = SCons.Builder.Builder(action={}, - emitter={}, - suffix={None: '$CXXFILESUFFIX'}) - env['BUILDERS']['CXXFile'] = cxx_file - env.SetDefault(CXXFILESUFFIX='.cc') - - return (c_file, cxx_file) - - -########################################################################## -# Create common Java builders - -def CreateJarBuilder(env): - """The Jar builder expects a list of class files - which it can package into a jar file. - - The jar tool provides an interface for passing other types - of java files such as .java, directories or swig interfaces - and will build them to class files in which it can package - into the jar. - """ - try: - java_jar = env['BUILDERS']['JarFile'] - except KeyError: - fs = SCons.Node.FS.get_default_fs() - jar_com = SCons.Action.Action('$JARCOM', '$JARCOMSTR') - java_jar = SCons.Builder.Builder(action=jar_com, - suffix='$JARSUFFIX', - src_suffix='$JAVACLASSSUFFIX', - src_builder='JavaClassFile', - source_factory=fs.Entry) - env['BUILDERS']['JarFile'] = java_jar - return java_jar - - -def CreateJavaHBuilder(env): - try: - java_javah = env['BUILDERS']['JavaH'] - except KeyError: - fs = SCons.Node.FS.get_default_fs() - java_javah_com = SCons.Action.Action('$JAVAHCOM', '$JAVAHCOMSTR') - java_javah = SCons.Builder.Builder(action=java_javah_com, - src_suffix='$JAVACLASSSUFFIX', - target_factory=fs.Entry, - source_factory=fs.File, - src_builder='JavaClassFile') - env['BUILDERS']['JavaH'] = java_javah - return java_javah - - -def CreateJavaClassFileBuilder(env): - try: - java_class_file = env['BUILDERS']['JavaClassFile'] - except KeyError: - fs = SCons.Node.FS.get_default_fs() - javac_com = 
SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') - java_class_file = SCons.Builder.Builder(action=javac_com, - emitter={}, - # suffix = '$JAVACLASSSUFFIX', - src_suffix='$JAVASUFFIX', - src_builder=['JavaFile'], - target_factory=fs.Entry, - source_factory=fs.File) - env['BUILDERS']['JavaClassFile'] = java_class_file - return java_class_file - - -def CreateJavaClassDirBuilder(env): - try: - java_class_dir = env['BUILDERS']['JavaClassDir'] - except KeyError: - fs = SCons.Node.FS.get_default_fs() - javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') - java_class_dir = SCons.Builder.Builder(action=javac_com, - emitter={}, - target_factory=fs.Dir, - source_factory=fs.Dir) - env['BUILDERS']['JavaClassDir'] = java_class_dir - return java_class_dir - - -def CreateJavaFileBuilder(env): - try: - java_file = env['BUILDERS']['JavaFile'] - except KeyError: - java_file = SCons.Builder.Builder(action={}, - emitter={}, - suffix={None: '$JAVASUFFIX'}) - env['BUILDERS']['JavaFile'] = java_file - env['JAVASUFFIX'] = '.java' - return java_file - - -class ToolInitializerMethod(object): - """ - This is added to a construction environment in place of a - method(s) normally called for a Builder (env.Object, env.StaticObject, - etc.). When called, it has its associated ToolInitializer - object search the specified list of tools and apply the first - one that exists to the construction environment. It then calls - whatever builder was (presumably) added to the construction - environment in place of this particular instance. - """ - - def __init__(self, name, initializer): - """ - Note: we store the tool name as __name__ so it can be used by - the class that attaches this to a construction environment. - """ - self.__name__ = name - self.initializer = initializer - - def get_builder(self, env): - """ - Returns the appropriate real Builder for this method name - after having the associated ToolInitializer object apply - the appropriate Tool module. 
- """ - builder = getattr(env, self.__name__) - - self.initializer.apply_tools(env) - - builder = getattr(env, self.__name__) - if builder is self: - # There was no Builder added, which means no valid Tool - # for this name was found (or possibly there's a mismatch - # between the name we were called by and the Builder name - # added by the Tool module). - return None - - self.initializer.remove_methods(env) - - return builder - - def __call__(self, env, *args, **kw): - """ - """ - builder = self.get_builder(env) - if builder is None: - return [], [] - return builder(*args, **kw) - - -class ToolInitializer(object): - """ - A class for delayed initialization of Tools modules. - - Instances of this class associate a list of Tool modules with - a list of Builder method names that will be added by those Tool - modules. As part of instantiating this object for a particular - construction environment, we also add the appropriate - ToolInitializerMethod objects for the various Builder methods - that we want to use to delay Tool searches until necessary. - """ - - def __init__(self, env, tools, names): - if not SCons.Util.is_List(tools): - tools = [tools] - if not SCons.Util.is_List(names): - names = [names] - self.env = env - self.tools = tools - self.names = names - self.methods = {} - for name in names: - method = ToolInitializerMethod(name, self) - self.methods[name] = method - env.AddMethod(method) - - def remove_methods(self, env): - """ - Removes the methods that were added by the tool initialization - so we no longer copy and re-bind them when the construction - environment gets cloned. - """ - for method in list(self.methods.values()): - env.RemoveMethod(method) - - def apply_tools(self, env): - """ - Searches the list of associated Tool modules for one that - exists, and applies that to the construction environment. 
- """ - for t in self.tools: - tool = SCons.Tool.Tool(t) - if tool.exists(env): - env.Tool(tool) - return - - # If we fall through here, there was no tool module found. - # This is where we can put an informative error message - # about the inability to find the tool. We'll start doing - # this as we cut over more pre-defined Builder+Tools to use - # the ToolInitializer class. - - -def Initializers(env): - ToolInitializer(env, ['install'], ['_InternalInstall', '_InternalInstallAs', '_InternalInstallVersionedLib']) - - def Install(self, *args, **kw): - return self._InternalInstall(*args, **kw) - - def InstallAs(self, *args, **kw): - return self._InternalInstallAs(*args, **kw) - - def InstallVersionedLib(self, *args, **kw): - return self._InternalInstallVersionedLib(*args, **kw) - - env.AddMethod(Install) - env.AddMethod(InstallAs) - env.AddMethod(InstallVersionedLib) - - -def FindTool(tools, env): - for tool in tools: - t = Tool(tool) - if t.exists(env): - return tool - return None - - -def FindAllTools(tools, env): - def ToolExists(tool, env=env): - return Tool(tool).exists(env) - - return list(filter(ToolExists, tools)) - - -def tool_list(platform, env): - other_plat_tools = [] - # XXX this logic about what tool to prefer on which platform - # should be moved into either the platform files or - # the tool files themselves. - # The search orders here are described in the man page. If you - # change these search orders, update the man page as well. 
- if str(platform) == 'win32': - "prefer Microsoft tools on Windows" - linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32'] - c_compilers = ['msvc', 'mingw', 'gcc', 'intelc', 'icl', 'icc', 'cc', 'bcc32'] - cxx_compilers = ['msvc', 'intelc', 'icc', 'g++', 'cxx', 'bcc32'] - assemblers = ['masm', 'nasm', 'gas', '386asm'] - fortran_compilers = ['gfortran', 'g77', 'ifl', 'cvf', 'f95', 'f90', 'fortran'] - ars = ['mslib', 'ar', 'tlib'] - other_plat_tools = ['msvs', 'midl'] - elif str(platform) == 'os2': - "prefer IBM tools on OS/2" - linkers = ['ilink', 'gnulink', ] # 'mslink'] - c_compilers = ['icc', 'gcc', ] # 'msvc', 'cc'] - cxx_compilers = ['icc', 'g++', ] # 'msvc', 'cxx'] - assemblers = ['nasm', ] # 'masm', 'gas'] - fortran_compilers = ['ifl', 'g77'] - ars = ['ar', ] # 'mslib'] - elif str(platform) == 'irix': - "prefer MIPSPro on IRIX" - linkers = ['sgilink', 'gnulink'] - c_compilers = ['sgicc', 'gcc', 'cc'] - cxx_compilers = ['sgicxx', 'g++', 'cxx'] - assemblers = ['as', 'gas'] - fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran'] - ars = ['sgiar'] - elif str(platform) == 'sunos': - "prefer Forte tools on SunOS" - linkers = ['sunlink', 'gnulink'] - c_compilers = ['suncc', 'gcc', 'cc'] - cxx_compilers = ['suncxx', 'g++', 'cxx'] - assemblers = ['as', 'gas'] - fortran_compilers = ['sunf95', 'sunf90', 'sunf77', 'f95', 'f90', 'f77', - 'gfortran', 'g77', 'fortran'] - ars = ['sunar'] - elif str(platform) == 'hpux': - "prefer aCC tools on HP-UX" - linkers = ['hplink', 'gnulink'] - c_compilers = ['hpcc', 'gcc', 'cc'] - cxx_compilers = ['hpcxx', 'g++', 'cxx'] - assemblers = ['as', 'gas'] - fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran'] - ars = ['ar'] - elif str(platform) == 'aix': - "prefer AIX Visual Age tools on AIX" - linkers = ['aixlink', 'gnulink'] - c_compilers = ['aixcc', 'gcc', 'cc'] - cxx_compilers = ['aixcxx', 'g++', 'cxx'] - assemblers = ['as', 'gas'] - fortran_compilers = ['f95', 'f90', 'aixf77', 'g77', 'fortran'] - ars = ['ar'] - 
elif str(platform) == 'darwin': - "prefer GNU tools on Mac OS X, except for some linkers and IBM tools" - linkers = ['applelink', 'gnulink'] - c_compilers = ['gcc', 'cc'] - cxx_compilers = ['g++', 'cxx'] - assemblers = ['as'] - fortran_compilers = ['gfortran', 'f95', 'f90', 'g77'] - ars = ['ar'] - elif str(platform) == 'cygwin': - "prefer GNU tools on Cygwin, except for a platform-specific linker" - linkers = ['cyglink', 'mslink', 'ilink'] - c_compilers = ['gcc', 'msvc', 'intelc', 'icc', 'cc'] - cxx_compilers = ['g++', 'msvc', 'intelc', 'icc', 'cxx'] - assemblers = ['gas', 'nasm', 'masm'] - fortran_compilers = ['gfortran', 'g77', 'ifort', 'ifl', 'f95', 'f90', 'f77'] - ars = ['ar', 'mslib'] - else: - "prefer GNU tools on all other platforms" - linkers = ['gnulink', 'ilink'] - c_compilers = ['gcc', 'intelc', 'icc', 'cc'] - cxx_compilers = ['g++', 'intelc', 'icc', 'cxx'] - assemblers = ['gas', 'nasm', 'masm'] - fortran_compilers = ['gfortran', 'g77', 'ifort', 'ifl', 'f95', 'f90', 'f77'] - ars = ['ar', ] - - if not str(platform) == 'win32': - other_plat_tools += ['m4', 'rpm'] - - c_compiler = FindTool(c_compilers, env) or c_compilers[0] - - # XXX this logic about what tool provides what should somehow be - # moved into the tool files themselves. 
- if c_compiler and c_compiler == 'mingw': - # MinGW contains a linker, C compiler, C++ compiler, - # Fortran compiler, archiver and assembler: - cxx_compiler = None - linker = None - assembler = None - fortran_compiler = None - ar = None - else: - # Don't use g++ if the C compiler has built-in C++ support: - if c_compiler in ('msvc', 'intelc', 'icc'): - cxx_compiler = None - else: - cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0] - linker = FindTool(linkers, env) or linkers[0] - assembler = FindTool(assemblers, env) or assemblers[0] - fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0] - ar = FindTool(ars, env) or ars[0] - - d_compilers = ['dmd', 'ldc', 'gdc'] - d_compiler = FindTool(d_compilers, env) or d_compilers[0] - - other_tools = FindAllTools(other_plat_tools + [ - # TODO: merge 'install' into 'filesystem' and - # make 'filesystem' the default - 'filesystem', - 'wix', # 'midl', 'msvs', - # Parser generators - 'lex', 'yacc', - # Foreign function interface - 'rpcgen', 'swig', - # Java - 'jar', 'javac', 'javah', 'rmic', - # TeX - 'dvipdf', 'dvips', 'gs', - 'tex', 'latex', 'pdflatex', 'pdftex', - # Archivers - 'tar', 'zip', - # File builders (text) - 'textfile', - ], env) - - tools = ([linker, c_compiler, cxx_compiler, - fortran_compiler, assembler, ar, d_compiler] - + other_tools) - - return [x for x in tools if x] - - -def find_program_path(env, key_program, default_paths=None): - """ - Find the location of a tool using various means. - - Mainly for windows where tools aren't all installed in /usr/bin, etc. - - :param env: Current Construction Environment. - :param key_program: Tool to locate. - :param default_paths: List of additional paths this tool might be found in. - """ - # First search in the SCons path - path = env.WhereIs(key_program) - if path: - return path - - # Then in the OS path - path = SCons.Util.WhereIs(key_program) - if path: - return path - - # Finally, add the defaults and check again. 
Do not change - # ['ENV']['PATH'] permananetly, the caller can do that if needed. - if default_paths is None: - return path - save_path = env['ENV']['PATH'] - for p in default_paths: - env.AppendENVPath('PATH', p) - path = env.WhereIs(key_program) - env['ENV']['PATH'] = save_path - return path - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixc++.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixc++.py deleted file mode 100644 index 4a78edaf3fe..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixc++.py +++ /dev/null @@ -1,43 +0,0 @@ -"""SCons.Tool.aixc++ - -Tool-specific initialization for IBM xlC / Visual Age C++ compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/aixc++.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -#forward proxy to the preffered cxx version -from SCons.Tool.aixcxx import * - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixcc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixcc.py deleted file mode 100644 index e7bc1ca9572..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixcc.py +++ /dev/null @@ -1,74 +0,0 @@ -"""SCons.Tool.aixcc - -Tool-specific initialization for IBM xlc / Visual Age C compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/aixcc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Platform.aix - -from . import cc - -packages = ['vac.C', 'ibmcxx.cmp'] - -def get_xlc(env): - xlc = env.get('CC', 'xlc') - return SCons.Platform.aix.get_xlc(env, xlc, packages) - -def generate(env): - """Add Builders and construction variables for xlc / Visual Age - suite to an Environment.""" - path, _cc, version = get_xlc(env) - if path and _cc: - _cc = os.path.join(path, _cc) - - if 'CC' not in env: - env['CC'] = _cc - - cc.generate(env) - - if version: - env['CCVERSION'] = version - -def exists(env): - path, _cc, version = get_xlc(env) - if path and _cc: - xlc = os.path.join(path, _cc) - if os.path.exists(xlc): - return xlc - return None - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixcxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixcxx.py deleted file mode 100644 index f4ee5fef877..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixcxx.py +++ /dev/null @@ -1,77 +0,0 @@ -"""SCons.Tool.aixc++ - -Tool-specific initialization for IBM xlC / Visual Age C++ compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/aixcxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Platform.aix - -import SCons.Tool.cxx -cplusplus = SCons.Tool.cxx -#cplusplus = __import__('cxx', globals(), locals(), []) - -packages = ['vacpp.cmp.core', 'vacpp.cmp.batch', 'vacpp.cmp.C', 'ibmcxx.cmp'] - -def get_xlc(env): - xlc = env.get('CXX', 'xlC') - return SCons.Platform.aix.get_xlc(env, xlc, packages) - -def generate(env): - """Add Builders and construction variables for xlC / Visual Age - suite to an Environment.""" - path, _cxx, version = get_xlc(env) - if path and _cxx: - _cxx = os.path.join(path, _cxx) - - if 'CXX' not in env: - env['CXX'] = _cxx - - cplusplus.generate(env) - - if version: - env['CXXVERSION'] = version - -def exists(env): - path, _cxx, version = get_xlc(env) - if path and _cxx: - xlc = os.path.join(path, _cxx) - if os.path.exists(xlc): - return xlc - return None - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixf77.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixf77.py deleted file mode 100644 index 908e8ad2302..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixf77.py +++ /dev/null @@ -1,80 +0,0 @@ -"""engine.SCons.Tool.aixf77 - -Tool-specific initialization for IBM Visual Age f77 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/aixf77.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -#import SCons.Platform.aix - -from . import f77 - -# It would be good to look for the AIX F77 package the same way we're now -# looking for the C and C++ packages. This should be as easy as supplying -# the correct package names in the following list and uncommenting the -# SCons.Platform.aix_get_xlc() call in the function below. -packages = [] - -def get_xlf77(env): - xlf77 = env.get('F77', 'xlf77') - xlf77_r = env.get('SHF77', 'xlf77_r') - #return SCons.Platform.aix.get_xlc(env, xlf77, xlf77_r, packages) - return (None, xlf77, xlf77_r, None) - -def generate(env): - """ - Add Builders and construction variables for the Visual Age FORTRAN - compiler to an Environment. 
- """ - path, _f77, _shf77, version = get_xlf77(env) - if path: - _f77 = os.path.join(path, _f77) - _shf77 = os.path.join(path, _shf77) - - f77.generate(env) - - env['F77'] = _f77 - env['SHF77'] = _shf77 - -def exists(env): - path, _f77, _shf77, version = get_xlf77(env) - if path and _f77: - xlf77 = os.path.join(path, _f77) - if os.path.exists(xlf77): - return xlf77 - return None - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixlink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixlink.py deleted file mode 100644 index 67d65e82a7f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/aixlink.py +++ /dev/null @@ -1,81 +0,0 @@ -"""SCons.Tool.aixlink - -Tool-specific initialization for the IBM Visual Age linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/aixlink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path - -import SCons.Util - -from . import aixcc -from . import link - -import SCons.Tool.cxx -cplusplus = SCons.Tool.cxx -#cplusplus = __import__('cxx', globals(), locals(), []) - - -def smart_linkflags(source, target, env, for_signature): - if cplusplus.iscplusplus(source): - build_dir = env.subst('$BUILDDIR', target=target, source=source) - if build_dir: - return '-qtempinc=' + os.path.join(build_dir, 'tempinc') - return '' - -def generate(env): - """ - Add Builders and construction variables for Visual Age linker to - an Environment. - """ - link.generate(env) - - env['SMARTLINKFLAGS'] = smart_linkflags - env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS') - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218') - env['SHLIBSUFFIX'] = '.a' - -def exists(env): - # TODO: sync with link.smart_link() to choose a linker - linkers = { 'CXX': ['aixc++'], 'CC': ['aixcc'] } - alltools = [] - for langvar, linktools in linkers.items(): - if langvar in env: # use CC over CXX when user specified CC but not CXX - return SCons.Tool.FindTool(linktools, env) - alltools.extend(linktools) - return SCons.Tool.FindTool(alltools, env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/applelink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/applelink.py deleted file mode 100644 index f432d613b3e..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/applelink.py +++ /dev/null 
@@ -1,218 +0,0 @@ -"""SCons.Tool.applelink - -Tool-specific initialization for Apple's gnu-like linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/applelink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -# Even though the Mac is based on the GNU toolchain, it doesn't understand -# the -rpath option, so we use the "link" tool instead of "gnulink". -from . 
import link - - -class AppleLinkInvalidCurrentVersionException(Exception): - pass - -class AppleLinkInvalidCompatibilityVersionException(Exception): - pass - - -def _applelib_versioned_lib_suffix(env, suffix, version): - """For suffix='.dylib' and version='0.1.2' it returns '.0.1.2.dylib'""" - Verbose = False - if Verbose: - print("_applelib_versioned_lib_suffix: suffix={!r}".format(suffix)) - print("_applelib_versioned_lib_suffix: version={!r}".format(version)) - if version not in suffix: - suffix = "." + version + suffix - if Verbose: - print("_applelib_versioned_lib_suffix: return suffix={!r}".format(suffix)) - return suffix - - -def _applelib_versioned_lib_soname(env, libnode, version, prefix, suffix, name_func): - """For libnode='/optional/dir/libfoo.X.Y.Z.dylib' it returns 'libfoo.X.dylib'""" - Verbose = False - if Verbose: - print("_applelib_versioned_lib_soname: version={!r}".format(version)) - name = name_func(env, libnode, version, prefix, suffix) - if Verbose: - print("_applelib_versioned_lib_soname: name={!r}".format(name)) - major = version.split('.')[0] - (libname,_suffix) = name.split('.') - soname = '.'.join([libname, major, _suffix]) - if Verbose: - print("_applelib_versioned_lib_soname: soname={!r}".format(soname)) - return soname - -def _applelib_versioned_shlib_soname(env, libnode, version, prefix, suffix): - return _applelib_versioned_lib_soname(env, libnode, version, prefix, suffix, link._versioned_shlib_name) - - -# User programmatically describes how SHLIBVERSION maps to values for compat/current. -_applelib_max_version_values = (65535, 255, 255) -def _applelib_check_valid_version(version_string): - """ - Check that the version # is valid. 
- X[.Y[.Z]] - where X 0-65535 - where Y either not specified or 0-255 - where Z either not specified or 0-255 - :param version_string: - :return: - """ - parts = version_string.split('.') - if len(parts) > 3: - return False, "Version string has too many periods [%s]"%version_string - if len(parts) <= 0: - return False, "Version string unspecified [%s]"%version_string - - for (i, p) in enumerate(parts): - try: - p_i = int(p) - except ValueError: - return False, "Version component %s (from %s) is not a number"%(p, version_string) - if p_i < 0 or p_i > _applelib_max_version_values[i]: - return False, "Version component %s (from %s) is not valid value should be between 0 and %d"%(p, version_string, _applelib_max_version_values[i]) - - return True, "" - - -def _applelib_currentVersionFromSoVersion(source, target, env, for_signature): - """ - A generator function to create the -Wl,-current_version flag if needed. - If env['APPLELINK_NO_CURRENT_VERSION'] contains a true value no flag will be generated - Otherwise if APPLELINK_CURRENT_VERSION is not specified, env['SHLIBVERSION'] - will be used. - - :param source: - :param target: - :param env: - :param for_signature: - :return: A string providing the flag to specify the current_version of the shared library - """ - if env.get('APPLELINK_NO_CURRENT_VERSION', False): - return "" - elif env.get('APPLELINK_CURRENT_VERSION', False): - version_string = env['APPLELINK_CURRENT_VERSION'] - elif env.get('SHLIBVERSION', False): - version_string = env['SHLIBVERSION'] - else: - return "" - - version_string = ".".join(version_string.split('.')[:3]) - - valid, reason = _applelib_check_valid_version(version_string) - if not valid: - raise AppleLinkInvalidCurrentVersionException(reason) - - return "-Wl,-current_version,%s" % version_string - - -def _applelib_compatVersionFromSoVersion(source, target, env, for_signature): - """ - A generator function to create the -Wl,-compatibility_version flag if needed. 
- If env['APPLELINK_NO_COMPATIBILITY_VERSION'] contains a true value no flag will be generated - Otherwise if APPLELINK_COMPATIBILITY_VERSION is not specified - the first two parts of env['SHLIBVERSION'] will be used with a .0 appended. - - :param source: - :param target: - :param env: - :param for_signature: - :return: A string providing the flag to specify the compatibility_version of the shared library - """ - if env.get('APPLELINK_NO_COMPATIBILITY_VERSION', False): - return "" - elif env.get('APPLELINK_COMPATIBILITY_VERSION', False): - version_string = env['APPLELINK_COMPATIBILITY_VERSION'] - elif env.get('SHLIBVERSION', False): - version_string = ".".join(env['SHLIBVERSION'].split('.')[:2] + ['0']) - else: - return "" - - if version_string is None: - return "" - - valid, reason = _applelib_check_valid_version(version_string) - if not valid: - raise AppleLinkInvalidCompatibilityVersionException(reason) - - return "-Wl,-compatibility_version,%s" % version_string - - -def generate(env): - """Add Builders and construction variables for applelink to an - Environment.""" - link.generate(env) - - env['FRAMEWORKPATHPREFIX'] = '-F' - env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__, RDirs)}' - - env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}' - env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib') - env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' - - - # see: http://docstore.mik.ua/orelly/unix3/mac/ch05_04.htm for proper naming - link._setup_versioned_lib_variables(env, tool = 'applelink')#, use_soname = use_soname) - env['LINKCALLBACKS'] = link._versioned_lib_callbacks() - env['LINKCALLBACKS']['VersionedShLibSuffix'] = _applelib_versioned_lib_suffix - env['LINKCALLBACKS']['VersionedShLibSoname'] = _applelib_versioned_shlib_soname - - env['_APPLELINK_CURRENT_VERSION'] = 
_applelib_currentVersionFromSoVersion - env['_APPLELINK_COMPATIBILITY_VERSION'] = _applelib_compatVersionFromSoVersion - env['_SHLIBVERSIONFLAGS'] = '$_APPLELINK_CURRENT_VERSION $_APPLELINK_COMPATIBILITY_VERSION ' - env['_LDMODULEVERSIONFLAGS'] = '$_APPLELINK_CURRENT_VERSION $_APPLELINK_COMPATIBILITY_VERSION ' - - # override the default for loadable modules, which are different - # on OS X than dynamic shared libs. echoing what XCode does for - # pre/suffixes: - env['LDMODULEPREFIX'] = '' - env['LDMODULESUFFIX'] = '' - env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle') - env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' - - env['__SHLIBVERSIONFLAGS'] = '${__libversionflags(__env__,"SHLIBVERSION","_SHLIBVERSIONFLAGS")}' - - - -def exists(env): - return env['PLATFORM'] == 'darwin' - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ar.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ar.py deleted file mode 100644 index 3d8cc8a2f67..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ar.py +++ /dev/null @@ -1,63 +0,0 @@ -"""SCons.Tool.ar - -Tool-specific initialization for ar (library archive). - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/ar.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - - -def generate(env): - """Add Builders and construction variables for ar to an Environment.""" - SCons.Tool.createStaticLibBuilder(env) - - env['AR'] = 'ar' - env['ARFLAGS'] = SCons.Util.CLVar('rc') - env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.a' - - if env.get('RANLIB',env.Detect('ranlib')) : - env['RANLIB'] = env.get('RANLIB','ranlib') - env['RANLIBFLAGS'] = SCons.Util.CLVar('') - env['RANLIBCOM'] = '$RANLIB $RANLIBFLAGS $TARGET' - -def exists(env): - return env.Detect('ar') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/as.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/as.py deleted file mode 100644 index 3986a44ced7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/as.py +++ /dev/null @@ -1,78 +0,0 @@ -"""SCons.Tool.as - -Tool-specific initialization for as, the generic Posix assembler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/as.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -assemblers = ['as'] - -ASSuffixes = ['.s', '.asm', '.ASM'] -ASPPSuffixes = ['.spp', '.SPP', '.sx'] -if SCons.Util.case_sensitive_suffixes('.s', '.S'): - ASPPSuffixes.extend(['.S']) -else: - ASSuffixes.extend(['.S']) - -def generate(env): - """Add Builders and construction variables for as to an Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in ASSuffixes: - static_obj.add_action(suffix, SCons.Defaults.ASAction) - shared_obj.add_action(suffix, SCons.Defaults.ASAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - for suffix in ASPPSuffixes: - static_obj.add_action(suffix, SCons.Defaults.ASPPAction) - shared_obj.add_action(suffix, SCons.Defaults.ASPPAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - env['AS'] = env.Detect(assemblers) or 'as' - env['ASFLAGS'] = SCons.Util.CLVar('') - env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES' - env['ASPPFLAGS'] = '$ASFLAGS' - env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES' - -def exists(env): - return env.Detect(assemblers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/bcc32.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/bcc32.py deleted file mode 100644 index 7e0b47f2bca..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/bcc32.py +++ /dev/null @@ -1,81 +0,0 @@ -"""SCons.Tool.bcc32 - -XXX - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, 
free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/bcc32.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -def findIt(program, env): - # First search in the SCons path and then the OS path: - borwin = env.WhereIs(program) or SCons.Util.WhereIs(program) - if borwin: - dir = os.path.dirname(borwin) - env.PrependENVPath('PATH', dir) - return borwin - -def generate(env): - findIt('bcc32', env) - """Add Builders and construction variables for bcc to an - Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - for suffix in ['.c', '.cpp']: - static_obj.add_action(suffix, SCons.Defaults.CAction) - shared_obj.add_action(suffix, SCons.Defaults.ShCAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - env['CC'] = 'bcc32' - env['CCFLAGS'] = SCons.Util.CLVar('') - env['CFLAGS'] = SCons.Util.CLVar('') - env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' - env['SHCC'] = '$CC' - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') - env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') - env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' - env['CPPDEFPREFIX'] = '-D' - env['CPPDEFSUFFIX'] = '' - env['INCPREFIX'] = '-I' - env['INCSUFFIX'] = '' - env['SHOBJSUFFIX'] = '.dll' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 - env['CFILESUFFIX'] = '.cpp' - -def exists(env): - return findIt('bcc32', env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/c++.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/c++.py deleted file mode 100644 index 00aee5df052..00000000000 --- 
a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/c++.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Tool.c++ - -Tool-specific initialization for generic Posix C++ compilers. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/c++.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -#forward proxy to the preffered cxx version -from SCons.Tool.cxx import * - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cc.py deleted file mode 100644 index ffcb6e84cc0..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cc.py +++ /dev/null @@ -1,105 +0,0 @@ -"""SCons.Tool.cc - -Tool-specific initialization for generic Posix C compilers. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/cc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Tool -import SCons.Defaults -import SCons.Util - -CSuffixes = ['.c', '.m'] -if not SCons.Util.case_sensitive_suffixes('.c', '.C'): - CSuffixes.append('.C') - -def add_common_cc_variables(env): - """ - Add underlying common "C compiler" variables that - are used by multiple tools (specifically, c++). - """ - if '_CCCOMCOM' not in env: - env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS' - # It's a hack to test for darwin here, but the alternative - # of creating an applecc.py to contain this seems overkill. - # Maybe someday the Apple platform will require more setup and - # this logic will be moved. - env['FRAMEWORKS'] = SCons.Util.CLVar('') - env['FRAMEWORKPATH'] = SCons.Util.CLVar('') - if env['PLATFORM'] == 'darwin': - env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH' - - if 'CCFLAGS' not in env: - env['CCFLAGS'] = SCons.Util.CLVar('') - - if 'SHCCFLAGS' not in env: - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') - -compilers = ['cc'] - -def generate(env): - """ - Add Builders and construction variables for C compilers to an Environment. 
- """ - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in CSuffixes: - static_obj.add_action(suffix, SCons.Defaults.CAction) - shared_obj.add_action(suffix, SCons.Defaults.ShCAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - add_common_cc_variables(env) - - if 'CC' not in env: - env['CC'] = env.Detect(compilers) or compilers[0] - env['CFLAGS'] = SCons.Util.CLVar('') - env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' - env['SHCC'] = '$CC' - env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') - env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' - - env['CPPDEFPREFIX'] = '-D' - env['CPPDEFSUFFIX'] = '' - env['INCPREFIX'] = '-I' - env['INCSUFFIX'] = '' - env['SHOBJSUFFIX'] = '.os' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 - - env['CFILESUFFIX'] = '.c' - -def exists(env): - return env.Detect(env.get('CC', compilers)) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clang.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clang.py deleted file mode 100644 index 35347b43e72..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clang.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8; -*- - -"""SCons.Tool.clang - -Tool-specific initialization for clang. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -# __revision__ = "src/engine/SCons/Tool/clang.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -# Based on SCons/Tool/gcc.py by Paweł Tomulik 2014 as a separate tool. -# Brought into the SCons mainline by Russel Winder 2017. 
- -import os -import re -import subprocess -import sys - -import SCons.Util -import SCons.Tool.cc -from SCons.Tool.clangCommon import get_clang_install_dirs - - -compilers = ['clang'] - -def generate(env): - """Add Builders and construction variables for clang to an Environment.""" - SCons.Tool.cc.generate(env) - - if env['PLATFORM'] == 'win32': - # Ensure that we have a proper path for clang - clang = SCons.Tool.find_program_path(env, compilers[0], - default_paths=get_clang_install_dirs(env['PLATFORM'])) - if clang: - clang_bin_dir = os.path.dirname(clang) - env.AppendENVPath('PATH', clang_bin_dir) - - env['CC'] = env.Detect(compilers) or 'clang' - if env['PLATFORM'] in ['cygwin', 'win32']: - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') - else: - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') - - # determine compiler version - if env['CC']: - #pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'], - pipe = SCons.Action._subproc(env, [env['CC'], '--version'], - stdin='devnull', - stderr='devnull', - stdout=subprocess.PIPE) - if pipe.wait() != 0: return - # clang -dumpversion is of no use - with pipe.stdout: - line = pipe.stdout.readline() - if sys.version_info[0] > 2: - line = line.decode() - match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line) - if match: - env['CCVERSION'] = match.group(1) - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangCommon/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangCommon/__init__.py deleted file mode 100644 index 37efbf691e7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangCommon/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Common routines and data for clang tools -""" - -clang_win32_dirs = [ - r'C:\Program Files\LLVM\bin', - r'C:\cygwin64\bin', - 
r'C:\msys64', - r'C:\cygwin\bin', - r'C:\msys', -] - -def get_clang_install_dirs(platform): - if platform == 'win32': - return clang_win32_dirs - else: - return [] \ No newline at end of file diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangxx.py deleted file mode 100644 index 9292c21bd31..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangxx.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8; -*- - -"""SCons.Tool.clang++ - -Tool-specific initialization for clang++. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -# __revision__ = "src/engine/SCons/Tool/clangxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -# Based on SCons/Tool/g++.py by Paweł Tomulik 2014 as a separate tool. -# Brought into the SCons mainline by Russel Winder 2017. - -import os.path -import re -import subprocess -import sys - -import SCons.Tool -import SCons.Util -import SCons.Tool.cxx -from SCons.Tool.clangCommon import get_clang_install_dirs - - -compilers = ['clang++'] - -def generate(env): - """Add Builders and construction variables for clang++ to an Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - SCons.Tool.cxx.generate(env) - - env['CXX'] = env.Detect(compilers) or 'clang++' - - # platform specific settings - if env['PLATFORM'] == 'aix': - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc') - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - elif env['PLATFORM'] == 'hpux': - env['SHOBJSUFFIX'] = '.pic.o' - elif env['PLATFORM'] == 'sunos': - env['SHOBJSUFFIX'] = '.pic.o' - elif env['PLATFORM'] == 'win32': - # Ensure that we have a proper path for clang++ - clangxx = SCons.Tool.find_program_path(env, compilers[0], default_paths=get_clang_install_dirs(env['PLATFORM'])) - if clangxx: - clangxx_bin_dir = os.path.dirname(clangxx) - env.AppendENVPath('PATH', clangxx_bin_dir) - - # determine compiler version - if env['CXX']: - pipe = SCons.Action._subproc(env, [env['CXX'], '--version'], - stdin='devnull', - stderr='devnull', - stdout=subprocess.PIPE) - if pipe.wait() != 0: - return - - # clang -dumpversion is of no use - with pipe.stdout: - line = pipe.stdout.readline() - if sys.version_info[0] > 2: - line = line.decode() - match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line) - if match: - env['CXXVERSION'] = match.group(1) - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 
shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cvf.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cvf.py deleted file mode 100644 index 60d5a3d970d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cvf.py +++ /dev/null @@ -1,58 +0,0 @@ -"""engine.SCons.Tool.cvf - -Tool-specific initialization for the Compaq Visual Fortran compiler. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/cvf.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . 
import fortran - -compilers = ['f90'] - -def generate(env): - """Add Builders and construction variables for compaq visual fortran to an Environment.""" - - fortran.generate(env) - - env['FORTRAN'] = 'f90' - env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' - env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' - env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' - env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' - env['OBJSUFFIX'] = '.obj' - env['FORTRANMODDIR'] = '${TARGET.dir}' - env['FORTRANMODDIRPREFIX'] = '/module:' - env['FORTRANMODDIRSUFFIX'] = '' - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cxx.py deleted file mode 100644 index aced17702f5..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cxx.py +++ /dev/null @@ -1,100 +0,0 @@ -"""SCons.Tool.c++ - -Tool-specific initialization for generic Posix C++ compilers. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/cxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Tool -import SCons.Defaults -import SCons.Util - -compilers = ['CC', 'c++'] - -CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++', '.mm'] -if SCons.Util.case_sensitive_suffixes('.c', '.C'): - CXXSuffixes.append('.C') - -def iscplusplus(source): - if not source: - # Source might be None for unusual cases like SConf. - return 0 - for s in source: - if s.sources: - ext = os.path.splitext(str(s.sources[0]))[1] - if ext in CXXSuffixes: - return 1 - return 0 - -def generate(env): - """ - Add Builders and construction variables for Visual Age C++ compilers - to an Environment. 
- """ - import SCons.Tool - import SCons.Tool.cc - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in CXXSuffixes: - static_obj.add_action(suffix, SCons.Defaults.CXXAction) - shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - SCons.Tool.cc.add_common_cc_variables(env) - - if 'CXX' not in env: - env['CXX'] = env.Detect(compilers) or compilers[0] - env['CXXFLAGS'] = SCons.Util.CLVar('') - env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' - env['SHCXX'] = '$CXX' - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') - env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' - - env['CPPDEFPREFIX'] = '-D' - env['CPPDEFSUFFIX'] = '' - env['INCPREFIX'] = '-I' - env['INCSUFFIX'] = '' - env['SHOBJSUFFIX'] = '.os' - env['OBJSUFFIX'] = '.o' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 - - env['CXXFILESUFFIX'] = '.cc' - -def exists(env): - return env.Detect(env.get('CXX', compilers)) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cyglink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cyglink.py deleted file mode 100644 index c3d78de90b7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/cyglink.py +++ /dev/null @@ -1,236 +0,0 @@ -"""SCons.Tool.cyglink - -Customization of gnulink for Cygwin (http://www.cygwin.com/) - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -from __future__ import absolute_import, print_function - -import re -import os - -import SCons.Action -import SCons.Util -import SCons.Tool - -#MAYBE: from . import gnulink -from . 
import gnulink -from . import link - -def _lib_generator(target, source, env, for_signature, **kw): - try: cmd = kw['cmd'] - except KeyError: cmd = SCons.Util.CLVar(['$SHLINK']) - - try: vp = kw['varprefix'] - except KeyError: vp = 'SHLIB' - - dll = env.FindIxes(target, '%sPREFIX' % vp, '%sSUFFIX' % vp) - if dll: cmd.extend(['-o', dll]) - - cmd.extend(['$SHLINKFLAGS', '$__%sVERSIONFLAGS' % vp, '$__RPATH']) - - implib = env.FindIxes(target, 'IMPLIBPREFIX', 'IMPLIBSUFFIX') - if implib: - cmd.extend([ - '-Wl,--out-implib='+implib.get_string(for_signature), - '-Wl,--export-all-symbols', - '-Wl,--enable-auto-import', - '-Wl,--whole-archive', '$SOURCES', - '-Wl,--no-whole-archive', '$_LIBDIRFLAGS', '$_LIBFLAGS' - ]) - else: - cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS']) - - return [cmd] - - -def shlib_generator(target, source, env, for_signature): - return _lib_generator(target, source, env, for_signature, - varprefix='SHLIB', - cmd = SCons.Util.CLVar(['$SHLINK'])) - -def ldmod_generator(target, source, env, for_signature): - return _lib_generator(target, source, env, for_signature, - varprefix='LDMODULE', - cmd = SCons.Util.CLVar(['$LDMODULE'])) - -def _lib_emitter(target, source, env, **kw): - Verbose = False - - if Verbose: - print("_lib_emitter: target[0]=%r" % target[0].get_path()) - - try: vp = kw['varprefix'] - except KeyError: vp = 'SHLIB' - - try: libtype = kw['libtype'] - except KeyError: libtype = 'ShLib' - - dll = env.FindIxes(target, '%sPREFIX' % vp, '%sSUFFIX' % vp) - no_import_lib = env.get('no_import_lib', 0) - - if Verbose: - print("_lib_emitter: dll=%r" % dll.get_path()) - - if not dll or len(target) > 1: - raise SCons.Errors.UserError("A shared library should have exactly one target with the suffix: %s" % env.subst("$%sSUFFIX" % vp)) - - # Remove any "lib" after the prefix - pre = env.subst('$%sPREFIX' % vp) - if dll.name[len(pre):len(pre)+3] == 'lib': - dll.name = pre + dll.name[len(pre)+3:] - - if Verbose: - print("_lib_emitter: 
dll.name=%r" % dll.name) - - orig_target = target - target = [env.fs.File(dll)] - target[0].attributes.shared = 1 - - if Verbose: - print("_lib_emitter: after target=[env.fs.File(dll)]: target[0]=%r" % target[0].get_path()) - - # Append an import lib target - if not no_import_lib: - # Create list of target libraries as strings - target_strings = env.ReplaceIxes(orig_target[0], - '%sPREFIX' % vp, '%sSUFFIX' % vp, - 'IMPLIBPREFIX', 'IMPLIBSUFFIX') - if Verbose: - print("_lib_emitter: target_strings=%r" % target_strings) - - implib_target = env.fs.File(target_strings) - if Verbose: - print("_lib_emitter: implib_target=%r" % implib_target.get_path()) - implib_target.attributes.shared = 1 - target.append(implib_target) - - symlinks = SCons.Tool.ImpLibSymlinkGenerator(env, implib_target, - implib_libtype=libtype, - generator_libtype=libtype+'ImpLib') - if Verbose: - print("_lib_emitter: implib symlinks=%r" % SCons.Tool.StringizeLibSymlinks(symlinks)) - if symlinks: - SCons.Tool.EmitLibSymlinks(env, symlinks, implib_target, clean_targets = target[0]) - implib_target.attributes.shliblinks = symlinks - - return (target, source) - -def shlib_emitter(target, source, env): - return _lib_emitter(target, source, env, varprefix='SHLIB', libtype='ShLib') - -def ldmod_emitter(target, source, env): - return _lib_emitter(target, source, env, varprefix='LDMODULE', libtype='LdMod') - -def _versioned_lib_suffix(env, suffix, version): - """Generate versioned shared library suffix from a unversioned one. 
- If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'""" - Verbose = False - if Verbose: - print("_versioned_lib_suffix: suffix= ", suffix) - print("_versioned_lib_suffix: version= ", version) - cygversion = re.sub(r'\.', '-', version) - if not suffix.startswith('-' + cygversion): - suffix = '-' + cygversion + suffix - if Verbose: - print("_versioned_lib_suffix: return suffix= ", suffix) - return suffix - -def _versioned_implib_name(env, libnode, version, prefix, suffix, **kw): - return link._versioned_lib_name(env, libnode, version, prefix, suffix, - SCons.Tool.ImpLibPrefixGenerator, - SCons.Tool.ImpLibSuffixGenerator, - implib_libtype=kw['libtype']) - -def _versioned_implib_symlinks(env, libnode, version, prefix, suffix, **kw): - """Generate link names that should be created for a versioned shared library. - Returns a list in the form [ (link, linktarget), ... ] - """ - Verbose = False - - if Verbose: - print("_versioned_implib_symlinks: libnode=%r" % libnode.get_path()) - print("_versioned_implib_symlinks: version=%r" % version) - - try: libtype = kw['libtype'] - except KeyError: libtype = 'ShLib' - - - linkdir = os.path.dirname(libnode.get_path()) - if Verbose: - print("_versioned_implib_symlinks: linkdir=%r" % linkdir) - - name = SCons.Tool.ImpLibNameGenerator(env, libnode, - implib_libtype=libtype, - generator_libtype=libtype+'ImpLib') - if Verbose: - print("_versioned_implib_symlinks: name=%r" % name) - - major = version.split('.')[0] - - link0 = env.fs.File(os.path.join(linkdir, name)) - symlinks = [(link0, libnode)] - - if Verbose: - print("_versioned_implib_symlinks: return symlinks=%r" % SCons.Tool.StringizeLibSymlinks(symlinks)) - - return symlinks - -shlib_action = SCons.Action.Action(shlib_generator, generator=1) -ldmod_action = SCons.Action.Action(ldmod_generator, generator=1) - -def generate(env): - """Add Builders and construction variables for cyglink to an Environment.""" - gnulink.generate(env) - - env['LINKFLAGS'] = 
SCons.Util.CLVar('-Wl,-no-undefined') - - env['SHLINKCOM'] = shlib_action - env['LDMODULECOM'] = ldmod_action - env.Append(SHLIBEMITTER = [shlib_emitter]) - env.Append(LDMODULEEMITTER = [ldmod_emitter]) - - env['SHLIBPREFIX'] = 'cyg' - env['SHLIBSUFFIX'] = '.dll' - - env['IMPLIBPREFIX'] = 'lib' - env['IMPLIBSUFFIX'] = '.dll.a' - - # Variables used by versioned shared libraries - env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS' - env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS' - - # SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink... - - # LINKCALLBACKS are NOT inherited from gnulink - env['LINKCALLBACKS'] = { - 'VersionedShLibSuffix' : _versioned_lib_suffix, - 'VersionedLdModSuffix' : _versioned_lib_suffix, - 'VersionedImpLibSuffix' : _versioned_lib_suffix, - 'VersionedShLibName' : link._versioned_shlib_name, - 'VersionedLdModName' : link._versioned_ldmod_name, - 'VersionedShLibImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='ShLib'), - 'VersionedLdModImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='LdMod'), - 'VersionedShLibImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='ShLib'), - 'VersionedLdModImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='LdMod'), - } - - # these variables were set by gnulink but are not used in cyglink - try: del env['_SHLIBSONAME'] - except KeyError: pass - try: del env['_LDMODULESONAME'] - except KeyError: pass - -def exists(env): - return gnulink.exists(env) - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/default.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/default.py deleted file mode 100644 index 355538f4006..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/default.py +++ /dev/null @@ -1,50 +0,0 @@ -"""SCons.Tool.default - -Initialization with a 
default tool list. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/default.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Tool - -def generate(env): - """Add default tools.""" - for t in SCons.Tool.tool_list(env['PLATFORM'], env): - SCons.Tool.Tool(t)(env) - -def exists(env): - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dmd.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dmd.py deleted file mode 100644 index 74c86735934..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dmd.py +++ /dev/null @@ -1,161 +0,0 @@ -from __future__ import print_function - -"""SCons.Tool.dmd - -Tool-specific initialization for the Digital Mars D compiler. -(http://digitalmars.com/d) - -Originally coded by Andy Friesen (andy@ikagames.com) -15 November 2003 - -Evolved by Russel Winder (russel@winder.org.uk) -2010-02-07 onwards - -Compiler variables: - DC - The name of the D compiler to use. Defaults to dmd or gdmd, - whichever is found. - DPATH - List of paths to search for import modules. - DVERSIONS - List of version tags to enable when compiling. - DDEBUG - List of debug tags to enable when compiling. - -Linker related variables: - LIBS - List of library files to link in. - DLINK - Name of the linker to use. Defaults to dmd or gdmd, - whichever is found. - DLINKFLAGS - List of linker flags. - -Lib tool variables: - DLIB - Name of the lib tool to use. Defaults to lib. - DLIBFLAGS - List of flags to pass to the lib tool. - LIBS - Same as for the linker. 
(libraries to pull into the .lib) -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/dmd.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import subprocess - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Scanner.D -import SCons.Tool - -import SCons.Tool.DCommon as DCommon - - -def generate(env): - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - static_obj.add_action('.d', SCons.Defaults.DAction) - shared_obj.add_action('.d', SCons.Defaults.ShDAction) - static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter) - - env['DC'] = env.Detect(['dmd', 'ldmd2', 'gdmd']) or 'dmd' - env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of$TARGET $SOURCES' - env['_DINCFLAGS'] = '${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)}' - env['_DVERFLAGS'] = '${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)}' - env['_DDEBUGFLAGS'] = '${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)}' - env['_DFLAGS'] = '${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)}' - - env['SHDC'] = '$DC' - env['SHDCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -fPIC -of$TARGET $SOURCES' - - env['DPATH'] = ['#/'] - env['DFLAGS'] = [] - env['DVERSIONS'] = [] - env['DDEBUG'] = [] - - if env['DC']: - DCommon.addDPATHToEnv(env, env['DC']) - - env['DINCPREFIX'] = '-I' - env['DINCSUFFIX'] = '' - env['DVERPREFIX'] = '-version=' - env['DVERSUFFIX'] = '' - env['DDEBUGPREFIX'] = '-debug=' - env['DDEBUGSUFFIX'] = '' - env['DFLAGPREFIX'] = '-' - env['DFLAGSUFFIX'] = '' - env['DFILESUFFIX'] = '.d' - - env['DLINK'] = '$DC' - env['DLINKFLAGS'] = SCons.Util.CLVar('') - env['DLINKCOM'] = '$DLINK -of$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS' - - env['SHDLINK'] = '$DC' - env['SHDLINKFLAGS'] = SCons.Util.CLVar('$DLINKFLAGS -shared -defaultlib=libphobos2.so') - env['SHDLINKCOM'] = '$DLINK -of$TARGET $SHDLINKFLAGS 
$__SHDLIBVERSIONFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS' - - env['DLIBLINKPREFIX'] = '' if env['PLATFORM'] == 'win32' else '-L-l' - env['DLIBLINKSUFFIX'] = '.lib' if env['PLATFORM'] == 'win32' else '' - env['_DLIBFLAGS'] = '${_stripixes(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}' - - env['DLIBDIRPREFIX'] = '-L-L' - env['DLIBDIRSUFFIX'] = '' - env['_DLIBDIRFLAGS'] = '${_concat(DLIBDIRPREFIX, LIBPATH, DLIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)}' - - env['DLIB'] = 'lib' if env['PLATFORM'] == 'win32' else 'ar cr' - env['DLIBCOM'] = '$DLIB $_DLIBFLAGS {0}$TARGET $SOURCES $_DLIBFLAGS'.format('-c ' if env['PLATFORM'] == 'win32' else '') - - # env['_DLIBFLAGS'] = '${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)}' - - env['DLIBFLAGPREFIX'] = '-' - env['DLIBFLAGSUFFIX'] = '' - - # __RPATH is set to $_RPATH in the platform specification if that - # platform supports it. - env['DRPATHPREFIX'] = '-L-rpath,' if env['PLATFORM'] == 'darwin' else '-L-rpath=' - env['DRPATHSUFFIX'] = '' - env['_DRPATH'] = '${_concat(DRPATHPREFIX, RPATH, DRPATHSUFFIX, __env__)}' - - # Support for versioned libraries - env['_SHDLIBVERSIONFLAGS'] = '$SHDLIBVERSIONFLAGS -L-soname=$_SHDLIBSONAME' - env['_SHDLIBSONAME'] = '${DShLibSonameGenerator(__env__,TARGET)}' - # NOTE: this is a quick hack, the soname will only work if there is - # c/c++ linker loaded which provides callback for the ShLibSonameGenerator - env['DShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator - # NOTE: this is only for further reference, currently $SHDLIBVERSION does - # not work, the user must use $SHLIBVERSION - env['SHDLIBVERSION'] = '$SHLIBVERSION' - env['SHDLIBVERSIONFLAGS'] = [] - - env['BUILDERS']['ProgramAllAtOnce'] = SCons.Builder.Builder( - action='$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -of$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS', - emitter=DCommon.allAtOnceEmitter, - ) - - -def exists(env): - return env.Detect(['dmd', 
'ldmd2', 'gdmd']) - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/docbook/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/docbook/__init__.py deleted file mode 100644 index 147556d626d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/docbook/__init__.py +++ /dev/null @@ -1,881 +0,0 @@ - -"""SCons.Tool.docbook - -Tool-specific initialization for Docbook. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -import os -import glob -import re - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Script -import SCons.Tool -import SCons.Util - - -__debug_tool_location = False -# Get full path to this script -scriptpath = os.path.dirname(os.path.realpath(__file__)) - -# Local folder for the collection of DocBook XSLs -db_xsl_folder = 'docbook-xsl-1.76.1' - -# Do we have libxml2/libxslt/lxml? -has_libxml2 = True -has_lxml = True -try: - import libxml2 - import libxslt -except: - has_libxml2 = False -try: - import lxml -except: - has_lxml = False - -# Set this to True, to prefer xsltproc over libxml2 and lxml -prefer_xsltproc = False - -# Regexs for parsing Docbook XML sources of MAN pages -re_manvolnum = re.compile("([^<]*)") -re_refname = re.compile("([^<]*)") - -# -# Helper functions -# -def __extend_targets_sources(target, source): - """ Prepare the lists of target and source files. """ - if not SCons.Util.is_List(target): - target = [target] - if not source: - source = target[:] - elif not SCons.Util.is_List(source): - source = [source] - if len(target) < len(source): - target.extend(source[len(target):]) - - return target, source - -def __init_xsl_stylesheet(kw, env, user_xsl_var, default_path): - if kw.get('DOCBOOK_XSL','') == '': - xsl_style = kw.get('xsl', env.subst(user_xsl_var)) - if xsl_style == '': - path_args = [scriptpath, db_xsl_folder] + default_path - xsl_style = os.path.join(*path_args) - kw['DOCBOOK_XSL'] = xsl_style - -def __select_builder(lxml_builder, libxml2_builder, cmdline_builder): - """ Selects a builder, based on which Python modules are present. """ - if prefer_xsltproc: - return cmdline_builder - - if not has_libxml2: - # At the moment we prefer libxml2 over lxml, the latter can lead - # to conflicts when installed together with libxml2. 
- if has_lxml: - return lxml_builder - else: - return cmdline_builder - - return libxml2_builder - -def __ensure_suffix(t, suffix): - """ Ensure that the target t has the given suffix. """ - tpath = str(t) - if not tpath.endswith(suffix): - return tpath+suffix - - return t - -def __ensure_suffix_stem(t, suffix): - """ Ensure that the target t has the given suffix, and return the file's stem. """ - tpath = str(t) - if not tpath.endswith(suffix): - stem = tpath - tpath += suffix - - return tpath, stem - else: - stem, ext = os.path.splitext(tpath) - - return t, stem - -def __get_xml_text(root): - """ Return the text for the given root node (xml.dom.minidom). """ - txt = "" - for e in root.childNodes: - if (e.nodeType == e.TEXT_NODE): - txt += e.data - return txt - -def __create_output_dir(base_dir): - """ Ensure that the output directory base_dir exists. """ - root, tail = os.path.split(base_dir) - dir = None - if tail: - if base_dir.endswith('/'): - dir = base_dir - else: - dir = root - else: - if base_dir.endswith('/'): - dir = base_dir - - if dir and not os.path.isdir(dir): - os.makedirs(dir) - - -# -# Supported command line tools and their call "signature" -# -xsltproc_com_priority = ['xsltproc', 'saxon', 'saxon-xslt', 'xalan'] - -# TODO: Set minimum version of saxon-xslt to be 8.x (lower than this only supports xslt 1.0. 
-# see: http://saxon.sourceforge.net/saxon6.5.5/ -# see: http://saxon.sourceforge.net/ -xsltproc_com = {'xsltproc' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE', - 'saxon' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE $DOCBOOK_XSLTPROCPARAMS', - # Note if saxon-xslt is version 5.5 the proper arguments are: (swap order of docbook_xsl and source) - # 'saxon-xslt' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $SOURCE $DOCBOOK_XSL $DOCBOOK_XSLTPROCPARAMS', - 'saxon-xslt' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE $DOCBOOK_XSLTPROCPARAMS', - 'xalan' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -q -out $TARGET -xsl $DOCBOOK_XSL -in $SOURCE'} -xmllint_com = {'xmllint' : '$DOCBOOK_XMLLINT $DOCBOOK_XMLLINTFLAGS --xinclude $SOURCE > $TARGET'} -fop_com = {'fop' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -fo $SOURCE -pdf $TARGET', - 'xep' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -valid -fo $SOURCE -pdf $TARGET', - 'jw' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -f docbook -b pdf $SOURCE -o $TARGET'} - -def __detect_cl_tool(env, chainkey, cdict, cpriority=None): - """ - Helper function, picks a command line tool from the list - and initializes its environment variables. - """ - if env.get(chainkey,'') == '': - clpath = '' - - if cpriority is None: - cpriority = cdict.keys() - for cltool in cpriority: - if __debug_tool_location: - print("DocBook: Looking for %s"%cltool) - clpath = env.WhereIs(cltool) - if clpath: - if __debug_tool_location: - print("DocBook: Found:%s"%cltool) - env[chainkey] = clpath - if not env[chainkey + 'COM']: - env[chainkey + 'COM'] = cdict[cltool] - break - -def _detect(env): - """ - Detect all the command line tools that we might need for creating - the requested output formats. 
- """ - global prefer_xsltproc - - if env.get('DOCBOOK_PREFER_XSLTPROC',''): - prefer_xsltproc = True - - if ((not has_libxml2 and not has_lxml) or (prefer_xsltproc)): - # Try to find the XSLT processors - __detect_cl_tool(env, 'DOCBOOK_XSLTPROC', xsltproc_com, xsltproc_com_priority) - __detect_cl_tool(env, 'DOCBOOK_XMLLINT', xmllint_com) - - __detect_cl_tool(env, 'DOCBOOK_FOP', fop_com, ['fop','xep','jw']) - -# -# Scanners -# -include_re = re.compile('fileref\\s*=\\s*["|\']([^\\n]*)["|\']') -sentity_re = re.compile('') - -def __xml_scan(node, env, path, arg): - """ Simple XML file scanner, detecting local images and XIncludes as implicit dependencies. """ - # Does the node exist yet? - if not os.path.isfile(str(node)): - return [] - - if env.get('DOCBOOK_SCANENT',''): - # Use simple pattern matching for system entities..., no support - # for recursion yet. - contents = node.get_text_contents() - return sentity_re.findall(contents) - - xsl_file = os.path.join(scriptpath,'utils','xmldepend.xsl') - if not has_libxml2 or prefer_xsltproc: - if has_lxml and not prefer_xsltproc: - - from lxml import etree - - xsl_tree = etree.parse(xsl_file) - doc = etree.parse(str(node)) - result = doc.xslt(xsl_tree) - - depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith(" 1: - env.Clean(outfiles[0], outfiles[1:]) - - - return result - -def DocbookSlidesPdf(env, target, source=None, *args, **kw): - """ - A pseudo-Builder, providing a Docbook toolchain for PDF slides output. 
- """ - # Init list of targets/sources - target, source = __extend_targets_sources(target, source) - - # Init XSL stylesheet - __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESPDF', ['slides','fo','plain.xsl']) - - # Setup builder - __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder) - - # Create targets - result = [] - for t,s in zip(target,source): - t, stem = __ensure_suffix_stem(t, '.pdf') - xsl = __builder.__call__(env, stem+'.fo', s, **kw) - env.Depends(xsl, kw['DOCBOOK_XSL']) - result.extend(xsl) - result.extend(__fop_builder.__call__(env, t, xsl, **kw)) - - return result - -def DocbookSlidesHtml(env, target, source=None, *args, **kw): - """ - A pseudo-Builder, providing a Docbook toolchain for HTML slides output. - """ - # Init list of targets/sources - if not SCons.Util.is_List(target): - target = [target] - if not source: - source = target - target = ['index.html'] - elif not SCons.Util.is_List(source): - source = [source] - - # Init XSL stylesheet - __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESHTML', ['slides','html','plain.xsl']) - - # Setup builder - __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder) - - # Detect base dir - base_dir = kw.get('base_dir', '') - if base_dir: - __create_output_dir(base_dir) - - # Create targets - result = [] - r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw) - env.Depends(r, kw['DOCBOOK_XSL']) - result.extend(r) - # Add supporting files for cleanup - env.Clean(r, [os.path.join(base_dir, 'toc.html')] + - glob.glob(os.path.join(base_dir, 'foil*.html'))) - - return result - -def DocbookXInclude(env, target, source, *args, **kw): - """ - A pseudo-Builder, for resolving XIncludes in a separate processing step. 
- """ - # Init list of targets/sources - target, source = __extend_targets_sources(target, source) - - # Setup builder - __builder = __select_builder(__xinclude_lxml_builder,__xinclude_libxml2_builder,__xmllint_builder) - - # Create targets - result = [] - for t,s in zip(target,source): - result.extend(__builder.__call__(env, t, s, **kw)) - - return result - -def DocbookXslt(env, target, source=None, *args, **kw): - """ - A pseudo-Builder, applying a simple XSL transformation to the input file. - """ - # Init list of targets/sources - target, source = __extend_targets_sources(target, source) - - # Init XSL stylesheet - kw['DOCBOOK_XSL'] = kw.get('xsl', 'transform.xsl') - - # Setup builder - __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder) - - # Create targets - result = [] - for t,s in zip(target,source): - r = __builder.__call__(env, t, s, **kw) - env.Depends(r, kw['DOCBOOK_XSL']) - result.extend(r) - - return result - - -def generate(env): - """Add Builders and construction variables for docbook to an Environment.""" - - env.SetDefault( - # Default names for customized XSL stylesheets - DOCBOOK_DEFAULT_XSL_EPUB = '', - DOCBOOK_DEFAULT_XSL_HTML = '', - DOCBOOK_DEFAULT_XSL_HTMLCHUNKED = '', - DOCBOOK_DEFAULT_XSL_HTMLHELP = '', - DOCBOOK_DEFAULT_XSL_PDF = '', - DOCBOOK_DEFAULT_XSL_MAN = '', - DOCBOOK_DEFAULT_XSL_SLIDESPDF = '', - DOCBOOK_DEFAULT_XSL_SLIDESHTML = '', - - # Paths to the detected executables - DOCBOOK_XSLTPROC = '', - DOCBOOK_XMLLINT = '', - DOCBOOK_FOP = '', - - # Additional flags for the text processors - DOCBOOK_XSLTPROCFLAGS = SCons.Util.CLVar(''), - DOCBOOK_XMLLINTFLAGS = SCons.Util.CLVar(''), - DOCBOOK_FOPFLAGS = SCons.Util.CLVar(''), - DOCBOOK_XSLTPROCPARAMS = SCons.Util.CLVar(''), - - # Default command lines for the detected executables - DOCBOOK_XSLTPROCCOM = xsltproc_com['xsltproc'], - DOCBOOK_XMLLINTCOM = xmllint_com['xmllint'], - DOCBOOK_FOPCOM = fop_com['fop'], - - # Screen output for the text processors 
- DOCBOOK_XSLTPROCCOMSTR = None, - DOCBOOK_XMLLINTCOMSTR = None, - DOCBOOK_FOPCOMSTR = None, - - ) - _detect(env) - - env.AddMethod(DocbookEpub, "DocbookEpub") - env.AddMethod(DocbookHtml, "DocbookHtml") - env.AddMethod(DocbookHtmlChunked, "DocbookHtmlChunked") - env.AddMethod(DocbookHtmlhelp, "DocbookHtmlhelp") - env.AddMethod(DocbookPdf, "DocbookPdf") - env.AddMethod(DocbookMan, "DocbookMan") - env.AddMethod(DocbookSlidesPdf, "DocbookSlidesPdf") - env.AddMethod(DocbookSlidesHtml, "DocbookSlidesHtml") - env.AddMethod(DocbookXInclude, "DocbookXInclude") - env.AddMethod(DocbookXslt, "DocbookXslt") - - -def exists(env): - return 1 diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvi.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvi.py deleted file mode 100644 index 1a7f7af5b28..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvi.py +++ /dev/null @@ -1,64 +0,0 @@ -"""SCons.Tool.dvi - -Common DVI Builder definition for various other Tool modules that use it. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/dvi.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Builder -import SCons.Tool - -DVIBuilder = None - -def generate(env): - try: - env['BUILDERS']['DVI'] - except KeyError: - global DVIBuilder - - if DVIBuilder is None: - # The suffix is hard-coded to '.dvi', not configurable via a - # construction variable like $DVISUFFIX, because the output - # file name is hard-coded within TeX. - DVIBuilder = SCons.Builder.Builder(action = {}, - source_scanner = SCons.Tool.LaTeXScanner, - suffix = '.dvi', - emitter = {}, - source_ext_match = None) - - env['BUILDERS']['DVI'] = DVIBuilder - -def exists(env): - # This only puts a skeleton Builder in place, so if someone - # references this Tool directly, it's always "available." - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvipdf.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvipdf.py deleted file mode 100644 index 6012e5b8956..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvipdf.py +++ /dev/null @@ -1,125 +0,0 @@ -"""SCons.Tool.dvipdf - -Tool-specific initialization for dvipdf. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/dvipdf.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Defaults -import SCons.Tool.pdf -import SCons.Tool.tex -import SCons.Util - -_null = SCons.Scanner.LaTeX._null - -def DviPdfPsFunction(XXXDviAction, target = None, source= None, env=None): - """A builder for DVI files that sets the TEXPICTS environment - variable before running dvi2ps or dvipdf.""" - - try: - abspath = source[0].attributes.path - except AttributeError : - abspath = '' - - saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath) - - result = XXXDviAction(target, source, env) - - if saved_env is _null: - try: - del env['ENV']['TEXPICTS'] - except KeyError: - pass # was never set - else: - env['ENV']['TEXPICTS'] = saved_env - - return result - -def DviPdfFunction(target = None, source= None, env=None): - result = DviPdfPsFunction(PDFAction,target,source,env) - return result - -def DviPdfStrFunction(target = None, source= None, env=None): - """A strfunction for dvipdf that returns the appropriate - command string for the no_exec options.""" - if env.GetOption("no_exec"): - result = env.subst('$DVIPDFCOM',0,target,source) - else: - result = '' - return result - -PDFAction = None -DVIPDFAction = None - -def PDFEmitter(target, source, env): - """Strips any .aux or .log files from the input source list. - These are created by the TeX Builder that in all likelihood was - used to generate the .dvi file we're using as input, and we only - care about the .dvi file. 
- """ - def strip_suffixes(n): - return not SCons.Util.splitext(str(n))[1] in ['.aux', '.log'] - source = [src for src in source if strip_suffixes(src)] - return (target, source) - -def generate(env): - """Add Builders and construction variables for dvipdf to an Environment.""" - global PDFAction - if PDFAction is None: - PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR') - - global DVIPDFAction - if DVIPDFAction is None: - DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction = DviPdfStrFunction) - - from . import pdf - pdf.generate(env) - - bld = env['BUILDERS']['PDF'] - bld.add_action('.dvi', DVIPDFAction) - bld.add_emitter('.dvi', PDFEmitter) - - env['DVIPDF'] = 'dvipdf' - env['DVIPDFFLAGS'] = SCons.Util.CLVar('') - env['DVIPDFCOM'] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}' - - # Deprecated synonym. - env['PDFCOM'] = ['$DVIPDFCOM'] - -def exists(env): - SCons.Tool.tex.generate_darwin(env) - return env.Detect('dvipdf') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvips.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvips.py deleted file mode 100644 index 9fce42c9a51..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/dvips.py +++ /dev/null @@ -1,95 +0,0 @@ -"""SCons.Tool.dvips - -Tool-specific initialization for dvips. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/dvips.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Builder -import SCons.Tool.dvipdf -import SCons.Util - -def DviPsFunction(target = None, source= None, env=None): - result = SCons.Tool.dvipdf.DviPdfPsFunction(PSAction,target,source,env) - return result - -def DviPsStrFunction(target = None, source= None, env=None): - """A strfunction for dvipdf that returns the appropriate - command string for the no_exec options.""" - if env.GetOption("no_exec"): - result = env.subst('$PSCOM',0,target,source) - else: - result = '' - return result - -PSAction = None -DVIPSAction = None -PSBuilder = None - -def generate(env): - """Add Builders and construction variables for dvips to an Environment.""" - global PSAction - if PSAction is None: - PSAction = SCons.Action.Action('$PSCOM', '$PSCOMSTR') - - global DVIPSAction - if DVIPSAction is None: - DVIPSAction = SCons.Action.Action(DviPsFunction, strfunction = DviPsStrFunction) - - global PSBuilder - if PSBuilder is None: - PSBuilder = SCons.Builder.Builder(action = PSAction, - prefix = '$PSPREFIX', - suffix = '$PSSUFFIX', - src_suffix = '.dvi', - src_builder = 'DVI', - single_source=True) - - env['BUILDERS']['PostScript'] = PSBuilder - - env['DVIPS'] = 'dvips' - env['DVIPSFLAGS'] = SCons.Util.CLVar('') - # I'm not quite sure I got the directories and filenames right for variant_dir - # We need to be in the correct directory for the sake of latex \includegraphics eps included files. 
- env['PSCOM'] = 'cd ${TARGET.dir} && $DVIPS $DVIPSFLAGS -o ${TARGET.file} ${SOURCE.file}' - env['PSPREFIX'] = '' - env['PSSUFFIX'] = '.ps' - -def exists(env): - SCons.Tool.tex.generate_darwin(env) - return env.Detect('dvips') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f03.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f03.py deleted file mode 100644 index 77a37b99253..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f03.py +++ /dev/null @@ -1,63 +0,0 @@ -"""engine.SCons.Tool.f03 - -Tool-specific initialization for the generic Posix f03 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/f03.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util -from . import fortran -from SCons.Tool.FortranCommon import add_all_to_env, add_f03_to_env - -compilers = ['f03'] - -def generate(env): - add_all_to_env(env) - add_f03_to_env(env) - - fcomp = env.Detect(compilers) or 'f03' - env['F03'] = fcomp - env['SHF03'] = fcomp - - env['FORTRAN'] = fcomp - env['SHFORTRAN'] = fcomp - - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f08.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f08.py deleted file mode 100644 index e7318b9a81f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f08.py +++ /dev/null @@ -1,65 +0,0 @@ -"""engine.SCons.Tool.f08 - -Tool-specific initialization for the generic Posix f08 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -from __future__ import absolute_import - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/f08.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util -from . 
import fortran -from SCons.Tool.FortranCommon import add_all_to_env, add_f08_to_env - -compilers = ['f08'] - -def generate(env): - add_all_to_env(env) - add_f08_to_env(env) - - fcomp = env.Detect(compilers) or 'f08' - env['F08'] = fcomp - env['SHF08'] = fcomp - - env['FORTRAN'] = fcomp - env['SHFORTRAN'] = fcomp - - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f77.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f77.py deleted file mode 100644 index 0777822ce74..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f77.py +++ /dev/null @@ -1,62 +0,0 @@ -"""engine.SCons.Tool.f77 - -Tool-specific initialization for the generic Posix f77 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/f77.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Scanner.Fortran -import SCons.Tool -import SCons.Util -from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env - -compilers = ['f77'] - -def generate(env): - add_all_to_env(env) - add_f77_to_env(env) - - fcomp = env.Detect(compilers) or 'f77' - env['F77'] = fcomp - env['SHF77'] = fcomp - - env['FORTRAN'] = fcomp - env['SHFORTRAN'] = fcomp - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f90.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f90.py deleted file mode 100644 index 6a370bdf799..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f90.py +++ /dev/null @@ -1,62 +0,0 @@ -"""engine.SCons.Tool.f90 - -Tool-specific initialization for the generic Posix f90 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/f90.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Scanner.Fortran -import SCons.Tool -import SCons.Util -from SCons.Tool.FortranCommon import add_all_to_env, add_f90_to_env - -compilers = ['f90'] - -def generate(env): - add_all_to_env(env) - add_f90_to_env(env) - - fc = env.Detect(compilers) or 'f90' - env['F90'] = fc - env['SHF90'] = fc - - env['FORTRAN'] = fc - env['SHFORTRAN'] = fc - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f95.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f95.py deleted file mode 100644 index fca7d620276..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/f95.py +++ /dev/null @@ -1,63 +0,0 @@ -"""engine.SCons.Tool.f95 - -Tool-specific initialization for the generic Posix f95 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/f95.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util -from . import fortran -from SCons.Tool.FortranCommon import add_all_to_env, add_f95_to_env - -compilers = ['f95'] - -def generate(env): - add_all_to_env(env) - add_f95_to_env(env) - - fcomp = env.Detect(compilers) or 'f95' - env['F95'] = fcomp - env['SHF95'] = fcomp - - env['FORTRAN'] = fcomp - env['SHFORTRAN'] = fcomp - - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/filesystem.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/filesystem.py deleted file mode 100644 index ea16abf95f6..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/filesystem.py +++ /dev/null @@ -1,98 +0,0 @@ -"""SCons.Tool.filesystem - -Tool-specific initialization for the filesystem tools. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/filesystem.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons -from SCons.Tool.install import copyFunc - -copyToBuilder, copyAsBuilder = None, None - -def copyto_emitter(target, source, env): - """ changes the path of the source to be under the target (which - are assumed to be directories. 
- """ - n_target = [] - - for t in target: - n_target = n_target + [t.File( str( s ) ) for s in source] - - return (n_target, source) - -def copy_action_func(target, source, env): - assert( len(target) == len(source) ), "\ntarget: %s\nsource: %s" %(list(map(str, target)),list(map(str, source))) - - for t, s in zip(target, source): - if copyFunc(t.get_path(), s.get_path(), env): - return 1 - - return 0 - -def copy_action_str(target, source, env): - return env.subst_target_source(env['COPYSTR'], 0, target, source) - -copy_action = SCons.Action.Action( copy_action_func, copy_action_str ) - -def generate(env): - try: - env['BUILDERS']['CopyTo'] - env['BUILDERS']['CopyAs'] - except KeyError as e: - global copyToBuilder - if copyToBuilder is None: - copyToBuilder = SCons.Builder.Builder( - action = copy_action, - target_factory = env.fs.Dir, - source_factory = env.fs.Entry, - multi = 1, - emitter = [ copyto_emitter, ] ) - - global copyAsBuilder - if copyAsBuilder is None: - copyAsBuilder = SCons.Builder.Builder( - action = copy_action, - target_factory = env.fs.Entry, - source_factory = env.fs.Entry ) - - env['BUILDERS']['CopyTo'] = copyToBuilder - env['BUILDERS']['CopyAs'] = copyAsBuilder - - env['COPYSTR'] = 'Copy file(s): "$SOURCES" to "$TARGETS"' - -def exists(env): - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/fortran.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/fortran.py deleted file mode 100644 index 7b332c9aa26..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/fortran.py +++ /dev/null @@ -1,62 +0,0 @@ -"""SCons.Tool.fortran - -Tool-specific initialization for a generic Posix f77/f90 Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/fortran.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import re - -import SCons.Action -import SCons.Defaults -import SCons.Scanner.Fortran -import SCons.Tool -import SCons.Util -from SCons.Tool.FortranCommon import add_all_to_env, add_fortran_to_env - -compilers = ['f95', 'f90', 'f77'] - -def generate(env): - add_all_to_env(env) - add_fortran_to_env(env) - - fc = env.Detect(compilers) or 'f77' - env['SHFORTRAN'] = fc - env['FORTRAN'] = fc - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/g++.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/g++.py deleted file mode 100644 index eace13c7299..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/g++.py +++ /dev/null @@ -1,45 +0,0 @@ -"""SCons.Tool.g++ - -Tool-specific initialization for g++. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/g++.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -#forward proxy to the preffered cxx version -from SCons.Tool.gxx import * - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/g77.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/g77.py deleted file mode 100644 index f3f935d2f9d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/g77.py +++ /dev/null @@ -1,73 +0,0 @@ -"""engine.SCons.Tool.g77 - -Tool-specific initialization for g77. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/g77.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util -from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env - -compilers = ['g77', 'f77'] - -def generate(env): - """Add Builders and construction variables for g77 to an Environment.""" - add_all_to_env(env) - add_f77_to_env(env) - - fcomp = env.Detect(compilers) or 'g77' - if env['PLATFORM'] in ['cygwin', 'win32']: - env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS') - env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS') - else: - env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC') - env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC') - - env['FORTRAN'] = fcomp - env['SHFORTRAN'] = '$FORTRAN' - - env['F77'] = fcomp - env['SHF77'] = '$F77' - - env['INCFORTRANPREFIX'] = "-I" - env['INCFORTRANSUFFIX'] = "" - - env['INCF77PREFIX'] = "-I" - env['INCF77SUFFIX'] = "" - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gas.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gas.py deleted file mode 100644 index 9b6020164ee..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gas.py +++ /dev/null @@ -1,56 +0,0 @@ -"""SCons.Tool.gas - -Tool-specific initialization for as, the Gnu assembler. 
- -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/gas.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -try: - as_module = __import__('as', globals(), locals(), []) -except: - as_module = __import__(__package__+'.as', globals(), locals(), ['*']) - -assemblers = ['as', 'gas'] - -def generate(env): - """Add Builders and construction variables for as to an Environment.""" - as_module.generate(env) - - env['AS'] = env.Detect(assemblers) or 'as' - -def exists(env): - return env.Detect(assemblers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gcc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gcc.py deleted file mode 100644 index 65b9f573f4c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gcc.py +++ /dev/null @@ -1,111 +0,0 @@ -"""SCons.Tool.gcc - -Tool-specific initialization for gcc. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/gcc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . import cc -import os -import re -import subprocess - -import SCons.Util - -compilers = ['gcc', 'cc'] - - -def generate(env): - """Add Builders and construction variables for gcc to an Environment.""" - - if 'CC' not in env: - env['CC'] = env.Detect(compilers) or compilers[0] - - cc.generate(env) - - if env['PLATFORM'] in ['cygwin', 'win32']: - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') - else: - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') - # determine compiler version - version = detect_version(env, env['CC']) - if version: - env['CCVERSION'] = version - - -def exists(env): - # is executable, and is a GNU compiler (or accepts '--version' at least) - return detect_version(env, env.Detect(env.get('CC', compilers))) - - -def detect_version(env, cc): - """Return the version of the GNU compiler, or None if it is not a GNU compiler.""" - version = None - cc = env.subst(cc) - if not cc: - return version - - # -dumpversion was added in GCC 3.0. As long as we're supporting - # GCC versions older than that, we should use --version and a - # regular expression. 
- # pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'], - pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'], - stdin='devnull', - stderr='devnull', - stdout=subprocess.PIPE) - if pipe.wait() != 0: - return version - - with pipe.stdout: - # -dumpversion variant: - # line = pipe.stdout.read().strip() - # --version variant: - line = SCons.Util.to_str(pipe.stdout.readline()) - # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer: - # So continue with reading to let the child process actually terminate. - while SCons.Util.to_str(pipe.stdout.readline()): - pass - - # -dumpversion variant: - # if line: - # version = line - # --version variant: - match = re.search(r'[0-9]+(\.[0-9]+)+', line) - if match: - version = match.group(0) - - return version - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gdc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gdc.py deleted file mode 100644 index 4c392e84bc1..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gdc.py +++ /dev/null @@ -1,145 +0,0 @@ -from __future__ import print_function - -"""SCons.Tool.gdc - -Tool-specific initialization for the GDC compiler. -(https://github.com/D-Programming-GDC/GDC) - -Developed by Russel Winder (russel@winder.org.uk) -2012-05-09 onwards - -Compiler variables: - DC - The name of the D compiler to use. Defaults to gdc. - DPATH - List of paths to search for import modules. - DVERSIONS - List of version tags to enable when compiling. - DDEBUG - List of debug tags to enable when compiling. - -Linker related variables: - LIBS - List of library files to link in. - DLINK - Name of the linker to use. Defaults to gdc. - DLINKFLAGS - List of linker flags. - -Lib tool variables: - DLIB - Name of the lib tool to use. Defaults to lib. 
- DLIBFLAGS - List of flags to pass to the lib tool. - LIBS - Same as for the linker. (libraries to pull into the .lib) -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/gdc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Defaults -import SCons.Tool - -import SCons.Tool.DCommon as DCommon - - -def generate(env): - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - static_obj.add_action('.d', SCons.Defaults.DAction) - shared_obj.add_action('.d', SCons.Defaults.ShDAction) - static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter) - - env['DC'] = env.Detect('gdc') or 'gdc' - env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -o $TARGET $SOURCES' - env['_DINCFLAGS'] = '${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)}' - env['_DVERFLAGS'] = '${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)}' - env['_DDEBUGFLAGS'] = '${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)}' - env['_DFLAGS'] = '${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)}' - - env['SHDC'] = '$DC' - env['SHDCOM'] = '$SHDC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -fPIC -c -o $TARGET $SOURCES' - - env['DPATH'] = ['#/'] - env['DFLAGS'] = [] - env['DVERSIONS'] = [] - env['DDEBUG'] = [] - - if env['DC']: - DCommon.addDPATHToEnv(env, env['DC']) - - env['DINCPREFIX'] = '-I' - env['DINCSUFFIX'] = '' - env['DVERPREFIX'] = '-version=' - env['DVERSUFFIX'] = '' - env['DDEBUGPREFIX'] = '-debug=' - env['DDEBUGSUFFIX'] = '' - env['DFLAGPREFIX'] = '-' - env['DFLAGSUFFIX'] = '' - env['DFILESUFFIX'] = '.d' - - env['DLINK'] = '$DC' - env['DLINKFLAGS'] = SCons.Util.CLVar('') - env['DLINKCOM'] = '$DLINK -o $TARGET $DLINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - - env['SHDLINK'] = '$DC' - env['SHDLINKFLAGS'] = SCons.Util.CLVar('$DLINKFLAGS -shared -shared-libphobos') - env['SHDLINKCOM'] = '$DLINK -o $TARGET $SHDLINKFLAGS $__SHDLIBVERSIONFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - - env['DLIB'] = 'lib' if env['PLATFORM'] == 
'win32' else 'ar cr' - env['DLIBCOM'] = '$DLIB $_DLIBFLAGS {0}$TARGET $SOURCES $_DLINKLIBFLAGS'.format('-c ' if env['PLATFORM'] == 'win32' else '') - - env['_DLIBFLAGS'] = '${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)}' - - env['DLIBFLAGPREFIX'] = '-' - env['DLIBFLAGSUFFIX'] = '' - env['DLINKFLAGPREFIX'] = '-' - env['DLINKFLAGSUFFIX'] = '' - - # __RPATH is set to $_RPATH in the platform specification if that - # platform supports it. - env['RPATHPREFIX'] = '-Wl,-rpath=' - env['RPATHSUFFIX'] = '' - env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' - - # Support for versioned libraries - env['_SHDLIBVERSIONFLAGS'] = '$SHDLIBVERSIONFLAGS -Wl,-soname=$_SHDLIBSONAME' - env['_SHDLIBSONAME'] = '${DShLibSonameGenerator(__env__,TARGET)}' - # NOTE: this is a quick hack, the soname will only work if there is - # c/c++ linker loaded which provides callback for the ShLibSonameGenerator - env['DShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator - # NOTE: this is only for further reference, currently $SHDLIBVERSION does - # not work, the user must use $SHLIBVERSION - env['SHDLIBVERSION'] = '$SHLIBVERSION' - env['SHDLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS' - - env['BUILDERS']['ProgramAllAtOnce'] = SCons.Builder.Builder( - action='$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -o $TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS', - emitter=DCommon.allAtOnceEmitter, - ) - - -def exists(env): - return env.Detect('gdc') - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gettext_tool.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gettext_tool.py deleted file mode 100644 index 5891e40807f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gettext_tool.py +++ /dev/null @@ -1,60 +0,0 @@ -"""gettext tool -""" - - -# Copyright (c) 2001 - 2019 The SCons 
Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/gettext_tool.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -############################################################################# -def generate(env,**kw): - import sys - import os - import SCons.Tool - from SCons.Platform.mingw import MINGW_DEFAULT_PATHS - from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS - - from SCons.Tool.GettextCommon \ - import _translate, tool_list - for t in tool_list(env['PLATFORM'], env): - if sys.platform == 'win32': - tool = SCons.Tool.find_program_path(env, t, default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if tool: - tool_bin_dir = os.path.dirname(tool) - env.AppendENVPath('PATH', tool_bin_dir) - else: - SCons.Warnings.Warning(t + ' tool requested, but binary not found in ENV PATH') - env.Tool(t) - env.AddMethod(_translate, 'Translate') -############################################################################# - -############################################################################# -def exists(env): - from SCons.Tool.GettextCommon \ - import _xgettext_exists, _msginit_exists, \ - _msgmerge_exists, _msgfmt_exists - try: - return _xgettext_exists(env) and _msginit_exists(env) \ - and _msgmerge_exists(env) and _msgfmt_exists(env) - except: - return False -############################################################################# diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gfortran.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gfortran.py deleted file mode 100644 index 986ebc6078b..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gfortran.py +++ /dev/null @@ -1,66 +0,0 @@ -"""SCons.Tool.gfortran - -Tool-specific initialization for gfortran, the GNU Fortran 95/Fortran -2003 compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/gfortran.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from . 
import fortran - -def generate(env): - """Add Builders and construction variables for gfortran to an - Environment.""" - fortran.generate(env) - - for dialect in ['F77', 'F90', 'FORTRAN', 'F95', 'F03', 'F08']: - env['%s' % dialect] = 'gfortran' - env['SH%s' % dialect] = '$%s' % dialect - if env['PLATFORM'] in ['cygwin', 'win32']: - env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect) - else: - env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect) - - env['INC%sPREFIX' % dialect] = "-I" - env['INC%sSUFFIX' % dialect] = "" - - env['FORTRANMODDIRPREFIX'] = "-J" - -def exists(env): - return env.Detect('gfortran') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gnulink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gnulink.py deleted file mode 100644 index 0715ffcf140..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gnulink.py +++ /dev/null @@ -1,80 +0,0 @@ -"""SCons.Tool.gnulink - -Tool-specific initialization for the gnu linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/gnulink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util -import SCons.Tool -import os -import sys -import re - -from . import link - - -def generate(env): - """Add Builders and construction variables for gnulink to an Environment.""" - link.generate(env) - - if env['PLATFORM'] == 'hpux': - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared -fPIC') - - # __RPATH is set to $_RPATH in the platform specification if that - # platform supports it. 
- env['RPATHPREFIX'] = '-Wl,-rpath=' - env['RPATHSUFFIX'] = '' - env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' - - # OpenBSD doesn't usually use SONAME for libraries - use_soname = not sys.platform.startswith('openbsd') - link._setup_versioned_lib_variables(env, tool = 'gnulink', use_soname = use_soname) - env['LINKCALLBACKS'] = link._versioned_lib_callbacks() - - # For backward-compatibility with older SCons versions - env['SHLIBVERSIONFLAGS'] = SCons.Util.CLVar('-Wl,-Bsymbolic') - -def exists(env): - # TODO: sync with link.smart_link() to choose a linker - linkers = { 'CXX': ['g++'], 'CC': ['gcc'] } - alltools = [] - for langvar, linktools in linkers.items(): - if langvar in env: # use CC over CXX when user specified CC but not CXX - return SCons.Tool.FindTool(linktools, env) - alltools.extend(linktools) - return SCons.Tool.FindTool(alltools, env) # find CXX or CC - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gs.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gs.py deleted file mode 100644 index 3e9543facf5..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gs.py +++ /dev/null @@ -1,91 +0,0 @@ -"""SCons.Tool.gs - -Tool-specific initialization for Ghostscript. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/gs.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Builder -import SCons.Platform -import SCons.Util - -# Ghostscript goes by different names on different platforms... -platform = SCons.Platform.platform_default() - -if platform == 'os2': - gs = 'gsos2' -elif platform == 'win32': - gs = 'gswin32c' -else: - gs = 'gs' - -GhostscriptAction = None - -def generate(env): - """Add Builders and construction variables for Ghostscript to an - Environment.""" - global GhostscriptAction - # The following try-except block enables us to use the Tool - # in standalone mode (without the accompanying pdf.py), - # whenever we need an explicit call of gs via the Gs() - # Builder ... 
- try: - if GhostscriptAction is None: - GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR') - - from SCons.Tool import pdf - pdf.generate(env) - - bld = env['BUILDERS']['PDF'] - bld.add_action('.ps', GhostscriptAction) - except ImportError as e: - pass - - gsbuilder = SCons.Builder.Builder(action = SCons.Action.Action('$GSCOM', '$GSCOMSTR')) - env['BUILDERS']['Gs'] = gsbuilder - - env['GS'] = gs - env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite') - env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES' - - -def exists(env): - if 'PS2PDF' in env: - return env.Detect(env['PS2PDF']) - else: - return env.Detect(gs) or SCons.Util.WhereIs(gs) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gxx.py deleted file mode 100644 index 2eb678dcb8c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/gxx.py +++ /dev/null @@ -1,81 +0,0 @@ -"""SCons.Tool.g++ - -Tool-specific initialization for g++. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/gxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import re -import subprocess - -import SCons.Tool -import SCons.Util - -from . import gcc -from . import cxx - -compilers = ['g++'] - - -def generate(env): - """Add Builders and construction variables for g++ to an Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - if 'CXX' not in env: - env['CXX'] = env.Detect(compilers) or compilers[0] - - cxx.generate(env) - - # platform specific settings - if env['PLATFORM'] == 'aix': - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc') - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - elif env['PLATFORM'] == 'hpux': - env['SHOBJSUFFIX'] = '.pic.o' - elif env['PLATFORM'] == 'sunos': - env['SHOBJSUFFIX'] = '.pic.o' - # determine compiler version - version = gcc.detect_version(env, env['CXX']) - if version: - env['CXXVERSION'] = version - - -def exists(env): - # is executable, and is a GNU compiler (or accepts '--version' at least) - return gcc.detect_version(env, env.Detect(env.get('CXX', compilers))) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpc++.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpc++.py deleted file mode 100644 index ee94e226b40..00000000000 --- 
a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpc++.py +++ /dev/null @@ -1,45 +0,0 @@ -"""SCons.Tool.hpc++ - -Tool-specific initialization for c++ on HP/UX. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/hpc++.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -#forward proxy to the preffered cxx version -from SCons.Tool.hpcxx import * - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpcc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpcc.py deleted file mode 100644 index f118cf3b11a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpcc.py +++ /dev/null @@ -1,53 +0,0 @@ -"""SCons.Tool.hpcc - -Tool-specific initialization for HP aCC and cc. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/hpcc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from . import cc - -def generate(env): - """Add Builders and construction variables for aCC & cc to an Environment.""" - cc.generate(env) - - env['CXX'] = 'aCC' - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z') - -def exists(env): - return env.Detect('aCC') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpcxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpcxx.py deleted file mode 100644 index e4dcb978888..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hpcxx.py +++ /dev/null @@ -1,88 +0,0 @@ -"""SCons.Tool.hpc++ - -Tool-specific initialization for c++ on HP/UX. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/hpcxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Util - -import SCons.Tool.cxx -cplusplus = SCons.Tool.cxx -#cplusplus = __import__('cxx', globals(), locals(), []) - - -acc = None - -# search for the acc compiler and linker front end - -try: - dirs = os.listdir('/opt') -except (IOError, OSError): - # Not being able to read the directory because it doesn't exist - # (IOError) or isn't readable (OSError) is okay. - dirs = [] - -for dir in dirs: - cc = '/opt/' + dir + '/bin/aCC' - if os.path.exists(cc): - acc = cc - break - - -def generate(env): - """Add Builders and construction variables for g++ to an Environment.""" - cplusplus.generate(env) - - if acc: - env['CXX'] = acc or 'aCC' - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') - # determine version of aCC - with os.popen(acc + ' -V 2>&1') as p: - line = p.readline().rstrip() - if line.find('aCC: HP ANSI C++') == 0: - env['CXXVERSION'] = line.split()[-1] - - if env['PLATFORM'] == 'cygwin': - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') - else: - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') - -def exists(env): - return acc - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hplink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hplink.py deleted file mode 100644 index 9f90071a84c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/hplink.py +++ /dev/null @@ -1,77 +0,0 @@ -"""SCons.Tool.hplink - -Tool-specific initialization for the HP linker. 
- -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/hplink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path - -import SCons.Util - -from . import link - -ccLinker = None - -# search for the acc compiler and linker front end - -try: - dirs = os.listdir('/opt') -except (IOError, OSError): - # Not being able to read the directory because it doesn't exist - # (IOError) or isn't readable (OSError) is okay. - dirs = [] - -for dir in dirs: - linker = '/opt/' + dir + '/bin/aCC' - if os.path.exists(linker): - ccLinker = linker - break - -def generate(env): - """ - Add Builders and construction variables for Visual Age linker to - an Environment. 
- """ - link.generate(env) - - env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,+s -Wl,+vnocompatwarnings') - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -b') - env['SHLIBSUFFIX'] = '.sl' - -def exists(env): - return ccLinker - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icc.py deleted file mode 100644 index 877a2126c07..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icc.py +++ /dev/null @@ -1,59 +0,0 @@ -"""engine.SCons.Tool.icc - -Tool-specific initialization for the OS/2 icc compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/icc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . import cc - -def generate(env): - """Add Builders and construction variables for the OS/2 to an Environment.""" - cc.generate(env) - - env['CC'] = 'icc' - env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' - env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' - env['CPPDEFPREFIX'] = '/D' - env['CPPDEFSUFFIX'] = '' - env['INCPREFIX'] = '/I' - env['INCSUFFIX'] = '' - env['CFILESUFFIX'] = '.c' - env['CXXFILESUFFIX'] = '.cc' - -def exists(env): - return env.Detect('icc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icl.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icl.py deleted file mode 100644 index be67bdc7921..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icl.py +++ /dev/null @@ -1,52 +0,0 @@ -"""engine.SCons.Tool.icl - -Tool-specific initialization for the Intel C/C++ compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/icl.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Tool.intelc - -# This has been completely superseded by intelc.py, which can -# handle both Windows and Linux versions. 
- -def generate(*args, **kw): - """Add Builders and construction variables for icl to an Environment.""" - return SCons.Tool.intelc.generate(*args, **kw) - -def exists(*args, **kw): - return SCons.Tool.intelc.exists(*args, **kw) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ifl.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ifl.py deleted file mode 100644 index 2139b9f712d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ifl.py +++ /dev/null @@ -1,72 +0,0 @@ -"""SCons.Tool.ifl - -Tool-specific initialization for the Intel Fortran compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/ifl.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -from SCons.Scanner.Fortran import FortranScan -from .FortranCommon import add_all_to_env - -def generate(env): - """Add Builders and construction variables for ifl to an Environment.""" - fscan = FortranScan("FORTRANPATH") - SCons.Tool.SourceFileScanner.add_scanner('.i', fscan) - SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan) - - if 'FORTRANFILESUFFIXES' not in env: - env['FORTRANFILESUFFIXES'] = ['.i'] - else: - env['FORTRANFILESUFFIXES'].append('.i') - - if 'F90FILESUFFIXES' not in env: - env['F90FILESUFFIXES'] = ['.i90'] - else: - env['F90FILESUFFIXES'].append('.i90') - - add_all_to_env(env) - - env['FORTRAN'] = 'ifl' - env['SHFORTRAN'] = '$FORTRAN' - env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' - env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' - env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' - env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' - -def exists(env): - return env.Detect('ifl') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ifort.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ifort.py deleted file mode 100644 index a248b3e6a1b..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ifort.py +++ /dev/null @@ -1,88 +0,0 @@ -"""SCons.Tool.ifort - 
-Tool-specific initialization for newer versions of the Intel Fortran Compiler -for Linux/Windows (and possibly Mac OS X). - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/ifort.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -from SCons.Scanner.Fortran import FortranScan -from .FortranCommon import add_all_to_env - -def generate(env): - """Add Builders and construction variables for ifort to an Environment.""" - # ifort supports Fortran 90 and Fortran 95 - # Additionally, ifort recognizes more file extensions. 
- fscan = FortranScan("FORTRANPATH") - SCons.Tool.SourceFileScanner.add_scanner('.i', fscan) - SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan) - - if 'FORTRANFILESUFFIXES' not in env: - env['FORTRANFILESUFFIXES'] = ['.i'] - else: - env['FORTRANFILESUFFIXES'].append('.i') - - if 'F90FILESUFFIXES' not in env: - env['F90FILESUFFIXES'] = ['.i90'] - else: - env['F90FILESUFFIXES'].append('.i90') - - add_all_to_env(env) - - fc = 'ifort' - - for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: - env['%s' % dialect] = fc - env['SH%s' % dialect] = '$%s' % dialect - if env['PLATFORM'] == 'posix': - env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect) - - if env['PLATFORM'] == 'win32': - # On Windows, the ifort compiler specifies the object on the - # command line with -object:, not -o. Massage the necessary - # command-line construction variables. - for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: - for var in ['%sCOM' % dialect, '%sPPCOM' % dialect, - 'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]: - env[var] = env[var].replace('-o $TARGET', '-object:$TARGET') - env['FORTRANMODDIRPREFIX'] = "/module:" - else: - env['FORTRANMODDIRPREFIX'] = "-module " - -def exists(env): - return env.Detect('ifort') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ilink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ilink.py deleted file mode 100644 index 289235ccedb..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ilink.py +++ /dev/null @@ -1,59 +0,0 @@ -"""SCons.Tool.ilink - -Tool-specific initialization for the OS/2 ilink linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/ilink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -def generate(env): - """Add Builders and construction variables for ilink to an Environment.""" - SCons.Tool.createProgBuilder(env) - - env['LINK'] = 'ilink' - env['LINKFLAGS'] = SCons.Util.CLVar('') - env['LINKCOM'] = '$LINK $LINKFLAGS /O:$TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - env['LIBDIRPREFIX']='/LIBPATH:' - env['LIBDIRSUFFIX']='' - env['LIBLINKPREFIX']='' - env['LIBLINKSUFFIX']='$LIBSUFFIX' - -def exists(env): - return env.Detect('ilink') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ilink32.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ilink32.py deleted file mode 100644 index 33df7903c10..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ilink32.py +++ /dev/null @@ -1,60 +0,0 @@ -"""SCons.Tool.ilink32 - -XXX - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/ilink32.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Tool -import SCons.Tool.bcc32 -import SCons.Util - -def generate(env): - """Add Builders and construction variables for Borland ilink to an - Environment.""" - SCons.Tool.createSharedLibBuilder(env) - SCons.Tool.createProgBuilder(env) - - env['LINK'] = '$CC' - env['LINKFLAGS'] = SCons.Util.CLVar('') - env['LINKCOM'] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS' - env['LIBDIRPREFIX']='' - env['LIBDIRSUFFIX']='' - env['LIBLINKPREFIX']='' - env['LIBLINKSUFFIX']='$LIBSUFFIX' - - -def exists(env): - # Uses bcc32 to do linking as it generally knows where the standard - # LIBS are and set up the linking correctly - return SCons.Tool.bcc32.findIt('bcc32', env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/install.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/install.py deleted file mode 100644 index f998baac4dc..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/install.py +++ /dev/null @@ -1,428 +0,0 @@ -"""SCons.Tool.install - -Tool-specific initialization for the install tool. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/install.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import re -import shutil -import stat - -import SCons.Action -import SCons.Tool -import SCons.Util - -# -# We keep track of *all* installed files. -_INSTALLED_FILES = [] -_UNIQUE_INSTALLED_FILES = None - -class CopytreeError(EnvironmentError): - pass - -# This is a patched version of shutil.copytree from python 2.5. It -# doesn't fail if the dir exists, which regular copytree does -# (annoyingly). Note the XXX comment in the docstring. -def scons_copytree(src, dst, symlinks=False): - """Recursively copy a directory tree using copy2(). - - The destination directory must not already exist. - If exception(s) occur, an CopytreeError is raised with a list of reasons. 
- - If the optional symlinks flag is true, symbolic links in the - source tree result in symbolic links in the destination tree; if - it is false, the contents of the files pointed to by symbolic - links are copied. - - XXX Consider this example code rather than the ultimate tool. - - """ - names = os.listdir(src) - # garyo@genarts.com fix: check for dir before making dirs. - if not os.path.exists(dst): - os.makedirs(dst) - errors = [] - for name in names: - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) - try: - if symlinks and os.path.islink(srcname): - linkto = os.readlink(srcname) - os.symlink(linkto, dstname) - elif os.path.isdir(srcname): - scons_copytree(srcname, dstname, symlinks) - else: - shutil.copy2(srcname, dstname) - # XXX What about devices, sockets etc.? - except (IOError, os.error) as why: - errors.append((srcname, dstname, str(why))) - # catch the CopytreeError from the recursive copytree so that we can - # continue with other files - except CopytreeError as err: - errors.extend(err.args[0]) - try: - shutil.copystat(src, dst) - except SCons.Util.WinError: - # can't copy file access times on Windows - pass - except OSError as why: - errors.extend((src, dst, str(why))) - if errors: - raise CopytreeError(errors) - - -# -# Functions doing the actual work of the Install Builder. 
-# -def copyFunc(dest, source, env): - """Install a source file or directory into a destination by copying, - (including copying permission/mode bits).""" - - if os.path.isdir(source): - if os.path.exists(dest): - if not os.path.isdir(dest): - raise SCons.Errors.UserError("cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source))) - else: - parent = os.path.split(dest)[0] - if not os.path.exists(parent): - os.makedirs(parent) - scons_copytree(source, dest) - else: - shutil.copy2(source, dest) - st = os.stat(source) - os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - - return 0 - -# -# Functions doing the actual work of the InstallVersionedLib Builder. -# -def copyFuncVersionedLib(dest, source, env): - """Install a versioned library into a destination by copying, - (including copying permission/mode bits) and then creating - required symlinks.""" - - if os.path.isdir(source): - raise SCons.Errors.UserError("cannot install directory `%s' as a version library" % str(source) ) - else: - # remove the link if it is already there - try: - os.remove(dest) - except: - pass - shutil.copy2(source, dest) - st = os.stat(source) - os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - installShlibLinks(dest, source, env) - - return 0 - -def listShlibLinksToInstall(dest, source, env): - install_links = [] - source = env.arg2nodes(source) - dest = env.fs.File(dest) - install_dir = dest.get_dir() - for src in source: - symlinks = getattr(getattr(src,'attributes',None), 'shliblinks', None) - if symlinks: - for link, linktgt in symlinks: - link_base = os.path.basename(link.get_path()) - linktgt_base = os.path.basename(linktgt.get_path()) - install_link = env.fs.File(link_base, install_dir) - install_linktgt = env.fs.File(linktgt_base, install_dir) - install_links.append((install_link, install_linktgt)) - return install_links - -def installShlibLinks(dest, source, env): - """If we are installing a versioned shared library create the 
required links.""" - Verbose = False - symlinks = listShlibLinksToInstall(dest, source, env) - if Verbose: - print('installShlibLinks: symlinks={:r}'.format(SCons.Tool.StringizeLibSymlinks(symlinks))) - if symlinks: - SCons.Tool.CreateLibSymlinks(env, symlinks) - return - -def installFunc(target, source, env): - """Install a source file into a target using the function specified - as the INSTALL construction variable.""" - try: - install = env['INSTALL'] - except KeyError: - raise SCons.Errors.UserError('Missing INSTALL construction variable.') - - assert len(target)==len(source), \ - "Installing source %s into target %s: target and source lists must have same length."%(list(map(str, source)), list(map(str, target))) - for t,s in zip(target,source): - if install(t.get_path(),s.get_path(),env): - return 1 - - return 0 - -def installFuncVersionedLib(target, source, env): - """Install a versioned library into a target using the function specified - as the INSTALLVERSIONEDLIB construction variable.""" - try: - install = env['INSTALLVERSIONEDLIB'] - except KeyError: - raise SCons.Errors.UserError('Missing INSTALLVERSIONEDLIB construction variable.') - - assert len(target)==len(source), \ - "Installing source %s into target %s: target and source lists must have same length."%(list(map(str, source)), list(map(str, target))) - for t,s in zip(target,source): - if hasattr(t.attributes, 'shlibname'): - tpath = os.path.join(t.get_dir(), t.attributes.shlibname) - else: - tpath = t.get_path() - if install(tpath,s.get_path(),env): - return 1 - - return 0 - -def stringFunc(target, source, env): - installstr = env.get('INSTALLSTR') - if installstr: - return env.subst_target_source(installstr, 0, target, source) - target = str(target[0]) - source = str(source[0]) - if os.path.isdir(source): - type = 'directory' - else: - type = 'file' - return 'Install %s: "%s" as "%s"' % (type, source, target) - -# -# Emitter functions -# -def add_targets_to_INSTALLED_FILES(target, source, env): - 
""" An emitter that adds all target files to the list stored in the - _INSTALLED_FILES global variable. This way all installed files of one - scons call will be collected. - """ - global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES - _INSTALLED_FILES.extend(target) - - _UNIQUE_INSTALLED_FILES = None - return (target, source) - -def add_versioned_targets_to_INSTALLED_FILES(target, source, env): - """ An emitter that adds all target files to the list stored in the - _INSTALLED_FILES global variable. This way all installed files of one - scons call will be collected. - """ - global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES - Verbose = False - _INSTALLED_FILES.extend(target) - if Verbose: - print("add_versioned_targets_to_INSTALLED_FILES: target={:r}".format(list(map(str, target)))) - symlinks = listShlibLinksToInstall(target[0], source, env) - if symlinks: - SCons.Tool.EmitLibSymlinks(env, symlinks, target[0]) - _UNIQUE_INSTALLED_FILES = None - return (target, source) - -class DESTDIR_factory(object): - """ A node factory, where all files will be relative to the dir supplied - in the constructor. 
- """ - def __init__(self, env, dir): - self.env = env - self.dir = env.arg2nodes( dir, env.fs.Dir )[0] - - def Entry(self, name): - name = SCons.Util.make_path_relative(name) - return self.dir.Entry(name) - - def Dir(self, name): - name = SCons.Util.make_path_relative(name) - return self.dir.Dir(name) - -# -# The Builder Definition -# -install_action = SCons.Action.Action(installFunc, stringFunc) -installas_action = SCons.Action.Action(installFunc, stringFunc) -installVerLib_action = SCons.Action.Action(installFuncVersionedLib, stringFunc) - -BaseInstallBuilder = None - -def InstallBuilderWrapper(env, target=None, source=None, dir=None, **kw): - if target and dir: - import SCons.Errors - raise SCons.Errors.UserError("Both target and dir defined for Install(), only one may be defined.") - if not dir: - dir=target - - import SCons.Script - install_sandbox = SCons.Script.GetOption('install_sandbox') - if install_sandbox: - target_factory = DESTDIR_factory(env, install_sandbox) - else: - target_factory = env.fs - - try: - dnodes = env.arg2nodes(dir, target_factory.Dir) - except TypeError: - raise SCons.Errors.UserError("Target `%s' of Install() is a file, but should be a directory. Perhaps you have the Install() arguments backwards?" % str(dir)) - sources = env.arg2nodes(source, env.fs.Entry) - tgt = [] - for dnode in dnodes: - for src in sources: - # Prepend './' so the lookup doesn't interpret an initial - # '#' on the file name portion as meaning the Node should - # be relative to the top-level SConstruct directory. 
- target = env.fs.Entry('.'+os.sep+src.name, dnode) - tgt.extend(BaseInstallBuilder(env, target, src, **kw)) - return tgt - - -def InstallAsBuilderWrapper(env, target=None, source=None, **kw): - result = [] - for src, tgt in map(lambda x, y: (x, y), source, target): - result.extend(BaseInstallBuilder(env, tgt, src, **kw)) - return result - -BaseVersionedInstallBuilder = None - - -def InstallVersionedBuilderWrapper(env, target=None, source=None, dir=None, **kw): - if target and dir: - import SCons.Errors - raise SCons.Errors.UserError("Both target and dir defined for Install(), only one may be defined.") - if not dir: - dir=target - - import SCons.Script - install_sandbox = SCons.Script.GetOption('install_sandbox') - if install_sandbox: - target_factory = DESTDIR_factory(env, install_sandbox) - else: - target_factory = env.fs - - try: - dnodes = env.arg2nodes(dir, target_factory.Dir) - except TypeError: - raise SCons.Errors.UserError("Target `%s' of Install() is a file, but should be a directory. Perhaps you have the Install() arguments backwards?" % str(dir)) - sources = env.arg2nodes(source, env.fs.Entry) - tgt = [] - for dnode in dnodes: - for src in sources: - # Prepend './' so the lookup doesn't interpret an initial - # '#' on the file name portion as meaning the Node should - # be relative to the top-level SConstruct directory. 
- target = env.fs.Entry('.'+os.sep+src.name, dnode) - tgt.extend(BaseVersionedInstallBuilder(env, target, src, **kw)) - return tgt - -added = None - - -def generate(env): - - from SCons.Script import AddOption, GetOption - global added - if not added: - added = 1 - AddOption('--install-sandbox', - dest='install_sandbox', - type="string", - action="store", - help='A directory under which all installed files will be placed.') - - global BaseInstallBuilder - if BaseInstallBuilder is None: - install_sandbox = GetOption('install_sandbox') - if install_sandbox: - target_factory = DESTDIR_factory(env, install_sandbox) - else: - target_factory = env.fs - - BaseInstallBuilder = SCons.Builder.Builder( - action = install_action, - target_factory = target_factory.Entry, - source_factory = env.fs.Entry, - multi = 1, - emitter = [ add_targets_to_INSTALLED_FILES, ], - source_scanner = SCons.Scanner.Base( {}, name = 'Install', recursive = False ), - name = 'InstallBuilder') - - global BaseVersionedInstallBuilder - if BaseVersionedInstallBuilder is None: - install_sandbox = GetOption('install_sandbox') - if install_sandbox: - target_factory = DESTDIR_factory(env, install_sandbox) - else: - target_factory = env.fs - - BaseVersionedInstallBuilder = SCons.Builder.Builder( - action = installVerLib_action, - target_factory = target_factory.Entry, - source_factory = env.fs.Entry, - multi = 1, - emitter = [ add_versioned_targets_to_INSTALLED_FILES, ], - name = 'InstallVersionedBuilder') - - env['BUILDERS']['_InternalInstall'] = InstallBuilderWrapper - env['BUILDERS']['_InternalInstallAs'] = InstallAsBuilderWrapper - env['BUILDERS']['_InternalInstallVersionedLib'] = InstallVersionedBuilderWrapper - - # We'd like to initialize this doing something like the following, - # but there isn't yet support for a ${SOURCE.type} expansion that - # will print "file" or "directory" depending on what's being - # installed. 
For now we punt by not initializing it, and letting - # the stringFunc() that we put in the action fall back to the - # hand-crafted default string if it's not set. - # - #try: - # env['INSTALLSTR'] - #except KeyError: - # env['INSTALLSTR'] = 'Install ${SOURCE.type}: "$SOURCES" as "$TARGETS"' - - try: - env['INSTALL'] - except KeyError: - env['INSTALL'] = copyFunc - - try: - env['INSTALLVERSIONEDLIB'] - except KeyError: - env['INSTALLVERSIONEDLIB'] = copyFuncVersionedLib - -def exists(env): - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/intelc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/intelc.py deleted file mode 100644 index c45c71a10d9..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/intelc.py +++ /dev/null @@ -1,608 +0,0 @@ -"""SCons.Tool.icl - -Tool-specific initialization for the Intel C/C++ compiler. -Supports Linux and Windows compilers, v7 and up. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from __future__ import division, print_function - -__revision__ = "src/engine/SCons/Tool/intelc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import math, sys, os.path, glob, string, re - -is_windows = sys.platform == 'win32' -is_win64 = is_windows and (os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64' or - ('PROCESSOR_ARCHITEW6432' in os.environ and - os.environ['PROCESSOR_ARCHITEW6432'] == 'AMD64')) -is_linux = sys.platform.startswith('linux') -is_mac = sys.platform == 'darwin' - -if is_windows: - import SCons.Tool.msvc -elif is_linux: - import SCons.Tool.gcc -elif is_mac: - import SCons.Tool.gcc -import SCons.Util -import SCons.Warnings - -# Exceptions for this tool -class IntelCError(SCons.Errors.InternalError): - pass -class MissingRegistryError(IntelCError): # missing registry entry - pass -class MissingDirError(IntelCError): # dir not found - pass -class NoRegistryModuleError(IntelCError): # can't read registry at all - pass - -def linux_ver_normalize(vstr): - """Normalize a Linux compiler version number. - Intel changed from "80" to "9.0" in 2005, so we assume if the number - is greater than 60 it's an old-style number and otherwise new-style. - Always returns an old-style float like 80 or 90 for compatibility with Windows. 
- Shades of Y2K!""" - # Check for version number like 9.1.026: return 91.026 - # XXX needs to be updated for 2011+ versions (like 2011.11.344 which is compiler v12.1.5) - m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr) - if m: - vmaj,vmin,build = m.groups() - return float(vmaj) * 10. + float(vmin) + float(build) / 1000. - else: - f = float(vstr) - if is_windows: - return f - else: - if f < 60: return f * 10.0 - else: return f - -def check_abi(abi): - """Check for valid ABI (application binary interface) name, - and map into canonical one""" - if not abi: - return None - abi = abi.lower() - # valid_abis maps input name to canonical name - if is_windows: - valid_abis = {'ia32' : 'ia32', - 'x86' : 'ia32', - 'ia64' : 'ia64', - 'em64t' : 'em64t', - 'amd64' : 'em64t'} - if is_linux: - valid_abis = {'ia32' : 'ia32', - 'x86' : 'ia32', - 'x86_64' : 'x86_64', - 'em64t' : 'x86_64', - 'amd64' : 'x86_64'} - if is_mac: - valid_abis = {'ia32' : 'ia32', - 'x86' : 'ia32', - 'x86_64' : 'x86_64', - 'em64t' : 'x86_64'} - try: - abi = valid_abis[abi] - except KeyError: - raise SCons.Errors.UserError("Intel compiler: Invalid ABI %s, valid values are %s"% \ - (abi, list(valid_abis.keys()))) - return abi - -def get_version_from_list(v, vlist): - """See if we can match v (string) in vlist (list of strings) - Linux has to match in a fuzzy way.""" - if is_windows: - # Simple case, just find it in the list - if v in vlist: return v - else: return None - else: - # Fuzzy match: normalize version number first, but still return - # original non-normalized form. - fuzz = 0.001 - for vi in vlist: - if math.fabs(linux_ver_normalize(vi) - linux_ver_normalize(v)) < fuzz: - return vi - # Not found - return None - -def get_intel_registry_value(valuename, version=None, abi=None): - """ - Return a value from the Intel compiler registry tree. 
(Windows only) - """ - # Open the key: - if is_win64: - K = 'Software\\Wow6432Node\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() - else: - K = 'Software\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() - try: - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) - except SCons.Util.RegError: - # For version 13 and later, check UUID subkeys for valuename - if is_win64: - K = 'Software\\Wow6432Node\\Intel\\Suites\\' + version + "\\Defaults\\C++\\" + abi.upper() - else: - K = 'Software\\Intel\\Suites\\' + version + "\\Defaults\\C++\\" + abi.upper() - try: - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) - uuid = SCons.Util.RegQueryValueEx(k, 'SubKey')[0] - - if is_win64: - K = 'Software\\Wow6432Node\\Intel\\Suites\\' + version + "\\" + uuid + "\\C++" - else: - K = 'Software\\Intel\\Suites\\' + version + "\\" + uuid + "\\C++" - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) - - try: - v = SCons.Util.RegQueryValueEx(k, valuename)[0] - return v # or v.encode('iso-8859-1', 'replace') to remove unicode? - except SCons.Util.RegError: - if abi.upper() == 'EM64T': - abi = 'em64t_native' - if is_win64: - K = 'Software\\Wow6432Node\\Intel\\Suites\\' + version + "\\" + uuid + "\\C++\\" + abi.upper() - else: - K = 'Software\\Intel\\Suites\\' + version + "\\" + uuid + "\\C++\\" + abi.upper() - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) - - try: - v = SCons.Util.RegQueryValueEx(k, valuename)[0] - return v # or v.encode('iso-8859-1', 'replace') to remove unicode? 
- except SCons.Util.RegError: - raise MissingRegistryError("%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi)) - - except SCons.Util.RegError: - raise MissingRegistryError("%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi)) - except SCons.Util.WinError: - raise MissingRegistryError("%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi)) - - # Get the value: - try: - v = SCons.Util.RegQueryValueEx(k, valuename)[0] - return v # or v.encode('iso-8859-1', 'replace') to remove unicode? - except SCons.Util.RegError: - raise MissingRegistryError("%s\\%s was not found in the registry."%(K, valuename)) - - -def get_all_compiler_versions(): - """Returns a sorted list of strings, like "70" or "80" or "9.0" - with most recent compiler version first. - """ - versions=[] - if is_windows: - if is_win64: - keyname = 'Software\\WoW6432Node\\Intel\\Compilers\\C++' - else: - keyname = 'Software\\Intel\\Compilers\\C++' - try: - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, - keyname) - except SCons.Util.WinError: - # For version 13 or later, check for default instance UUID - if is_win64: - keyname = 'Software\\WoW6432Node\\Intel\\Suites' - else: - keyname = 'Software\\Intel\\Suites' - try: - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, - keyname) - except SCons.Util.WinError: - return [] - i = 0 - versions = [] - try: - while i < 100: - subkey = SCons.Util.RegEnumKey(k, i) # raises SConsEnvironmentError - # Check that this refers to an existing dir. - # This is not 100% perfect but should catch common - # installation issues like when the compiler was installed - # and then the install directory deleted or moved (rather - # than uninstalling properly), so the registry values - # are still there. 
- if subkey == 'Defaults': # Ignore default instances - i = i + 1 - continue - ok = False - for try_abi in ('IA32', 'IA32e', 'IA64', 'EM64T'): - try: - d = get_intel_registry_value('ProductDir', subkey, try_abi) - except MissingRegistryError: - continue # not found in reg, keep going - if os.path.exists(d): ok = True - if ok: - versions.append(subkey) - else: - try: - # Registry points to nonexistent dir. Ignore this - # version. - value = get_intel_registry_value('ProductDir', subkey, 'IA32') - except MissingRegistryError as e: - - # Registry key is left dangling (potentially - # after uninstalling). - - print("scons: *** Ignoring the registry key for the Intel compiler version %s.\n" \ - "scons: *** It seems that the compiler was uninstalled and that the registry\n" \ - "scons: *** was not cleaned up properly.\n" % subkey) - else: - print("scons: *** Ignoring "+str(value)) - - i = i + 1 - except EnvironmentError: - # no more subkeys - pass - elif is_linux or is_mac: - for d in glob.glob('/opt/intel_cc_*'): - # Typical dir here is /opt/intel_cc_80. 
- m = re.search(r'cc_(.*)$', d) - if m: - versions.append(m.group(1)) - for d in glob.glob('/opt/intel/cc*/*'): - # Typical dir here is /opt/intel/cc/9.0 for IA32, - # /opt/intel/cce/9.0 for EMT64 (AMD64) - m = re.search(r'([0-9][0-9.]*)$', d) - if m: - versions.append(m.group(1)) - for d in glob.glob('/opt/intel/Compiler/*'): - # Typical dir here is /opt/intel/Compiler/11.1 - m = re.search(r'([0-9][0-9.]*)$', d) - if m: - versions.append(m.group(1)) - for d in glob.glob('/opt/intel/composerxe-*'): - # Typical dir here is /opt/intel/composerxe-2011.4.184 - m = re.search(r'([0-9][0-9.]*)$', d) - if m: - versions.append(m.group(1)) - for d in glob.glob('/opt/intel/composer_xe_*'): - # Typical dir here is /opt/intel/composer_xe_2011_sp1.11.344 - # The _sp1 is useless, the installers are named 2011.9.x, 2011.10.x, 2011.11.x - m = re.search(r'([0-9]{0,4})(?:_sp\d*)?\.([0-9][0-9.]*)$', d) - if m: - versions.append("%s.%s"%(m.group(1), m.group(2))) - for d in glob.glob('/opt/intel/compilers_and_libraries_*'): - # JPA: For the new version of Intel compiler 2016.1. - m = re.search(r'([0-9]{0,4})(?:_sp\d*)?\.([0-9][0-9.]*)$', d) - if m: - versions.append("%s.%s"%(m.group(1), m.group(2))) - - def keyfunc(str): - """Given a dot-separated version string, return a tuple of ints representing it.""" - return [int(x) for x in str.split('.')] - # split into ints, sort, then remove dups - return sorted(SCons.Util.unique(versions), key=keyfunc, reverse=True) - -def get_intel_compiler_top(version, abi): - """ - Return the main path to the top-level dir of the Intel compiler, - using the given version. - The compiler will be in /bin/icl.exe (icc on linux), - the include dir is /include, etc. 
- """ - - if is_windows: - if not SCons.Util.can_read_reg: - raise NoRegistryModuleError("No Windows registry module was found") - top = get_intel_registry_value('ProductDir', version, abi) - archdir={'x86_64': 'intel64', - 'amd64' : 'intel64', - 'em64t' : 'intel64', - 'x86' : 'ia32', - 'i386' : 'ia32', - 'ia32' : 'ia32' - }[abi] # for v11 and greater - # pre-11, icl was in Bin. 11 and later, it's in Bin/ apparently. - if not os.path.exists(os.path.join(top, "Bin", "icl.exe")) \ - and not os.path.exists(os.path.join(top, "Bin", abi, "icl.exe")) \ - and not os.path.exists(os.path.join(top, "Bin", archdir, "icl.exe")): - raise MissingDirError("Can't find Intel compiler in %s"%(top)) - elif is_mac or is_linux: - def find_in_2008style_dir(version): - # first dir is new (>=9.0) style, second is old (8.0) style. - dirs=('/opt/intel/cc/%s', '/opt/intel_cc_%s') - if abi == 'x86_64': - dirs=('/opt/intel/cce/%s',) # 'e' stands for 'em64t', aka x86_64 aka amd64 - top=None - for d in dirs: - if os.path.exists(os.path.join(d%version, "bin", "icc")): - top = d%version - break - return top - def find_in_2010style_dir(version): - dirs=('/opt/intel/Compiler/%s/*'%version) - # typically /opt/intel/Compiler/11.1/064 (then bin/intel64/icc) - dirs=glob.glob(dirs) - # find highest sub-version number by reverse sorting and picking first existing one. 
- dirs.sort() - dirs.reverse() - top=None - for d in dirs: - if (os.path.exists(os.path.join(d, "bin", "ia32", "icc")) or - os.path.exists(os.path.join(d, "bin", "intel64", "icc"))): - top = d - break - return top - def find_in_2011style_dir(version): - # The 2011 (compiler v12) dirs are inconsistent, so just redo the search from - # get_all_compiler_versions and look for a match (search the newest form first) - top=None - for d in glob.glob('/opt/intel/composer_xe_*'): - # Typical dir here is /opt/intel/composer_xe_2011_sp1.11.344 - # The _sp1 is useless, the installers are named 2011.9.x, 2011.10.x, 2011.11.x - m = re.search(r'([0-9]{0,4})(?:_sp\d*)?\.([0-9][0-9.]*)$', d) - if m: - cur_ver = "%s.%s"%(m.group(1), m.group(2)) - if cur_ver == version and \ - (os.path.exists(os.path.join(d, "bin", "ia32", "icc")) or - os.path.exists(os.path.join(d, "bin", "intel64", "icc"))): - top = d - break - if not top: - for d in glob.glob('/opt/intel/composerxe-*'): - # Typical dir here is /opt/intel/composerxe-2011.4.184 - m = re.search(r'([0-9][0-9.]*)$', d) - if m and m.group(1) == version and \ - (os.path.exists(os.path.join(d, "bin", "ia32", "icc")) or - os.path.exists(os.path.join(d, "bin", "intel64", "icc"))): - top = d - break - return top - def find_in_2016style_dir(version): - # The 2016 (compiler v16) dirs are inconsistent from previous. 
- top = None - for d in glob.glob('/opt/intel/compilers_and_libraries_%s/linux'%version): - if os.path.exists(os.path.join(d, "bin", "ia32", "icc")) or os.path.exists(os.path.join(d, "bin", "intel64", "icc")): - top = d - break - return top - - top = find_in_2016style_dir(version) or find_in_2011style_dir(version) or find_in_2010style_dir(version) or find_in_2008style_dir(version) - # print "INTELC: top=",top - if not top: - raise MissingDirError("Can't find version %s Intel compiler in %s (abi='%s')"%(version,top, abi)) - return top - - -def generate(env, version=None, abi=None, topdir=None, verbose=0): - r"""Add Builders and construction variables for Intel C/C++ compiler - to an Environment. - args: - version: (string) compiler version to use, like "80" - abi: (string) 'win32' or whatever Itanium version wants - topdir: (string) compiler top dir, like - "c:\Program Files\Intel\Compiler70" - If topdir is used, version and abi are ignored. - verbose: (int) if >0, prints compiler version used. - """ - if not (is_mac or is_linux or is_windows): - # can't handle this platform - return - - if is_windows: - SCons.Tool.msvc.generate(env) - elif is_linux: - SCons.Tool.gcc.generate(env) - elif is_mac: - SCons.Tool.gcc.generate(env) - - # if version is unspecified, use latest - vlist = get_all_compiler_versions() - if not version: - if vlist: - version = vlist[0] - else: - # User may have specified '90' but we need to get actual dirname '9.0'. - # get_version_from_list does that mapping. - v = get_version_from_list(version, vlist) - if not v: - raise SCons.Errors.UserError("Invalid Intel compiler version %s: "%version + \ - "installed versions are %s"%(', '.join(vlist))) - version = v - - # if abi is unspecified, use ia32 - # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here) - abi = check_abi(abi) - if abi is None: - if is_mac or is_linux: - # Check if we are on 64-bit linux, default to 64 then. 
- uname_m = os.uname()[4] - if uname_m == 'x86_64': - abi = 'x86_64' - else: - abi = 'ia32' - else: - if is_win64: - abi = 'em64t' - else: - abi = 'ia32' - - if version and not topdir: - try: - topdir = get_intel_compiler_top(version, abi) - except (SCons.Util.RegError, IntelCError): - topdir = None - - if not topdir: - # Normally this is an error, but it might not be if the compiler is - # on $PATH and the user is importing their env. - class ICLTopDirWarning(SCons.Warnings.Warning): - pass - if (is_mac or is_linux) and not env.Detect('icc') or \ - is_windows and not env.Detect('icl'): - - SCons.Warnings.enableWarningClass(ICLTopDirWarning) - SCons.Warnings.warn(ICLTopDirWarning, - "Failed to find Intel compiler for version='%s', abi='%s'"% - (str(version), str(abi))) - else: - # should be cleaned up to say what this other version is - # since in this case we have some other Intel compiler installed - SCons.Warnings.enableWarningClass(ICLTopDirWarning) - SCons.Warnings.warn(ICLTopDirWarning, - "Can't find Intel compiler top dir for version='%s', abi='%s'"% - (str(version), str(abi))) - - if topdir: - archdir={'x86_64': 'intel64', - 'amd64' : 'intel64', - 'em64t' : 'intel64', - 'x86' : 'ia32', - 'i386' : 'ia32', - 'ia32' : 'ia32' - }[abi] # for v11 and greater - if os.path.exists(os.path.join(topdir, 'bin', archdir)): - bindir="bin/%s"%archdir - libdir="lib/%s"%archdir - else: - bindir="bin" - libdir="lib" - if verbose: - print("Intel C compiler: using version %s (%g), abi %s, in '%s/%s'"%\ - (repr(version), linux_ver_normalize(version),abi,topdir,bindir)) - if is_linux: - # Show the actual compiler version by running the compiler. - os.system('%s/%s/icc --version'%(topdir,bindir)) - if is_mac: - # Show the actual compiler version by running the compiler. 
- os.system('%s/%s/icc --version'%(topdir,bindir)) - - env['INTEL_C_COMPILER_TOP'] = topdir - if is_linux: - paths={'INCLUDE' : 'include', - 'LIB' : libdir, - 'PATH' : bindir, - 'LD_LIBRARY_PATH' : libdir} - for p in list(paths.keys()): - env.PrependENVPath(p, os.path.join(topdir, paths[p])) - if is_mac: - paths={'INCLUDE' : 'include', - 'LIB' : libdir, - 'PATH' : bindir, - 'LD_LIBRARY_PATH' : libdir} - for p in list(paths.keys()): - env.PrependENVPath(p, os.path.join(topdir, paths[p])) - if is_windows: - # env key reg valname default subdir of top - paths=(('INCLUDE', 'IncludeDir', 'Include'), - ('LIB' , 'LibDir', 'Lib'), - ('PATH' , 'BinDir', 'Bin')) - # We are supposed to ignore version if topdir is set, so set - # it to the emptry string if it's not already set. - if version is None: - version = '' - # Each path has a registry entry, use that or default to subdir - for p in paths: - try: - path=get_intel_registry_value(p[1], version, abi) - # These paths may have $(ICInstallDir) - # which needs to be substituted with the topdir. - path=path.replace('$(ICInstallDir)', topdir + os.sep) - except IntelCError: - # Couldn't get it from registry: use default subdir of topdir - env.PrependENVPath(p[0], os.path.join(topdir, p[2])) - else: - env.PrependENVPath(p[0], path.split(os.pathsep)) - # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]])) - - if is_windows: - env['CC'] = 'icl' - env['CXX'] = 'icl' - env['LINK'] = 'xilink' - else: - env['CC'] = 'icc' - env['CXX'] = 'icpc' - # Don't reset LINK here; - # use smart_link which should already be here from link.py. - #env['LINK'] = '$CC' - env['AR'] = 'xiar' - env['LD'] = 'xild' # not used by default - - # This is not the exact (detailed) compiler version, - # just the major version as determined above or specified - # by the user. It is a float like 80 or 90, in normalized form for Linux - # (i.e. 
even for Linux 9.0 compiler, still returns 90 rather than 9.0) - if version: - env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version) - - if is_windows: - # Look for license file dir - # in system environment, registry, and default location. - envlicdir = os.environ.get("INTEL_LICENSE_FILE", '') - K = r'SOFTWARE\Intel\Licenses' - try: - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) - reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0] - except (AttributeError, SCons.Util.RegError): - reglicdir = "" - defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses' - - licdir = None - for ld in [envlicdir, reglicdir]: - # If the string contains an '@', then assume it's a network - # license (port@system) and good by definition. - if ld and (ld.find('@') != -1 or os.path.exists(ld)): - licdir = ld - break - if not licdir: - licdir = defaultlicdir - if not os.path.exists(licdir): - class ICLLicenseDirWarning(SCons.Warnings.Warning): - pass - SCons.Warnings.enableWarningClass(ICLLicenseDirWarning) - SCons.Warnings.warn(ICLLicenseDirWarning, - "Intel license dir was not found." - " Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)." - " Using the default path as a last resort." 
- % (envlicdir, reglicdir, defaultlicdir)) - env['ENV']['INTEL_LICENSE_FILE'] = licdir - -def exists(env): - if not (is_mac or is_linux or is_windows): - # can't handle this platform - return 0 - - try: - versions = get_all_compiler_versions() - except (SCons.Util.RegError, IntelCError): - versions = None - detected = versions is not None and len(versions) > 0 - if not detected: - # try env.Detect, maybe that will work - if is_windows: - return env.Detect('icl') - elif is_linux: - return env.Detect('icc') - elif is_mac: - return env.Detect('icc') - return detected - -# end of file - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ipkg.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ipkg.py deleted file mode 100644 index e1fad3530bd..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ipkg.py +++ /dev/null @@ -1,75 +0,0 @@ -"""SCons.Tool.ipkg - -Tool-specific initialization for ipkg. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -The ipkg tool calls the ipkg-build. Its only argument should be the -packages fake_root. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/ipkg.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os - -import SCons.Builder - -def generate(env): - """Add Builders and construction variables for ipkg to an Environment.""" - try: - bld = env['BUILDERS']['Ipkg'] - except KeyError: - bld = SCons.Builder.Builder(action='$IPKGCOM', - suffix='$IPKGSUFFIX', - source_scanner=None, - target_scanner=None) - env['BUILDERS']['Ipkg'] = bld - - - env['IPKG'] = 'ipkg-build' - env['IPKGCOM'] = '$IPKG $IPKGFLAGS ${SOURCE}' - - if env.WhereIs('id'): - with os.popen('id -un') as p: - env['IPKGUSER'] = p.read().strip() - with os.popen('id -gn') as p: - env['IPKGGROUP'] = p.read().strip() - env['IPKGFLAGS'] = SCons.Util.CLVar('-o $IPKGUSER -g $IPKGGROUP') - env['IPKGSUFFIX'] = '.ipk' - -def exists(env): - """ - Can we find the tool - """ - return env.Detect('ipkg-build') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/jar.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/jar.py deleted file mode 100644 index f2a328bf9c2..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/jar.py +++ /dev/null @@ -1,241 +0,0 @@ -"""SCons.Tool.jar - -Tool-specific initialization for jar. - -There normally shouldn't be any need to import this module directly. 
-It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/jar.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" -import os - -import SCons.Subst -import SCons.Util -from SCons.Node.FS import _my_normcase -from SCons.Tool.JavaCommon import get_java_install_dirs - - -def jarSources(target, source, env, for_signature): - """Only include sources that are not a manifest file.""" - try: - env['JARCHDIR'] - except KeyError: - jarchdir_set = False - else: - jarchdir_set = True - jarchdir = env.subst('$JARCHDIR', target=target, source=source) - if jarchdir: - jarchdir = env.fs.Dir(jarchdir) - result = [] - for src in source: - contents = src.get_text_contents() - if not contents.startswith("Manifest-Version"): - if jarchdir_set: - _chdir = jarchdir - else: - try: - _chdir = src.attributes.java_classdir - except AttributeError: - _chdir = None - if _chdir: - # If we are changing the dir with -C, then sources should - # be relative to that directory. - src = SCons.Subst.Literal(src.get_path(_chdir)) - result.append('-C') - result.append(_chdir) - result.append(src) - return result - -def jarManifest(target, source, env, for_signature): - """Look in sources for a manifest file, if any.""" - for src in source: - contents = src.get_text_contents() - if contents.startswith("Manifest-Version"): - return src - return '' - -def jarFlags(target, source, env, for_signature): - """If we have a manifest, make sure that the 'm' - flag is specified.""" - jarflags = env.subst('$JARFLAGS', target=target, source=source) - for src in source: - contents = src.get_text_contents() - if contents.startswith("Manifest-Version"): - if 'm' not in jarflags: - return jarflags + 'm' - break - return jarflags - -def Jar(env, target = None, source = [], *args, **kw): - """ - A pseudo-Builder wrapper around the separate Jar sources{File,Dir} - Builders. 
- """ - - # jar target should not be a list so assume they passed - # no target and want implicit target to be made and the arg - # was actaully the list of sources - if SCons.Util.is_List(target) and source == []: - SCons.Warnings.Warning("Making implicit target jar file, " + - "and treating the list as sources") - source = target - target = None - - # mutiple targets pass so build each target the same from the - # same source - #TODO Maybe this should only be done once, and the result copied - # for each target since it should result in the same? - if SCons.Util.is_List(target) and SCons.Util.is_List(source): - jars = [] - for single_target in target: - jars += env.Jar( target = single_target, source = source, *args, **kw) - return jars - - # they passed no target so make a target implicitly - if target is None: - try: - # make target from the first source file - target = os.path.splitext(str(source[0]))[0] + env.subst('$JARSUFFIX') - except: - # something strange is happening but attempt anyways - SCons.Warnings.Warning("Could not make implicit target from sources, using directory") - target = os.path.basename(str(env.Dir('.'))) + env.subst('$JARSUFFIX') - - # make lists out of our target and sources - if not SCons.Util.is_List(target): - target = [target] - if not SCons.Util.is_List(source): - source = [source] - - # setup for checking through all the sources and handle accordingly - java_class_suffix = env.subst('$JAVACLASSSUFFIX') - java_suffix = env.subst('$JAVASUFFIX') - target_nodes = [] - - # function for determining what to do with a file and not a directory - # if its already a class file then it can be used as a - # source for jar, otherwise turn it into a class file then - # return the source - def file_to_class(s): - if _my_normcase(str(s)).endswith(java_suffix): - return env.JavaClassFile(source = s, *args, **kw) - else: - return [env.fs.File(s)] - - # function for calling the JavaClassDir builder if a directory is - # passed as a source to Jar 
builder. The JavaClassDir builder will - # return an empty list if there were not target classes built from - # the directory, in this case assume the user wanted the directory - # copied into the jar as is (it contains other files such as - # resources or class files compiled from proir commands) - # TODO: investigate the expexcted behavior for directories that - # have mixed content, such as Java files along side other files - # files. - def dir_to_class(s): - dir_targets = env.JavaClassDir(source = s, *args, **kw) - if(dir_targets == []): - # no classes files could be built from the source dir - # so pass the dir as is. - return [env.fs.Dir(s)] - else: - return dir_targets - - # loop through the sources and handle each accordingly - # the goal here is to get all the source files into a class - # file or a directory that contains class files - for s in SCons.Util.flatten(source): - s = env.subst(s) - if isinstance(s, SCons.Node.FS.Base): - if isinstance(s, SCons.Node.FS.File): - # found a file so make sure its a class file - target_nodes.extend(file_to_class(s)) - else: - # found a dir so get the class files out of it - target_nodes.extend(dir_to_class(s)) - else: - try: - # source is string try to convert it to file - target_nodes.extend(file_to_class(env.fs.File(s))) - continue - except: - pass - - try: - # source is string try to covnert it to dir - target_nodes.extend(dir_to_class(env.fs.Dir(s))) - continue - except: - pass - - SCons.Warnings.Warning("File: " + str(s) + " could not be identified as File or Directory, skipping.") - - # at this point all our sources have been converted to classes or directories of class - # so pass it to the Jar builder - return env.JarFile(target = target, source = target_nodes, *args, **kw) - -def generate(env): - """Add Builders and construction variables for jar to an Environment.""" - SCons.Tool.CreateJarBuilder(env) - - SCons.Tool.CreateJavaFileBuilder(env) - SCons.Tool.CreateJavaClassFileBuilder(env) - 
SCons.Tool.CreateJavaClassDirBuilder(env) - - env.AddMethod(Jar) - - if env['PLATFORM'] == 'win32': - # Ensure that we have a proper path for jar - paths = get_java_install_dirs('win32') - jar = SCons.Tool.find_program_path(env, 'jar', default_paths=paths) - if jar: - jar_bin_dir = os.path.dirname(jar) - env.AppendENVPath('PATH', jar_bin_dir) - - env['JAR'] = 'jar' - env['JARFLAGS'] = SCons.Util.CLVar('cf') - env['_JARFLAGS'] = jarFlags - env['_JARMANIFEST'] = jarManifest - env['_JARSOURCES'] = jarSources - env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES' - env['JARCOM'] = "${TEMPFILE('$_JARCOM','$JARCOMSTR')}" - env['JARSUFFIX'] = '.jar' - -def exists(env): - # As reported by Jan Nijtmans in issue #2730, the simple - # return env.Detect('jar') - # doesn't always work during initialization. For now, we - # stop trying to detect an executable (analogous to the - # javac Builder). - # TODO: Come up with a proper detect() routine...and enable it. - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/javac.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/javac.py deleted file mode 100644 index aeb52ce41cf..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/javac.py +++ /dev/null @@ -1,248 +0,0 @@ -"""SCons.Tool.javac - -Tool-specific initialization for javac. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Tool/javac.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path -from collections import OrderedDict - -import SCons.Action -import SCons.Builder -from SCons.Node.FS import _my_normcase -from SCons.Tool.JavaCommon import parse_java_file, get_java_install_dirs, get_java_include_paths -import SCons.Util - -def classname(path): - """Turn a string (path name) into a Java class name.""" - return os.path.normpath(path).replace(os.sep, '.') - -def emit_java_classes(target, source, env): - """Create and return lists of source java files - and their corresponding target class files. 
- """ - java_suffix = env.get('JAVASUFFIX', '.java') - class_suffix = env.get('JAVACLASSSUFFIX', '.class') - - target[0].must_be_same(SCons.Node.FS.Dir) - classdir = target[0] - - s = source[0].rentry().disambiguate() - if isinstance(s, SCons.Node.FS.File): - sourcedir = s.dir.rdir() - elif isinstance(s, SCons.Node.FS.Dir): - sourcedir = s.rdir() - else: - raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % s.__class__) - - slist = [] - js = _my_normcase(java_suffix) - for entry in source: - entry = entry.rentry().disambiguate() - if isinstance(entry, SCons.Node.FS.File): - slist.append(entry) - elif isinstance(entry, SCons.Node.FS.Dir): - result = OrderedDict() - dirnode = entry.rdir() - def find_java_files(arg, dirpath, filenames): - java_files = sorted([n for n in filenames - if _my_normcase(n).endswith(js)]) - mydir = dirnode.Dir(dirpath) - java_paths = [mydir.File(f) for f in java_files] - for jp in java_paths: - arg[jp] = True - for dirpath, dirnames, filenames in os.walk(dirnode.get_abspath()): - find_java_files(result, dirpath, filenames) - entry.walk(find_java_files, result) - - slist.extend(list(result.keys())) - else: - raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % entry.__class__) - - version = env.get('JAVAVERSION', '1.4') - full_tlist = [] - for f in slist: - tlist = [] - source_file_based = True - pkg_dir = None - if not f.is_derived(): - pkg_dir, classes = parse_java_file(f.rfile().get_abspath(), version) - if classes: - source_file_based = False - if pkg_dir: - d = target[0].Dir(pkg_dir) - p = pkg_dir + os.sep - else: - d = target[0] - p = '' - for c in classes: - t = d.File(c + class_suffix) - t.attributes.java_classdir = classdir - t.attributes.java_sourcedir = sourcedir - t.attributes.java_classname = classname(p + c) - tlist.append(t) - - if source_file_based: - base = f.name[:-len(java_suffix)] - if pkg_dir: - t = target[0].Dir(pkg_dir).File(base + class_suffix) - else: - t = 
target[0].File(base + class_suffix) - t.attributes.java_classdir = classdir - t.attributes.java_sourcedir = f.dir - t.attributes.java_classname = classname(base) - tlist.append(t) - - for t in tlist: - t.set_specific_source([f]) - - full_tlist.extend(tlist) - - return full_tlist, slist - -JavaAction = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') - -JavaBuilder = SCons.Builder.Builder(action = JavaAction, - emitter = emit_java_classes, - target_factory = SCons.Node.FS.Entry, - source_factory = SCons.Node.FS.Entry) - -class pathopt(object): - """ - Callable object for generating javac-style path options from - a construction variable (e.g. -classpath, -sourcepath). - """ - def __init__(self, opt, var, default=None): - self.opt = opt - self.var = var - self.default = default - - def __call__(self, target, source, env, for_signature): - path = env[self.var] - if path and not SCons.Util.is_List(path): - path = [path] - if self.default: - default = env[self.default] - if default: - if not SCons.Util.is_List(default): - default = [default] - path = path + default - if path: - return [self.opt, os.pathsep.join(map(str, path))] - else: - return [] - -def Java(env, target, source, *args, **kw): - """ - A pseudo-Builder wrapper around the separate JavaClass{File,Dir} - Builders. - """ - if not SCons.Util.is_List(target): - target = [target] - if not SCons.Util.is_List(source): - source = [source] - - # Pad the target list with repetitions of the last element in the - # list so we have a target for every source element. 
- target = target + ([target[-1]] * (len(source) - len(target))) - - java_suffix = env.subst('$JAVASUFFIX') - result = [] - - for t, s in zip(target, source): - if isinstance(s, SCons.Node.FS.Base): - if isinstance(s, SCons.Node.FS.File): - b = env.JavaClassFile - else: - b = env.JavaClassDir - else: - if os.path.isfile(s): - b = env.JavaClassFile - elif os.path.isdir(s): - b = env.JavaClassDir - elif s[-len(java_suffix):] == java_suffix: - b = env.JavaClassFile - else: - b = env.JavaClassDir - result.extend(b(t, s, *args, **kw)) - - return result - -def generate(env): - """Add Builders and construction variables for javac to an Environment.""" - java_file = SCons.Tool.CreateJavaFileBuilder(env) - java_class = SCons.Tool.CreateJavaClassFileBuilder(env) - java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env) - java_class.add_emitter(None, emit_java_classes) - java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes) - java_class_dir.emitter = emit_java_classes - - env.AddMethod(Java) - - version = env.get('JAVAVERSION', None) - - if env['PLATFORM'] == 'win32': - # Ensure that we have a proper path for javac - paths = get_java_install_dirs('win32', version=version) - javac = SCons.Tool.find_program_path(env, 'javac', default_paths=paths) - if javac: - javac_bin_dir = os.path.dirname(javac) - env.AppendENVPath('PATH', javac_bin_dir) - else: - javac = SCons.Tool.find_program_path(env, 'javac') - - env['JAVAINCLUDES'] = get_java_include_paths(env, javac, version) - - - env['JAVAC'] = 'javac' - env['JAVACFLAGS'] = SCons.Util.CLVar('') - env['JAVABOOTCLASSPATH'] = [] - env['JAVACLASSPATH'] = [] - env['JAVASOURCEPATH'] = [] - env['_javapathopt'] = pathopt - env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} ' - env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} ' - env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} ' - env['_JAVASOURCEPATHDEFAULT'] = 
'${TARGET.attributes.java_sourcedir}' - env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES' - env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}" - env['JAVACLASSSUFFIX'] = '.class' - env['JAVASUFFIX'] = '.java' - -def exists(env): - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/javah.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/javah.py deleted file mode 100644 index b41c742fe59..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/javah.py +++ /dev/null @@ -1,147 +0,0 @@ -"""SCons.Tool.javah - -Tool-specific initialization for javah. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/javah.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Action -import SCons.Builder -import SCons.Node.FS -import SCons.Tool.javac -import SCons.Util -from SCons.Tool.JavaCommon import get_java_install_dirs - - -def emit_java_headers(target, source, env): - """Create and return lists of Java stub header files that will - be created from a set of class files. - """ - class_suffix = env.get('JAVACLASSSUFFIX', '.class') - classdir = env.get('JAVACLASSDIR') - - if not classdir: - try: - s = source[0] - except IndexError: - classdir = '.' - else: - try: - classdir = s.attributes.java_classdir - except AttributeError: - classdir = '.' 
- classdir = env.Dir(classdir).rdir() - - if str(classdir) == '.': - c_ = None - else: - c_ = str(classdir) + os.sep - - slist = [] - for src in source: - try: - classname = src.attributes.java_classname - except AttributeError: - classname = str(src) - if c_ and classname[:len(c_)] == c_: - classname = classname[len(c_):] - if class_suffix and classname[-len(class_suffix):] == class_suffix: - classname = classname[:-len(class_suffix)] - classname = SCons.Tool.javac.classname(classname) - s = src.rfile() - s.attributes.java_classname = classname - slist.append(s) - - s = source[0].rfile() - if not hasattr(s.attributes, 'java_classdir'): - s.attributes.java_classdir = classdir - - if target[0].__class__ is SCons.Node.FS.File: - tlist = target - else: - if not isinstance(target[0], SCons.Node.FS.Dir): - target[0].__class__ = SCons.Node.FS.Dir - target[0]._morph() - tlist = [] - for s in source: - fname = s.attributes.java_classname.replace('.', '_') + '.h' - t = target[0].File(fname) - t.attributes.java_lookupdir = target[0] - tlist.append(t) - - return tlist, source - -def JavaHOutFlagGenerator(target, source, env, for_signature): - try: - t = target[0] - except (AttributeError, IndexError, TypeError): - t = target - try: - return '-d ' + str(t.attributes.java_lookupdir) - except AttributeError: - return '-o ' + str(t) - -def getJavaHClassPath(env,target, source, for_signature): - path = "${SOURCE.attributes.java_classdir}" - if 'JAVACLASSPATH' in env and env['JAVACLASSPATH']: - path = SCons.Util.AppendPath(path, env['JAVACLASSPATH']) - return "-classpath %s" % (path) - -def generate(env): - """Add Builders and construction variables for javah to an Environment.""" - java_javah = SCons.Tool.CreateJavaHBuilder(env) - java_javah.emitter = emit_java_headers - - if env['PLATFORM'] == 'win32': - # Ensure that we have a proper path for javah - paths = get_java_install_dirs('win32') - javah = SCons.Tool.find_program_path(env, 'javah', default_paths=paths) - if javah: - 
javah_bin_dir = os.path.dirname(javah) - env.AppendENVPath('PATH', javah_bin_dir) - - env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator - env['JAVAH'] = 'javah' - env['JAVAHFLAGS'] = SCons.Util.CLVar('') - env['_JAVAHCLASSPATH'] = getJavaHClassPath - env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}' - env['JAVACLASSSUFFIX'] = '.class' - -def exists(env): - return env.Detect('javah') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/latex.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/latex.py deleted file mode 100644 index 6cfd93ad2a4..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/latex.py +++ /dev/null @@ -1,80 +0,0 @@ -"""SCons.Tool.latex - -Tool-specific initialization for LaTeX. -Generates .dvi files from .latex or .ltx files - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/latex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Defaults -import SCons.Scanner.LaTeX -import SCons.Util -import SCons.Tool -import SCons.Tool.tex - -def LaTeXAuxFunction(target = None, source= None, env=None): - result = SCons.Tool.tex.InternalLaTeXAuxAction( SCons.Tool.tex.LaTeXAction, target, source, env ) - if result != 0: - SCons.Tool.tex.check_file_error_message(env['LATEX']) - return result - -LaTeXAuxAction = SCons.Action.Action(LaTeXAuxFunction, - strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) - -def generate(env): - """Add Builders and construction variables for LaTeX to an Environment.""" - - env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) - - from . import dvi - dvi.generate(env) - - from . 
import pdf - pdf.generate(env) - - bld = env['BUILDERS']['DVI'] - bld.add_action('.ltx', LaTeXAuxAction) - bld.add_action('.latex', LaTeXAuxAction) - bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter) - bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter) - - SCons.Tool.tex.generate_common(env) - -def exists(env): - SCons.Tool.tex.generate_darwin(env) - return env.Detect('latex') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ldc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ldc.py deleted file mode 100644 index 639dd486229..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ldc.py +++ /dev/null @@ -1,158 +0,0 @@ -from __future__ import print_function - -"""SCons.Tool.ldc - -Tool-specific initialization for the LDC compiler. -(https://github.com/ldc-developers/ldc) - -Developed by Russel Winder (russel@winder.org.uk) -2012-05-09 onwards - -Compiler variables: - DC - The name of the D compiler to use. Defaults to ldc2. - DPATH - List of paths to search for import modules. - DVERSIONS - List of version tags to enable when compiling. - DDEBUG - List of debug tags to enable when compiling. - -Linker related variables: - LIBS - List of library files to link in. - DLINK - Name of the linker to use. Defaults to ldc2. - DLINKFLAGS - List of linker flags. - -Lib tool variables: - DLIB - Name of the lib tool to use. Defaults to lib. - DLIBFLAGS - List of flags to pass to the lib tool. - LIBS - Same as for the linker. 
(libraries to pull into the .lib) -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/ldc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import subprocess - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Scanner.D -import SCons.Tool - -import SCons.Tool.DCommon as DCommon - - -def generate(env): - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - static_obj.add_action('.d', SCons.Defaults.DAction) - shared_obj.add_action('.d', SCons.Defaults.ShDAction) - static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter) - - env['DC'] = env.Detect('ldc2') or 'ldc2' - env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of=$TARGET $SOURCES' - env['_DINCFLAGS'] = '${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)}' - env['_DVERFLAGS'] = '${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)}' - env['_DDEBUGFLAGS'] = '${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)}' - env['_DFLAGS'] = '${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)}' - - env['SHDC'] = '$DC' - env['SHDCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -relocation-model=pic -of=$TARGET $SOURCES' - - env['DPATH'] = ['#/'] - env['DFLAGS'] = [] - env['DVERSIONS'] = [] - env['DDEBUG'] = [] - - if env['DC']: - DCommon.addDPATHToEnv(env, env['DC']) - - env['DINCPREFIX'] = '-I=' - env['DINCSUFFIX'] = '' - env['DVERPREFIX'] = '-version=' - env['DVERSUFFIX'] = '' - env['DDEBUGPREFIX'] = '-debug=' - env['DDEBUGSUFFIX'] = '' - env['DFLAGPREFIX'] = '-' - env['DFLAGSUFFIX'] = '' - env['DFILESUFFIX'] = '.d' - - env['DLINK'] = '$DC' - env['DLINKFLAGS'] = SCons.Util.CLVar('') - env['DLINKCOM'] = '$DLINK -of=$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS' - - env['SHDLINK'] = '$DC' - env['SHDLINKFLAGS'] = SCons.Util.CLVar('$DLINKFLAGS -shared -defaultlib=phobos2-ldc') - - env['SHDLINKCOM'] = '$DLINK -of=$TARGET $SHDLINKFLAGS 
$__SHDLIBVERSIONFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS -L-ldruntime-ldc' - - env['DLIBLINKPREFIX'] = '' if env['PLATFORM'] == 'win32' else '-L-l' - env['DLIBLINKSUFFIX'] = '.lib' if env['PLATFORM'] == 'win32' else '' - # env['_DLIBFLAGS'] = '${_concat(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, __env__, RDirs, TARGET, SOURCE)}' - env['_DLIBFLAGS'] = '${_stripixes(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}' - - env['DLIBDIRPREFIX'] = '-L-L' - env['DLIBDIRSUFFIX'] = '' - env['_DLIBDIRFLAGS'] = '${_concat(DLIBDIRPREFIX, LIBPATH, DLIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)}' - - env['DLIB'] = 'lib' if env['PLATFORM'] == 'win32' else 'ar cr' - env['DLIBCOM'] = '$DLIB $_DLIBFLAGS {0}$TARGET $SOURCES $_DLIBFLAGS'.format('-c ' if env['PLATFORM'] == 'win32' else '') - - # env['_DLIBFLAGS'] = '${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)}' - - env['DLIBFLAGPREFIX'] = '-' - env['DLIBFLAGSUFFIX'] = '' - - # __RPATH is set to $_RPATH in the platform specification if that - # platform supports it. 
- env['DRPATHPREFIX'] = '-L-Wl,-rpath,' if env['PLATFORM'] == 'darwin' else '-L-rpath=' - env['DRPATHSUFFIX'] = '' - env['_DRPATH'] = '${_concat(DRPATHPREFIX, RPATH, DRPATHSUFFIX, __env__)}' - - # Support for versioned libraries - env['_SHDLIBVERSIONFLAGS'] = '$SHDLIBVERSIONFLAGS -L-soname=$_SHDLIBSONAME' - env['_SHDLIBSONAME'] = '${DShLibSonameGenerator(__env__,TARGET)}' - # NOTE: this is a quick hack, the soname will only work if there is - # c/c++ linker loaded which provides callback for the ShLibSonameGenerator - env['DShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator - # NOTE: this is only for further reference, currently $SHDLIBVERSION does - # not work, the user must use $SHLIBVERSION - env['SHDLIBVERSION'] = '$SHLIBVERSION' - env['SHDLIBVERSIONFLAGS'] = [] - - env['BUILDERS']['ProgramAllAtOnce'] = SCons.Builder.Builder( - action='$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -of=$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS', - emitter=DCommon.allAtOnceEmitter, - ) - - -def exists(env): - return env.Detect('ldc2') - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/lex.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/lex.py deleted file mode 100644 index db1ca6d0f87..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/lex.py +++ /dev/null @@ -1,141 +0,0 @@ -"""SCons.Tool.lex - -Tool-specific initialization for lex. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/lex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import sys - -import SCons.Action -import SCons.Tool -import SCons.Util -from SCons.Platform.mingw import MINGW_DEFAULT_PATHS -from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS -from SCons.Platform.win32 import CHOCO_DEFAULT_PATH - -LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR") - -if sys.platform == 'win32': - BINS = ['flex', 'lex', 'win_flex'] -else: - BINS = ["flex", "lex"] - -def lexEmitter(target, source, env): - sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0])) - - if sourceExt == ".lm": # If using Objective-C - target = [sourceBase + ".m"] # the extension is ".m". - - # This emitter essentially tries to add to the target all extra - # files generated by flex. 
- - # Different options that are used to trigger the creation of extra files. - fileGenOptions = ["--header-file=", "--tables-file="] - - lexflags = env.subst("$LEXFLAGS", target=target, source=source) - for option in SCons.Util.CLVar(lexflags): - for fileGenOption in fileGenOptions: - l = len(fileGenOption) - if option[:l] == fileGenOption: - # A file generating option is present, so add the - # file name to the target list. - fileName = option[l:].strip() - target.append(fileName) - return (target, source) - -def get_lex_path(env, append_paths=False): - """ - Find the path to the lex tool, searching several possible names - - Only called in the Windows case, so the default_path - can be Windows-specific - - :param env: current construction environment - :param append_paths: if set, add the path to the tool to PATH - :return: path to lex tool, if found - """ - for prog in BINS: - bin_path = SCons.Tool.find_program_path( - env, - prog, - default_paths=CHOCO_DEFAULT_PATH + MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if bin_path: - if append_paths: - env.AppendENVPath('PATH', os.path.dirname(bin_path)) - return bin_path - SCons.Warnings.Warning('lex tool requested, but lex or flex binary not found in ENV PATH') - - -def generate(env): - """Add Builders and construction variables for lex to an Environment.""" - c_file, cxx_file = SCons.Tool.createCFileBuilders(env) - - # C - c_file.add_action(".l", LexAction) - c_file.add_emitter(".l", lexEmitter) - - c_file.add_action(".lex", LexAction) - c_file.add_emitter(".lex", lexEmitter) - - # Objective-C - cxx_file.add_action(".lm", LexAction) - cxx_file.add_emitter(".lm", lexEmitter) - - # C++ - cxx_file.add_action(".ll", LexAction) - cxx_file.add_emitter(".ll", lexEmitter) - - env["LEXFLAGS"] = SCons.Util.CLVar("") - - if sys.platform == 'win32': - # ignore the return - we do not need the full path here - _ = get_lex_path(env, append_paths=True) - env["LEX"] = env.Detect(BINS) - if not env.get("LEXUNISTD"): - 
env["LEXUNISTD"] = SCons.Util.CLVar("") - env["LEXCOM"] = "$LEX $LEXUNISTD $LEXFLAGS -t $SOURCES > $TARGET" - else: - env["LEX"] = env.Detect(BINS) - env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET" - -def exists(env): - if sys.platform == 'win32': - return get_lex_path(env) - else: - return env.Detect(BINS) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/link.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/link.py deleted file mode 100644 index c712732605f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/link.py +++ /dev/null @@ -1,362 +0,0 @@ -"""SCons.Tool.link - -Tool-specific initialization for the generic Posix linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/link.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import sys -import re -import os - -import SCons.Tool -import SCons.Util -import SCons.Warnings - -from SCons.Tool.FortranCommon import isfortran - -from SCons.Tool.DCommon import isD - -from SCons.Tool.cxx import iscplusplus - -issued_mixed_link_warning = False - - -def smart_link(source, target, env, for_signature): - has_cplusplus = iscplusplus(source) - has_fortran = isfortran(env, source) - has_d = isD(env, source) - if has_cplusplus and has_fortran and not has_d: - global issued_mixed_link_warning - if not issued_mixed_link_warning: - msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \ - "This may generate a buggy executable if the '%s'\n\t" + \ - "compiler does not know how to deal with Fortran runtimes." 
- SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning, - msg % env.subst('$CXX')) - issued_mixed_link_warning = True - return '$CXX' - elif has_d: - env['LINKCOM'] = env['DLINKCOM'] - env['SHLINKCOM'] = env['SHDLINKCOM'] - return '$DC' - elif has_fortran: - return '$FORTRAN' - elif has_cplusplus: - return '$CXX' - return '$CC' - - -def _lib_emitter(target, source, env, **kw): - Verbose = False - if Verbose: - print("_lib_emitter: target[0]={!r}".format(target[0].get_path())) - for tgt in target: - if SCons.Util.is_String(tgt): - tgt = env.File(tgt) - tgt.attributes.shared = 1 - - try: - symlink_generator = kw['symlink_generator'] - except KeyError: - pass - else: - if Verbose: - print("_lib_emitter: symlink_generator={!r}".format(symlink_generator)) - symlinks = symlink_generator(env, target[0]) - if Verbose: - print("_lib_emitter: symlinks={!r}".format(symlinks)) - - if symlinks: - SCons.Tool.EmitLibSymlinks(env, symlinks, target[0]) - target[0].attributes.shliblinks = symlinks - return (target, source) - - -def shlib_emitter(target, source, env): - return _lib_emitter(target, source, env, symlink_generator=SCons.Tool.ShLibSymlinkGenerator) - - -def ldmod_emitter(target, source, env): - return _lib_emitter(target, source, env, symlink_generator=SCons.Tool.LdModSymlinkGenerator) - - -# This is generic enough to be included here... 
-def _versioned_lib_name(env, libnode, version, prefix, suffix, prefix_generator, suffix_generator, **kw): - """For libnode='/optional/dir/libfoo.so.X.Y.Z' it returns 'libfoo.so'""" - Verbose = False - - if Verbose: - print("_versioned_lib_name: libnode={!r}".format(libnode.get_path())) - print("_versioned_lib_name: version={!r}".format(version)) - print("_versioned_lib_name: prefix={!r}".format(prefix)) - print("_versioned_lib_name: suffix={!r}".format(suffix)) - print("_versioned_lib_name: suffix_generator={!r}".format(suffix_generator)) - - versioned_name = os.path.basename(libnode.get_path()) - if Verbose: - print("_versioned_lib_name: versioned_name={!r}".format(versioned_name)) - - versioned_prefix = prefix_generator(env, **kw) - versioned_suffix = suffix_generator(env, **kw) - if Verbose: - print("_versioned_lib_name: versioned_prefix={!r}".format(versioned_prefix)) - print("_versioned_lib_name: versioned_suffix={!r}".format(versioned_suffix)) - - versioned_prefix_re = '^' + re.escape(versioned_prefix) - versioned_suffix_re = re.escape(versioned_suffix) + '$' - name = re.sub(versioned_prefix_re, prefix, versioned_name) - name = re.sub(versioned_suffix_re, suffix, name) - if Verbose: - print("_versioned_lib_name: name={!r}".format(name)) - return name - - -def _versioned_shlib_name(env, libnode, version, prefix, suffix, **kw): - prefix_generator = SCons.Tool.ShLibPrefixGenerator - suffix_generator = SCons.Tool.ShLibSuffixGenerator - return _versioned_lib_name(env, libnode, version, prefix, suffix, prefix_generator, suffix_generator, **kw) - - -def _versioned_ldmod_name(env, libnode, version, prefix, suffix, **kw): - prefix_generator = SCons.Tool.LdModPrefixGenerator - suffix_generator = SCons.Tool.LdModSuffixGenerator - return _versioned_lib_name(env, libnode, version, prefix, suffix, prefix_generator, suffix_generator, **kw) - - -def _versioned_lib_suffix(env, suffix, version): - """For suffix='.so' and version='0.1.2' it returns '.so.0.1.2'""" - Verbose = 
False - if Verbose: - print("_versioned_lib_suffix: suffix={!r}".format(suffix)) - print("_versioned_lib_suffix: version={!r}".format(version)) - if not suffix.endswith(version): - suffix = suffix + '.' + version - if Verbose: - print("_versioned_lib_suffix: return suffix={!r}".format(suffix)) - return suffix - - -def _versioned_lib_soname(env, libnode, version, prefix, suffix, name_func): - """For libnode='/optional/dir/libfoo.so.X.Y.Z' it returns 'libfoo.so.X'""" - Verbose = False - if Verbose: - print("_versioned_lib_soname: version={!r}".format(version)) - name = name_func(env, libnode, version, prefix, suffix) - if Verbose: - print("_versioned_lib_soname: name={!r}".format(name)) - major = version.split('.')[0] - soname = name + '.' + major - if Verbose: - print("_versioned_lib_soname: soname={!r}".format(soname)) - return soname - - -def _versioned_shlib_soname(env, libnode, version, prefix, suffix): - return _versioned_lib_soname(env, libnode, version, prefix, suffix, _versioned_shlib_name) - - -def _versioned_ldmod_soname(env, libnode, version, prefix, suffix): - return _versioned_lib_soname(env, libnode, version, prefix, suffix, _versioned_ldmod_name) - - -def _versioned_lib_symlinks(env, libnode, version, prefix, suffix, name_func, soname_func): - """Generate link names that should be created for a versioned shared library. 
- Returns a dictionary in the form { linkname : linktarget } - """ - Verbose = False - - if Verbose: - print("_versioned_lib_symlinks: libnode={!r}".format(libnode.get_path())) - print("_versioned_lib_symlinks: version={!r}".format(version)) - - if sys.platform.startswith('openbsd'): - # OpenBSD uses x.y shared library versioning numbering convention - # and doesn't use symlinks to backwards-compatible libraries - if Verbose: - print("_versioned_lib_symlinks: return symlinks={!r}".format(None)) - return None - - linkdir = libnode.get_dir() - if Verbose: - print("_versioned_lib_symlinks: linkdir={!r}".format(linkdir.get_path())) - - name = name_func(env, libnode, version, prefix, suffix) - if Verbose: - print("_versioned_lib_symlinks: name={!r}".format(name)) - - soname = soname_func(env, libnode, version, prefix, suffix) - if Verbose: - print("_versioned_lib_symlinks: soname={!r}".format(soname)) - - link0 = env.fs.File(soname, linkdir) - link1 = env.fs.File(name, linkdir) - - # We create direct symlinks, not daisy-chained. - if link0 == libnode: - # This enables SHLIBVERSION without periods (e.g. SHLIBVERSION=1) - symlinks = [(link1, libnode)] - else: - # This handles usual SHLIBVERSION, i.e. '1.2', '1.2.3', etc. 
- symlinks = [(link0, libnode), (link1, libnode)] - - if Verbose: - print("_versioned_lib_symlinks: return symlinks={!r}".format(SCons.Tool.StringizeLibSymlinks(symlinks))) - - return symlinks - - -def _versioned_shlib_symlinks(env, libnode, version, prefix, suffix): - name_func = env['LINKCALLBACKS']['VersionedShLibName'] - soname_func = env['LINKCALLBACKS']['VersionedShLibSoname'] - - return _versioned_lib_symlinks(env, libnode, version, prefix, suffix, name_func, soname_func) - - -def _versioned_ldmod_symlinks(env, libnode, version, prefix, suffix): - name_func = _versioned_ldmod_name - soname_func = _versioned_ldmod_soname - - name_func = env['LINKCALLBACKS']['VersionedLdModName'] - soname_func = env['LINKCALLBACKS']['VersionedLdModSoname'] - - return _versioned_lib_symlinks(env, libnode, version, prefix, suffix, name_func, soname_func) - - -def _versioned_lib_callbacks(): - return { - 'VersionedShLibSuffix': _versioned_lib_suffix, - 'VersionedLdModSuffix': _versioned_lib_suffix, - 'VersionedShLibSymlinks': _versioned_shlib_symlinks, - 'VersionedLdModSymlinks': _versioned_ldmod_symlinks, - 'VersionedShLibName': _versioned_shlib_name, - 'VersionedLdModName': _versioned_ldmod_name, - 'VersionedShLibSoname': _versioned_shlib_soname, - 'VersionedLdModSoname': _versioned_ldmod_soname, - }.copy() - - -def _setup_versioned_lib_variables(env, **kw): - """ - Setup all variables required by the versioning machinery - """ - - tool = None - try: - tool = kw['tool'] - except KeyError: - pass - - use_soname = False - try: - use_soname = kw['use_soname'] - except KeyError: - pass - - # The $_SHLIBVERSIONFLAGS define extra commandline flags used when - # building VERSIONED shared libraries. It's always set, but used only - # when VERSIONED library is built (see __SHLIBVERSIONFLAGS in SCons/Defaults.py). 
- if use_soname: - # If the linker uses SONAME, then we need this little automata - if tool == 'sunlink': - env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS -h $_SHLIBSONAME' - env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS -h $_LDMODULESONAME' - else: - env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS -Wl,-soname=$_SHLIBSONAME' - env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS -Wl,-soname=$_LDMODULESONAME' - env['_SHLIBSONAME'] = '${ShLibSonameGenerator(__env__,TARGET)}' - env['_LDMODULESONAME'] = '${LdModSonameGenerator(__env__,TARGET)}' - env['ShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator - env['LdModSonameGenerator'] = SCons.Tool.LdModSonameGenerator - else: - env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS' - env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS' - - # LDOMDULVERSIONFLAGS should always default to $SHLIBVERSIONFLAGS - env['LDMODULEVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS' - - -def generate(env): - """Add Builders and construction variables for gnulink to an Environment.""" - SCons.Tool.createSharedLibBuilder(env) - SCons.Tool.createProgBuilder(env) - - env['SHLINK'] = '$LINK' - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') - env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $__SHLIBVERSIONFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - - # don't set up the emitter, because AppendUnique will generate a list - # starting with None :-( - env.Append(SHLIBEMITTER=[shlib_emitter]) - - env['SMARTLINK'] = smart_link - env['LINK'] = "$SMARTLINK" - env['LINKFLAGS'] = SCons.Util.CLVar('') - - # __RPATH is only set to something ($_RPATH typically) on platforms that support it. 
- env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - env['LIBDIRPREFIX'] = '-L' - env['LIBDIRSUFFIX'] = '' - env['_LIBFLAGS'] = '${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}' - env['LIBLINKPREFIX'] = '-l' - env['LIBLINKSUFFIX'] = '' - - if env['PLATFORM'] == 'hpux': - env['SHLIBSUFFIX'] = '.sl' - elif env['PLATFORM'] == 'aix': - env['SHLIBSUFFIX'] = '.a' - - # For most platforms, a loadable module is the same as a shared - # library. Platforms which are different can override these, but - # setting them the same means that LoadableModule works everywhere. - SCons.Tool.createLoadableModuleBuilder(env) - env['LDMODULE'] = '$SHLINK' - env.Append(LDMODULEEMITTER=[ldmod_emitter]) - env['LDMODULEPREFIX'] = '$SHLIBPREFIX' - env['LDMODULESUFFIX'] = '$SHLIBSUFFIX' - env['LDMODULEFLAGS'] = '$SHLINKFLAGS' - env[ - 'LDMODULECOM'] = '$LDMODULE -o $TARGET $LDMODULEFLAGS $__LDMODULEVERSIONFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - env['LDMODULEVERSION'] = '$SHLIBVERSION' - env['LDMODULENOVERSIONSYMLINKS'] = '$SHLIBNOVERSIONSYMLINKS' - - -def exists(env): - # This module isn't really a Tool on its own, it's common logic for - # other linkers. - return None - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/linkloc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/linkloc.py deleted file mode 100644 index 508b09aeeb8..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/linkloc.py +++ /dev/null @@ -1,113 +0,0 @@ -"""SCons.Tool.linkloc - -Tool specification for the LinkLoc linker for the Phar Lap ETS embedded -operating system. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/linkloc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import re - -import SCons.Action -import SCons.Defaults -import SCons.Errors -import SCons.Tool -import SCons.Util - -from SCons.Tool.MSCommon import msvs_exists, merge_default_version -from SCons.Tool.PharLapCommon import addPharLapPaths - -_re_linker_command = re.compile(r'(\s)@\s*([^\s]+)') - -def repl_linker_command(m): - # Replaces any linker command file directives (e.g. "@foo.lnk") with - # the actual contents of the file. - try: - with open(m.group(2), "r") as f: - return m.group(1) + f.read() - except IOError: - # the linker should return an error if it can't - # find the linker command file so we will remain quiet. 
- # However, we will replace the @ with a # so we will not continue - # to find it with recursive substitution - return m.group(1) + '#' + m.group(2) - -class LinklocGenerator(object): - def __init__(self, cmdline): - self.cmdline = cmdline - - def __call__(self, env, target, source, for_signature): - if for_signature: - # Expand the contents of any linker command files recursively - subs = 1 - strsub = env.subst(self.cmdline, target=target, source=source) - while subs: - strsub, subs = _re_linker_command.subn(repl_linker_command, strsub) - return strsub - else: - return "${TEMPFILE('" + self.cmdline + "')}" - -def generate(env): - """Add Builders and construction variables for ar to an Environment.""" - SCons.Tool.createSharedLibBuilder(env) - SCons.Tool.createProgBuilder(env) - - env['SUBST_CMD_FILE'] = LinklocGenerator - env['SHLINK'] = '$LINK' - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS') - env['SHLINKCOM'] = '${SUBST_CMD_FILE("$SHLINK $SHLINKFLAGS $_LIBDIRFLAGS $_LIBFLAGS -dll $TARGET $SOURCES")}' - env['SHLIBEMITTER']= None - env['LDMODULEEMITTER']= None - env['LINK'] = "linkloc" - env['LINKFLAGS'] = SCons.Util.CLVar('') - env['LINKCOM'] = '${SUBST_CMD_FILE("$LINK $LINKFLAGS $_LIBDIRFLAGS $_LIBFLAGS -exe $TARGET $SOURCES")}' - env['LIBDIRPREFIX']='-libpath ' - env['LIBDIRSUFFIX']='' - env['LIBLINKPREFIX']='-lib ' - env['LIBLINKSUFFIX']='$LIBSUFFIX' - - # Set-up ms tools paths for default version - merge_default_version(env) - - addPharLapPaths(env) - -def exists(env): - if msvs_exists(): - return env.Detect('linkloc') - else: - return 0 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/m4.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/m4.py deleted file mode 100644 index b71f6befd32..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/m4.py +++ /dev/null @@ -1,63 +0,0 @@ 
-"""SCons.Tool.m4 - -Tool-specific initialization for m4. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/m4.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Builder -import SCons.Util - -def generate(env): - """Add Builders and construction variables for m4 to an Environment.""" - M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR') - bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4') - - env['BUILDERS']['M4'] = bld - - # .m4 files might include other files, and it would be pretty hard - # to write a scanner for it, so let's just cd to the dir of the m4 - # file and run from there. 
- # The src_suffix setup is like so: file.c.m4 -> file.c, - # file.cpp.m4 -> file.cpp etc. - env['M4'] = 'm4' - env['M4FLAGS'] = SCons.Util.CLVar('-E') - env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}' - -def exists(env): - return env.Detect('m4') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/masm.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/masm.py deleted file mode 100644 index 08977147e5b..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/masm.py +++ /dev/null @@ -1,77 +0,0 @@ -"""SCons.Tool.masm - -Tool-specific initialization for the Microsoft Assembler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/masm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -ASSuffixes = ['.s', '.asm', '.ASM'] -ASPPSuffixes = ['.spp', '.SPP', '.sx'] -if SCons.Util.case_sensitive_suffixes('.s', '.S'): - ASPPSuffixes.extend(['.S']) -else: - ASSuffixes.extend(['.S']) - -def generate(env): - """Add Builders and construction variables for masm to an Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in ASSuffixes: - static_obj.add_action(suffix, SCons.Defaults.ASAction) - shared_obj.add_action(suffix, SCons.Defaults.ASAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - for suffix in ASPPSuffixes: - static_obj.add_action(suffix, SCons.Defaults.ASPPAction) - shared_obj.add_action(suffix, SCons.Defaults.ASPPAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) - - env['AS'] = 'ml' - env['ASFLAGS'] = SCons.Util.CLVar('/nologo') - env['ASPPFLAGS'] = '$ASFLAGS' - env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES' - env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - -def exists(env): - return env.Detect('ml') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/midl.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/midl.py deleted 
file mode 100644 index 5ec5974f7f1..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/midl.py +++ /dev/null @@ -1,88 +0,0 @@ -"""SCons.Tool.midl - -Tool-specific initialization for midl (Microsoft IDL compiler). - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/midl.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Scanner.IDL -import SCons.Util - -from .MSCommon import msvc_exists - -def midl_emitter(target, source, env): - """Produces a list of outputs from the MIDL compiler""" - base, _ = SCons.Util.splitext(str(target[0])) - tlb = target[0] - incl = base + '.h' - interface = base + '_i.c' - targets = [tlb, incl, interface] - - midlcom = env['MIDLCOM'] - - if midlcom.find('/proxy') != -1: - proxy = base + '_p.c' - targets.append(proxy) - if midlcom.find('/dlldata') != -1: - dlldata = base + '_data.c' - targets.append(dlldata) - - return (targets, source) - -idl_scanner = SCons.Scanner.IDL.IDLScan() - -midl_action = SCons.Action.Action('$MIDLCOM', '$MIDLCOMSTR') - -midl_builder = SCons.Builder.Builder(action = midl_action, - src_suffix = '.idl', - suffix='.tlb', - emitter = midl_emitter, - source_scanner = idl_scanner) - -def generate(env): - """Add Builders and construction variables for midl to an Environment.""" - - env['MIDL'] = 'MIDL.EXE' - env['MIDLFLAGS'] = SCons.Util.CLVar('/nologo') - env['MIDLCOM'] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL' - env['BUILDERS']['TypeLibrary'] = midl_builder - -def exists(env): - return msvc_exists(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mingw.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mingw.py deleted file mode 100644 index 3152bf29581..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mingw.py +++ /dev/null @@ -1,204 +0,0 @@ -"""SCons.Tool.gcc - -Tool-specific initialization for MinGW (http://www.mingw.org/) - -There normally shouldn't be any need to 
import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/mingw.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path -import glob - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Tool -import SCons.Util - -mingw_paths = [ - r'c:\MinGW\bin', - r'C:\cygwin64\bin', - r'C:\msys64', - r'C:\cygwin\bin', - r'C:\msys', -] - - -def shlib_generator(target, source, env, for_signature): - cmd = SCons.Util.CLVar(['$SHLINK', '$SHLINKFLAGS']) - - dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') - if dll: cmd.extend(['-o', dll]) - - cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS']) - - implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') - if implib: cmd.append('-Wl,--out-implib,' + implib.get_string(for_signature)) - - def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') - insert_def = env.subst("$WINDOWS_INSERT_DEF") - if insert_def not in ['', '0', 0] and def_target: \ - cmd.append('-Wl,--output-def,' + def_target.get_string(for_signature)) - - return [cmd] - - -def shlib_emitter(target, source, env): - dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') - no_import_lib = env.get('no_import_lib', 0) - - if not dll: - raise SCons.Errors.UserError( - "A shared library should have exactly one target with the suffix: %s Target(s) are:%s" % \ - (env.subst("$SHLIBSUFFIX"), ",".join([str(t) for t in target]))) - - if not no_import_lib and \ - not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'): - # Create list of target libraries as strings - targetStrings = env.ReplaceIxes(dll, - 'SHLIBPREFIX', 'SHLIBSUFFIX', - 'LIBPREFIX', 'LIBSUFFIX') - - # Now add file nodes to target list - target.append(env.fs.File(targetStrings)) - - # Append a def file target if there isn't already a def file target - # or a def file source or the user has explicitly asked for the target - # to be emitted. 
- def_source = env.FindIxes(source, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') - def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') - skip_def_insert = env.subst("$WINDOWS_INSERT_DEF") in ['', '0', 0] - if not def_source and not def_target and not skip_def_insert: - # Create list of target libraries and def files as strings - targetStrings = env.ReplaceIxes(dll, - 'SHLIBPREFIX', 'SHLIBSUFFIX', - 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') - - # Now add file nodes to target list - target.append(env.fs.File(targetStrings)) - - return (target, source) - - -shlib_action = SCons.Action.Action(shlib_generator, '$SHLINKCOMSTR', generator=1) -ldmodule_action = SCons.Action.Action(shlib_generator, '$LDMODULECOMSTR', generator=1) - -res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR') - -res_builder = SCons.Builder.Builder(action=res_action, suffix='.o', - source_scanner=SCons.Tool.SourceFileScanner) -SCons.Tool.SourceFileScanner.add_scanner('.rc', SCons.Defaults.CScan) - -# This is what we search for to find mingw: -# key_program = 'mingw32-gcc' -key_program = 'mingw32-make' - - -def find_version_specific_mingw_paths(): - r""" - One example of default mingw install paths is: - C:\mingw-w64\x86_64-6.3.0-posix-seh-rt_v5-rev2\mingw64\bin - - Use glob'ing to find such and add to mingw_paths - """ - new_paths = glob.glob(r"C:\mingw-w64\*\mingw64\bin") - - return new_paths - - -def generate(env): - global mingw_paths - # Check for reasoanble mingw default paths - mingw_paths += find_version_specific_mingw_paths() - - mingw = SCons.Tool.find_program_path(env, key_program, default_paths=mingw_paths) - if mingw: - mingw_bin_dir = os.path.dirname(mingw) - env.AppendENVPath('PATH', mingw_bin_dir) - - # Most of mingw is the same as gcc and friends... - gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas', 'gfortran', 'm4'] - for tool in gnu_tools: - SCons.Tool.Tool(tool)(env) - - # ... 
but a few things differ: - env['CC'] = 'gcc' - # make sure the msvc tool doesnt break us, it added a /flag - if 'CCFLAGS' in env: - # make sure its a CLVar to handle list or str cases - if type(env['CCFLAGS']) is not SCons.Util.CLVar: - env['CCFLAGS'] = SCons.Util.CLVar(env['CCFLAGS']) - env['CCFLAGS'] = SCons.Util.CLVar(str(env['CCFLAGS']).replace('/nologo', '')) - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') - env['CXX'] = 'g++' - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') - env['SHLINKCOM'] = shlib_action - env['LDMODULECOM'] = ldmodule_action - env.Append(SHLIBEMITTER=[shlib_emitter]) - env.Append(LDMODULEEMITTER=[shlib_emitter]) - env['AS'] = 'as' - - env['WIN32DEFPREFIX'] = '' - env['WIN32DEFSUFFIX'] = '.def' - env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}' - env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}' - - env['SHOBJSUFFIX'] = '.o' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - env['RC'] = 'windres' - env['RCFLAGS'] = SCons.Util.CLVar('') - env['RCINCFLAGS'] = '$( ${_concat(RCINCPREFIX, CPPPATH, RCINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' - env['RCINCPREFIX'] = '--include-dir ' - env['RCINCSUFFIX'] = '' - env['RCCOM'] = '$RC $_CPPDEFFLAGS $RCINCFLAGS ${RCINCPREFIX} ${SOURCE.dir} $RCFLAGS -i $SOURCE -o $TARGET' - env['BUILDERS']['RES'] = res_builder - - # Some setting from the platform also have to be overridden: - env['OBJSUFFIX'] = '.o' - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.a' - env['PROGSUFFIX'] = '.exe' - - -def exists(env): - mingw = SCons.Tool.find_program_path(env, key_program, default_paths=mingw_paths) - if mingw: - mingw_bin_dir = os.path.dirname(mingw) - env.AppendENVPath('PATH', mingw_bin_dir) - - return mingw - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msgfmt.py 
b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msgfmt.py deleted file mode 100644 index 1ee45e7dab2..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msgfmt.py +++ /dev/null @@ -1,122 +0,0 @@ -""" msgfmt tool """ - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Tool/msgfmt.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Builder import BuilderBase -############################################################################# -class _MOFileBuilder(BuilderBase): - """ The builder class for `MO` files. - - The reason for this builder to exists and its purpose is quite simillar - as for `_POFileBuilder`. This time, we extend list of sources, not targets, - and call `BuilderBase._execute()` only once (as we assume single-target - here). 
- """ - - def _execute(self, env, target, source, *args, **kw): - # Here we add support for 'LINGUAS_FILE' keyword. Emitter is not suitable - # in this case, as it is called too late (after multiple sources - # are handled single_source builder. - import SCons.Util - from SCons.Tool.GettextCommon import _read_linguas_from_files - linguas_files = None - if 'LINGUAS_FILE' in env and env['LINGUAS_FILE'] is not None: - linguas_files = env['LINGUAS_FILE'] - # This should prevent from endless recursion. - env['LINGUAS_FILE'] = None - # We read only languages. Suffixes shall be added automatically. - linguas = _read_linguas_from_files(env, linguas_files) - if SCons.Util.is_List(source): - source.extend(linguas) - elif source is not None: - source = [source] + linguas - else: - source = linguas - result = BuilderBase._execute(self,env,target,source,*args, **kw) - if linguas_files is not None: - env['LINGUAS_FILE'] = linguas_files - return result -############################################################################# - -############################################################################# -def _create_mo_file_builder(env, **kw): - """ Create builder object for `MOFiles` builder """ - import SCons.Action - # FIXME: What factory use for source? Ours or their? 
- kw['action'] = SCons.Action.Action('$MSGFMTCOM','$MSGFMTCOMSTR') - kw['suffix'] = '$MOSUFFIX' - kw['src_suffix'] = '$POSUFFIX' - kw['src_builder'] = '_POUpdateBuilder' - kw['single_source'] = True - return _MOFileBuilder(**kw) -############################################################################# - -############################################################################# -def generate(env,**kw): - """ Generate `msgfmt` tool """ - import sys - import os - import SCons.Util - import SCons.Tool - from SCons.Tool.GettextCommon import _detect_msgfmt - from SCons.Platform.mingw import MINGW_DEFAULT_PATHS - from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS - - if sys.platform == 'win32': - msgfmt = SCons.Tool.find_program_path(env, 'msgfmt', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if msgfmt: - msgfmt_bin_dir = os.path.dirname(msgfmt) - env.AppendENVPath('PATH', msgfmt_bin_dir) - else: - SCons.Warnings.Warning('msgfmt tool requested, but binary not found in ENV PATH') - - try: - env['MSGFMT'] = _detect_msgfmt(env) - except: - env['MSGFMT'] = 'msgfmt' - env.SetDefault( - MSGFMTFLAGS = [ SCons.Util.CLVar('-c') ], - MSGFMTCOM = '$MSGFMT $MSGFMTFLAGS -o $TARGET $SOURCE', - MSGFMTCOMSTR = '', - MOSUFFIX = ['.mo'], - POSUFFIX = ['.po'] - ) - env.Append( BUILDERS = { 'MOFiles' : _create_mo_file_builder(env) } ) -############################################################################# - -############################################################################# -def exists(env): - """ Check if the tool exists """ - from SCons.Tool.GettextCommon import _msgfmt_exists - try: - return _msgfmt_exists(env) - except: - return False -############################################################################# - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msginit.py 
b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msginit.py deleted file mode 100644 index 8ce9f02a1bf..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msginit.py +++ /dev/null @@ -1,134 +0,0 @@ -""" msginit tool - -Tool specific initialization of msginit tool. -""" - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/msginit.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Warnings -import SCons.Builder -import re - -############################################################################# -def _optional_no_translator_flag(env): - """ Return '--no-translator' flag if we run *msginit(1)* in non-interactive - mode.""" - import SCons.Util - if 'POAUTOINIT' in env: - autoinit = env['POAUTOINIT'] - else: - autoinit = False - if autoinit: - return [SCons.Util.CLVar('--no-translator')] - else: - return [SCons.Util.CLVar('')] -############################################################################# - -############################################################################# -def _POInitBuilder(env, **kw): - """ Create builder object for `POInit` builder. """ - import SCons.Action - from SCons.Tool.GettextCommon import _init_po_files, _POFileBuilder - action = SCons.Action.Action(_init_po_files, None) - return _POFileBuilder(env, action=action, target_alias='$POCREATE_ALIAS') -############################################################################# - -############################################################################# -from SCons.Environment import _null -############################################################################# -def _POInitBuilderWrapper(env, target=None, source=_null, **kw): - """ Wrapper for _POFileBuilder. We use it to make user's life easier. - - This wrapper checks for `$POTDOMAIN` construction variable (or override in - `**kw`) and treats it appropriatelly. 
- """ - if source is _null: - if 'POTDOMAIN' in kw: - domain = kw['POTDOMAIN'] - elif 'POTDOMAIN' in env: - domain = env['POTDOMAIN'] - else: - domain = 'messages' - source = [ domain ] # NOTE: Suffix shall be appended automatically - return env._POInitBuilder(target, source, **kw) -############################################################################# - -############################################################################# -def generate(env,**kw): - """ Generate the `msginit` tool """ - import sys - import os - import SCons.Util - import SCons.Tool - from SCons.Tool.GettextCommon import _detect_msginit - from SCons.Platform.mingw import MINGW_DEFAULT_PATHS - from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS - - if sys.platform == 'win32': - msginit = SCons.Tool.find_program_path(env, 'msginit', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if msginit: - msginit_bin_dir = os.path.dirname(msginit) - env.AppendENVPath('PATH', msginit_bin_dir) - else: - SCons.Warnings.Warning('msginit tool requested, but binary not found in ENV PATH') - - try: - env['MSGINIT'] = _detect_msginit(env) - except: - env['MSGINIT'] = 'msginit' - msginitcom = '$MSGINIT ${_MSGNoTranslator(__env__)} -l ${_MSGINITLOCALE}' \ - + ' $MSGINITFLAGS -i $SOURCE -o $TARGET' - # NOTE: We set POTSUFFIX here, in case the 'xgettext' is not loaded - # (sometimes we really don't need it) - env.SetDefault( - POSUFFIX = ['.po'], - POTSUFFIX = ['.pot'], - _MSGINITLOCALE = '${TARGET.filebase}', - _MSGNoTranslator = _optional_no_translator_flag, - MSGINITCOM = msginitcom, - MSGINITCOMSTR = '', - MSGINITFLAGS = [ ], - POAUTOINIT = False, - POCREATE_ALIAS = 'po-create' - ) - env.Append( BUILDERS = { '_POInitBuilder' : _POInitBuilder(env) } ) - env.AddMethod(_POInitBuilderWrapper, 'POInit') - env.AlwaysBuild(env.Alias('$POCREATE_ALIAS')) -############################################################################# - 
-############################################################################# -def exists(env): - """ Check if the tool exists """ - from SCons.Tool.GettextCommon import _msginit_exists - try: - return _msginit_exists(env) - except: - return False -############################################################################# - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msgmerge.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msgmerge.py deleted file mode 100644 index 748041db670..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msgmerge.py +++ /dev/null @@ -1,117 +0,0 @@ -""" msgmerget tool - -Tool specific initialization for `msgmerge` tool. -""" - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/msgmerge.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -############################################################################# -def _update_or_init_po_files(target, source, env): - """ Action function for `POUpdate` builder """ - import SCons.Action - from SCons.Tool.GettextCommon import _init_po_files - for tgt in target: - if tgt.rexists(): - action = SCons.Action.Action('$MSGMERGECOM', '$MSGMERGECOMSTR') - else: - action = _init_po_files - status = action([tgt], source, env) - if status : return status - return 0 -############################################################################# - -############################################################################# -def _POUpdateBuilder(env, **kw): - """ Create an object of `POUpdate` builder """ - import SCons.Action - from SCons.Tool.GettextCommon import _POFileBuilder - action = SCons.Action.Action(_update_or_init_po_files, None) - return _POFileBuilder(env, action=action, target_alias='$POUPDATE_ALIAS') -############################################################################# - -############################################################################# -from SCons.Environment import _null -############################################################################# -def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw): - """ Wrapper for `POUpdate` builder - make user's life easier """ - if source is _null: - if 'POTDOMAIN' in kw: - domain = kw['POTDOMAIN'] - elif 'POTDOMAIN' in env and env['POTDOMAIN']: - domain = env['POTDOMAIN'] - else: - domain = 'messages' - source = [ domain ] # NOTE: Suffix shall be appended automatically - return env._POUpdateBuilder(target, source, **kw) -############################################################################# - -############################################################################# -def generate(env,**kw): - """ Generate the `msgmerge` tool """ - 
import sys - import os - import SCons.Tool - from SCons.Tool.GettextCommon import _detect_msgmerge - from SCons.Platform.mingw import MINGW_DEFAULT_PATHS - from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS - - if sys.platform == 'win32': - msgmerge = SCons.Tool.find_program_path(env, 'msgmerge', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if msgmerge: - msgmerge_bin_dir = os.path.dirname(msgmerge) - env.AppendENVPath('PATH', msgmerge_bin_dir) - else: - SCons.Warnings.Warning('msgmerge tool requested, but binary not found in ENV PATH') - try: - env['MSGMERGE'] = _detect_msgmerge(env) - except: - env['MSGMERGE'] = 'msgmerge' - env.SetDefault( - POTSUFFIX = ['.pot'], - POSUFFIX = ['.po'], - MSGMERGECOM = '$MSGMERGE $MSGMERGEFLAGS --update $TARGET $SOURCE', - MSGMERGECOMSTR = '', - MSGMERGEFLAGS = [ ], - POUPDATE_ALIAS = 'po-update' - ) - env.Append(BUILDERS = { '_POUpdateBuilder':_POUpdateBuilder(env) }) - env.AddMethod(_POUpdateBuilderWrapper, 'POUpdate') - env.AlwaysBuild(env.Alias('$POUPDATE_ALIAS')) -############################################################################# - -############################################################################# -def exists(env): - """ Check if the tool exists """ - from SCons.Tool.GettextCommon import _msgmerge_exists - try: - return _msgmerge_exists(env) - except: - return False -############################################################################# - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mslib.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mslib.py deleted file mode 100644 index 88b5b1e109a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mslib.py +++ /dev/null @@ -1,73 +0,0 @@ -"""SCons.Tool.mslib - -Tool-specific initialization for lib (MicroSoft library archiver). 
- -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/mslib.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os - -import SCons.Defaults -import SCons.Tool -import SCons.Tool.msvs -import SCons.Tool.msvc -import SCons.Util - -from .MSCommon import msvc_exists, msvc_setup_env_once - -def generate(env): - """Add Builders and construction variables for lib to an Environment.""" - SCons.Tool.createStaticLibBuilder(env) - - # Set-up ms tools paths - msvc_setup_env_once(env) - - env['AR'] = 'lib' - env['ARFLAGS'] = SCons.Util.CLVar('/nologo') - env['ARCOM'] = "${TEMPFILE('$AR $ARFLAGS /OUT:$TARGET $SOURCES','$ARCOMSTR')}" - env['LIBPREFIX'] = '' - env['LIBSUFFIX'] = '.lib' - - # Issue #3350 - # Change tempfile argument joining character from a space to a newline - # mslink will fail if any single line is too long, but is fine with many lines - # in a tempfile - env['TEMPFILEARGJOIN'] = os.linesep - - -def exists(env): - return msvc_exists(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mslink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mslink.py deleted file mode 100644 index d995d3bdcc4..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mslink.py +++ /dev/null @@ -1,344 +0,0 @@ -"""SCons.Tool.mslink - -Tool-specific initialization for the Microsoft linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/mslink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path - -import SCons.Action -import SCons.Defaults -import SCons.Errors -import SCons.Platform.win32 -import SCons.Tool -import SCons.Tool.msvc -import SCons.Tool.msvs -import SCons.Util - -from .MSCommon import msvc_setup_env_once, msvc_exists - -def pdbGenerator(env, target, source, for_signature): - try: - return ['/PDB:%s' % target[0].attributes.pdb, '/DEBUG'] - except (AttributeError, IndexError): - return None - -def _dllTargets(target, source, env, for_signature, paramtp): - listCmd = [] - dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp) - if dll: listCmd.append("/out:%s"%dll.get_string(for_signature)) - - implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') - if implib: listCmd.append("/implib:%s"%implib.get_string(for_signature)) - - return listCmd - -def _dllSources(target, source, env, for_signature, paramtp): - listCmd = [] - - deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX") - for src in source: - # Check explicitly for a non-None deffile so that the __cmp__ - # method of the base SCons.Util.Proxy class used for some Node - # proxies doesn't try to use a non-existent __dict__ attribute. - if deffile and src == deffile: - # Treat this source as a .def file. - listCmd.append("/def:%s" % src.get_string(for_signature)) - else: - # Just treat it as a generic source file. 
- listCmd.append(src) - return listCmd - -def windowsShlinkTargets(target, source, env, for_signature): - return _dllTargets(target, source, env, for_signature, 'SHLIB') - -def windowsShlinkSources(target, source, env, for_signature): - return _dllSources(target, source, env, for_signature, 'SHLIB') - -def _windowsLdmodTargets(target, source, env, for_signature): - """Get targets for loadable modules.""" - return _dllTargets(target, source, env, for_signature, 'LDMODULE') - -def _windowsLdmodSources(target, source, env, for_signature): - """Get sources for loadable modules.""" - return _dllSources(target, source, env, for_signature, 'LDMODULE') - -def _dllEmitter(target, source, env, paramtp): - """Common implementation of dll emitter.""" - SCons.Tool.msvc.validate_vars(env) - - extratargets = [] - extrasources = [] - - dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp) - no_import_lib = env.get('no_import_lib', 0) - - if not dll: - raise SCons.Errors.UserError('A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp)) - - insert_def = env.subst("$WINDOWS_INSERT_DEF") - if insert_def not in ['', '0', 0] and \ - not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"): - - # append a def file to the list of sources - extrasources.append( - env.ReplaceIxes(dll, - '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, - "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")) - - version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0')) - if version_num >= 8.0 and \ - (env.get('WINDOWS_INSERT_MANIFEST', 0) or env.get('WINDOWS_EMBED_MANIFEST', 0)): - # MSVC 8 and above automatically generate .manifest files that must be installed - extratargets.append( - env.ReplaceIxes(dll, - '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, - "WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX")) - - if 'PDB' in env and env['PDB']: - pdb = env.arg2nodes('$PDB', target=target, source=source)[0] - 
extratargets.append(pdb) - target[0].attributes.pdb = pdb - - if version_num >= 11.0 and env.get('PCH', 0): - # MSVC 11 and above need the PCH object file to be added to the link line, - # otherwise you get link error LNK2011. - pchobj = SCons.Util.splitext(str(env['PCH']))[0] + '.obj' - # print "prog_emitter, version %s, appending pchobj %s"%(version_num, pchobj) - if pchobj not in extrasources: - extrasources.append(pchobj) - - if not no_import_lib and \ - not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"): - # Append an import library to the list of targets. - extratargets.append( - env.ReplaceIxes(dll, - '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, - "LIBPREFIX", "LIBSUFFIX")) - # and .exp file is created if there are exports from a DLL - extratargets.append( - env.ReplaceIxes(dll, - '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, - "WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX")) - - return (target+extratargets, source+extrasources) - -def windowsLibEmitter(target, source, env): - return _dllEmitter(target, source, env, 'SHLIB') - -def ldmodEmitter(target, source, env): - """Emitter for loadable modules. - - Loadable modules are identical to shared libraries on Windows, but building - them is subject to different parameters (LDMODULE*). 
- """ - return _dllEmitter(target, source, env, 'LDMODULE') - -def prog_emitter(target, source, env): - SCons.Tool.msvc.validate_vars(env) - - extratargets = [] - extrasources = [] - - exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX") - if not exe: - raise SCons.Errors.UserError("An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX")) - - version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0')) - if version_num >= 8.0 and \ - (env.get('WINDOWS_INSERT_MANIFEST', 0) or env.get('WINDOWS_EMBED_MANIFEST', 0)): - # MSVC 8 and above automatically generate .manifest files that have to be installed - extratargets.append( - env.ReplaceIxes(exe, - "PROGPREFIX", "PROGSUFFIX", - "WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX")) - - if 'PDB' in env and env['PDB']: - pdb = env.arg2nodes('$PDB', target=target, source=source)[0] - extratargets.append(pdb) - target[0].attributes.pdb = pdb - - if version_num >= 11.0 and env.get('PCH', 0): - # MSVC 11 and above need the PCH object file to be added to the link line, - # otherwise you get link error LNK2011. - pchobj = SCons.Util.splitext(str(env['PCH']))[0] + '.obj' - # print("prog_emitter, version %s, appending pchobj %s"%(version_num, pchobj)) - if pchobj not in extrasources: - extrasources.append(pchobj) - - return (target+extratargets,source+extrasources) - -def RegServerFunc(target, source, env): - if 'register' in env and env['register']: - ret = regServerAction([target[0]], [source[0]], env) - if ret: - raise SCons.Errors.UserError("Unable to register %s" % target[0]) - else: - print("Registered %s sucessfully" % target[0]) - return ret - return 0 - -# These are the actual actions run to embed the manifest. -# They are only called from the Check versions below. 
-embedManifestExeAction = SCons.Action.Action('$MTEXECOM') -embedManifestDllAction = SCons.Action.Action('$MTSHLIBCOM') - -def embedManifestDllCheck(target, source, env): - """Function run by embedManifestDllCheckAction to check for existence of manifest - and other conditions, and embed the manifest by calling embedManifestDllAction if so.""" - if env.get('WINDOWS_EMBED_MANIFEST', 0): - manifestSrc = target[0].get_abspath() + '.manifest' - if os.path.exists(manifestSrc): - ret = (embedManifestDllAction) ([target[0]],None,env) - if ret: - raise SCons.Errors.UserError("Unable to embed manifest into %s" % (target[0])) - return ret - else: - print('(embed: no %s.manifest found; not embedding.)'%str(target[0])) - return 0 - -def embedManifestExeCheck(target, source, env): - """Function run by embedManifestExeCheckAction to check for existence of manifest - and other conditions, and embed the manifest by calling embedManifestExeAction if so.""" - if env.get('WINDOWS_EMBED_MANIFEST', 0): - manifestSrc = target[0].get_abspath() + '.manifest' - if os.path.exists(manifestSrc): - ret = (embedManifestExeAction) ([target[0]],None,env) - if ret: - raise SCons.Errors.UserError("Unable to embed manifest into %s" % (target[0])) - return ret - else: - print('(embed: no %s.manifest found; not embedding.)'%str(target[0])) - return 0 - -embedManifestDllCheckAction = SCons.Action.Action(embedManifestDllCheck, None) -embedManifestExeCheckAction = SCons.Action.Action(embedManifestExeCheck, None) - -regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR") -regServerCheck = SCons.Action.Action(RegServerFunc, None) -shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}', '$SHLINKCOMSTR') -compositeShLinkAction = shlibLinkAction + regServerCheck + embedManifestDllCheckAction -ldmodLinkAction = SCons.Action.Action('${TEMPFILE("$LDMODULE $LDMODULEFLAGS $_LDMODULE_TARGETS 
$_LIBDIRFLAGS $_LIBFLAGS $_PDB $_LDMODULE_SOURCES", "$LDMODULECOMSTR")}', '$LDMODULECOMSTR') -compositeLdmodAction = ldmodLinkAction + regServerCheck + embedManifestDllCheckAction -exeLinkAction = SCons.Action.Action('${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}', '$LINKCOMSTR') -compositeLinkAction = exeLinkAction + embedManifestExeCheckAction - -def generate(env): - """Add Builders and construction variables for ar to an Environment.""" - SCons.Tool.createSharedLibBuilder(env) - SCons.Tool.createProgBuilder(env) - - env['SHLINK'] = '$LINK' - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll') - env['_SHLINK_TARGETS'] = windowsShlinkTargets - env['_SHLINK_SOURCES'] = windowsShlinkSources - env['SHLINKCOM'] = compositeShLinkAction - env.Append(SHLIBEMITTER = [windowsLibEmitter]) - env.Append(LDMODULEEMITTER = [windowsLibEmitter]) - env['LINK'] = 'link' - env['LINKFLAGS'] = SCons.Util.CLVar('/nologo') - env['_PDB'] = pdbGenerator - env['LINKCOM'] = compositeLinkAction - env.Append(PROGEMITTER = [prog_emitter]) - env['LIBDIRPREFIX']='/LIBPATH:' - env['LIBDIRSUFFIX']='' - env['LIBLINKPREFIX']='' - env['LIBLINKSUFFIX']='$LIBSUFFIX' - - env['WIN32DEFPREFIX'] = '' - env['WIN32DEFSUFFIX'] = '.def' - env['WIN32_INSERT_DEF'] = 0 - env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}' - env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}' - env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}' - - env['WIN32EXPPREFIX'] = '' - env['WIN32EXPSUFFIX'] = '.exp' - env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}' - env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}' - - env['WINDOWSSHLIBMANIFESTPREFIX'] = '' - env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest' - env['WINDOWSPROGMANIFESTPREFIX'] = '' - env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest' - - env['REGSVRACTION'] = regServerCheck - env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32') - env['REGSVRFLAGS'] = '/s ' - 
env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}' - - env['WINDOWS_EMBED_MANIFEST'] = 0 - env['MT'] = 'mt' - #env['MTFLAGS'] = ['-hashupdate'] - env['MTFLAGS'] = SCons.Util.CLVar('/nologo') - # Note: use - here to prevent build failure if no manifest produced. - # This seems much simpler than a fancy system using a function action to see - # if the manifest actually exists before trying to run mt with it. - env['MTEXECOM'] = '-$MT $MTFLAGS -manifest ${TARGET}.manifest $_MANIFEST_SOURCES -outputresource:$TARGET;1' - env['MTSHLIBCOM'] = '-$MT $MTFLAGS -manifest ${TARGET}.manifest $_MANIFEST_SOURCES -outputresource:$TARGET;2' - # TODO Future work garyo 27-Feb-11 - env['_MANIFEST_SOURCES'] = None # _windowsManifestSources - - # Set-up ms tools paths - msvc_setup_env_once(env) - - - # Loadable modules are on Windows the same as shared libraries, but they - # are subject to different build parameters (LDMODULE* variables). - # Therefore LDMODULE* variables correspond as much as possible to - # SHLINK*/SHLIB* ones. 
- SCons.Tool.createLoadableModuleBuilder(env) - env['LDMODULE'] = '$SHLINK' - env['LDMODULEPREFIX'] = '$SHLIBPREFIX' - env['LDMODULESUFFIX'] = '$SHLIBSUFFIX' - env['LDMODULEFLAGS'] = '$SHLINKFLAGS' - env['_LDMODULE_TARGETS'] = _windowsLdmodTargets - env['_LDMODULE_SOURCES'] = _windowsLdmodSources - env['LDMODULEEMITTER'] = [ldmodEmitter] - env['LDMODULECOM'] = compositeLdmodAction - - # Issue #3350 - # Change tempfile argument joining character from a space to a newline - # mslink will fail if any single line is too long, but is fine with many lines - # in a tempfile - env['TEMPFILEARGJOIN'] = os.linesep - -def exists(env): - return msvc_exists(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mssdk.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mssdk.py deleted file mode 100644 index 55cb54aa262..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mssdk.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/mssdk.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -"""engine.SCons.Tool.mssdk - -Tool-specific initialization for Microsoft SDKs, both Platform -SDKs and Windows SDKs. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -from .MSCommon import mssdk_exists, \ - mssdk_setup_env - -def generate(env): - """Add construction variables for an MS SDK to an Environment.""" - mssdk_setup_env(env) - -def exists(env): - return mssdk_exists() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msvc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msvc.py deleted file mode 100644 index 00130154270..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msvc.py +++ /dev/null @@ -1,309 +0,0 @@ -"""engine.SCons.Tool.msvc - -Tool-specific initialization for Microsoft Visual C/C++. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/msvc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import os -import re -import sys - -import SCons.Action -import SCons.Builder -import SCons.Errors -import SCons.Platform.win32 -import SCons.Tool -import SCons.Tool.msvs -import SCons.Util -import SCons.Warnings -import SCons.Scanner.RC - -from .MSCommon import msvc_exists, msvc_setup_env_once, msvc_version_to_maj_min - -CSuffixes = ['.c', '.C'] -CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++'] - -def validate_vars(env): - """Validate the PCH and PCHSTOP construction variables.""" - if 'PCH' in env and env['PCH']: - if 'PCHSTOP' not in env: - raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.") - if not SCons.Util.is_String(env['PCHSTOP']): - raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP']) - -def msvc_set_PCHPDBFLAGS(env): - """ - Set appropriate PCHPDBFLAGS for the MSVC version being used. 
- """ - if env.get('MSVC_VERSION',False): - maj, min = msvc_version_to_maj_min(env['MSVC_VERSION']) - if maj < 8: - env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}']) - else: - env['PCHPDBFLAGS'] = '' - else: - # Default if we can't determine which version of MSVC we're using - env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}']) - - -def pch_emitter(target, source, env): - """Adds the object file target.""" - - validate_vars(env) - - pch = None - obj = None - - for t in target: - if SCons.Util.splitext(str(t))[1] == '.pch': - pch = t - if SCons.Util.splitext(str(t))[1] == '.obj': - obj = t - - if not obj: - obj = SCons.Util.splitext(str(pch))[0]+'.obj' - - target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work - - return (target, source) - -def object_emitter(target, source, env, parent_emitter): - """Sets up the PCH dependencies for an object file.""" - - validate_vars(env) - - parent_emitter(target, source, env) - - # Add a dependency, but only if the target (e.g. 'Source1.obj') - # doesn't correspond to the pre-compiled header ('Source1.pch'). - # If the basenames match, then this was most likely caused by - # someone adding the source file to both the env.PCH() and the - # env.Program() calls, and adding the explicit dependency would - # cause a cycle on the .pch file itself. 
- # - # See issue #2505 for a discussion of what to do if it turns - # out this assumption causes trouble in the wild: - # https://github.com/SCons/scons/issues/2505 - if 'PCH' in env: - pch = env['PCH'] - if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj': - env.Depends(target, pch) - - return (target, source) - -def static_object_emitter(target, source, env): - return object_emitter(target, source, env, - SCons.Defaults.StaticObjectEmitter) - -def shared_object_emitter(target, source, env): - return object_emitter(target, source, env, - SCons.Defaults.SharedObjectEmitter) - -pch_action = SCons.Action.Action('$PCHCOM', '$PCHCOMSTR') -pch_builder = SCons.Builder.Builder(action=pch_action, suffix='.pch', - emitter=pch_emitter, - source_scanner=SCons.Tool.SourceFileScanner) - - -# Logic to build .rc files into .res files (resource files) -res_scanner = SCons.Scanner.RC.RCScan() -res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR') -res_builder = SCons.Builder.Builder(action=res_action, - src_suffix='.rc', - suffix='.res', - src_builder=[], - source_scanner=res_scanner) - -def msvc_batch_key(action, env, target, source): - """ - Returns a key to identify unique batches of sources for compilation. - - If batching is enabled (via the $MSVC_BATCH setting), then all - target+source pairs that use the same action, defined by the same - environment, and have the same target and source directories, will - be batched. - - Returning None specifies that the specified target+source should not - be batched with other compilations. - """ - - # Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH - # was set to False. This new version should work better. - # Note we need to do the env.subst so $MSVC_BATCH can be a reference to - # another construction variable, which is why we test for False and 0 - # as strings. - if 'MSVC_BATCH' not in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None): - # We're not using batching; return no key. 
- return None - t = target[0] - s = source[0] - if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]: - # The base names are different, so this *must* be compiled - # separately; return no key. - return None - return (id(action), id(env), t.dir, s.dir) - -def msvc_output_flag(target, source, env, for_signature): - """ - Returns the correct /Fo flag for batching. - - If batching is disabled or there's only one source file, then we - return an /Fo string that specifies the target explicitly. Otherwise, - we return an /Fo string that just specifies the first target's - directory (where the Visual C/C++ compiler will put the .obj files). - """ - - # Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH - # was set to False. This new version should work better. Removed - # len(source)==1 as batch mode can compile only one file - # (and it also fixed problem with compiling only one changed file - # with batch mode enabled) - if 'MSVC_BATCH' not in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None): - return '/Fo$TARGET' - else: - # The Visual C/C++ compiler requires a \ at the end of the /Fo - # option to indicate an output directory. We use os.sep here so - # that the test(s) for this can be run on non-Windows systems - # without having a hard-coded backslash mess up command-line - # argument parsing. 
- # Adding double os.sep's as if the TARGET.dir has a space or otherwise - # needs to be quoted they are needed per MSVC's odd behavior - # See: https://github.com/SCons/scons/issues/3106 - return '/Fo${TARGET.dir}' + os.sep*2 - -CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR", - batch_key=msvc_batch_key, - targets='$CHANGED_TARGETS') -ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR", - batch_key=msvc_batch_key, - targets='$CHANGED_TARGETS') -CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR", - batch_key=msvc_batch_key, - targets='$CHANGED_TARGETS') -ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR", - batch_key=msvc_batch_key, - targets='$CHANGED_TARGETS') - -def generate(env): - """Add Builders and construction variables for MSVC++ to an Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - # TODO(batch): shouldn't reach in to cmdgen this way; necessary - # for now to bypass the checks in Builder.DictCmdGenerator.__call__() - # and allow .cc and .cpp to be compiled in the same command line. 
- static_obj.cmdgen.source_ext_match = False - shared_obj.cmdgen.source_ext_match = False - - for suffix in CSuffixes: - static_obj.add_action(suffix, CAction) - shared_obj.add_action(suffix, ShCAction) - static_obj.add_emitter(suffix, static_object_emitter) - shared_obj.add_emitter(suffix, shared_object_emitter) - - for suffix in CXXSuffixes: - static_obj.add_action(suffix, CXXAction) - shared_obj.add_action(suffix, ShCXXAction) - static_obj.add_emitter(suffix, static_object_emitter) - shared_obj.add_emitter(suffix, shared_object_emitter) - - env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}']) - env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}']) - env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag - env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS' - env['CC'] = 'cl' - env['CCFLAGS'] = SCons.Util.CLVar('/nologo') - env['CFLAGS'] = SCons.Util.CLVar('') - env['CCCOM'] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}' - env['SHCC'] = '$CC' - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') - env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') - env['SHCCCOM'] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}' - env['CXX'] = '$CC' - env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)') - env['CXXCOM'] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}' - env['SHCXX'] = '$CXX' - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') - env['SHCXXCOM'] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}' - env['CPPDEFPREFIX'] = '/D' - env['CPPDEFSUFFIX'] = '' - env['INCPREFIX'] = '/I' - env['INCSUFFIX'] = '' -# env.Append(OBJEMITTER = [static_object_emitter]) -# env.Append(SHOBJEMITTER = [shared_object_emitter]) - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - - 
env['RC'] = 'rc' - env['RCFLAGS'] = SCons.Util.CLVar('/nologo') - env['RCSUFFIXES']=['.rc','.rc2'] - env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES' - env['BUILDERS']['RES'] = res_builder - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.obj' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - - # MSVC probably wont support unistd.h so default - # without it for lex generation - env["LEXUNISTD"] = SCons.Util.CLVar("--nounistd") - - # Set-up ms tools paths - msvc_setup_env_once(env) - - env['CFILESUFFIX'] = '.c' - env['CXXFILESUFFIX'] = '.cc' - - msvc_set_PCHPDBFLAGS(env) - - # Issue #3350 - # Change tempfile argument joining character from a space to a newline - # mslink will fail if any single line is too long, but is fine with many lines - # in a tempfile - env['TEMPFILEARGJOIN'] = os.linesep - - - env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS' - env['BUILDERS']['PCH'] = pch_builder - - if 'ENV' not in env: - env['ENV'] = {} - if 'SystemRoot' not in env['ENV']: # required for dlls in the winsxs folders - env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root() - -def exists(env): - return msvc_exists(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msvs.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msvs.py deleted file mode 100644 index 4a4b3903d8c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/msvs.py +++ /dev/null @@ -1,2078 +0,0 @@ -"""SCons.Tool.msvs - -Tool-specific initialization for Microsoft Visual Studio project files. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/msvs.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.compat - -import base64 -import hashlib -import ntpath -import os -import pickle -import re -import sys - -import SCons.Builder -import SCons.Node.FS -import SCons.Platform.win32 -import SCons.Script.SConscript -import SCons.PathList -import SCons.Util -import SCons.Warnings - -from .MSCommon import msvc_exists, msvc_setup_env_once -from SCons.Defaults import processDefines -from SCons.compat import PICKLE_PROTOCOL - -############################################################################## -# Below here are the classes and functions for generation of -# DSP/DSW/SLN/VCPROJ files. 
-############################################################################## - -def xmlify(s): - s = s.replace("&", "&") # do this first - s = s.replace("'", "'") - s = s.replace('"', """) - s = s.replace('<', "<") - s = s.replace('>', ">") - s = s.replace('\n', ' ') - return s - -def processIncludes(includes, env, target, source): - """ - Process a CPPPATH list in includes, given the env, target and source. - Returns a list of directory paths. These paths are absolute so we avoid - putting pound-prefixed paths in a Visual Studio project file. - """ - return [env.Dir(i).abspath for i in - SCons.PathList.PathList(includes).subst_path(env, target, source)] - - -external_makefile_guid = '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}' - -def _generateGUID(slnfile, name): - """This generates a dummy GUID for the sln file to use. It is - based on the MD5 signatures of the sln filename plus the name of - the project. It basically just needs to be unique, and not - change with each invocation.""" - m = hashlib.md5() - # Normalize the slnfile path to a Windows path (\ separators) so - # the generated file has a consistent GUID even if we generate - # it on a non-Windows platform. - m.update(bytearray(ntpath.normpath(str(slnfile)) + str(name),'utf-8')) - solution = m.hexdigest().upper() - # convert most of the signature to GUID form (discard the rest) - solution = "{" + solution[:8] + "-" + solution[8:12] + "-" + solution[12:16] + "-" + solution[16:20] + "-" + solution[20:32] + "}" - return solution - -version_re = re.compile(r'(\d+\.\d+)(.*)') - -def msvs_parse_version(s): - """ - Split a Visual Studio version, which may in fact be something like - '7.0Exp', into is version number (returned as a float) and trailing - "suite" portion. - """ - num, suite = version_re.match(s).groups() - return float(num), suite - -# This is how we re-invoke SCons from inside MSVS Project files. -# The problem is that we might have been invoked as either scons.bat -# or scons.py. 
If we were invoked directly as scons.py, then we could -# use sys.argv[0] to find the SCons "executable," but that doesn't work -# if we were invoked as scons.bat, which uses "python -c" to execute -# things and ends up with "-c" as sys.argv[0]. Consequently, we have -# the MSVS Project file invoke SCons the same way that scons.bat does, -# which works regardless of how we were invoked. -def getExecScriptMain(env, xml=None): - scons_home = env.get('SCONS_HOME') - if not scons_home and 'SCONS_LIB_DIR' in os.environ: - scons_home = os.environ['SCONS_LIB_DIR'] - if scons_home: - exec_script_main = "from os.path import join; import sys; sys.path = [ r'%s' ] + sys.path; import SCons.Script; SCons.Script.main()" % scons_home - else: - version = SCons.__version__ - exec_script_main = "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-%(version)s'), join(sys.prefix, 'scons-%(version)s'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" % locals() - if xml: - exec_script_main = xmlify(exec_script_main) - return exec_script_main - -# The string for the Python executable we tell the Project file to use -# is either sys.executable or, if an external PYTHON_ROOT environment -# variable exists, $(PYTHON)ROOT\\python.exe (generalized a little to -# pluck the actual executable name from sys.executable). -try: - python_root = os.environ['PYTHON_ROOT'] -except KeyError: - python_executable = sys.executable -else: - python_executable = os.path.join('$$(PYTHON_ROOT)', - os.path.split(sys.executable)[1]) - -class Config(object): - pass - -def splitFully(path): - dir, base = os.path.split(path) - if dir and dir != '' and dir != path: - return splitFully(dir)+[base] - if base == '': - return [] - return [base] - -def makeHierarchy(sources): - """Break a list of files into a hierarchy; for each value, if it is a string, - then it is a file. 
If it is a dictionary, it is a folder. The string is - the original path of the file.""" - - hierarchy = {} - for file in sources: - path = splitFully(file) - if len(path): - dict = hierarchy - for part in path[:-1]: - if part not in dict: - dict[part] = {} - dict = dict[part] - dict[path[-1]] = file - #else: - # print 'Warning: failed to decompose path for '+str(file) - return hierarchy - -class _UserGenerator(object): - ''' - Base class for .dsp.user file generator - ''' - # Default instance values. - # Ok ... a bit defensive, but it does not seem reasonable to crash the - # build for a workspace user file. :-) - usrhead = None - usrdebg = None - usrconf = None - createfile = False - def __init__(self, dspfile, source, env): - # DebugSettings should be a list of debug dictionary sorted in the same order - # as the target list and variants - if 'variant' not in env: - raise SCons.Errors.InternalError("You must specify a 'variant' argument (i.e. 'Debug' or " +\ - "'Release') to create an MSVSProject.") - elif SCons.Util.is_String(env['variant']): - variants = [env['variant']] - elif SCons.Util.is_List(env['variant']): - variants = env['variant'] - - if 'DebugSettings' not in env or env['DebugSettings'] is None: - dbg_settings = [] - elif SCons.Util.is_Dict(env['DebugSettings']): - dbg_settings = [env['DebugSettings']] - elif SCons.Util.is_List(env['DebugSettings']): - if len(env['DebugSettings']) != len(variants): - raise SCons.Errors.InternalError("Sizes of 'DebugSettings' and 'variant' lists must be the same.") - dbg_settings = [] - for ds in env['DebugSettings']: - if SCons.Util.is_Dict(ds): - dbg_settings.append(ds) - else: - dbg_settings.append({}) - else: - dbg_settings = [] - - if len(dbg_settings) == 1: - dbg_settings = dbg_settings * len(variants) - - self.createfile = self.usrhead and self.usrdebg and self.usrconf and \ - dbg_settings and bool([ds for ds in dbg_settings if ds]) - - if self.createfile: - dbg_settings = dict(list(zip(variants, 
dbg_settings))) - for var, src in dbg_settings.items(): - # Update only expected keys - trg = {} - for key in [k for k in list(self.usrdebg.keys()) if k in src]: - trg[key] = str(src[key]) - self.configs[var].debug = trg - - def UserHeader(self): - encoding = self.env.subst('$MSVSENCODING') - versionstr = self.versionstr - self.usrfile.write(self.usrhead % locals()) - - def UserProject(self): - pass - - def Build(self): - if not self.createfile: - return - try: - filename = self.dspabs +'.user' - self.usrfile = open(filename, 'w') - except IOError as detail: - raise SCons.Errors.InternalError('Unable to open "' + filename + '" for writing:' + str(detail)) - else: - self.UserHeader() - self.UserProject() - self.usrfile.close() - -V9UserHeader = """\ - - -\t -""" - -V9UserConfiguration = """\ -\t\t -\t\t\t -\t\t -""" - -V9DebugSettings = { -'Command':'$(TargetPath)', -'WorkingDirectory': None, -'CommandArguments': None, -'Attach':'false', -'DebuggerType':'3', -'Remote':'1', -'RemoteMachine': None, -'RemoteCommand': None, -'HttpUrl': None, -'PDBPath': None, -'SQLDebugging': None, -'Environment': None, -'EnvironmentMerge':'true', -'DebuggerFlavor': None, -'MPIRunCommand': None, -'MPIRunArguments': None, -'MPIRunWorkingDirectory': None, -'ApplicationCommand': None, -'ApplicationArguments': None, -'ShimCommand': None, -'MPIAcceptMode': None, -'MPIAcceptFilter': None, -} - -class _GenerateV7User(_UserGenerator): - """Generates a Project file for MSVS .NET""" - def __init__(self, dspfile, source, env): - if self.version_num >= 9.0: - self.usrhead = V9UserHeader - self.usrconf = V9UserConfiguration - self.usrdebg = V9DebugSettings - _UserGenerator.__init__(self, dspfile, source, env) - - def UserProject(self): - confkeys = sorted(self.configs.keys()) - for kind in confkeys: - variant = self.configs[kind].variant - platform = self.configs[kind].platform - debug = self.configs[kind].debug - if debug: - debug_settings = '\n'.join(['\t\t\t\t%s="%s"' % (key, xmlify(value)) - for 
key, value in debug.items() - if value is not None]) - self.usrfile.write(self.usrconf % locals()) - self.usrfile.write('\t\n') - -V10UserHeader = """\ - - -""" - -V10UserConfiguration = """\ -\t -%(debug_settings)s -\t -""" - -V10DebugSettings = { -'LocalDebuggerCommand': None, -'LocalDebuggerCommandArguments': None, -'LocalDebuggerEnvironment': None, -'DebuggerFlavor': 'WindowsLocalDebugger', -'LocalDebuggerWorkingDirectory': None, -'LocalDebuggerAttach': None, -'LocalDebuggerDebuggerType': None, -'LocalDebuggerMergeEnvironment': None, -'LocalDebuggerSQLDebugging': None, -'RemoteDebuggerCommand': None, -'RemoteDebuggerCommandArguments': None, -'RemoteDebuggerWorkingDirectory': None, -'RemoteDebuggerServerName': None, -'RemoteDebuggerConnection': None, -'RemoteDebuggerDebuggerType': None, -'RemoteDebuggerAttach': None, -'RemoteDebuggerSQLDebugging': None, -'DeploymentDirectory': None, -'AdditionalFiles': None, -'RemoteDebuggerDeployDebugCppRuntime': None, -'WebBrowserDebuggerHttpUrl': None, -'WebBrowserDebuggerDebuggerType': None, -'WebServiceDebuggerHttpUrl': None, -'WebServiceDebuggerDebuggerType': None, -'WebServiceDebuggerSQLDebugging': None, -} - -class _GenerateV10User(_UserGenerator): - """Generates a Project'user file for MSVS 2010 or later""" - - def __init__(self, dspfile, source, env): - version_num, suite = msvs_parse_version(env['MSVS_VERSION']) - if version_num >= 14.2: - # Visual Studio 2019 is considered to be version 16. - self.versionstr = '16.0' - elif version_num >= 14.1: - # Visual Studio 2017 is considered to be version 15. 
- self.versionstr = '15.0' - elif version_num == 14.0: - self.versionstr = '14.0' - else: - self.versionstr = '4.0' - self.usrhead = V10UserHeader - self.usrconf = V10UserConfiguration - self.usrdebg = V10DebugSettings - _UserGenerator.__init__(self, dspfile, source, env) - - def UserProject(self): - confkeys = sorted(self.configs.keys()) - for kind in confkeys: - variant = self.configs[kind].variant - platform = self.configs[kind].platform - debug = self.configs[kind].debug - if debug: - debug_settings = '\n'.join(['\t\t<%s>%s' % (key, xmlify(value), key) - for key, value in debug.items() - if value is not None]) - self.usrfile.write(self.usrconf % locals()) - self.usrfile.write('') - -class _DSPGenerator(object): - """ Base class for DSP generators """ - - srcargs = [ - 'srcs', - 'incs', - 'localincs', - 'resources', - 'misc'] - - def __init__(self, dspfile, source, env): - self.dspfile = str(dspfile) - try: - get_abspath = dspfile.get_abspath - except AttributeError: - self.dspabs = os.path.abspath(dspfile) - else: - self.dspabs = get_abspath() - - if 'variant' not in env: - raise SCons.Errors.InternalError("You must specify a 'variant' argument (i.e. 
'Debug' or " +\ - "'Release') to create an MSVSProject.") - elif SCons.Util.is_String(env['variant']): - variants = [env['variant']] - elif SCons.Util.is_List(env['variant']): - variants = env['variant'] - - if 'buildtarget' not in env or env['buildtarget'] is None: - buildtarget = [''] - elif SCons.Util.is_String(env['buildtarget']): - buildtarget = [env['buildtarget']] - elif SCons.Util.is_List(env['buildtarget']): - if len(env['buildtarget']) != len(variants): - raise SCons.Errors.InternalError("Sizes of 'buildtarget' and 'variant' lists must be the same.") - buildtarget = [] - for bt in env['buildtarget']: - if SCons.Util.is_String(bt): - buildtarget.append(bt) - else: - buildtarget.append(bt.get_abspath()) - else: - buildtarget = [env['buildtarget'].get_abspath()] - if len(buildtarget) == 1: - bt = buildtarget[0] - buildtarget = [] - for _ in variants: - buildtarget.append(bt) - - if 'outdir' not in env or env['outdir'] is None: - outdir = [''] - elif SCons.Util.is_String(env['outdir']): - outdir = [env['outdir']] - elif SCons.Util.is_List(env['outdir']): - if len(env['outdir']) != len(variants): - raise SCons.Errors.InternalError("Sizes of 'outdir' and 'variant' lists must be the same.") - outdir = [] - for s in env['outdir']: - if SCons.Util.is_String(s): - outdir.append(s) - else: - outdir.append(s.get_abspath()) - else: - outdir = [env['outdir'].get_abspath()] - if len(outdir) == 1: - s = outdir[0] - outdir = [] - for v in variants: - outdir.append(s) - - if 'runfile' not in env or env['runfile'] is None: - runfile = buildtarget[-1:] - elif SCons.Util.is_String(env['runfile']): - runfile = [env['runfile']] - elif SCons.Util.is_List(env['runfile']): - if len(env['runfile']) != len(variants): - raise SCons.Errors.InternalError("Sizes of 'runfile' and 'variant' lists must be the same.") - runfile = [] - for s in env['runfile']: - if SCons.Util.is_String(s): - runfile.append(s) - else: - runfile.append(s.get_abspath()) - else: - runfile = 
[env['runfile'].get_abspath()] - if len(runfile) == 1: - s = runfile[0] - runfile = [] - for v in variants: - runfile.append(s) - - self.sconscript = env['MSVSSCONSCRIPT'] - - def GetKeyFromEnv(env, key, variants): - """ - Retrieves a specific key from the environment. If the key is - present, it is expected to either be a string or a list with length - equal to the number of variants. The function returns a list of - the desired value (e.g. cpp include paths) guaranteed to be of - length equal to the length of the variants list. - """ - if key not in env or env[key] is None: - return [''] * len(variants) - elif SCons.Util.is_String(env[key]): - return [env[key]] * len(variants) - elif SCons.Util.is_List(env[key]): - if len(env[key]) != len(variants): - raise SCons.Errors.InternalError("Sizes of '%s' and 'variant' lists must be the same." % key) - else: - return env[key] - else: - raise SCons.Errors.InternalError("Unsupported type for key '%s' in environment: %s" % - (key, type(env[key]))) - - cmdargs = GetKeyFromEnv(env, 'cmdargs', variants) - - # The caller is allowed to put 'cppdefines' and/or 'cpppaths' in the - # environment, which is useful if they want to provide per-variant - # values for these. Otherwise, we fall back to using the global - # 'CPPDEFINES' and 'CPPPATH' functions. 
- if 'cppdefines' in env: - cppdefines = GetKeyFromEnv(env, 'cppdefines', variants) - else: - cppdefines = [env.get('CPPDEFINES', [])] * len(variants) - if 'cpppaths' in env: - cpppaths = GetKeyFromEnv(env, 'cpppaths', variants) - else: - cpppaths = [env.get('CPPPATH', [])] * len(variants) - - self.env = env - - if 'name' in self.env: - self.name = self.env['name'] - else: - self.name = os.path.basename(SCons.Util.splitext(self.dspfile)[0]) - self.name = self.env.subst(self.name) - - sourcenames = [ - 'Source Files', - 'Header Files', - 'Local Headers', - 'Resource Files', - 'Other Files'] - - self.sources = {} - for n in sourcenames: - self.sources[n] = [] - - self.configs = {} - - self.nokeep = 0 - if 'nokeep' in env and env['variant'] != 0: - self.nokeep = 1 - - if self.nokeep == 0 and os.path.exists(self.dspabs): - self.Parse() - - for t in zip(sourcenames,self.srcargs): - if t[1] in self.env: - if SCons.Util.is_List(self.env[t[1]]): - for i in self.env[t[1]]: - if not i in self.sources[t[0]]: - self.sources[t[0]].append(i) - else: - if not self.env[t[1]] in self.sources[t[0]]: - self.sources[t[0]].append(self.env[t[1]]) - - for n in sourcenames: - self.sources[n].sort(key=lambda a: a.lower()) - - def AddConfig(self, variant, buildtarget, outdir, runfile, cmdargs, cppdefines, cpppaths, dspfile=dspfile, env=env): - config = Config() - config.buildtarget = buildtarget - config.outdir = outdir - config.cmdargs = cmdargs - config.cppdefines = cppdefines - config.runfile = runfile - - # Dir objects can't be pickled, so we need an absolute path here. 
- config.cpppaths = processIncludes(cpppaths, env, None, None) - - match = re.match(r'(.*)\|(.*)', variant) - if match: - config.variant = match.group(1) - config.platform = match.group(2) - else: - config.variant = variant - config.platform = 'Win32' - - self.configs[variant] = config - print("Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dspfile) + "'") - - for i in range(len(variants)): - AddConfig(self, variants[i], buildtarget[i], outdir[i], runfile[i], cmdargs[i], cppdefines[i], cpppaths[i]) - - self.platforms = [] - for key in list(self.configs.keys()): - platform = self.configs[key].platform - if platform not in self.platforms: - self.platforms.append(platform) - - def Build(self): - pass - -V6DSPHeader = """\ -# Microsoft Developer Studio Project File - Name="%(name)s" - Package Owner=<4> -# Microsoft Developer Studio Generated Build File, Format Version 6.00 -# ** DO NOT EDIT ** - -# TARGTYPE "Win32 (x86) External Target" 0x0106 - -CFG=%(name)s - Win32 %(confkey)s -!MESSAGE This is not a valid makefile. To build this project using NMAKE, -!MESSAGE use the Export Makefile command and run -!MESSAGE -!MESSAGE NMAKE /f "%(name)s.mak". -!MESSAGE -!MESSAGE You can specify a configuration when running NMAKE -!MESSAGE by defining the macro CFG on the command line. 
For example: -!MESSAGE -!MESSAGE NMAKE /f "%(name)s.mak" CFG="%(name)s - Win32 %(confkey)s" -!MESSAGE -!MESSAGE Possible choices for configuration are: -!MESSAGE -""" - -class _GenerateV6DSP(_DSPGenerator): - """Generates a Project file for MSVS 6.0""" - - def PrintHeader(self): - # pick a default config - confkeys = sorted(self.configs.keys()) - - name = self.name - confkey = confkeys[0] - - self.file.write(V6DSPHeader % locals()) - - for kind in confkeys: - self.file.write('!MESSAGE "%s - Win32 %s" (based on "Win32 (x86) External Target")\n' % (name, kind)) - - self.file.write('!MESSAGE\n\n') - - def PrintProject(self): - name = self.name - self.file.write('# Begin Project\n' - '# PROP AllowPerConfigDependencies 0\n' - '# PROP Scc_ProjName ""\n' - '# PROP Scc_LocalPath ""\n\n') - - first = 1 - confkeys = sorted(self.configs.keys()) - for kind in confkeys: - outdir = self.configs[kind].outdir - buildtarget = self.configs[kind].buildtarget - if first == 1: - self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind)) - first = 0 - else: - self.file.write('\n!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind)) - - env_has_buildtarget = 'MSVSBUILDTARGET' in self.env - if not env_has_buildtarget: - self.env['MSVSBUILDTARGET'] = buildtarget - - # have to write this twice, once with the BASE settings, and once without - for base in ("BASE ",""): - self.file.write('# PROP %sUse_MFC 0\n' - '# PROP %sUse_Debug_Libraries ' % (base, base)) - if 'debug' not in kind.lower(): - self.file.write('0\n') - else: - self.file.write('1\n') - self.file.write('# PROP %sOutput_Dir "%s"\n' - '# PROP %sIntermediate_Dir "%s"\n' % (base,outdir,base,outdir)) - cmd = 'echo Starting SCons && ' + self.env.subst('$MSVSBUILDCOM', 1) - self.file.write('# PROP %sCmd_Line "%s"\n' - '# PROP %sRebuild_Opt "-c && %s"\n' - '# PROP %sTarget_File "%s"\n' - '# PROP %sBsc_Name ""\n' - '# PROP %sTarget_Dir ""\n'\ - %(base,cmd,base,cmd,base,buildtarget,base,base)) - - if not env_has_buildtarget: 
- del self.env['MSVSBUILDTARGET'] - - self.file.write('\n!ENDIF\n\n' - '# Begin Target\n\n') - for kind in confkeys: - self.file.write('# Name "%s - Win32 %s"\n' % (name,kind)) - self.file.write('\n') - first = 0 - for kind in confkeys: - if first == 0: - self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind)) - first = 1 - else: - self.file.write('!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind)) - self.file.write('!ENDIF\n\n') - self.PrintSourceFiles() - self.file.write('# End Target\n' - '# End Project\n') - - if self.nokeep == 0: - # now we pickle some data and add it to the file -- MSDEV will ignore it. - pdata = pickle.dumps(self.configs,PICKLE_PROTOCOL) - pdata = base64.b64encode(pdata).decode() - self.file.write(pdata + '\n') - pdata = pickle.dumps(self.sources,PICKLE_PROTOCOL) - pdata = base64.b64encode(pdata).decode() - self.file.write(pdata + '\n') - - def PrintSourceFiles(self): - categories = {'Source Files': 'cpp|c|cxx|l|y|def|odl|idl|hpj|bat', - 'Header Files': 'h|hpp|hxx|hm|inl', - 'Local Headers': 'h|hpp|hxx|hm|inl', - 'Resource Files': 'r|rc|ico|cur|bmp|dlg|rc2|rct|bin|cnt|rtf|gif|jpg|jpeg|jpe', - 'Other Files': ''} - - for kind in sorted(list(categories.keys()), key=lambda a: a.lower()): - if not self.sources[kind]: - continue # skip empty groups - - self.file.write('# Begin Group "' + kind + '"\n\n') - typelist = categories[kind].replace('|', ';') - self.file.write('# PROP Default_Filter "' + typelist + '"\n') - - for file in self.sources[kind]: - file = os.path.normpath(file) - self.file.write('# Begin Source File\n\n' - 'SOURCE="' + file + '"\n' - '# End Source File\n') - self.file.write('# End Group\n') - - # add the SConscript file outside of the groups - self.file.write('# Begin Source File\n\n' - 'SOURCE="' + str(self.sconscript) + '"\n' - '# End Source File\n') - - def Parse(self): - try: - dspfile = open(self.dspabs,'r') - except IOError: - return # doesn't exist yet, so can't add anything to configs. 
- - line = dspfile.readline() - # skip until marker - while line: - if "# End Project" in line: - break - line = dspfile.readline() - - # read to get configs - line = dspfile.readline() - datas = line - while line and line != '\n': - line = dspfile.readline() - datas = datas + line - - # OK, we've found our little pickled cache of data. - try: - datas = base64.decodestring(datas) - data = pickle.loads(datas) - except KeyboardInterrupt: - raise - except: - return # unable to unpickle any data for some reason - - self.configs.update(data) - - # keep reading to get sources - data = None - line = dspfile.readline() - datas = line - while line and line != '\n': - line = dspfile.readline() - datas = datas + line - dspfile.close() - - # OK, we've found our little pickled cache of data. - # it has a "# " in front of it, so we strip that. - try: - datas = base64.decodestring(datas) - data = pickle.loads(datas) - except KeyboardInterrupt: - raise - except: - return # unable to unpickle any data for some reason - - self.sources.update(data) - - def Build(self): - try: - self.file = open(self.dspabs,'w') - except IOError as detail: - raise SCons.Errors.InternalError('Unable to open "' + self.dspabs + '" for writing:' + str(detail)) - else: - self.PrintHeader() - self.PrintProject() - self.file.close() - -V7DSPHeader = """\ - - -""" - -V7DSPConfiguration = """\ -\t\t -\t\t\t -\t\t -""" - -V8DSPHeader = """\ - - -""" - -V8DSPConfiguration = """\ -\t\t -\t\t\t -\t\t -""" -class _GenerateV7DSP(_DSPGenerator, _GenerateV7User): - """Generates a Project file for MSVS .NET""" - - def __init__(self, dspfile, source, env): - _DSPGenerator.__init__(self, dspfile, source, env) - self.version = env['MSVS_VERSION'] - self.version_num, self.suite = msvs_parse_version(self.version) - if self.version_num >= 9.0: - self.versionstr = '9.00' - self.dspheader = V8DSPHeader - self.dspconfiguration = V8DSPConfiguration - elif self.version_num >= 8.0: - self.versionstr = '8.00' - self.dspheader = 
V8DSPHeader - self.dspconfiguration = V8DSPConfiguration - else: - if self.version_num >= 7.1: - self.versionstr = '7.10' - else: - self.versionstr = '7.00' - self.dspheader = V7DSPHeader - self.dspconfiguration = V7DSPConfiguration - self.file = None - - _GenerateV7User.__init__(self, dspfile, source, env) - - def PrintHeader(self): - env = self.env - versionstr = self.versionstr - name = self.name - encoding = self.env.subst('$MSVSENCODING') - scc_provider = env.get('MSVS_SCC_PROVIDER', '') - scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '') - scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '') - # MSVS_SCC_LOCAL_PATH is kept for backwards compatibility purpose and should - # be deprecated as soon as possible. - scc_local_path_legacy = env.get('MSVS_SCC_LOCAL_PATH', '') - scc_connection_root = env.get('MSVS_SCC_CONNECTION_ROOT', os.curdir) - scc_local_path = os.path.relpath(scc_connection_root, os.path.dirname(self.dspabs)) - project_guid = env.get('MSVS_PROJECT_GUID', '') - if not project_guid: - project_guid = _generateGUID(self.dspfile, '') - if scc_provider != '': - scc_attrs = '\tSccProjectName="%s"\n' % scc_project_name - if scc_aux_path != '': - scc_attrs += '\tSccAuxPath="%s"\n' % scc_aux_path - scc_attrs += ('\tSccLocalPath="%s"\n' - '\tSccProvider="%s"' % (scc_local_path, scc_provider)) - elif scc_local_path_legacy != '': - # This case is kept for backwards compatibility purpose and should - # be deprecated as soon as possible. 
- scc_attrs = ('\tSccProjectName="%s"\n' - '\tSccLocalPath="%s"' % (scc_project_name, scc_local_path_legacy)) - else: - self.dspheader = self.dspheader.replace('%(scc_attrs)s\n', '') - - self.file.write(self.dspheader % locals()) - - self.file.write('\t\n') - for platform in self.platforms: - self.file.write( - '\t\t\n' % platform) - self.file.write('\t\n') - - if self.version_num >= 8.0: - self.file.write('\t\n' - '\t\n') - - def PrintProject(self): - self.file.write('\t\n') - - confkeys = sorted(self.configs.keys()) - for kind in confkeys: - variant = self.configs[kind].variant - platform = self.configs[kind].platform - outdir = self.configs[kind].outdir - buildtarget = self.configs[kind].buildtarget - runfile = self.configs[kind].runfile - cmdargs = self.configs[kind].cmdargs - cpppaths = self.configs[kind].cpppaths - cppdefines = self.configs[kind].cppdefines - - env_has_buildtarget = 'MSVSBUILDTARGET' in self.env - if not env_has_buildtarget: - self.env['MSVSBUILDTARGET'] = buildtarget - - starting = 'echo Starting SCons && ' - if cmdargs: - cmdargs = ' ' + cmdargs - else: - cmdargs = '' - buildcmd = xmlify(starting + self.env.subst('$MSVSBUILDCOM', 1) + cmdargs) - rebuildcmd = xmlify(starting + self.env.subst('$MSVSREBUILDCOM', 1) + cmdargs) - cleancmd = xmlify(starting + self.env.subst('$MSVSCLEANCOM', 1) + cmdargs) - - # This isn't perfect; CPPDEFINES and CPPPATH can contain $TARGET and $SOURCE, - # so they could vary depending on the command being generated. This code - # assumes they don't. 
- preprocdefs = xmlify(';'.join(processDefines(cppdefines))) - includepath = xmlify(';'.join(processIncludes(cpppaths, self.env, None, None))) - - if not env_has_buildtarget: - del self.env['MSVSBUILDTARGET'] - - self.file.write(self.dspconfiguration % locals()) - - self.file.write('\t\n') - - if self.version_num >= 7.1: - self.file.write('\t\n' - '\t\n') - - self.PrintSourceFiles() - - self.file.write('\n') - - if self.nokeep == 0: - # now we pickle some data and add it to the file -- MSDEV will ignore it. - pdata = pickle.dumps(self.configs,PICKLE_PROTOCOL) - pdata = base64.b64encode(pdata).decode() - self.file.write('\n') - - def printSources(self, hierarchy, commonprefix): - sorteditems = sorted(hierarchy.items(), key=lambda a: a[0].lower()) - - # First folders, then files - for key, value in sorteditems: - if SCons.Util.is_Dict(value): - self.file.write('\t\t\t\n' % (key)) - self.printSources(value, commonprefix) - self.file.write('\t\t\t\n') - - for key, value in sorteditems: - if SCons.Util.is_String(value): - file = value - if commonprefix: - file = os.path.join(commonprefix, value) - file = os.path.normpath(file) - self.file.write('\t\t\t\n' - '\t\t\t\n' % (file)) - - def PrintSourceFiles(self): - categories = {'Source Files': 'cpp;c;cxx;l;y;def;odl;idl;hpj;bat', - 'Header Files': 'h;hpp;hxx;hm;inl', - 'Local Headers': 'h;hpp;hxx;hm;inl', - 'Resource Files': 'r;rc;ico;cur;bmp;dlg;rc2;rct;bin;cnt;rtf;gif;jpg;jpeg;jpe', - 'Other Files': ''} - - self.file.write('\t\n') - - cats = sorted([k for k in list(categories.keys()) if self.sources[k]], - key=lambda a: a.lower()) - for kind in cats: - if len(cats) > 1: - self.file.write('\t\t\n' % (kind, categories[kind])) - - sources = self.sources[kind] - - # First remove any common prefix - commonprefix = None - s = list(map(os.path.normpath, sources)) - # take the dirname because the prefix may include parts - # of the filenames (e.g. 
if you have 'dir\abcd' and - # 'dir\acde' then the cp will be 'dir\a' ) - cp = os.path.dirname( os.path.commonprefix(s) ) - if cp and s[0][len(cp)] == os.sep: - # +1 because the filename starts after the separator - sources = [s[len(cp)+1:] for s in sources] - commonprefix = cp - - hierarchy = makeHierarchy(sources) - self.printSources(hierarchy, commonprefix=commonprefix) - - if len(cats)>1: - self.file.write('\t\t\n') - - # add the SConscript file outside of the groups - self.file.write('\t\t\n' - '\t\t\n' % str(self.sconscript)) - - self.file.write('\t\n' - '\t\n' - '\t\n') - - def Parse(self): - try: - dspfile = open(self.dspabs,'r') - except IOError: - return # doesn't exist yet, so can't add anything to configs. - - line = dspfile.readline() - # skip until marker - while line: - if '\n') - - def printFilters(self, hierarchy, name): - sorteditems = sorted(hierarchy.items(), key = lambda a: a[0].lower()) - - for key, value in sorteditems: - if SCons.Util.is_Dict(value): - filter_name = name + '\\' + key - self.filters_file.write('\t\t\n' - '\t\t\t%s\n' - '\t\t\n' % (filter_name, _generateGUID(self.dspabs, filter_name))) - self.printFilters(value, filter_name) - - def printSources(self, hierarchy, kind, commonprefix, filter_name): - keywords = {'Source Files': 'ClCompile', - 'Header Files': 'ClInclude', - 'Local Headers': 'ClInclude', - 'Resource Files': 'None', - 'Other Files': 'None'} - - sorteditems = sorted(hierarchy.items(), key = lambda a: a[0].lower()) - - # First folders, then files - for key, value in sorteditems: - if SCons.Util.is_Dict(value): - self.printSources(value, kind, commonprefix, filter_name + '\\' + key) - - for key, value in sorteditems: - if SCons.Util.is_String(value): - file = value - if commonprefix: - file = os.path.join(commonprefix, value) - file = os.path.normpath(file) - - self.file.write('\t\t<%s Include="%s" />\n' % (keywords[kind], file)) - self.filters_file.write('\t\t<%s Include="%s">\n' - '\t\t\t%s\n' - '\t\t\n' % 
(keywords[kind], file, filter_name, keywords[kind])) - - def PrintSourceFiles(self): - categories = {'Source Files': 'cpp;c;cxx;l;y;def;odl;idl;hpj;bat', - 'Header Files': 'h;hpp;hxx;hm;inl', - 'Local Headers': 'h;hpp;hxx;hm;inl', - 'Resource Files': 'r;rc;ico;cur;bmp;dlg;rc2;rct;bin;cnt;rtf;gif;jpg;jpeg;jpe', - 'Other Files': ''} - - cats = sorted([k for k in list(categories.keys()) if self.sources[k]], - key = lambda a: a.lower()) - - # print vcxproj.filters file first - self.filters_file.write('\t\n') - for kind in cats: - self.filters_file.write('\t\t\n' - '\t\t\t{7b42d31d-d53c-4868-8b92-ca2bc9fc052f}\n' - '\t\t\t%s\n' - '\t\t\n' % (kind, categories[kind])) - - # First remove any common prefix - sources = self.sources[kind] - commonprefix = None - s = list(map(os.path.normpath, sources)) - # take the dirname because the prefix may include parts - # of the filenames (e.g. if you have 'dir\abcd' and - # 'dir\acde' then the cp will be 'dir\a' ) - cp = os.path.dirname( os.path.commonprefix(s) ) - if cp and s[0][len(cp)] == os.sep: - # +1 because the filename starts after the separator - sources = [s[len(cp)+1:] for s in sources] - commonprefix = cp - - hierarchy = makeHierarchy(sources) - self.printFilters(hierarchy, kind) - - self.filters_file.write('\t\n') - - # then print files and filters - for kind in cats: - self.file.write('\t\n') - self.filters_file.write('\t\n') - - # First remove any common prefix - sources = self.sources[kind] - commonprefix = None - s = list(map(os.path.normpath, sources)) - # take the dirname because the prefix may include parts - # of the filenames (e.g. 
if you have 'dir\abcd' and - # 'dir\acde' then the cp will be 'dir\a' ) - cp = os.path.dirname( os.path.commonprefix(s) ) - if cp and s[0][len(cp)] == os.sep: - # +1 because the filename starts after the separator - sources = [s[len(cp)+1:] for s in sources] - commonprefix = cp - - hierarchy = makeHierarchy(sources) - self.printSources(hierarchy, kind, commonprefix, kind) - - self.file.write('\t\n') - self.filters_file.write('\t\n') - - # add the SConscript file outside of the groups - self.file.write('\t\n' - '\t\t\n' - #'\t\t\n' - '\t\n' % str(self.sconscript)) - - def Parse(self): - print("_GenerateV10DSP.Parse()") - - def Build(self): - try: - self.file = open(self.dspabs, 'w') - except IOError as detail: - raise SCons.Errors.InternalError('Unable to open "' + self.dspabs + '" for writing:' + str(detail)) - else: - self.PrintHeader() - self.PrintProject() - self.file.close() - - _GenerateV10User.Build(self) - -class _DSWGenerator(object): - """ Base class for DSW generators """ - def __init__(self, dswfile, source, env): - self.dswfile = os.path.normpath(str(dswfile)) - self.dsw_folder_path = os.path.dirname(os.path.abspath(self.dswfile)) - self.env = env - - if 'projects' not in env: - raise SCons.Errors.UserError("You must specify a 'projects' argument to create an MSVSSolution.") - projects = env['projects'] - if not SCons.Util.is_List(projects): - raise SCons.Errors.InternalError("The 'projects' argument must be a list of nodes.") - projects = SCons.Util.flatten(projects) - if len(projects) < 1: - raise SCons.Errors.UserError("You must specify at least one project to create an MSVSSolution.") - self.dspfiles = list(map(str, projects)) - - if 'name' in self.env: - self.name = self.env['name'] - else: - self.name = os.path.basename(SCons.Util.splitext(self.dswfile)[0]) - self.name = self.env.subst(self.name) - - def Build(self): - pass - -class _GenerateV7DSW(_DSWGenerator): - """Generates a Solution file for MSVS .NET""" - def __init__(self, dswfile, source, 
env): - _DSWGenerator.__init__(self, dswfile, source, env) - - self.file = None - self.version = self.env['MSVS_VERSION'] - self.version_num, self.suite = msvs_parse_version(self.version) - self.versionstr = '7.00' - if self.version_num >= 11.0: - self.versionstr = '12.00' - elif self.version_num >= 10.0: - self.versionstr = '11.00' - elif self.version_num >= 9.0: - self.versionstr = '10.00' - elif self.version_num >= 8.0: - self.versionstr = '9.00' - elif self.version_num >= 7.1: - self.versionstr = '8.00' - - if 'slnguid' in env and env['slnguid']: - self.slnguid = env['slnguid'] - else: - self.slnguid = _generateGUID(dswfile, self.name) - - self.configs = {} - - self.nokeep = 0 - if 'nokeep' in env and env['variant'] != 0: - self.nokeep = 1 - - if self.nokeep == 0 and os.path.exists(self.dswfile): - self.Parse() - - def AddConfig(self, variant, dswfile=dswfile): - config = Config() - - match = re.match(r'(.*)\|(.*)', variant) - if match: - config.variant = match.group(1) - config.platform = match.group(2) - else: - config.variant = variant - config.platform = 'Win32' - - self.configs[variant] = config - print("Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dswfile) + "'") - - if 'variant' not in env: - raise SCons.Errors.InternalError("You must specify a 'variant' argument (i.e. 
'Debug' or " +\ - "'Release') to create an MSVS Solution File.") - elif SCons.Util.is_String(env['variant']): - AddConfig(self, env['variant']) - elif SCons.Util.is_List(env['variant']): - for variant in env['variant']: - AddConfig(self, variant) - - self.platforms = [] - for key in list(self.configs.keys()): - platform = self.configs[key].platform - if platform not in self.platforms: - self.platforms.append(platform) - - def GenerateProjectFilesInfo(self): - for dspfile in self.dspfiles: - dsp_folder_path, name = os.path.split(dspfile) - dsp_folder_path = os.path.abspath(dsp_folder_path) - if SCons.Util.splitext(name)[1] == '.filters': - # Ignore .filters project files - continue - dsp_relative_folder_path = os.path.relpath(dsp_folder_path, self.dsw_folder_path) - if dsp_relative_folder_path == os.curdir: - dsp_relative_file_path = name - else: - dsp_relative_file_path = os.path.join(dsp_relative_folder_path, name) - dspfile_info = {'NAME': name, - 'GUID': _generateGUID(dspfile, ''), - 'FOLDER_PATH': dsp_folder_path, - 'FILE_PATH': dspfile, - 'SLN_RELATIVE_FOLDER_PATH': dsp_relative_folder_path, - 'SLN_RELATIVE_FILE_PATH': dsp_relative_file_path} - self.dspfiles_info.append(dspfile_info) - - self.dspfiles_info = [] - GenerateProjectFilesInfo(self) - - def Parse(self): - try: - dswfile = open(self.dswfile,'r') - except IOError: - return # doesn't exist yet, so can't add anything to configs. - - line = dswfile.readline() - while line: - if line[:9] == "EndGlobal": - break - line = dswfile.readline() - - line = dswfile.readline() - datas = line - while line: - line = dswfile.readline() - datas = datas + line - dswfile.close() - - # OK, we've found our little pickled cache of data. 
- try: - datas = base64.decodestring(datas) - data = pickle.loads(datas) - except KeyboardInterrupt: - raise - except: - return # unable to unpickle any data for some reason - - self.configs.update(data) - - def PrintSolution(self): - """Writes a solution file""" - self.file.write('Microsoft Visual Studio Solution File, Format Version %s\n' % self.versionstr) - if self.version_num >= 14.2: - # Visual Studio 2019 is considered to be version 16. - self.file.write('# Visual Studio 16\n') - elif self.version_num > 14.0: - # Visual Studio 2015 and 2017 are both considered to be version 15. - self.file.write('# Visual Studio 15\n') - elif self.version_num >= 12.0: - self.file.write('# Visual Studio 14\n') - elif self.version_num >= 11.0: - self.file.write('# Visual Studio 11\n') - elif self.version_num >= 10.0: - self.file.write('# Visual Studio 2010\n') - elif self.version_num >= 9.0: - self.file.write('# Visual Studio 2008\n') - elif self.version_num >= 8.0: - self.file.write('# Visual Studio 2005\n') - - for dspinfo in self.dspfiles_info: - name = dspinfo['NAME'] - base, suffix = SCons.Util.splitext(name) - if suffix == '.vcproj': - name = base - self.file.write('Project("%s") = "%s", "%s", "%s"\n' - % (external_makefile_guid, name, dspinfo['SLN_RELATIVE_FILE_PATH'], dspinfo['GUID'])) - if 7.1 <= self.version_num < 8.0: - self.file.write('\tProjectSection(ProjectDependencies) = postProject\n' - '\tEndProjectSection\n') - self.file.write('EndProject\n') - - self.file.write('Global\n') - - env = self.env - if 'MSVS_SCC_PROVIDER' in env: - scc_number_of_projects = len(self.dspfiles) + 1 - slnguid = self.slnguid - scc_provider = env.get('MSVS_SCC_PROVIDER', '').replace(' ', r'\u0020') - scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '').replace(' ', r'\u0020') - scc_connection_root = env.get('MSVS_SCC_CONNECTION_ROOT', os.curdir) - scc_local_path = os.path.relpath(scc_connection_root, self.dsw_folder_path).replace('\\', '\\\\') - 
self.file.write('\tGlobalSection(SourceCodeControl) = preSolution\n' - '\t\tSccNumberOfProjects = %(scc_number_of_projects)d\n' - '\t\tSccProjectName0 = %(scc_project_name)s\n' - '\t\tSccLocalPath0 = %(scc_local_path)s\n' - '\t\tSccProvider0 = %(scc_provider)s\n' - '\t\tCanCheckoutShared = true\n' % locals()) - sln_relative_path_from_scc = os.path.relpath(self.dsw_folder_path, scc_connection_root) - if sln_relative_path_from_scc != os.curdir: - self.file.write('\t\tSccProjectFilePathRelativizedFromConnection0 = %s\\\\\n' - % sln_relative_path_from_scc.replace('\\', '\\\\')) - if self.version_num < 8.0: - # When present, SolutionUniqueID is automatically removed by VS 2005 - # TODO: check for Visual Studio versions newer than 2005 - self.file.write('\t\tSolutionUniqueID = %s\n' % slnguid) - for dspinfo in self.dspfiles_info: - i = self.dspfiles_info.index(dspinfo) + 1 - dsp_relative_file_path = dspinfo['SLN_RELATIVE_FILE_PATH'].replace('\\', '\\\\') - dsp_scc_relative_folder_path = os.path.relpath(dspinfo['FOLDER_PATH'], scc_connection_root).replace('\\', '\\\\') - self.file.write('\t\tSccProjectUniqueName%(i)s = %(dsp_relative_file_path)s\n' - '\t\tSccLocalPath%(i)d = %(scc_local_path)s\n' - '\t\tCanCheckoutShared = true\n' - '\t\tSccProjectFilePathRelativizedFromConnection%(i)s = %(dsp_scc_relative_folder_path)s\\\\\n' - % locals()) - self.file.write('\tEndGlobalSection\n') - if self.version_num >= 8.0: - self.file.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n') - else: - self.file.write('\tGlobalSection(SolutionConfiguration) = preSolution\n') - - confkeys = sorted(self.configs.keys()) - cnt = 0 - for name in confkeys: - variant = self.configs[name].variant - platform = self.configs[name].platform - if self.version_num >= 8.0: - self.file.write('\t\t%s|%s = %s|%s\n' % (variant, platform, variant, platform)) - else: - self.file.write('\t\tConfigName.%d = %s\n' % (cnt, variant)) - cnt = cnt + 1 - self.file.write('\tEndGlobalSection\n') - if 
self.version_num <= 7.1: - self.file.write('\tGlobalSection(ProjectDependencies) = postSolution\n' - '\tEndGlobalSection\n') - if self.version_num >= 8.0: - self.file.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n') - else: - self.file.write('\tGlobalSection(ProjectConfiguration) = postSolution\n') - - for name in confkeys: - variant = self.configs[name].variant - platform = self.configs[name].platform - if self.version_num >= 8.0: - for dspinfo in self.dspfiles_info: - guid = dspinfo['GUID'] - self.file.write('\t\t%s.%s|%s.ActiveCfg = %s|%s\n' - '\t\t%s.%s|%s.Build.0 = %s|%s\n' % (guid,variant,platform,variant,platform,guid,variant,platform,variant,platform)) - else: - for dspinfo in self.dspfiles_info: - guid = dspinfo['GUID'] - self.file.write('\t\t%s.%s.ActiveCfg = %s|%s\n' - '\t\t%s.%s.Build.0 = %s|%s\n' %(guid,variant,variant,platform,guid,variant,variant,platform)) - - self.file.write('\tEndGlobalSection\n') - - if self.version_num >= 8.0: - self.file.write('\tGlobalSection(SolutionProperties) = preSolution\n' - '\t\tHideSolutionNode = FALSE\n' - '\tEndGlobalSection\n') - else: - self.file.write('\tGlobalSection(ExtensibilityGlobals) = postSolution\n' - '\tEndGlobalSection\n' - '\tGlobalSection(ExtensibilityAddIns) = postSolution\n' - '\tEndGlobalSection\n') - self.file.write('EndGlobal\n') - if self.nokeep == 0: - pdata = pickle.dumps(self.configs,PICKLE_PROTOCOL) - pdata = base64.b64encode(pdata).decode() - self.file.write(pdata) - self.file.write('\n') - - def Build(self): - try: - self.file = open(self.dswfile,'w') - except IOError as detail: - raise SCons.Errors.InternalError('Unable to open "' + self.dswfile + '" for writing:' + str(detail)) - else: - self.PrintSolution() - self.file.close() - -V6DSWHeader = """\ -Microsoft Developer Studio Workspace File, Format Version 6.00 -# WARNING: DO NOT EDIT OR DELETE THIS WORKSPACE FILE! 
- -############################################################################### - -Project: "%(name)s"="%(dspfile)s" - Package Owner=<4> - -Package=<5> -{{{ -}}} - -Package=<4> -{{{ -}}} - -############################################################################### - -Global: - -Package=<5> -{{{ -}}} - -Package=<3> -{{{ -}}} - -############################################################################### -""" - -class _GenerateV6DSW(_DSWGenerator): - """Generates a Workspace file for MSVS 6.0""" - - def PrintWorkspace(self): - """ writes a DSW file """ - name = self.name - dspfile = os.path.relpath(self.dspfiles[0], self.dsw_folder_path) - self.file.write(V6DSWHeader % locals()) - - def Build(self): - try: - self.file = open(self.dswfile,'w') - except IOError as detail: - raise SCons.Errors.InternalError('Unable to open "' + self.dswfile + '" for writing:' + str(detail)) - else: - self.PrintWorkspace() - self.file.close() - - -def GenerateDSP(dspfile, source, env): - """Generates a Project file based on the version of MSVS that is being used""" - - version_num = 6.0 - if 'MSVS_VERSION' in env: - version_num, suite = msvs_parse_version(env['MSVS_VERSION']) - if version_num >= 10.0: - g = _GenerateV10DSP(dspfile, source, env) - g.Build() - elif version_num >= 7.0: - g = _GenerateV7DSP(dspfile, source, env) - g.Build() - else: - g = _GenerateV6DSP(dspfile, source, env) - g.Build() - -def GenerateDSW(dswfile, source, env): - """Generates a Solution/Workspace file based on the version of MSVS that is being used""" - - version_num = 6.0 - if 'MSVS_VERSION' in env: - version_num, suite = msvs_parse_version(env['MSVS_VERSION']) - if version_num >= 7.0: - g = _GenerateV7DSW(dswfile, source, env) - g.Build() - else: - g = _GenerateV6DSW(dswfile, source, env) - g.Build() - - -############################################################################## -# Above here are the classes and functions for generation of -# DSP/DSW/SLN/VCPROJ files. 
-############################################################################## - -def GetMSVSProjectSuffix(target, source, env, for_signature): - return env['MSVS']['PROJECTSUFFIX'] - -def GetMSVSSolutionSuffix(target, source, env, for_signature): - return env['MSVS']['SOLUTIONSUFFIX'] - -def GenerateProject(target, source, env): - # generate the dsp file, according to the version of MSVS. - builddspfile = target[0] - dspfile = builddspfile.srcnode() - - # this detects whether or not we're using a VariantDir - if dspfile is not builddspfile: - try: - bdsp = open(str(builddspfile), "w+") - except IOError as detail: - print('Unable to open "' + str(dspfile) + '" for writing:',detail,'\n') - raise - - bdsp.write("This is just a placeholder file.\nThe real project file is here:\n%s\n" % dspfile.get_abspath()) - bdsp.close() - - GenerateDSP(dspfile, source, env) - - if env.get('auto_build_solution', 1): - builddswfile = target[1] - dswfile = builddswfile.srcnode() - - if dswfile is not builddswfile: - - try: - bdsw = open(str(builddswfile), "w+") - except IOError as detail: - print('Unable to open "' + str(dspfile) + '" for writing:',detail,'\n') - raise - - bdsw.write("This is just a placeholder file.\nThe real workspace file is here:\n%s\n" % dswfile.get_abspath()) - bdsw.close() - - GenerateDSW(dswfile, source, env) - -def GenerateSolution(target, source, env): - GenerateDSW(target[0], source, env) - -def projectEmitter(target, source, env): - """Sets up the DSP dependencies.""" - - # todo: Not sure what sets source to what user has passed as target, - # but this is what happens. When that is fixed, we also won't have - # to make the user always append env['MSVSPROJECTSUFFIX'] to target. - if source[0] == target[0]: - source = [] - - # make sure the suffix is correct for the version of MSVS we're running. 
- (base, suff) = SCons.Util.splitext(str(target[0])) - suff = env.subst('$MSVSPROJECTSUFFIX') - target[0] = base + suff - - if not source: - source = 'prj_inputs:' - source = source + env.subst('$MSVSSCONSCOM', 1) - source = source + env.subst('$MSVSENCODING', 1) - - # Project file depends on CPPDEFINES and CPPPATH - preprocdefs = xmlify(';'.join(processDefines(env.get('CPPDEFINES', [])))) - includepath = xmlify(';'.join(processIncludes(env.get('CPPPATH', []), env, None, None))) - source = source + "; ppdefs:%s incpath:%s"%(preprocdefs, includepath) - - if 'buildtarget' in env and env['buildtarget'] is not None: - if SCons.Util.is_String(env['buildtarget']): - source = source + ' "%s"' % env['buildtarget'] - elif SCons.Util.is_List(env['buildtarget']): - for bt in env['buildtarget']: - if SCons.Util.is_String(bt): - source = source + ' "%s"' % bt - else: - try: source = source + ' "%s"' % bt.get_abspath() - except AttributeError: raise SCons.Errors.InternalError("buildtarget can be a string, a node, a list of strings or nodes, or None") - else: - try: source = source + ' "%s"' % env['buildtarget'].get_abspath() - except AttributeError: raise SCons.Errors.InternalError("buildtarget can be a string, a node, a list of strings or nodes, or None") - - if 'outdir' in env and env['outdir'] is not None: - if SCons.Util.is_String(env['outdir']): - source = source + ' "%s"' % env['outdir'] - elif SCons.Util.is_List(env['outdir']): - for s in env['outdir']: - if SCons.Util.is_String(s): - source = source + ' "%s"' % s - else: - try: source = source + ' "%s"' % s.get_abspath() - except AttributeError: raise SCons.Errors.InternalError("outdir can be a string, a node, a list of strings or nodes, or None") - else: - try: source = source + ' "%s"' % env['outdir'].get_abspath() - except AttributeError: raise SCons.Errors.InternalError("outdir can be a string, a node, a list of strings or nodes, or None") - - if 'name' in env: - if SCons.Util.is_String(env['name']): - source = 
source + ' "%s"' % env['name'] - else: - raise SCons.Errors.InternalError("name must be a string") - - if 'variant' in env: - if SCons.Util.is_String(env['variant']): - source = source + ' "%s"' % env['variant'] - elif SCons.Util.is_List(env['variant']): - for variant in env['variant']: - if SCons.Util.is_String(variant): - source = source + ' "%s"' % variant - else: - raise SCons.Errors.InternalError("name must be a string or a list of strings") - else: - raise SCons.Errors.InternalError("variant must be a string or a list of strings") - else: - raise SCons.Errors.InternalError("variant must be specified") - - for s in _DSPGenerator.srcargs: - if s in env: - if SCons.Util.is_String(env[s]): - source = source + ' "%s' % env[s] - elif SCons.Util.is_List(env[s]): - for t in env[s]: - if SCons.Util.is_String(t): - source = source + ' "%s"' % t - else: - raise SCons.Errors.InternalError(s + " must be a string or a list of strings") - else: - raise SCons.Errors.InternalError(s + " must be a string or a list of strings") - - source = source + ' "%s"' % str(target[0]) - source = [SCons.Node.Python.Value(source)] - - targetlist = [target[0]] - sourcelist = source - - if env.get('auto_build_solution', 1): - env['projects'] = [env.File(t).srcnode() for t in targetlist] - t, s = solutionEmitter(target, target, env) - targetlist = targetlist + t - - # Beginning with Visual Studio 2010 for each project file (.vcxproj) we have additional file (.vcxproj.filters) - version_num = 6.0 - if 'MSVS_VERSION' in env: - version_num, suite = msvs_parse_version(env['MSVS_VERSION']) - if version_num >= 10.0: - targetlist.append(targetlist[0] + '.filters') - - return (targetlist, sourcelist) - -def solutionEmitter(target, source, env): - """Sets up the DSW dependencies.""" - - # todo: Not sure what sets source to what user has passed as target, - # but this is what happens. When that is fixed, we also won't have - # to make the user always append env['MSVSSOLUTIONSUFFIX'] to target. 
- if source[0] == target[0]: - source = [] - - # make sure the suffix is correct for the version of MSVS we're running. - (base, suff) = SCons.Util.splitext(str(target[0])) - suff = env.subst('$MSVSSOLUTIONSUFFIX') - target[0] = base + suff - - if not source: - source = 'sln_inputs:' - - if 'name' in env: - if SCons.Util.is_String(env['name']): - source = source + ' "%s"' % env['name'] - else: - raise SCons.Errors.InternalError("name must be a string") - - if 'variant' in env: - if SCons.Util.is_String(env['variant']): - source = source + ' "%s"' % env['variant'] - elif SCons.Util.is_List(env['variant']): - for variant in env['variant']: - if SCons.Util.is_String(variant): - source = source + ' "%s"' % variant - else: - raise SCons.Errors.InternalError("name must be a string or a list of strings") - else: - raise SCons.Errors.InternalError("variant must be a string or a list of strings") - else: - raise SCons.Errors.InternalError("variant must be specified") - - if 'slnguid' in env: - if SCons.Util.is_String(env['slnguid']): - source = source + ' "%s"' % env['slnguid'] - else: - raise SCons.Errors.InternalError("slnguid must be a string") - - if 'projects' in env: - if SCons.Util.is_String(env['projects']): - source = source + ' "%s"' % env['projects'] - elif SCons.Util.is_List(env['projects']): - for t in env['projects']: - if SCons.Util.is_String(t): - source = source + ' "%s"' % t - - source = source + ' "%s"' % str(target[0]) - source = [SCons.Node.Python.Value(source)] - - return ([target[0]], source) - -projectAction = SCons.Action.Action(GenerateProject, None) - -solutionAction = SCons.Action.Action(GenerateSolution, None) - -projectBuilder = SCons.Builder.Builder(action = '$MSVSPROJECTCOM', - suffix = '$MSVSPROJECTSUFFIX', - emitter = projectEmitter) - -solutionBuilder = SCons.Builder.Builder(action = '$MSVSSOLUTIONCOM', - suffix = '$MSVSSOLUTIONSUFFIX', - emitter = solutionEmitter) - -default_MSVS_SConscript = None - -def generate(env): - """Add Builders 
and construction variables for Microsoft Visual - Studio project files to an Environment.""" - try: - env['BUILDERS']['MSVSProject'] - except KeyError: - env['BUILDERS']['MSVSProject'] = projectBuilder - - try: - env['BUILDERS']['MSVSSolution'] - except KeyError: - env['BUILDERS']['MSVSSolution'] = solutionBuilder - - env['MSVSPROJECTCOM'] = projectAction - env['MSVSSOLUTIONCOM'] = solutionAction - - if SCons.Script.call_stack: - # XXX Need to find a way to abstract this; the build engine - # shouldn't depend on anything in SCons.Script. - env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript - else: - global default_MSVS_SConscript - if default_MSVS_SConscript is None: - default_MSVS_SConscript = env.File('SConstruct') - env['MSVSSCONSCRIPT'] = default_MSVS_SConscript - - # Allow consumers to provide their own versions of MSVSSCONS and - # MSVSSCONSFLAGS. This helps support consumers who use wrapper scripts to - # invoke scons. - if 'MSVSSCONS' not in env: - env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env)) - if 'MSVSSCONSFLAGS' not in env: - env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.get_abspath()}" -f ${MSVSSCONSCRIPT.name}' - - env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS' - env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"' - env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"' - env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"' - - # Set-up ms tools paths for default version - msvc_setup_env_once(env) - - if 'MSVS_VERSION' in env: - version_num, suite = msvs_parse_version(env['MSVS_VERSION']) - else: - (version_num, suite) = (7.0, None) # guess at a default - if 'MSVS' not in env: - env['MSVS'] = {} - if (version_num < 7.0): - env['MSVS']['PROJECTSUFFIX'] = '.dsp' - env['MSVS']['SOLUTIONSUFFIX'] = '.dsw' - elif (version_num < 10.0): - env['MSVS']['PROJECTSUFFIX'] = '.vcproj' - env['MSVS']['SOLUTIONSUFFIX'] = '.sln' - else: - env['MSVS']['PROJECTSUFFIX'] = '.vcxproj' - 
env['MSVS']['SOLUTIONSUFFIX'] = '.sln' - - if (version_num >= 10.0): - env['MSVSENCODING'] = 'utf-8' - else: - env['MSVSENCODING'] = 'Windows-1252' - - env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix - env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix - env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}' - env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}' - env['SCONS_HOME'] = os.environ.get('SCONS_HOME') - -def exists(env): - return msvc_exists(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mwcc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mwcc.py deleted file mode 100644 index 341a3efcc2f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mwcc.py +++ /dev/null @@ -1,207 +0,0 @@ -"""SCons.Tool.mwcc - -Tool-specific initialization for the Metrowerks CodeWarrior compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/mwcc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path - -import SCons.Util - -def set_vars(env): - """Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars - - MWCW_VERSIONS is set to a list of objects representing installed versions - - MWCW_VERSION is set to the version object that will be used for building. - MWCW_VERSION can be set to a string during Environment - construction to influence which version is chosen, otherwise - the latest one from MWCW_VERSIONS is used. 
- - Returns true if at least one version is found, false otherwise - """ - desired = env.get('MWCW_VERSION', '') - - # return right away if the variables are already set - if isinstance(desired, MWVersion): - return 1 - elif desired is None: - return 0 - - versions = find_versions() - version = None - - if desired: - for v in versions: - if str(v) == desired: - version = v - elif versions: - version = versions[-1] - - env['MWCW_VERSIONS'] = versions - env['MWCW_VERSION'] = version - - if version is None: - return 0 - - env.PrependENVPath('PATH', version.clpath) - env.PrependENVPath('PATH', version.dllpath) - ENV = env['ENV'] - ENV['CWFolder'] = version.path - ENV['LM_LICENSE_FILE'] = version.license - plus = lambda x: '+%s' % x - ENV['MWCIncludes'] = os.pathsep.join(map(plus, version.includes)) - ENV['MWLibraries'] = os.pathsep.join(map(plus, version.libs)) - return 1 - - -def find_versions(): - """Return a list of MWVersion objects representing installed versions""" - versions = [] - - ### This function finds CodeWarrior by reading from the registry on - ### Windows. 
Some other method needs to be implemented for other - ### platforms, maybe something that calls env.WhereIs('mwcc') - - if SCons.Util.can_read_reg: - try: - HLM = SCons.Util.HKEY_LOCAL_MACHINE - product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions' - product_key = SCons.Util.RegOpenKeyEx(HLM, product) - - i = 0 - while True: - name = product + '\\' + SCons.Util.RegEnumKey(product_key, i) - name_key = SCons.Util.RegOpenKeyEx(HLM, name) - - try: - version = SCons.Util.RegQueryValueEx(name_key, 'VERSION') - path = SCons.Util.RegQueryValueEx(name_key, 'PATH') - mwv = MWVersion(version[0], path[0], 'Win32-X86') - versions.append(mwv) - except SCons.Util.RegError: - pass - - i = i + 1 - - except SCons.Util.RegError: - pass - - return versions - - -class MWVersion(object): - def __init__(self, version, path, platform): - self.version = version - self.path = path - self.platform = platform - self.clpath = os.path.join(path, 'Other Metrowerks Tools', - 'Command Line Tools') - self.dllpath = os.path.join(path, 'Bin') - - # The Metrowerks tools don't store any configuration data so they - # are totally dumb when it comes to locating standard headers, - # libraries, and other files, expecting all the information - # to be handed to them in environment variables. The members set - # below control what information scons injects into the environment - - ### The paths below give a normal build environment in CodeWarrior for - ### Windows, other versions of CodeWarrior might need different paths. 
- - msl = os.path.join(path, 'MSL') - support = os.path.join(path, '%s Support' % platform) - - self.license = os.path.join(path, 'license.dat') - self.includes = [msl, support] - self.libs = [msl, support] - - def __str__(self): - return self.version - - -CSuffixes = ['.c', '.C'] -CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++'] - - -def generate(env): - """Add Builders and construction variables for the mwcc to an Environment.""" - import SCons.Defaults - import SCons.Tool - - set_vars(env) - - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in CSuffixes: - static_obj.add_action(suffix, SCons.Defaults.CAction) - shared_obj.add_action(suffix, SCons.Defaults.ShCAction) - - for suffix in CXXSuffixes: - static_obj.add_action(suffix, SCons.Defaults.CXXAction) - shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) - - env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES' - - env['CC'] = 'mwcc' - env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS' - - env['CXX'] = 'mwcc' - env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS' - - env['SHCC'] = '$CC' - env['SHCCFLAGS'] = '$CCFLAGS' - env['SHCFLAGS'] = '$CFLAGS' - env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS' - - env['SHCXX'] = '$CXX' - env['SHCXXFLAGS'] = '$CXXFLAGS' - env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS' - - env['CFILESUFFIX'] = '.c' - env['CXXFILESUFFIX'] = '.cpp' - env['CPPDEFPREFIX'] = '-D' - env['CPPDEFSUFFIX'] = '' - env['INCPREFIX'] = '-I' - env['INCSUFFIX'] = '' - - #env['PCH'] = ? - #env['PCHSTOP'] = ? 
- - -def exists(env): - return set_vars(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mwld.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mwld.py deleted file mode 100644 index 5af02a674e1..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/mwld.py +++ /dev/null @@ -1,108 +0,0 @@ -"""SCons.Tool.mwld - -Tool-specific initialization for the Metrowerks CodeWarrior linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/mwld.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Tool - - -def generate(env): - """Add Builders and construction variables for lib to an Environment.""" - SCons.Tool.createStaticLibBuilder(env) - SCons.Tool.createSharedLibBuilder(env) - SCons.Tool.createProgBuilder(env) - - env['AR'] = 'mwld' - env['ARCOM'] = '$AR $ARFLAGS -library -o $TARGET $SOURCES' - - env['LIBDIRPREFIX'] = '-L' - env['LIBDIRSUFFIX'] = '' - env['LIBLINKPREFIX'] = '-l' - env['LIBLINKSUFFIX'] = '.lib' - - env['LINK'] = 'mwld' - env['LINKCOM'] = '$LINK $LINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - - env['SHLINK'] = '$LINK' - env['SHLINKFLAGS'] = '$LINKFLAGS' - env['SHLINKCOM'] = shlib_action - env['SHLIBEMITTER']= shlib_emitter - env['LDMODULEEMITTER']= shlib_emitter - - -def exists(env): - import SCons.Tool.mwcc - return SCons.Tool.mwcc.set_vars(env) - - -def shlib_generator(target, source, env, for_signature): - cmd = ['$SHLINK', '$SHLINKFLAGS', '-shared'] - - no_import_lib = env.get('no_import_lib', 0) - if no_import_lib: cmd.extend('-noimplib') - - dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') - if dll: cmd.extend(['-o', dll]) - - implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') - if implib: cmd.extend(['-implib', implib.get_string(for_signature)]) - - cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS']) - - return [cmd] - - -def shlib_emitter(target, source, env): - dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') - no_import_lib = env.get('no_import_lib', 0) - - if not dll: - raise SCons.Errors.UserError("A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX")) - - if not no_import_lib and \ - not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'): - - # Append an import library to the list of targets. 
- target.append(env.ReplaceIxes(dll, - 'SHLIBPREFIX', 'SHLIBSUFFIX', - 'LIBPREFIX', 'LIBSUFFIX')) - - return target, source - - -shlib_action = SCons.Action.Action(shlib_generator, generator=1) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/nasm.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/nasm.py deleted file mode 100644 index f08930e48e7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/nasm.py +++ /dev/null @@ -1,72 +0,0 @@ -"""SCons.Tool.nasm - -Tool-specific initialization for nasm, the famous Netwide Assembler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/nasm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -ASSuffixes = ['.s', '.asm', '.ASM'] -ASPPSuffixes = ['.spp', '.SPP', '.sx'] -if SCons.Util.case_sensitive_suffixes('.s', '.S'): - ASPPSuffixes.extend(['.S']) -else: - ASSuffixes.extend(['.S']) - -def generate(env): - """Add Builders and construction variables for nasm to an Environment.""" - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - - for suffix in ASSuffixes: - static_obj.add_action(suffix, SCons.Defaults.ASAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - - for suffix in ASPPSuffixes: - static_obj.add_action(suffix, SCons.Defaults.ASPPAction) - static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) - - env['AS'] = 'nasm' - env['ASFLAGS'] = SCons.Util.CLVar('') - env['ASPPFLAGS'] = '$ASFLAGS' - env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES' - env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES' - -def exists(env): - return env.Detect('nasm') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/__init__.py deleted file mode 100644 index 4b947f93c71..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/__init__.py +++ /dev/null @@ -1,322 +0,0 @@ -"""SCons.Tool.Packaging - -SCons Packaging Tool. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Tool/packaging/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Environment -from SCons.Variables import * -from SCons.Errors import * -from SCons.Util import is_List, make_path_relative -from SCons.Warnings import warn, Warning - -import os -import importlib - -__all__ = [ - 'src_targz', 'src_tarbz2', 'src_tarxz', 'src_zip', - 'targz', 'tarbz2', 'tarxz', 'zip', - 'rpm', 'msi', 'ipk', -] - -# -# Utility and Builder function -# -def Tag(env, target, source, *more_tags, **kw_tags): - """ Tag a file with the given arguments, just sets the accordingly named - attribute on the file object. 
- - TODO: FIXME - """ - if not target: - target=source - first_tag=None - else: - first_tag=source - - if first_tag: - kw_tags[first_tag[0]] = '' - - if len(kw_tags) == 0 and len(more_tags) == 0: - raise UserError("No tags given.") - - # XXX: sanity checks - for x in more_tags: - kw_tags[x] = '' - - if not SCons.Util.is_List(target): - target=[target] - else: - # hmm, sometimes the target list, is a list of a list - # make sure it is flattened prior to processing. - # TODO: perhaps some bug ?!? - target=env.Flatten(target) - - for t in target: - for (k,v) in kw_tags.items(): - # all file tags have to start with PACKAGING_, so we can later - # differentiate between "normal" object attributes and the - # packaging attributes. As the user should not be bothered with - # that, the prefix will be added here if missing. - if k[:10] != 'PACKAGING_': - k='PACKAGING_'+k - t.Tag(k, v) - -def Package(env, target=None, source=None, **kw): - """ Entry point for the package tool. - """ - # check if we need to find the source files ourself - if not source: - source = env.FindInstalledFiles() - - if len(source)==0: - raise UserError("No source for Package() given") - - # decide which types of packages shall be built. Can be defined through - # four mechanisms: command line argument, keyword argument, - # environment argument and default selection( zip or tar.gz ) in that - # order. - try: kw['PACKAGETYPE']=env['PACKAGETYPE'] - except KeyError: pass - - if not kw.get('PACKAGETYPE'): - from SCons.Script import GetOption - kw['PACKAGETYPE'] = GetOption('package_type') - - if kw['PACKAGETYPE'] is None: - if 'Tar' in env['BUILDERS']: - kw['PACKAGETYPE']='targz' - elif 'Zip' in env['BUILDERS']: - kw['PACKAGETYPE']='zip' - else: - raise UserError("No type for Package() given") - - PACKAGETYPE=kw['PACKAGETYPE'] - if not is_List(PACKAGETYPE): - PACKAGETYPE=PACKAGETYPE.split(',') - - # load the needed packagers. 
- def load_packager(type): - try: - # the specific packager is a relative import - return importlib.import_module("." + type, __name__) - except ImportError as e: - raise SConsEnvironmentError("packager %s not available: %s" % (type, str(e))) - - packagers = list(map(load_packager, PACKAGETYPE)) - - # set up targets and the PACKAGEROOT - try: - # fill up the target list with a default target name until the PACKAGETYPE - # list is of the same size as the target list. - if not target: target = [] - - size_diff = len(PACKAGETYPE)-len(target) - default_name = "%(NAME)s-%(VERSION)s" - - if size_diff>0: - default_target = default_name%kw - target.extend( [default_target]*size_diff ) - - if 'PACKAGEROOT' not in kw: - kw['PACKAGEROOT'] = default_name%kw - - except KeyError as e: - raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] ) - - # setup the source files - source=env.arg2nodes(source, env.fs.Entry) - - # call the packager to setup the dependencies. - targets=[] - try: - for packager in packagers: - t=[target.pop(0)] - t=packager.package(env,t,source, **kw) - targets.extend(t) - - assert( len(target) == 0 ) - - except KeyError as e: - raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ - % (e.args[0],packager.__name__) ) - except TypeError as e: - # this exception means that a needed argument for the packager is - # missing. As our packagers get their "tags" as named function - # arguments we need to find out which one is missing. - #TODO: getargspec deprecated in Py3. cleanup when Py2.7 dropped. - try: - from inspect import getfullargspec - argspec = getfullargspec(packager.package) - except ImportError: - from inspect import getargspec - argspec = getargspec(packager.package) - args = argspec.args - if argspec.defaults: - # throw away arguments with default values - args = args[:-len(argspec.defaults)] - args.remove('env') - args.remove('target') - args.remove('source') - # now remove any args for which we have a value in kw. 
- args = [x for x in args if x not in kw] - - if len(args)==0: - raise # must be a different error, so re-raise - elif len(args)==1: - raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ - % (args[0],packager.__name__) ) - else: - raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\ - % (", ".join(args),packager.__name__) ) - - target=env.arg2nodes(target, env.fs.Entry) - targets.extend(env.Alias( 'package', targets )) - return targets - -# -# SCons tool initialization functions -# - -added = None - -def generate(env): - from SCons.Script import AddOption - global added - if not added: - added = 1 - AddOption('--package-type', - dest='package_type', - default=None, - type="string", - action="store", - help='The type of package to create.') - - try: - env['BUILDERS']['Package'] - env['BUILDERS']['Tag'] - except KeyError: - env['BUILDERS']['Package'] = Package - env['BUILDERS']['Tag'] = Tag - -def exists(env): - return 1 - -# XXX -def options(opts): - opts.AddVariables( - EnumVariable( 'PACKAGETYPE', - 'the type of package to create.', - None, allowed_values=list(map( str, __all__ )), - ignorecase=2 - ) - ) - -# -# Internal utility functions -# - -def copy_attr(f1, f2): - """ copies the special packaging file attributes from f1 to f2. - """ - copyit = lambda x: not hasattr(f2, x) and x[:10] == 'PACKAGING_' - if f1._tags: - pattrs = [tag for tag in f1._tags if copyit(tag)] - for attr in pattrs: - f2.Tag(attr, f1.GetTag(attr)) - -def putintopackageroot(target, source, env, pkgroot, honor_install_location=1): - """ Uses the CopyAs builder to copy all source files to the directory given - in pkgroot. - - If honor_install_location is set and the copied source file has an - PACKAGING_INSTALL_LOCATION attribute, the PACKAGING_INSTALL_LOCATION is - used as the new name of the source file under pkgroot. - - The source file will not be copied if it is already under the the pkgroot - directory. 
- - All attributes of the source file will be copied to the new file. - """ - # make sure the packageroot is a Dir object. - if SCons.Util.is_String(pkgroot): pkgroot=env.Dir(pkgroot) - if not SCons.Util.is_List(source): source=[source] - - new_source = [] - for file in source: - if SCons.Util.is_String(file): file = env.File(file) - - if file.is_under(pkgroot): - new_source.append(file) - else: - if file.GetTag('PACKAGING_INSTALL_LOCATION') and\ - honor_install_location: - new_name=make_path_relative(file.GetTag('PACKAGING_INSTALL_LOCATION')) - else: - new_name=make_path_relative(file.get_path()) - - new_file=pkgroot.File(new_name) - new_file=env.CopyAs(new_file, file)[0] - copy_attr(file, new_file) - new_source.append(new_file) - - return (target, new_source) - -def stripinstallbuilder(target, source, env): - """ Strips the install builder action from the source list and stores - the final installation location as the "PACKAGING_INSTALL_LOCATION" of - the source of the source file. This effectively removes the final installed - files from the source list while remembering the installation location. - - It also warns about files which have no install builder attached. 
- """ - def has_no_install_location(file): - return not (file.has_builder() and hasattr(file.builder, 'name') - and file.builder.name in ["InstallBuilder", "InstallAsBuilder"]) - - - if len([src for src in source if has_no_install_location(src)]): - warn(Warning, "there are files to package which have no\ - InstallBuilder attached, this might lead to irreproducible packages") - - n_source=[] - for s in source: - if has_no_install_location(s): - n_source.append(s) - else: - for ss in s.sources: - n_source.append(ss) - copy_attr(s, ss) - ss.Tag('PACKAGING_INSTALL_LOCATION', s.get_path()) - - return (target, n_source) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/ipk.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/ipk.py deleted file mode 100644 index 8b4d79cf38a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/ipk.py +++ /dev/null @@ -1,189 +0,0 @@ -"""SCons.Tool.Packaging.ipk -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/packaging/ipk.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot - -def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION, - SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL, - X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw): - """ This function prepares the packageroot directory for packaging with the - ipkg builder. - """ - SCons.Tool.Tool('ipkg').generate(env) - - # setup the Ipkg builder - bld = env['BUILDERS']['Ipkg'] - target, source = stripinstallbuilder(target, source, env) - target, source = putintopackageroot(target, source, env, PACKAGEROOT) - - # This should be overrideable from the construction environment, - # which it is by using ARCHITECTURE=. - # Guessing based on what os.uname() returns at least allows it - # to work for both i386 and x86_64 Linux systems. - archmap = { - 'i686' : 'i386', - 'i586' : 'i386', - 'i486' : 'i386', - } - - buildarchitecture = os.uname()[4] - buildarchitecture = archmap.get(buildarchitecture, buildarchitecture) - - if 'ARCHITECTURE' in kw: - buildarchitecture = kw['ARCHITECTURE'] - - # setup the kw to contain the mandatory arguments to this function. - # do this before calling any builder or setup function - loc=locals() - del loc['kw'] - kw.update(loc) - del kw['source'], kw['target'], kw['env'] - - # generate the specfile - specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw) - - # override the default target. 
- if str(target[0])=="%s-%s"%(NAME, VERSION): - target=[ "%s_%s_%s.ipk"%(NAME, VERSION, buildarchitecture) ] - - # now apply the Ipkg builder - return bld(env, target, specfile, **kw) - -def gen_ipk_dir(proot, source, env, kw): - # make sure the packageroot is a Dir object. - if SCons.Util.is_String(proot): proot=env.Dir(proot) - - # create the specfile builder - s_bld=SCons.Builder.Builder( - action = build_specfiles, - ) - - # create the specfile targets - spec_target=[] - control=proot.Dir('CONTROL') - spec_target.append(control.File('control')) - spec_target.append(control.File('conffiles')) - spec_target.append(control.File('postrm')) - spec_target.append(control.File('prerm')) - spec_target.append(control.File('postinst')) - spec_target.append(control.File('preinst')) - - # apply the builder to the specfile targets - s_bld(env, spec_target, source, **kw) - - # the packageroot directory does now contain the specfiles. - return proot - -def build_specfiles(source, target, env): - """ Filter the targets for the needed files and use the variables in env - to create the specfile. - """ - # - # At first we care for the CONTROL/control file, which is the main file for ipk. - # - # For this we need to open multiple files in random order, so we store into - # a dict so they can be easily accessed. 
- # - # - opened_files={} - def open_file(needle, haystack=None): - try: - return opened_files[needle] - except KeyError: - files = filter(lambda x: x.get_path().rfind(needle) != -1, haystack) - # Py3: filter returns an iterable, not a list - file = list(files)[0] - opened_files[needle] = open(file.get_abspath(), 'w') - return opened_files[needle] - - control_file = open_file('control', target) - - if 'X_IPK_DESCRIPTION' not in env: - env['X_IPK_DESCRIPTION']="%s\n %s"%(env['SUMMARY'], - env['DESCRIPTION'].replace('\n', '\n ')) - - - content = """ -Package: $NAME -Version: $VERSION -Priority: $X_IPK_PRIORITY -Section: $X_IPK_SECTION -Source: $SOURCE_URL -Architecture: $ARCHITECTURE -Maintainer: $X_IPK_MAINTAINER -Depends: $X_IPK_DEPENDS -Description: $X_IPK_DESCRIPTION -""" - - control_file.write(env.subst(content)) - - # - # now handle the various other files, which purpose it is to set post-, - # pre-scripts and mark files as config files. - # - # We do so by filtering the source files for files which are marked with - # the "config" tag and afterwards we do the same for x_ipk_postrm, - # x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags. - # - # The first one will write the name of the file into the file - # CONTROL/configfiles, the latter add the content of the x_ipk_* variable - # into the same named file. 
- # - for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]: - config = open_file('conffiles') - config.write(f.PACKAGING_INSTALL_LOCATION) - config.write('\n') - - for str in 'POSTRM PRERM POSTINST PREINST'.split(): - name="PACKAGING_X_IPK_%s"%str - for f in [x for x in source if name in dir(x)]: - file = open_file(name) - file.write(env[str]) - - # - # close all opened files - for f in list(opened_files.values()): - f.close() - - # call a user specified function - if 'CHANGE_SPECFILE' in env: - content += env['CHANGE_SPECFILE'](target) - - return 0 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/msi.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/msi.py deleted file mode 100644 index 1d47268b243..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/msi.py +++ /dev/null @@ -1,527 +0,0 @@ -"""SCons.Tool.packaging.msi - -The msi packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Tool/packaging/msi.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import SCons -from SCons.Action import Action -from SCons.Builder import Builder - -from xml.dom.minidom import * -from xml.sax.saxutils import escape - -from SCons.Tool.packaging import stripinstallbuilder - -# -# Utility functions -# -def convert_to_id(s, id_set): - """ Some parts of .wxs need an Id attribute (for example: The File and - Directory directives. The charset is limited to A-Z, a-z, digits, - underscores, periods. Each Id must begin with a letter or with a - underscore. Google for "CNDL0015" for information about this. - - Requirements: - * the string created must only contain chars from the target charset. - * the string created must have a minimal editing distance from the - original string. - * the string created must be unique for the whole .wxs file. - - Observation: - * There are 62 chars in the charset. - - Idea: - * filter out forbidden characters. Check for a collision with the help - of the id_set. Add the number of the number of the collision at the - end of the created string. Furthermore care for a correct start of - the string. - """ - charset = 'ABCDEFGHIJKLMNOPQRSTUVWXYabcdefghijklmnopqrstuvwxyz0123456789_.' - if s[0] in '0123456789.': - s = '_' + s - id = ''.join([c for c in s if c in charset]) - - # did we already generate an id for this file? 
- try: - return id_set[id][s] - except KeyError: - # no we did not, so initialize with the id - if id not in id_set: id_set[id] = { s : id } - # there is a collision, generate an id which is unique by appending - # the collision number - else: id_set[id][s] = id + str(len(id_set[id])) - - return id_set[id][s] - -def is_dos_short_file_name(file): - """ Examine if the given file is in the 8.3 form. - """ - fname, ext = os.path.splitext(file) - proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot - proper_fname = file.isupper() and len(fname) <= 8 - - return proper_ext and proper_fname - -def gen_dos_short_file_name(file, filename_set): - """ See http://support.microsoft.com/default.aspx?scid=kb;en-us;Q142982 - - These are no complete 8.3 dos short names. The ~ char is missing and - replaced with one character from the filename. WiX warns about such - filenames, since a collision might occur. Google for "CNDL1014" for - more information. - """ - # guard this to not confuse the generation - if is_dos_short_file_name(file): - return file - - fname, ext = os.path.splitext(file) # ext contains the dot - - # first try if it suffices to convert to upper - file = file.upper() - if is_dos_short_file_name(file): - return file - - # strip forbidden characters. - forbidden = '."/[]:;=, ' - fname = ''.join([c for c in fname if c not in forbidden]) - - # check if we already generated a filename with the same number: - # thisis1.txt, thisis2.txt etc. 
- duplicate, num = not None, 1 - while duplicate: - shortname = "%s%s" % (fname[:8-len(str(num))].upper(), str(num)) - if len(ext) >= 2: - shortname = "%s%s" % (shortname, ext[:4].upper()) - - duplicate, num = shortname in filename_set, num+1 - - assert( is_dos_short_file_name(shortname) ), 'shortname is %s, longname is %s' % (shortname, file) - filename_set.append(shortname) - return shortname - -def create_feature_dict(files): - """ X_MSI_FEATURE and doc FileTag's can be used to collect files in a - hierarchy. This function collects the files into this hierarchy. - """ - dict = {} - - def add_to_dict( feature, file ): - if not SCons.Util.is_List( feature ): - feature = [ feature ] - - for f in feature: - if f not in dict: - dict[ f ] = [ file ] - else: - dict[ f ].append( file ) - - for file in files: - if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ): - add_to_dict(file.PACKAGING_X_MSI_FEATURE, file) - elif hasattr( file, 'PACKAGING_DOC' ): - add_to_dict( 'PACKAGING_DOC', file ) - else: - add_to_dict( 'default', file ) - - return dict - -def generate_guids(root): - """ generates globally unique identifiers for parts of the xml which need - them. - - Component tags have a special requirement. Their UUID is only allowed to - change if the list of their contained resources has changed. This allows - for clean removal and proper updates. - - To handle this requirement, the uuid is generated with an md5 hashing the - whole subtree of a xml node. - """ - from hashlib import md5 - - # specify which tags need a guid and in which attribute this should be stored. - needs_id = { 'Product' : 'Id', - 'Package' : 'Id', - 'Component' : 'Guid', - } - - # find all XMl nodes matching the key, retrieve their attribute, hash their - # subtree, convert hash to string and add as a attribute to the xml node. 
- for (key,value) in needs_id.items(): - node_list = root.getElementsByTagName(key) - attribute = value - for node in node_list: - hash = md5(node.toxml()).hexdigest() - hash_str = '%s-%s-%s-%s-%s' % ( hash[:8], hash[8:12], hash[12:16], hash[16:20], hash[20:] ) - node.attributes[attribute] = hash_str - - - -def string_wxsfile(target, source, env): - return "building WiX file %s"%( target[0].path ) - -def build_wxsfile(target, source, env): - """ Compiles a .wxs file from the keywords given in env['msi_spec'] and - by analyzing the tree of source nodes and their tags. - """ - f = open(target[0].get_abspath(), 'w') - - try: - # Create a document with the Wix root tag - doc = Document() - root = doc.createElement( 'Wix' ) - root.attributes['xmlns']='http://schemas.microsoft.com/wix/2003/01/wi' - doc.appendChild( root ) - - filename_set = [] # this is to circumvent duplicates in the shortnames - id_set = {} # this is to circumvent duplicates in the ids - - # Create the content - build_wxsfile_header_section(root, env) - build_wxsfile_file_section(root, source, env['NAME'], env['VERSION'], env['VENDOR'], filename_set, id_set) - generate_guids(root) - build_wxsfile_features_section(root, source, env['NAME'], env['VERSION'], env['SUMMARY'], id_set) - build_wxsfile_default_gui(root) - build_license_file(target[0].get_dir(), env) - - # write the xml to a file - f.write( doc.toprettyxml() ) - - # call a user specified function - if 'CHANGE_SPECFILE' in env: - env['CHANGE_SPECFILE'](target, source) - - except KeyError as e: - raise SCons.Errors.UserError( '"%s" package field for MSI is missing.' % e.args[0] ) - finally: - f.close() - -# -# setup function -# -def create_default_directory_layout(root, NAME, VERSION, VENDOR, filename_set): - r""" Create the wix default target directory layout and return the innermost - directory. - - We assume that the XML tree delivered in the root argument already contains - the Product tag. 
- - Everything is put under the PFiles directory property defined by WiX. - After that a directory with the 'VENDOR' tag is placed and then a - directory with the name of the project and its VERSION. This leads to the - following TARGET Directory Layout: - C:\\\\ - Example: C:\Programme\Company\Product-1.2\ - """ - doc = Document() - d1 = doc.createElement( 'Directory' ) - d1.attributes['Id'] = 'TARGETDIR' - d1.attributes['Name'] = 'SourceDir' - - d2 = doc.createElement( 'Directory' ) - d2.attributes['Id'] = 'ProgramFilesFolder' - d2.attributes['Name'] = 'PFiles' - - d3 = doc.createElement( 'Directory' ) - d3.attributes['Id'] = 'VENDOR_folder' - d3.attributes['Name'] = escape( gen_dos_short_file_name( VENDOR, filename_set ) ) - d3.attributes['LongName'] = escape( VENDOR ) - - d4 = doc.createElement( 'Directory' ) - project_folder = "%s-%s" % ( NAME, VERSION ) - d4.attributes['Id'] = 'MY_DEFAULT_FOLDER' - d4.attributes['Name'] = escape( gen_dos_short_file_name( project_folder, filename_set ) ) - d4.attributes['LongName'] = escape( project_folder ) - - d1.childNodes.append( d2 ) - d2.childNodes.append( d3 ) - d3.childNodes.append( d4 ) - - root.getElementsByTagName('Product')[0].childNodes.append( d1 ) - - return d4 - -# -# mandatory and optional file tags -# -def build_wxsfile_file_section(root, files, NAME, VERSION, VENDOR, filename_set, id_set): - """ Builds the Component sections of the wxs file with their included files. - - Files need to be specified in 8.3 format and in the long name format, long - filenames will be converted automatically. - - Features are specficied with the 'X_MSI_FEATURE' or 'DOC' FileTag. - """ - root = create_default_directory_layout( root, NAME, VERSION, VENDOR, filename_set ) - components = create_feature_dict( files ) - factory = Document() - - def get_directory( node, dir ): - """ Returns the node under the given node representing the directory. - - Returns the component node if dir is None or empty. 
- """ - if dir == '' or not dir: - return node - - Directory = node - dir_parts = dir.split(os.path.sep) - - # to make sure that our directory ids are unique, the parent folders are - # consecutively added to upper_dir - upper_dir = '' - - # walk down the xml tree finding parts of the directory - dir_parts = [d for d in dir_parts if d != ''] - for d in dir_parts[:]: - already_created = [c for c in Directory.childNodes - if c.nodeName == 'Directory' - and c.attributes['LongName'].value == escape(d)] - - if already_created: - Directory = already_created[0] - dir_parts.remove(d) - upper_dir += d - else: - break - - for d in dir_parts: - nDirectory = factory.createElement( 'Directory' ) - nDirectory.attributes['LongName'] = escape( d ) - nDirectory.attributes['Name'] = escape( gen_dos_short_file_name( d, filename_set ) ) - upper_dir += d - nDirectory.attributes['Id'] = convert_to_id( upper_dir, id_set ) - - Directory.childNodes.append( nDirectory ) - Directory = nDirectory - - return Directory - - for file in files: - drive, path = os.path.splitdrive( file.PACKAGING_INSTALL_LOCATION ) - filename = os.path.basename( path ) - dirname = os.path.dirname( path ) - - h = { - # tagname : default value - 'PACKAGING_X_MSI_VITAL' : 'yes', - 'PACKAGING_X_MSI_FILEID' : convert_to_id(filename, id_set), - 'PACKAGING_X_MSI_LONGNAME' : filename, - 'PACKAGING_X_MSI_SHORTNAME' : gen_dos_short_file_name(filename, filename_set), - 'PACKAGING_X_MSI_SOURCE' : file.get_path(), - } - - # fill in the default tags given above. 
- for k,v in [ (k, v) for (k,v) in h.items() if not hasattr(file, k) ]: - setattr( file, k, v ) - - File = factory.createElement( 'File' ) - File.attributes['LongName'] = escape( file.PACKAGING_X_MSI_LONGNAME ) - File.attributes['Name'] = escape( file.PACKAGING_X_MSI_SHORTNAME ) - File.attributes['Source'] = escape( file.PACKAGING_X_MSI_SOURCE ) - File.attributes['Id'] = escape( file.PACKAGING_X_MSI_FILEID ) - File.attributes['Vital'] = escape( file.PACKAGING_X_MSI_VITAL ) - - # create the Tag under which this file should appear - Component = factory.createElement('Component') - Component.attributes['DiskId'] = '1' - Component.attributes['Id'] = convert_to_id( filename, id_set ) - - # hang the component node under the root node and the file node - # under the component node. - Directory = get_directory( root, dirname ) - Directory.childNodes.append( Component ) - Component.childNodes.append( File ) - -# -# additional functions -# -def build_wxsfile_features_section(root, files, NAME, VERSION, SUMMARY, id_set): - """ This function creates the tag based on the supplied xml tree. - - This is achieved by finding all s and adding them to a default target. - - It should be called after the tree has been built completly. We assume - that a MY_DEFAULT_FOLDER Property is defined in the wxs file tree. - - Furthermore a top-level with the name and VERSION of the software will be created. - - An PACKAGING_X_MSI_FEATURE can either be a string, where the feature - DESCRIPTION will be the same as its title or a Tuple, where the first - part will be its title and the second its DESCRIPTION. 
- """ - factory = Document() - Feature = factory.createElement('Feature') - Feature.attributes['Id'] = 'complete' - Feature.attributes['ConfigurableDirectory'] = 'MY_DEFAULT_FOLDER' - Feature.attributes['Level'] = '1' - Feature.attributes['Title'] = escape( '%s %s' % (NAME, VERSION) ) - Feature.attributes['Description'] = escape( SUMMARY ) - Feature.attributes['Display'] = 'expand' - - for (feature, files) in create_feature_dict(files).items(): - SubFeature = factory.createElement('Feature') - SubFeature.attributes['Level'] = '1' - - if SCons.Util.is_Tuple(feature): - SubFeature.attributes['Id'] = convert_to_id( feature[0], id_set ) - SubFeature.attributes['Title'] = escape(feature[0]) - SubFeature.attributes['Description'] = escape(feature[1]) - else: - SubFeature.attributes['Id'] = convert_to_id( feature, id_set ) - if feature=='default': - SubFeature.attributes['Description'] = 'Main Part' - SubFeature.attributes['Title'] = 'Main Part' - elif feature=='PACKAGING_DOC': - SubFeature.attributes['Description'] = 'Documentation' - SubFeature.attributes['Title'] = 'Documentation' - else: - SubFeature.attributes['Description'] = escape(feature) - SubFeature.attributes['Title'] = escape(feature) - - # build the componentrefs. As one of the design decision is that every - # file is also a component we walk the list of files and create a - # reference. 
- for f in files: - ComponentRef = factory.createElement('ComponentRef') - ComponentRef.attributes['Id'] = convert_to_id( os.path.basename(f.get_path()), id_set ) - SubFeature.childNodes.append(ComponentRef) - - Feature.childNodes.append(SubFeature) - - root.getElementsByTagName('Product')[0].childNodes.append(Feature) - -def build_wxsfile_default_gui(root): - """ This function adds a default GUI to the wxs file - """ - factory = Document() - Product = root.getElementsByTagName('Product')[0] - - UIRef = factory.createElement('UIRef') - UIRef.attributes['Id'] = 'WixUI_Mondo' - Product.childNodes.append(UIRef) - - UIRef = factory.createElement('UIRef') - UIRef.attributes['Id'] = 'WixUI_ErrorProgressText' - Product.childNodes.append(UIRef) - -def build_license_file(directory, spec): - """ Creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT" - in the given directory - """ - name, text = '', '' - - try: - name = spec['LICENSE'] - text = spec['X_MSI_LICENSE_TEXT'] - except KeyError: - pass # ignore this as X_MSI_LICENSE_TEXT is optional - - if name!='' or text!='': - with open(os.path.join(directory.get_path(), 'License.rtf'), 'w') as f: - f.write('{\\rtf') - if text!='': - f.write(text.replace('\n', '\\par ')) - else: - f.write(name+'\\par\\par') - f.write('}') - -# -# mandatory and optional package tags -# -def build_wxsfile_header_section(root, spec): - """ Adds the xml file node which define the package meta-data. - """ - # Create the needed DOM nodes and add them at the correct position in the tree. 
- factory = Document() - Product = factory.createElement( 'Product' ) - Package = factory.createElement( 'Package' ) - - root.childNodes.append( Product ) - Product.childNodes.append( Package ) - - # set "mandatory" default values - if 'X_MSI_LANGUAGE' not in spec: - spec['X_MSI_LANGUAGE'] = '1033' # select english - - # mandatory sections, will throw a KeyError if the tag is not available - Product.attributes['Name'] = escape( spec['NAME'] ) - Product.attributes['Version'] = escape( spec['VERSION'] ) - Product.attributes['Manufacturer'] = escape( spec['VENDOR'] ) - Product.attributes['Language'] = escape( spec['X_MSI_LANGUAGE'] ) - Package.attributes['Description'] = escape( spec['SUMMARY'] ) - - # now the optional tags, for which we avoid the KeyErrror exception - if 'DESCRIPTION' in spec: - Package.attributes['Comments'] = escape( spec['DESCRIPTION'] ) - - if 'X_MSI_UPGRADE_CODE' in spec: - Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] ) - - # We hardcode the media tag as our current model cannot handle it. - Media = factory.createElement('Media') - Media.attributes['Id'] = '1' - Media.attributes['Cabinet'] = 'default.cab' - Media.attributes['EmbedCab'] = 'yes' - root.getElementsByTagName('Product')[0].childNodes.append(Media) - -# this builder is the entry-point for .wxs file compiler. -wxs_builder = Builder( - action = Action( build_wxsfile, string_wxsfile ), - ensure_suffix = '.wxs' ) - -def package(env, target, source, PACKAGEROOT, NAME, VERSION, - DESCRIPTION, SUMMARY, VENDOR, X_MSI_LANGUAGE, **kw): - # make sure that the Wix Builder is in the environment - SCons.Tool.Tool('wix').generate(env) - - # get put the keywords for the specfile compiler. These are the arguments - # given to the package function and all optional ones stored in kw, minus - # the the source, target and env one. 
- loc = locals() - del loc['kw'] - kw.update(loc) - del kw['source'], kw['target'], kw['env'] - - # strip the install builder from the source files - target, source = stripinstallbuilder(target, source, env) - - # put the arguments into the env and call the specfile builder. - env['msi_spec'] = kw - specfile = wxs_builder(* [env, target, source], **kw) - - # now call the WiX Tool with the built specfile added as a source. - msifile = env.WiX(target, specfile) - - # return the target and source tuple. - return (msifile, source+[specfile]) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/rpm.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/rpm.py deleted file mode 100644 index 1554d096551..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/rpm.py +++ /dev/null @@ -1,362 +0,0 @@ -"""SCons.Tool.Packaging.rpm - -The rpm packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Tool/packaging/rpm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os - -import SCons.Builder -import SCons.Tool.rpmutils - -from SCons.Environment import OverrideEnvironment -from SCons.Tool.packaging import stripinstallbuilder, src_targz -from SCons.Errors import UserError - -def package(env, target, source, PACKAGEROOT, NAME, VERSION, - PACKAGEVERSION, DESCRIPTION, SUMMARY, X_RPM_GROUP, LICENSE, - **kw): - # initialize the rpm tool - SCons.Tool.Tool('rpm').generate(env) - - bld = env['BUILDERS']['Rpm'] - - # Generate a UserError whenever the target name has been set explicitly, - # since rpm does not allow for controlling it. This is detected by - # checking if the target has been set to the default by the Package() - # Environment function. - if str(target[0])!="%s-%s"%(NAME, VERSION): - raise UserError( "Setting target is not supported for rpm." ) - else: - # Deduce the build architecture, but allow it to be overridden - # by setting ARCHITECTURE in the construction env. - buildarchitecture = SCons.Tool.rpmutils.defaultMachine() - if 'ARCHITECTURE' in kw: - buildarchitecture = kw['ARCHITECTURE'] - - fmt = '%s-%s-%s.%s.rpm' - srcrpm = fmt % (NAME, VERSION, PACKAGEVERSION, 'src') - binrpm = fmt % (NAME, VERSION, PACKAGEVERSION, buildarchitecture) - - target = [ srcrpm, binrpm ] - - # get the correct arguments into the kw hash - loc=locals() - del loc['kw'] - kw.update(loc) - del kw['source'], kw['target'], kw['env'] - - # if no "SOURCE_URL" tag is given add a default one. 
- if 'SOURCE_URL' not in kw: - kw['SOURCE_URL']=(str(target[0])+".tar.gz").replace('.rpm', '') - - # mangle the source and target list for the rpmbuild - env = OverrideEnvironment(env, kw) - target, source = stripinstallbuilder(target, source, env) - target, source = addspecfile(target, source, env) - target, source = collectintargz(target, source, env) - - # now call the rpm builder to actually build the packet. - return bld(env, target, source, **kw) - -def collectintargz(target, source, env): - """ Puts all source files into a tar.gz file. """ - # the rpm tool depends on a source package, until this is changed - # this hack needs to be here that tries to pack all sources in. - sources = env.FindSourceFiles() - - # filter out the target we are building the source list for. - sources = [s for s in sources if s not in target] - - # find the .spec file for rpm and add it since it is not necessarily found - # by the FindSourceFiles function. - sources.extend( [s for s in source if str(s).rfind('.spec')!=-1] ) - # sort to keep sources from changing order across builds - sources.sort() - - # as the source contains the url of the source package this rpm package - # is built from, we extract the target name - tarball = (str(target[0])+".tar.gz").replace('.rpm', '') - try: - tarball = env['SOURCE_URL'].split('/')[-1] - except KeyError as e: - raise SCons.Errors.UserError( "Missing PackageTag '%s' for RPM packager" % e.args[0] ) - - tarball = src_targz.package(env, source=sources, target=tarball, - PACKAGEROOT=env['PACKAGEROOT'], ) - - return (target, tarball) - -def addspecfile(target, source, env): - specfile = "%s-%s" % (env['NAME'], env['VERSION']) - - bld = SCons.Builder.Builder(action = build_specfile, - suffix = '.spec', - target_factory = SCons.Node.FS.File) - - source.extend(bld(env, specfile, source)) - - return (target,source) - -def build_specfile(target, source, env): - """ Builds a RPM specfile from a dictionary with string metadata and - by analyzing a tree 
of nodes. - """ - with open(target[0].get_abspath(), 'w') as ofp: - try: - ofp.write(build_specfile_header(env)) - ofp.write(build_specfile_sections(env)) - ofp.write(build_specfile_filesection(env, source)) - - # call a user specified function - if 'CHANGE_SPECFILE' in env: - env['CHANGE_SPECFILE'](target, source) - - except KeyError as e: - raise SCons.Errors.UserError('"%s" package field for RPM is missing.' % e.args[0]) - - -# -# mandatory and optional package tag section -# -def build_specfile_sections(spec): - """ Builds the sections of a rpm specfile. - """ - str = "" - - mandatory_sections = { - 'DESCRIPTION' : '\n%%description\n%s\n\n', } - - str = str + SimpleTagCompiler(mandatory_sections).compile( spec ) - - optional_sections = { - 'DESCRIPTION_' : '%%description -l %s\n%s\n\n', - 'CHANGELOG' : '%%changelog\n%s\n\n', - 'X_RPM_PREINSTALL' : '%%pre\n%s\n\n', - 'X_RPM_POSTINSTALL' : '%%post\n%s\n\n', - 'X_RPM_PREUNINSTALL' : '%%preun\n%s\n\n', - 'X_RPM_POSTUNINSTALL' : '%%postun\n%s\n\n', - 'X_RPM_VERIFY' : '%%verify\n%s\n\n', - - # These are for internal use but could possibly be overridden - 'X_RPM_PREP' : '%%prep\n%s\n\n', - 'X_RPM_BUILD' : '%%build\n%s\n\n', - 'X_RPM_INSTALL' : '%%install\n%s\n\n', - 'X_RPM_CLEAN' : '%%clean\n%s\n\n', - } - - # Default prep, build, install and clean rules - # TODO: optimize those build steps, to not compile the project a second time - if 'X_RPM_PREP' not in spec: - spec['X_RPM_PREP'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + '\n%setup -q' - - if 'X_RPM_BUILD' not in spec: - spec['X_RPM_BUILD'] = '[ ! 
-e "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && mkdir "$RPM_BUILD_ROOT"' - - if 'X_RPM_INSTALL' not in spec: - spec['X_RPM_INSTALL'] = 'scons --install-sandbox="$RPM_BUILD_ROOT" "$RPM_BUILD_ROOT"' - - if 'X_RPM_CLEAN' not in spec: - spec['X_RPM_CLEAN'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' - - str = str + SimpleTagCompiler(optional_sections, mandatory=0).compile( spec ) - - return str - -def build_specfile_header(spec): - """ Builds all sections but the %file of a rpm specfile - """ - str = "" - - # first the mandatory sections - mandatory_header_fields = { - 'NAME' : '%%define name %s\nName: %%{name}\n', - 'VERSION' : '%%define version %s\nVersion: %%{version}\n', - 'PACKAGEVERSION' : '%%define release %s\nRelease: %%{release}\n', - 'X_RPM_GROUP' : 'Group: %s\n', - 'SUMMARY' : 'Summary: %s\n', - 'LICENSE' : 'License: %s\n', - } - - str = str + SimpleTagCompiler(mandatory_header_fields).compile( spec ) - - # now the optional tags - optional_header_fields = { - 'VENDOR' : 'Vendor: %s\n', - 'X_RPM_URL' : 'Url: %s\n', - 'SOURCE_URL' : 'Source: %s\n', - 'SUMMARY_' : 'Summary(%s): %s\n', - 'ARCHITECTURE' : 'BuildArch: %s\n', - 'X_RPM_DISTRIBUTION' : 'Distribution: %s\n', - 'X_RPM_ICON' : 'Icon: %s\n', - 'X_RPM_PACKAGER' : 'Packager: %s\n', - 'X_RPM_GROUP_' : 'Group(%s): %s\n', - - 'X_RPM_REQUIRES' : 'Requires: %s\n', - 'X_RPM_PROVIDES' : 'Provides: %s\n', - 'X_RPM_CONFLICTS' : 'Conflicts: %s\n', - 'X_RPM_BUILDREQUIRES' : 'BuildRequires: %s\n', - - 'X_RPM_SERIAL' : 'Serial: %s\n', - 'X_RPM_EPOCH' : 'Epoch: %s\n', - 'X_RPM_AUTOREQPROV' : 'AutoReqProv: %s\n', - 'X_RPM_EXCLUDEARCH' : 'ExcludeArch: %s\n', - 'X_RPM_EXCLUSIVEARCH' : 'ExclusiveArch: %s\n', - 'X_RPM_PREFIX' : 'Prefix: %s\n', - - # internal use - 'X_RPM_BUILDROOT' : 'BuildRoot: %s\n', - } - - # fill in default values: - # Adding a BuildRequires renders the .rpm unbuildable under systems which - # are not managed by rpm, since the database to resolve this dependency 
is - # missing (take Gentoo as an example) - #if 'X_RPM_BUILDREQUIRES' not in spec: - # spec['X_RPM_BUILDREQUIRES'] = 'scons' - - if 'X_RPM_BUILDROOT' not in spec: - spec['X_RPM_BUILDROOT'] = '%{_tmppath}/%{name}-%{version}-%{release}' - - str = str + SimpleTagCompiler(optional_header_fields, mandatory=0).compile( spec ) - - # Add any extra specfile definitions the user may have supplied. - # These flags get no processing, they are just added. - # github #3164: if we don't turn off debug package generation - # the tests which build packages all fail. If there are no - # extra flags, default to adding this one. If the user wants - # to turn this back on, supply the flag set to None. - - if 'X_RPM_EXTRADEFS' not in spec: - spec['X_RPM_EXTRADEFS'] = ['%global debug_package %{nil}'] - for extra in spec['X_RPM_EXTRADEFS']: - str += extra + '\n' - - return str - -# -# mandatory and optional file tags -# -def build_specfile_filesection(spec, files): - """ builds the %file section of the specfile - """ - str = '%files\n' - - if 'X_RPM_DEFATTR' not in spec: - spec['X_RPM_DEFATTR'] = '(-,root,root)' - - str = str + '%%defattr %s\n' % spec['X_RPM_DEFATTR'] - - supported_tags = { - 'PACKAGING_CONFIG' : '%%config %s', - 'PACKAGING_CONFIG_NOREPLACE' : '%%config(noreplace) %s', - 'PACKAGING_DOC' : '%%doc %s', - 'PACKAGING_UNIX_ATTR' : '%%attr %s', - 'PACKAGING_LANG_' : '%%lang(%s) %s', - 'PACKAGING_X_RPM_VERIFY' : '%%verify %s', - 'PACKAGING_X_RPM_DIR' : '%%dir %s', - 'PACKAGING_X_RPM_DOCDIR' : '%%docdir %s', - 'PACKAGING_X_RPM_GHOST' : '%%ghost %s', } - - for file in files: - # build the tagset - tags = {} - for k in list(supported_tags.keys()): - try: - v = file.GetTag(k) - if v: - tags[k] = v - except AttributeError: - pass - - # compile the tagset - str = str + SimpleTagCompiler(supported_tags, mandatory=0).compile( tags ) - - str = str + ' ' - str = str + file.GetTag('PACKAGING_INSTALL_LOCATION') - str = str + '\n\n' - - return str - -class SimpleTagCompiler(object): - """ 
This class is a simple string substition utility: - the replacement specfication is stored in the tagset dictionary, something - like: - { "abc" : "cdef %s ", - "abc_" : "cdef %s %s" } - - the compile function gets a value dictionary, which may look like: - { "abc" : "ghij", - "abc_gh" : "ij" } - - The resulting string will be: - "cdef ghij cdef gh ij" - """ - def __init__(self, tagset, mandatory=1): - self.tagset = tagset - self.mandatory = mandatory - - def compile(self, values): - """ Compiles the tagset and returns a str containing the result - """ - def is_international(tag): - return tag.endswith('_') - - def get_country_code(tag): - return tag[-2:] - - def strip_country_code(tag): - return tag[:-2] - - replacements = list(self.tagset.items()) - - str = "" - domestic = [t for t in replacements if not is_international(t[0])] - for key, replacement in domestic: - try: - str = str + replacement % values[key] - except KeyError as e: - if self.mandatory: - raise e - - international = [t for t in replacements if is_international(t[0])] - for key, replacement in international: - try: - x = [t for t in values.items() if strip_country_code(t[0]) == key] - int_values_for_key = [(get_country_code(t[0]),t[1]) for t in x] - for v in int_values_for_key: - str = str + replacement % v - except KeyError as e: - if self.mandatory: - raise e - - return str - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_tarbz2.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_tarbz2.py deleted file mode 100644 index 1889ec96960..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_tarbz2.py +++ /dev/null @@ -1,43 +0,0 @@ -"""SCons.Tool.Packaging.src_tarbz2 - -The tarbz2 SRC packager. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/packaging/src_tarbz2.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Tar'] - bld.set_suffix('.tar.bz2') - target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) - return bld(env, target, source, TARFLAGS='-jc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_targz.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_targz.py deleted file mode 100644 index 0d3a3db6322..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_targz.py +++ /dev/null @@ -1,43 +0,0 @@ -"""SCons.Tool.Packaging.src_targz - -The targz SRC packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/packaging/src_targz.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Tar'] - bld.set_suffix('.tar.gz') - target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) - return bld(env, target, source, TARFLAGS='-zc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_tarxz.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_tarxz.py deleted file mode 100644 index 3042cac34ad..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_tarxz.py +++ /dev/null @@ -1,43 +0,0 @@ -"""SCons.Tool.Packaging.src_tarxz - -The tarxz SRC packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/packaging/src_tarxz.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Tar'] - bld.set_suffix('.tar.xz') - target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) - return bld(env, target, source, TARFLAGS='-Jc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_zip.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_zip.py deleted file mode 100644 index 207fb1c4b76..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/src_zip.py +++ /dev/null @@ -1,43 +0,0 @@ -"""SCons.Tool.Packaging.zip - -The zip SRC packager. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/packaging/src_zip.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Zip'] - bld.set_suffix('.zip') - target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) - return bld(env, target, source) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/tarbz2.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/tarbz2.py deleted file mode 100644 index f34813b0398..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/tarbz2.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Tool.Packaging.tarbz2 - -The tarbz2 packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/packaging/tarbz2.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Tar'] - bld.set_suffix('.tar.bz2') - target, source = putintopackageroot(target, source, env, PACKAGEROOT) - target, source = stripinstallbuilder(target, source, env) - return bld(env, target, source, TARFLAGS='-jc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/targz.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/targz.py deleted file mode 100644 index 8c694aff058..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/targz.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Tool.Packaging.targz - -The targz packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/packaging/targz.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Tar'] - bld.set_suffix('.tar.gz') - target, source = stripinstallbuilder(target, source, env) - target, source = putintopackageroot(target, source, env, PACKAGEROOT) - return bld(env, target, source, TARFLAGS='-zc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/tarxz.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/tarxz.py deleted file mode 100644 index dc52aa91137..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/tarxz.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Tool.Packaging.tarxz - -The tarxz packager. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/packaging/tarxz.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Tar'] - bld.set_suffix('.tar.xz') - target, source = putintopackageroot(target, source, env, PACKAGEROOT) - target, source = stripinstallbuilder(target, source, env) - return bld(env, target, source, TARFLAGS='-Jc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/zip.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/zip.py deleted file mode 100644 index c7da7e9feaa..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/packaging/zip.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Tool.Packaging.zip - -The zip SRC packager. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/packaging/zip.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot - -def package(env, target, source, PACKAGEROOT, **kw): - bld = env['BUILDERS']['Zip'] - bld.set_suffix('.zip') - target, source = stripinstallbuilder(target, source, env) - target, source = putintopackageroot(target, source, env, PACKAGEROOT) - return bld(env, target, source) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdf.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdf.py deleted file mode 100644 index 6d6310f51bd..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdf.py +++ /dev/null @@ -1,78 +0,0 @@ -"""SCons.Tool.pdf - -Common PDF Builder definition for various other Tool modules that use it. -Add an explicit action to run epstopdf to convert .eps files to .pdf - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/pdf.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Builder -import SCons.Tool - -PDFBuilder = None - -EpsPdfAction = SCons.Action.Action('$EPSTOPDFCOM', '$EPSTOPDFCOMSTR') - -def generate(env): - try: - env['BUILDERS']['PDF'] - except KeyError: - global PDFBuilder - if PDFBuilder is None: - PDFBuilder = SCons.Builder.Builder(action = {}, - source_scanner = SCons.Tool.PDFLaTeXScanner, - prefix = '$PDFPREFIX', - suffix = '$PDFSUFFIX', - emitter = {}, - source_ext_match = None, - single_source=True) - env['BUILDERS']['PDF'] = PDFBuilder - - env['PDFPREFIX'] = '' - env['PDFSUFFIX'] = '.pdf' - -# put the epstopdf builder in this routine so we can add it after -# the pdftex builder so that one is the default for no source suffix -def generate2(env): - bld = env['BUILDERS']['PDF'] - #bld.add_action('.ps', EpsPdfAction) # this is covered by direct Ghostcript action in gs.py - bld.add_action('.eps', EpsPdfAction) - - env['EPSTOPDF'] = 'epstopdf' - env['EPSTOPDFFLAGS'] = SCons.Util.CLVar('') - env['EPSTOPDFCOM'] = '$EPSTOPDF $EPSTOPDFFLAGS ${SOURCE} --outfile=${TARGET}' - -def exists(env): - # This only puts a skeleton Builder in place, so if someone - # references this Tool directly, it's always "available." 
- return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdflatex.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdflatex.py deleted file mode 100644 index 75503651552..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdflatex.py +++ /dev/null @@ -1,84 +0,0 @@ -"""SCons.Tool.pdflatex - -Tool-specific initialization for pdflatex. -Generates .pdf files from .latex or .ltx files - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/pdflatex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Util -import SCons.Tool.pdf -import SCons.Tool.tex - -PDFLaTeXAction = None - -def PDFLaTeXAuxFunction(target = None, source= None, env=None): - result = SCons.Tool.tex.InternalLaTeXAuxAction( PDFLaTeXAction, target, source, env ) - if result != 0: - SCons.Tool.tex.check_file_error_message(env['PDFLATEX']) - return result - -PDFLaTeXAuxAction = None - -def generate(env): - """Add Builders and construction variables for pdflatex to an Environment.""" - global PDFLaTeXAction - if PDFLaTeXAction is None: - PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR') - - global PDFLaTeXAuxAction - if PDFLaTeXAuxAction is None: - PDFLaTeXAuxAction = SCons.Action.Action(PDFLaTeXAuxFunction, - strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) - - env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) - - from . import pdf - pdf.generate(env) - - bld = env['BUILDERS']['PDF'] - bld.add_action('.ltx', PDFLaTeXAuxAction) - bld.add_action('.latex', PDFLaTeXAuxAction) - bld.add_emitter('.ltx', SCons.Tool.tex.tex_pdf_emitter) - bld.add_emitter('.latex', SCons.Tool.tex.tex_pdf_emitter) - - SCons.Tool.tex.generate_common(env) - -def exists(env): - SCons.Tool.tex.generate_darwin(env) - return env.Detect('pdflatex') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdftex.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdftex.py deleted file mode 100644 index b6595c1623c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/pdftex.py +++ /dev/null @@ -1,109 +0,0 @@ -"""SCons.Tool.pdftex - -Tool-specific initialization for pdftex. -Generates .pdf files from .tex files - -There normally shouldn't be any need to import this module directly. 
-It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/pdftex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import SCons.Action -import SCons.Util -import SCons.Tool.tex - -PDFTeXAction = None - -# This action might be needed more than once if we are dealing with -# labels and bibtex. 
-PDFLaTeXAction = None - -def PDFLaTeXAuxAction(target = None, source= None, env=None): - result = SCons.Tool.tex.InternalLaTeXAuxAction( PDFLaTeXAction, target, source, env ) - return result - -def PDFTeXLaTeXFunction(target = None, source= None, env=None): - """A builder for TeX and LaTeX that scans the source file to - decide the "flavor" of the source and then executes the appropriate - program.""" - basedir = os.path.split(str(source[0]))[0] - abspath = os.path.abspath(basedir) - - if SCons.Tool.tex.is_LaTeX(source,env,abspath): - result = PDFLaTeXAuxAction(target,source,env) - if result != 0: - SCons.Tool.tex.check_file_error_message(env['PDFLATEX']) - else: - result = PDFTeXAction(target,source,env) - if result != 0: - SCons.Tool.tex.check_file_error_message(env['PDFTEX']) - return result - -PDFTeXLaTeXAction = None - -def generate(env): - """Add Builders and construction variables for pdftex to an Environment.""" - global PDFTeXAction - if PDFTeXAction is None: - PDFTeXAction = SCons.Action.Action('$PDFTEXCOM', '$PDFTEXCOMSTR') - - global PDFLaTeXAction - if PDFLaTeXAction is None: - PDFLaTeXAction = SCons.Action.Action("$PDFLATEXCOM", "$PDFLATEXCOMSTR") - - global PDFTeXLaTeXAction - if PDFTeXLaTeXAction is None: - PDFTeXLaTeXAction = SCons.Action.Action(PDFTeXLaTeXFunction, - strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) - - env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) - - from . 
import pdf - pdf.generate(env) - - bld = env['BUILDERS']['PDF'] - bld.add_action('.tex', PDFTeXLaTeXAction) - bld.add_emitter('.tex', SCons.Tool.tex.tex_pdf_emitter) - - # Add the epstopdf builder after the pdftex builder - # so pdftex is the default for no source suffix - pdf.generate2(env) - - SCons.Tool.tex.generate_common(env) - -def exists(env): - SCons.Tool.tex.generate_darwin(env) - return env.Detect('pdftex') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/qt.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/qt.py deleted file mode 100644 index 351d487f1d7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/qt.py +++ /dev/null @@ -1,374 +0,0 @@ - -"""SCons.Tool.qt - -Tool-specific initialization for Qt. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/qt.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import re -import glob - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Scanner -import SCons.Tool -import SCons.Util -import SCons.Tool.cxx -cplusplus = SCons.Tool.cxx - -class ToolQtWarning(SCons.Warnings.Warning): - pass - -class GeneratedMocFileNotIncluded(ToolQtWarning): - pass - -class QtdirNotFound(ToolQtWarning): - pass - -SCons.Warnings.enableWarningClass(ToolQtWarning) - -header_extensions = [".h", ".hxx", ".hpp", ".hh"] -if SCons.Util.case_sensitive_suffixes('.h', '.H'): - header_extensions.append('.H') - -cxx_suffixes = cplusplus.CXXSuffixes - - -def find_platform_specific_qt_paths(): - """ - find non-standard QT paths - - If the platform does not put QT tools in standard search paths, - the path is expected to be set using QTDIR. SCons violates - the normal rule of not pulling from the user's environment - in this case. However, some test cases try to validate what - happens when QTDIR is unset, so we need to try to make a guess. - - :return: a guess at a path - """ - - # qt_bin_dirs = [] - qt_bin_dir = None - if os.path.isfile('/etc/redhat-release'): - with open('/etc/redhat-release','r') as rr: - lines = rr.readlines() - distro = lines[0].split()[0] - if distro == 'CentOS': - # Centos installs QT under /usr/{lib,lib64}/qt{4,5,-3.3}/bin - # so we need to handle this differently - # qt_bin_dirs = glob.glob('/usr/lib64/qt*/bin') - # TODO: all current Fedoras do the same, need to look deeper here. 
- qt_bin_dir = '/usr/lib64/qt-3.3/bin' - - return qt_bin_dir - - -QT_BIN_DIR = find_platform_specific_qt_paths() - -def checkMocIncluded(target, source, env): - moc = target[0] - cpp = source[0] - # looks like cpp.includes is cleared before the build stage :-( - # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/ - path = SCons.Defaults.CScan.path(env, moc.cwd) - includes = SCons.Defaults.CScan(cpp, env, path) - if moc not in includes: - SCons.Warnings.warn( - GeneratedMocFileNotIncluded, - "Generated moc file '%s' is not included by '%s'" % - (str(moc), str(cpp))) - -def find_file(filename, paths, node_factory): - for dir in paths: - node = node_factory(filename, dir) - if node.rexists(): - return node - return None - -class _Automoc(object): - """ - Callable class, which works as an emitter for Programs, SharedLibraries and - StaticLibraries. - """ - - def __init__(self, objBuilderName): - self.objBuilderName = objBuilderName - - def __call__(self, target, source, env): - """ - Smart autoscan function. Gets the list of objects for the Program - or Lib. Adds objects and builders for the special qt files. - """ - try: - if int(env.subst('$QT_AUTOSCAN')) == 0: - return target, source - except ValueError: - pass - try: - debug = int(env.subst('$QT_DEBUG')) - except ValueError: - debug = 0 - - # some shortcuts used in the scanner - splitext = SCons.Util.splitext - objBuilder = getattr(env, self.objBuilderName) - - # some regular expressions: - # Q_OBJECT detection - q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]') - # cxx and c comment 'eater' - #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)') - # CW: something must be wrong with the regexp. 
See also bug #998222 - # CURRENTLY THERE IS NO TEST CASE FOR THAT - - # The following is kind of hacky to get builders working properly (FIXME) - objBuilderEnv = objBuilder.env - objBuilder.env = env - mocBuilderEnv = env.Moc.env - env.Moc.env = env - - # make a deep copy for the result; MocH objects will be appended - out_sources = source[:] - - for obj in source: - if not obj.has_builder(): - # binary obj file provided - if debug: - print("scons: qt: '%s' seems to be a binary. Discarded." % str(obj)) - continue - cpp = obj.sources[0] - if not splitext(str(cpp))[1] in cxx_suffixes: - if debug: - print("scons: qt: '%s' is no cxx file. Discarded." % str(cpp)) - # c or fortran source - continue - #cpp_contents = comment.sub('', cpp.get_text_contents()) - if debug: - print("scons: qt: Getting contents of %s" % cpp) - cpp_contents = cpp.get_text_contents() - h=None - for h_ext in header_extensions: - # try to find the header file in the corresponding source - # directory - hname = splitext(cpp.name)[0] + h_ext - h = find_file(hname, (cpp.get_dir(),), env.File) - if h: - if debug: - print("scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))) - #h_contents = comment.sub('', h.get_text_contents()) - h_contents = h.get_text_contents() - break - if not h and debug: - print("scons: qt: no header for '%s'." 
% (str(cpp))) - if h and q_object_search.search(h_contents): - # h file with the Q_OBJECT macro found -> add moc_cpp - moc_cpp = env.Moc(h) - moc_o = objBuilder(moc_cpp) - out_sources.append(moc_o) - #moc_cpp.target_scanner = SCons.Defaults.CScan - if debug: - print("scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))) - if cpp and q_object_search.search(cpp_contents): - # cpp file with Q_OBJECT macro found -> add moc - # (to be included in cpp) - moc = env.Moc(cpp) - env.Ignore(moc, moc) - if debug: - print("scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))) - #moc.source_scanner = SCons.Defaults.CScan - # restore the original env attributes (FIXME) - objBuilder.env = objBuilderEnv - env.Moc.env = mocBuilderEnv - - return (target, out_sources) - -AutomocShared = _Automoc('SharedObject') -AutomocStatic = _Automoc('StaticObject') - -def _detect(env): - """Not really safe, but fast method to detect the QT library""" - - QTDIR = env.get('QTDIR',None) - if not QTDIR: - QTDIR = os.environ.get('QTDIR',None) - if not QTDIR: - moc = env.WhereIs('moc') or env.WhereIs('moc',QT_BIN_DIR) - if moc: - QTDIR = os.path.dirname(os.path.dirname(moc)) - SCons.Warnings.warn( - QtdirNotFound, - "Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR) - else: - QTDIR = None - SCons.Warnings.warn( - QtdirNotFound, - "Could not detect qt, using empty QTDIR") - return QTDIR - -def uicEmitter(target, source, env): - adjustixes = SCons.Util.adjustixes - bs = SCons.Util.splitext(str(source[0].name))[0] - bs = os.path.join(str(target[0].get_dir()),bs) - # first target (header) is automatically added by builder - if len(target) < 2: - # second target is implementation - target.append(adjustixes(bs, - env.subst('$QT_UICIMPLPREFIX'), - env.subst('$QT_UICIMPLSUFFIX'))) - if len(target) < 3: - # third target is moc file - target.append(adjustixes(bs, - env.subst('$QT_MOCHPREFIX'), - env.subst('$QT_MOCHSUFFIX'))) - return 
target, source - -def uicScannerFunc(node, env, path): - lookout = [] - lookout.extend(env['CPPPATH']) - lookout.append(str(node.rfile().dir)) - includes = re.findall("(.*?)", node.get_text_contents()) - result = [] - for incFile in includes: - dep = env.FindFile(incFile,lookout) - if dep: - result.append(dep) - return result - -uicScanner = SCons.Scanner.Base(uicScannerFunc, - name = "UicScanner", - node_class = SCons.Node.FS.File, - node_factory = SCons.Node.FS.File, - recursive = 0) - -def generate(env): - """Add Builders and construction variables for qt to an Environment.""" - CLVar = SCons.Util.CLVar - Action = SCons.Action.Action - Builder = SCons.Builder.Builder - - env.SetDefault(QTDIR = _detect(env), - QT_BINPATH = os.path.join('$QTDIR', 'bin'), - QT_CPPPATH = os.path.join('$QTDIR', 'include'), - QT_LIBPATH = os.path.join('$QTDIR', 'lib'), - QT_MOC = os.path.join('$QT_BINPATH','moc'), - QT_UIC = os.path.join('$QT_BINPATH','uic'), - QT_LIB = 'qt', # may be set to qt-mt - - QT_AUTOSCAN = 1, # scan for moc'able sources - - # Some QT specific flags. I don't expect someone wants to - # manipulate those ... - QT_UICIMPLFLAGS = CLVar(''), - QT_UICDECLFLAGS = CLVar(''), - QT_MOCFROMHFLAGS = CLVar(''), - QT_MOCFROMCXXFLAGS = CLVar('-i'), - - # suffixes/prefixes for the headers / sources to generate - QT_UICDECLPREFIX = '', - QT_UICDECLSUFFIX = '.h', - QT_UICIMPLPREFIX = 'uic_', - QT_UICIMPLSUFFIX = '$CXXFILESUFFIX', - QT_MOCHPREFIX = 'moc_', - QT_MOCHSUFFIX = '$CXXFILESUFFIX', - QT_MOCCXXPREFIX = '', - QT_MOCCXXSUFFIX = '.moc', - QT_UISUFFIX = '.ui', - - # Commands for the qt support ... 
- # command to generate header, implementation and moc-file - # from a .ui file - QT_UICCOM = [ - CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'), - CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} ' - '-o ${TARGETS[1]} $SOURCE'), - CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')], - # command to generate meta object information for a class - # declarated in a header - QT_MOCFROMHCOM = ( - '$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'), - # command to generate meta object information for a class - # declarated in a cpp file - QT_MOCFROMCXXCOM = [ - CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'), - Action(checkMocIncluded,None)]) - - # ... and the corresponding builders - uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'), - emitter=uicEmitter, - src_suffix='$QT_UISUFFIX', - suffix='$QT_UICDECLSUFFIX', - prefix='$QT_UICDECLPREFIX', - source_scanner=uicScanner) - mocBld = Builder(action={}, prefix={}, suffix={}) - for h in header_extensions: - act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR') - mocBld.add_action(h, act) - mocBld.prefix[h] = '$QT_MOCHPREFIX' - mocBld.suffix[h] = '$QT_MOCHSUFFIX' - for cxx in cxx_suffixes: - act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR') - mocBld.add_action(cxx, act) - mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX' - mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX' - - # register the builders - env['BUILDERS']['Uic'] = uicBld - env['BUILDERS']['Moc'] = mocBld - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - static_obj.add_src_builder('Uic') - shared_obj.add_src_builder('Uic') - - # We use the emitters of Program / StaticLibrary / SharedLibrary - # to scan for moc'able files - # We can't refer to the builders directly, we have to fetch them - # as Environment attributes because that sets them up to be called - # correctly later by our emitter. 
- env.AppendUnique(PROGEMITTER =[AutomocStatic], - SHLIBEMITTER=[AutomocShared], - LDMODULEEMITTER=[AutomocShared], - LIBEMITTER =[AutomocStatic], - # Of course, we need to link against the qt libraries - CPPPATH=["$QT_CPPPATH"], - LIBPATH=["$QT_LIBPATH"], - LIBS=['$QT_LIB']) - -def exists(env): - return _detect(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rmic.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rmic.py deleted file mode 100644 index 9ff16745e39..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rmic.py +++ /dev/null @@ -1,139 +0,0 @@ -"""SCons.Tool.rmic - -Tool-specific initialization for rmic. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/rmic.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Action -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -from SCons.Tool.JavaCommon import get_java_install_dirs - - -def emit_rmic_classes(target, source, env): - """Create and return lists of Java RMI stub and skeleton - class files to be created from a set of class files. - """ - class_suffix = env.get('JAVACLASSSUFFIX', '.class') - classdir = env.get('JAVACLASSDIR') - - if not classdir: - try: - s = source[0] - except IndexError: - classdir = '.' - else: - try: - classdir = s.attributes.java_classdir - except AttributeError: - classdir = '.' - classdir = env.Dir(classdir).rdir() - if str(classdir) == '.': - c_ = None - else: - c_ = str(classdir) + os.sep - - slist = [] - for src in source: - try: - classname = src.attributes.java_classname - except AttributeError: - classname = str(src) - if c_ and classname[:len(c_)] == c_: - classname = classname[len(c_):] - if class_suffix and classname[:-len(class_suffix)] == class_suffix: - classname = classname[-len(class_suffix):] - s = src.rfile() - s.attributes.java_classdir = classdir - s.attributes.java_classname = classname - slist.append(s) - - stub_suffixes = ['_Stub'] - if env.get('JAVAVERSION') == '1.4': - stub_suffixes.append('_Skel') - - tlist = [] - for s in source: - for suff in stub_suffixes: - fname = s.attributes.java_classname.replace('.', os.sep) + \ - suff + class_suffix - t = target[0].File(fname) - t.attributes.java_lookupdir = target[0] - tlist.append(t) - - return tlist, source - -RMICAction = SCons.Action.Action('$RMICCOM', '$RMICCOMSTR') - -RMICBuilder = 
SCons.Builder.Builder(action = RMICAction, - emitter = emit_rmic_classes, - src_suffix = '$JAVACLASSSUFFIX', - target_factory = SCons.Node.FS.Dir, - source_factory = SCons.Node.FS.File) - -def generate(env): - """Add Builders and construction variables for rmic to an Environment.""" - env['BUILDERS']['RMIC'] = RMICBuilder - - if env['PLATFORM'] == 'win32': - version = env.get('JAVAVERSION', None) - # Ensure that we have a proper path for rmic - paths = get_java_install_dirs('win32', version=version) - rmic = SCons.Tool.find_program_path(env, 'rmic', default_paths=paths) - # print("RMIC: %s"%rmic) - if rmic: - rmic_bin_dir = os.path.dirname(rmic) - env.AppendENVPath('PATH', rmic_bin_dir) - - env['RMIC'] = 'rmic' - env['RMICFLAGS'] = SCons.Util.CLVar('') - env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}' - env['JAVACLASSSUFFIX'] = '.class' - -def exists(env): - # As reported by Jan Nijtmans in issue #2730, the simple - # return env.Detect('rmic') - # doesn't always work during initialization. For now, we - # stop trying to detect an executable (analogous to the - # javac Builder). - # TODO: Come up with a proper detect() routine...and enable it. - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpcgen.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpcgen.py deleted file mode 100644 index 1fb22f124d5..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpcgen.py +++ /dev/null @@ -1,70 +0,0 @@ -"""SCons.Tool.rpcgen - -Tool-specific initialization for RPCGEN tools. - -Three normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/rpcgen.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from SCons.Builder import Builder -import SCons.Util - -cmd = "cd ${SOURCE.dir} && $RPCGEN -%s $RPCGENFLAGS %s -o ${TARGET.abspath} ${SOURCE.file}" - -rpcgen_client = cmd % ('l', '$RPCGENCLIENTFLAGS') -rpcgen_header = cmd % ('h', '$RPCGENHEADERFLAGS') -rpcgen_service = cmd % ('m', '$RPCGENSERVICEFLAGS') -rpcgen_xdr = cmd % ('c', '$RPCGENXDRFLAGS') - -def generate(env): - """Add RPCGEN Builders and construction variables for an Environment.""" - - client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x') - header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x') - service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x') - xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x') - env.Append(BUILDERS={'RPCGenClient' : client, - 'RPCGenHeader' : header, - 'RPCGenService' : service, - 'RPCGenXDR' : xdr}) - env['RPCGEN'] = 'rpcgen' - env['RPCGENFLAGS'] = SCons.Util.CLVar('') - env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('') - env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('') - env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('') - env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('') - -def exists(env): - return env.Detect('rpcgen') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpm.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpm.py deleted file mode 100644 index 39e7b59ae2f..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpm.py +++ /dev/null @@ -1,133 +0,0 @@ -"""SCons.Tool.rpm - -Tool-specific initialization for rpm. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -The rpm tool calls the rpmbuild command. 
The first and only argument should a -tar.gz consisting of the source file and a specfile. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/rpm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import re -import shutil -import subprocess - -import SCons.Builder -import SCons.Node.FS -import SCons.Util -import SCons.Action -import SCons.Defaults - -def get_cmd(source, env): - tar_file_with_included_specfile = source - if SCons.Util.is_List(source): - tar_file_with_included_specfile = source[0] - return "%s %s %s"%(env['RPM'], env['RPMFLAGS'], - tar_file_with_included_specfile.get_abspath()) - -def build_rpm(target, source, env): - # create a temporary rpm build root. 
- tmpdir = os.path.join(os.path.dirname(target[0].get_abspath()), 'rpmtemp') - if os.path.exists(tmpdir): - shutil.rmtree(tmpdir) - - # now create the mandatory rpm directory structure. - for d in ['RPMS', 'SRPMS', 'SPECS', 'BUILD']: - os.makedirs(os.path.join(tmpdir, d)) - - # set the topdir as an rpmflag. - env.Prepend(RPMFLAGS = '--define \'_topdir %s\'' % tmpdir) - - # now call rpmbuild to create the rpm package. - handle = subprocess.Popen(get_cmd(source, env), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=True) - with handle.stdout: - output = SCons.Util.to_str(handle.stdout.read()) - status = handle.wait() - - if status: - raise SCons.Errors.BuildError(node=target[0], - errstr=output, - filename=str(target[0])) - else: - # XXX: assume that LC_ALL=C is set while running rpmbuild - output_files = re.compile('Wrote: (.*)').findall(output) - - for output, input in zip(output_files, target): - rpm_output = os.path.basename(output) - expected = os.path.basename(input.get_path()) - - assert expected == rpm_output, "got %s but expected %s" % (rpm_output, expected) - shutil.copy(output, input.get_abspath()) - - - # cleanup before leaving. 
- shutil.rmtree(tmpdir) - - return status - -def string_rpm(target, source, env): - try: - return env['RPMCOMSTR'] - except KeyError: - return get_cmd(source, env) - -rpmAction = SCons.Action.Action(build_rpm, string_rpm) - -RpmBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$RPMCOM', '$RPMCOMSTR'), - source_scanner = SCons.Defaults.DirScanner, - suffix = '$RPMSUFFIX') - - - -def generate(env): - """Add Builders and construction variables for rpm to an Environment.""" - try: - bld = env['BUILDERS']['Rpm'] - except KeyError: - bld = RpmBuilder - env['BUILDERS']['Rpm'] = bld - - env.SetDefault(RPM = 'LC_ALL=C rpmbuild') - env.SetDefault(RPMFLAGS = SCons.Util.CLVar('-ta')) - env.SetDefault(RPMCOM = rpmAction) - env.SetDefault(RPMSUFFIX = '.rpm') - -def exists(env): - return env.Detect('rpmbuild') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpmutils.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpmutils.py deleted file mode 100644 index 4d8b9beff9a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/rpmutils.py +++ /dev/null @@ -1,548 +0,0 @@ -"""SCons.Tool.rpmutils.py - -RPM specific helper routines for general usage in the test framework -and SCons core modules. - -Since we check for the RPM package target name in several places, -we have to know which machine/system name RPM will use for the current -hardware setup. The following dictionaries and functions try to -mimic the exact naming rules of the RPM source code. -They were directly derived from the file "rpmrc.in" of the version -rpm-4.9.1.3. For updating to a more recent version of RPM, this Python -script can be used standalone. The usage() function below shows the -exact syntax. 
- -""" - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/rpmutils.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -import platform -import subprocess - -import SCons.Util - -# Start of rpmrc dictionaries (Marker, don't change or remove!) 
-os_canon = { - 'AIX' : ['AIX','5'], - 'AmigaOS' : ['AmigaOS','5'], - 'BSD_OS' : ['bsdi','12'], - 'CYGWIN32_95' : ['cygwin32','15'], - 'CYGWIN32_NT' : ['cygwin32','14'], - 'Darwin' : ['darwin','21'], - 'FreeBSD' : ['FreeBSD','8'], - 'HP-UX' : ['hpux10','6'], - 'IRIX' : ['Irix','2'], - 'IRIX64' : ['Irix64','10'], - 'Linux' : ['Linux','1'], - 'Linux/390' : ['OS/390','20'], - 'Linux/ESA' : ['VM/ESA','20'], - 'MacOSX' : ['macosx','21'], - 'MiNT' : ['FreeMiNT','17'], - 'NEXTSTEP' : ['NextStep','11'], - 'OS/390' : ['OS/390','18'], - 'OSF1' : ['osf1','7'], - 'SCO_SV' : ['SCO_SV3.2v5.0.2','9'], - 'SunOS4' : ['SunOS','4'], - 'SunOS5' : ['solaris','3'], - 'UNIX_SV' : ['MP_RAS','16'], - 'VM/ESA' : ['VM/ESA','19'], - 'machten' : ['machten','13'], - 'osf3.2' : ['osf1','7'], - 'osf4.0' : ['osf1','7'], -} - -buildarch_compat = { - 'alpha' : ['noarch'], - 'alphaev5' : ['alpha'], - 'alphaev56' : ['alphaev5'], - 'alphaev6' : ['alphapca56'], - 'alphaev67' : ['alphaev6'], - 'alphapca56' : ['alphaev56'], - 'amd64' : ['x86_64'], - 'armv3l' : ['noarch'], - 'armv4b' : ['noarch'], - 'armv4l' : ['armv3l'], - 'armv4tl' : ['armv4l'], - 'armv5tejl' : ['armv5tel'], - 'armv5tel' : ['armv4tl'], - 'armv6l' : ['armv5tejl'], - 'armv7l' : ['armv6l'], - 'atariclone' : ['m68kmint','noarch'], - 'atarist' : ['m68kmint','noarch'], - 'atariste' : ['m68kmint','noarch'], - 'ataritt' : ['m68kmint','noarch'], - 'athlon' : ['i686'], - 'falcon' : ['m68kmint','noarch'], - 'geode' : ['i586'], - 'hades' : ['m68kmint','noarch'], - 'hppa1.0' : ['parisc'], - 'hppa1.1' : ['hppa1.0'], - 'hppa1.2' : ['hppa1.1'], - 'hppa2.0' : ['hppa1.2'], - 'i386' : ['noarch','fat'], - 'i486' : ['i386'], - 'i586' : ['i486'], - 'i686' : ['i586'], - 'ia32e' : ['x86_64'], - 'ia64' : ['noarch'], - 'm68k' : ['noarch'], - 'milan' : ['m68kmint','noarch'], - 'mips' : ['noarch'], - 'mipsel' : ['noarch'], - 'parisc' : ['noarch'], - 'pentium3' : ['i686'], - 'pentium4' : ['pentium3'], - 'ppc' : ['noarch','fat'], - 'ppc32dy4' : ['noarch'], - 'ppc64' 
: ['noarch','fat'], - 'ppc64iseries' : ['ppc64'], - 'ppc64pseries' : ['ppc64'], - 'ppc8260' : ['noarch'], - 'ppc8560' : ['noarch'], - 'ppciseries' : ['noarch'], - 'ppcpseries' : ['noarch'], - 's390' : ['noarch'], - 's390x' : ['noarch'], - 'sh3' : ['noarch'], - 'sh4' : ['noarch'], - 'sh4a' : ['sh4'], - 'sparc' : ['noarch'], - 'sparc64' : ['sparcv9v'], - 'sparc64v' : ['sparc64'], - 'sparcv8' : ['sparc'], - 'sparcv9' : ['sparcv8'], - 'sparcv9v' : ['sparcv9'], - 'sun4c' : ['noarch'], - 'sun4d' : ['noarch'], - 'sun4m' : ['noarch'], - 'sun4u' : ['noarch'], - 'x86_64' : ['noarch'], -} - -os_compat = { - 'BSD_OS' : ['bsdi'], - 'Darwin' : ['MacOSX'], - 'FreeMiNT' : ['mint','MiNT','TOS'], - 'IRIX64' : ['IRIX'], - 'MiNT' : ['FreeMiNT','mint','TOS'], - 'TOS' : ['FreeMiNT','MiNT','mint'], - 'bsdi4.0' : ['bsdi'], - 'hpux10.00' : ['hpux9.07'], - 'hpux10.01' : ['hpux10.00'], - 'hpux10.10' : ['hpux10.01'], - 'hpux10.20' : ['hpux10.10'], - 'hpux10.30' : ['hpux10.20'], - 'hpux11.00' : ['hpux10.30'], - 'hpux9.05' : ['hpux9.04'], - 'hpux9.07' : ['hpux9.05'], - 'mint' : ['FreeMiNT','MiNT','TOS'], - 'ncr-sysv4.3' : ['ncr-sysv4.2'], - 'osf4.0' : ['osf3.2','osf1'], - 'solaris2.4' : ['solaris2.3'], - 'solaris2.5' : ['solaris2.3','solaris2.4'], - 'solaris2.6' : ['solaris2.3','solaris2.4','solaris2.5'], - 'solaris2.7' : ['solaris2.3','solaris2.4','solaris2.5','solaris2.6'], -} - -arch_compat = { - 'alpha' : ['axp','noarch'], - 'alphaev5' : ['alpha'], - 'alphaev56' : ['alphaev5'], - 'alphaev6' : ['alphapca56'], - 'alphaev67' : ['alphaev6'], - 'alphapca56' : ['alphaev56'], - 'amd64' : ['x86_64','athlon','noarch'], - 'armv3l' : ['noarch'], - 'armv4b' : ['noarch'], - 'armv4l' : ['armv3l'], - 'armv4tl' : ['armv4l'], - 'armv5tejl' : ['armv5tel'], - 'armv5tel' : ['armv4tl'], - 'armv6l' : ['armv5tejl'], - 'armv7l' : ['armv6l'], - 'atariclone' : ['m68kmint','noarch'], - 'atarist' : ['m68kmint','noarch'], - 'atariste' : ['m68kmint','noarch'], - 'ataritt' : ['m68kmint','noarch'], - 'athlon' : ['i686'], 
- 'falcon' : ['m68kmint','noarch'], - 'geode' : ['i586'], - 'hades' : ['m68kmint','noarch'], - 'hppa1.0' : ['parisc'], - 'hppa1.1' : ['hppa1.0'], - 'hppa1.2' : ['hppa1.1'], - 'hppa2.0' : ['hppa1.2'], - 'i370' : ['noarch'], - 'i386' : ['noarch','fat'], - 'i486' : ['i386'], - 'i586' : ['i486'], - 'i686' : ['i586'], - 'ia32e' : ['x86_64','athlon','noarch'], - 'ia64' : ['noarch'], - 'milan' : ['m68kmint','noarch'], - 'mips' : ['noarch'], - 'mipsel' : ['noarch'], - 'osfmach3_i386' : ['i486'], - 'osfmach3_i486' : ['i486','osfmach3_i386'], - 'osfmach3_i586' : ['i586','osfmach3_i486'], - 'osfmach3_i686' : ['i686','osfmach3_i586'], - 'osfmach3_ppc' : ['ppc'], - 'parisc' : ['noarch'], - 'pentium3' : ['i686'], - 'pentium4' : ['pentium3'], - 'powerpc' : ['ppc'], - 'powerppc' : ['ppc'], - 'ppc' : ['rs6000'], - 'ppc32dy4' : ['ppc'], - 'ppc64' : ['ppc'], - 'ppc64iseries' : ['ppc64'], - 'ppc64pseries' : ['ppc64'], - 'ppc8260' : ['ppc'], - 'ppc8560' : ['ppc'], - 'ppciseries' : ['ppc'], - 'ppcpseries' : ['ppc'], - 'rs6000' : ['noarch','fat'], - 's390' : ['noarch'], - 's390x' : ['s390','noarch'], - 'sh3' : ['noarch'], - 'sh4' : ['noarch'], - 'sh4a' : ['sh4'], - 'sparc' : ['noarch'], - 'sparc64' : ['sparcv9'], - 'sparc64v' : ['sparc64'], - 'sparcv8' : ['sparc'], - 'sparcv9' : ['sparcv8'], - 'sparcv9v' : ['sparcv9'], - 'sun4c' : ['sparc'], - 'sun4d' : ['sparc'], - 'sun4m' : ['sparc'], - 'sun4u' : ['sparc64'], - 'x86_64' : ['amd64','athlon','noarch'], -} - -buildarchtranslate = { - 'alphaev5' : ['alpha'], - 'alphaev56' : ['alpha'], - 'alphaev6' : ['alpha'], - 'alphaev67' : ['alpha'], - 'alphapca56' : ['alpha'], - 'amd64' : ['x86_64'], - 'armv3l' : ['armv3l'], - 'armv4b' : ['armv4b'], - 'armv4l' : ['armv4l'], - 'armv4tl' : ['armv4tl'], - 'armv5tejl' : ['armv5tejl'], - 'armv5tel' : ['armv5tel'], - 'armv6l' : ['armv6l'], - 'armv7l' : ['armv7l'], - 'atariclone' : ['m68kmint'], - 'atarist' : ['m68kmint'], - 'atariste' : ['m68kmint'], - 'ataritt' : ['m68kmint'], - 'athlon' : ['i386'], - 
'falcon' : ['m68kmint'], - 'geode' : ['i386'], - 'hades' : ['m68kmint'], - 'i386' : ['i386'], - 'i486' : ['i386'], - 'i586' : ['i386'], - 'i686' : ['i386'], - 'ia32e' : ['x86_64'], - 'ia64' : ['ia64'], - 'milan' : ['m68kmint'], - 'osfmach3_i386' : ['i386'], - 'osfmach3_i486' : ['i386'], - 'osfmach3_i586' : ['i386'], - 'osfmach3_i686' : ['i386'], - 'osfmach3_ppc' : ['ppc'], - 'pentium3' : ['i386'], - 'pentium4' : ['i386'], - 'powerpc' : ['ppc'], - 'powerppc' : ['ppc'], - 'ppc32dy4' : ['ppc'], - 'ppc64iseries' : ['ppc64'], - 'ppc64pseries' : ['ppc64'], - 'ppc8260' : ['ppc'], - 'ppc8560' : ['ppc'], - 'ppciseries' : ['ppc'], - 'ppcpseries' : ['ppc'], - 's390' : ['s390'], - 's390x' : ['s390x'], - 'sh3' : ['sh3'], - 'sh4' : ['sh4'], - 'sh4a' : ['sh4'], - 'sparc64v' : ['sparc64'], - 'sparcv8' : ['sparc'], - 'sparcv9' : ['sparc'], - 'sparcv9v' : ['sparc'], - 'sun4c' : ['sparc'], - 'sun4d' : ['sparc'], - 'sun4m' : ['sparc'], - 'sun4u' : ['sparc64'], - 'x86_64' : ['x86_64'], -} - -optflags = { - 'alpha' : ['-O2','-g','-mieee'], - 'alphaev5' : ['-O2','-g','-mieee','-mtune=ev5'], - 'alphaev56' : ['-O2','-g','-mieee','-mtune=ev56'], - 'alphaev6' : ['-O2','-g','-mieee','-mtune=ev6'], - 'alphaev67' : ['-O2','-g','-mieee','-mtune=ev67'], - 'alphapca56' : ['-O2','-g','-mieee','-mtune=pca56'], - 'amd64' : ['-O2','-g'], - 'armv3l' : ['-O2','-g','-march=armv3'], - 'armv4b' : ['-O2','-g','-march=armv4'], - 'armv4l' : ['-O2','-g','-march=armv4'], - 'armv4tl' : ['-O2','-g','-march=armv4t'], - 'armv5tejl' : ['-O2','-g','-march=armv5te'], - 'armv5tel' : ['-O2','-g','-march=armv5te'], - 'armv6l' : ['-O2','-g','-march=armv6'], - 'armv7l' : ['-O2','-g','-march=armv7'], - 'atariclone' : ['-O2','-g','-fomit-frame-pointer'], - 'atarist' : ['-O2','-g','-fomit-frame-pointer'], - 'atariste' : ['-O2','-g','-fomit-frame-pointer'], - 'ataritt' : ['-O2','-g','-fomit-frame-pointer'], - 'athlon' : ['-O2','-g','-march=athlon'], - 'falcon' : ['-O2','-g','-fomit-frame-pointer'], - 'fat' : 
['-O2','-g','-arch','i386','-arch','ppc'], - 'geode' : ['-Os','-g','-m32','-march=geode'], - 'hades' : ['-O2','-g','-fomit-frame-pointer'], - 'hppa1.0' : ['-O2','-g','-mpa-risc-1-0'], - 'hppa1.1' : ['-O2','-g','-mpa-risc-1-0'], - 'hppa1.2' : ['-O2','-g','-mpa-risc-1-0'], - 'hppa2.0' : ['-O2','-g','-mpa-risc-1-0'], - 'i386' : ['-O2','-g','-march=i386','-mtune=i686'], - 'i486' : ['-O2','-g','-march=i486'], - 'i586' : ['-O2','-g','-march=i586'], - 'i686' : ['-O2','-g','-march=i686'], - 'ia32e' : ['-O2','-g'], - 'ia64' : ['-O2','-g'], - 'm68k' : ['-O2','-g','-fomit-frame-pointer'], - 'milan' : ['-O2','-g','-fomit-frame-pointer'], - 'mips' : ['-O2','-g'], - 'mipsel' : ['-O2','-g'], - 'parisc' : ['-O2','-g','-mpa-risc-1-0'], - 'pentium3' : ['-O2','-g','-march=pentium3'], - 'pentium4' : ['-O2','-g','-march=pentium4'], - 'ppc' : ['-O2','-g','-fsigned-char'], - 'ppc32dy4' : ['-O2','-g','-fsigned-char'], - 'ppc64' : ['-O2','-g','-fsigned-char'], - 'ppc8260' : ['-O2','-g','-fsigned-char'], - 'ppc8560' : ['-O2','-g','-fsigned-char'], - 'ppciseries' : ['-O2','-g','-fsigned-char'], - 'ppcpseries' : ['-O2','-g','-fsigned-char'], - 's390' : ['-O2','-g'], - 's390x' : ['-O2','-g'], - 'sh3' : ['-O2','-g'], - 'sh4' : ['-O2','-g','-mieee'], - 'sh4a' : ['-O2','-g','-mieee'], - 'sparc' : ['-O2','-g','-m32','-mtune=ultrasparc'], - 'sparc64' : ['-O2','-g','-m64','-mtune=ultrasparc'], - 'sparc64v' : ['-O2','-g','-m64','-mtune=niagara'], - 'sparcv8' : ['-O2','-g','-m32','-mtune=ultrasparc','-mv8'], - 'sparcv9' : ['-O2','-g','-m32','-mtune=ultrasparc'], - 'sparcv9v' : ['-O2','-g','-m32','-mtune=niagara'], - 'x86_64' : ['-O2','-g'], -} - -arch_canon = { - 'IP' : ['sgi','7'], - 'alpha' : ['alpha','2'], - 'alphaev5' : ['alphaev5','2'], - 'alphaev56' : ['alphaev56','2'], - 'alphaev6' : ['alphaev6','2'], - 'alphaev67' : ['alphaev67','2'], - 'alphapca56' : ['alphapca56','2'], - 'amd64' : ['amd64','1'], - 'armv3l' : ['armv3l','12'], - 'armv4b' : ['armv4b','12'], - 'armv4l' : ['armv4l','12'], - 
'armv5tejl' : ['armv5tejl','12'], - 'armv5tel' : ['armv5tel','12'], - 'armv6l' : ['armv6l','12'], - 'armv7l' : ['armv7l','12'], - 'atariclone' : ['m68kmint','13'], - 'atarist' : ['m68kmint','13'], - 'atariste' : ['m68kmint','13'], - 'ataritt' : ['m68kmint','13'], - 'athlon' : ['athlon','1'], - 'falcon' : ['m68kmint','13'], - 'geode' : ['geode','1'], - 'hades' : ['m68kmint','13'], - 'i370' : ['i370','14'], - 'i386' : ['i386','1'], - 'i486' : ['i486','1'], - 'i586' : ['i586','1'], - 'i686' : ['i686','1'], - 'ia32e' : ['ia32e','1'], - 'ia64' : ['ia64','9'], - 'm68k' : ['m68k','6'], - 'm68kmint' : ['m68kmint','13'], - 'milan' : ['m68kmint','13'], - 'mips' : ['mips','4'], - 'mipsel' : ['mipsel','11'], - 'pentium3' : ['pentium3','1'], - 'pentium4' : ['pentium4','1'], - 'ppc' : ['ppc','5'], - 'ppc32dy4' : ['ppc32dy4','5'], - 'ppc64' : ['ppc64','16'], - 'ppc64iseries' : ['ppc64iseries','16'], - 'ppc64pseries' : ['ppc64pseries','16'], - 'ppc8260' : ['ppc8260','5'], - 'ppc8560' : ['ppc8560','5'], - 'ppciseries' : ['ppciseries','5'], - 'ppcpseries' : ['ppcpseries','5'], - 'rs6000' : ['rs6000','8'], - 's390' : ['s390','14'], - 's390x' : ['s390x','15'], - 'sh' : ['sh','17'], - 'sh3' : ['sh3','17'], - 'sh4' : ['sh4','17'], - 'sh4a' : ['sh4a','17'], - 'sparc' : ['sparc','3'], - 'sparc64' : ['sparc64','2'], - 'sparc64v' : ['sparc64v','2'], - 'sparcv8' : ['sparcv8','3'], - 'sparcv9' : ['sparcv9','3'], - 'sparcv9v' : ['sparcv9v','3'], - 'sun4' : ['sparc','3'], - 'sun4c' : ['sparc','3'], - 'sun4d' : ['sparc','3'], - 'sun4m' : ['sparc','3'], - 'sun4u' : ['sparc64','2'], - 'x86_64' : ['x86_64','1'], - 'xtensa' : ['xtensa','18'], -} - -# End of rpmrc dictionaries (Marker, don't change or remove!) - -def defaultMachine(use_rpm_default=True): - """ Return the canonicalized machine name. 
""" - - if use_rpm_default: - try: - # This should be the most reliable way to get the default arch - rmachine = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip() - rmachine = SCons.Util.to_str(rmachine) - except Exception as e: - # Something went wrong, try again by looking up platform.machine() - return defaultMachine(False) - else: - rmachine = platform.machine() - - # Try to lookup the string in the canon table - if rmachine in arch_canon: - rmachine = arch_canon[rmachine][0] - - return rmachine - -def defaultSystem(): - """ Return the canonicalized system name. """ - rsystem = platform.system() - - # Try to lookup the string in the canon tables - if rsystem in os_canon: - rsystem = os_canon[rsystem][0] - - return rsystem - -def defaultNames(): - """ Return the canonicalized machine and system name. """ - return defaultMachine(), defaultSystem() - -def updateRpmDicts(rpmrc, pyfile): - """ Read the given rpmrc file with RPM definitions and update the - info dictionaries in the file pyfile with it. - The arguments will usually be 'rpmrc.in' from a recent RPM source - tree, and 'rpmutils.py' referring to this script itself. - See also usage() below. - """ - try: - # Read old rpmutils.py file - with open(pyfile,"r") as f: - oldpy = f.readlines() - # Read current rpmrc.in file - with open(rpmrc,"r") as f: - rpm = f.readlines() - # Parse for data - data = {} - # Allowed section names that get parsed - sections = ['optflags', - 'arch_canon', - 'os_canon', - 'buildarchtranslate', - 'arch_compat', - 'os_compat', - 'buildarch_compat'] - for l in rpm: - l = l.rstrip('\n').replace(':',' ') - # Skip comments - if l.lstrip().startswith('#'): - continue - tokens = l.strip().split() - if len(tokens): - key = tokens[0] - if key in sections: - # Have we met this section before? 
- if tokens[0] not in data: - # No, so insert it - data[key] = {} - # Insert data - data[key][tokens[1]] = tokens[2:] - # Write new rpmutils.py file - with open(pyfile,"w") as out: - pm = 0 - for l in oldpy: - if pm: - if l.startswith('# End of rpmrc dictionaries'): - pm = 0 - out.write(l) - else: - out.write(l) - if l.startswith('# Start of rpmrc dictionaries'): - pm = 1 - # Write data sections to single dictionaries - for key, entries in data.items(): - out.write("%s = {\n" % key) - for arch in sorted(entries.keys()): - out.write(" '%s' : ['%s'],\n" % (arch, "','".join(entries[arch]))) - out.write("}\n\n") - except: - pass - -def usage(): - print("rpmutils.py rpmrc.in rpmutils.py") - -def main(): - import sys - - if len(sys.argv) < 3: - usage() - sys.exit(0) - updateRpmDicts(sys.argv[1], sys.argv[2]) - -if __name__ == "__main__": - main() diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgiar.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgiar.py deleted file mode 100644 index c249815c19e..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgiar.py +++ /dev/null @@ -1,68 +0,0 @@ -"""SCons.Tool.sgiar - -Tool-specific initialization for SGI ar (library archive). If CC -exists, static libraries should be built with it, so the prelinker has -a chance to resolve C++ template instantiations. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/sgiar.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -def generate(env): - """Add Builders and construction variables for ar to an Environment.""" - SCons.Tool.createStaticLibBuilder(env) - - if env.Detect('CC'): - env['AR'] = 'CC' - env['ARFLAGS'] = SCons.Util.CLVar('-ar') - env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES' - else: - env['AR'] = 'ar' - env['ARFLAGS'] = SCons.Util.CLVar('r') - env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' - - env['SHLINK'] = '$LINK' - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') - env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.a' - -def exists(env): - return env.Detect('CC') or env.Detect('ar') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgic++.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgic++.py deleted file mode 100644 index 0cb7a4a4c3a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgic++.py +++ /dev/null @@ -1,43 +0,0 @@ -"""SCons.Tool.sgic++ - -Tool-specific initialization for MIPSpro C++ on SGI. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sgic++.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -#forward proxy to the preffered cxx version -from SCons.Tool.sgicxx import * - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgicc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgicc.py deleted file mode 100644 index b602be82910..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgicc.py +++ /dev/null @@ -1,53 +0,0 @@ -"""SCons.Tool.sgicc - -Tool-specific initialization for MIPSPro cc on SGI. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sgicc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -from . 
import cc - -def generate(env): - """Add Builders and construction variables for gcc to an Environment.""" - cc.generate(env) - - env['CXX'] = 'CC' - env['SHOBJSUFFIX'] = '.o' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - -def exists(env): - return env.Detect('cc') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgicxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgicxx.py deleted file mode 100644 index 8fe0441531d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgicxx.py +++ /dev/null @@ -1,61 +0,0 @@ -"""SCons.Tool.sgic++ - -Tool-specific initialization for MIPSpro C++ on SGI. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sgicxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -import SCons.Tool.cxx -cplusplus = SCons.Tool.cxx -#cplusplus = __import__('cxx', globals(), locals(), []) - - -def generate(env): - """Add Builders and construction variables for SGI MIPS C++ to an Environment.""" - - cplusplus.generate(env) - - env['CXX'] = 'CC' - env['CXXFLAGS'] = SCons.Util.CLVar('-LANG:std') - env['SHCXX'] = '$CXX' - env['SHOBJSUFFIX'] = '.o' - env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - -def exists(env): - return env.Detect('CC') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgilink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgilink.py deleted file mode 100644 index d5d6556caab..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sgilink.py +++ /dev/null @@ -1,62 +0,0 @@ -"""SCons.Tool.sgilink - -Tool-specific initialization for the SGI MIPSPro linker on SGI. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sgilink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from . import link - -linkers = ['CC', 'cc'] - -def generate(env): - """Add Builders and construction variables for MIPSPro to an Environment.""" - link.generate(env) - - env['LINK'] = env.Detect(linkers) or 'cc' - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') - - # __RPATH is set to $_RPATH in the platform specification if that - # platform supports it. 
- env['RPATHPREFIX'] = '-rpath ' - env['RPATHSUFFIX'] = '' - env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' - -def exists(env): - return env.Detect(linkers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunar.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunar.py deleted file mode 100644 index af3d6854089..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunar.py +++ /dev/null @@ -1,64 +0,0 @@ -"""engine.SCons.Tool.sunar - -Tool-specific initialization for Solaris (Forte) ar (library archive). If CC -exists, static libraries should be built with it, so that template -instantiations can be resolved. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sunar.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Defaults -import SCons.Tool -import SCons.Util - -def generate(env): - """Add Builders and construction variables for ar to an Environment.""" - SCons.Tool.createStaticLibBuilder(env) - - if env.Detect('CC'): - env['AR'] = 'CC' - env['ARFLAGS'] = SCons.Util.CLVar('-xar') - env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES' - else: - env['AR'] = 'ar' - env['ARFLAGS'] = SCons.Util.CLVar('r') - env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' - - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.a' - -def exists(env): - return env.Detect('CC') or env.Detect('ar') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunc++.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunc++.py deleted file mode 100644 index 5f09d94e425..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunc++.py +++ /dev/null @@ -1,45 +0,0 @@ -"""SCons.Tool.sunc++ - -Tool-specific initialization for C++ on SunOS / Solaris. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sunc++.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -#forward proxy to the preffered cxx version -from SCons.Tool.suncxx import * - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/suncc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/suncc.py deleted file mode 100644 index f40b6d32605..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/suncc.py +++ /dev/null @@ -1,58 +0,0 @@ -"""SCons.Tool.suncc - -Tool-specific initialization for Sun Solaris (Forte) CC and cc. - -There normally shouldn't be any need to import this module directly. 
-It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/suncc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from . import cc - -def generate(env): - """ - Add Builders and construction variables for Forte C and C++ compilers - to an Environment. 
- """ - cc.generate(env) - - env['CXX'] = 'CC' - env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -KPIC') - env['SHOBJPREFIX'] = 'so_' - env['SHOBJSUFFIX'] = '.o' - -def exists(env): - return env.Detect('CC') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/suncxx.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/suncxx.py deleted file mode 100644 index 0c70b680d00..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/suncxx.py +++ /dev/null @@ -1,165 +0,0 @@ -"""SCons.Tool.sunc++ - -Tool-specific initialization for C++ on SunOS / Solaris. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/suncxx.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons - -import os -import re -import subprocess - -from SCons.Util import PY3 -import SCons.Tool.cxx -cplusplus = SCons.Tool.cxx -# cplusplus = __import__('c++', globals(), locals(), []) - -package_info = {} - - -def get_package_info(package_name, pkginfo, pkgchk): - try: - return package_info[package_name] - except KeyError: - version = None - pathname = None - try: - from subprocess import DEVNULL # py3k - except ImportError: - DEVNULL = open(os.devnull, 'wb') - - try: - with open('/var/sadm/install/contents', 'r') as f: - sadm_contents = f.read() - except EnvironmentError: - pass - else: - sadm_re = re.compile(r'^(\S*/bin/CC)(=\S*)? 
%s$' % package_name, re.M) - sadm_match = sadm_re.search(sadm_contents) - if sadm_match: - pathname = os.path.dirname(sadm_match.group(1)) - - try: - popen_args = {'stdout': subprocess.PIPE, - 'stderr': DEVNULL} - if PY3: - popen_args['universal_newlines'] = True - p = subprocess.Popen([pkginfo, '-l', package_name], - **popen_args) - except EnvironmentError: - pass - else: - pkginfo_contents = p.communicate()[0] - if not PY3: - pkginfo_contents.decode() - version_re = re.compile(r'^ *VERSION:\s*(.*)$', re.M) - version_match = version_re.search(pkginfo_contents) - if version_match: - version = version_match.group(1) - - if pathname is None: - try: - popen_args = {'stdout': subprocess.PIPE, - 'stderr': DEVNULL} - if PY3: - popen_args['universal_newlines'] = True - p = subprocess.Popen([pkgchk, '-l', package_name], - **popen_args) - except EnvironmentError: - pass - else: - pkgchk_contents = p.communicate()[0] - if not PY3: - pkgchk_contents.decode() - pathname_re = re.compile(r'^Pathname:\s*(.*/bin/CC)$', re.M) - pathname_match = pathname_re.search(pkgchk_contents) - if pathname_match: - pathname = os.path.dirname(pathname_match.group(1)) - - package_info[package_name] = (pathname, version) - return package_info[package_name] - - -# use the package installer tool "pkg" to figure out where cppc and what -# version of it is installed -def get_cppc(env): - cxx = env.subst('$CXX') - if cxx: - cppcPath = os.path.dirname(cxx) - else: - cppcPath = None - - cppcVersion = None - - pkginfo = env.subst('$PKGINFO') - pkgchk = env.subst('$PKGCHK') - - for package in ['SPROcpl']: - path, version = get_package_info(package, pkginfo, pkgchk) - if path and version: - cppcPath, cppcVersion = path, version - break - - return (cppcPath, 'CC', 'CC', cppcVersion) - - -def generate(env): - """Add Builders and construction variables for SunPRO C++.""" - path, cxx, shcxx, version = get_cppc(env) - if path: - cxx = os.path.join(path, cxx) - shcxx = os.path.join(path, shcxx) - - 
cplusplus.generate(env) - - env['CXX'] = cxx - env['SHCXX'] = shcxx - env['CXXVERSION'] = version - env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -KPIC') - env['SHOBJPREFIX'] = 'so_' - env['SHOBJSUFFIX'] = '.o' - - -def exists(env): - path, cxx, shcxx, version = get_cppc(env) - if path and cxx: - cppc = os.path.join(path, cxx) - if os.path.exists(cppc): - return cppc - return None - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf77.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf77.py deleted file mode 100644 index 2ea544754fc..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf77.py +++ /dev/null @@ -1,63 +0,0 @@ -"""SCons.Tool.sunf77 - -Tool-specific initialization for sunf77, the Sun Studio F77 compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sunf77.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from .FortranCommon import add_all_to_env - -compilers = ['sunf77', 'f77'] - -def generate(env): - """Add Builders and construction variables for sunf77 to an Environment.""" - add_all_to_env(env) - - fcomp = env.Detect(compilers) or 'f77' - env['FORTRAN'] = fcomp - env['F77'] = fcomp - - env['SHFORTRAN'] = '$FORTRAN' - env['SHF77'] = '$F77' - - env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC') - env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -KPIC') - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf90.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf90.py deleted file mode 100644 index d69f8d175b7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf90.py +++ /dev/null @@ -1,64 +0,0 @@ -"""SCons.Tool.sunf90 - -Tool-specific initialization for sunf90, the Sun Studio F90 compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/sunf90.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from .FortranCommon import add_all_to_env - -compilers = ['sunf90', 'f90'] - -def generate(env): - """Add Builders and construction variables for sun f90 compiler to an - Environment.""" - add_all_to_env(env) - - fcomp = env.Detect(compilers) or 'f90' - env['FORTRAN'] = fcomp - env['F90'] = fcomp - - env['SHFORTRAN'] = '$FORTRAN' - env['SHF90'] = '$F90' - - env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC') - env['SHF90FLAGS'] = SCons.Util.CLVar('$F90FLAGS -KPIC') - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf95.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf95.py deleted file mode 100644 index f03c43f6a6a..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunf95.py +++ /dev/null @@ -1,64 +0,0 @@ -"""SCons.Tool.sunf95 - -Tool-specific initialization for sunf95, the Sun Studio F95 compiler. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/sunf95.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Util - -from .FortranCommon import add_all_to_env - -compilers = ['sunf95', 'f95'] - -def generate(env): - """Add Builders and construction variables for sunf95 to an - Environment.""" - add_all_to_env(env) - - fcomp = env.Detect(compilers) or 'f95' - env['FORTRAN'] = fcomp - env['F95'] = fcomp - - env['SHFORTRAN'] = '$FORTRAN' - env['SHF95'] = '$F95' - - env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC') - env['SHF95FLAGS'] = SCons.Util.CLVar('$F95FLAGS -KPIC') - -def exists(env): - return env.Detect(compilers) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunlink.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunlink.py deleted file mode 100644 index 25ab4d4e03e..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/sunlink.py +++ /dev/null @@ -1,80 +0,0 @@ -"""SCons.Tool.sunlink - -Tool-specific initialization for the Sun Solaris (Forte) linker. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/sunlink.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import os.path - -import SCons.Util - -from . import link - -ccLinker = None - -# search for the acc compiler and linker front end - -try: - dirs = os.listdir('/opt') -except (IOError, OSError): - # Not being able to read the directory because it doesn't exist - # (IOError) or isn't readable (OSError) is okay. - dirs = [] - -for d in dirs: - linker = '/opt/' + d + '/bin/CC' - if os.path.exists(linker): - ccLinker = linker - break - -def generate(env): - """Add Builders and construction variables for Forte to an Environment.""" - link.generate(env) - - env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G') - - env['RPATHPREFIX'] = '-R' - env['RPATHSUFFIX'] = '' - env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' - - # Support for versioned libraries - link._setup_versioned_lib_variables(env, tool = 'sunlink', use_soname = True) - env['LINKCALLBACKS'] = link._versioned_lib_callbacks() - -def exists(env): - return ccLinker - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/swig.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/swig.py deleted file mode 100644 index d724500beb3..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/swig.py +++ /dev/null @@ -1,219 +0,0 @@ 
-"""SCons.Tool.swig - -Tool-specific initialization for swig. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" -from __future__ import print_function - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/swig.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import sys -import re -import subprocess - -import SCons.Action -import SCons.Defaults -import SCons.Tool -import SCons.Util -import SCons.Node - -verbose = False - -swigs = [ 'swig', 'swig3.0', 'swig2.0' ] - -SwigAction = SCons.Action.Action('$SWIGCOM', '$SWIGCOMSTR') - -def swigSuffixEmitter(env, source): - if '-c++' in SCons.Util.CLVar(env.subst("$SWIGFLAGS", source=source)): - return '$SWIGCXXFILESUFFIX' - else: - return '$SWIGCFILESUFFIX' - -# Match '%module test', as well as '%module(directors="1") test' -# Also allow for test to be quoted (SWIG permits double quotes, but not single) -# Also allow for the line to have spaces after test if not quoted -_reModule = re.compile(r'%module(\s*\(.*\))?\s+("?)(\S+)\2') - -def _find_modules(src): - """Find all modules referenced by %module lines in `src`, a SWIG .i file. - Returns a list of all modules, and a flag set if SWIG directors have - been requested (SWIG will generate an additional header file in this - case.)""" - directors = 0 - mnames = [] - try: - with open(src) as f: - data = f.read() - matches = _reModule.findall(data) - except IOError: - # If the file's not yet generated, guess the module name from the file stem - matches = [] - mnames.append(os.path.splitext(os.path.basename(src))[0]) - - for m in matches: - mnames.append(m[2]) - directors = directors or 'directors' in m[0] - return mnames, directors - -def _add_director_header_targets(target, env): - # Directors only work with C++ code, not C - suffix = env.subst(env['SWIGCXXFILESUFFIX']) - # For each file ending in SWIGCXXFILESUFFIX, add a new target director - # header by replacing the ending with SWIGDIRECTORSUFFIX. 
- for x in target[:]: - n = x.name - d = x.dir - if n[-len(suffix):] == suffix: - target.append(d.File(n[:-len(suffix)] + env['SWIGDIRECTORSUFFIX'])) - -def _swigEmitter(target, source, env): - swigflags = env.subst("$SWIGFLAGS", target=target, source=source) - flags = SCons.Util.CLVar(swigflags) - for src in source: - src = str(src.rfile()) - mnames = None - if "-python" in flags and "-noproxy" not in flags: - if mnames is None: - mnames, directors = _find_modules(src) - if directors: - _add_director_header_targets(target, env) - python_files = [m + ".py" for m in mnames] - outdir = env.subst('$SWIGOUTDIR', target=target, source=source) - # .py files should be generated in SWIGOUTDIR if specified, - # otherwise in the same directory as the target - if outdir: - python_files = [env.fs.File(os.path.join(outdir, j)) for j in python_files] - else: - python_files = [target[0].dir.File(m) for m in python_files] - target.extend(python_files) - if "-java" in flags: - if mnames is None: - mnames, directors = _find_modules(src) - if directors: - _add_director_header_targets(target, env) - java_files = [[m + ".java", m + "JNI.java"] for m in mnames] - java_files = SCons.Util.flatten(java_files) - outdir = env.subst('$SWIGOUTDIR', target=target, source=source) - if outdir: - java_files = [os.path.join(outdir, j) for j in java_files] - java_files = list(map(env.fs.File, java_files)) - def t_from_s(t, p, s, x): - return t.dir - tsm = SCons.Node._target_from_source_map - tkey = len(tsm) - tsm[tkey] = t_from_s - for jf in java_files: - jf._func_target_from_source = tkey - target.extend(java_files) - return (target, source) - -def _get_swig_version(env, swig): - """Run the SWIG command line tool to get and return the version number""" - version = None - swig = env.subst(swig) - if not swig: - return version - pipe = SCons.Action._subproc(env, SCons.Util.CLVar(swig) + ['-version'], - stdin = 'devnull', - stderr = 'devnull', - stdout = subprocess.PIPE) - if pipe.wait() != 0: - 
return version - - # MAYBE: out = SCons.Util.to_str (pipe.stdout.read()) - with pipe.stdout: - out = SCons.Util.to_str(pipe.stdout.read()) - - match = re.search(r'SWIG Version\s+(\S+).*', out, re.MULTILINE) - if match: - version = match.group(1) - if verbose: - print("Version is: %s" % version) - else: - if verbose: - print("Unable to detect version: [%s]" % out) - - return version - -def generate(env): - """Add Builders and construction variables for swig to an Environment.""" - c_file, cxx_file = SCons.Tool.createCFileBuilders(env) - - c_file.suffix['.i'] = swigSuffixEmitter - cxx_file.suffix['.i'] = swigSuffixEmitter - - c_file.add_action('.i', SwigAction) - c_file.add_emitter('.i', _swigEmitter) - cxx_file.add_action('.i', SwigAction) - cxx_file.add_emitter('.i', _swigEmitter) - - java_file = SCons.Tool.CreateJavaFileBuilder(env) - - java_file.suffix['.i'] = swigSuffixEmitter - - java_file.add_action('.i', SwigAction) - java_file.add_emitter('.i', _swigEmitter) - - from SCons.Platform.mingw import MINGW_DEFAULT_PATHS - from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS - from SCons.Platform.win32 import CHOCO_DEFAULT_PATH - - if sys.platform == 'win32': - swig = SCons.Tool.find_program_path(env, 'swig', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS + CHOCO_DEFAULT_PATH) - if swig: - swig_bin_dir = os.path.dirname(swig) - env.AppendENVPath('PATH', swig_bin_dir) - else: - SCons.Warnings.Warning('swig tool requested, but binary not found in ENV PATH') - - if 'SWIG' not in env: - env['SWIG'] = env.Detect(swigs) or swigs[0] - env['SWIGVERSION'] = _get_swig_version(env, env['SWIG']) - env['SWIGFLAGS'] = SCons.Util.CLVar('') - env['SWIGDIRECTORSUFFIX'] = '_wrap.h' - env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX' - env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX' - env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}' - env['SWIGPATH'] = [] - env['SWIGINCPREFIX'] = '-I' - env['SWIGINCSUFFIX'] = '' - env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, 
SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' - env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES' - -def exists(env): - swig = env.get('SWIG') or env.Detect(['swig']) - return swig - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tar.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tar.py deleted file mode 100644 index 564ea0f8d10..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tar.py +++ /dev/null @@ -1,73 +0,0 @@ -"""SCons.Tool.tar - -Tool-specific initialization for tar. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/tar.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Node.FS -import SCons.Util - -tars = ['tar', 'gtar'] - -TarAction = SCons.Action.Action('$TARCOM', '$TARCOMSTR') - -TarBuilder = SCons.Builder.Builder(action = TarAction, - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARSUFFIX', - multi = 1) - - -def generate(env): - """Add Builders and construction variables for tar to an Environment.""" - try: - bld = env['BUILDERS']['Tar'] - except KeyError: - bld = TarBuilder - env['BUILDERS']['Tar'] = bld - - env['TAR'] = env.Detect(tars) or 'gtar' - env['TARFLAGS'] = SCons.Util.CLVar('-c') - env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES' - env['TARSUFFIX'] = '.tar' - -def exists(env): - return env.Detect(tars) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tex.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tex.py deleted file mode 100644 index c9d9f5522c8..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tex.py +++ /dev/null @@ -1,993 +0,0 @@ -"""SCons.Tool.tex - -Tool-specific initialization for TeX. -Generates .dvi files from .tex files - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -from __future__ import print_function - -__revision__ = "src/engine/SCons/Tool/tex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import re -import shutil -import sys -import platform -import glob - -import SCons.Action -import SCons.Node -import SCons.Node.FS -import SCons.Util -import SCons.Scanner.LaTeX - -Verbose = False - -must_rerun_latex = True - -# these are files that just need to be checked for changes and then rerun latex -check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm'] - -# these are files that require bibtex or makeindex to be run when they change -all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo', '.acn', '.bcf'] - -# -# regular expressions used to search for Latex features -# or outputs that require rerunning latex -# -# search for all .aux files opened by latex (recorded in the .fls file) -openout_aux_re = re.compile(r"OUTPUT *(.*\.aux)") - -# search for all .bcf files opened by latex (recorded in the .fls file) -# for use by biber -openout_bcf_re = re.compile(r"OUTPUT *(.*\.bcf)") - -#printindex_re = re.compile(r"^[^%]*\\printindex", re.MULTILINE) -#printnomenclature_re = re.compile(r"^[^%]*\\printnomenclature", re.MULTILINE) -#printglossary_re = re.compile(r"^[^%]*\\printglossary", re.MULTILINE) - -# search to find rerun warnings -warning_rerun_str = r'(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)' -warning_rerun_re = re.compile(warning_rerun_str, re.MULTILINE) - -# search to find citation rerun warnings -rerun_citations_str = r"^LaTeX Warning:.*\n.*Rerun to get citations correct" -rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE) - -# search to find undefined references or citations warnings -undefined_references_str = r'(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)' -undefined_references_re = re.compile(undefined_references_str, re.MULTILINE) - -# used by the emitter -auxfile_re = re.compile(r".", 
re.MULTILINE) -tableofcontents_re = re.compile(r"^[^%\n]*\\tableofcontents", re.MULTILINE) -makeindex_re = re.compile(r"^[^%\n]*\\makeindex", re.MULTILINE) -bibliography_re = re.compile(r"^[^%\n]*\\bibliography", re.MULTILINE) -bibunit_re = re.compile(r"^[^%\n]*\\begin\{bibunit\}", re.MULTILINE) -multibib_re = re.compile(r"^[^%\n]*\\newcites\{([^\}]*)\}", re.MULTILINE) -addbibresource_re = re.compile(r"^[^%\n]*\\(addbibresource|addglobalbib|addsectionbib)", re.MULTILINE) -listoffigures_re = re.compile(r"^[^%\n]*\\listoffigures", re.MULTILINE) -listoftables_re = re.compile(r"^[^%\n]*\\listoftables", re.MULTILINE) -hyperref_re = re.compile(r"^[^%\n]*\\usepackage.*\{hyperref\}", re.MULTILINE) -makenomenclature_re = re.compile(r"^[^%\n]*\\makenomenclature", re.MULTILINE) -makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE) -makeglossaries_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE) -makeacronyms_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE) -beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE) -regex = r'^[^%\n]*\\newglossary\s*\[([^\]]+)\]?\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}' -newglossary_re = re.compile(regex, re.MULTILINE) -biblatex_re = re.compile(r"^[^%\n]*\\usepackage.*\{biblatex\}", re.MULTILINE) - -newglossary_suffix = [] - -# search to find all files included by Latex -include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE) -includeOnly_re = re.compile(r'^[^%\n]*\\(?:include){([^}]*)}', re.MULTILINE) - -# search to find all graphics files included by Latex -includegraphics_re = re.compile(r'^[^%\n]*\\(?:includegraphics(?:\[[^\]]+\])?){([^}]*)}', re.MULTILINE) - -# search to find all files opened by Latex (recorded in .log file) -openout_re = re.compile(r"OUTPUT *(.*)") - -# list of graphics file extensions for TeX and LaTeX -TexGraphics = SCons.Scanner.LaTeX.TexGraphics -LatexGraphics = SCons.Scanner.LaTeX.LatexGraphics - -# An Action sufficient to 
build any generic tex file. -TeXAction = None - -# An action to build a latex file. This action might be needed more -# than once if we are dealing with labels and bibtex. -LaTeXAction = None - -# An action to run BibTeX on a file. -BibTeXAction = None - -# An action to run Biber on a file. -BiberAction = None - -# An action to run MakeIndex on a file. -MakeIndexAction = None - -# An action to run MakeIndex (for nomencl) on a file. -MakeNclAction = None - -# An action to run MakeIndex (for glossary) on a file. -MakeGlossaryAction = None - -# An action to run MakeIndex (for acronyms) on a file. -MakeAcronymsAction = None - -# An action to run MakeIndex (for newglossary commands) on a file. -MakeNewGlossaryAction = None - -# Used as a return value of modify_env_var if the variable is not set. -_null = SCons.Scanner.LaTeX._null - -modify_env_var = SCons.Scanner.LaTeX.modify_env_var - -def check_file_error_message(utility, filename='log'): - msg = '%s returned an error, check the %s file\n' % (utility, filename) - sys.stdout.write(msg) - -def FindFile(name,suffixes,paths,env,requireExt=False): - if requireExt: - name,ext = SCons.Util.splitext(name) - # if the user gave an extension use it. 
- if ext: - name = name + ext - if Verbose: - print(" searching for '%s' with extensions: " % name,suffixes) - - for path in paths: - testName = os.path.join(path,name) - if Verbose: - print(" look for '%s'" % testName) - if os.path.isfile(testName): - if Verbose: - print(" found '%s'" % testName) - return env.fs.File(testName) - else: - name_ext = SCons.Util.splitext(testName)[1] - if name_ext: - continue - - # if no suffix try adding those passed in - for suffix in suffixes: - testNameExt = testName + suffix - if Verbose: - print(" look for '%s'" % testNameExt) - - if os.path.isfile(testNameExt): - if Verbose: - print(" found '%s'" % testNameExt) - return env.fs.File(testNameExt) - if Verbose: - print(" did not find '%s'" % name) - return None - -def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None): - """A builder for LaTeX files that checks the output in the aux file - and decides how many times to use LaTeXAction, and BibTeXAction.""" - - global must_rerun_latex - - # This routine is called with two actions. In this file for DVI builds - # with LaTeXAction and from the pdflatex.py with PDFLaTeXAction - # set this up now for the case where the user requests a different extension - # for the target filename - if (XXXLaTeXAction == LaTeXAction): - callerSuffix = ".dvi" - else: - callerSuffix = env['PDFSUFFIX'] - - basename = SCons.Util.splitext(str(source[0]))[0] - basedir = os.path.split(str(source[0]))[0] - basefile = os.path.split(str(basename))[1] - abspath = os.path.abspath(basedir) - - targetext = os.path.splitext(str(target[0]))[1] - targetdir = os.path.split(str(target[0]))[0] - - saved_env = {} - for var in SCons.Scanner.LaTeX.LaTeX.env_variables: - saved_env[var] = modify_env_var(env, var, abspath) - - # Create base file names with the target directory since the auxiliary files - # will be made there. That's because the *COM variables have the cd - # command in the prolog. 
We check - # for the existence of files before opening them--even ones like the - # aux file that TeX always creates--to make it possible to write tests - # with stubs that don't necessarily generate all of the same files. - - targetbase = os.path.join(targetdir, basefile) - - # if there is a \makeindex there will be a .idx and thus - # we have to run makeindex at least once to keep the build - # happy even if there is no index. - # Same for glossaries, nomenclature, and acronyms - src_content = source[0].get_text_contents() - run_makeindex = makeindex_re.search(src_content) and not os.path.isfile(targetbase + '.idx') - run_nomenclature = makenomenclature_re.search(src_content) and not os.path.isfile(targetbase + '.nlo') - run_glossary = makeglossary_re.search(src_content) and not os.path.isfile(targetbase + '.glo') - run_glossaries = makeglossaries_re.search(src_content) and not os.path.isfile(targetbase + '.glo') - run_acronyms = makeacronyms_re.search(src_content) and not os.path.isfile(targetbase + '.acn') - - saved_hashes = {} - suffix_nodes = {} - - - for suffix in all_suffixes+sum(newglossary_suffix, []): - theNode = env.fs.File(targetbase + suffix) - suffix_nodes[suffix] = theNode - saved_hashes[suffix] = theNode.get_csig() - - if Verbose: - print("hashes: ",saved_hashes) - - must_rerun_latex = True - - # .aux files already processed by BibTex - already_bibtexed = [] - - # - # routine to update MD5 hash and compare - # - def check_MD5(filenode, suffix): - global must_rerun_latex - # two calls to clear old csig - filenode.clear_memoized_values() - filenode.ninfo = filenode.new_ninfo() - new_md5 = filenode.get_csig() - - if saved_hashes[suffix] == new_md5: - if Verbose: - print("file %s not changed" % (targetbase+suffix)) - return False # unchanged - saved_hashes[suffix] = new_md5 - must_rerun_latex = True - if Verbose: - print("file %s changed, rerunning Latex, new hash = " % (targetbase+suffix), new_md5) - return True # changed - - # generate the file name 
that latex will generate - resultfilename = targetbase + callerSuffix - - count = 0 - - while (must_rerun_latex and count < int(env.subst('$LATEXRETRIES'))) : - result = XXXLaTeXAction(target, source, env) - if result != 0: - return result - - count = count + 1 - - must_rerun_latex = False - # Decide if various things need to be run, or run again. - - # Read the log file to find warnings/errors - logfilename = targetbase + '.log' - logContent = '' - if os.path.isfile(logfilename): - with open(logfilename, "rb") as f: - logContent = f.read().decode(errors='replace') - - - # Read the fls file to find all .aux files - flsfilename = targetbase + '.fls' - flsContent = '' - auxfiles = [] - if os.path.isfile(flsfilename): - with open(flsfilename, "r") as f: - flsContent = f.read() - auxfiles = openout_aux_re.findall(flsContent) - # remove duplicates - dups = {} - for x in auxfiles: - dups[x] = 1 - auxfiles = list(dups.keys()) - - bcffiles = [] - if os.path.isfile(flsfilename): - with open(flsfilename, "r") as f: - flsContent = f.read() - bcffiles = openout_bcf_re.findall(flsContent) - # remove duplicates - dups = {} - for x in bcffiles: - dups[x] = 1 - bcffiles = list(dups.keys()) - - if Verbose: - print("auxfiles ",auxfiles) - print("bcffiles ",bcffiles) - - # Now decide if bibtex will need to be run. - # The information that bibtex reads from the .aux file is - # pass-independent. If we find (below) that the .bbl file is unchanged, - # then the last latex saw a correct bibliography. - # Therefore only do this once - # Go through all .aux files and remember the files already done. 
- for auxfilename in auxfiles: - if auxfilename not in already_bibtexed: - already_bibtexed.append(auxfilename) - target_aux = os.path.join(targetdir, auxfilename) - if os.path.isfile(target_aux): - with open(target_aux, "r") as f: - content = f.read() - if content.find("bibdata") != -1: - if Verbose: - print("Need to run bibtex on ",auxfilename) - bibfile = env.fs.File(SCons.Util.splitext(target_aux)[0]) - result = BibTeXAction(bibfile, bibfile, env) - if result != 0: - check_file_error_message(env['BIBTEX'], 'blg') - must_rerun_latex = True - - # Now decide if biber will need to be run. - # When the backend for biblatex is biber (by choice or default) the - # citation information is put in the .bcf file. - # The information that biber reads from the .bcf file is - # pass-independent. If we find (below) that the .bbl file is unchanged, - # then the last latex saw a correct bibliography. - # Therefore only do this once - # Go through all .bcf files and remember the files already done. - for bcffilename in bcffiles: - if bcffilename not in already_bibtexed: - already_bibtexed.append(bcffilename) - target_bcf = os.path.join(targetdir, bcffilename) - if os.path.isfile(target_bcf): - with open(target_bcf, "r") as f: - content = f.read() - if content.find("bibdata") != -1: - if Verbose: - print("Need to run biber on ",bcffilename) - bibfile = env.fs.File(SCons.Util.splitext(target_bcf)[0]) - result = BiberAction(bibfile, bibfile, env) - if result != 0: - check_file_error_message(env['BIBER'], 'blg') - must_rerun_latex = True - - # Now decide if latex will need to be run again due to index. 
- if check_MD5(suffix_nodes['.idx'],'.idx') or (count == 1 and run_makeindex): - # We must run makeindex - if Verbose: - print("Need to run makeindex") - idxfile = suffix_nodes['.idx'] - result = MakeIndexAction(idxfile, idxfile, env) - if result != 0: - check_file_error_message(env['MAKEINDEX'], 'ilg') - return result - - # TO-DO: need to add a way for the user to extend this list for whatever - # auxiliary files they create in other (or their own) packages - # Harder is case is where an action needs to be called -- that should be rare (I hope?) - - for index in check_suffixes: - check_MD5(suffix_nodes[index],index) - - # Now decide if latex will need to be run again due to nomenclature. - if check_MD5(suffix_nodes['.nlo'],'.nlo') or (count == 1 and run_nomenclature): - # We must run makeindex - if Verbose: - print("Need to run makeindex for nomenclature") - nclfile = suffix_nodes['.nlo'] - result = MakeNclAction(nclfile, nclfile, env) - if result != 0: - check_file_error_message('%s (nomenclature)' % env['MAKENCL'], - 'nlg') - #return result - - # Now decide if latex will need to be run again due to glossary. - if check_MD5(suffix_nodes['.glo'],'.glo') or (count == 1 and run_glossaries) or (count == 1 and run_glossary): - # We must run makeindex - if Verbose: - print("Need to run makeindex for glossary") - glofile = suffix_nodes['.glo'] - result = MakeGlossaryAction(glofile, glofile, env) - if result != 0: - check_file_error_message('%s (glossary)' % env['MAKEGLOSSARY'], - 'glg') - #return result - - # Now decide if latex will need to be run again due to acronyms. 
- if check_MD5(suffix_nodes['.acn'],'.acn') or (count == 1 and run_acronyms): - # We must run makeindex - if Verbose: - print("Need to run makeindex for acronyms") - acrfile = suffix_nodes['.acn'] - result = MakeAcronymsAction(acrfile, acrfile, env) - if result != 0: - check_file_error_message('%s (acronyms)' % env['MAKEACRONYMS'], - 'alg') - return result - - # Now decide if latex will need to be run again due to newglossary command. - for ig in range(len(newglossary_suffix)): - if check_MD5(suffix_nodes[newglossary_suffix[ig][2]],newglossary_suffix[ig][2]) or (count == 1): - # We must run makeindex - if Verbose: - print("Need to run makeindex for newglossary") - newglfile = suffix_nodes[newglossary_suffix[ig][2]] - MakeNewGlossaryAction = SCons.Action.Action("$MAKENEWGLOSSARYCOM ${SOURCE.filebase}%s -s ${SOURCE.filebase}.ist -t ${SOURCE.filebase}%s -o ${SOURCE.filebase}%s" % (newglossary_suffix[ig][2],newglossary_suffix[ig][0],newglossary_suffix[ig][1]), "$MAKENEWGLOSSARYCOMSTR") - - result = MakeNewGlossaryAction(newglfile, newglfile, env) - if result != 0: - check_file_error_message('%s (newglossary)' % env['MAKENEWGLOSSARY'], - newglossary_suffix[ig][0]) - return result - - # Now decide if latex needs to be run yet again to resolve warnings. 
- if warning_rerun_re.search(logContent): - must_rerun_latex = True - if Verbose: - print("rerun Latex due to latex or package rerun warning") - - if rerun_citations_re.search(logContent): - must_rerun_latex = True - if Verbose: - print("rerun Latex due to 'Rerun to get citations correct' warning") - - if undefined_references_re.search(logContent): - must_rerun_latex = True - if Verbose: - print("rerun Latex due to undefined references or citations") - - if (count >= int(env.subst('$LATEXRETRIES')) and must_rerun_latex): - print("reached max number of retries on Latex ,",int(env.subst('$LATEXRETRIES'))) -# end of while loop - - # rename Latex's output to what the target name is - if not (str(target[0]) == resultfilename and os.path.isfile(resultfilename)): - if os.path.isfile(resultfilename): - print("move %s to %s" % (resultfilename, str(target[0]), )) - shutil.move(resultfilename,str(target[0])) - - # Original comment (when TEXPICTS was not restored): - # The TEXPICTS enviroment variable is needed by a dvi -> pdf step - # later on Mac OSX so leave it - # - # It is also used when searching for pictures (implicit dependencies). - # Why not set the variable again in the respective builder instead - # of leaving local modifications in the environment? What if multiple - # latex builds in different directories need different TEXPICTS? 
- for var in SCons.Scanner.LaTeX.LaTeX.env_variables: - if var == 'TEXPICTS': - continue - if saved_env[var] is _null: - try: - del env['ENV'][var] - except KeyError: - pass # was never set - else: - env['ENV'][var] = saved_env[var] - - return result - -def LaTeXAuxAction(target = None, source= None, env=None): - result = InternalLaTeXAuxAction( LaTeXAction, target, source, env ) - return result - -LaTeX_re = re.compile("\\\\document(style|class)") - -def is_LaTeX(flist,env,abspath): - """Scan a file list to decide if it's TeX- or LaTeX-flavored.""" - - # We need to scan files that are included in case the - # \documentclass command is in them. - - # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS'] - savedpath = modify_env_var(env, 'TEXINPUTS', abspath) - paths = env['ENV']['TEXINPUTS'] - if SCons.Util.is_List(paths): - pass - else: - # Split at os.pathsep to convert into absolute path - paths = paths.split(os.pathsep) - - # now that we have the path list restore the env - if savedpath is _null: - try: - del env['ENV']['TEXINPUTS'] - except KeyError: - pass # was never set - else: - env['ENV']['TEXINPUTS'] = savedpath - if Verbose: - print("is_LaTeX search path ",paths) - print("files to search :",flist) - - # Now that we have the search path and file list, check each one - for f in flist: - if Verbose: - print(" checking for Latex source ",str(f)) - - content = f.get_text_contents() - if LaTeX_re.search(content): - if Verbose: - print("file %s is a LaTeX file" % str(f)) - return 1 - if Verbose: - print("file %s is not a LaTeX file" % str(f)) - - # now find included files - inc_files = [ ] - inc_files.extend( include_re.findall(content) ) - if Verbose: - print("files included by '%s': "%str(f),inc_files) - # inc_files is list of file names as given. need to find them - # using TEXINPUTS paths. 
- - # search the included files - for src in inc_files: - srcNode = FindFile(src,['.tex','.ltx','.latex'],paths,env,requireExt=False) - # make this a list since is_LaTeX takes a list. - fileList = [srcNode,] - if Verbose: - print("FindFile found ",srcNode) - if srcNode is not None: - file_test = is_LaTeX(fileList, env, abspath) - - # return on first file that finds latex is needed. - if file_test: - return file_test - - if Verbose: - print(" done scanning ",str(f)) - - return 0 - -def TeXLaTeXFunction(target = None, source= None, env=None): - """A builder for TeX and LaTeX that scans the source file to - decide the "flavor" of the source and then executes the appropriate - program.""" - - # find these paths for use in is_LaTeX to search for included files - basedir = os.path.split(str(source[0]))[0] - abspath = os.path.abspath(basedir) - - if is_LaTeX(source,env,abspath): - result = LaTeXAuxAction(target,source,env) - if result != 0: - check_file_error_message(env['LATEX']) - else: - result = TeXAction(target,source,env) - if result != 0: - check_file_error_message(env['TEX']) - return result - -def TeXLaTeXStrFunction(target = None, source= None, env=None): - """A strfunction for TeX and LaTeX that scans the source file to - decide the "flavor" of the source and then returns the appropriate - command string.""" - if env.GetOption("no_exec"): - - # find these paths for use in is_LaTeX to search for included files - basedir = os.path.split(str(source[0]))[0] - abspath = os.path.abspath(basedir) - - if is_LaTeX(source,env,abspath): - result = env.subst('$LATEXCOM',0,target,source)+" ..." - else: - result = env.subst("$TEXCOM",0,target,source)+" ..." - else: - result = '' - return result - -def tex_eps_emitter(target, source, env): - """An emitter for TeX and LaTeX sources when - executing tex or latex. 
It will accept .ps and .eps - graphics files - """ - (target, source) = tex_emitter_core(target, source, env, TexGraphics) - - return (target, source) - -def tex_pdf_emitter(target, source, env): - """An emitter for TeX and LaTeX sources when - executing pdftex or pdflatex. It will accept graphics - files of types .pdf, .jpg, .png, .gif, and .tif - """ - (target, source) = tex_emitter_core(target, source, env, LatexGraphics) - - return (target, source) - -def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files): - """ For theFile (a Node) update any file_tests and search for graphics files - then find all included files and call ScanFiles recursively for each of them""" - - content = theFile.get_text_contents() - if Verbose: - print(" scanning ",str(theFile)) - - for i in range(len(file_tests_search)): - if file_tests[i][0] is None: - if Verbose: - print("scan i ",i," files_tests[i] ",file_tests[i], file_tests[i][1]) - file_tests[i][0] = file_tests_search[i].search(content) - if Verbose and file_tests[i][0]: - print(" found match for ",file_tests[i][1][-1]) - # for newglossary insert the suffixes in file_tests[i] - if file_tests[i][0] and file_tests[i][1][-1] == 'newglossary': - findresult = file_tests_search[i].findall(content) - for l in range(len(findresult)) : - (file_tests[i][1]).insert(0,'.'+findresult[l][3]) - (file_tests[i][1]).insert(0,'.'+findresult[l][2]) - (file_tests[i][1]).insert(0,'.'+findresult[l][0]) - suffix_list = ['.'+findresult[l][0],'.'+findresult[l][2],'.'+findresult[l][3] ] - newglossary_suffix.append(suffix_list) - if Verbose: - print(" new suffixes for newglossary ",newglossary_suffix) - - - incResult = includeOnly_re.search(content) - if incResult: - aux_files.append(os.path.join(targetdir, incResult.group(1))) - if Verbose: - print(r"\include file names : ", aux_files) - # recursively call this on each of the included files - inc_files = [ ] - inc_files.extend( 
include_re.findall(content) ) - if Verbose: - print("files included by '%s': "%str(theFile),inc_files) - # inc_files is list of file names as given. need to find them - # using TEXINPUTS paths. - - for src in inc_files: - srcNode = FindFile(src,['.tex','.ltx','.latex'],paths,env,requireExt=False) - if srcNode is not None: - file_tests = ScanFiles(srcNode, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files) - if Verbose: - print(" done scanning ",str(theFile)) - return file_tests - -def tex_emitter_core(target, source, env, graphics_extensions): - """An emitter for TeX and LaTeX sources. - For LaTeX sources we try and find the common created files that - are needed on subsequent runs of latex to finish tables of contents, - bibliographies, indices, lists of figures, and hyperlink references. - """ - basename = SCons.Util.splitext(str(source[0]))[0] - basefile = os.path.split(str(basename))[1] - targetdir = os.path.split(str(target[0]))[0] - targetbase = os.path.join(targetdir, basefile) - - basedir = os.path.split(str(source[0]))[0] - abspath = os.path.abspath(basedir) - target[0].attributes.path = abspath - - # - # file names we will make use of in searching the sources and log file - # - emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg', '.alg'] + all_suffixes - auxfilename = targetbase + '.aux' - logfilename = targetbase + '.log' - flsfilename = targetbase + '.fls' - syncfilename = targetbase + '.synctex.gz' - - env.SideEffect(auxfilename,target[0]) - env.SideEffect(logfilename,target[0]) - env.SideEffect(flsfilename,target[0]) - env.SideEffect(syncfilename,target[0]) - if Verbose: - print("side effect :",auxfilename,logfilename,flsfilename,syncfilename) - env.Clean(target[0],auxfilename) - env.Clean(target[0],logfilename) - env.Clean(target[0],flsfilename) - env.Clean(target[0],syncfilename) - - content = source[0].get_text_contents() - - # set up list with the regular expressions - # we use 
to find features used - file_tests_search = [auxfile_re, - makeindex_re, - bibliography_re, - bibunit_re, - multibib_re, - addbibresource_re, - tableofcontents_re, - listoffigures_re, - listoftables_re, - hyperref_re, - makenomenclature_re, - makeglossary_re, - makeglossaries_re, - makeacronyms_re, - beamer_re, - newglossary_re, - biblatex_re ] - # set up list with the file suffixes that need emitting - # when a feature is found - file_tests_suff = [['.aux','aux_file'], - ['.idx', '.ind', '.ilg','makeindex'], - ['.bbl', '.blg','bibliography'], - ['.bbl', '.blg','bibunit'], - ['.bbl', '.blg','multibib'], - ['.bbl', '.blg','.bcf','addbibresource'], - ['.toc','contents'], - ['.lof','figures'], - ['.lot','tables'], - ['.out','hyperref'], - ['.nlo', '.nls', '.nlg','nomenclature'], - ['.glo', '.gls', '.glg','glossary'], - ['.glo', '.gls', '.glg','glossaries'], - ['.acn', '.acr', '.alg','acronyms'], - ['.nav', '.snm', '.out', '.toc','beamer'], - ['newglossary',], - ['.bcf', '.blg','biblatex'] ] - # for newglossary the suffixes are added as we find the command - # build the list of lists - file_tests = [] - for i in range(len(file_tests_search)): - file_tests.append( [None, file_tests_suff[i]] ) - - # TO-DO: need to add a way for the user to extend this list for whatever - # auxiliary files they create in other (or their own) packages - - # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS'] - savedpath = modify_env_var(env, 'TEXINPUTS', abspath) - paths = env['ENV']['TEXINPUTS'] - if SCons.Util.is_List(paths): - pass - else: - # Split at os.pathsep to convert into absolute path - paths = paths.split(os.pathsep) - - # now that we have the path list restore the env - if savedpath is _null: - try: - del env['ENV']['TEXINPUTS'] - except KeyError: - pass # was never set - else: - env['ENV']['TEXINPUTS'] = savedpath - if Verbose: - print("search path ",paths) - - # scan all sources for side effect files - aux_files = [] - file_tests = ScanFiles(source[0], 
target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files) - - for (theSearch,suffix_list) in file_tests: - # add side effects if feature is present.If file is to be generated,add all side effects - if Verbose and theSearch: - print("check side effects for ",suffix_list[-1]) - if theSearch is not None or not source[0].exists(): - file_list = [targetbase,] - # for bibunit we need a list of files - if suffix_list[-1] == 'bibunit': - file_basename = os.path.join(targetdir, 'bu*.aux') - file_list = glob.glob(file_basename) - # remove the suffix '.aux' - for i in range(len(file_list)): - file_list.append(SCons.Util.splitext(file_list[i])[0]) - # for multibib we need a list of files - if suffix_list[-1] == 'multibib': - for multibibmatch in multibib_re.finditer(content): - if Verbose: - print("multibib match ",multibibmatch.group(1)) - if multibibmatch is not None: - baselist = multibibmatch.group(1).split(',') - if Verbose: - print("multibib list ", baselist) - for i in range(len(baselist)): - file_list.append(os.path.join(targetdir, baselist[i])) - # now define the side effects - for file_name in file_list: - for suffix in suffix_list[:-1]: - env.SideEffect(file_name + suffix,target[0]) - if Verbose: - print("side effect tst :",file_name + suffix, " target is ",str(target[0])) - env.Clean(target[0],file_name + suffix) - - for aFile in aux_files: - aFile_base = SCons.Util.splitext(aFile)[0] - env.SideEffect(aFile_base + '.aux',target[0]) - if Verbose: - print("side effect aux :",aFile_base + '.aux') - env.Clean(target[0],aFile_base + '.aux') - # read fls file to get all other files that latex creates and will read on the next pass - # remove files from list that we explicitly dealt with above - if os.path.isfile(flsfilename): - with open(flsfilename, "r") as f: - content = f.read() - out_files = openout_re.findall(content) - myfiles = [auxfilename, logfilename, flsfilename, targetbase+'.dvi',targetbase+'.pdf'] - for filename in 
out_files[:]: - if filename in myfiles: - out_files.remove(filename) - env.SideEffect(out_files,target[0]) - if Verbose: - print("side effect fls :",out_files) - env.Clean(target[0],out_files) - - return (target, source) - - -TeXLaTeXAction = None - -def generate(env): - """Add Builders and construction variables for TeX to an Environment.""" - - global TeXLaTeXAction - if TeXLaTeXAction is None: - TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction, - strfunction=TeXLaTeXStrFunction) - - env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) - - generate_common(env) - - from . import dvi - dvi.generate(env) - - bld = env['BUILDERS']['DVI'] - bld.add_action('.tex', TeXLaTeXAction) - bld.add_emitter('.tex', tex_eps_emitter) - -def generate_darwin(env): - try: - environ = env['ENV'] - except KeyError: - environ = {} - env['ENV'] = environ - - if (platform.system() == 'Darwin'): - try: - ospath = env['ENV']['PATHOSX'] - except: - ospath = None - if ospath: - env.AppendENVPath('PATH', ospath) - -def generate_common(env): - """Add internal Builders and construction variables for LaTeX to an Environment.""" - - # Add OSX system paths so TeX tools can be found - # when a list of tools is given the exists() method is not called - generate_darwin(env) - - # A generic tex file Action, sufficient for all tex files. - global TeXAction - if TeXAction is None: - TeXAction = SCons.Action.Action("$TEXCOM", "$TEXCOMSTR") - - # An Action to build a latex file. This might be needed more - # than once if we are dealing with labels and bibtex. - global LaTeXAction - if LaTeXAction is None: - LaTeXAction = SCons.Action.Action("$LATEXCOM", "$LATEXCOMSTR") - - # Define an action to run BibTeX on a file. - global BibTeXAction - if BibTeXAction is None: - BibTeXAction = SCons.Action.Action("$BIBTEXCOM", "$BIBTEXCOMSTR") - - # Define an action to run Biber on a file. 
- global BiberAction - if BiberAction is None: - BiberAction = SCons.Action.Action("$BIBERCOM", "$BIBERCOMSTR") - - # Define an action to run MakeIndex on a file. - global MakeIndexAction - if MakeIndexAction is None: - MakeIndexAction = SCons.Action.Action("$MAKEINDEXCOM", "$MAKEINDEXCOMSTR") - - # Define an action to run MakeIndex on a file for nomenclatures. - global MakeNclAction - if MakeNclAction is None: - MakeNclAction = SCons.Action.Action("$MAKENCLCOM", "$MAKENCLCOMSTR") - - # Define an action to run MakeIndex on a file for glossaries. - global MakeGlossaryAction - if MakeGlossaryAction is None: - MakeGlossaryAction = SCons.Action.Action("$MAKEGLOSSARYCOM", "$MAKEGLOSSARYCOMSTR") - - # Define an action to run MakeIndex on a file for acronyms. - global MakeAcronymsAction - if MakeAcronymsAction is None: - MakeAcronymsAction = SCons.Action.Action("$MAKEACRONYMSCOM", "$MAKEACRONYMSCOMSTR") - - try: - environ = env['ENV'] - except KeyError: - environ = {} - env['ENV'] = environ - - # Some Linux platforms have pdflatex set up in a way - # that requires that the HOME environment variable be set. - # Add it here if defined. 
- v = os.environ.get('HOME') - if v: - environ['HOME'] = v - - CDCOM = 'cd ' - if platform.system() == 'Windows': - # allow cd command to change drives on Windows - CDCOM = 'cd /D ' - - env['TEX'] = 'tex' - env['TEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') - env['TEXCOM'] = CDCOM + '${TARGET.dir} && $TEX $TEXFLAGS ${SOURCE.file}' - - env['PDFTEX'] = 'pdftex' - env['PDFTEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') - env['PDFTEXCOM'] = CDCOM + '${TARGET.dir} && $PDFTEX $PDFTEXFLAGS ${SOURCE.file}' - - env['LATEX'] = 'latex' - env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') - env['LATEXCOM'] = CDCOM + '${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}' - env['LATEXRETRIES'] = 4 - - env['PDFLATEX'] = 'pdflatex' - env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') - env['PDFLATEXCOM'] = CDCOM + '${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}' - - env['BIBTEX'] = 'bibtex' - env['BIBTEXFLAGS'] = SCons.Util.CLVar('') - env['BIBTEXCOM'] = CDCOM + '${TARGET.dir} && $BIBTEX $BIBTEXFLAGS ${SOURCE.filebase}' - - env['BIBER'] = 'biber' - env['BIBERFLAGS'] = SCons.Util.CLVar('') - env['BIBERCOM'] = CDCOM + '${TARGET.dir} && $BIBER $BIBERFLAGS ${SOURCE.filebase}' - - env['MAKEINDEX'] = 'makeindex' - env['MAKEINDEXFLAGS'] = SCons.Util.CLVar('') - env['MAKEINDEXCOM'] = CDCOM + '${TARGET.dir} && $MAKEINDEX $MAKEINDEXFLAGS ${SOURCE.file}' - - env['MAKEGLOSSARY'] = 'makeindex' - env['MAKEGLOSSARYSTYLE'] = '${SOURCE.filebase}.ist' - env['MAKEGLOSSARYFLAGS'] = SCons.Util.CLVar('-s ${MAKEGLOSSARYSTYLE} -t ${SOURCE.filebase}.glg') - env['MAKEGLOSSARYCOM'] = CDCOM + '${TARGET.dir} && $MAKEGLOSSARY ${SOURCE.filebase}.glo $MAKEGLOSSARYFLAGS -o ${SOURCE.filebase}.gls' - - env['MAKEACRONYMS'] = 'makeindex' - env['MAKEACRONYMSSTYLE'] = '${SOURCE.filebase}.ist' - env['MAKEACRONYMSFLAGS'] = SCons.Util.CLVar('-s ${MAKEACRONYMSSTYLE} -t ${SOURCE.filebase}.alg') - env['MAKEACRONYMSCOM'] = 
CDCOM + '${TARGET.dir} && $MAKEACRONYMS ${SOURCE.filebase}.acn $MAKEACRONYMSFLAGS -o ${SOURCE.filebase}.acr' - - env['MAKENCL'] = 'makeindex' - env['MAKENCLSTYLE'] = 'nomencl.ist' - env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg' - env['MAKENCLCOM'] = CDCOM + '${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls' - - env['MAKENEWGLOSSARY'] = 'makeindex' - env['MAKENEWGLOSSARYCOM'] = CDCOM + '${TARGET.dir} && $MAKENEWGLOSSARY ' - -def exists(env): - generate_darwin(env) - return env.Detect('tex') - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/textfile.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/textfile.py deleted file mode 100644 index 906c1ac4d44..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/textfile.py +++ /dev/null @@ -1,208 +0,0 @@ -# -*- python -*- -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = """ -Textfile/Substfile builder for SCons. - - Create file 'target' which typically is a textfile. The 'source' - may be any combination of strings, Nodes, or lists of same. A - 'linesep' will be put between any part written and defaults to - os.linesep. - - The only difference between the Textfile builder and the Substfile - builder is that strings are converted to Value() nodes for the - former and File() nodes for the latter. To insert files in the - former or strings in the latter, wrap them in a File() or Value(), - respectively. - - The values of SUBST_DICT first have any construction variables - expanded (its keys are not expanded). If a value of SUBST_DICT is - a python callable function, it is called and the result is expanded - as the value. Values are substituted in a "random" order; if any - substitution could be further expanded by another substitution, it - is unpredictable whether the expansion will occur. -""" - -__revision__ = "src/engine/SCons/Tool/textfile.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons - -import os -import re - -from SCons.Node import Node -from SCons.Node.Python import Value -from SCons.Util import is_String, is_Sequence, is_Dict, to_bytes, PY3 - - -if PY3: - TEXTFILE_FILE_WRITE_MODE = 'w' -else: - TEXTFILE_FILE_WRITE_MODE = 'wb' - -LINESEP = '\n' - -def _do_subst(node, subs): - """ - Fetch the node contents and replace all instances of the keys with - their values. For example, if subs is - {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'}, - then all instances of %VERSION% in the file will be replaced with - 1.2345 and so forth. 
- """ - contents = node.get_text_contents() - if subs: - for (k, val) in subs: - contents = re.sub(k, val, contents) - - if 'b' in TEXTFILE_FILE_WRITE_MODE: - try: - contents = bytearray(contents, 'utf-8') - except UnicodeDecodeError: - # contents is already utf-8 encoded python 2 str i.e. a byte array - contents = bytearray(contents) - - return contents - - -def _action(target, source, env): - - # prepare the line separator - linesep = env['LINESEPARATOR'] - if linesep is None: - linesep = LINESEP # os.linesep - elif is_String(linesep): - pass - elif isinstance(linesep, Value): - linesep = linesep.get_text_contents() - else: - raise SCons.Errors.UserError('unexpected type/class for LINESEPARATOR: %s' - % repr(linesep), None) - - if 'b' in TEXTFILE_FILE_WRITE_MODE: - linesep = to_bytes(linesep) - - # create a dictionary to use for the substitutions - if 'SUBST_DICT' not in env: - subs = None # no substitutions - else: - subst_dict = env['SUBST_DICT'] - if is_Dict(subst_dict): - subst_dict = list(subst_dict.items()) - elif is_Sequence(subst_dict): - pass - else: - raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence') - subs = [] - for (k, value) in subst_dict: - if callable(value): - value = value() - if is_String(value): - value = env.subst(value) - else: - value = str(value) - subs.append((k, value)) - - # write the file - try: - if SCons.Util.PY3: - target_file = open(target[0].get_path(), TEXTFILE_FILE_WRITE_MODE, newline='') - else: - target_file = open(target[0].get_path(), TEXTFILE_FILE_WRITE_MODE) - except (OSError, IOError): - raise SCons.Errors.UserError("Can't write target file %s" % target[0]) - - # separate lines by 'linesep' only if linesep is not empty - lsep = None - for line in source: - if lsep: - target_file.write(lsep) - - target_file.write(_do_subst(line, subs)) - lsep = linesep - target_file.close() - - -def _strfunc(target, source, env): - return "Creating '%s'" % target[0] - - -def _convert_list_R(newlist, sources): - for elem 
in sources: - if is_Sequence(elem): - _convert_list_R(newlist, elem) - elif isinstance(elem, Node): - newlist.append(elem) - else: - newlist.append(Value(elem)) - - -def _convert_list(target, source, env): - if len(target) != 1: - raise SCons.Errors.UserError("Only one target file allowed") - newlist = [] - _convert_list_R(newlist, source) - return target, newlist - - -_common_varlist = ['SUBST_DICT', 'LINESEPARATOR'] - -_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX'] -_text_builder = SCons.Builder.Builder( - action=SCons.Action.Action(_action, _strfunc, varlist=_text_varlist), - source_factory=Value, - emitter=_convert_list, - prefix='$TEXTFILEPREFIX', - suffix='$TEXTFILESUFFIX', -) - -_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'TEXTFILESUFFIX'] -_subst_builder = SCons.Builder.Builder( - action=SCons.Action.Action(_action, _strfunc, varlist=_subst_varlist), - source_factory=SCons.Node.FS.File, - emitter=_convert_list, - prefix='$SUBSTFILEPREFIX', - suffix='$SUBSTFILESUFFIX', - src_suffix=['.in'], -) - - -def generate(env): - env['LINESEPARATOR'] = LINESEP # os.linesep - env['BUILDERS']['Textfile'] = _text_builder - env['TEXTFILEPREFIX'] = '' - env['TEXTFILESUFFIX'] = '.txt' - env['BUILDERS']['Substfile'] = _subst_builder - env['SUBSTFILEPREFIX'] = '' - env['SUBSTFILESUFFIX'] = '' - - -def exists(env): - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tlib.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tlib.py deleted file mode 100644 index 935b83fcbb4..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/tlib.py +++ /dev/null @@ -1,53 +0,0 @@ -"""SCons.Tool.tlib - -XXX - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated 
documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/tlib.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Tool -import SCons.Tool.bcc32 -import SCons.Util - -def generate(env): - SCons.Tool.bcc32.findIt('tlib', env) - """Add Builders and construction variables for ar to an Environment.""" - SCons.Tool.createStaticLibBuilder(env) - env['AR'] = 'tlib' - env['ARFLAGS'] = SCons.Util.CLVar('') - env['ARCOM'] = '$AR $TARGET $ARFLAGS /a $SOURCES' - env['LIBPREFIX'] = '' - env['LIBSUFFIX'] = '.lib' - -def exists(env): - return SCons.Tool.bcc32.findIt('tlib', env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/wix.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/wix.py deleted file mode 100644 index 093cbcd946b..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/wix.py +++ /dev/null @@ -1,104 +0,0 @@ -"""SCons.Tool.wix - 
-Tool-specific initialization for wix, the Windows Installer XML Tool. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/wix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import SCons.Builder -import SCons.Action -import os - -def generate(env): - """Add Builders and construction variables for WiX to an Environment.""" - if not exists(env): - return - - env['WIXCANDLEFLAGS'] = ['-nologo'] - env['WIXCANDLEINCLUDE'] = [] - env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}' - - env['WIXLIGHTFLAGS'].append( '-nologo' ) - env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}" - env['WIXSRCSUF'] = '.wxs' - env['WIXOBJSUF'] = '.wixobj' - - object_builder = SCons.Builder.Builder( - action = '$WIXCANDLECOM', - suffix = '$WIXOBJSUF', - src_suffix = '$WIXSRCSUF') - - linker_builder = SCons.Builder.Builder( - action = '$WIXLIGHTCOM', - src_suffix = '$WIXOBJSUF', - src_builder = object_builder) - - env['BUILDERS']['WiX'] = linker_builder - -def exists(env): - env['WIXCANDLE'] = 'candle.exe' - env['WIXLIGHT'] = 'light.exe' - - # try to find the candle.exe and light.exe tools and - # add the install directory to light libpath. - for path in os.environ['PATH'].split(os.pathsep): - if not path: - continue - - # workaround for some weird python win32 bug. - if path[0] == '"' and path[-1:]=='"': - path = path[1:-1] - - # normalize the path - path = os.path.normpath(path) - - # search for the tools in the PATH environment variable - try: - files = os.listdir(path) - if env['WIXCANDLE'] in files and env['WIXLIGHT'] in files: - env.PrependENVPath('PATH', path) - # include appropriate flags if running WiX 2.0 - if 'wixui.wixlib' in files and 'WixUI_en-us.wxl' in files: - env['WIXLIGHTFLAGS'] = [ os.path.join( path, 'wixui.wixlib' ), - '-loc', - os.path.join( path, 'WixUI_en-us.wxl' ) ] - else: - env['WIXLIGHTFLAGS'] = [] - return 1 - except OSError: - pass # ignore this, could be a stale PATH entry. 
- - return None - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/xgettext.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/xgettext.py deleted file mode 100644 index 1544a62ccfc..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/xgettext.py +++ /dev/null @@ -1,357 +0,0 @@ -""" xgettext tool - -Tool specific initialization of `xgettext` tool. -""" - -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Tool/xgettext.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import re -import subprocess -import sys - -import SCons.Action -import SCons.Node.FS -import SCons.Tool -import SCons.Util -from SCons.Builder import BuilderBase -from SCons.Environment import _null -from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS -from SCons.Platform.mingw import MINGW_DEFAULT_PATHS -from SCons.Tool.GettextCommon import _POTargetFactory -from SCons.Tool.GettextCommon import RPaths, _detect_xgettext -from SCons.Tool.GettextCommon import _xgettext_exists - - -############################################################################# -class _CmdRunner(object): - """ Callable object, which runs shell command storing its stdout and stderr to - variables. It also provides `strfunction()` method, which shall be used by - scons Action objects to print command string. """ - - def __init__(self, command, commandstr=None): - self.out = None - self.err = None - self.status = None - self.command = command - self.commandstr = commandstr - - def __call__(self, target, source, env): - kw = { - 'stdin': 'devnull', - 'stdout': subprocess.PIPE, - 'stderr': subprocess.PIPE, - 'universal_newlines': True, - 'shell': True - } - command = env.subst(self.command, target=target, source=source) - proc = SCons.Action._subproc(env, command, **kw) - self.out, self.err = proc.communicate() - self.status = proc.wait() - if self.err: - sys.stderr.write(SCons.Util.UnicodeType(self.err)) - return self.status - - def strfunction(self, target, source, env): - comstr = self.commandstr - if env.subst(comstr, target=target, source=source) == "": - comstr = self.command - s = env.subst(comstr, target=target, source=source) - return s - - -############################################################################# - -############################################################################# -def _update_pot_file(target, source, env): - 
""" Action function for `POTUpdate` builder """ - nop = lambda target, source, env: 0 - - # Save scons cwd and os cwd (NOTE: they may be different. After the job, we - # revert each one to its original state). - save_cwd = env.fs.getcwd() - save_os_cwd = os.getcwd() - chdir = target[0].dir - chdir_str = repr(chdir.get_abspath()) - # Print chdir message (employ SCons.Action.Action for that. It knows better - # than me how to to this correctly). - env.Execute(SCons.Action.Action(nop, "Entering " + chdir_str)) - # Go to target's directory and do our job - env.fs.chdir(chdir, 1) # Go into target's directory - try: - cmd = _CmdRunner('$XGETTEXTCOM', '$XGETTEXTCOMSTR') - action = SCons.Action.Action(cmd, strfunction=cmd.strfunction) - status = action([target[0]], source, env) - except: - # Something went wrong. - env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str)) - # Revert working dirs to previous state and re-throw exception. - env.fs.chdir(save_cwd, 0) - os.chdir(save_os_cwd) - raise - # Print chdir message. - env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str)) - # Revert working dirs to previous state. - env.fs.chdir(save_cwd, 0) - os.chdir(save_os_cwd) - # If the command was not successfull, return error code. - if status: return status - - new_content = cmd.out - - if not new_content: - # When xgettext finds no internationalized messages, no *.pot is created - # (because we don't want to bother translators with empty POT files). - needs_update = False - explain = "no internationalized messages encountered" - else: - if target[0].exists(): - # If the file already exists, it's left unaltered unless its messages - # are outdated (w.r.t. to these recovered by xgettext from sources). 
- old_content = target[0].get_text_contents() - re_cdate = re.compile(r'^"POT-Creation-Date: .*"$[\r\n]?', re.M) - old_content_nocdate = re.sub(re_cdate, "", old_content) - new_content_nocdate = re.sub(re_cdate, "", new_content) - if (old_content_nocdate == new_content_nocdate): - # Messages are up-to-date - needs_update = False - explain = "messages in file found to be up-to-date" - else: - # Messages are outdated - needs_update = True - explain = "messages in file were outdated" - else: - # No POT file found, create new one - needs_update = True - explain = "new file" - if needs_update: - # Print message employing SCons.Action.Action for that. - msg = "Writing " + repr(str(target[0])) + " (" + explain + ")" - env.Execute(SCons.Action.Action(nop, msg)) - f = open(str(target[0]), "w") - f.write(new_content) - f.close() - return 0 - else: - # Print message employing SCons.Action.Action for that. - msg = "Not writing " + repr(str(target[0])) + " (" + explain + ")" - env.Execute(SCons.Action.Action(nop, msg)) - return 0 - - -############################################################################# - -############################################################################# -class _POTBuilder(BuilderBase): - def _execute(self, env, target, source, *args): - if not target: - if 'POTDOMAIN' in env and env['POTDOMAIN']: - domain = env['POTDOMAIN'] - else: - domain = 'messages' - target = [domain] - return BuilderBase._execute(self, env, target, source, *args) - - -############################################################################# - -############################################################################# -def _scan_xgettext_from_files(target, source, env, files=None, path=None): - """ Parses `POTFILES.in`-like file and returns list of extracted file names. 
- """ - if files is None: - return 0 - if not SCons.Util.is_List(files): - files = [files] - - if path is None: - if 'XGETTEXTPATH' in env: - path = env['XGETTEXTPATH'] - else: - path = [] - if not SCons.Util.is_List(path): - path = [path] - - path = SCons.Util.flatten(path) - - dirs = () - for p in path: - if not isinstance(p, SCons.Node.FS.Base): - if SCons.Util.is_String(p): - p = env.subst(p, source=source, target=target) - p = env.arg2nodes(p, env.fs.Dir) - dirs += tuple(p) - # cwd is the default search path (when no path is defined by user) - if not dirs: - dirs = (env.fs.getcwd(),) - - # Parse 'POTFILE.in' files. - re_comment = re.compile(r'^#[^\n\r]*$\r?\n?', re.M) - re_emptyln = re.compile(r'^[ \t\r]*$\r?\n?', re.M) - re_trailws = re.compile(r'[ \t\r]+$') - for f in files: - # Find files in search path $XGETTEXTPATH - if isinstance(f, SCons.Node.FS.Base) and f.rexists(): - contents = f.get_text_contents() - contents = re_comment.sub("", contents) - contents = re_emptyln.sub("", contents) - contents = re_trailws.sub("", contents) - depnames = contents.splitlines() - for depname in depnames: - depfile = SCons.Node.FS.find_file(depname, dirs) - if not depfile: - depfile = env.arg2nodes(depname, dirs[0].File) - env.Depends(target, depfile) - return 0 - - -############################################################################# - -############################################################################# -def _pot_update_emitter(target, source, env): - """ Emitter function for `POTUpdate` builder """ - if 'XGETTEXTFROM' in env: - xfrom = env['XGETTEXTFROM'] - else: - return target, source - if not SCons.Util.is_List(xfrom): - xfrom = [xfrom] - - xfrom = SCons.Util.flatten(xfrom) - - # Prepare list of 'POTFILE.in' files. 
- files = [] - for xf in xfrom: - if not isinstance(xf, SCons.Node.FS.Base): - if SCons.Util.is_String(xf): - # Interpolate variables in strings - xf = env.subst(xf, source=source, target=target) - xf = env.arg2nodes(xf) - files.extend(xf) - if files: - env.Depends(target, files) - _scan_xgettext_from_files(target, source, env, files) - return target, source - - -############################################################################# - -############################################################################# -def _POTUpdateBuilderWrapper(env, target=None, source=_null, **kw): - return env._POTUpdateBuilder(target, source, **kw) - - -############################################################################# - -############################################################################# -def _POTUpdateBuilder(env, **kw): - """ Creates `POTUpdate` builder object """ - kw['action'] = SCons.Action.Action(_update_pot_file, None) - kw['suffix'] = '$POTSUFFIX' - kw['target_factory'] = _POTargetFactory(env, alias='$POTUPDATE_ALIAS').File - kw['emitter'] = _pot_update_emitter - return _POTBuilder(**kw) - - -############################################################################# - -############################################################################# -def generate(env, **kw): - """ Generate `xgettext` tool """ - - if sys.platform == 'win32': - xgettext = SCons.Tool.find_program_path(env, 'xgettext', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if xgettext: - xgettext_bin_dir = os.path.dirname(xgettext) - env.AppendENVPath('PATH', xgettext_bin_dir) - else: - SCons.Warnings.Warning('xgettext tool requested, but binary not found in ENV PATH') - try: - env['XGETTEXT'] = _detect_xgettext(env) - except: - env['XGETTEXT'] = 'xgettext' - # NOTE: sources="$SOURCES" would work as well. However, we use following - # construction to convert absolute paths provided by scons onto paths - # relative to current working dir. 
Note, that scons expands $SOURCE(S) to - # absolute paths for sources $SOURCE(s) outside of current subtree (e.g. in - # "../"). With source=$SOURCE these absolute paths would be written to the - # resultant *.pot file (and its derived *.po files) as references to lines in - # source code (e.g. referring lines in *.c files). Such references would be - # correct (e.g. in poedit) only on machine on which *.pot was generated and - # would be of no use on other hosts (having a copy of source code located - # in different place in filesystem). - sources = '$( ${_concat( "", SOURCES, "", __env__, XgettextRPaths, TARGET' \ - + ', SOURCES)} $)' - - # NOTE: the output from $XGETTEXTCOM command must go to stdout, not to a file. - # This is required by the POTUpdate builder's action. - xgettextcom = '$XGETTEXT $XGETTEXTFLAGS $_XGETTEXTPATHFLAGS' \ - + ' $_XGETTEXTFROMFLAGS -o - ' + sources - - xgettextpathflags = '$( ${_concat( XGETTEXTPATHPREFIX, XGETTEXTPATH' \ - + ', XGETTEXTPATHSUFFIX, __env__, RDirs, TARGET, SOURCES)} $)' - xgettextfromflags = '$( ${_concat( XGETTEXTFROMPREFIX, XGETTEXTFROM' \ - + ', XGETTEXTFROMSUFFIX, __env__, target=TARGET, source=SOURCES)} $)' - - env.SetDefault( - _XGETTEXTDOMAIN='${TARGET.filebase}', - XGETTEXTFLAGS=[], - XGETTEXTCOM=xgettextcom, - XGETTEXTCOMSTR='', - XGETTEXTPATH=[], - XGETTEXTPATHPREFIX='-D', - XGETTEXTPATHSUFFIX='', - XGETTEXTFROM=None, - XGETTEXTFROMPREFIX='-f', - XGETTEXTFROMSUFFIX='', - _XGETTEXTPATHFLAGS=xgettextpathflags, - _XGETTEXTFROMFLAGS=xgettextfromflags, - POTSUFFIX=['.pot'], - POTUPDATE_ALIAS='pot-update', - XgettextRPaths=RPaths(env) - ) - env.Append(BUILDERS={ - '_POTUpdateBuilder': _POTUpdateBuilder(env) - }) - env.AddMethod(_POTUpdateBuilderWrapper, 'POTUpdate') - env.AlwaysBuild(env.Alias('$POTUPDATE_ALIAS')) - - -############################################################################# - -############################################################################# -def exists(env): - """ Check, whether 
the tool exists """ - try: - return _xgettext_exists(env) - except: - return False - -############################################################################# - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/yacc.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/yacc.py deleted file mode 100644 index 0fdd5dd6985..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/yacc.py +++ /dev/null @@ -1,169 +0,0 @@ -"""SCons.Tool.yacc - -Tool-specific initialization for yacc. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Tool/yacc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import sys - -import SCons.Defaults -import SCons.Tool -import SCons.Util -from SCons.Platform.mingw import MINGW_DEFAULT_PATHS -from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS -from SCons.Platform.win32 import CHOCO_DEFAULT_PATH - -YaccAction = SCons.Action.Action("$YACCCOM", "$YACCCOMSTR") - -if sys.platform == 'win32': - BINS = ['bison', 'yacc', 'win_bison'] -else: - BINS = ["bison", "yacc"] - -def _yaccEmitter(target, source, env, ysuf, hsuf): - yaccflags = env.subst("$YACCFLAGS", target=target, source=source) - flags = SCons.Util.CLVar(yaccflags) - targetBase, targetExt = os.path.splitext(SCons.Util.to_String(target[0])) - - if '.ym' in ysuf: # If using Objective-C - target = [targetBase + ".m"] # the extension is ".m". - - - # If -d is specified on the command line, yacc will emit a .h - # or .hpp file with the same name as the .c or .cpp output file. - if '-d' in flags: - target.append(targetBase + env.subst(hsuf, target=target, source=source)) - - # If -g is specified on the command line, yacc will emit a .vcg - # file with the same base name as the .y, .yacc, .ym or .yy file. - if "-g" in flags: - base, ext = os.path.splitext(SCons.Util.to_String(source[0])) - target.append(base + env.subst("$YACCVCGFILESUFFIX")) - - # If -v is specified yacc will create the output debug file - # which is not really source for any process, but should - # be noted and also be cleaned - # Bug #2558 - if "-v" in flags: - env.SideEffect(targetBase+'.output',target[0]) - env.Clean(target[0],targetBase+'.output') - - - - # With --defines and --graph, the name of the file is totally defined - # in the options. 
- fileGenOptions = ["--defines=", "--graph="] - for option in flags: - for fileGenOption in fileGenOptions: - l = len(fileGenOption) - if option[:l] == fileGenOption: - # A file generating option is present, so add the file - # name to the list of targets. - fileName = option[l:].strip() - target.append(fileName) - - return (target, source) - -def yEmitter(target, source, env): - return _yaccEmitter(target, source, env, ['.y', '.yacc'], '$YACCHFILESUFFIX') - -def ymEmitter(target, source, env): - return _yaccEmitter(target, source, env, ['.ym'], '$YACCHFILESUFFIX') - -def yyEmitter(target, source, env): - return _yaccEmitter(target, source, env, ['.yy'], '$YACCHXXFILESUFFIX') - -def get_yacc_path(env, append_paths=False): - """ - Find the path to the yacc tool, searching several possible names - - Only called in the Windows case, so the default_path - can be Windows-specific - - :param env: current construction environment - :param append_paths: if set, add the path to the tool to PATH - :return: path to yacc tool, if found - """ - for prog in BINS: - bin_path = SCons.Tool.find_program_path( - env, - prog, - default_paths=CHOCO_DEFAULT_PATH + MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS ) - if bin_path: - if append_paths: - env.AppendENVPath('PATH', os.path.dirname(bin_path)) - return bin_path - SCons.Warnings.Warning('yacc tool requested, but yacc or bison binary not found in ENV PATH') - - -def generate(env): - """Add Builders and construction variables for yacc to an Environment.""" - c_file, cxx_file = SCons.Tool.createCFileBuilders(env) - - # C - c_file.add_action('.y', YaccAction) - c_file.add_emitter('.y', yEmitter) - - c_file.add_action('.yacc', YaccAction) - c_file.add_emitter('.yacc', yEmitter) - - # Objective-C - c_file.add_action('.ym', YaccAction) - c_file.add_emitter('.ym', ymEmitter) - - # C++ - cxx_file.add_action('.yy', YaccAction) - cxx_file.add_emitter('.yy', yyEmitter) - - if sys.platform == 'win32': - # ignore the return, all we need is for the 
path to be added - _ = get_yacc_path(env, append_paths=True) - - env["YACC"] = env.Detect(BINS) - env['YACCFLAGS'] = SCons.Util.CLVar('') - env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES' - env['YACCHFILESUFFIX'] = '.h' - env['YACCHXXFILESUFFIX'] = '.hpp' - env['YACCVCGFILESUFFIX'] = '.vcg' - -def exists(env): - if sys.platform == 'win32': - return get_yacc_path(env) - else: - return env.Detect(BINS) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/zip.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/zip.py deleted file mode 100644 index 8b7ab769c14..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/zip.py +++ /dev/null @@ -1,92 +0,0 @@ -"""SCons.Tool.zip - -Tool-specific initialization for zip. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Tool/zip.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path - -import SCons.Builder -import SCons.Defaults -import SCons.Node.FS -import SCons.Util - -import zipfile - -zipcompression = zipfile.ZIP_DEFLATED -def zip(target, source, env): - compression = env.get('ZIPCOMPRESSION', 0) - zf = zipfile.ZipFile(str(target[0]), 'w', compression) - for s in source: - if s.isdir(): - for dirpath, dirnames, filenames in os.walk(str(s)): - for fname in filenames: - path = os.path.join(dirpath, fname) - if os.path.isfile(path): - - zf.write(path, os.path.relpath(path, str(env.get('ZIPROOT', '')))) - else: - zf.write(str(s), os.path.relpath(str(s), str(env.get('ZIPROOT', '')))) - zf.close() - -zipAction = SCons.Action.Action(zip, varlist=['ZIPCOMPRESSION']) - -ZipBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$ZIPCOM', '$ZIPCOMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$ZIPSUFFIX', - multi = 1) - - -def generate(env): - """Add Builders and construction variables for zip to an Environment.""" - try: - bld = env['BUILDERS']['Zip'] - except KeyError: - bld = ZipBuilder - env['BUILDERS']['Zip'] = bld - - env['ZIP'] = 'zip' - env['ZIPFLAGS'] = SCons.Util.CLVar('') - env['ZIPCOM'] = zipAction - env['ZIPCOMPRESSION'] = zipcompression - env['ZIPSUFFIX'] = '.zip' - env['ZIPROOT'] = SCons.Util.CLVar('') - -def exists(env): - return True - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Util.py 
b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Util.py deleted file mode 100644 index 0e9006a2d12..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Util.py +++ /dev/null @@ -1,1606 +0,0 @@ -"""SCons.Util - -Various utility functions go here. -""" -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -__revision__ = "src/engine/SCons/Util.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import sys -import copy -import re -import types -import codecs -import pprint -import hashlib - -PY3 = sys.version_info[0] == 3 - -try: - from collections import UserDict, UserList, UserString -except ImportError: - from UserDict import UserDict - from UserList import UserList - from UserString import UserString - -try: - from collections.abc import Iterable, MappingView -except ImportError: - from collections import Iterable - -from collections import OrderedDict - -# Don't "from types import ..." these because we need to get at the -# types module later to look for UnicodeType. - -# Below not used? -# InstanceType = types.InstanceType - -MethodType = types.MethodType -FunctionType = types.FunctionType - -try: - _ = type(unicode) -except NameError: - UnicodeType = str -else: - UnicodeType = unicode - -def dictify(keys, values, result={}): - for k, v in zip(keys, values): - result[k] = v - return result - -_altsep = os.altsep -if _altsep is None and sys.platform == 'win32': - # My ActivePython 2.0.1 doesn't set os.altsep! What gives? - _altsep = '/' -if _altsep: - def rightmost_separator(path, sep): - return max(path.rfind(sep), path.rfind(_altsep)) -else: - def rightmost_separator(path, sep): - return path.rfind(sep) - -# First two from the Python Cookbook, just for completeness. -# (Yeah, yeah, YAGNI...) 
-def containsAny(str, set): - """Check whether sequence str contains ANY of the items in set.""" - for c in set: - if c in str: return 1 - return 0 - -def containsAll(str, set): - """Check whether sequence str contains ALL of the items in set.""" - for c in set: - if c not in str: return 0 - return 1 - -def containsOnly(str, set): - """Check whether sequence str contains ONLY items in set.""" - for c in str: - if c not in set: return 0 - return 1 - -def splitext(path): - """Same as os.path.splitext() but faster.""" - sep = rightmost_separator(path, os.sep) - dot = path.rfind('.') - # An ext is only real if it has at least one non-digit char - if dot > sep and not containsOnly(path[dot:], "0123456789."): - return path[:dot],path[dot:] - else: - return path,"" - -def updrive(path): - """ - Make the drive letter (if any) upper case. - This is useful because Windows is inconsistent on the case - of the drive letter, which can cause inconsistencies when - calculating command signatures. - """ - drive, rest = os.path.splitdrive(path) - if drive: - path = drive.upper() + rest - return path - -class NodeList(UserList): - """This class is almost exactly like a regular list of Nodes - (actually it can hold any object), with one important difference. - If you try to get an attribute from this list, it will return that - attribute from every item in the list. For example: - - >>> someList = NodeList([ ' foo ', ' bar ' ]) - >>> someList.strip() - [ 'foo', 'bar' ] - """ - -# def __init__(self, initlist=None): -# self.data = [] -# # print("TYPE:%s"%type(initlist)) -# if initlist is not None: -# # XXX should this accept an arbitrary sequence? 
-# if type(initlist) == type(self.data): -# self.data[:] = initlist -# elif isinstance(initlist, (UserList, NodeList)): -# self.data[:] = initlist.data[:] -# elif isinstance(initlist, Iterable): -# self.data = list(initlist) -# else: -# self.data = [ initlist,] - - - def __nonzero__(self): - return len(self.data) != 0 - - def __bool__(self): - return self.__nonzero__() - - def __str__(self): - return ' '.join(map(str, self.data)) - - def __iter__(self): - return iter(self.data) - - def __call__(self, *args, **kwargs): - result = [x(*args, **kwargs) for x in self.data] - return self.__class__(result) - - def __getattr__(self, name): - result = [getattr(x, name) for x in self.data] - return self.__class__(result) - - def __getitem__(self, index): - """ - This comes for free on py2, - but py3 slices of NodeList are returning a list - breaking slicing nodelist and refering to - properties and methods on contained object - """ -# return self.__class__(self.data[index]) - - if isinstance(index, slice): - # Expand the slice object using range() - # limited by number of items in self.data - indices = index.indices(len(self.data)) - return self.__class__([self[x] for x in - range(*indices)]) - else: - # Return one item of the tart - return self.data[index] - - -_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$') - -def get_environment_var(varstr): - """Given a string, first determine if it looks like a reference - to a single environment variable, like "$FOO" or "${FOO}". - If so, return that variable with no decorations ("FOO"). 
- If not, return None.""" - mo=_get_env_var.match(to_String(varstr)) - if mo: - var = mo.group(1) - if var[0] == '{': - return var[1:-1] - else: - return var - else: - return None - -class DisplayEngine(object): - print_it = True - def __call__(self, text, append_newline=1): - if not self.print_it: - return - if append_newline: text = text + '\n' - try: - sys.stdout.write(UnicodeType(text)) - except IOError: - # Stdout might be connected to a pipe that has been closed - # by now. The most likely reason for the pipe being closed - # is that the user has press ctrl-c. It this is the case, - # then SCons is currently shutdown. We therefore ignore - # IOError's here so that SCons can continue and shutdown - # properly so that the .sconsign is correctly written - # before SCons exits. - pass - - def set_mode(self, mode): - self.print_it = mode - - -def render_tree(root, child_func, prune=0, margin=[0], visited=None): - """ - Render a tree of nodes into an ASCII tree view. - - :Parameters: - - `root`: the root node of the tree - - `child_func`: the function called to get the children of a node - - `prune`: don't visit the same node twice - - `margin`: the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe. - - `visited`: a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune. 
- """ - - rname = str(root) - - # Initialize 'visited' dict, if required - if visited is None: - visited = {} - - children = child_func(root) - retval = "" - for pipe in margin[:-1]: - if pipe: - retval = retval + "| " - else: - retval = retval + " " - - if rname in visited: - return retval + "+-[" + rname + "]\n" - - retval = retval + "+-" + rname + "\n" - if not prune: - visited = copy.copy(visited) - visited[rname] = 1 - - for i in range(len(children)): - margin.append(i < len(children)-1) - retval = retval + render_tree(children[i], child_func, prune, margin, visited) - margin.pop() - - return retval - -IDX = lambda N: N and 1 or 0 - - -def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited=None): - """ - Print a tree of nodes. This is like render_tree, except it prints - lines directly instead of creating a string representation in memory, - so that huge trees can be printed. - - :Parameters: - - `root` - the root node of the tree - - `child_func` - the function called to get the children of a node - - `prune` - don't visit the same node twice - - `showtags` - print status information to the left of each node line - - `margin` - the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe. - - `visited` - a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune. 
- """ - - rname = str(root) - - - # Initialize 'visited' dict, if required - if visited is None: - visited = {} - - if showtags: - - if showtags == 2: - legend = (' E = exists\n' + - ' R = exists in repository only\n' + - ' b = implicit builder\n' + - ' B = explicit builder\n' + - ' S = side effect\n' + - ' P = precious\n' + - ' A = always build\n' + - ' C = current\n' + - ' N = no clean\n' + - ' H = no cache\n' + - '\n') - sys.stdout.write(legend) - - tags = ['['] - tags.append(' E'[IDX(root.exists())]) - tags.append(' R'[IDX(root.rexists() and not root.exists())]) - tags.append(' BbB'[[0,1][IDX(root.has_explicit_builder())] + - [0,2][IDX(root.has_builder())]]) - tags.append(' S'[IDX(root.side_effect)]) - tags.append(' P'[IDX(root.precious)]) - tags.append(' A'[IDX(root.always_build)]) - tags.append(' C'[IDX(root.is_up_to_date())]) - tags.append(' N'[IDX(root.noclean)]) - tags.append(' H'[IDX(root.nocache)]) - tags.append(']') - - else: - tags = [] - - def MMM(m): - return [" ","| "][m] - margins = list(map(MMM, margin[:-1])) - - children = child_func(root) - - if prune and rname in visited and children: - sys.stdout.write(''.join(tags + margins + ['+-[', rname, ']']) + '\n') - return - - sys.stdout.write(''.join(tags + margins + ['+-', rname]) + '\n') - - visited[rname] = 1 - - if children: - margin.append(1) - idx = IDX(showtags) - for C in children[:-1]: - print_tree(C, child_func, prune, idx, margin, visited) - margin[-1] = 0 - print_tree(children[-1], child_func, prune, idx, margin, visited) - margin.pop() - - - -# Functions for deciding if things are like various types, mainly to -# handle UserDict, UserList and UserString like their underlying types. -# -# Yes, all of this manual testing breaks polymorphism, and the real -# Pythonic way to do all of this would be to just try it and handle the -# exception, but handling the exception when it's not the right type is -# often too slow. - -# We are using the following trick to speed up these -# functions. 
Default arguments are used to take a snapshot of -# the global functions and constants used by these functions. This -# transforms accesses to global variable into local variables -# accesses (i.e. LOAD_FAST instead of LOAD_GLOBAL). - -DictTypes = (dict, UserDict) -ListTypes = (list, UserList) - -try: - # Handle getting dictionary views. - SequenceTypes = (list, tuple, UserList, MappingView) -except NameError: - SequenceTypes = (list, tuple, UserList) - - -# Note that profiling data shows a speed-up when comparing -# explicitly with str and unicode instead of simply comparing -# with basestring. (at least on Python 2.5.1) -try: - StringTypes = (str, unicode, UserString) -except NameError: - StringTypes = (str, UserString) - -# Empirically, it is faster to check explicitly for str and -# unicode than for basestring. -try: - BaseStringTypes = (str, unicode) -except NameError: - BaseStringTypes = (str) - -def is_Dict(obj, isinstance=isinstance, DictTypes=DictTypes): - return isinstance(obj, DictTypes) - -def is_List(obj, isinstance=isinstance, ListTypes=ListTypes): - return isinstance(obj, ListTypes) - -def is_Sequence(obj, isinstance=isinstance, SequenceTypes=SequenceTypes): - return isinstance(obj, SequenceTypes) - -def is_Tuple(obj, isinstance=isinstance, tuple=tuple): - return isinstance(obj, tuple) - -def is_String(obj, isinstance=isinstance, StringTypes=StringTypes): - return isinstance(obj, StringTypes) - -def is_Scalar(obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes): - # Profiling shows that there is an impressive speed-up of 2x - # when explicitly checking for strings instead of just not - # sequence when the argument (i.e. obj) is already a string. - # But, if obj is a not string then it is twice as fast to - # check only for 'not sequence'. The following code therefore - # assumes that the obj argument is a string most of the time. 
- return isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes) - -def do_flatten(sequence, result, isinstance=isinstance, - StringTypes=StringTypes, SequenceTypes=SequenceTypes): - for item in sequence: - if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): - result.append(item) - else: - do_flatten(item, result) - -def flatten(obj, isinstance=isinstance, StringTypes=StringTypes, - SequenceTypes=SequenceTypes, do_flatten=do_flatten): - """Flatten a sequence to a non-nested list. - - Flatten() converts either a single scalar or a nested sequence - to a non-nested list. Note that flatten() considers strings - to be scalars instead of sequences like Python would. - """ - if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes): - return [obj] - result = [] - for item in obj: - if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): - result.append(item) - else: - do_flatten(item, result) - return result - -def flatten_sequence(sequence, isinstance=isinstance, StringTypes=StringTypes, - SequenceTypes=SequenceTypes, do_flatten=do_flatten): - """Flatten a sequence to a non-nested list. - - Same as flatten(), but it does not handle the single scalar - case. This is slightly more efficient when one knows that - the sequence to flatten can not be a scalar. - """ - result = [] - for item in sequence: - if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): - result.append(item) - else: - do_flatten(item, result) - return result - -# Generic convert-to-string functions that abstract away whether or -# not the Python we're executing has Unicode support. The wrapper -# to_String_for_signature() will use a for_signature() method if the -# specified object has one. -# -def to_String(s, - isinstance=isinstance, str=str, - UserString=UserString, BaseStringTypes=BaseStringTypes): - if isinstance(s,BaseStringTypes): - # Early out when already a string! 
- return s - elif isinstance(s, UserString): - # s.data can only be either a unicode or a regular - # string. Please see the UserString initializer. - return s.data - else: - return str(s) - -def to_String_for_subst(s, - isinstance=isinstance, str=str, to_String=to_String, - BaseStringTypes=BaseStringTypes, SequenceTypes=SequenceTypes, - UserString=UserString): - - # Note that the test cases are sorted by order of probability. - if isinstance(s, BaseStringTypes): - return s - elif isinstance(s, SequenceTypes): - return ' '.join([to_String_for_subst(e) for e in s]) - elif isinstance(s, UserString): - # s.data can only be either a unicode or a regular - # string. Please see the UserString initializer. - return s.data - else: - return str(s) - -def to_String_for_signature(obj, to_String_for_subst=to_String_for_subst, - AttributeError=AttributeError): - try: - f = obj.for_signature - except AttributeError: - if isinstance(obj, dict): - # pprint will output dictionary in key sorted order - # with py3.5 the order was randomized. In general depending on dictionary order - # which was undefined until py3.6 (where it's by insertion order) was not wise. - return pprint.pformat(obj, width=1000000) - else: - return to_String_for_subst(obj) - else: - return f() - - -# The SCons "semi-deep" copy. -# -# This makes separate copies of lists (including UserList objects) -# dictionaries (including UserDict objects) and tuples, but just copies -# references to anything else it finds. -# -# A special case is any object that has a __semi_deepcopy__() method, -# which we invoke to create the copy. Currently only used by -# BuilderDict to actually prevent the copy operation (as invalid on that object). -# -# The dispatch table approach used here is a direct rip-off from the -# normal Python copy module. 
- -_semi_deepcopy_dispatch = d = {} - -def semi_deepcopy_dict(x, exclude = [] ): - copy = {} - for key, val in x.items(): - # The regular Python copy.deepcopy() also deepcopies the key, - # as follows: - # - # copy[semi_deepcopy(key)] = semi_deepcopy(val) - # - # Doesn't seem like we need to, but we'll comment it just in case. - if key not in exclude: - copy[key] = semi_deepcopy(val) - return copy -d[dict] = semi_deepcopy_dict - -def _semi_deepcopy_list(x): - return list(map(semi_deepcopy, x)) -d[list] = _semi_deepcopy_list - -def _semi_deepcopy_tuple(x): - return tuple(map(semi_deepcopy, x)) -d[tuple] = _semi_deepcopy_tuple - -def semi_deepcopy(x): - copier = _semi_deepcopy_dispatch.get(type(x)) - if copier: - return copier(x) - else: - if hasattr(x, '__semi_deepcopy__') and callable(x.__semi_deepcopy__): - return x.__semi_deepcopy__() - elif isinstance(x, UserDict): - return x.__class__(semi_deepcopy_dict(x)) - elif isinstance(x, UserList): - return x.__class__(_semi_deepcopy_list(x)) - - return x - - -class Proxy(object): - """A simple generic Proxy class, forwarding all calls to - subject. So, for the benefit of the python newbie, what does - this really mean? Well, it means that you can take an object, let's - call it 'objA', and wrap it in this Proxy class, with a statement - like this - - proxyObj = Proxy(objA), - - Then, if in the future, you do something like this - - x = proxyObj.var1, - - since Proxy does not have a 'var1' attribute (but presumably objA does), - the request actually is equivalent to saying - - x = objA.var1 - - Inherit from this class to create a Proxy. - - Note that, with new-style classes, this does *not* work transparently - for Proxy subclasses that use special .__*__() method names, because - those names are now bound to the class, not the individual instances. 
- You now need to know in advance which .__*__() method names you want - to pass on to the underlying Proxy object, and specifically delegate - their calls like this: - - class Foo(Proxy): - __str__ = Delegate('__str__') - """ - - def __init__(self, subject): - """Wrap an object as a Proxy object""" - self._subject = subject - - def __getattr__(self, name): - """Retrieve an attribute from the wrapped object. If the named - attribute doesn't exist, AttributeError is raised""" - return getattr(self._subject, name) - - def get(self): - """Retrieve the entire wrapped object""" - return self._subject - - def __eq__(self, other): - if issubclass(other.__class__, self._subject.__class__): - return self._subject == other - return self.__dict__ == other.__dict__ - -class Delegate(object): - """A Python Descriptor class that delegates attribute fetches - to an underlying wrapped subject of a Proxy. Typical use: - - class Foo(Proxy): - __str__ = Delegate('__str__') - """ - def __init__(self, attribute): - self.attribute = attribute - def __get__(self, obj, cls): - if isinstance(obj, cls): - return getattr(obj._subject, self.attribute) - else: - return self - -# attempt to load the windows registry module: -can_read_reg = 0 -try: - import winreg - - can_read_reg = 1 - hkey_mod = winreg - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegQueryValueEx = winreg.QueryValueEx - RegError = winreg.error - -except ImportError: - try: - import win32api - import win32con - can_read_reg = 1 - hkey_mod = win32con - - RegOpenKeyEx = win32api.RegOpenKeyEx - RegEnumKey = win32api.RegEnumKey - RegEnumValue = win32api.RegEnumValue - RegQueryValueEx = win32api.RegQueryValueEx - RegError = win32api.error - - except ImportError: - class _NoError(Exception): - pass - RegError = _NoError - - -# Make sure we have a definition of WindowsError so we can -# run platform-independent tests of Windows functionality on -# platforms other than Windows. 
(WindowsError is, in fact, an -# OSError subclass on Windows.) - -class PlainWindowsError(OSError): - pass - -try: - WinError = WindowsError -except NameError: - WinError = PlainWindowsError - - -if can_read_reg: - HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT - HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE - HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER - HKEY_USERS = hkey_mod.HKEY_USERS - - def RegGetValue(root, key): - r"""This utility function returns a value in the registry - without having to open the key first. Only available on - Windows platforms with a version of Python that can read the - registry. Returns the same thing as - SCons.Util.RegQueryValueEx, except you just specify the entire - path to the value, and don't have to bother opening the key - first. So: - - Instead of: - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, - r'SOFTWARE\Microsoft\Windows\CurrentVersion') - out = SCons.Util.RegQueryValueEx(k, - 'ProgramFilesDir') - - You can write: - out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, - r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir') - """ - # I would use os.path.split here, but it's not a filesystem - # path... 
- p = key.rfind('\\') + 1 - keyp = key[:p-1] # -1 to omit trailing slash - val = key[p:] - k = RegOpenKeyEx(root, keyp) - return RegQueryValueEx(k,val) -else: - HKEY_CLASSES_ROOT = None - HKEY_LOCAL_MACHINE = None - HKEY_CURRENT_USER = None - HKEY_USERS = None - - def RegGetValue(root, key): - raise WinError - - def RegOpenKeyEx(root, key): - raise WinError - -if sys.platform == 'win32': - - def WhereIs(file, path=None, pathext=None, reject=[]): - if path is None: - try: - path = os.environ['PATH'] - except KeyError: - return None - if is_String(path): - path = path.split(os.pathsep) - if pathext is None: - try: - pathext = os.environ['PATHEXT'] - except KeyError: - pathext = '.COM;.EXE;.BAT;.CMD' - if is_String(pathext): - pathext = pathext.split(os.pathsep) - for ext in pathext: - if ext.lower() == file[-len(ext):].lower(): - pathext = [''] - break - if not is_List(reject) and not is_Tuple(reject): - reject = [reject] - for dir in path: - f = os.path.join(dir, file) - for ext in pathext: - fext = f + ext - if os.path.isfile(fext): - try: - reject.index(fext) - except ValueError: - return os.path.normpath(fext) - continue - return None - -elif os.name == 'os2': - - def WhereIs(file, path=None, pathext=None, reject=[]): - if path is None: - try: - path = os.environ['PATH'] - except KeyError: - return None - if is_String(path): - path = path.split(os.pathsep) - if pathext is None: - pathext = ['.exe', '.cmd'] - for ext in pathext: - if ext.lower() == file[-len(ext):].lower(): - pathext = [''] - break - if not is_List(reject) and not is_Tuple(reject): - reject = [reject] - for dir in path: - f = os.path.join(dir, file) - for ext in pathext: - fext = f + ext - if os.path.isfile(fext): - try: - reject.index(fext) - except ValueError: - return os.path.normpath(fext) - continue - return None - -else: - - def WhereIs(file, path=None, pathext=None, reject=[]): - import stat - if path is None: - try: - path = os.environ['PATH'] - except KeyError: - return None - if 
is_String(path): - path = path.split(os.pathsep) - if not is_List(reject) and not is_Tuple(reject): - reject = [reject] - for d in path: - f = os.path.join(d, file) - if os.path.isfile(f): - try: - st = os.stat(f) - except OSError: - # os.stat() raises OSError, not IOError if the file - # doesn't exist, so in this case we let IOError get - # raised so as to not mask possibly serious disk or - # network issues. - continue - if stat.S_IMODE(st[stat.ST_MODE]) & 0o111: - try: - reject.index(f) - except ValueError: - return os.path.normpath(f) - continue - return None - -def PrependPath(oldpath, newpath, sep = os.pathsep, - delete_existing=1, canonicalize=None): - """This prepends newpath elements to the given oldpath. Will only - add any particular path once (leaving the first one it encounters - and ignoring the rest, to preserve path order), and will - os.path.normpath and os.path.normcase all paths to help assure - this. This can also handle the case where the given old path - variable is a list instead of a string, in which case a list will - be returned instead of a string. - - Example: - Old Path: "/foo/bar:/foo" - New Path: "/biz/boom:/foo" - Result: "/biz/boom:/foo:/foo/bar" - - If delete_existing is 0, then adding a path that exists will - not move it to the beginning; it will stay where it is in the - list. - - If canonicalize is not None, it is applied to each element of - newpath before use. 
- """ - - orig = oldpath - is_list = 1 - paths = orig - if not is_List(orig) and not is_Tuple(orig): - paths = paths.split(sep) - is_list = 0 - - if is_String(newpath): - newpaths = newpath.split(sep) - elif not is_List(newpath) and not is_Tuple(newpath): - newpaths = [ newpath ] # might be a Dir - else: - newpaths = newpath - - if canonicalize: - newpaths=list(map(canonicalize, newpaths)) - - if not delete_existing: - # First uniquify the old paths, making sure to - # preserve the first instance (in Unix/Linux, - # the first one wins), and remembering them in normpaths. - # Then insert the new paths at the head of the list - # if they're not already in the normpaths list. - result = [] - normpaths = [] - for path in paths: - if not path: - continue - normpath = os.path.normpath(os.path.normcase(path)) - if normpath not in normpaths: - result.append(path) - normpaths.append(normpath) - newpaths.reverse() # since we're inserting at the head - for path in newpaths: - if not path: - continue - normpath = os.path.normpath(os.path.normcase(path)) - if normpath not in normpaths: - result.insert(0, path) - normpaths.append(normpath) - paths = result - - else: - newpaths = newpaths + paths # prepend new paths - - normpaths = [] - paths = [] - # now we add them only if they are unique - for path in newpaths: - normpath = os.path.normpath(os.path.normcase(path)) - if path and normpath not in normpaths: - paths.append(path) - normpaths.append(normpath) - - if is_list: - return paths - else: - return sep.join(paths) - -def AppendPath(oldpath, newpath, sep = os.pathsep, - delete_existing=1, canonicalize=None): - """This appends new path elements to the given old path. Will - only add any particular path once (leaving the last one it - encounters and ignoring the rest, to preserve path order), and - will os.path.normpath and os.path.normcase all paths to help - assure this. 
This can also handle the case where the given old - path variable is a list instead of a string, in which case a list - will be returned instead of a string. - - Example: - Old Path: "/foo/bar:/foo" - New Path: "/biz/boom:/foo" - Result: "/foo/bar:/biz/boom:/foo" - - If delete_existing is 0, then adding a path that exists - will not move it to the end; it will stay where it is in the list. - - If canonicalize is not None, it is applied to each element of - newpath before use. - """ - - orig = oldpath - is_list = 1 - paths = orig - if not is_List(orig) and not is_Tuple(orig): - paths = paths.split(sep) - is_list = 0 - - if is_String(newpath): - newpaths = newpath.split(sep) - elif not is_List(newpath) and not is_Tuple(newpath): - newpaths = [ newpath ] # might be a Dir - else: - newpaths = newpath - - if canonicalize: - newpaths=list(map(canonicalize, newpaths)) - - if not delete_existing: - # add old paths to result, then - # add new paths if not already present - # (I thought about using a dict for normpaths for speed, - # but it's not clear hashing the strings would be faster - # than linear searching these typically short lists.) - result = [] - normpaths = [] - for path in paths: - if not path: - continue - result.append(path) - normpaths.append(os.path.normpath(os.path.normcase(path))) - for path in newpaths: - if not path: - continue - normpath = os.path.normpath(os.path.normcase(path)) - if normpath not in normpaths: - result.append(path) - normpaths.append(normpath) - paths = result - else: - # start w/ new paths, add old ones if not present, - # then reverse. 
- newpaths = paths + newpaths # append new paths - newpaths.reverse() - - normpaths = [] - paths = [] - # now we add them only if they are unique - for path in newpaths: - normpath = os.path.normpath(os.path.normcase(path)) - if path and normpath not in normpaths: - paths.append(path) - normpaths.append(normpath) - paths.reverse() - - if is_list: - return paths - else: - return sep.join(paths) - -def AddPathIfNotExists(env_dict, key, path, sep=os.pathsep): - """This function will take 'key' out of the dictionary - 'env_dict', then add the path 'path' to that key if it is not - already there. This treats the value of env_dict[key] as if it - has a similar format to the PATH variable...a list of paths - separated by tokens. The 'path' will get added to the list if it - is not already there.""" - try: - is_list = 1 - paths = env_dict[key] - if not is_List(env_dict[key]): - paths = paths.split(sep) - is_list = 0 - if os.path.normcase(path) not in list(map(os.path.normcase, paths)): - paths = [ path ] + paths - if is_list: - env_dict[key] = paths - else: - env_dict[key] = sep.join(paths) - except KeyError: - env_dict[key] = path - -if sys.platform == 'cygwin': - def get_native_path(path): - """Transforms an absolute path into a native path for the system. In - Cygwin, this converts from a Cygwin path to a Windows one.""" - with os.popen('cygpath -w ' + path) as p: - npath = p.read().replace('\n', '') - return npath -else: - def get_native_path(path): - """Transforms an absolute path into a native path for the system. - Non-Cygwin version, just leave the path alone.""" - return path - -display = DisplayEngine() - -def Split(arg): - if is_List(arg) or is_Tuple(arg): - return arg - elif is_String(arg): - return arg.split() - else: - return [arg] - -class CLVar(UserList): - """A class for command-line construction variables. - - This is a list that uses Split() to split an initial string along - white-space arguments, and similarly to split any strings that get - added. 
This allows us to Do the Right Thing with Append() and - Prepend() (as well as straight Python foo = env['VAR'] + 'arg1 - arg2') regardless of whether a user adds a list or a string to a - command-line construction variable. - """ - def __init__(self, seq = []): - UserList.__init__(self, Split(seq)) - def __add__(self, other): - return UserList.__add__(self, CLVar(other)) - def __radd__(self, other): - return UserList.__radd__(self, CLVar(other)) - def __str__(self): - return ' '.join(self.data) - - -class Selector(OrderedDict): - """A callable ordered dictionary that maps file suffixes to - dictionary values. We preserve the order in which items are added - so that get_suffix() calls always return the first suffix added.""" - def __call__(self, env, source, ext=None): - if ext is None: - try: - ext = source[0].get_suffix() - except IndexError: - ext = "" - try: - return self[ext] - except KeyError: - # Try to perform Environment substitution on the keys of - # the dictionary before giving up. - s_dict = {} - for (k,v) in self.items(): - if k is not None: - s_k = env.subst(k) - if s_k in s_dict: - # We only raise an error when variables point - # to the same suffix. If one suffix is literal - # and a variable suffix contains this literal, - # the literal wins and we don't raise an error. - raise KeyError(s_dict[s_k][0], k, s_k) - s_dict[s_k] = (k,v) - try: - return s_dict[ext][1] - except KeyError: - try: - return self[None] - except KeyError: - return None - - -if sys.platform == 'cygwin': - # On Cygwin, os.path.normcase() lies, so just report back the - # fact that the underlying Windows OS is case-insensitive. 
- def case_sensitive_suffixes(s1, s2): - return 0 -else: - def case_sensitive_suffixes(s1, s2): - return (os.path.normcase(s1) != os.path.normcase(s2)) - -def adjustixes(fname, pre, suf, ensure_suffix=False): - if pre: - path, fn = os.path.split(os.path.normpath(fname)) - if fn[:len(pre)] != pre: - fname = os.path.join(path, pre + fn) - # Only append a suffix if the suffix we're going to add isn't already - # there, and if either we've been asked to ensure the specific suffix - # is present or there's no suffix on it at all. - if suf and fname[-len(suf):] != suf and \ - (ensure_suffix or not splitext(fname)[1]): - fname = fname + suf - return fname - - - -# From Tim Peters, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# (Also in the printed Python Cookbook.) - -def unique(s): - """Return a list of the elements in s, but without duplicates. - - For example, unique([1,2,3,1,2,3]) is some permutation of [1,2,3], - unique("abcabc") some permutation of ["a", "b", "c"], and - unique(([1, 2], [2, 3], [1, 2])) some permutation of - [[2, 3], [1, 2]]. - - For best speed, all sequence elements should be hashable. Then - unique() will usually work in linear time. - - If not possible, the sequence elements should enjoy a total - ordering, and if list(s).sort() doesn't raise TypeError it's - assumed that they do enjoy a total ordering. Then unique() will - usually work in O(N*log2(N)) time. - - If that's not possible either, the sequence elements must support - equality-testing. Then unique() will usually work in quadratic - time. - """ - - n = len(s) - if n == 0: - return [] - - # Try using a dict first, as that's the fastest and will usually - # work. If it doesn't work, it will usually fail quickly, so it - # usually doesn't cost much to *try* it. It requires that all the - # sequence elements be hashable, and support equality comparison. 
- u = {} - try: - for x in s: - u[x] = 1 - except TypeError: - pass # move on to the next method - else: - return list(u.keys()) - del u - - # We can't hash all the elements. Second fastest is to sort, - # which brings the equal elements together; then duplicates are - # easy to weed out in a single pass. - # NOTE: Python's list.sort() was designed to be efficient in the - # presence of many duplicate elements. This isn't true of all - # sort functions in all languages or libraries, so this approach - # is more effective in Python than it may be elsewhere. - try: - t = sorted(s) - except TypeError: - pass # move on to the next method - else: - assert n > 0 - last = t[0] - lasti = i = 1 - while i < n: - if t[i] != last: - t[lasti] = last = t[i] - lasti = lasti + 1 - i = i + 1 - return t[:lasti] - del t - - # Brute force is all that's left. - u = [] - for x in s: - if x not in u: - u.append(x) - return u - - -# Best way (assuming Python 3.7, but effectively 3.6) to remove -# duplicates from a list in while preserving order, according to -# https://stackoverflow.com/questions/480214/how-do-i-remove-duplicates-from-a-list-while-preserving-order/17016257#17016257 -def uniquer_hashables(seq): - return list(dict.fromkeys(seq)) - - -# Recipe 19.11 "Reading Lines with Continuation Characters", -# by Alex Martelli, straight from the Python CookBook (2nd edition). -def logical_lines(physical_lines, joiner=''.join): - logical_line = [] - for line in physical_lines: - stripped = line.rstrip() - if stripped.endswith('\\'): - # a line which continues w/the next physical line - logical_line.append(stripped[:-1]) - else: - # a line which does not continue, end of logical line - logical_line.append(line) - yield joiner(logical_line) - logical_line = [] - if logical_line: - # end of sequence implies end of last logical line - yield joiner(logical_line) - - -class LogicalLines(object): - """ Wrapper class for the logical_lines method. 
- - Allows us to read all "logical" lines at once from a - given file object. - """ - - def __init__(self, fileobj): - self.fileobj = fileobj - - def readlines(self): - result = [l for l in logical_lines(self.fileobj)] - return result - - -class UniqueList(UserList): - def __init__(self, seq = []): - UserList.__init__(self, seq) - self.unique = True - def __make_unique(self): - if not self.unique: - self.data = uniquer_hashables(self.data) - self.unique = True - def __lt__(self, other): - self.__make_unique() - return UserList.__lt__(self, other) - def __le__(self, other): - self.__make_unique() - return UserList.__le__(self, other) - def __eq__(self, other): - self.__make_unique() - return UserList.__eq__(self, other) - def __ne__(self, other): - self.__make_unique() - return UserList.__ne__(self, other) - def __gt__(self, other): - self.__make_unique() - return UserList.__gt__(self, other) - def __ge__(self, other): - self.__make_unique() - return UserList.__ge__(self, other) - def __cmp__(self, other): - self.__make_unique() - return UserList.__cmp__(self, other) - def __len__(self): - self.__make_unique() - return UserList.__len__(self) - def __getitem__(self, i): - self.__make_unique() - return UserList.__getitem__(self, i) - def __setitem__(self, i, item): - UserList.__setitem__(self, i, item) - self.unique = False - def __getslice__(self, i, j): - self.__make_unique() - return UserList.__getslice__(self, i, j) - def __setslice__(self, i, j, other): - UserList.__setslice__(self, i, j, other) - self.unique = False - def __add__(self, other): - result = UserList.__add__(self, other) - result.unique = False - return result - def __radd__(self, other): - result = UserList.__radd__(self, other) - result.unique = False - return result - def __iadd__(self, other): - result = UserList.__iadd__(self, other) - result.unique = False - return result - def __mul__(self, other): - result = UserList.__mul__(self, other) - result.unique = False - return result - def 
__rmul__(self, other): - result = UserList.__rmul__(self, other) - result.unique = False - return result - def __imul__(self, other): - result = UserList.__imul__(self, other) - result.unique = False - return result - def append(self, item): - UserList.append(self, item) - self.unique = False - def insert(self, i): - UserList.insert(self, i) - self.unique = False - def count(self, item): - self.__make_unique() - return UserList.count(self, item) - def index(self, item): - self.__make_unique() - return UserList.index(self, item) - def reverse(self): - self.__make_unique() - UserList.reverse(self) - def sort(self, *args, **kwds): - self.__make_unique() - return UserList.sort(self, *args, **kwds) - def extend(self, other): - UserList.extend(self, other) - self.unique = False - - -class Unbuffered(object): - """ - A proxy class that wraps a file object, flushing after every write, - and delegating everything else to the wrapped object. - """ - def __init__(self, file): - self.file = file - self.softspace = 0 ## backward compatibility; not supported in Py3k - def write(self, arg): - try: - self.file.write(arg) - self.file.flush() - except IOError: - # Stdout might be connected to a pipe that has been closed - # by now. The most likely reason for the pipe being closed - # is that the user has press ctrl-c. It this is the case, - # then SCons is currently shutdown. We therefore ignore - # IOError's here so that SCons can continue and shutdown - # properly so that the .sconsign is correctly written - # before SCons exits. - pass - def __getattr__(self, attr): - return getattr(self.file, attr) - -def make_path_relative(path): - """ makes an absolute path name to a relative pathname. 
- """ - if os.path.isabs(path): - drive_s,path = os.path.splitdrive(path) - - import re - if not drive_s: - path=re.compile("/*(.*)").findall(path)[0] - else: - path=path[1:] - - assert( not os.path.isabs( path ) ), path - return path - - - -# The original idea for AddMethod() and RenameFunction() come from the -# following post to the ActiveState Python Cookbook: -# -# ASPN: Python Cookbook : Install bound methods in an instance -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/223613 -# -# That code was a little fragile, though, so the following changes -# have been wrung on it: -# -# * Switched the installmethod() "object" and "function" arguments, -# so the order reflects that the left-hand side is the thing being -# "assigned to" and the right-hand side is the value being assigned. -# -# * Changed explicit type-checking to the "try: klass = object.__class__" -# block in installmethod() below so that it still works with the -# old-style classes that SCons uses. -# -# * Replaced the by-hand creation of methods and functions with use of -# the "new" module, as alluded to in Alex Martelli's response to the -# following Cookbook post: -# -# ASPN: Python Cookbook : Dynamically added methods to a class -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732 - -def AddMethod(obj, function, name=None): - """ - Adds either a bound method to an instance or the function itself (or an unbound method in Python 2) to a class. - If name is ommited the name of the specified function - is used by default. 
- - Example:: - - a = A() - def f(self, x, y): - self.z = x + y - AddMethod(f, A, "add") - a.add(2, 4) - print(a.z) - AddMethod(lambda self, i: self.l[i], a, "listIndex") - print(a.listIndex(5)) - """ - if name is None: - name = function.__name__ - else: - function = RenameFunction(function, name) - - # Note the Python version checks - WLB - # Python 3.3 dropped the 3rd parameter from types.MethodType - if hasattr(obj, '__class__') and obj.__class__ is not type: - # "obj" is an instance, so it gets a bound method. - if sys.version_info[:2] > (3, 2): - method = MethodType(function, obj) - else: - method = MethodType(function, obj, obj.__class__) - else: - # Handle classes - method = function - - setattr(obj, name, method) - -def RenameFunction(function, name): - """ - Returns a function identical to the specified function, but with - the specified name. - """ - return FunctionType(function.__code__, - function.__globals__, - name, - function.__defaults__) - - -if hasattr(hashlib, 'md5'): - md5 = True - - def MD5signature(s): - """ - Generate md5 signature of a string - - :param s: either string or bytes. 
Normally should be bytes - :return: String of hex digits representing the signature - """ - m = hashlib.md5() - - try: - m.update(to_bytes(s)) - except TypeError as e: - m.update(to_bytes(str(s))) - - return m.hexdigest() - - def MD5filesignature(fname, chunksize=65536): - """ - Generate the md5 signature of a file - - :param fname: file to hash - :param chunksize: chunk size to read - :return: String of Hex digits representing the signature - """ - m = hashlib.md5() - with open(fname, "rb") as f: - while True: - blck = f.read(chunksize) - if not blck: - break - m.update(to_bytes(blck)) - return m.hexdigest() -else: - # if md5 algorithm not available, just return data unmodified - # could add alternative signature scheme here - md5 = False - - def MD5signature(s): - return str(s) - - def MD5filesignature(fname, chunksize=65536): - with open(fname, "rb") as f: - result = f.read() - return result - - -def MD5collect(signatures): - """ - Collects a list of signatures into an aggregate signature. - - signatures - a list of signatures - returns - the aggregate signature - """ - if len(signatures) == 1: - return signatures[0] - else: - return MD5signature(', '.join(signatures)) - - -def silent_intern(x): - """ - Perform sys.intern() on the passed argument and return the result. - If the input is ineligible (e.g. a unicode string) the original argument is - returned and no exception is thrown. - """ - try: - return sys.intern(x) - except TypeError: - return x - - - -# From Dinu C. Gherman, -# Python Cookbook, second edition, recipe 6.17, p. 277. -# Also: -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205 -# ASPN: Python Cookbook: Null Object Design Pattern - -#TODO??? class Null(object): -class Null(object): - """ Null objects always and reliably "do nothing." 
""" - def __new__(cls, *args, **kwargs): - if '_instance' not in vars(cls): - cls._instance = super(Null, cls).__new__(cls, *args, **kwargs) - return cls._instance - def __init__(self, *args, **kwargs): - pass - def __call__(self, *args, **kwargs): - return self - def __repr__(self): - return "Null(0x%08X)" % id(self) - def __nonzero__(self): - return False - def __bool__(self): - return False - def __getattr__(self, name): - return self - def __setattr__(self, name, value): - return self - def __delattr__(self, name): - return self - -class NullSeq(Null): - def __len__(self): - return 0 - def __iter__(self): - return iter(()) - def __getitem__(self, i): - return self - def __delitem__(self, i): - return self - def __setitem__(self, i, v): - return self - - -del __revision__ - - -def to_bytes(s): - if s is None: - return b'None' - if not PY3 and isinstance(s, UnicodeType): - # PY2, must encode unicode - return bytearray(s, 'utf-8') - if isinstance (s, (bytes, bytearray)) or bytes is str: - # Above case not covered here as py2 bytes and strings are the same - return s - return bytes(s, 'utf-8') - - -def to_str(s): - if s is None: - return 'None' - if bytes is str or is_String(s): - return s - return str (s, 'utf-8') - - -def cmp(a, b): - """ - Define cmp because it's no longer available in python3 - Works under python 2 as well - """ - return (a > b) - (a < b) - - -def get_env_bool(env, name, default=False): - """Get a value of env[name] converted to boolean. The value of env[name] is - interpreted as follows: 'true', 'yes', 'y', 'on' (case insensitive) and - anything convertible to int that yields non-zero integer are True values; - '0', 'false', 'no', 'n' and 'off' (case insensitive) are False values. For - all other cases, default value is returned. 
- - :Parameters: - - `env` - dict or dict-like object, a convainer with variables - - `name` - name of the variable in env to be returned - - `default` - returned when env[name] does not exist or can't be converted to bool - """ - try: - var = env[name] - except KeyError: - return default - try: - return bool(int(var)) - except ValueError: - if str(var).lower() in ('true', 'yes', 'y', 'on'): - return True - elif str(var).lower() in ('false', 'no', 'n', 'off'): - return False - else: - return default - - -def get_os_env_bool(name, default=False): - """Same as get_env_bool(os.environ, name, default).""" - return get_env_bool(os.environ, name, default) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/BoolVariable.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/BoolVariable.py deleted file mode 100644 index 629faaf44e3..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/BoolVariable.py +++ /dev/null @@ -1,89 +0,0 @@ -"""engine.SCons.Variables.BoolVariable - -This file defines the option type for SCons implementing true/false values. - -Usage example:: - - opts = Variables() - opts.Add(BoolVariable('embedded', 'build for an embedded system', 0)) - ... - if env['embedded'] == 1: - ... 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Variables/BoolVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__all__ = ['BoolVariable',] - -import SCons.Errors - -__true_strings = ('y', 'yes', 'true', 't', '1', 'on' , 'all' ) -__false_strings = ('n', 'no', 'false', 'f', '0', 'off', 'none') - - -def _text2bool(val): - """ - Converts strings to True/False depending on the 'truth' expressed by - the string. If the string can't be converted, the original value - will be returned. - - See '__true_strings' and '__false_strings' for values considered - 'true' or 'false respectively. - - This is usable as 'converter' for SCons' Variables. 
- """ - lval = val.lower() - if lval in __true_strings: return True - if lval in __false_strings: return False - raise ValueError("Invalid value for boolean option: %s" % val) - - -def _validator(key, val, env): - """ - Validates the given value to be either '0' or '1'. - - This is usable as 'validator' for SCons' Variables. - """ - if not env[key] in (True, False): - raise SCons.Errors.UserError( - 'Invalid value for boolean option %s: %s' % (key, env[key])) - - -def BoolVariable(key, help, default): - """ - The input parameters describe a boolean option, thus they are - returned with the correct converter and validator appended. The - 'help' text will by appended by '(yes|no) to show the valid - valued. The result is usable for input to opts.Add(). - """ - return (key, '%s (yes|no)' % help, default, - _validator, _text2bool) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/EnumVariable.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/EnumVariable.py deleted file mode 100644 index 22630a409f2..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/EnumVariable.py +++ /dev/null @@ -1,101 +0,0 @@ -"""engine.SCons.Variables.EnumVariable - -This file defines the option type for SCons allowing only specified -input-values. - -Usage example:: - - opts = Variables() - opts.Add(EnumVariable('debug', 'debug output and symbols', 'no', - allowed_values=('yes', 'no', 'full'), - map={}, ignorecase=2)) - ... - if env['debug'] == 'full': - ... 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Variables/EnumVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__all__ = ['EnumVariable',] - - -import SCons.Errors - -def _validator(key, val, env, vals): - if val not in vals: - raise SCons.Errors.UserError( - 'Invalid value for option %s: %s. Valid values are: %s' % (key, val, vals)) - - -def EnumVariable(key, help, default, allowed_values, map={}, ignorecase=0): - """ - The input parameters describe an option with only certain values - allowed. They are returned with an appropriate converter and - validator appended. The result is usable for input to - Variables.Add(). - - 'key' and 'default' are the values to be passed on to Variables.Add(). 
- - 'help' will be appended by the allowed values automatically - - 'allowed_values' is a list of strings, which are allowed as values - for this option. - - The 'map'-dictionary may be used for converting the input value - into canonical values (e.g. for aliases). - - 'ignorecase' defines the behaviour of the validator: - - If ignorecase == 0, the validator/converter are case-sensitive. - If ignorecase == 1, the validator/converter are case-insensitive. - If ignorecase == 2, the validator/converter is case-insensitive and the converted value will always be lower-case. - - The 'validator' tests whether the value is in the list of allowed values. The 'converter' converts input values - according to the given 'map'-dictionary (unmapped input values are returned unchanged). - """ - - help = '%s (%s)' % (help, '|'.join(allowed_values)) - # define validator - if ignorecase >= 1: - validator = lambda key, val, env: \ - _validator(key, val.lower(), env, allowed_values) - else: - validator = lambda key, val, env: \ - _validator(key, val, env, allowed_values) - # define converter - if ignorecase == 2: - converter = lambda val: map.get(val.lower(), val).lower() - elif ignorecase == 1: - converter = lambda val: map.get(val.lower(), val) - else: - converter = lambda val: map.get(val, val) - return (key, help, default, validator, converter) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/ListVariable.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/ListVariable.py deleted file mode 100644 index bf0f8568cc7..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/ListVariable.py +++ /dev/null @@ -1,135 +0,0 @@ -"""engine.SCons.Variables.ListVariable - -This file defines the option type for SCons implementing 'lists'. 
- -A 'list' option may either be 'all', 'none' or a list of names -separated by comma. After the option has been processed, the option -value holds either the named list elements, all list elements or no -list elements at all. - -Usage example:: - - list_of_libs = Split('x11 gl qt ical') - - opts = Variables() - opts.Add(ListVariable('shared', - 'libraries to build as shared libraries', - 'all', - elems = list_of_libs)) - ... - for lib in list_of_libs: - if lib in env['shared']: - env.SharedObject(...) - else: - env.Object(...) -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Variables/ListVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -# Known Bug: This should behave like a Set-Type, but does not really, -# since elements can occur twice. 
- -__all__ = ['ListVariable',] - -import collections - -import SCons.Util - - -class _ListVariable(collections.UserList): - def __init__(self, initlist=[], allowedElems=[]): - collections.UserList.__init__(self, [_f for _f in initlist if _f]) - self.allowedElems = sorted(allowedElems) - - def __cmp__(self, other): - raise NotImplementedError - def __eq__(self, other): - raise NotImplementedError - def __ge__(self, other): - raise NotImplementedError - def __gt__(self, other): - raise NotImplementedError - def __le__(self, other): - raise NotImplementedError - def __lt__(self, other): - raise NotImplementedError - def __str__(self): - if len(self) == 0: - return 'none' - self.data.sort() - if self.data == self.allowedElems: - return 'all' - else: - return ','.join(self) - def prepare_to_store(self): - return self.__str__() - -def _converter(val, allowedElems, mapdict): - """ - """ - if val == 'none': - val = [] - elif val == 'all': - val = allowedElems - else: - val = [_f for _f in val.split(',') if _f] - val = [mapdict.get(v, v) for v in val] - notAllowed = [v for v in val if v not in allowedElems] - if notAllowed: - raise ValueError("Invalid value(s) for option: %s" % - ','.join(notAllowed)) - return _ListVariable(val, allowedElems) - - -## def _validator(key, val, env): -## """ -## """ -## # todo: write validator for pgk list -## return 1 - - -def ListVariable(key, help, default, names, map={}): - """ - The input parameters describe a 'package list' option, thus they - are returned with the correct converter and validator appended. The - result is usable for input to opts.Add() . - - A 'package list' option may either be 'all', 'none' or a list of - package names (separated by space). 
- """ - names_str = 'allowed names: %s' % ' '.join(names) - if SCons.Util.is_List(default): - default = ','.join(default) - help = '\n '.join( - (help, '(all|none|comma-separated list of names)', names_str)) - return (key, help, default, - None, #_validator, - lambda val: _converter(val, names, map)) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/PackageVariable.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/PackageVariable.py deleted file mode 100644 index 12b89744a0c..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/PackageVariable.py +++ /dev/null @@ -1,106 +0,0 @@ -"""engine.SCons.Variables.PackageVariable - -This file defines the option type for SCons implementing 'package -activation'. - -To be used whenever a 'package' may be enabled/disabled and the -package path may be specified. - -Usage example: - - Examples: - x11=no (disables X11 support) - x11=yes (will search for the package installation dir) - x11=/usr/local/X11 (will check this path for existence) - - To replace autoconf's --with-xxx=yyy :: - - opts = Variables() - opts.Add(PackageVariable('x11', - 'use X11 installed here (yes = search some places', - 'yes')) - ... - if env['x11'] == True: - dir = ... search X11 in some standard places ... - env['x11'] = dir - if env['x11']: - ... build with x11 ... 
-""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Variables/PackageVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__all__ = ['PackageVariable',] - -import SCons.Errors - -__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search') -__disable_strings = ('0', 'no', 'false', 'off', 'disable') - -def _converter(val): - """ - """ - lval = val.lower() - if lval in __enable_strings: return True - if lval in __disable_strings: return False - #raise ValueError("Invalid value for boolean option: %s" % val) - return val - - -def _validator(key, val, env, searchfunc): - # NB: searchfunc is currently undocumented and unsupported - """ - """ - # TODO write validator, check for path - import os - if env[key] is True: - if searchfunc: - env[key] = searchfunc(key, val) - elif env[key] and not os.path.exists(val): - raise SCons.Errors.UserError( - 'Path does not exist for option %s: %s' % (key, val)) - - -def PackageVariable(key, help, default, searchfunc=None): - # NB: searchfunc is currently undocumented and unsupported - """ - The input parameters describe a 'package list' option, thus they - are returned with the correct converter and validator appended. The - result is usable for input to opts.Add() . - - A 'package list' option may either be 'all', 'none' or a list of - package names (separated by space). 
- """ - help = '\n '.join( - (help, '( yes | no | /path/to/%s )' % key)) - return (key, help, default, - lambda k, v, e: _validator(k,v,e,searchfunc), - _converter) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/PathVariable.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/PathVariable.py deleted file mode 100644 index 5b3059d4458..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/PathVariable.py +++ /dev/null @@ -1,145 +0,0 @@ -"""SCons.Variables.PathVariable - -This file defines an option type for SCons implementing path settings. - -To be used whenever a user-specified path override should be allowed. - -Arguments to PathVariable are: - option-name = name of this option on the command line (e.g. "prefix") - option-help = help string for option - option-dflt = default value for this option - validator = [optional] validator for option value. Predefined validators are: - - PathAccept -- accepts any path setting; no validation - PathIsDir -- path must be an existing directory - PathIsDirCreate -- path must be a dir; will create - PathIsFile -- path must be a file - PathExists -- path must exist (any type) [default] - - The validator is a function that is called and which - should return True or False to indicate if the path - is valid. The arguments to the validator function - are: (key, val, env). The key is the name of the - option, the val is the path specified for the option, - and the env is the env to which the Options have been - added. 
- -Usage example:: - - Examples: - prefix=/usr/local - - opts = Variables() - - opts = Variables() - opts.Add(PathVariable('qtdir', - 'where the root of Qt is installed', - qtdir, PathIsDir)) - opts.Add(PathVariable('qt_includes', - 'where the Qt includes are installed', - '$qtdir/includes', PathIsDirCreate)) - opts.Add(PathVariable('qt_libraries', - 'where the Qt library is installed', - '$qtdir/lib')) - -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Variables/PathVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__all__ = ['PathVariable',] - -import os -import os.path - -import SCons.Errors - -class _PathVariableClass(object): - - def PathAccept(self, key, val, env): - """Accepts any path, no checking done.""" - pass - - def PathIsDir(self, key, val, env): - """Validator to check if Path is a directory.""" - if not os.path.isdir(val): - if os.path.isfile(val): - m = 'Directory path for option %s is a file: %s' - else: - m = 'Directory path for option %s does not exist: %s' - raise SCons.Errors.UserError(m % (key, val)) - - def PathIsDirCreate(self, key, val, env): - """Validator to check if Path is a directory, - creating it if it does not exist.""" - if os.path.isfile(val): - m = 'Path for option %s is a file, not a directory: %s' - raise SCons.Errors.UserError(m % (key, val)) - if not os.path.isdir(val): - os.makedirs(val) - - def PathIsFile(self, key, val, env): - """Validator to check if Path is a file""" - if not os.path.isfile(val): - if os.path.isdir(val): - m = 'File path for option %s is a directory: %s' - else: - m = 'File path for option %s does not exist: %s' - raise SCons.Errors.UserError(m % (key, val)) - - def PathExists(self, key, val, env): - """Validator to check if Path exists""" - if not os.path.exists(val): - m = 'Path for option %s does not exist: %s' - raise SCons.Errors.UserError(m % (key, val)) - - def __call__(self, key, help, default, validator=None): - """ - The input parameters describe a 'path list' option, thus they - are returned with the correct converter and validator appended. The - result is usable for input to opts.Add() . - - The 'default' option specifies the default path to use if the - user does not specify an override with this option. 
- - validator is a validator, see this file for examples - """ - if validator is None: - validator = self.PathExists - - if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key): - return (key, '%s ( /path/to/%s )' % (help, key[0]), default, - validator, None) - else: - return (key, '%s ( /path/to/%s )' % (help, key), default, - validator, None) - -PathVariable = _PathVariableClass() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/__init__.py deleted file mode 100644 index 31d66210853..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Variables/__init__.py +++ /dev/null @@ -1,327 +0,0 @@ -"""engine.SCons.Variables - -This file defines the Variables class that is used to add user-friendly -customizable variables to an SCons build. -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Variables/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os.path -import sys -from functools import cmp_to_key - -import SCons.Environment -import SCons.Errors -import SCons.Util -import SCons.Warnings - -from .BoolVariable import BoolVariable # okay -from .EnumVariable import EnumVariable # okay -from .ListVariable import ListVariable # naja -from .PackageVariable import PackageVariable # naja -from .PathVariable import PathVariable # okay - - -class Variables(object): - instance=None - - """ - Holds all the options, updates the environment with the variables, - and renders the help text. - """ - def __init__(self, files=None, args=None, is_global=1): - """ - files - [optional] List of option configuration files to load - (backward compatibility) If a single string is passed it is - automatically placed in a file list - """ - # initialize arguments - if files is None: - files = [] - if args is None: - args = {} - self.options = [] - self.args = args - if not SCons.Util.is_List(files): - if files: - files = [ files ] - else: - files = [] - self.files = files - self.unknown = {} - - # create the singleton instance - if is_global: - self=Variables.instance - - if not Variables.instance: - Variables.instance=self - - def _do_add(self, key, help="", default=None, validator=None, converter=None): - class Variable(object): - pass - - option = Variable() - - # if we get a list or a tuple, we take the first element as the - # option key and store the remaining in aliases. 
- if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key): - option.key = key[0] - option.aliases = key[1:] - else: - option.key = key - option.aliases = [ key ] - option.help = help - option.default = default - option.validator = validator - option.converter = converter - - self.options.append(option) - - # options might be added after the 'unknown' dict has been set up, - # so we remove the key and all its aliases from that dict - for alias in list(option.aliases) + [ option.key ]: - if alias in self.unknown: - del self.unknown[alias] - - def keys(self): - """ - Returns the keywords for the options - """ - return [o.key for o in self.options] - - def Add(self, key, help="", default=None, validator=None, converter=None, **kw): - """ - Add an option. - - - @param key: the name of the variable, or a list or tuple of arguments - @param help: optional help text for the options - @param default: optional default value - @param validator: optional function that is called to validate the option's value - @type validator: Called with (key, value, environment) - @param converter: optional function that is called to convert the option's value before putting it in the environment. - """ - - if SCons.Util.is_List(key) or isinstance(key, tuple): - self._do_add(*key) - return - - if not SCons.Util.is_String(key) or \ - not SCons.Environment.is_valid_construction_var(key): - raise SCons.Errors.UserError("Illegal Variables.Add() key `%s'" % str(key)) - - self._do_add(key, help, default, validator, converter) - - def AddVariables(self, *optlist): - """ - Add a list of options. - - Each list element is a tuple/list of arguments to be passed on - to the underlying method for adding options. 
- - Example:: - - opt.AddVariables( - ('debug', '', 0), - ('CC', 'The C compiler'), - ('VALIDATE', 'An option for testing validation', 'notset', - validator, None), - ) - - """ - - for o in optlist: - self._do_add(*o) - - - def Update(self, env, args=None): - """ - Update an environment with the option variables. - - env - the environment to update. - """ - - values = {} - - # first set the defaults: - for option in self.options: - if option.default is not None: - values[option.key] = option.default - - # next set the value specified in the options file - for filename in self.files: - if os.path.exists(filename): - dir = os.path.split(os.path.abspath(filename))[0] - if dir: - sys.path.insert(0, dir) - try: - values['__name__'] = filename - with open(filename, 'r') as f: - contents = f.read() - exec(contents, {}, values) - finally: - if dir: - del sys.path[0] - del values['__name__'] - - # set the values specified on the command line - if args is None: - args = self.args - - for arg, value in args.items(): - added = False - for option in self.options: - if arg in list(option.aliases) + [ option.key ]: - values[option.key] = value - added = True - if not added: - self.unknown[arg] = value - - # put the variables in the environment: - # (don't copy over variables that are not declared as options) - for option in self.options: - try: - env[option.key] = values[option.key] - except KeyError: - pass - - # Call the convert functions: - for option in self.options: - if option.converter and option.key in values: - value = env.subst('${%s}'%option.key) - try: - try: - env[option.key] = option.converter(value) - except TypeError: - env[option.key] = option.converter(value, env) - except ValueError as x: - raise SCons.Errors.UserError('Error converting option: %s\n%s'%(option.key, x)) - - - # Finally validate the values: - for option in self.options: - if option.validator and option.key in values: - option.validator(option.key, env.subst('${%s}'%option.key), env) - - def 
UnknownVariables(self): - """ - Returns any options in the specified arguments lists that - were not known, declared options in this object. - """ - return self.unknown - - def Save(self, filename, env): - """ - Saves all the options in the given file. This file can - then be used to load the options next run. This can be used - to create an option cache file. - - filename - Name of the file to save into - env - the environment get the option values from - """ - - # Create the file and write out the header - try: - fh = open(filename, 'w') - - try: - # Make an assignment in the file for each option - # within the environment that was assigned a value - # other than the default. - for option in self.options: - try: - value = env[option.key] - try: - prepare = value.prepare_to_store - except AttributeError: - try: - eval(repr(value)) - except KeyboardInterrupt: - raise - except: - # Convert stuff that has a repr() that - # cannot be evaluated into a string - value = SCons.Util.to_String(value) - else: - value = prepare() - - defaultVal = env.subst(SCons.Util.to_String(option.default)) - if option.converter: - defaultVal = option.converter(defaultVal) - - if str(env.subst('${%s}' % option.key)) != str(defaultVal): - fh.write('%s = %s\n' % (option.key, repr(value))) - except KeyError: - pass - finally: - fh.close() - - except IOError as x: - raise SCons.Errors.UserError('Error writing options to file: %s\n%s' % (filename, x)) - - def GenerateHelpText(self, env, sort=None): - """ - Generate the help text for the options. - - env - an environment that is used to get the current values - of the options. - cmp - Either a function as follows: The specific sort function should take two arguments and return -1, 0 or 1 - or a boolean to indicate if it should be sorted. 
- """ - - if callable(sort): - options = sorted(self.options, key=cmp_to_key(lambda x,y: sort(x.key,y.key))) - elif sort is True: - options = sorted(self.options, key=lambda x: x.key) - else: - options = self.options - - def format(opt, self=self, env=env): - if opt.key in env: - actual = env.subst('${%s}' % opt.key) - else: - actual = None - return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default, actual, opt.aliases) - lines = [_f for _f in map(format, options) if _f] - - return ''.join(lines) - - format = '\n%s: %s\n default: %s\n actual: %s\n' - format_ = '\n%s: %s\n default: %s\n actual: %s\n aliases: %s\n' - - def FormatVariableHelpText(self, env, key, help, default, actual, aliases=[]): - # Don't display the key name itself as an alias. - aliases = [a for a in aliases if a != key] - if len(aliases)==0: - return self.format % (key, help, default, actual) - else: - return self.format_ % (key, help, default, actual, aliases) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Warnings.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Warnings.py deleted file mode 100644 index 49a35a9443d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Warnings.py +++ /dev/null @@ -1,233 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial 
portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -"""SCons.Warnings - -This file implements the warnings framework for SCons. - -""" - -__revision__ = "src/engine/SCons/Warnings.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import sys - -import SCons.Errors - -class Warning(SCons.Errors.UserError): - pass - -class WarningOnByDefault(Warning): - pass - - -# NOTE: If you add a new warning class, add it to the man page, too! -class TargetNotBuiltWarning(Warning): # Should go to OnByDefault - pass - -class CacheVersionWarning(WarningOnByDefault): - pass - -class CacheWriteErrorWarning(Warning): - pass - -class CorruptSConsignWarning(WarningOnByDefault): - pass - -class DependencyWarning(Warning): - pass - -class DevelopmentVersionWarning(WarningOnByDefault): - pass - -class DuplicateEnvironmentWarning(WarningOnByDefault): - pass - -class FutureReservedVariableWarning(WarningOnByDefault): - pass - -class LinkWarning(WarningOnByDefault): - pass - -class MisleadingKeywordsWarning(WarningOnByDefault): - pass - -class MissingSConscriptWarning(WarningOnByDefault): - pass - -class NoObjectCountWarning(WarningOnByDefault): - pass - -class NoParallelSupportWarning(WarningOnByDefault): - pass - -class ReservedVariableWarning(WarningOnByDefault): - pass - -class StackSizeWarning(WarningOnByDefault): - pass - -class VisualCMissingWarning(WarningOnByDefault): - pass - -# Used when MSVC_VERSION and MSVS_VERSION do not point to the -# same version (MSVS_VERSION is deprecated) -class 
VisualVersionMismatch(WarningOnByDefault): - pass - -class VisualStudioMissingWarning(Warning): - pass - -class FortranCxxMixWarning(LinkWarning): - pass - - -# Deprecation warnings - -class FutureDeprecatedWarning(Warning): - pass - -class DeprecatedWarning(Warning): - pass - -class MandatoryDeprecatedWarning(DeprecatedWarning): - pass - - -# Special case; base always stays DeprecatedWarning -class PythonVersionWarning(DeprecatedWarning): - pass - -class DeprecatedSourceCodeWarning(FutureDeprecatedWarning): - pass - -class TaskmasterNeedsExecuteWarning(DeprecatedWarning): - pass - -class DeprecatedOptionsWarning(MandatoryDeprecatedWarning): - pass - -class DeprecatedDebugOptionsWarning(MandatoryDeprecatedWarning): - pass - -class DeprecatedMissingSConscriptWarning(DeprecatedWarning): - pass - - -# The below is a list of 2-tuples. The first element is a class object. -# The second element is true if that class is enabled, false if it is disabled. -_enabled = [] - -# If set, raise the warning as an exception -_warningAsException = 0 - -# If not None, a function to call with the warning -_warningOut = None - -def suppressWarningClass(clazz): - """Suppresses all warnings that are of type clazz or - derived from clazz.""" - _enabled.insert(0, (clazz, 0)) - -def enableWarningClass(clazz): - """Enables all warnings that are of type clazz or - derived from clazz.""" - _enabled.insert(0, (clazz, 1)) - -def warningAsException(flag=1): - """Turn warnings into exceptions. Returns the old value of the flag.""" - global _warningAsException - old = _warningAsException - _warningAsException = flag - return old - -def warn(clazz, *args): - global _enabled, _warningAsException, _warningOut - - warning = clazz(args) - for cls, flag in _enabled: - if isinstance(warning, cls): - if flag: - if _warningAsException: - raise warning - - if _warningOut: - _warningOut(warning) - break - -def process_warn_strings(arguments): - """Process requests to enable/disable warnings. 
- - The requests are strings passed to the --warn option or the - SetOption('warn') function. - - An argument to this option should be of the form - or no-. The warning class is munged in order - to get an actual class name from the classes above, which we - need to pass to the {enable,disable}WarningClass() functions. - The supplied is split on hyphens, each element - is capitalized, then smushed back together. Then the string - "Warning" is appended to get the class name. - - For example, 'deprecated' will enable the DeprecatedWarning - class. 'no-dependency' will disable the DependencyWarning class. - - As a special case, --warn=all and --warn=no-all will enable or - disable (respectively) the base Warning class of all warnings. - """ - - def _capitalize(s): - if s[:5] == "scons": - return "SCons" + s[5:] - else: - return s.capitalize() - - for arg in arguments: - - elems = arg.lower().split('-') - enable = 1 - if elems[0] == 'no': - enable = 0 - del elems[0] - - if len(elems) == 1 and elems[0] == 'all': - class_name = "Warning" - else: - class_name = ''.join(map(_capitalize, elems)) + "Warning" - try: - clazz = globals()[class_name] - except KeyError: - sys.stderr.write("No warning type: '%s'\n" % arg) - else: - if enable: - enableWarningClass(clazz) - elif issubclass(clazz, MandatoryDeprecatedWarning): - fmt = "Can not disable mandataory warning: '%s'\n" - sys.stderr.write(fmt % arg) - else: - suppressWarningClass(clazz) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/__init__.py deleted file mode 100644 index 42582b137cb..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -"""SCons - -The main package for the SCons software construction utility. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__version__ = "3.1.2" - -__build__ = "bee7caf9defd6e108fc2998a2520ddb36a967691" - -__buildsys__ = "octodog" - -__date__ = "2019-12-17 02:07:09" - -__developer__ = "bdeegan" - -# make sure compatibility is always in place -import SCons.compat - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/__main__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/__main__.py deleted file mode 100644 index 0dfbb9dc57d..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -import SCons.Script -# this does all the work, and calls sys.exit -# with the proper exit status when done. -SCons.Script.main() diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/compat/__init__.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/compat/__init__.py deleted file mode 100644 index a3935c37fb4..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/compat/__init__.py +++ /dev/null @@ -1,203 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = """ -SCons compatibility package for old Python versions - -This subpackage holds modules that provide backwards-compatible -implementations of various things that we'd like to use in SCons but which -only show up in later versions of Python than the early, old version(s) -we still support. - -Other code will not generally reference things in this package through -the SCons.compat namespace. The modules included here add things to -the builtins namespace or the global module list so that the rest -of our code can use the objects and names imported here regardless of -Python version. - -The rest of the things here will be in individual compatibility modules -that are either: 1) suitably modified copies of the future modules that -we want to use; or 2) backwards compatible re-implementations of the -specific portions of a future module's API that we want to use. - -GENERAL WARNINGS: Implementations of functions in the SCons.compat -modules are *NOT* guaranteed to be fully compliant with these functions in -later versions of Python. We are only concerned with adding functionality -that we actually use in SCons, so be wary if you lift this code for -other uses. (That said, making these more nearly the same as later, -official versions is still a desirable goal, we just don't need to be -obsessive about it.) 
- -We name the compatibility modules with an initial '_scons_' (for example, -_scons_subprocess.py is our compatibility module for subprocess) so -that we can still try to import the real module name and fall back to -our compatibility module if we get an ImportError. The import_as() -function defined below loads the module as the "real" name (without the -'_scons'), after which all of the "import {module}" statements in the -rest of our code will find our pre-loaded compatibility module. -""" - -__revision__ = "src/engine/SCons/compat/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import os -import sys -import importlib - -PYPY = hasattr(sys, 'pypy_translation_info') - - -def rename_module(new, old): - """ - Attempt to import the old module and load it under the new name. - Used for purely cosmetic name changes in Python 3.x. - """ - try: - sys.modules[new] = importlib.import_module(old) - return True - except ImportError: - return False - - -# TODO: FIXME -# In 3.x, 'pickle' automatically loads the fast version if available. -rename_module('pickle', 'cPickle') - -# Default pickle protocol. Higher protocols are more efficient/featureful -# but incompatible with older Python versions. On Python 2.7 this is 2. -# Negative numbers choose the highest available protocol. -import pickle - -# Was pickle.HIGHEST_PROTOCOL -# Changed to 2 so py3.5+'s pickle will be compatible with py2.7. -PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL - -# TODO: FIXME -# In 3.x, 'profile' automatically loads the fast version if available. -rename_module('profile', 'cProfile') - -# TODO: FIXME -# Before Python 3.0, the 'queue' module was named 'Queue'. 
-rename_module('queue', 'Queue') - -# TODO: FIXME -# Before Python 3.0, the 'winreg' module was named '_winreg' -rename_module('winreg', '_winreg') - -# Python 3 moved builtin intern() to sys package -# To make porting easier, make intern always live -# in sys package (for python 2.7.x) -try: - sys.intern -except AttributeError: - # We must be using python 2.7.x so monkey patch - # intern into the sys package - sys.intern = intern - -# UserDict, UserList, UserString are in # collections for 3.x, -# but standalone in 2.7.x. Monkey-patch into collections for 2.7. -import collections - -try: - collections.UserDict -except AttributeError: - from UserDict import UserDict as _UserDict - collections.UserDict = _UserDict - del _UserDict - -try: - collections.UserList -except AttributeError: - from UserList import UserList as _UserList - collections.UserList = _UserList - del _UserList - -try: - collections.UserString -except AttributeError: - from UserString import UserString as _UserString - collections.UserString = _UserString - del _UserString - - -import shutil -try: - shutil.SameFileError -except AttributeError: - class SameFileError(Exception): - pass - - shutil.SameFileError = SameFileError - -def with_metaclass(meta, *bases): - """ - Function from jinja2/_compat.py. License: BSD. - - Use it like this:: - - class BaseForm(object): - pass - - class FormType(type): - pass - - class Form(with_metaclass(FormType, BaseForm)): - pass - - This requires a bit of explanation: the basic idea is to make a - dummy metaclass for one level of class instantiation that replaces - itself with the actual metaclass. Because of internal type checks - we also need to make sure that we downgrade the custom metaclass - for one level to something closer to type (that's why __call__ and - __init__ comes back from type etc.). - - This has the advantage over six.with_metaclass of not introducing - dummy classes into the final MRO. 
- """ - - class metaclass(meta): - __call__ = type.__call__ - __init__ = type.__init__ - - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - - return metaclass('temporary_class', None, {}) - - -class NoSlotsPyPy(type): - """ - Workaround for PyPy not working well with __slots__ and __class__ assignment. - """ - - def __new__(meta, name, bases, dct): - if PYPY and '__slots__' in dct: - dct.pop('__slots__') - return super(NoSlotsPyPy, meta).__new__(meta, name, bases, dct) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/compat/_scons_dbm.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/compat/_scons_dbm.py deleted file mode 100644 index c5da4028d5b..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/compat/_scons_dbm.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = """ -dbm compatibility module for Python versions that don't have dbm. - -This does not not NOT (repeat, *NOT*) provide complete dbm functionality. -It's just a stub on which to hang just enough pieces of dbm functionality -that the whichdb.whichdb() implementstation in the various 2.X versions of -Python won't blow up even if dbm wasn't compiled in. -""" - -__revision__ = "src/engine/SCons/compat/_scons_dbm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -class error(Exception): - pass - -def open(*args, **kw): - raise error() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/cpp.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/cpp.py deleted file mode 100644 index 17a92e1b297..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/cpp.py +++ /dev/null @@ -1,595 +0,0 @@ -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/cpp.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__doc__ = """ -SCons C Pre-Processor module -""" -import SCons.compat - -import os -import re - -# -# First "subsystem" of regular expressions that we set up: -# -# Stuff to turn the C preprocessor directives in a file's contents into -# a list of tuples that we can process easily. -# - -# A table of regular expressions that fetch the arguments from the rest of -# a C preprocessor line. Different directives have different arguments -# that we want to fetch, using the regular expressions to which the lists -# of preprocessor directives map. -cpp_lines_dict = { - # Fetch the rest of a #if/#elif as one argument, - # with white space optional. - ('if', 'elif') : r'\s*(.+)', - - # Fetch the rest of a #ifdef/#ifndef as one argument, - # separated from the keyword by white space. - ('ifdef', 'ifndef',): r'\s+(.+)', - - # Fetch the rest of a #import/#include/#include_next line as one - # argument, with white space optional. - ('import', 'include', 'include_next',) - : r'\s*(.+)', - - # We don't care what comes after a #else or #endif line. - ('else', 'endif',) : '', - - # Fetch three arguments from a #define line: - # 1) The #defined keyword. - # 2) The optional parentheses and arguments (if it's a function-like - # macro, '' if it's not). - # 3) The expansion value. 
- ('define',) : r'\s+([_A-Za-z][_A-Za-z0-9_]*)(\([^)]*\))?\s*(.*)', - - # Fetch the #undefed keyword from a #undef line. - ('undef',) : r'\s+([_A-Za-z][A-Za-z0-9_]*)', -} - -# Create a table that maps each individual C preprocessor directive to -# the corresponding compiled regular expression that fetches the arguments -# we care about. -Table = {} -for op_list, expr in cpp_lines_dict.items(): - e = re.compile(expr) - for op in op_list: - Table[op] = e -del e -del op -del op_list - -# Create a list of the expressions we'll use to match all of the -# preprocessor directives. These are the same as the directives -# themselves *except* that we must use a negative lookahead assertion -# when matching "if" so it doesn't match the "if" in "ifdef" or "ifndef". -override = { - 'if' : 'if(?!n?def)', -} -l = [override.get(x, x) for x in list(Table.keys())] - - -# Turn the list of expressions into one big honkin' regular expression -# that will match all the preprocessor lines at once. This will return -# a list of tuples, one for each preprocessor line. The preprocessor -# directive will be the first element in each tuple, and the rest of -# the line will be the second element. -e = r'^\s*#\s*(' + '|'.join(l) + ')(.*)$' - -# And last but not least, compile the expression. -CPP_Expression = re.compile(e, re.M) - - - - -# -# Second "subsystem" of regular expressions that we set up: -# -# Stuff to translate a C preprocessor expression (as found on a #if or -# #elif line) into an equivalent Python expression that we can eval(). -# - -# A dictionary that maps the C representation of Boolean operators -# to their Python equivalents. -CPP_to_Python_Ops_Dict = { - '!' : ' not ', - '!=' : ' != ', - '&&' : ' and ', - '||' : ' or ', - '?' 
: ' and ', - ':' : ' or ', - '\r' : '', -} - -CPP_to_Python_Ops_Sub = lambda m: CPP_to_Python_Ops_Dict[m.group(0)] - -# We have to sort the keys by length so that longer expressions -# come *before* shorter expressions--in particular, "!=" must -# come before "!" in the alternation. Without this, the Python -# re module, as late as version 2.2.2, empirically matches the -# "!" in "!=" first, instead of finding the longest match. -# What's up with that? -l = sorted(list(CPP_to_Python_Ops_Dict.keys()), key=lambda a: len(a), reverse=True) - -# Turn the list of keys into one regular expression that will allow us -# to substitute all of the operators at once. -expr = '|'.join(map(re.escape, l)) - -# ...and compile the expression. -CPP_to_Python_Ops_Expression = re.compile(expr) - -# A separate list of expressions to be evaluated and substituted -# sequentially, not all at once. -CPP_to_Python_Eval_List = [ - [r'defined\s+(\w+)', '"\\1" in __dict__'], - [r'defined\s*\((\w+)\)', '"\\1" in __dict__'], - [r'/\*.*\*/', ''], - [r'/\*.*', ''], - [r'//.*', ''], - [r'(0x[0-9A-Fa-f]*)[UL]+', '\\1'], -] - -# Replace the string representations of the regular expressions in the -# list with compiled versions. -for l in CPP_to_Python_Eval_List: - l[0] = re.compile(l[0]) - -# Wrap up all of the above into a handy function. -def CPP_to_Python(s): - """ - Converts a C pre-processor expression into an equivalent - Python expression that can be evaluated. - """ - s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s) - for expr, repl in CPP_to_Python_Eval_List: - s = re.sub(expr, repl, s) - return s - - - -del expr -del l -del override - - - -class FunctionEvaluator(object): - """ - Handles delayed evaluation of a #define function call. - """ - def __init__(self, name, args, expansion): - """ - Squirrels away the arguments and expansion value of a #define - macro function for later evaluation when we must actually expand - a value that uses it. 
- """ - self.name = name - self.args = function_arg_separator.split(args) - try: - expansion = expansion.split('##') - except AttributeError: - pass - self.expansion = expansion - def __call__(self, *values): - """ - Evaluates the expansion of a #define macro function called - with the specified values. - """ - if len(self.args) != len(values): - raise ValueError("Incorrect number of arguments to `%s'" % self.name) - # Create a dictionary that maps the macro arguments to the - # corresponding values in this "call." We'll use this when we - # eval() the expansion so that arguments will get expanded to - # the right values. - locals = {} - for k, v in zip(self.args, values): - locals[k] = v - - parts = [] - for s in self.expansion: - if s not in self.args: - s = repr(s) - parts.append(s) - statement = ' + '.join(parts) - - return eval(statement, globals(), locals) - - - -# Find line continuations. -line_continuations = re.compile('\\\\\r?\n') - -# Search for a "function call" macro on an expansion. Returns the -# two-tuple of the "function" name itself, and a string containing the -# arguments within the call parentheses. -function_name = re.compile(r'(\S+)\(([^)]*)\)') - -# Split a string containing comma-separated function call arguments into -# the separate arguments. -function_arg_separator = re.compile(r',\s*') - - - -class PreProcessor(object): - """ - The main workhorse class for handling C pre-processing. - """ - def __init__(self, current=os.curdir, cpppath=(), dict={}, all=0): - global Table - - cpppath = tuple(cpppath) - - self.searchpath = { - '"' : (current,) + cpppath, - '<' : cpppath + (current,), - } - - # Initialize our C preprocessor namespace for tracking the - # values of #defined keywords. We use this namespace to look - # for keywords on #ifdef/#ifndef lines, and to eval() the - # expressions on #if/#elif lines (after massaging them from C to - # Python). 
- self.cpp_namespace = dict.copy() - self.cpp_namespace['__dict__'] = self.cpp_namespace - - if all: - self.do_include = self.all_include - - # For efficiency, a dispatch table maps each C preprocessor - # directive (#if, #define, etc.) to the method that should be - # called when we see it. We accomodate state changes (#if, - # #ifdef, #ifndef) by pushing the current dispatch table on a - # stack and changing what method gets called for each relevant - # directive we might see next at this level (#else, #elif). - # #endif will simply pop the stack. - d = { - 'scons_current_file' : self.scons_current_file - } - for op in list(Table.keys()): - d[op] = getattr(self, 'do_' + op) - self.default_table = d - - # Controlling methods. - - def tupleize(self, contents): - """ - Turns the contents of a file into a list of easily-processed - tuples describing the CPP lines in the file. - - The first element of each tuple is the line's preprocessor - directive (#if, #include, #define, etc., minus the initial '#'). - The remaining elements are specific to the type of directive, as - pulled apart by the regular expression. - """ - global CPP_Expression, Table - contents = line_continuations.sub('', contents) - cpp_tuples = CPP_Expression.findall(contents) - return [(m[0],) + Table[m[0]].match(m[1]).groups() for m in cpp_tuples] - - def __call__(self, file): - """ - Pre-processes a file. - - This is the main public entry point. - """ - self.current_file = file - return self.process_contents(self.read_file(file), file) - - def process_contents(self, contents, fname=None): - """ - Pre-processes a file contents. - - This is the main internal entry point. 
- """ - self.stack = [] - self.dispatch_table = self.default_table.copy() - self.current_file = fname - self.tuples = self.tupleize(contents) - - self.initialize_result(fname) - while self.tuples: - t = self.tuples.pop(0) - # Uncomment to see the list of tuples being processed (e.g., - # to validate the CPP lines are being translated correctly). - #print(t) - self.dispatch_table[t[0]](t) - return self.finalize_result(fname) - - # Dispatch table stack manipulation methods. - - def save(self): - """ - Pushes the current dispatch table on the stack and re-initializes - the current dispatch table to the default. - """ - self.stack.append(self.dispatch_table) - self.dispatch_table = self.default_table.copy() - - def restore(self): - """ - Pops the previous dispatch table off the stack and makes it the - current one. - """ - try: self.dispatch_table = self.stack.pop() - except IndexError: pass - - # Utility methods. - - def do_nothing(self, t): - """ - Null method for when we explicitly want the action for a - specific preprocessor directive to do nothing. - """ - pass - - def scons_current_file(self, t): - self.current_file = t[1] - - def eval_expression(self, t): - """ - Evaluates a C preprocessor expression. - - This is done by converting it to a Python equivalent and - eval()ing it in the C preprocessor namespace we use to - track #define values. - """ - t = CPP_to_Python(' '.join(t[1:])) - try: return eval(t, self.cpp_namespace) - except (NameError, TypeError): return 0 - - def initialize_result(self, fname): - self.result = [fname] - - def finalize_result(self, fname): - return self.result[1:] - - def find_include_file(self, t): - """ - Finds the #include file for a given preprocessor tuple. 
- """ - fname = t[2] - for d in self.searchpath[t[1]]: - if d == os.curdir: - f = fname - else: - f = os.path.join(d, fname) - if os.path.isfile(f): - return f - return None - - def read_file(self, file): - with open(file) as f: - return f.read() - - # Start and stop processing include lines. - - def start_handling_includes(self, t=None): - """ - Causes the PreProcessor object to start processing #import, - #include and #include_next lines. - - This method will be called when a #if, #ifdef, #ifndef or #elif - evaluates True, or when we reach the #else in a #if, #ifdef, - #ifndef or #elif block where a condition already evaluated - False. - - """ - d = self.dispatch_table - p = self.stack[-1] if self.stack else self.default_table - - for k in ('import', 'include', 'include_next'): - d[k] = p[k] - - def stop_handling_includes(self, t=None): - """ - Causes the PreProcessor object to stop processing #import, - #include and #include_next lines. - - This method will be called when a #if, #ifdef, #ifndef or #elif - evaluates False, or when we reach the #else in a #if, #ifdef, - #ifndef or #elif block where a condition already evaluated True. - """ - d = self.dispatch_table - d['import'] = self.do_nothing - d['include'] = self.do_nothing - d['include_next'] = self.do_nothing - - # Default methods for handling all of the preprocessor directives. - # (Note that what actually gets called for a given directive at any - # point in time is really controlled by the dispatch_table.) - - def _do_if_else_condition(self, condition): - """ - Common logic for evaluating the conditions on #if, #ifdef and - #ifndef lines. - """ - self.save() - d = self.dispatch_table - if condition: - self.start_handling_includes() - d['elif'] = self.stop_handling_includes - d['else'] = self.stop_handling_includes - else: - self.stop_handling_includes() - d['elif'] = self.do_elif - d['else'] = self.start_handling_includes - - def do_ifdef(self, t): - """ - Default handling of a #ifdef line. 
- """ - self._do_if_else_condition(t[1] in self.cpp_namespace) - - def do_ifndef(self, t): - """ - Default handling of a #ifndef line. - """ - self._do_if_else_condition(t[1] not in self.cpp_namespace) - - def do_if(self, t): - """ - Default handling of a #if line. - """ - self._do_if_else_condition(self.eval_expression(t)) - - def do_elif(self, t): - """ - Default handling of a #elif line. - """ - d = self.dispatch_table - if self.eval_expression(t): - self.start_handling_includes() - d['elif'] = self.stop_handling_includes - d['else'] = self.stop_handling_includes - - def do_else(self, t): - """ - Default handling of a #else line. - """ - pass - - def do_endif(self, t): - """ - Default handling of a #endif line. - """ - self.restore() - - def do_define(self, t): - """ - Default handling of a #define line. - """ - _, name, args, expansion = t - try: - expansion = int(expansion) - except (TypeError, ValueError): - pass - if args: - evaluator = FunctionEvaluator(name, args[1:-1], expansion) - self.cpp_namespace[name] = evaluator - else: - self.cpp_namespace[name] = expansion - - def do_undef(self, t): - """ - Default handling of a #undef line. - """ - try: del self.cpp_namespace[t[1]] - except KeyError: pass - - def do_import(self, t): - """ - Default handling of a #import line. - """ - # XXX finish this -- maybe borrow/share logic from do_include()...? - pass - - def do_include(self, t): - """ - Default handling of a #include line. - """ - t = self.resolve_include(t) - include_file = self.find_include_file(t) - if include_file: - #print("include_file =", include_file) - self.result.append(include_file) - contents = self.read_file(include_file) - new_tuples = [('scons_current_file', include_file)] + \ - self.tupleize(contents) + \ - [('scons_current_file', self.current_file)] - self.tuples[:] = new_tuples + self.tuples - - # Date: Tue, 22 Nov 2005 20:26:09 -0500 - # From: Stefan Seefeld - # - # By the way, #include_next is not the same as #include. 
The difference - # being that #include_next starts its search in the path following the - # path that let to the including file. In other words, if your system - # include paths are ['/foo', '/bar'], and you are looking at a header - # '/foo/baz.h', it might issue an '#include_next ' which would - # correctly resolve to '/bar/baz.h' (if that exists), but *not* see - # '/foo/baz.h' again. See http://www.delorie.com/gnu/docs/gcc/cpp_11.html - # for more reasoning. - # - # I have no idea in what context 'import' might be used. - - # XXX is #include_next really the same as #include ? - do_include_next = do_include - - # Utility methods for handling resolution of include files. - - def resolve_include(self, t): - """Resolve a tuple-ized #include line. - - This handles recursive expansion of values without "" or <> - surrounding the name until an initial " or < is found, to handle - - #include FILE - - where FILE is a #define somewhere else.""" - - s = t[1] - while not s[0] in '<"': - #print("s =", s) - try: - s = self.cpp_namespace[s] - except KeyError: - m = function_name.search(s) - s = self.cpp_namespace[m.group(1)] - if callable(s): - args = function_arg_separator.split(m.group(2)) - s = s(*args) - if not s: - return None - return (t[0], s[0], s[1:-1]) - - def all_include(self, t): - """ - """ - self.result.append(self.resolve_include(t)) - -class DumbPreProcessor(PreProcessor): - """A preprocessor that ignores all #if/#elif/#else/#endif directives - and just reports back *all* of the #include files (like the classic - SCons scanner did). - - This is functionally equivalent to using a regular expression to - find all of the #include lines, only slower. It exists mainly as - an example of how the main PreProcessor class can be sub-classed - to tailor its behavior. 
- """ - def __init__(self, *args, **kw): - PreProcessor.__init__(self, *args, **kw) - d = self.default_table - for func in ['if', 'elif', 'else', 'endif', 'ifdef', 'ifndef']: - d[func] = d[func] = self.do_nothing - -del __revision__ - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/dblite.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/dblite.py deleted file mode 100644 index 14bd93dc326..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/dblite.py +++ /dev/null @@ -1,290 +0,0 @@ -# dblite.py module contributed by Ralf W. Grosse-Kunstleve. -# Extended for Unicode by Steven Knight. -from __future__ import print_function - -import os -import pickle -import shutil -import time - -from SCons.compat import PICKLE_PROTOCOL - -keep_all_files = 00000 -ignore_corrupt_dbfiles = 0 - - -def corruption_warning(filename): - print("Warning: Discarding corrupt database:", filename) - - -try: - unicode -except NameError: - def is_string(s): - return isinstance(s, str) -else: - def is_string(s): - return type(s) in (str, unicode) - - -def is_bytes(s): - return isinstance(s, bytes) - - -try: - unicode('a') -except NameError: - def unicode(s): - return s - -dblite_suffix = '.dblite' - -# TODO: Does commenting this out break switching from py2/3? -# if bytes is not str: -# dblite_suffix += '.p3' -tmp_suffix = '.tmp' - - -class dblite(object): - """ - Squirrel away references to the functions in various modules - that we'll use when our __del__() method calls our sync() method - during shutdown. We might get destroyed when Python is in the midst - of tearing down the different modules we import in an essentially - arbitrary order, and some of the various modules's global attributes - may already be wiped out from under us. 
- - See the discussion at: - http://mail.python.org/pipermail/python-bugs-list/2003-March/016877.html - """ - - _open = open - _pickle_dump = staticmethod(pickle.dump) - _pickle_protocol = PICKLE_PROTOCOL - _os_chmod = os.chmod - - try: - _os_chown = os.chown - except AttributeError: - _os_chown = None - - _os_rename = os.rename - _os_unlink = os.unlink - _shutil_copyfile = shutil.copyfile - _time_time = time.time - - def __init__(self, file_base_name, flag, mode): - assert flag in (None, "r", "w", "c", "n") - if flag is None: - flag = "r" - - base, ext = os.path.splitext(file_base_name) - if ext == dblite_suffix: - # There's already a suffix on the file name, don't add one. - self._file_name = file_base_name - self._tmp_name = base + tmp_suffix - else: - self._file_name = file_base_name + dblite_suffix - self._tmp_name = file_base_name + tmp_suffix - - self._flag = flag - self._mode = mode - self._dict = {} - self._needs_sync = 00000 - - if self._os_chown is not None and (os.geteuid() == 0 or os.getuid() == 0): - # running as root; chown back to current owner/group when done - try: - statinfo = os.stat(self._file_name) - self._chown_to = statinfo.st_uid - self._chgrp_to = statinfo.st_gid - except OSError as e: - # db file doesn't exist yet. 
- # Check os.environ for SUDO_UID, use if set - self._chown_to = int(os.environ.get('SUDO_UID', -1)) - self._chgrp_to = int(os.environ.get('SUDO_GID', -1)) - else: - self._chown_to = -1 # don't chown - self._chgrp_to = -1 # don't chgrp - - if self._flag == "n": - with self._open(self._file_name, "wb", self._mode): - pass # just make sure it exists - else: - try: - f = self._open(self._file_name, "rb") - except IOError as e: - if self._flag != "c": - raise e - with self._open(self._file_name, "wb", self._mode): - pass # just make sure it exists - else: - p = f.read() - f.close() - if len(p) > 0: - try: - if bytes is not str: - self._dict = pickle.loads(p, encoding='bytes') - else: - self._dict = pickle.loads(p) - except (pickle.UnpicklingError, EOFError, KeyError): - # Note how we catch KeyErrors too here, which might happen - # when we don't have cPickle available (default pickle - # throws it). - if (ignore_corrupt_dbfiles == 0): raise - if (ignore_corrupt_dbfiles == 1): - corruption_warning(self._file_name) - - def close(self): - if self._needs_sync: - self.sync() - - def __del__(self): - self.close() - - def sync(self): - self._check_writable() - f = self._open(self._tmp_name, "wb", self._mode) - self._pickle_dump(self._dict, f, self._pickle_protocol) - f.close() - - # Windows doesn't allow renaming if the file exists, so unlink - # it first, chmod'ing it to make sure we can do so. On UNIX, we - # may not be able to chmod the file if it's owned by someone else - # (e.g. from a previous run as root). We should still be able to - # unlink() the file if the directory's writable, though, so ignore - # any OSError exception thrown by the chmod() call. 
- try: - self._os_chmod(self._file_name, 0o777) - except OSError: - pass - self._os_unlink(self._file_name) - self._os_rename(self._tmp_name, self._file_name) - if self._os_chown is not None and self._chown_to > 0: # don't chown to root or -1 - try: - self._os_chown(self._file_name, self._chown_to, self._chgrp_to) - except OSError: - pass - self._needs_sync = 00000 - if (keep_all_files): - self._shutil_copyfile( - self._file_name, - self._file_name + "_" + str(int(self._time_time()))) - - def _check_writable(self): - if (self._flag == "r"): - raise IOError("Read-only database: %s" % self._file_name) - - def __getitem__(self, key): - return self._dict[key] - - def __setitem__(self, key, value): - self._check_writable() - if (not is_string(key)): - raise TypeError("key `%s' must be a string but is %s" % (key, type(key))) - if (not is_bytes(value)): - raise TypeError("value `%s' must be a bytes but is %s" % (value, type(value))) - self._dict[key] = value - self._needs_sync = 0o001 - - def keys(self): - return list(self._dict.keys()) - - def has_key(self, key): - return key in self._dict - - def __contains__(self, key): - return key in self._dict - - def iterkeys(self): - # Wrapping name in () prevents fixer from "fixing" this - return (self._dict.iterkeys)() - - __iter__ = iterkeys - - def __len__(self): - return len(self._dict) - - -def open(file, flag=None, mode=0o666): - return dblite(file, flag, mode) - - -def _exercise(): - db = open("tmp", "n") - assert len(db) == 0 - db["foo"] = "bar" - assert db["foo"] == "bar" - db[unicode("ufoo")] = unicode("ubar") - assert db[unicode("ufoo")] == unicode("ubar") - db.sync() - db = open("tmp", "c") - assert len(db) == 2, len(db) - assert db["foo"] == "bar" - db["bar"] = "foo" - assert db["bar"] == "foo" - db[unicode("ubar")] = unicode("ufoo") - assert db[unicode("ubar")] == unicode("ufoo") - db.sync() - db = open("tmp", "r") - assert len(db) == 4, len(db) - assert db["foo"] == "bar" - assert db["bar"] == "foo" - assert 
db[unicode("ufoo")] == unicode("ubar") - assert db[unicode("ubar")] == unicode("ufoo") - try: - db.sync() - except IOError as e: - assert str(e) == "Read-only database: tmp.dblite" - else: - raise RuntimeError("IOError expected.") - db = open("tmp", "w") - assert len(db) == 4 - db["ping"] = "pong" - db.sync() - try: - db[(1, 2)] = "tuple" - except TypeError as e: - assert str(e) == "key `(1, 2)' must be a string but is ", str(e) - else: - raise RuntimeError("TypeError exception expected") - try: - db["list"] = [1, 2] - except TypeError as e: - assert str(e) == "value `[1, 2]' must be a string but is ", str(e) - else: - raise RuntimeError("TypeError exception expected") - db = open("tmp", "r") - assert len(db) == 5 - db = open("tmp", "n") - assert len(db) == 0 - dblite._open("tmp.dblite", "w") - db = open("tmp", "r") - dblite._open("tmp.dblite", "w").write("x") - try: - db = open("tmp", "r") - except pickle.UnpicklingError: - pass - else: - raise RuntimeError("pickle exception expected.") - global ignore_corrupt_dbfiles - ignore_corrupt_dbfiles = 2 - db = open("tmp", "r") - assert len(db) == 0 - os.unlink("tmp.dblite") - try: - db = open("tmp", "w") - except IOError as e: - assert str(e) == "[Errno 2] No such file or directory: 'tmp.dblite'", str(e) - else: - raise RuntimeError("IOError expected.") - - -if (__name__ == "__main__"): - _exercise() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/exitfuncs.py b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/exitfuncs.py deleted file mode 100644 index 43ae73bc643..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/exitfuncs.py +++ /dev/null @@ -1,64 +0,0 @@ -"""SCons.exitfuncs - -Register functions which are executed when SCons exits for any reason. 
- -""" - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/exitfuncs.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - - -import atexit - -_exithandlers = [] -def _run_exitfuncs(): - """run any registered exit functions - - _exithandlers is traversed in reverse order so functions are executed - last in, first out. 
- """ - - while _exithandlers: - func, targs, kargs = _exithandlers.pop() - func(*targs, **kargs) - -def register(func, *targs, **kargs): - """register a function to be executed upon normal program termination - - func - function to be called at exit - targs - optional arguments to pass to func - kargs - optional keyword arguments to pass to func - """ - _exithandlers.append((func, targs, kargs)) - - -# make our exit function get run by python when it exits -atexit.register(_run_exitfuncs) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons-local-3.1.2/scons-3.1.2.egg-info b/src/third_party/scons-3.1.2/scons-local-3.1.2/scons-3.1.2.egg-info deleted file mode 100644 index 321323ee246..00000000000 --- a/src/third_party/scons-3.1.2/scons-local-3.1.2/scons-3.1.2.egg-info +++ /dev/null @@ -1,13 +0,0 @@ -Metadata-Version: 1.0 -Name: scons -Version: 3.1.2 -Summary: Open Source next-generation build tool. -Home-page: http://www.scons.org/ -Author: William Deegan -Author-email: bill@baddogconsulting.com -License: UNKNOWN -Description: Open Source next-generation build tool. - Improved, cross-platform substitute for the classic Make - utility. In short, SCons is an easier, more reliable - and faster way to build software. -Platform: UNKNOWN diff --git a/src/third_party/scons-3.1.2/scons-time.py b/src/third_party/scons-3.1.2/scons-time.py deleted file mode 100755 index c371d59b300..00000000000 --- a/src/third_party/scons-3.1.2/scons-time.py +++ /dev/null @@ -1,1481 +0,0 @@ -#!/usr/bin/env python -# -# scons-time - run SCons timings and collect statistics -# -# A script for running a configuration through SCons with a standard -# set of invocations to collect timing and memory statistics and to -# capture the results in a consistent set of output files for display -# and analysis. 
-# - -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from __future__ import division, print_function - -__revision__ = "src/script/scons-time.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -import getopt -import glob -import os -import re -import shutil -import sys -import tempfile -import time -import subprocess - -def HACK_for_exec(cmd, *args): - """ - For some reason, Python won't allow an exec() within a function - that also declares an internal function (including lambda functions). - This function is a hack that calls exec() in a function with no - internal functions. - """ - if not args: exec(cmd) - elif len(args) == 1: exec(cmd, args[0]) - else: exec(cmd, args[0], args[1]) - -class Plotter(object): - def increment_size(self, largest): - """ - Return the size of each horizontal increment line for a specified - maximum value. 
This returns a value that will provide somewhere - between 5 and 9 horizontal lines on the graph, on some set of - boundaries that are multiples of 10/100/1000/etc. - """ - i = largest // 5 - if not i: - return largest - multiplier = 1 - while i >= 10: - i = i // 10 - multiplier = multiplier * 10 - return i * multiplier - - def max_graph_value(self, largest): - # Round up to next integer. - largest = int(largest) + 1 - increment = self.increment_size(largest) - return ((largest + increment - 1) // increment) * increment - -class Line(object): - def __init__(self, points, type, title, label, comment, fmt="%s %s"): - self.points = points - self.type = type - self.title = title - self.label = label - self.comment = comment - self.fmt = fmt - - def print_label(self, inx, x, y): - if self.label: - print('set label %s "%s" at %0.1f,%0.1f right' % (inx, self.label, x, y)) - - def plot_string(self): - if self.title: - title_string = 'title "%s"' % self.title - else: - title_string = 'notitle' - return "'-' %s with lines lt %s" % (title_string, self.type) - - def print_points(self, fmt=None): - if fmt is None: - fmt = self.fmt - if self.comment: - print('# %s' % self.comment) - for x, y in self.points: - # If y is None, it usually represents some kind of break - # in the line's index number. We might want to represent - # this some way rather than just drawing the line straight - # between the two points on either side. 
- if y is not None: - print(fmt % (x, y)) - print('e') - - def get_x_values(self): - return [ p[0] for p in self.points ] - - def get_y_values(self): - return [ p[1] for p in self.points ] - -class Gnuplotter(Plotter): - - def __init__(self, title, key_location): - self.lines = [] - self.title = title - self.key_location = key_location - - def line(self, points, type, title=None, label=None, comment=None, fmt='%s %s'): - if points: - line = Line(points, type, title, label, comment, fmt) - self.lines.append(line) - - def plot_string(self, line): - return line.plot_string() - - def vertical_bar(self, x, type, label, comment): - if self.get_min_x() <= x <= self.get_max_x(): - points = [(x, 0), (x, self.max_graph_value(self.get_max_y()))] - self.line(points, type, label, comment) - - def get_all_x_values(self): - result = [] - for line in self.lines: - result.extend(line.get_x_values()) - return [r for r in result if r is not None] - - def get_all_y_values(self): - result = [] - for line in self.lines: - result.extend(line.get_y_values()) - return [r for r in result if r is not None] - - def get_min_x(self): - try: - return self.min_x - except AttributeError: - try: - self.min_x = min(self.get_all_x_values()) - except ValueError: - self.min_x = 0 - return self.min_x - - def get_max_x(self): - try: - return self.max_x - except AttributeError: - try: - self.max_x = max(self.get_all_x_values()) - except ValueError: - self.max_x = 0 - return self.max_x - - def get_min_y(self): - try: - return self.min_y - except AttributeError: - try: - self.min_y = min(self.get_all_y_values()) - except ValueError: - self.min_y = 0 - return self.min_y - - def get_max_y(self): - try: - return self.max_y - except AttributeError: - try: - self.max_y = max(self.get_all_y_values()) - except ValueError: - self.max_y = 0 - return self.max_y - - def draw(self): - - if not self.lines: - return - - if self.title: - print('set title "%s"' % self.title) - print('set key %s' % self.key_location) - - 
min_y = self.get_min_y() - max_y = self.max_graph_value(self.get_max_y()) - incr = (max_y - min_y) / 10.0 - start = min_y + (max_y / 2.0) + (2.0 * incr) - position = [ start - (i * incr) for i in range(5) ] - - inx = 1 - for line in self.lines: - line.print_label(inx, line.points[0][0]-1, - position[(inx-1) % len(position)]) - inx += 1 - - plot_strings = [ self.plot_string(l) for l in self.lines ] - print('plot ' + ', \\\n '.join(plot_strings)) - - for line in self.lines: - line.print_points() - - - -def untar(fname): - import tarfile - tar = tarfile.open(name=fname, mode='r') - for tarinfo in tar: - tar.extract(tarinfo) - tar.close() - -def unzip(fname): - import zipfile - zf = zipfile.ZipFile(fname, 'r') - for name in zf.namelist(): - dir = os.path.dirname(name) - try: - os.makedirs(dir) - except: - pass - with open(name, 'wb') as f: - f.write(zf.read(name)) - -def read_tree(dir): - for dirpath, dirnames, filenames in os.walk(dir): - for fn in filenames: - fn = os.path.join(dirpath, fn) - if os.path.isfile(fn): - with open(fn, 'rb') as f: - f.read() - -def redirect_to_file(command, log): - return '%s > %s 2>&1' % (command, log) - -def tee_to_file(command, log): - return '%s 2>&1 | tee %s' % (command, log) - - - -class SConsTimer(object): - """ - Usage: scons-time SUBCOMMAND [ARGUMENTS] - Type "scons-time help SUBCOMMAND" for help on a specific subcommand. 
- - Available subcommands: - func Extract test-run data for a function - help Provides help - mem Extract --debug=memory data from test runs - obj Extract --debug=count data from test runs - time Extract --debug=time data from test runs - run Runs a test configuration - """ - - name = 'scons-time' - name_spaces = ' '*len(name) - - def makedict(**kw): - return kw - - default_settings = makedict( - chdir = None, - config_file = None, - initial_commands = [], - key_location = 'bottom left', - orig_cwd = os.getcwd(), - outdir = None, - prefix = '', - python = '"%s"' % sys.executable, - redirect = redirect_to_file, - scons = None, - scons_flags = '--debug=count --debug=memory --debug=time --debug=memoizer', - scons_lib_dir = None, - scons_wrapper = None, - startup_targets = '--help', - subdir = None, - subversion_url = None, - svn = 'svn', - svn_co_flag = '-q', - tar = 'tar', - targets = '', - targets0 = None, - targets1 = None, - targets2 = None, - title = None, - unzip = 'unzip', - verbose = False, - vertical_bars = [], - - unpack_map = { - '.tar.gz' : (untar, '%(tar)s xzf %%s'), - '.tgz' : (untar, '%(tar)s xzf %%s'), - '.tar' : (untar, '%(tar)s xf %%s'), - '.zip' : (unzip, '%(unzip)s %%s'), - }, - ) - - run_titles = [ - 'Startup', - 'Full build', - 'Up-to-date build', - ] - - run_commands = [ - '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof0)s %(targets0)s', - '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof1)s %(targets1)s', - '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof2)s %(targets2)s', - ] - - stages = [ - 'pre-read', - 'post-read', - 'pre-build', - 'post-build', - ] - - stage_strings = { - 'pre-read' : 'Memory before reading SConscript files:', - 'post-read' : 'Memory after reading SConscript files:', - 'pre-build' : 'Memory before building targets:', - 'post-build' : 'Memory after building targets:', - } - - memory_string_all = 'Memory ' - - default_stage = stages[-1] - - time_strings = { - 'total' : 'Total build 
time', - 'SConscripts' : 'Total SConscript file execution time', - 'SCons' : 'Total SCons execution time', - 'commands' : 'Total command execution time', - } - - time_string_all = 'Total .* time' - - # - - def __init__(self): - self.__dict__.update(self.default_settings) - - # Functions for displaying and executing commands. - - def subst(self, x, dictionary): - try: - return x % dictionary - except TypeError: - # x isn't a string (it's probably a Python function), - # so just return it. - return x - - def subst_variables(self, command, dictionary): - """ - Substitutes (via the format operator) the values in the specified - dictionary into the specified command. - - The command can be an (action, string) tuple. In all cases, we - perform substitution on strings and don't worry if something isn't - a string. (It's probably a Python function to be executed.) - """ - try: - command + '' - except TypeError: - action = command[0] - string = command[1] - args = command[2:] - else: - action = command - string = action - args = (()) - action = self.subst(action, dictionary) - string = self.subst(string, dictionary) - return (action, string, args) - - def _do_not_display(self, msg, *args): - pass - - def display(self, msg, *args): - """ - Displays the specified message. - - Each message is prepended with a standard prefix of our name - plus the time. - """ - if callable(msg): - msg = msg(*args) - else: - msg = msg % args - if msg is None: - return - fmt = '%s[%s]: %s\n' - sys.stdout.write(fmt % (self.name, time.strftime('%H:%M:%S'), msg)) - - def _do_not_execute(self, action, *args): - pass - - def execute(self, action, *args): - """ - Executes the specified action. - - The action is called if it's a callable Python function, and - otherwise passed to os.system(). 
- """ - if callable(action): - action(*args) - else: - os.system(action % args) - - def run_command_list(self, commands, dict): - """ - Executes a list of commands, substituting values from the - specified dictionary. - """ - commands = [ self.subst_variables(c, dict) for c in commands ] - for action, string, args in commands: - self.display(string, *args) - sys.stdout.flush() - status = self.execute(action, *args) - if status: - sys.exit(status) - - def log_display(self, command, log): - command = self.subst(command, self.__dict__) - if log: - command = self.redirect(command, log) - return command - - def log_execute(self, command, log): - command = self.subst(command, self.__dict__) - p = os.popen(command) - output = p.read() - p.close() - #TODO: convert to subrocess, os.popen is obsolete. This didn't work: - #process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) - #output = process.stdout.read() - #process.stdout.close() - #process.wait() - if self.verbose: - sys.stdout.write(output) - # TODO: Figure out - # Not sure we need to write binary here - with open(log, 'w') as f: - f.write(str(output)) - - def archive_splitext(self, path): - """ - Splits an archive name into a filename base and extension. - - This is like os.path.splitext() (which it calls) except that it - also looks for '.tar.gz' and treats it as an atomic extensions. - """ - if path.endswith('.tar.gz'): - return path[:-7], path[-7:] - else: - return os.path.splitext(path) - - def args_to_files(self, args, tail=None): - """ - Takes a list of arguments, expands any glob patterns, and - returns the last "tail" files from the list. 
- """ - files = [] - for a in args: - files.extend(sorted(glob.glob(a))) - - if tail: - files = files[-tail:] - - return files - - def ascii_table(self, files, columns, - line_function, file_function=lambda x: x, - *args, **kw): - - header_fmt = ' '.join(['%12s'] * len(columns)) - line_fmt = header_fmt + ' %s' - - print(header_fmt % columns) - - for file in files: - t = line_function(file, *args, **kw) - if t is None: - t = [] - diff = len(columns) - len(t) - if diff > 0: - t += [''] * diff - t.append(file_function(file)) - print(line_fmt % tuple(t)) - - def collect_results(self, files, function, *args, **kw): - results = {} - - for file in files: - base = os.path.splitext(file)[0] - run, index = base.split('-')[-2:] - - run = int(run) - index = int(index) - - value = function(file, *args, **kw) - - try: - r = results[index] - except KeyError: - r = [] - results[index] = r - r.append((run, value)) - - return results - - def doc_to_help(self, obj): - """ - Translates an object's __doc__ string into help text. - - This strips a consistent number of spaces from each line in the - help text, essentially "outdenting" the text to the left-most - column. - """ - doc = obj.__doc__ - if doc is None: - return '' - return self.outdent(doc) - - def find_next_run_number(self, dir, prefix): - """ - Returns the next run number in a directory for the specified prefix. - - Examines the contents the specified directory for files with the - specified prefix, extracts the run numbers from each file name, - and returns the next run number after the largest it finds. - """ - x = re.compile(re.escape(prefix) + '-([0-9]+).*') - matches = [x.match(e) for e in os.listdir(dir)] - matches = [_f for _f in matches if _f] - if not matches: - return 0 - run_numbers = [int(m.group(1)) for m in matches] - return int(max(run_numbers)) + 1 - - def gnuplot_results(self, results, fmt='%s %.3f'): - """ - Prints out a set of results in Gnuplot format. 
- """ - gp = Gnuplotter(self.title, self.key_location) - - for i in sorted(results.keys()): - try: - t = self.run_titles[i] - except IndexError: - t = '??? %s ???' % i - results[i].sort() - gp.line(results[i], i+1, t, None, t, fmt=fmt) - - for bar_tuple in self.vertical_bars: - try: - x, type, label, comment = bar_tuple - except ValueError: - x, type, label = bar_tuple - comment = label - gp.vertical_bar(x, type, label, comment) - - gp.draw() - - def logfile_name(self, invocation): - """ - Returns the absolute path of a log file for the specificed - invocation number. - """ - name = self.prefix_run + '-%d.log' % invocation - return os.path.join(self.outdir, name) - - def outdent(self, s): - """ - Strip as many spaces from each line as are found at the beginning - of the first line in the list. - """ - lines = s.split('\n') - if lines[0] == '': - lines = lines[1:] - spaces = re.match(' *', lines[0]).group(0) - def strip_initial_spaces(l, s=spaces): - if l.startswith(spaces): - l = l[len(spaces):] - return l - return '\n'.join([ strip_initial_spaces(l) for l in lines ]) + '\n' - - def profile_name(self, invocation): - """ - Returns the absolute path of a profile file for the specified - invocation number. - """ - name = self.prefix_run + '-%d.prof' % invocation - return os.path.join(self.outdir, name) - - def set_env(self, key, value): - os.environ[key] = value - - # - - def get_debug_times(self, file, time_string=None): - """ - Fetch times from the --debug=time strings in the specified file. 
- """ - if time_string is None: - search_string = self.time_string_all - else: - search_string = time_string - with open(file) as f: - contents = f.read() - if not contents: - sys.stderr.write('file %s has no contents!\n' % repr(file)) - return None - result = re.findall(r'%s: ([\d\.]*)' % search_string, contents)[-4:] - result = [ float(r) for r in result ] - if time_string is not None: - try: - result = result[0] - except IndexError: - sys.stderr.write('file %s has no results!\n' % repr(file)) - return None - return result - - def get_function_profile(self, file, function): - """ - Returns the file, line number, function name, and cumulative time. - """ - try: - import pstats - except ImportError as e: - sys.stderr.write('%s: func: %s\n' % (self.name, e)) - sys.stderr.write('%s This version of Python is missing the profiler.\n' % self.name_spaces) - sys.stderr.write('%s Cannot use the "func" subcommand.\n' % self.name_spaces) - sys.exit(1) - statistics = pstats.Stats(file).stats - matches = [ e for e in statistics.items() if e[0][2] == function ] - r = matches[0] - return r[0][0], r[0][1], r[0][2], r[1][3] - - def get_function_time(self, file, function): - """ - Returns just the cumulative time for the specified function. - """ - return self.get_function_profile(file, function)[3] - - def get_memory(self, file, memory_string=None): - """ - Returns a list of integers of the amount of memory used. The - default behavior is to return all the stages. - """ - if memory_string is None: - search_string = self.memory_string_all - else: - search_string = memory_string - with open(file) as f: - lines = f.readlines() - lines = [ l for l in lines if l.startswith(search_string) ][-4:] - result = [ int(l.split()[-1]) for l in lines[-4:] ] - if len(result) == 1: - result = result[0] - return result - - def get_object_counts(self, file, object_name, index=None): - """ - Returns the counts of the specified object_name. 
- """ - object_string = ' ' + object_name + '\n' - with open(file) as f: - lines = f.readlines() - line = [ l for l in lines if l.endswith(object_string) ][0] - result = [ int(field) for field in line.split()[:4] ] - if index is not None: - result = result[index] - return result - - - command_alias = {} - - def execute_subcommand(self, argv): - """ - Executes the do_*() function for the specified subcommand (argv[0]). - """ - if not argv: - return - cmdName = self.command_alias.get(argv[0], argv[0]) - try: - func = getattr(self, 'do_' + cmdName) - except AttributeError: - return self.default(argv) - try: - return func(argv) - except TypeError as e: - sys.stderr.write("%s %s: %s\n" % (self.name, cmdName, e)) - import traceback - traceback.print_exc(file=sys.stderr) - sys.stderr.write("Try '%s help %s'\n" % (self.name, cmdName)) - - def default(self, argv): - """ - The default behavior for an unknown subcommand. Prints an - error message and exits. - """ - sys.stderr.write('%s: Unknown subcommand "%s".\n' % (self.name, argv[0])) - sys.stderr.write('Type "%s help" for usage.\n' % self.name) - sys.exit(1) - - # - - def do_help(self, argv): - """ - """ - if argv[1:]: - for arg in argv[1:]: - try: - func = getattr(self, 'do_' + arg) - except AttributeError: - sys.stderr.write('%s: No help for "%s"\n' % (self.name, arg)) - else: - try: - help = getattr(self, 'help_' + arg) - except AttributeError: - sys.stdout.write(self.doc_to_help(func)) - sys.stdout.flush() - else: - help() - else: - doc = self.doc_to_help(self.__class__) - if doc: - sys.stdout.write(doc) - sys.stdout.flush() - return None - - # - - def help_func(self): - help = """\ - Usage: scons-time func [OPTIONS] FILE [...] 
- - -C DIR, --chdir=DIR Change to DIR before looking for files - -f FILE, --file=FILE Read configuration from specified FILE - --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT - --func=NAME, --function=NAME Report time for function NAME - -h, --help Print this help and exit - -p STRING, --prefix=STRING Use STRING as log file/profile prefix - -t NUMBER, --tail=NUMBER Only report the last NUMBER files - --title=TITLE Specify the output plot TITLE - """ - sys.stdout.write(self.outdent(help)) - sys.stdout.flush() - - def do_func(self, argv): - """ - """ - format = 'ascii' - function_name = '_main' - tail = None - - short_opts = '?C:f:hp:t:' - - long_opts = [ - 'chdir=', - 'file=', - 'fmt=', - 'format=', - 'func=', - 'function=', - 'help', - 'prefix=', - 'tail=', - 'title=', - ] - - opts, args = getopt.getopt(argv[1:], short_opts, long_opts) - - for o, a in opts: - if o in ('-C', '--chdir'): - self.chdir = a - elif o in ('-f', '--file'): - self.config_file = a - elif o in ('--fmt', '--format'): - format = a - elif o in ('--func', '--function'): - function_name = a - elif o in ('-?', '-h', '--help'): - self.do_help(['help', 'func']) - sys.exit(0) - elif o in ('--max',): - max_time = int(a) - elif o in ('-p', '--prefix'): - self.prefix = a - elif o in ('-t', '--tail'): - tail = int(a) - elif o in ('--title',): - self.title = a - - if self.config_file: - with open(self.config_file, 'r') as f: - config = f.read() - exec(config, self.__dict__) - - if self.chdir: - os.chdir(self.chdir) - - if not args: - - pattern = '%s*.prof' % self.prefix - args = self.args_to_files([pattern], tail) - - if not args: - if self.chdir: - directory = self.chdir - else: - directory = os.getcwd() - - sys.stderr.write('%s: func: No arguments specified.\n' % self.name) - sys.stderr.write('%s No %s*.prof files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) - sys.stderr.write('%s Type "%s help func" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - - else: - - 
args = self.args_to_files(args, tail) - - cwd_ = os.getcwd() + os.sep - - if format == 'ascii': - - for file in args: - try: - f, line, func, time = \ - self.get_function_profile(file, function_name) - except ValueError as e: - sys.stderr.write("%s: func: %s: %s\n" % - (self.name, file, e)) - else: - if f.startswith(cwd_): - f = f[len(cwd_):] - print("%.3f %s:%d(%s)" % (time, f, line, func)) - - elif format == 'gnuplot': - - results = self.collect_results(args, self.get_function_time, - function_name) - - self.gnuplot_results(results) - - else: - - sys.stderr.write('%s: func: Unknown format "%s".\n' % (self.name, format)) - sys.exit(1) - - # - - def help_mem(self): - help = """\ - Usage: scons-time mem [OPTIONS] FILE [...] - - -C DIR, --chdir=DIR Change to DIR before looking for files - -f FILE, --file=FILE Read configuration from specified FILE - --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT - -h, --help Print this help and exit - -p STRING, --prefix=STRING Use STRING as log file/profile prefix - --stage=STAGE Plot memory at the specified stage: - pre-read, post-read, pre-build, - post-build (default: post-build) - -t NUMBER, --tail=NUMBER Only report the last NUMBER files - --title=TITLE Specify the output plot TITLE - """ - sys.stdout.write(self.outdent(help)) - sys.stdout.flush() - - def do_mem(self, argv): - - format = 'ascii' - logfile_path = lambda x: x - stage = self.default_stage - tail = None - - short_opts = '?C:f:hp:t:' - - long_opts = [ - 'chdir=', - 'file=', - 'fmt=', - 'format=', - 'help', - 'prefix=', - 'stage=', - 'tail=', - 'title=', - ] - - opts, args = getopt.getopt(argv[1:], short_opts, long_opts) - - for o, a in opts: - if o in ('-C', '--chdir'): - self.chdir = a - elif o in ('-f', '--file'): - self.config_file = a - elif o in ('--fmt', '--format'): - format = a - elif o in ('-?', '-h', '--help'): - self.do_help(['help', 'mem']) - sys.exit(0) - elif o in ('-p', '--prefix'): - self.prefix = a - elif o in ('--stage',): - if a not 
in self.stages: - sys.stderr.write('%s: mem: Unrecognized stage "%s".\n' % (self.name, a)) - sys.exit(1) - stage = a - elif o in ('-t', '--tail'): - tail = int(a) - elif o in ('--title',): - self.title = a - - if self.config_file: - with open(self.config_file, 'r') as f: - config = f.read() - HACK_for_exec(config, self.__dict__) - - if self.chdir: - os.chdir(self.chdir) - logfile_path = lambda x: os.path.join(self.chdir, x) - - if not args: - - pattern = '%s*.log' % self.prefix - args = self.args_to_files([pattern], tail) - - if not args: - if self.chdir: - directory = self.chdir - else: - directory = os.getcwd() - - sys.stderr.write('%s: mem: No arguments specified.\n' % self.name) - sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) - sys.stderr.write('%s Type "%s help mem" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - - else: - - args = self.args_to_files(args, tail) - - cwd_ = os.getcwd() + os.sep - - if format == 'ascii': - - self.ascii_table(args, tuple(self.stages), self.get_memory, logfile_path) - - elif format == 'gnuplot': - - results = self.collect_results(args, self.get_memory, - self.stage_strings[stage]) - - self.gnuplot_results(results) - - else: - - sys.stderr.write('%s: mem: Unknown format "%s".\n' % (self.name, format)) - sys.exit(1) - - return 0 - - # - - def help_obj(self): - help = """\ - Usage: scons-time obj [OPTIONS] OBJECT FILE [...] 
- - -C DIR, --chdir=DIR Change to DIR before looking for files - -f FILE, --file=FILE Read configuration from specified FILE - --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT - -h, --help Print this help and exit - -p STRING, --prefix=STRING Use STRING as log file/profile prefix - --stage=STAGE Plot memory at the specified stage: - pre-read, post-read, pre-build, - post-build (default: post-build) - -t NUMBER, --tail=NUMBER Only report the last NUMBER files - --title=TITLE Specify the output plot TITLE - """ - sys.stdout.write(self.outdent(help)) - sys.stdout.flush() - - def do_obj(self, argv): - - format = 'ascii' - logfile_path = lambda x: x - stage = self.default_stage - tail = None - - short_opts = '?C:f:hp:t:' - - long_opts = [ - 'chdir=', - 'file=', - 'fmt=', - 'format=', - 'help', - 'prefix=', - 'stage=', - 'tail=', - 'title=', - ] - - opts, args = getopt.getopt(argv[1:], short_opts, long_opts) - - for o, a in opts: - if o in ('-C', '--chdir'): - self.chdir = a - elif o in ('-f', '--file'): - self.config_file = a - elif o in ('--fmt', '--format'): - format = a - elif o in ('-?', '-h', '--help'): - self.do_help(['help', 'obj']) - sys.exit(0) - elif o in ('-p', '--prefix'): - self.prefix = a - elif o in ('--stage',): - if a not in self.stages: - sys.stderr.write('%s: obj: Unrecognized stage "%s".\n' % (self.name, a)) - sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - stage = a - elif o in ('-t', '--tail'): - tail = int(a) - elif o in ('--title',): - self.title = a - - if not args: - sys.stderr.write('%s: obj: Must specify an object name.\n' % self.name) - sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - - object_name = args.pop(0) - - if self.config_file: - with open(self.config_file, 'r') as f: - config = f.read() - HACK_for_exec(config, self.__dict__) - - if self.chdir: - os.chdir(self.chdir) - logfile_path = lambda x: 
os.path.join(self.chdir, x) - - if not args: - - pattern = '%s*.log' % self.prefix - args = self.args_to_files([pattern], tail) - - if not args: - if self.chdir: - directory = self.chdir - else: - directory = os.getcwd() - - sys.stderr.write('%s: obj: No arguments specified.\n' % self.name) - sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) - sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - - else: - - args = self.args_to_files(args, tail) - - cwd_ = os.getcwd() + os.sep - - if format == 'ascii': - - self.ascii_table(args, tuple(self.stages), self.get_object_counts, logfile_path, object_name) - - elif format == 'gnuplot': - - stage_index = 0 - for s in self.stages: - if stage == s: - break - stage_index = stage_index + 1 - - results = self.collect_results(args, self.get_object_counts, - object_name, stage_index) - - self.gnuplot_results(results) - - else: - - sys.stderr.write('%s: obj: Unknown format "%s".\n' % (self.name, format)) - sys.exit(1) - - return 0 - - # - - def help_run(self): - help = """\ - Usage: scons-time run [OPTIONS] [FILE ...] 
- - --chdir=DIR Name of unpacked directory for chdir - -f FILE, --file=FILE Read configuration from specified FILE - -h, --help Print this help and exit - -n, --no-exec No execute, just print command lines - --number=NUMBER Put output in files for run NUMBER - --outdir=OUTDIR Put output files in OUTDIR - -p STRING, --prefix=STRING Use STRING as log file/profile prefix - --python=PYTHON Time using the specified PYTHON - -q, --quiet Don't print command lines - --scons=SCONS Time using the specified SCONS - --svn=URL, --subversion=URL Use SCons from Subversion URL - -v, --verbose Display output of commands - """ - sys.stdout.write(self.outdent(help)) - sys.stdout.flush() - - def do_run(self, argv): - """ - """ - run_number_list = [None] - - short_opts = '?f:hnp:qs:v' - - long_opts = [ - 'file=', - 'help', - 'no-exec', - 'number=', - 'outdir=', - 'prefix=', - 'python=', - 'quiet', - 'scons=', - 'svn=', - 'subdir=', - 'subversion=', - 'verbose', - ] - - opts, args = getopt.getopt(argv[1:], short_opts, long_opts) - - for o, a in opts: - if o in ('-f', '--file'): - self.config_file = a - elif o in ('-?', '-h', '--help'): - self.do_help(['help', 'run']) - sys.exit(0) - elif o in ('-n', '--no-exec'): - self.execute = self._do_not_execute - elif o in ('--number',): - run_number_list = self.split_run_numbers(a) - elif o in ('--outdir',): - self.outdir = a - elif o in ('-p', '--prefix'): - self.prefix = a - elif o in ('--python',): - self.python = a - elif o in ('-q', '--quiet'): - self.display = self._do_not_display - elif o in ('-s', '--subdir'): - self.subdir = a - elif o in ('--scons',): - self.scons = a - elif o in ('--svn', '--subversion'): - self.subversion_url = a - elif o in ('-v', '--verbose'): - self.redirect = tee_to_file - self.verbose = True - self.svn_co_flag = '' - - if not args and not self.config_file: - sys.stderr.write('%s: run: No arguments or -f config file specified.\n' % self.name) - sys.stderr.write('%s Type "%s help run" for help.\n' % 
(self.name_spaces, self.name)) - sys.exit(1) - - if self.config_file: - with open(self.config_file, 'r') as f: - config = f.read() - exec(config, self.__dict__) - - if args: - self.archive_list = args - - archive_file_name = os.path.split(self.archive_list[0])[1] - - if not self.subdir: - self.subdir = self.archive_splitext(archive_file_name)[0] - - if not self.prefix: - self.prefix = self.archive_splitext(archive_file_name)[0] - - prepare = None - if self.subversion_url: - prepare = self.prep_subversion_run - - for run_number in run_number_list: - self.individual_run(run_number, self.archive_list, prepare) - - def split_run_numbers(self, s): - result = [] - for n in s.split(','): - try: - x, y = n.split('-') - except ValueError: - result.append(int(n)) - else: - result.extend(list(range(int(x), int(y)+1))) - return result - - def scons_path(self, dir): - return os.path.join(dir, 'src', 'script', 'scons.py') - - def scons_lib_dir_path(self, dir): - return os.path.join(dir, 'src', 'engine') - - def prep_subversion_run(self, commands, removals): - self.svn_tmpdir = tempfile.mkdtemp(prefix=self.name + '-svn-') - removals.append((shutil.rmtree, 'rm -rf %%s', self.svn_tmpdir)) - - self.scons = self.scons_path(self.svn_tmpdir) - self.scons_lib_dir = self.scons_lib_dir_path(self.svn_tmpdir) - - commands.extend([ - '%(svn)s co %(svn_co_flag)s -r %(run_number)s %(subversion_url)s %(svn_tmpdir)s', - ]) - - def individual_run(self, run_number, archive_list, prepare=None): - """ - Performs an individual run of the default SCons invocations. 
- """ - - commands = [] - removals = [] - - if prepare: - prepare(commands, removals) - - save_scons = self.scons - save_scons_wrapper = self.scons_wrapper - save_scons_lib_dir = self.scons_lib_dir - - if self.outdir is None: - self.outdir = self.orig_cwd - elif not os.path.isabs(self.outdir): - self.outdir = os.path.join(self.orig_cwd, self.outdir) - - if self.scons is None: - self.scons = self.scons_path(self.orig_cwd) - - if self.scons_lib_dir is None: - self.scons_lib_dir = self.scons_lib_dir_path(self.orig_cwd) - - if self.scons_wrapper is None: - self.scons_wrapper = self.scons - - if not run_number: - run_number = self.find_next_run_number(self.outdir, self.prefix) - - self.run_number = str(run_number) - - self.prefix_run = self.prefix + '-%03d' % run_number - - if self.targets0 is None: - self.targets0 = self.startup_targets - if self.targets1 is None: - self.targets1 = self.targets - if self.targets2 is None: - self.targets2 = self.targets - - self.tmpdir = tempfile.mkdtemp(prefix=self.name + '-') - - commands.extend([ - (os.chdir, 'cd %%s', self.tmpdir), - ]) - - for archive in archive_list: - if not os.path.isabs(archive): - archive = os.path.join(self.orig_cwd, archive) - if os.path.isdir(archive): - dest = os.path.split(archive)[1] - commands.append((shutil.copytree, 'cp -r %%s %%s', archive, dest)) - else: - suffix = self.archive_splitext(archive)[1] - unpack_command = self.unpack_map.get(suffix) - if not unpack_command: - dest = os.path.split(archive)[1] - commands.append((shutil.copyfile, 'cp %%s %%s', archive, dest)) - else: - commands.append(unpack_command + (archive,)) - - commands.extend([ - (os.chdir, 'cd %%s', self.subdir), - ]) - - commands.extend(self.initial_commands) - - commands.extend([ - (lambda: read_tree('.'), - 'find * -type f | xargs cat > /dev/null'), - - (self.set_env, 'export %%s=%%s', - 'SCONS_LIB_DIR', self.scons_lib_dir), - - '%(python)s %(scons_wrapper)s --version', - ]) - - index = 0 - for run_command in self.run_commands: - 
setattr(self, 'prof%d' % index, self.profile_name(index)) - c = ( - self.log_execute, - self.log_display, - run_command, - self.logfile_name(index), - ) - commands.append(c) - index = index + 1 - - commands.extend([ - (os.chdir, 'cd %%s', self.orig_cwd), - ]) - - if not os.environ.get('PRESERVE'): - commands.extend(removals) - commands.append((shutil.rmtree, 'rm -rf %%s', self.tmpdir)) - - self.run_command_list(commands, self.__dict__) - - self.scons = save_scons - self.scons_lib_dir = save_scons_lib_dir - self.scons_wrapper = save_scons_wrapper - - # - - def help_time(self): - help = """\ - Usage: scons-time time [OPTIONS] FILE [...] - - -C DIR, --chdir=DIR Change to DIR before looking for files - -f FILE, --file=FILE Read configuration from specified FILE - --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT - -h, --help Print this help and exit - -p STRING, --prefix=STRING Use STRING as log file/profile prefix - -t NUMBER, --tail=NUMBER Only report the last NUMBER files - --which=TIMER Plot timings for TIMER: total, - SConscripts, SCons, commands. 
- """ - sys.stdout.write(self.outdent(help)) - sys.stdout.flush() - - def do_time(self, argv): - - format = 'ascii' - logfile_path = lambda x: x - tail = None - which = 'total' - - short_opts = '?C:f:hp:t:' - - long_opts = [ - 'chdir=', - 'file=', - 'fmt=', - 'format=', - 'help', - 'prefix=', - 'tail=', - 'title=', - 'which=', - ] - - opts, args = getopt.getopt(argv[1:], short_opts, long_opts) - - for o, a in opts: - if o in ('-C', '--chdir'): - self.chdir = a - elif o in ('-f', '--file'): - self.config_file = a - elif o in ('--fmt', '--format'): - format = a - elif o in ('-?', '-h', '--help'): - self.do_help(['help', 'time']) - sys.exit(0) - elif o in ('-p', '--prefix'): - self.prefix = a - elif o in ('-t', '--tail'): - tail = int(a) - elif o in ('--title',): - self.title = a - elif o in ('--which',): - if a not in list(self.time_strings.keys()): - sys.stderr.write('%s: time: Unrecognized timer "%s".\n' % (self.name, a)) - sys.stderr.write('%s Type "%s help time" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - which = a - - if self.config_file: - with open(self.config_file, 'r') as f: - config = f.read() - HACK_for_exec(config, self.__dict__) - - if self.chdir: - os.chdir(self.chdir) - logfile_path = lambda x: os.path.join(self.chdir, x) - - if not args: - - pattern = '%s*.log' % self.prefix - args = self.args_to_files([pattern], tail) - - if not args: - if self.chdir: - directory = self.chdir - else: - directory = os.getcwd() - - sys.stderr.write('%s: time: No arguments specified.\n' % self.name) - sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) - sys.stderr.write('%s Type "%s help time" for help.\n' % (self.name_spaces, self.name)) - sys.exit(1) - - else: - - args = self.args_to_files(args, tail) - - cwd_ = os.getcwd() + os.sep - - if format == 'ascii': - - columns = ("Total", "SConscripts", "SCons", "commands") - self.ascii_table(args, columns, self.get_debug_times, logfile_path) - - elif 
format == 'gnuplot': - - results = self.collect_results(args, self.get_debug_times, - self.time_strings[which]) - - self.gnuplot_results(results, fmt='%s %.6f') - - else: - - sys.stderr.write('%s: time: Unknown format "%s".\n' % (self.name, format)) - sys.exit(1) - -if __name__ == '__main__': - opts, args = getopt.getopt(sys.argv[1:], 'h?V', ['help', 'version']) - - ST = SConsTimer() - - for o, a in opts: - if o in ('-?', '-h', '--help'): - ST.do_help(['help']) - sys.exit(0) - elif o in ('-V', '--version'): - sys.stdout.write('scons-time version\n') - sys.exit(0) - - if not args: - sys.stderr.write('Type "%s help" for usage.\n' % ST.name) - sys.exit(1) - - ST.execute_subcommand(args) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/scons.bat b/src/third_party/scons-3.1.2/scons.bat deleted file mode 100755 index dc671889f63..00000000000 --- a/src/third_party/scons-3.1.2/scons.bat +++ /dev/null @@ -1,38 +0,0 @@ -@REM Copyright (c) 2001 - 2019 The SCons Foundation -@REM src/script/scons.bat bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan -@echo off -set SCONS_ERRORLEVEL= -if "%OS%" == "Windows_NT" goto WinNT - -@REM for 9x/Me you better not have more than 9 args -python -c "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-3.1.2'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons-3.1.2'), join(sys.prefix, 'scons')] + sys.path; import SCons.Script; SCons.Script.main()" %1 %2 %3 %4 %5 %6 %7 %8 %9 -@REM no way to set exit status of this script for 9x/Me -goto endscons - -@REM Credit where credit is due: we return the exit code despite our -@REM use of setlocal+endlocal using a technique from Bear's Journal: -@REM http://code-bear.com/bearlog/2007/06/01/getting-the-exit-code-from-a-batch-file-that-is-run-from-a-python-program/ - -:WinNT -setlocal -@REM ensure the script 
will be executed with the Python it was installed for -pushd %~dp0.. -set path=%~dp0;%CD%;%path% -popd -@REM try the script named as the .bat file in current dir, then in Scripts subdir -set scriptname=%~dp0%~n0.py -if not exist "%scriptname%" set scriptname=%~dp0Scripts\%~n0.py -@REM Handle when running from wheel where the script has no .py extension -if not exist "%scriptname%" set scriptname=%~dp0%~n0 -python "%scriptname%" %* -endlocal & set SCONS_ERRORLEVEL=%ERRORLEVEL% - -if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto returncode -if errorlevel 9009 echo you do not have python in your PATH -goto endscons - -:returncode -exit /B %SCONS_ERRORLEVEL% - -:endscons -call :returncode %SCONS_ERRORLEVEL% diff --git a/src/third_party/scons-3.1.2/scons.py b/src/third_party/scons-3.1.2/scons.py deleted file mode 100755 index d889e7472fb..00000000000 --- a/src/third_party/scons-3.1.2/scons.py +++ /dev/null @@ -1,210 +0,0 @@ -#! /usr/bin/env python -# -# SCons - a Software Constructor -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -from __future__ import print_function - -__revision__ = "src/script/scons.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__version__ = "3.1.2" - -__build__ = "bee7caf9defd6e108fc2998a2520ddb36a967691" - -__buildsys__ = "octodog" - -__date__ = "2019-12-17 02:07:09" - -__developer__ = "bdeegan" - -# This is the entry point to the SCons program. -# The only job of this script is to work out where the guts of the program -# could be and import them, where the real work begins. -# SCons can be invoked several different ways -# - from an installed location -# - from a "local install" copy -# - from a source tree, which has a different dir struture than the other two -# Try to account for all those possibilities. - -import os -import sys - -############################################################################## -# BEGIN STANDARD SCons SCRIPT HEADER -# -# This is the cut-and-paste logic so that a self-contained script can -# interoperate correctly with different SCons versions and installation -# locations for the engine. If you modify anything in this section, you -# should also change other scripts that use this same header. -############################################################################## - -# compatibility check -if (3,0,0) < sys.version_info < (3,5,0) or sys.version_info < (2,7,0): - msg = "scons: *** SCons version %s does not run under Python version %s.\n\ -Python 2.7 or >= 3.5 is required.\n" - sys.stderr.write(msg % (__version__, sys.version.split()[0])) - sys.exit(1) - -# Strip the script directory from sys.path so on case-insensitive -# (WIN32) systems Python doesn't think that the "scons" script is the -# "SCons" package. 
-script_dir = os.path.dirname(os.path.realpath(__file__)) -script_path = os.path.realpath(os.path.dirname(__file__)) -if script_path in sys.path: - sys.path.remove(script_path) - -libs = [] - -if "SCONS_LIB_DIR" in os.environ: - libs.append(os.environ["SCONS_LIB_DIR"]) - -# running from source takes 2nd priority (since 2.3.2), following SCONS_LIB_DIR -source_path = os.path.join(script_path, os.pardir, 'engine') -if os.path.isdir(source_path): - libs.append(source_path) - -# add local-install locations -local_version = 'scons-local-' + __version__ -local = 'scons-local' -if script_dir: - local_version = os.path.join(script_dir, local_version) - local = os.path.join(script_dir, local) -if os.path.isdir(local_version): - libs.append(os.path.abspath(local_version)) -if os.path.isdir(local): - libs.append(os.path.abspath(local)) - -scons_version = 'scons-%s' % __version__ - -# preferred order of scons lookup paths -prefs = [] - -# if we can find package information, use it -try: - import pkg_resources -except ImportError: - pass -else: - try: - d = pkg_resources.get_distribution('scons') - except pkg_resources.DistributionNotFound: - pass - else: - prefs.append(d.location) - -if sys.platform == 'win32': - # Use only sys.prefix on Windows - prefs.append(sys.prefix) - prefs.append(os.path.join(sys.prefix, 'Lib', 'site-packages')) -else: - # On other (POSIX) platforms, things are more complicated due to - # the variety of path names and library locations. - # Build up some possibilities, then transform them into candidates - temp = [] - if script_dir == 'bin': - # script_dir is `pwd`/bin; - # check `pwd`/lib/scons*. - temp.append(os.getcwd()) - else: - if script_dir == '.' or script_dir == '': - script_dir = os.getcwd() - head, tail = os.path.split(script_dir) - if tail == "bin": - # script_dir is /foo/bin; - # check /foo/lib/scons*. 
- temp.append(head) - - head, tail = os.path.split(sys.prefix) - if tail == "usr": - # sys.prefix is /foo/usr; - # check /foo/usr/lib/scons* first, - # then /foo/usr/local/lib/scons*. - temp.append(sys.prefix) - temp.append(os.path.join(sys.prefix, "local")) - elif tail == "local": - h, t = os.path.split(head) - if t == "usr": - # sys.prefix is /foo/usr/local; - # check /foo/usr/local/lib/scons* first, - # then /foo/usr/lib/scons*. - temp.append(sys.prefix) - temp.append(head) - else: - # sys.prefix is /foo/local; - # check only /foo/local/lib/scons*. - temp.append(sys.prefix) - else: - # sys.prefix is /foo (ends in neither /usr or /local); - # check only /foo/lib/scons*. - temp.append(sys.prefix) - - # suffix these to add to our original prefs: - prefs.extend([os.path.join(x, 'lib') for x in temp]) - prefs.extend([os.path.join(x, 'lib', 'python' + sys.version[:3], - 'site-packages') for x in temp]) - - - # Add the parent directory of the current python's library to the - # preferences. This picks up differences between, e.g., lib and lib64, - # and finds the base location in case of a non-copying virtualenv. - try: - libpath = os.__file__ - except AttributeError: - pass - else: - # Split /usr/libfoo/python*/os.py to /usr/libfoo/python*. - libpath, tail = os.path.split(libpath) - # Split /usr/libfoo/python* to /usr/libfoo - libpath, tail = os.path.split(libpath) - # Check /usr/libfoo/scons*. - prefs.append(libpath) - -# Look first for 'scons-__version__' in all of our preference libs, -# then for 'scons'. Skip paths that do not exist. 
-libs.extend([os.path.join(x, scons_version) for x in prefs if os.path.isdir(x)]) -libs.extend([os.path.join(x, 'scons') for x in prefs if os.path.isdir(x)]) - -sys.path = libs + sys.path - -############################################################################## -# END STANDARD SCons SCRIPT HEADER -############################################################################## - -if __name__ == "__main__": - try: - import SCons.Script - except ImportError: - sys.stderr.write("SCons import failed. Unable to find engine files in:\n") - for path in libs: - sys.stderr.write(" {}\n".format(path)) - raise - - # this does all the work, and calls sys.exit - # with the proper exit status when done. - SCons.Script.main() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/scons-3.1.2/sconsign.py b/src/third_party/scons-3.1.2/sconsign.py deleted file mode 100755 index b1f7cd0b98b..00000000000 --- a/src/third_party/scons-3.1.2/sconsign.py +++ /dev/null @@ -1,654 +0,0 @@ -#! /usr/bin/env python -# -# SCons - a Software Constructor -# -# Copyright (c) 2001 - 2019 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -from __future__ import print_function - -__revision__ = "src/script/sconsign.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" - -__version__ = "3.1.2" - -__build__ = "bee7caf9defd6e108fc2998a2520ddb36a967691" - -__buildsys__ = "octodog" - -__date__ = "2019-12-17 02:07:09" - -__developer__ = "bdeegan" - -import os -import sys - -############################################################################## -# BEGIN STANDARD SCons SCRIPT HEADER -# -# This is the cut-and-paste logic so that a self-contained script can -# interoperate correctly with different SCons versions and installation -# locations for the engine. If you modify anything in this section, you -# should also change other scripts that use this same header. -############################################################################## - -# compatibility check -if (3,0,0) < sys.version_info < (3,5,0) or sys.version_info < (2,7,0): - msg = "scons: *** SCons version %s does not run under Python version %s.\n\ -Python 2.7 or >= 3.5 is required.\n" - sys.stderr.write(msg % (__version__, sys.version.split()[0])) - sys.exit(1) - -# Strip the script directory from sys.path so on case-insensitive -# (WIN32) systems Python doesn't think that the "scons" script is the -# "SCons" package. 
-script_dir = os.path.dirname(os.path.realpath(__file__)) -script_path = os.path.realpath(os.path.dirname(__file__)) -if script_path in sys.path: - sys.path.remove(script_path) - -libs = [] - -if "SCONS_LIB_DIR" in os.environ: - libs.append(os.environ["SCONS_LIB_DIR"]) - -# running from source takes 2nd priority (since 2.3.2), following SCONS_LIB_DIR -source_path = os.path.join(script_path, os.pardir, 'engine') -if os.path.isdir(source_path): - libs.append(source_path) - -# add local-install locations -local_version = 'scons-local-' + __version__ -local = 'scons-local' -if script_dir: - local_version = os.path.join(script_dir, local_version) - local = os.path.join(script_dir, local) -if os.path.isdir(local_version): - libs.append(os.path.abspath(local_version)) -if os.path.isdir(local): - libs.append(os.path.abspath(local)) - -scons_version = 'scons-%s' % __version__ - -# preferred order of scons lookup paths -prefs = [] - -# if we can find package information, use it -try: - import pkg_resources -except ImportError: - pass -else: - try: - d = pkg_resources.get_distribution('scons') - except pkg_resources.DistributionNotFound: - pass - else: - prefs.append(d.location) - -if sys.platform == 'win32': - # Use only sys.prefix on Windows - prefs.append(sys.prefix) - prefs.append(os.path.join(sys.prefix, 'Lib', 'site-packages')) -else: - # On other (POSIX) platforms, things are more complicated due to - # the variety of path names and library locations. - # Build up some possibilities, then transform them into candidates - temp = [] - if script_dir == 'bin': - # script_dir is `pwd`/bin; - # check `pwd`/lib/scons*. - temp.append(os.getcwd()) - else: - if script_dir in ('.', ''): - script_dir = os.getcwd() - head, tail = os.path.split(script_dir) - if tail == "bin": - # script_dir is /foo/bin; - # check /foo/lib/scons*. 
- temp.append(head) - - head, tail = os.path.split(sys.prefix) - if tail == "usr": - # sys.prefix is /foo/usr; - # check /foo/usr/lib/scons* first, - # then /foo/usr/local/lib/scons*. - temp.append(sys.prefix) - temp.append(os.path.join(sys.prefix, "local")) - elif tail == "local": - h, t = os.path.split(head) - if t == "usr": - # sys.prefix is /foo/usr/local; - # check /foo/usr/local/lib/scons* first, - # then /foo/usr/lib/scons*. - temp.append(sys.prefix) - temp.append(head) - else: - # sys.prefix is /foo/local; - # check only /foo/local/lib/scons*. - temp.append(sys.prefix) - else: - # sys.prefix is /foo (ends in neither /usr or /local); - # check only /foo/lib/scons*. - temp.append(sys.prefix) - - # suffix these to add to our original prefs: - prefs.extend([os.path.join(x, 'lib') for x in temp]) - prefs.extend([os.path.join(x, 'lib', 'python' + sys.version[:3], - 'site-packages') for x in temp]) - - - # Add the parent directory of the current python's library to the - # preferences. This picks up differences between, e.g., lib and lib64, - # and finds the base location in case of a non-copying virtualenv. - try: - libpath = os.__file__ - except AttributeError: - pass - else: - # Split /usr/libfoo/python*/os.py to /usr/libfoo/python*. - libpath, _ = os.path.split(libpath) - # Split /usr/libfoo/python* to /usr/libfoo - libpath, tail = os.path.split(libpath) - # Check /usr/libfoo/scons*. - prefs.append(libpath) - -# Look first for 'scons-__version__' in all of our preference libs, -# then for 'scons'. Skip paths that do not exist. 
-libs.extend([os.path.join(x, scons_version) for x in prefs if os.path.isdir(x)]) -libs.extend([os.path.join(x, 'scons') for x in prefs if os.path.isdir(x)]) - -sys.path = libs + sys.path - -############################################################################## -# END STANDARD SCons SCRIPT HEADER -############################################################################## - -import SCons.compat - -try: - import whichdb - - whichdb = whichdb.whichdb -except ImportError as e: - from dbm import whichdb - -import time -import pickle - -import SCons.SConsign - - -def my_whichdb(filename): - if filename[-7:] == ".dblite": - return "SCons.dblite" - try: - with open(filename + ".dblite", "rb"): - return "SCons.dblite" - except IOError: - pass - return _orig_whichdb(filename) - - -# Should work on python2 -_orig_whichdb = whichdb -whichdb = my_whichdb - -# was changed for python3 -#_orig_whichdb = whichdb.whichdb -#dbm.whichdb = my_whichdb - -def my_import(mname): - import imp - - if '.' in mname: - i = mname.rfind('.') - parent = my_import(mname[:i]) - fp, pathname, description = imp.find_module(mname[i+1:], - parent.__path__) - else: - fp, pathname, description = imp.find_module(mname) - return imp.load_module(mname, fp, pathname, description) - - -class Flagger(object): - default_value = 1 - - def __setitem__(self, item, value): - self.__dict__[item] = value - self.default_value = 0 - - def __getitem__(self, item): - return self.__dict__.get(item, self.default_value) - - -Do_Call = None -Print_Directories = [] -Print_Entries = [] -Print_Flags = Flagger() -Verbose = 0 -Readable = 0 -Warns = 0 - - -def default_mapper(entry, name): - """ - Stringify an entry that doesn't have an explicit mapping. - - Args: - entry: entry - name: field name - - Returns: str - - """ - try: - val = eval("entry." + name) - except AttributeError: - val = None - if sys.version_info.major >= 3 and isinstance(val, bytes): - # This is a dirty hack for py 2/3 compatibility. 
csig is a bytes object - # in Python3 while Python2 bytes are str. Hence, we decode the csig to a - # Python3 string - val = val.decode() - return str(val) - - -def map_action(entry, _): - """ - Stringify an action entry and signature. - - Args: - entry: action entry - second argument is not used - - Returns: str - - """ - try: - bact = entry.bact - bactsig = entry.bactsig - except AttributeError: - return None - return '%s [%s]' % (bactsig, bact) - - -def map_timestamp(entry, _): - """ - Stringify a timestamp entry. - - Args: - entry: timestamp entry - second argument is not used - - Returns: str - - """ - try: - timestamp = entry.timestamp - except AttributeError: - timestamp = None - if Readable and timestamp: - return "'" + time.ctime(timestamp) + "'" - else: - return str(timestamp) - - -def map_bkids(entry, _): - """ - Stringify an implicit entry. - - Args: - entry: - second argument is not used - - Returns: str - - """ - try: - bkids = entry.bsources + entry.bdepends + entry.bimplicit - bkidsigs = entry.bsourcesigs + entry.bdependsigs + entry.bimplicitsigs - except AttributeError: - return None - - if len(bkids) != len(bkidsigs): - global Warns - Warns += 1 - # add warning to result rather than direct print so it will line up - msg = "Warning: missing information, {} ids but {} sigs" - result = [msg.format(len(bkids), len(bkidsigs))] - else: - result = [] - result += [nodeinfo_string(bkid, bkidsig, " ") - for bkid, bkidsig in zip(bkids, bkidsigs)] - if not result: - return None - return "\n ".join(result) - - -map_field = { - 'action' : map_action, - 'timestamp' : map_timestamp, - 'bkids' : map_bkids, -} - -map_name = { - 'implicit' : 'bkids', -} - - -def field(name, entry, verbose=Verbose): - if not Print_Flags[name]: - return None - fieldname = map_name.get(name, name) - mapper = map_field.get(fieldname, default_mapper) - val = mapper(entry, name) - if verbose: - val = name + ": " + val - return val - - -def nodeinfo_raw(name, ninfo, prefix=""): - # This 
just formats the dictionary, which we would normally use str() - # to do, except that we want the keys sorted for deterministic output. - d = ninfo.__getstate__() - try: - keys = ninfo.field_list + ['_version_id'] - except AttributeError: - keys = sorted(d.keys()) - l = [] - for k in keys: - l.append('%s: %s' % (repr(k), repr(d.get(k)))) - if '\n' in name: - name = repr(name) - return name + ': {' + ', '.join(l) + '}' - - -def nodeinfo_cooked(name, ninfo, prefix=""): - try: - field_list = ninfo.field_list - except AttributeError: - field_list = [] - if '\n' in name: - name = repr(name) - outlist = [name + ':'] + [ - f for f in [field(x, ninfo, Verbose) for x in field_list] if f - ] - if Verbose: - sep = '\n ' + prefix - else: - sep = ' ' - return sep.join(outlist) - - -nodeinfo_string = nodeinfo_cooked - - -def printfield(name, entry, prefix=""): - outlist = field("implicit", entry, 0) - if outlist: - if Verbose: - print(" implicit:") - print(" " + outlist) - outact = field("action", entry, 0) - if outact: - if Verbose: - print(" action: " + outact) - else: - print(" " + outact) - - -def printentries(entries, location): - if Print_Entries: - for name in Print_Entries: - try: - entry = entries[name] - except KeyError: - err = "sconsign: no entry `%s' in `%s'\n" % (name, location) - sys.stderr.write(err) - else: - try: - ninfo = entry.ninfo - except AttributeError: - print(name + ":") - else: - print(nodeinfo_string(name, entry.ninfo)) - printfield(name, entry.binfo) - else: - for name in sorted(entries.keys()): - entry = entries[name] - try: - ninfo = entry.ninfo - except AttributeError: - print(name + ":") - else: - print(nodeinfo_string(name, entry.ninfo)) - printfield(name, entry.binfo) - - -class Do_SConsignDB(object): - def __init__(self, dbm_name, dbm): - self.dbm_name = dbm_name - self.dbm = dbm - - def __call__(self, fname): - # The *dbm modules stick their own file suffixes on the names - # that are passed in. 
This causes us to jump through some - # hoops here. - try: - # Try opening the specified file name. Example: - # SPECIFIED OPENED BY self.dbm.open() - # --------- ------------------------- - # .sconsign => .sconsign.dblite - # .sconsign.dblite => .sconsign.dblite.dblite - db = self.dbm.open(fname, "r") - except (IOError, OSError) as e: - print_e = e - try: - # That didn't work, so try opening the base name, - # so that if they actually passed in 'sconsign.dblite' - # (for example), the dbm module will put the suffix back - # on for us and open it anyway. - db = self.dbm.open(os.path.splitext(fname)[0], "r") - except (IOError, OSError): - # That didn't work either. See if the file name - # they specified even exists (independent of the dbm - # suffix-mangling). - try: - with open(fname, "rb"): - pass # this is a touch only, we don't use it here. - except (IOError, OSError) as e: - # Nope, that file doesn't even exist, so report that - # fact back. - print_e = e - sys.stderr.write("sconsign: %s\n" % print_e) - return - except KeyboardInterrupt: - raise - except pickle.UnpicklingError: - sys.stderr.write("sconsign: ignoring invalid `%s' file `%s'\n" - % (self.dbm_name, fname)) - return - except Exception as e: - sys.stderr.write("sconsign: ignoring invalid `%s' file `%s': %s\n" - % (self.dbm_name, fname, e)) - exc_type, _, _ = sys.exc_info() - if exc_type.__name__ == "ValueError" and sys.version_info < (3,0,0): - sys.stderr.write("Python 2 only supports pickle protocols 0-2.\n") - return - - if Print_Directories: - for dir in Print_Directories: - try: - val = db[dir] - except KeyError: - err = "sconsign: no dir `%s' in `%s'\n" % (dir, args[0]) - sys.stderr.write(err) - else: - self.printentries(dir, val) - else: - for dir in sorted(db.keys()): - self.printentries(dir, db[dir]) - - @staticmethod - def printentries(dir, val): - try: - print('=== ' + dir + ':') - except TypeError: - print('=== ' + dir.decode() + ':') - printentries(pickle.loads(val), dir) - - -def 
Do_SConsignDir(name): - try: - with open(name, 'rb') as fp: - try: - sconsign = SCons.SConsign.Dir(fp) - except KeyboardInterrupt: - raise - except pickle.UnpicklingError: - err = "sconsign: ignoring invalid .sconsign file `%s'\n" % (name) - sys.stderr.write(err) - return - except Exception as e: - err = "sconsign: ignoring invalid .sconsign file `%s': %s\n" % (name, e) - sys.stderr.write(err) - return - printentries(sconsign.entries, args[0]) - except (IOError, OSError) as e: - sys.stderr.write("sconsign: %s\n" % e) - return - - -############################################################################## - -import getopt - -helpstr = """\ -Usage: sconsign [OPTIONS] [FILE ...] -Options: - -a, --act, --action Print build action information. - -c, --csig Print content signature information. - -d DIR, --dir=DIR Print only info about DIR. - -e ENTRY, --entry=ENTRY Print only info about ENTRY. - -f FORMAT, --format=FORMAT FILE is in the specified FORMAT. - -h, --help Print this message and exit. - -i, --implicit Print implicit dependency information. - -r, --readable Print timestamps in human-readable form. - --raw Print raw Python object representations. - -s, --size Print file sizes. - -t, --timestamp Print timestamp information. - -v, --verbose Verbose, describe each field. -""" - -try: - opts, args = getopt.getopt(sys.argv[1:], "acd:e:f:hirstv", - ['act', 'action', - 'csig', 'dir=', 'entry=', - 'format=', 'help', 'implicit', - 'raw', 'readable', - 'size', 'timestamp', 'verbose']) -except getopt.GetoptError as err: - sys.stderr.write(str(err) + '\n') - print(helpstr) - sys.exit(2) - -for o, a in opts: - if o in ('-a', '--act', '--action'): - Print_Flags['action'] = 1 - elif o in ('-c', '--csig'): - Print_Flags['csig'] = 1 - elif o in ('-d', '--dir'): - Print_Directories.append(a) - elif o in ('-e', '--entry'): - Print_Entries.append(a) - elif o in ('-f', '--format'): - # Try to map the given DB format to a known module - # name, that we can then try to import... 
- Module_Map = {'dblite': 'SCons.dblite', 'sconsign': None} - dbm_name = Module_Map.get(a, a) - if dbm_name: - try: - if dbm_name != "SCons.dblite": - dbm = my_import(dbm_name) - else: - import SCons.dblite - - dbm = SCons.dblite - # Ensure that we don't ignore corrupt DB files, - # this was handled by calling my_import('SCons.dblite') - # again in earlier versions... - SCons.dblite.ignore_corrupt_dbfiles = 0 - except ImportError: - sys.stderr.write("sconsign: illegal file format `%s'\n" % a) - print(helpstr) - sys.exit(2) - Do_Call = Do_SConsignDB(a, dbm) - else: - Do_Call = Do_SConsignDir - elif o in ('-h', '--help'): - print(helpstr) - sys.exit(0) - elif o in ('-i', '--implicit'): - Print_Flags['implicit'] = 1 - elif o in ('--raw',): - nodeinfo_string = nodeinfo_raw - elif o in ('-r', '--readable'): - Readable = 1 - elif o in ('-s', '--size'): - Print_Flags['size'] = 1 - elif o in ('-t', '--timestamp'): - Print_Flags['timestamp'] = 1 - elif o in ('-v', '--verbose'): - Verbose = 1 - -if Do_Call: - for a in args: - Do_Call(a) -else: - if not args: - args = [".sconsign.dblite"] - for a in args: - dbm_name = whichdb(a) - if dbm_name: - Map_Module = {'SCons.dblite': 'dblite'} - if dbm_name != "SCons.dblite": - dbm = my_import(dbm_name) - else: - import SCons.dblite - - dbm = SCons.dblite - # Ensure that we don't ignore corrupt DB files, - # this was handled by calling my_import('SCons.dblite') - # again in earlier versions... 
- SCons.dblite.ignore_corrupt_dbfiles = 0 - Do_SConsignDB(Map_Module.get(dbm_name, dbm_name), dbm)(a) - else: - Do_SConsignDir(a) - - if Warns: - print("NOTE: there were %d warnings, please check output" % Warns) -sys.exit(0) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/src/third_party/tcmalloc/SConscript b/src/third_party/tcmalloc/SConscript deleted file mode 100644 index 0d090df0e8c..00000000000 --- a/src/third_party/tcmalloc/SConscript +++ /dev/null @@ -1,185 +0,0 @@ -# Project: com_google_tcmalloc -import json -import re -import sys - -import SCons - -Import("env") -Import("has_option") -Import("get_option") - -env = env.Clone( - # Building with hidden visibility interferes with intercepting the - # libc allocation functions. - DISALLOW_VISHIDDEN=True, - NINJA_GENSOURCE_INDEPENDENT=True, -) - -if env.Verbose(): - - def tcmalloc_scons_print(msg, *args, **kwargs): - print("[TCMALLOC_TO_SCONS]: " + msg, *args, **kwargs) -else: - - def tcmalloc_scons_print(msg, *args, **kwargs): - pass - - -# manually switch this for all the debugging -tcmalloc_extra_debug = False - -if tcmalloc_extra_debug: - - def tcmalloc_scons_debug(msg, *args, **kwargs): - print("[TCMALLOC_TO_SCONS][DEBUG]: " + msg, *args, **kwargs) -else: - - def tcmalloc_scons_debug(msg, *args, **kwargs): - pass - - -_bazelToSconsMap = dict( - (f'@com_google_absl//absl/{k}', [f'$BUILD_DIR/third_party/abseil-cpp/absl_{ve}' for ve in v]) - for k, v in { - 'algorithm:container': [], - 'base:config': [], - 'base:core_headers': [], - 'base:dynamic_annotations': [], - 'container:btree': [], - 'container:fixed_array': [], - 'container:flat_hash_map': ['raw_hash_set'], - 'debugging:leak_check': [], - 'debugging:stacktrace': ['stacktrace'], - 'debugging:symbolize': [], - 'functional:function_ref': [], - 'base:malloc_internal': ['malloc_internal'], - 'memory': [], - 'numeric:bits': [], - 'numeric:int128': [], - 'strings:str_format': 
['str_format_internal'], - 'types:optional': [], - 'types:span': [], - }.items()) - -sys.path.append(env.Dir('scripts/site-scons').srcnode().abspath) -from bazel_to_scons import BazelEnv, Label - - -def dumpBazelLibs(baz, target): - if tcmalloc_extra_debug: - tcmalloc_scons_debug(f"Dumping tcmalloc deps to: '{target}'", file=sys.stderr) - with open(target.abspath, 'w') as dump: - tcmalloc_scons_debug( - json.dumps({'libraries': baz}, sort_keys=True, indent=4), file=dump) - else: - pass - - -def _remapAbseilDep(label: Label) -> 'list[str]': - tcmalloc_scons_print(f'Remap abseilDep {label}', file=sys.stderr) - if str(label) in _bazelToSconsMap: - out = _bazelToSconsMap[str(label)] - tcmalloc_scons_print(f'Remap {label} to {out}', file=sys.stderr) - return out - - pkg = label.package().replace('/', '_') - tgt = label.target() - # bazel expands //foo/bar => //foo/bar:bar implicitly. Use short form - if tgt and not pkg.endswith('/' + tgt): - tgt = "_" + tgt.replace('/', '_') - else: - tgt = '' - return [f'$BUILD_DIR/third_party/abseil-cpp/{pkg}{tgt}'] - - -def findAbslLibs(): - abslSconscript = env.File('$BUILD_DIR/third_party/abseil-cpp/SConscript').srcnode().abspath - tcmalloc_scons_debug(f'abslSconscript={abslSconscript}', file=sys.stderr) - abslLibs = [] - with open(abslSconscript) as inf: - lines = (s.strip() for s in inf.readlines()) - targetRe = re.compile(r"\s*target=['\"](.*)['\"],") - for line in lines: - m = targetRe.match(line) - if m: - fq = f'$BUILD_DIR/third_party/abseil-cpp/{m[1]}' - tcmalloc_scons_debug(f"found {fq} in {line}", file=sys.stderr) - abslLibs.append(fq) - return sorted(abslLibs) - - -def _mapDepToScons(lab: str, base: str = '') -> str: - if re.match(r'^@com_google_absl//', lab): - return _remapAbseilDep(Label(lab)) - lab = re.sub(r'^:', f'//{Label(base).package()}:', lab) - lab = re.sub(r'^//', '', lab) - lab = re.sub(r'(.*):(.*)', r'\1_\2', lab) - lab = lab.replace("/", "_") - return [lab] - - -def slurpBlaze(target, source, exports, env): 
- bazel = BazelEnv(env, env.Dir("dist").srcnode().abspath, debug=tcmalloc_scons_debug) - bazel.run() - bazel.pruneTestOnlyLibraries() - bazel.eliminateHeadersFromSources() - bazel.eliminateSourcelessDeps() - bzl = bazel.libraries() - dumpBazelLibs(bzl, target) - resolved = bazel.resolveDeps(exports) - - unknowns = [(x, resolved[x]) for x in resolved if 'unknown' in resolved[x]] - abslImports = {} - for unk in sorted(unknowns): - lab = Label(unk[0]) - if lab.remote() == 'com_google_absl': - abslImports[str(lab)] = _remapAbseilDep(lab) - tcmalloc_scons_debug(f"{json.dumps({'abslImports': abslImports}, indent=4)}", file=sys.stderr) - - tcmalloc_scons_print('Final render into env.Library calls', file=sys.stderr) - for libName in sorted(resolved.keys()): - if Label(libName).remote() or libName in _bazelToSconsMap or libName not in bzl: - continue - libDef = bzl[libName] - # It's the abseil name - lab = _mapDepToScons(libName)[0] - tcmalloc_scons_debug(f'libName: {libName:60s} => {lab}', file=sys.stderr) - tcmalloc_scons_debug(f' {json.dumps(list(libDef), indent=4)}', file=sys.stderr) - kwargs = {'target': lab} - for src in libDef.get('srcs', []): - src = f'dist/{Label(libName).package()}/{src}' - tcmalloc_scons_debug(f'srcs for lib={libName} -> src={src}', file=sys.stderr) - kwargs.setdefault('source', []).append(src) - for dep in libDef.get('deps', set()): - scons_deps = _mapDepToScons(dep, base=libName) - tcmalloc_scons_debug(f'lib={libName}: dep={dep} => {scons_deps}', file=sys.stderr) - kwargs.setdefault('LIBDEPS', []).extend(scons_deps) - if 'LIBDEPS' in kwargs: - kwargs['LIBDEPS'] = sorted(list(set(kwargs['LIBDEPS']))) - - # libunwind and tcmalloc are both added as global dependencies. Skip - # inheriting global dependencies to avoid a circular dependency. 
- kwargs['LIBDEPS_NO_INHERIT'] = [ - '$BUILD_DIR/third_party/unwind/unwind', - "$BUILD_DIR/third_party/tcmalloc/tcmalloc", - "$BUILD_DIR/third_party/gperftools/tcmalloc_minimal", - ] - - for cf in libDef.get('copts', []): - kwargs.setdefault('CCFLAGS', [e for e in env.get('CCFLAGS', [])]).append(cf) - tcmalloc_scons_print(f'env.Library(**{json.dumps(kwargs, indent=4)})', file=sys.stderr) - env.BazelLibrary(**kwargs) - - return 0 - - -env = env.Clone() -env.InjectThirdParty(libraries=['abseil-cpp']) - -if get_option('link-model') == 'dynamic': - env.Append(CPPDEFINES=[('MONGO_TCMALLOC_DYNAMIC_BUILD', 1)]) - -slurpBlaze( - target=env.File('tcmalloc_deps.json').srcnode(), source=[], - exports=['//tcmalloc', '//tcmalloc:tcmalloc_extension'], env=env) diff --git a/src/third_party/tcmalloc/scripts/import.sh b/src/third_party/tcmalloc/scripts/import.sh index 5aede5d9f2b..820994fb08b 100755 --- a/src/third_party/tcmalloc/scripts/import.sh +++ b/src/third_party/tcmalloc/scripts/import.sh @@ -29,6 +29,6 @@ mv $LIB_GIT_DIR/* $DEST_DIR/dist pushd $DEST_DIR/dist find . -mindepth 1 -maxdepth 1 -name ".*" -exec rm -rf {} \; rm -rf ci -rm -rf scons_gen_build +rm -rf gen_build find tcmalloc -type d -name "testdata" -exec rm -rf {} \; popd diff --git a/src/third_party/tcmalloc/scripts/site-scons/bazel_to_scons.py b/src/third_party/tcmalloc/scripts/site-scons/bazel_to_scons.py deleted file mode 100755 index 5eb08f94f17..00000000000 --- a/src/third_party/tcmalloc/scripts/site-scons/bazel_to_scons.py +++ /dev/null @@ -1,343 +0,0 @@ -#!/usr/bin/env python3 - -""" Convert tcmalloc's Bazel filetree to SConscript - -This sets up an environment in which Bazel's Python code -can execute and produce equivalent Scons directives. - -Supports the subset of Bazel that TCMalloc needs. 
-""" - -import copy -import glob -import json -import os -import re -import sys -import textwrap - -############################################################ -class Label: - _RE_REMOTE = re.compile('@([\w]*)') - _RE_PACKAGE = re.compile('//([\w/]*)') - _RE_TARGET = re.compile('(?::?)([^:]*)') - - def _consumePrefix(self, re, s): - m = re.match(s) - if m: - return m[1], s[len(m[0]):] - return None, s - - def __init__(self, label): - label = re.sub(r'/(\w+):(\1)', r'/\1', label) - self._spec = label - s = self._spec - self._remote, s = self._consumePrefix(self._RE_REMOTE, s) - self._package, s = self._consumePrefix(self._RE_PACKAGE, s) - self._target, s = self._consumePrefix(self._RE_TARGET, s) - - def __str__(self): return self._spec - def remote(self): return self._remote - def package(self): return self._package - def target(self): return self._target - - - -############################################################ -class EvalContext: - def __init__(self, bazelEnv, thisLabel, debug=lambda x: x): - self._bazelEnv = bazelEnv - self._root = self._bazelEnv.root() - self._label = thisLabel - self.debug = debug - - def _dummy(self, *args, **kwargs) : pass - - def bazelEnv(self): - return self._bazelEnv - - def label(self): - return self._label - - def load(self, bazelPath, *syms, **aliasSyms): - bzl = Label(bazelPath) - filePath = self.bazelEnv().resolveFile(bzl) - if not filePath: - self.debug(f"load: Ignoring remote load: {bzl}") - return - self.debug(f"\_ load({bzl}, {syms}, file={filePath})") - glo = copy.copy(self.bazelEnv().getGlobals()) - self.debug(f"Before: glo[{len(glo)}]=[{','.join(glo.keys())}]") - self._execFile(filePath, glo) - self.debug("Import symbols: [") - envGlo = self.bazelEnv().getGlobals() - for sym in syms: - aliasSyms[sym] = sym - for alias, sym in aliasSyms.items(): - envGlo[alias] = glo[sym] - self.debug("]") - self.debug(f"After: glo[{len(envGlo)}]=[{','.join(envGlo.keys())}]") - - def _execFile(self, file, glo): - 
self.debug(f"Compiling {file}") - with open(os.path.join(self.bazelEnv().root(), file)) as f: - exec(compile(f.read(), file, 'exec'), glo) - newline='\n ' - self.debug(f"exec({file}) completed. Side effects:\nglobals={newline.join(glo)}\n") - - def installIgnoredMembers(self, ignoredMembers: 'list[str]'): - for memfn in ignoredMembers: - def annotated(fn, note) : - def wrapper(*args, **kwargs): - nonlocal fn - self.debug(f"# {note} args={args}, kwargs={kwargs}") - fn(*args, **kwargs) - return wrapper - setattr(self, memfn, annotated(self._dummy, memfn)) - -############################################################ -class BuildEvalContext(EvalContext): - _IGNORED_MEMBERS = [ - 'alias', - 'cc_binary', - 'cc_fuzz_test', - 'cc_proto_library', - 'cc_test', - 'config_setting', - 'exports_files', - 'generated_file_staleness_test', - 'genrule', - 'licenses', - 'lua_binary', - 'lua_cclibrary', - 'lua_library', - 'lua_test', - 'make_shell_script', - 'map_dep', - 'package', - 'package_group', - 'proto_library', - 'py_binary', - 'py_library', - 'sh_test', - 'test_suite', - 'upb_amalgamation', - 'upb_proto_library', - 'upb_proto_reflection_library', - ] - - def __init__(self, env, thisLabel, debug): - super().__init__(env, thisLabel, debug) - self.installIgnoredMembers(self._IGNORED_MEMBERS) - self.native = self # not sure what this really does - - def _strEval(self, e): - return f'"{e}"' - - def cc_library(self, **kwargs): - name = kwargs['name'] - label = Label(f"//{self._label.package()}:{name}") - self.bazelEnv().addCcLibrary(label, **kwargs) - - def _truth(self, target): - truths = { - '//tcmalloc:llvm': self.bazelEnv().toolchainIs('clang'), - '//conditions:default': True, - } - return truths[target] - - def select(self, argDict): - for k,v in argDict.items(): - self.debug(f" select: evaluating {k} => {v}") - if self._truth(k): - self.debug(f"select True {k}: returning {v}") - return v - raise RuntimeError("no condition matched in select map") - - def glob(self, *args): 
return [] - - -############################################################ -class WorkspaceEvalContext(EvalContext): - _IGNORED_MEMBERS = [ - 'http_archive', - 'git_repository', - 'protobuf_deps', - 'rules_proto_dependencies', - 'rules_proto_toolchains', - 'rules_fuzzing_dependencies', - 'rules_fuzzing_init', - 'workspace', - ] - - def __init__(self, bazelEnv, thisLabel, debug): - super().__init__(bazelEnv, thisLabel, debug) - self.installIgnoredMembers(self._IGNORED_MEMBERS) - - -############################################################ -class BazelEnv: - def __init__(self, sconsEnv, root, debug=lambda x: x): - self._sconsEnv = sconsEnv - self._root = root - self._globals = {} - self._locals = {} - self._allTargets = {} - self._libraries = {} - self.debug = debug - - def run(self): - workspace = os.path.join(self._root, 'WORKSPACE') - self._evalWorkspaceFile(workspace) - for build in self._findBuildFiles(): - self._evalBuildFile(build) - - def pruneTestOnlyLibraries(self): - lib = self._libraries - self._libraries = {} - for k in lib: - if 'testonly' not in lib[k]: - self._libraries[k] = lib[k] - - def libraries(self): - return self._libraries - - def sconsEnv(self): - return self._sconsEnv - - def resolveFile(self, label): - if label.remote(): - return None - parts = [self._root] - pkg = label.package() - if pkg: - parts.append(pkg) - tgt = label.target() - if tgt: - parts.append(tgt) - return os.path.join(*parts) - - def root(self): - return self._root - - def getGlobals(self): - return self._globals - - def getLocals(self): - return self._locals - - def toolchainIs(self, category): - return self.sconsEnv().ToolchainIs(category) - - # Called by the EvalContext to handle cc_library. 
- def addCcLibrary(self, label: Label, **kwargs): - self.debug(f"\_ cc_library('{label}', {kwargs})") - if 'deps' in kwargs: - deps = kwargs['deps'] - fqd = set(self._fullyQualifiedDep(str(label), dep) for dep in deps) - kwargs['deps'] = fqd - self._libraries[str(label)] = kwargs - - def _getDict(self, obj): - return {(k, getattr(obj,k)) for k in filter(lambda s : not s.startswith("_"), dir(obj))} - - def _evalWorkspaceFile(self, path): - self.debug(f"Evaluating WORKSPACE file={path}") - label = self._labelForFile(path) - ctx = WorkspaceEvalContext(self, label, self.debug) - glo = self._globals - glo.update(self._getDict(ctx)) - ctx._execFile(path, glo) - - def _evalBuildFile(self, path): - label = self._labelForFile(path) - self.debug(f"Evaluating BUILD file={path} as label={label}") - ctx = BuildEvalContext(self, label, self.debug) - glo = self._globals - glo.update(self._getDict(ctx)) - ctx._execFile(path, glo) - - def _findBuildFiles(self): - return glob.iglob(os.path.join(self._root, '**', 'BUILD'), recursive=True) - - def _labelForFile(self, path): - pkg = path[len(self._root)+1:] # extract common base with path + '/' - dirPart = os.path.dirname(pkg) - basePart = os.path.basename(pkg) - return Label(f"//{dirPart}:{basePart}") - - def _fullyQualifiedDep(self, libName:str, dep:str) -> str: - """ dep might need to be relative to lib, make it an absolute label """ - libLabel = Label(libName) - depLabel = Label(dep) - if depLabel.package() is None: - depLabel = Label(f'//{libLabel.package()}:{depLabel.target()}') - return str(depLabel) - - def resolveDeps(self, exports): - libs = self.libraries() - resolved = {} - todo = [(k, {}) for k in exports] - while todo: - top, attrs = todo.pop() - - if top in resolved: - # merge the 'from' property - resolved[top].setdefault('from',[]).extend(attrs.get('from',[])) - continue - - if top not in libs: - self.debug(f" unknown dep '{top}'") - newAttr = copy.copy(attrs) - newAttr['unknown'] = True - resolved[top] = newAttr - 
continue - - deps = libs[top].get('deps', set()) - if len(deps): - fqd = set(self._fullyQualifiedDep(top, dep) for dep in deps) - libs[top]['deps'] = fqd - self.debug(f' {top}: pushing deps={json.dumps(sorted(list(fqd)), indent=4)}') - todo.extend((dep, {'from':[top]}) for dep in fqd) - - if top not in resolved: - resolved[top] = attrs - - return resolved - - def _eliminateSourcelessLib(self, remove): - bzl = self.libraries() - for libName, libDef in bzl.items(): - if 'deps' not in libDef: - continue - deps = libDef['deps'] - if remove not in deps: - continue - deps.remove(remove) - for innerDep in bzl[remove].get('deps', set()): - deps.add(innerDep) - bzl.pop(remove) - - def eliminateSourcelessDeps(self): - self.debug('eliminate sourceless deps') - bzl = self.libraries() - while True: - class DepsChanged(BaseException): pass - try: - for libName,libDef in bzl.items(): - # for each sourceless lib, inline into all dependents and remove it - if 'srcs' not in libDef: - self.debug(f'Inline sourceless {libName}') - self._eliminateSourcelessLib(libName) - raise DepsChanged() - except DepsChanged: - continue - break - - def eliminateHeadersFromSources(self): - self.debug('eliminate headers from sources') - bzl = self.libraries() - for _,libDef in bzl.items(): - if 'srcs' not in libDef: - continue - libDef['srcs'] = [f for f in libDef['srcs'] if not f.endswith('.h')] diff --git a/src/third_party/wiredtiger/SConscript b/src/third_party/wiredtiger/SConscript deleted file mode 100644 index 9e38215d0eb..00000000000 --- a/src/third_party/wiredtiger/SConscript +++ /dev/null @@ -1,15 +0,0 @@ -# -*- mode: python; -*- -Import("env") -env = env.Clone() - -# TODO remove conditional with SERVER-80640 -if "-flto=thin" not in env["CCFLAGS"]: - wtbin = env.BazelProgram( - target='wt', - source=[], - LIBDEPS=[], - AIB_COMPONENT='wiredtiger', - AIB_COMPONENTS_EXTRA=[ - 'dist-test', - ], - ) diff --git a/src/third_party/wiredtiger/test/evergreen.yml 
b/src/third_party/wiredtiger/test/evergreen.yml index 1cb377edf1d..e891236676f 100644 --- a/src/third_party/wiredtiger/test/evergreen.yml +++ b/src/third_party/wiredtiger/test/evergreen.yml @@ -78,8 +78,7 @@ functions: else export PATH=/opt/mongodbtoolchain/v4/bin:$PATH fi - # Since Bazel (currently used in SCons) uses EngFlow's remote execution system instead of icecream, - # additional credentials need to be setup to maintain efficient compilation speed. + "get engflow creds": &get_engflow_creds - command: s3.get display_name: "get engflow key"