SERVER-105041 [v8.0] upgrade to scons 4.9.1 and set MSVC_TOOLSET_VERSION (#36325)

GitOrigin-RevId: 74791afd41de64b01c9ad9c260456a118a6d046d
Daniel Moody 2025-06-27 11:08:05 -05:00 committed by MongoDB Bot
parent a016d52011
commit c91a71c860
1587 changed files with 465585 additions and 16824 deletions

View File

@ -52,7 +52,7 @@ a notice will be included in
| [S2] | Apache-2.0 | c872048da5d1 + changes | ✗ | ✗ |
| [SafeInt] | MIT | 3.0.26 | | |
| [schemastore.org] | Apache-2.0 | 6847cfc3a1 | | |
| [scons] | MIT | 3.1.2 | | |
| [scons] | MIT | 4.9.1 | | |
| [Snappy] | BSD-3-Clause | 1.1.10 | ✗ | ✗ |
| [TCMalloc] | Apache-2.0 | 093ba93 + changes | | ✗ |
| [timelib] | MIT | 2022.10 | | ✗ |

View File

@ -25,7 +25,6 @@ from pkg_resources import parse_version
import SCons
import SCons.Script
from mongo_tooling_metrics.lib.top_level_metrics import SConsToolingMetrics
from site_scons.mongo import build_profiles
# This must be first, even before EnsureSConsVersion, if
@ -41,7 +40,7 @@ import mongo.generators as mongo_generators
import mongo.install_actions as install_actions
EnsurePythonVersion(3, 10)
EnsureSConsVersion(3, 1, 1)
EnsureSConsVersion(4, 9, 1)
utc_starttime = datetime.utcnow()
@ -1131,7 +1130,7 @@ env_vars.Add(
def validate_dwarf_version(key, val, env):
if val == '4' or val == '5' or val == '':
if val == 4 or val == 5 or val == '':
return
print(f"Invalid DWARF_VERSION '{val}'. Only valid versions are 4 or 5.")
@ -1352,6 +1351,12 @@ env_vars.Add(
default="14.3",
)
env_vars.Add(
'MSVC_TOOLSET_VERSION',
help='Sets the full toolset version of Visual C++ to use.',
default="14.31.31103",
)
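For context, newer SCons releases in the 4.x series understand MSVC_TOOLSET_VERSION alongside MSVC_VERSION when locating the Visual C++ toolchain, which is why both values are forwarded into envDict in the hunk below. A minimal, hedged sketch that reuses the defaults above; it is illustrative only, not code from this commit:
from SCons.Script import Environment

# Pin the compiler family and the exact toolset (values mirror the defaults above).
env = Environment(
    MSVC_VERSION="14.3",                 # Visual Studio 2022 compiler family
    MSVC_TOOLSET_VERSION="14.31.31103",  # exact toolset to use within that family
)
print(env.get("MSVC_TOOLSET_VERSION"))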
env_vars.Add(
'LINKFLAGS_COMPILER_EXEC_PREFIX',
help='Specify the search path to be injected into the LINKFLAGS',
@ -1725,6 +1730,8 @@ envDict = dict(
CONFIGURELOG='$BUILD_ROOT/scons/config.log',
CONFIG_HEADER_DEFINES={},
LIBDEPS_TAG_EXPANSIONS=[],
MSVC_VERSION=variables_only_env.get("MSVC_VERSION"),
MSVC_TOOLSET_VERSION=variables_only_env.get("MSVC_TOOLSET_VERSION"),
)
# By default, we will get the normal SCons tool search. But if the
@ -1738,16 +1745,6 @@ env = Environment(variables=env_vars, **envDict)
del envDict
env.AddMethod(lambda env, name, **kwargs: add_option(name, **kwargs), 'AddOption')
# The placement of this is intentional. Here we setup an atexit method to store tooling metrics.
# We should only register this function after env, env_vars and the parser have been properly initialized.
SConsToolingMetrics.register_metrics(
utc_starttime=datetime.utcnow(),
artifact_dir=env.Dir('$BUILD_DIR').get_abspath(),
env_vars=env_vars,
env=env,
parser=_parser,
)
if get_option('build-metrics'):
env['BUILD_METRICS_ARTIFACTS_DIR'] = '$BUILD_ROOT/$VARIANT_DIR'
env.Tool('build_metrics')
@ -5024,7 +5021,7 @@ def doConfigure(myenv):
cryptoLibName,
["openssl/crypto.h"],
"C",
"SSLeay_version(0);",
call="SSLeay_version(0);",
autoadd=True,
):
maybeIssueDarwinSSLAdvice(conf.env)
@ -5196,7 +5193,7 @@ def doConfigure(myenv):
"curl",
["curl/curl.h"],
"C",
"curl_global_init(0);",
call="curl_global_init(0);",
autoadd=False,
):
return True
@ -5332,7 +5329,7 @@ def doConfigure(myenv):
"sasl2",
["stddef.h", "sasl/sasl.h"],
"C",
"sasl_version_info(0, 0, 0, 0, 0, 0);",
call="sasl_version_info(0, 0, 0, 0, 0, 0);",
autoadd=False,
):
myenv.ConfError("Couldn't find SASL header/libraries")
@ -5479,7 +5476,7 @@ def doConfigure(myenv):
["mongoc-1.0"],
["mongoc/mongoc.h"],
"C",
"mongoc_get_major_version();",
call="mongoc_get_major_version();",
autoadd=False,
):
conf.env['MONGO_HAVE_LIBMONGOC'] = True
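The four configure hunks above make the same adjustment: the trial call for CheckLibWithHeader is now passed as the call= keyword. A hedged, generic sketch of that form; the library, header, and trial call are placeholders borrowed from one of the hunks, and the Configure/Finish scaffolding is assumed rather than copied from doConfigure:
from SCons.Script import Configure, Environment

env = Environment()
conf = Configure(env)
if conf.CheckLibWithHeader(
        "curl",                       # library to probe (placeholder choice)
        ["curl/curl.h"],              # headers the check compiles against
        "C",
        call="curl_global_init(0);",  # the trial call, now passed by keyword
        autoadd=False,
):
    pass  # probe succeeded; the real SConstruct records the result elsewhere
env = conf.Finish()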
@ -5729,7 +5726,7 @@ if get_option('ninja') != 'disabled':
if env.ToolchainIs('gcc', 'clang'):
env.AppendUnique(CCFLAGS=["-fdiagnostics-color"])
ninja_builder = Tool("ninja")
ninja_builder = Tool("mongo_ninja")
env["NINJA_BUILDDIR"] = env.Dir("$NINJA_BUILDDIR")
ninja_builder.generate(env)
@ -6696,13 +6693,15 @@ if has_option("cache"):
if env.GetOption("patch-build-mongot-url"):
binary_url = env.GetOption("patch-build-mongot-url")
env.Command(
mongot_node = env.Command(
target="mongot-localdev",
source=[],
action=[
f"curl {binary_url} | tar xvz",
],
)
env.NoCache(mongot_node)
env.Precious(mongot_node)
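Capturing the Command node and marking it NoCache and Precious keeps the downloaded archive out of the shared build cache and stops SCons from deleting it before the action reruns. A hedged, generic sketch of the pattern; the target name and URL are placeholders, and env is assumed to be the SConstruct's construction environment:
node = env.Command(
    target="downloaded-artifact",
    source=[],
    action=["curl https://example.invalid/artifact.tgz | tar xvz"],  # placeholder URL
)
env.NoCache(node)    # never push the fetched tree into --cache-dir
env.Precious(node)   # do not remove the existing copy before re-running the action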
env.AutoInstall(
target="$PREFIX_BINDIR",
@ -6746,11 +6745,13 @@ elif env.GetOption('build-mongot'):
f'$BUILD_ROOT/db_contrib_tool_venv/bin/python3 -m pip install db-contrib-tool',
], BUILD_ROOT=env.Dir("$BUILD_ROOT").path)
env.Command(
mongot_node = env.Command(
target=["mongot-localdev"], source=db_contrib_tool, action=[
f"$SOURCE setup-mongot-repro-env {binary_ver_str} --platform={platform_str} --architecture={arch_str}",
f"mv build/mongot-localdev mongot-localdev"
f"rm -rf mongot-localdev", f"mv build/mongot-localdev mongot-localdev"
], ENV=os.environ)
env.NoCache(mongot_node)
env.Precious(mongot_node)
env.AutoInstall(
target="$PREFIX_BINDIR",

View File

@ -1,78 +0,0 @@
import datetime
import pkg_resources
from pydantic import ValidationError
import mongo_tooling_metrics.client as metrics_client
from mongo_tooling_metrics.lib.top_level_metrics import NinjaToolingMetrics, ResmokeToolingMetrics, SConsToolingMetrics
import pymongo
# Check cluster connectivity
try:
client = pymongo.MongoClient(
host=metrics_client.INTERNAL_TOOLING_METRICS_HOSTNAME,
username=metrics_client.INTERNAL_TOOLING_METRICS_USERNAME,
password=metrics_client.INTERNAL_TOOLING_METRICS_PASSWORD,
)
client.server_info()
except Exception as exc:
print("Could not connect to Atlas cluster")
raise exc
metrics_classes = {
'ninja': NinjaToolingMetrics,
'scons': SConsToolingMetrics,
'resmoke': ResmokeToolingMetrics,
}
def get_metrics_data(source, lookback=30):
try:
# Get SCons metrics for the lookback period
tooling_metrics_version = pkg_resources.get_distribution('mongo-tooling-metrics').version
lookback_datetime = datetime.datetime.utcnow() - datetime.timedelta(days=lookback)
last_week_metrics = client.metrics.tooling_metrics.find({
"source": source,
"utc_starttime": {"$gt": lookback_datetime},
"tooling_metrics_version": tooling_metrics_version,
})
malformed_metrics = []
invalid_metrics = []
total_docs = 0
# Find any malformed/invalid documents in the cluster
for doc in last_week_metrics:
total_docs += 1
try:
metrics = metrics_classes[source](**doc)
if metrics.is_malformed():
malformed_metrics.append(doc['_id'])
except ValidationError:
invalid_metrics.append(doc['_id'])
metrics_detailed = (f"METRICS DETAILED ({source}):\n"
f"malformed_metrics_last_week: {malformed_metrics}\n"
f"invalid_metrics_last_week: {invalid_metrics}\n"
f"total_docs_last_week: {total_docs}\n"
f"tooling_metrics_version: {tooling_metrics_version}\n")
metrics_overview = (
f"METRICS OVERVIEW ({source}):\n"
f"malformed_metrics_last_week: {len(malformed_metrics)} ({len(malformed_metrics)/total_docs*100:.2f}%)\n"
f"invalid_metrics_last_week: {len(invalid_metrics)} ({len(invalid_metrics)/total_docs*100:.2f}%)\n"
f"total_docs_last_week: {total_docs}\n"
f"tooling_metrics_version: {tooling_metrics_version}\n")
print(metrics_overview)
print(metrics_detailed)
return metrics_overview
except Exception as exc:
print("Unexpected failure while getting metrics")
raise exc
ninja_metrics_overview = get_metrics_data("ninja")
scons_metrics_overview = get_metrics_data("scons")
resmoke_metrics_overview = get_metrics_data("resmoke")

View File

@ -59,9 +59,8 @@ def generate_scons_cache_expansions():
scons_cache_mode = "nolinked"
if os.getenv("USE_SCONS_CACHE") not in (None, False, "false", ""):
expansions[
"scons_cache_args"] = "--cache={0} --cache-signature-mode=validate --cache-dir={1} --cache-show".format(
scons_cache_mode, shlex.quote(default_cache_path))
expansions["scons_cache_args"] = "--cache={0} --cache-dir={1} --cache-show".format(
scons_cache_mode, shlex.quote(default_cache_path))
return expansions

View File

@ -66,14 +66,13 @@ def generate_scons_cache_expansions():
scons_cache_dir = os.getenv("SCONS_CACHE_DIR")
if scons_cache_dir:
default_cache_path = os.path.join(shared_mount_root, system_uuid, 'per_variant_caches',
scons_cache_dir, "scons-cache")
scons_cache_dir, "4_9_1-scons-cache")
else:
default_cache_path = os.path.join(shared_mount_root, system_uuid, "scons-cache")
default_cache_path = os.path.join(shared_mount_root, system_uuid, "4_9_1-scons-cache")
expansions["scons_cache_path"] = default_cache_path
expansions[
"scons_cache_args"] = "--cache=nolinked --cache-signature-mode=validate --cache-dir={0} --cache-show".format(
shlex.quote(default_cache_path))
expansions["scons_cache_args"] = "--cache=nolinked --cache-dir={0} --cache-show".format(
shlex.quote(default_cache_path))
# Local shared cache - host-based
elif os.getenv("SCONS_CACHE_SCOPE") == "local":
@ -85,9 +84,8 @@ def generate_scons_cache_expansions():
default_cache_path = os.path.join(default_cache_path_base, system_uuid)
expansions["scons_cache_path"] = default_cache_path
expansions[
"scons_cache_args"] = "--cache={0} --cache-signature-mode=validate --cache-dir={1} --cache-show".format(
scons_cache_mode, shlex.quote(default_cache_path))
expansions["scons_cache_args"] = "--cache={0} --cache-dir={1} --cache-show".format(
scons_cache_mode, shlex.quote(default_cache_path))
# No cache
else:
# Anything else is 'none'
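Both expansion scripts above now emit a versioned cache directory and drop the --cache-signature-mode=validate flag. A hedged sketch of the values such an expansion could produce; the mount root and UUID are placeholders, not taken from the scripts:
import os
import shlex

system_uuid = "123e4567-e89b-12d3-a456-426614174000"  # placeholder
shared_mount_root = "/efs"                             # assumption, not from the scripts
default_cache_path = os.path.join(shared_mount_root, system_uuid, "4_9_1-scons-cache")

expansions = {
    "scons_cache_path": default_cache_path,
    "scons_cache_args": "--cache=nolinked --cache-dir={0} --cache-show".format(
        shlex.quote(default_cache_path)),
}
print(expansions["scons_cache_args"])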

View File

@ -4,7 +4,6 @@ from datetime import datetime
import time
import os
import psutil
from mongo_tooling_metrics.lib.top_level_metrics import ResmokeToolingMetrics
from buildscripts.resmokelib import parser
@ -25,8 +24,4 @@ def main(argv):
"For example: resmoke.py run -h\n"
"Note: bisect, setup-multiversion and symbolize subcommands have been moved to db-contrib-tool (https://github.com/10gen/db-contrib-tool#readme).\n"
)
ResmokeToolingMetrics.register_metrics(
utc_starttime=datetime.utcfromtimestamp(__start_time),
parser=parser.get_parser(),
)
subcommand.execute()

View File

@ -4,7 +4,7 @@
import os
import sys
SCONS_VERSION = os.environ.get('SCONS_VERSION', "3.1.2")
SCONS_VERSION = os.environ.get('SCONS_VERSION', "4.9.1")
MONGODB_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SCONS_DIR = os.path.join(MONGODB_ROOT, 'src', 'third_party', 'scons-' + SCONS_VERSION,

View File

@ -354,7 +354,7 @@ components:
homepage_url: https://www.scons.org/
open_hub_url: https://www.openhub.net/p/scons
release_monitoring_id: 4770
local_directory_path: src/third_party/scons-3.1.2
local_directory_path: src/third_party/scons-4.9.1
team_owner: "Build"
upgrade_suppression: TODO SERVER-49324

poetry.lock generated
View File

@ -102,7 +102,7 @@ version = "1.34.40"
description = "The AWS SDK for Python"
optional = false
python-versions = ">= 3.8"
groups = ["aws", "platform", "tooling-metrics"]
groups = ["aws", "platform"]
files = [
{file = "boto3-1.34.40-py3-none-any.whl", hash = "sha256:49eb215e4142d441e26eedaf5d0b43065200f0849d82c904bc9a62d1328016cd"},
{file = "boto3-1.34.40.tar.gz", hash = "sha256:81d026ed8c8305b880c71f9f287f9b745b52bd358a91cfc133844c907db4d7ee"},
@ -122,7 +122,7 @@ version = "1.34.40"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">= 3.8"
groups = ["aws", "platform", "tooling-metrics"]
groups = ["aws", "platform"]
files = [
{file = "botocore-1.34.40-py3-none-any.whl", hash = "sha256:a3edd774653a61a1b211e4ea88cdb1c2655ffcc7660ba77b41a4027b097d145d"},
{file = "botocore-1.34.40.tar.gz", hash = "sha256:cb794bdb5b3d41845749a182ec93cb1453560e52b97ae0ab43ace81deb011f6d"},
@ -664,7 +664,7 @@ version = "1.5.0"
description = "Distro - an OS platform information API"
optional = false
python-versions = "*"
groups = ["platform", "testing", "tooling-metrics"]
groups = ["platform", "testing"]
files = [
{file = "distro-1.5.0-py2.py3-none-any.whl", hash = "sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799"},
{file = "distro-1.5.0.tar.gz", hash = "sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92"},
@ -676,7 +676,7 @@ version = "2.5.0"
description = "DNS toolkit"
optional = false
python-versions = ">=3.8"
groups = ["main", "core", "platform", "testing", "tooling-metrics"]
groups = ["main", "core", "platform", "testing"]
files = [
{file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"},
{file = "dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"},
@ -1026,7 +1026,7 @@ version = "4.0.11"
description = "Git Object Database"
optional = false
python-versions = ">=3.7"
groups = ["evergreen", "lint", "platform", "tooling-metrics"]
groups = ["evergreen", "lint", "platform"]
files = [
{file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"},
{file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"},
@ -1041,7 +1041,7 @@ version = "3.1.41"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
groups = ["evergreen", "lint", "platform", "tooling-metrics"]
groups = ["evergreen", "lint", "platform"]
files = [
{file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"},
{file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"},
@ -1386,7 +1386,7 @@ version = "1.0.1"
description = "JSON Matching Expressions"
optional = false
python-versions = ">=3.7"
groups = ["aws", "platform", "tooling-metrics"]
groups = ["aws", "platform"]
files = [
{file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
@ -1703,7 +1703,8 @@ version = "1.0.8"
description = "A slim library which leverages Pydantic to reliably collect type enforced metrics and store them to MongoDB."
optional = false
python-versions = ">=3.7,<4.0"
groups = ["platform", "tooling-metrics"]
groups = ["platform"]
markers = "platform_machine != \"s390x\" and platform_machine != \"ppc64le\""
files = [
{file = "mongo_tooling_metrics-1.0.8-py3-none-any.whl", hash = "sha256:6f022c07e55bedd06c9fbb19daf4118b38ac1bc290c9a645b5c1ef39cf905003"},
{file = "mongo_tooling_metrics-1.0.8.tar.gz", hash = "sha256:1f10712b237a8c99551a4b63ce4e62db42aca05ef6d054af728b55081dd477d4"},
@ -2380,7 +2381,7 @@ version = "1.10.14"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
groups = ["evergreen", "platform", "testing", "tooling-metrics"]
groups = ["evergreen", "platform", "testing"]
files = [
{file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"},
{file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"},
@ -2529,7 +2530,7 @@ version = "4.3.3"
description = "Python driver for MongoDB <http://www.mongodb.org>"
optional = false
python-versions = ">=3.7"
groups = ["core", "platform", "tooling-metrics"]
groups = ["core", "platform"]
files = [
{file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"},
@ -2625,7 +2626,8 @@ version = "1.1.0"
description = "MONGODB-AWS authentication support for PyMongo"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
groups = ["platform", "tooling-metrics"]
groups = ["platform"]
markers = "platform_machine != \"s390x\" and platform_machine != \"ppc64le\""
files = [
{file = "pymongo-auth-aws-1.1.0.tar.gz", hash = "sha256:7e04c3ba72e3138dd1dc35cbc122c97cb2341d7ff5f10271dc3e0b8adf950349"},
{file = "pymongo_auth_aws-1.1.0-py2.py3-none-any.whl", hash = "sha256:8300b7c4bc5c81351f3f38a5de93aa547ba9bd583e98f861bf2e9aacf5b93ff3"},
@ -2778,7 +2780,7 @@ version = "2.8.2"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["aws", "platform", "testing", "tooling-metrics"]
groups = ["aws", "platform", "testing"]
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@ -3312,7 +3314,7 @@ version = "0.10.0"
description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = ">= 3.8"
groups = ["aws", "platform", "tooling-metrics"]
groups = ["aws", "platform"]
files = [
{file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"},
{file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"},
@ -3401,7 +3403,7 @@ version = "58.1.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.6"
groups = ["main", "compile", "external-auth", "jira-client", "platform", "tooling-metrics"]
groups = ["main", "compile", "external-auth", "jira-client", "platform"]
files = [
{file = "setuptools-58.1.0-py3-none-any.whl", hash = "sha256:7324fd4b66efa05cdfc9c89174573a4410acc7848f318cc0565c7fb659dfdc81"},
{file = "setuptools-58.1.0.tar.gz", hash = "sha256:5de67252090e08d25f240f07d80310f778a5a46cdcf9ea9855662630ac8547b2"},
@ -3447,7 +3449,7 @@ version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
groups = ["aws", "export", "external-auth", "jira-client", "platform", "testing", "tooling-metrics"]
groups = ["aws", "export", "external-auth", "jira-client", "platform", "testing"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@ -3460,7 +3462,7 @@ version = "5.0.1"
description = "A pure Python implementation of a sliding window memory map manager"
optional = false
python-versions = ">=3.7"
groups = ["evergreen", "lint", "platform", "tooling-metrics"]
groups = ["evergreen", "lint", "platform"]
files = [
{file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"},
{file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
@ -3870,7 +3872,7 @@ version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main", "core", "evergreen", "external-auth", "lint", "platform", "testing", "tooling-metrics"]
groups = ["main", "core", "evergreen", "external-auth", "lint", "platform", "testing"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@ -3894,7 +3896,7 @@ version = "1.26.18"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
groups = ["aws", "core", "export", "jira-client", "platform", "testing", "tooling-metrics"]
groups = ["aws", "core", "export", "jira-client", "platform", "testing"]
files = [
{file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
{file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
@ -4292,4 +4294,4 @@ oldcrypt = []
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
content-hash = "d8969bfe2e827cb913689ba89555b4a64cd53f1b4d2f69842a72c9b86cefda6e"
content-hash = "92fe1742fc3186364ece803540045d4a5b883eb6f42ae01ae2f36b3dba0f2ecb"

View File

@ -169,9 +169,6 @@ opentelemetry-api = "*"
opentelemetry-sdk = "*"
opentelemetry-exporter-otlp-proto-common = "*"
[tool.poetry.group.tooling-metrics.dependencies]
mongo-tooling-metrics = "1.0.8"
# This can be installed with "poetry install -E libdeps"
[project.optional-dependencies]
libdeps = [

View File

@ -1207,7 +1207,7 @@
"name": "Organization: github"
},
"name": "SCons - a Software Construction tool",
"version": "3.1.2",
"version": "4.9.1",
"licenses": [
{
"license": {
@ -1215,7 +1215,7 @@
}
}
],
"purl": "pkg:github/SCons/scons@3.1.2",
"purl": "pkg:github/SCons/scons@4.9.1",
"properties": [
{
"name": "internal:team_responsible",
@ -1227,7 +1227,7 @@
"evidence": {
"occurrences": [
{
"location": "src/third_party/scons-3.1.2"
"location": "src/third_party/scons-4.9.1"
}
]
},

View File

@ -1150,7 +1150,7 @@ def get_command_env(env, target, source):
# os.environ or differ from it. We assume if it's a new or
# differing key from the process environment then it's
# important to pass down to commands in the Ninja file.
ENV = env.get('SHELL_ENV_GENERATOR', get_default_ENV)(env, target, source)
ENV = env.get('SHELL_ENV_GENERATOR', get_default_ENV)(env)
scons_specified_env = {
key: value
for key, value in ENV.items() if key not in os.environ or os.environ.get(key, None) != value
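The hook above is now invoked with the environment alone. A hedged sketch of what a SHELL_ENV_GENERATOR callable could look like under that single-argument convention; the body and the EXAMPLE_FLAG tweak are illustrative, not taken from the repository, and env is the construction environment the tool operates on:
def shell_env_generator(env):
    # Start from the construction environment's command environment and
    # return the mapping that generated Ninja rules should run under.
    shell_env = dict(env["ENV"])
    shell_env["EXAMPLE_FLAG"] = "1"  # placeholder tweak
    return shell_env

env["SHELL_ENV_GENERATOR"] = shell_env_generator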
@ -1610,7 +1610,7 @@ def ninja_whereis(thing, *_args, **_kwargs):
def ninja_always_serial(self, num, taskmaster):
"""Replacement for SCons.Job.Jobs constructor which always uses the Serial Job class."""
"""Replacement for SCons.Taskmaster.Job.Jobs constructor which always uses the Serial Job class."""
# We still set self.num_jobs to num even though it's a lie. The
# only consumer of this attribute is the Parallel Job class AND
# the Main.py function which instantiates a Jobs class. It checks
@ -1620,7 +1620,7 @@ def ninja_always_serial(self, num, taskmaster):
# builds. So here we lie so the Main.py will not give a false
# warning to users.
self.num_jobs = num
self.job = SCons.Job.Serial(taskmaster)
self.job = SCons.Taskmaster.Job.Serial(taskmaster)
def ninja_print_conf_log(s, target, source, env):
@ -1873,7 +1873,7 @@ def generate(env):
# The Serial job class is SIGNIFICANTLY (almost twice as) faster
# than the Parallel job class for generating Ninja files. So we
# monkey the Jobs constructor to only use the Serial Job class.
SCons.Job.Jobs.__init__ = ninja_always_serial
SCons.Taskmaster.Job.Jobs.__init__ = ninja_always_serial
# The environment variable NINJA_SYNTAX points to the
# ninja_syntax.py module from the ninja sources found here:
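Both hunks above track the relocation of the Job classes in SCons 4.x, where SCons.Job moved under SCons.Taskmaster.Job. A minimal, hedged sketch of referencing the new location; taskmaster is a placeholder for an already-constructed taskmaster:
import SCons.Taskmaster.Job

def make_serial_job(taskmaster):
    # SCons 4.x: Serial now lives under SCons.Taskmaster.Job instead of SCons.Job.
    return SCons.Taskmaster.Job.Serial(taskmaster)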

View File

@ -27,11 +27,14 @@ else:
def remove_define(env, define_to_remove):
to_remove = []
for define in env.get("CPPDEFINES", []):
if isinstance(define, tuple) and define[0] == define_to_remove:
env.get("CPPDEFINES").remove(define)
to_remove.append(define)
elif define == define_to_remove:
env.get("CPPDEFINES").remove(define)
to_remove.append(define)
for define in to_remove:
env.get("CPPDEFINES").remove(define)
# Kafka manages these defines.
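The rewrite above is a deferred-removal fix: deleting entries from CPPDEFINES while iterating over it skips whatever follows each removed element. A small, hedged illustration of that underlying Python behavior; the define names are placeholders:
defines = ["KAFKA_A", "KAFKA_A", "OTHER"]
for define in defines:
    if define == "KAFKA_A":
        defines.remove(define)  # mutating during iteration shifts the index...
print(defines)                  # ...so the second "KAFKA_A" survives: ['KAFKA_A', 'OTHER']

# Collecting first and removing afterwards, as the hunk does, removes both.
to_remove = [d for d in defines if d == "KAFKA_A"]
for d in to_remove:
    defines.remove(d)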

View File

@ -1,178 +0,0 @@
#! /usr/bin/env python
#
# SCons - a Software Constructor
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''Show or convert the configuration of an SCons cache directory.
A cache of derived files is stored by file signature.
The files are split into directories named by the first few
digits of the signature. The prefix length used for directory
names can be changed by this script.
'''
from __future__ import print_function
import argparse
import glob
import json
import os
__revision__ = "src/script/scons-configure-cache.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__version__ = "3.1.2"
__build__ = "bee7caf9defd6e108fc2998a2520ddb36a967691"
__buildsys__ = "octodog"
__date__ = "2019-12-17 02:07:09"
__developer__ = "bdeegan"
def rearrange_cache_entries(current_prefix_len, new_prefix_len):
'''Move cache files if prefix length changed.
Move the existing cache files to new directories of the
appropriate name length and clean up the old directories.
'''
print('Changing prefix length from', current_prefix_len,
'to', new_prefix_len)
dirs = set()
old_dirs = set()
for file in glob.iglob(os.path.join('*', '*')):
name = os.path.basename(file)
dname = name[:current_prefix_len].upper()
if dname not in old_dirs:
print('Migrating', dname)
old_dirs.add(dname)
dname = name[:new_prefix_len].upper()
if dname not in dirs:
os.mkdir(dname)
dirs.add(dname)
os.rename(file, os.path.join(dname, name))
# Now delete the original directories
for dname in old_dirs:
os.rmdir(dname)
# The configuration dictionary should have one entry per entry in the
# cache config. The value of each entry should include the following:
# implicit - (optional) This is to allow adding a new config entry and also
# changing the behaviour of the system at the same time. This
# indicates the value the config entry would have had if it had
# been specified.
# default - The value the config entry should have if it wasn't previously
# specified
# command-line - parameters to pass to ArgumentParser.add_argument
# converter - (optional) Function to call if conversion is required
# if this configuration entry changes
config_entries = {
'prefix_len': {
'implicit': 1,
'default': 2,
'command-line': {
'help': 'Length of cache file name used as subdirectory prefix',
'metavar': '<number>',
'type': int
},
'converter': rearrange_cache_entries
}
}
parser = argparse.ArgumentParser(
description='Modify the configuration of an scons cache directory',
epilog='''
Unspecified options will not be changed unless they are not
set at all, in which case they are set to an appropriate default.
''')
parser.add_argument('cache-dir', help='Path to scons cache directory')
for param in config_entries:
parser.add_argument('--' + param.replace('_', '-'),
**config_entries[param]['command-line'])
parser.add_argument('--version',
action='version',
version='%(prog)s 1.0')
parser.add_argument('--show',
action="store_true",
help="show current configuration")
# Get the command line as a dict without any of the unspecified entries.
args = dict([x for x in vars(parser.parse_args()).items() if x[1]])
# It seems somewhat strange to me, but positional arguments don't get the -
# in the name changed to _, whereas optional arguments do...
cache = args['cache-dir']
if not os.path.isdir(cache):
raise RuntimeError("There is no cache directory named %s" % cache)
os.chdir(cache)
del args['cache-dir']
if not os.path.exists('config'):
# old config dirs did not have a 'config' file. Try to update.
# Validate the only files in the directory are directories 0-9, a-f
expected = ['{:X}'.format(x) for x in range(0, 16)]
if not set(os.listdir('.')).issubset(expected):
raise RuntimeError(
"%s does not look like a valid version 1 cache directory" % cache)
config = dict()
else:
with open('config') as conf:
config = json.load(conf)
if args.get('show', None):
print("Current configuration in '%s':" % cache)
print(json.dumps(config, sort_keys=True,
indent=4, separators=(',', ': ')))
# in case of the show argument, emit some stats as well
file_count = 0
for _, _, files in os.walk('.'):
file_count += len(files)
if file_count: # skip config file if it exists
file_count -= 1
print("Cache contains %s files" % file_count)
del args['show']
# Find any keys that are not currently set but should be
for key in config_entries:
if key not in config:
if 'implicit' in config_entries[key]:
config[key] = config_entries[key]['implicit']
else:
config[key] = config_entries[key]['default']
if key not in args:
args[key] = config_entries[key]['default']
# Now go through each entry in args to see if it changes an existing config
# setting.
for key in args:
if args[key] != config[key]:
if 'converter' in config_entries[key]:
config_entries[key]['converter'](config[key], args[key])
config[key] = args[key]
# and write the updated config file
with open('config', 'w') as conf:
json.dump(config, conf)

View File

@ -1,596 +0,0 @@
"""SCons.Defaults
Builders and other things for the local site. Here's where we'll
duplicate the functionality of autoconf until we move it into the
installation procedure or use something like qmconf.
The code that reads the registry to find MSVC components was borrowed
from distutils.msvccompiler.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import division
__revision__ = "src/engine/SCons/Defaults.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os
import errno
import shutil
import stat
import time
import sys
import SCons.Action
import SCons.Builder
import SCons.CacheDir
import SCons.Environment
import SCons.PathList
import SCons.Subst
import SCons.Tool
# A placeholder for a default Environment (for fetching source files
# from source code management systems and the like). This must be
# initialized later, after the top-level directory is set by the calling
# interface.
_default_env = None
# Lazily instantiate the default environment so the overhead of creating
# it doesn't apply when it's not needed.
def _fetch_DefaultEnvironment(*args, **kw):
"""
Returns the already-created default construction environment.
"""
global _default_env
return _default_env
def DefaultEnvironment(*args, **kw):
"""
Initial public entry point for creating the default construction
Environment.
After creating the environment, we overwrite our name
(DefaultEnvironment) with the _fetch_DefaultEnvironment() function,
which more efficiently returns the initialized default construction
environment without checking for its existence.
(This function still exists with its _default_check because someone
else (*cough* Script/__init__.py *cough*) may keep a reference
to this function. So we can't use the fully functional idiom of
having the name originally be a something that *only* creates the
construction environment and then overwrites the name.)
"""
global _default_env
if not _default_env:
import SCons.Util
_default_env = SCons.Environment.Environment(*args, **kw)
if SCons.Util.md5:
_default_env.Decider('MD5')
else:
_default_env.Decider('timestamp-match')
global DefaultEnvironment
DefaultEnvironment = _fetch_DefaultEnvironment
_default_env._CacheDir_path = None
return _default_env
# Emitters for setting the shared attribute on object files,
# and an action for checking that all of the source files
# going into a shared library are, in fact, shared.
def StaticObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = None
return (target, source)
def SharedObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = 1
return (target, source)
def SharedFlagChecker(source, target, env):
same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME')
if same == '0' or same == '' or same == 'False':
for src in source:
try:
shared = src.attributes.shared
except AttributeError:
shared = None
if not shared:
raise SCons.Errors.UserError("Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]))
SharedCheck = SCons.Action.Action(SharedFlagChecker, None)
# Some people were using these variable name before we made
# SourceFileScanner part of the public interface. Don't break their
# SConscript files until we've given them some fair warning and a
# transition period.
CScan = SCons.Tool.CScanner
DScan = SCons.Tool.DScanner
LaTeXScan = SCons.Tool.LaTeXScanner
ObjSourceScan = SCons.Tool.SourceFileScanner
ProgScan = SCons.Tool.ProgramScanner
# These aren't really tool scanners, so they don't quite belong with
# the rest of those in Tool/__init__.py, but I'm not sure where else
# they should go. Leave them here for now.
import SCons.Scanner.Dir
DirScanner = SCons.Scanner.Dir.DirScanner()
DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner()
# Actions for common languages.
CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR")
ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR")
CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR")
ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR")
DAction = SCons.Action.Action("$DCOM", "$DCOMSTR")
ShDAction = SCons.Action.Action("$SHDCOM", "$SHDCOMSTR")
ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR")
ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR")
LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR")
ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR")
LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR")
# Common tasks that we allow users to perform in platform-independent
# ways by creating ActionFactory instances.
ActionFactory = SCons.Action.ActionFactory
def get_paths_str(dest):
# If dest is a list, we need to manually call str() on each element
if SCons.Util.is_List(dest):
elem_strs = []
for element in dest:
elem_strs.append('"' + str(element) + '"')
return '[' + ', '.join(elem_strs) + ']'
else:
return '"' + str(dest) + '"'
permission_dic = {
'u':{
'r':stat.S_IRUSR,
'w':stat.S_IWUSR,
'x':stat.S_IXUSR
},
'g':{
'r':stat.S_IRGRP,
'w':stat.S_IWGRP,
'x':stat.S_IXGRP
},
'o':{
'r':stat.S_IROTH,
'w':stat.S_IWOTH,
'x':stat.S_IXOTH
}
}
def chmod_func(dest, mode):
import SCons.Util
from string import digits
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
if SCons.Util.is_String(mode) and 0 not in [i in digits for i in mode]:
mode = int(mode, 8)
if not SCons.Util.is_String(mode):
for element in dest:
os.chmod(str(element), mode)
else:
mode = str(mode)
for operation in mode.split(","):
if "=" in operation:
operator = "="
elif "+" in operation:
operator = "+"
elif "-" in operation:
operator = "-"
else:
raise SyntaxError("Could not find +, - or =")
operation_list = operation.split(operator)
if len(operation_list) != 2:
raise SyntaxError("More than one operator found")
user = operation_list[0].strip().replace("a", "ugo")
permission = operation_list[1].strip()
new_perm = 0
for u in user:
for p in permission:
try:
new_perm = new_perm | permission_dic[u][p]
except KeyError:
raise SyntaxError("Unrecognized user or permission format")
for element in dest:
curr_perm = os.stat(str(element)).st_mode
if operator == "=":
os.chmod(str(element), new_perm)
elif operator == "+":
os.chmod(str(element), curr_perm | new_perm)
elif operator == "-":
os.chmod(str(element), curr_perm & ~new_perm)
def chmod_strfunc(dest, mode):
import SCons.Util
if not SCons.Util.is_String(mode):
return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode)
else:
return 'Chmod(%s, "%s")' % (get_paths_str(dest), str(mode))
Chmod = ActionFactory(chmod_func, chmod_strfunc)
def copy_func(dest, src, symlinks=True):
"""
If symlinks (is true), then a symbolic link will be
shallow copied and recreated as a symbolic link; otherwise, copying
a symbolic link will be equivalent to copying the symbolic link's
final target regardless of symbolic link depth.
"""
dest = str(dest)
src = str(src)
SCons.Node.FS.invalidate_node_memos(dest)
if SCons.Util.is_List(src) and os.path.isdir(dest):
for file in src:
shutil.copy2(file, dest)
return 0
elif os.path.islink(src):
if symlinks:
return os.symlink(os.readlink(src), dest)
else:
return copy_func(dest, os.path.realpath(src))
elif os.path.isfile(src):
shutil.copy2(src, dest)
return 0
else:
shutil.copytree(src, dest, symlinks)
# copytree returns None in python2 and destination string in python3
# A error is raised in both cases, so we can just return 0 for success
return 0
Copy = ActionFactory(
copy_func,
lambda dest, src, symlinks=True: 'Copy("%s", "%s")' % (dest, src)
)
def delete_func(dest, must_exist=0):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for entry in dest:
entry = str(entry)
# os.path.exists returns False with broken links that exist
entry_exists = os.path.exists(entry) or os.path.islink(entry)
if not entry_exists and not must_exist:
continue
# os.path.isdir returns True when entry is a link to a dir
if os.path.isdir(entry) and not os.path.islink(entry):
shutil.rmtree(entry, 1)
continue
os.unlink(entry)
def delete_strfunc(dest, must_exist=0):
return 'Delete(%s)' % get_paths_str(dest)
Delete = ActionFactory(delete_func, delete_strfunc)
def mkdir_func(dest):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for entry in dest:
try:
os.makedirs(str(entry))
except os.error as e:
p = str(entry)
if (e.args[0] == errno.EEXIST or
(sys.platform=='win32' and e.args[0]==183)) \
and os.path.isdir(str(entry)):
pass # not an error if already exists
else:
raise
Mkdir = ActionFactory(mkdir_func,
lambda dir: 'Mkdir(%s)' % get_paths_str(dir))
def move_func(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
SCons.Node.FS.invalidate_node_memos(src)
shutil.move(src, dest)
Move = ActionFactory(move_func,
lambda dest, src: 'Move("%s", "%s")' % (dest, src),
convert=str)
def touch_func(dest):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for file in dest:
file = str(file)
mtime = int(time.time())
if os.path.exists(file):
atime = os.path.getatime(file)
else:
with open(file, 'w'):
atime = mtime
os.utime(file, (atime, mtime))
Touch = ActionFactory(touch_func,
lambda file: 'Touch(%s)' % get_paths_str(file))
# Internal utility functions
def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None):
"""
Creates a new list from 'list' by first interpolating each element
in the list using the 'env' dictionary and then calling f on the
list, and finally calling _concat_ixes to concatenate 'prefix' and
'suffix' onto each element of the list.
"""
if not list:
return list
l = f(SCons.PathList.PathList(list).subst_path(env, target, source))
if l is not None:
list = l
return _concat_ixes(prefix, list, suffix, env)
def _concat_ixes(prefix, list, suffix, env):
"""
Creates a new list from 'list' by concatenating the 'prefix' and
'suffix' arguments onto each element of the list. A trailing space
on 'prefix' or leading space on 'suffix' will cause them to be put
into separate list elements rather than being concatenated.
"""
result = []
# ensure that prefix and suffix are strings
prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW))
suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW))
for x in list:
if isinstance(x, SCons.Node.FS.File):
result.append(x)
continue
x = str(x)
if x:
if prefix:
if prefix[-1] == ' ':
result.append(prefix[:-1])
elif x[:len(prefix)] != prefix:
x = prefix + x
result.append(x)
if suffix:
if suffix[0] == ' ':
result.append(suffix[1:])
elif x[-len(suffix):] != suffix:
result[-1] = result[-1]+suffix
return result
def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None):
"""
This is a wrapper around _concat()/_concat_ixes() that checks for
the existence of prefixes or suffixes on list items and strips them
where it finds them. This is used by tools (like the GNU linker)
that need to turn something like 'libfoo.a' into '-lfoo'.
"""
if not itms:
return itms
if not callable(c):
env_c = env['_concat']
if env_c != _concat and callable(env_c):
# There's a custom _concat() method in the construction
# environment, and we've allowed people to set that in
# the past (see test/custom-concat.py), so preserve the
# backwards compatibility.
c = env_c
else:
c = _concat_ixes
stripprefixes = list(map(env.subst, SCons.Util.flatten(stripprefixes)))
stripsuffixes = list(map(env.subst, SCons.Util.flatten(stripsuffixes)))
stripped = []
for l in SCons.PathList.PathList(itms).subst_path(env, None, None):
if isinstance(l, SCons.Node.FS.File):
stripped.append(l)
continue
if not SCons.Util.is_String(l):
l = str(l)
for stripprefix in stripprefixes:
lsp = len(stripprefix)
if l[:lsp] == stripprefix:
l = l[lsp:]
# Do not strip more than one prefix
break
for stripsuffix in stripsuffixes:
lss = len(stripsuffix)
if l[-lss:] == stripsuffix:
l = l[:-lss]
# Do not strip more than one suffix
break
stripped.append(l)
return c(prefix, stripped, suffix, env)
def processDefines(defs):
"""process defines, resolving strings, lists, dictionaries, into a list of
strings
"""
if SCons.Util.is_List(defs):
l = []
for d in defs:
if d is None:
continue
elif SCons.Util.is_List(d) or isinstance(d, tuple):
if len(d) >= 2:
l.append(str(d[0]) + '=' + str(d[1]))
else:
l.append(str(d[0]))
elif SCons.Util.is_Dict(d):
for macro,value in d.items():
if value is not None:
l.append(str(macro) + '=' + str(value))
else:
l.append(str(macro))
elif SCons.Util.is_String(d):
l.append(str(d))
else:
raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None."%repr(d))
elif SCons.Util.is_Dict(defs):
# The items in a dictionary are stored in random order, but
# if the order of the command-line options changes from
# invocation to invocation, then the signature of the command
# line will change and we'll get random unnecessary rebuilds.
# Consequently, we have to sort the keys to ensure a
# consistent order...
l = []
for k,v in sorted(defs.items()):
if v is None:
l.append(str(k))
else:
l.append(str(k) + '=' + str(v))
else:
l = [str(defs)]
return l
def _defines(prefix, defs, suffix, env, target, source, c=_concat_ixes):
"""A wrapper around _concat_ixes that turns a list or string
into a list of C preprocessor command-line definitions.
"""
return c(prefix, env.subst_path(processDefines(defs), target=target, source=source), suffix, env)
class NullCmdGenerator(object):
"""This is a callable class that can be used in place of other
command generators if you don't want them to do anything.
The __call__ method for this class simply returns the thing
you instantiated it with.
Example usage:
env["DO_NOTHING"] = NullCmdGenerator
env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}"
"""
def __init__(self, cmd):
self.cmd = cmd
def __call__(self, target, source, env, for_signature=None):
return self.cmd
class Variable_Method_Caller(object):
"""A class for finding a construction variable on the stack and
calling one of its methods.
We use this to support "construction variables" in our string
eval()s that actually stand in for methods--specifically, use
of "RDirs" in call to _concat that should actually execute the
"TARGET.RDirs" method. (We used to support this by creating a little
"build dictionary" that mapped RDirs to the method, but this got in
the way of Memoizing construction environments, because we had to
create new environment objects to hold the variables.)
"""
def __init__(self, variable, method):
self.variable = variable
self.method = method
def __call__(self, *args, **kw):
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
variable = self.variable
while frame:
if variable in frame.f_locals:
v = frame.f_locals[variable]
if v:
method = getattr(v, self.method)
return method(*args, **kw)
frame = frame.f_back
return None
# if $version_var is not empty, returns env[flags_var], otherwise returns None
def __libversionflags(env, version_var, flags_var):
try:
if env.subst('$'+version_var):
return env[flags_var]
except KeyError:
pass
return None
ConstructionEnvironment = {
'BUILDERS' : {},
'SCANNERS' : [ SCons.Tool.SourceFileScanner ],
'CONFIGUREDIR' : '#/.sconf_temp',
'CONFIGURELOG' : '#/config.log',
'CPPSUFFIXES' : SCons.Tool.CSuffixes,
'DSUFFIXES' : SCons.Tool.DSuffixes,
'ENV' : {},
'IDLSUFFIXES' : SCons.Tool.IDLSuffixes,
'_concat' : _concat,
'_defines' : _defines,
'_stripixes' : _stripixes,
'_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}',
'_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
'_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
'_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__, TARGET, SOURCE)}',
'__libversionflags' : __libversionflags,
'__SHLIBVERSIONFLAGS' : '${__libversionflags(__env__,"SHLIBVERSION","_SHLIBVERSIONFLAGS")}',
'__LDMODULEVERSIONFLAGS' : '${__libversionflags(__env__,"LDMODULEVERSION","_LDMODULEVERSIONFLAGS")}',
'__DSHLIBVERSIONFLAGS' : '${__libversionflags(__env__,"DSHLIBVERSION","_DSHLIBVERSIONFLAGS")}',
'TEMPFILE' : NullCmdGenerator,
'TEMPFILEARGJOIN': ' ',
'Dir' : Variable_Method_Caller('TARGET', 'Dir'),
'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'),
'File' : Variable_Method_Caller('TARGET', 'File'),
'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'),
}
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,698 +0,0 @@
"""SCons.Job
This module defines the Serial and Parallel classes that execute tasks to
complete a build. The Jobs class provides a higher level interface to start,
stop, and wait on jobs.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Job.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.compat
import os
import signal
import threading
from enum import Enum
import SCons.Errors
# The default stack size (in kilobytes) of the threads used to execute
# jobs in parallel.
#
# We use a stack size of 256 kilobytes. The default on some platforms
# is too large and prevents us from creating enough threads to fully
# parallelized the build. For example, the default stack size on linux
# is 8 MBytes.
explicit_stack_size = None
default_stack_size = 256
interrupt_msg = 'Build interrupted.'
class InterruptState(object):
def __init__(self):
self.interrupted = False
def set(self):
self.interrupted = True
def __call__(self):
return self.interrupted
class Jobs(object):
"""An instance of this class initializes N jobs, and provides
methods for starting, stopping, and waiting on all N jobs.
"""
def __init__(self, num, taskmaster):
"""
Create 'num' jobs using the given taskmaster.
If 'num' is 1 or less, then a serial job will be used,
otherwise a parallel job with 'num' worker threads will
be used.
The 'num_jobs' attribute will be set to the actual number of jobs
allocated. If more than one job is requested but the Parallel
class can't do it, it gets reset to 1. Wrapping interfaces that
care should check the value of 'num_jobs' after initialization.
"""
self.job = None
if num > 1:
stack_size = explicit_stack_size
if stack_size is None:
stack_size = default_stack_size
try:
self.job = Parallel(taskmaster, num, stack_size)
self.num_jobs = num
except NameError:
pass
if self.job is None:
self.job = Serial(taskmaster)
self.num_jobs = 1
def run(self, postfunc=lambda: None):
"""Run the jobs.
postfunc() will be invoked after the jobs has run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
def were_interrupted(self):
"""Returns whether the jobs were interrupted by a signal."""
return self.job.interrupted()
def _setup_sig_handler(self):
"""Setup an interrupt handler so that SCons can shutdown cleanly in
various conditions:
a) SIGINT: Keyboard interrupt
b) SIGTERM: kill or system shutdown
c) SIGHUP: Controlling shell exiting
We handle all of these cases by stopping the taskmaster. It
turns out that it's very difficult to stop the build process
by throwing asynchronously an exception such as
KeyboardInterrupt. For example, the python Condition
variables (threading.Condition) and queues do not seem to be
asynchronous-exception-safe. It would require adding a whole
bunch of try/finally block and except KeyboardInterrupt all
over the place.
Note also that we have to be careful to handle the case when
SCons forks before executing another process. In that case, we
want the child to exit immediately.
"""
def handler(signum, stack, self=self, parentpid=os.getpid()):
if os.getpid() == parentpid:
self.job.taskmaster.stop()
self.job.interrupted.set()
else:
os._exit(2)
self.old_sigint = signal.signal(signal.SIGINT, handler)
self.old_sigterm = signal.signal(signal.SIGTERM, handler)
try:
self.old_sighup = signal.signal(signal.SIGHUP, handler)
except AttributeError:
pass
def _reset_sig_handler(self):
"""Restore the signal handlers to their previous state (before the
call to _setup_sig_handler()."""
signal.signal(signal.SIGINT, self.old_sigint)
signal.signal(signal.SIGTERM, self.old_sigterm)
try:
signal.signal(signal.SIGHUP, self.old_sighup)
except AttributeError:
pass
class Serial(object):
"""This class is used to execute tasks in series, and is more efficient
than Parallel, but is only appropriate for non-parallel builds. Only
one instance of this class should be in existence at a time.
This class is not thread safe.
"""
def __init__(self, taskmaster):
"""Create a new serial job given a taskmaster.
The taskmaster's next_task() method should return the next task
that needs to be executed, or None if there are no more tasks. The
taskmaster's executed() method will be called for each task when it
is successfully executed, or failed() will be called if it failed to
execute (e.g. execute() raised an exception)."""
self.taskmaster = taskmaster
self.interrupted = InterruptState()
def start(self):
"""Start the job. This will begin pulling tasks from the taskmaster
and executing them, and return when there are no more tasks. If a task
fails to execute (i.e. execute() raises an exception), then the job will
stop."""
while True:
task = self.taskmaster.next_task()
if task is None:
break
try:
task.prepare()
if task.needs_execute():
task.execute()
except Exception:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
else:
task.exception_set()
# Let the failed() callback function arrange for the
# build to stop if that's appropriate.
task.failed()
else:
task.executed()
task.postprocess()
self.taskmaster.cleanup()
# Trap import failure so that everything in the Job module but the
# Parallel class (and its dependent classes) will work if the interpreter
# doesn't support threads.
try:
import queue
import threading
except ImportError:
pass
else:
class Worker(threading.Thread):
"""A worker thread waits on a task to be posted to its request queue,
dequeues the task, executes it, and posts a tuple including the task
and a boolean indicating whether the task executed successfully. """
def __init__(self, requestQueue, resultsQueue, interrupted):
threading.Thread.__init__(self)
self.setDaemon(1)
self.requestQueue = requestQueue
self.resultsQueue = resultsQueue
self.interrupted = interrupted
self.start()
def run(self):
while True:
task = self.requestQueue.get()
if task is None:
# The "None" value is used as a sentinel by
# ThreadPool.cleanup(). This indicates that there
# are no more tasks, so we should quit.
break
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
task.exception_set()
ok = False
else:
ok = True
self.resultsQueue.put((task, ok))
class ThreadPool(object):
"""This class is responsible for spawning and managing worker threads."""
def __init__(self, num, stack_size, interrupted):
"""Create the request and reply queues, and 'num' worker threads.
One must specify the stack size of the worker threads. The
stack size is specified in kilobytes.
"""
self.requestQueue = queue.Queue(0)
self.resultsQueue = queue.Queue(0)
try:
prev_size = threading.stack_size(stack_size * 1024)
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
# Create worker threads
self.workers = []
for _ in range(num):
worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
self.workers.append(worker)
if 'prev_size' in locals():
threading.stack_size(prev_size)
def put(self, task):
"""Put task into request queue."""
self.requestQueue.put(task)
def get(self):
"""Remove and return a result tuple from the results queue."""
return self.resultsQueue.get()
def preparation_failed(self, task):
self.resultsQueue.put((task, False))
def cleanup(self):
"""
Shuts down the thread pool, giving each worker thread a
chance to shut down gracefully.
"""
# For each worker thread, put a sentinel "None" value
# on the requestQueue (indicating that there's no work
# to be done) so that each worker thread will get one and
# terminate gracefully.
for _ in self.workers:
self.requestQueue.put(None)
# Wait for all of the workers to terminate.
#
# If we don't do this, later Python versions (2.4, 2.5) often
# seem to raise exceptions during shutdown. This happens
# in requestQueue.get(), as an assertion failure that
# requestQueue.not_full is notified while not acquired,
# seemingly because the main thread has shut down (or is
# in the process of doing so) while the workers are still
# trying to pull sentinels off the requestQueue.
#
# Normally these terminations should happen fairly quickly,
# but we'll stick a one-second timeout on here just in case
# someone gets hung.
for worker in self.workers:
worker.join(1.0)
self.workers = []
class Parallel(object):
"""This class is used to execute tasks in parallel, and is somewhat
less efficient than Serial, but is appropriate for parallel builds.
This class is thread safe.
"""
def __init__(self, taskmaster, num, stack_size):
"""Create a new parallel job given a taskmaster.
The taskmaster's next_task() method should return the next
task that needs to be executed, or None if there are no more
tasks. The taskmaster's executed() method will be called
for each task when it is successfully executed, or failed()
will be called if the task failed to execute (i.e. execute()
raised an exception).
Note: calls to taskmaster are serialized, but calls to
execute() on distinct tasks are not serialized, because
that is the whole point of parallel jobs: they can execute
multiple tasks simultaneously. """
self.taskmaster = taskmaster
self.interrupted = InterruptState()
self.tp = ThreadPool(num, stack_size, self.interrupted)
self.maxjobs = num
def start(self):
"""Start the job. This will begin pulling tasks from the
taskmaster and executing them, and return when there are no
more tasks. If a task fails to execute (i.e. execute() raises
an exception), then the job will stop."""
jobs = 0
while True:
# Start up as many available tasks as we're
# allowed to.
while jobs < self.maxjobs:
task = self.taskmaster.next_task()
if task is None:
break
try:
# prepare task for execution
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if task.needs_execute():
# dispatch task
self.tp.put(task)
jobs = jobs + 1
else:
task.executed()
task.postprocess()
if not task and not jobs:
break
# Let any/all completed tasks finish up before we go
# back and put the next batch of tasks on the queue.
while True:
task, ok = self.tp.get()
jobs = jobs - 1
if ok:
task.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
task.failed()
task.postprocess()
if self.tp.resultsQueue.empty():
break
self.tp.cleanup()
self.taskmaster.cleanup()
# An experimental new parallel scheduler that uses a leaders/followers pattern.
class ExperimentalParallel:
class State(Enum):
READY = 0
SEARCHING = 1
STALLED = 2
COMPLETED = 3
class Worker(threading.Thread):
def __init__(self, owner):
super().__init__()
self.daemon = True
self.owner = owner
self.start()
def run(self):
self.owner._work()
def __init__(self, taskmaster, num, stack_size):
self.taskmaster = taskmaster
self.num_workers = num
self.stack_size = stack_size
self.interrupted = InterruptState()
self.workers = []
# The `tm_lock` is what ensures that we only have one
# thread interacting with the taskmaster at a time. It
# also protects access to our state that gets updated
# concurrently. The `can_search_cv` is associated with
# this mutex.
self.tm_lock = threading.Lock()
# Guarded under `tm_lock`.
self.jobs = 0
self.state = ExperimentalParallel.State.READY
# The `can_search_cv` is used to manage a leader /
# follower pattern for access to the taskmaster, and to
# awaken from stalls.
self.can_search_cv = threading.Condition(self.tm_lock)
# The queue of tasks that have completed execution. The
# next thread to obtain `tm_lock`` will retire them.
self.results_queue_lock = threading.Lock()
self.results_queue = []
def start(self):
self._start_workers()
for worker in self.workers:
worker.join()
self.workers = []
self.taskmaster.cleanup()
def _start_workers(self):
prev_size = self._adjust_stack_size()
for _ in range(self.num_workers):
self.workers.append(ExperimentalParallel.Worker(self))
self._restore_stack_size(prev_size)
def _adjust_stack_size(self):
try:
prev_size = threading.stack_size(self.stack_size * 1024)
return prev_size
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
return None
def _restore_stack_size(self, prev_size):
if prev_size is not None:
threading.stack_size(prev_size)
def _work(self):
task = None
while True:
# Obtain `tm_lock`, granting exclusive access to the taskmaster.
with self.can_search_cv:
# print(f"XXX {threading.get_ident()} Gained exclusive access")
# Capture whether we got here with `task` set,
# then drop our reference to the task as we are no
# longer interested in the actual object.
completed_task = (task is not None)
task = None
# We will only have `completed_task` set here if
# we have looped back after executing a task. If
# we have completed a task and find that we are
# stalled, we should speculatively indicate that
# we are no longer stalled by transitioning to the
# 'ready' state which will bypass the condition
# wait so that we immediately process the results
# queue and hopefully light up new
# work. Otherwise, stay stalled, and we will wait
# in the condvar. Some other thread will come back
# here with a completed task.
if self.state == ExperimentalParallel.State.STALLED and completed_task:
# print(f"XXX {threading.get_ident()} Detected stall with completed task, bypassing wait")
self.state = ExperimentalParallel.State.READY
# Wait until we are neither searching nor stalled.
while self.state == ExperimentalParallel.State.SEARCHING or self.state == ExperimentalParallel.State.STALLED:
# print(f"XXX {threading.get_ident()} Search already in progress, waiting")
self.can_search_cv.wait()
# If someone set the completed flag, bail.
if self.state == ExperimentalParallel.State.COMPLETED:
# print(f"XXX {threading.get_ident()} Completion detected, breaking from main loop")
break
# Set the searching flag to indicate that a thread
# is currently in the critical section for
# taskmaster work.
#
# print(f"XXX {threading.get_ident()} Starting search")
self.state = ExperimentalParallel.State.SEARCHING
# Bulk acquire the tasks in the results queue
# under the result queue lock, then process them
# all outside that lock. We need to process the
# tasks in the results queue before looking for
# new work because we might be unable to find new
# work if we don't.
results_queue = []
with self.results_queue_lock:
results_queue, self.results_queue = self.results_queue, results_queue
# print(f"XXX {threading.get_ident()} Found {len(results_queue)} completed tasks to process")
for (rtask, rresult) in results_queue:
if rresult:
rtask.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
rtask.targets[0], errstr=interrupt_msg)
except Exception:
rtask.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
rtask.failed()
rtask.postprocess()
self.jobs -= 1
# We are done with any task objects that were in
# the results queue.
results_queue.clear()
# Now, turn the crank on the taskmaster until we
# either run out of tasks, or find a task that
# needs execution. If we run out of tasks, go idle
# until results arrive if jobs are pending, or
# mark the walk as complete if not.
while self.state == ExperimentalParallel.State.SEARCHING:
# print(f"XXX {threading.get_ident()} Searching for new tasks")
task = self.taskmaster.next_task()
if task:
# We found a task. Walk it through the
# task lifecycle. If it does not need
# execution, just complete the task and
# look for the next one. Otherwise,
# indicate that we are no longer searching
# so we can drop out of this loop, execute
# the task outside the lock, and allow
# another thread in to search.
try:
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if not task.needs_execute():
# print(f"XXX {threading.get_ident()} Found internal task")
task.executed()
task.postprocess()
else:
self.jobs += 1
# print(f"XXX {threading.get_ident()} Found task requiring execution")
self.state = ExperimentalParallel.State.READY
self.can_search_cv.notify()
else:
# We failed to find a task, so this thread
# cannot continue turning the taskmaster
# crank. We must exit the loop.
if self.jobs:
# No task was found, but there are
# outstanding jobs executing that
# might unblock new tasks when they
# complete. Transition to the stalled
# state. We do not need a notify,
# because we know there are threads
# outstanding that will re-enter the
# loop.
#
# print(f"XXX {threading.get_ident()} Found no task requiring execution, but have jobs: marking stalled")
self.state = ExperimentalParallel.State.STALLED
else:
# We didn't find a task and there are
# no jobs outstanding, so there is
# nothing that will ever return
# results which might unblock new
# tasks. We can conclude that the walk
# is complete. Update our state to
# note completion and awaken anyone
# sleeping on the condvar.
#
# print(f"XXX {threading.get_ident()} Found no task requiring execution, and have no jobs: marking complete")
self.state = ExperimentalParallel.State.COMPLETED
self.can_search_cv.notify_all()
# We no longer hold `tm_lock` here. If we have a task,
# we can now execute it. If there are threads waiting
# to search, one of them can now begin turning the
# taskmaster crank in parallel.
if task:
# print(f"XXX {threading.get_ident()} Executing task")
ok = True
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
ok = False
task.exception_set()
# Grab the results queue lock and enqueue the
# executed task and state. The next thread into
# the searching loop will complete the
# postprocessing work under the taskmaster lock.
#
# print(f"XXX {threading.get_ident()} Enqueueing executed task results")
with self.results_queue_lock:
self.results_queue.append((task, ok))
# Tricky state "fallthrough" here. We are going back
# to the top of the loop, which behaves differently
# depending on whether `task` is set. Do not perturb
# the value of the `task` variable if you add new code
# after this comment.
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
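
The leaders/followers walk above is easier to see with the taskmaster details stripped away. Below is a much-reduced, SCons-free sketch of the same shape: one searcher at a time under a Condition, execution outside the lock, and stalled searchers woken when a job retires. The squaring "work" and the rule that finishing item 3 unblocks one extra item are invented for the demo.

import threading

class TinyLeaderFollower:
    """Toy version of the walk above: search under a Condition, execute
    outside the lock, sleep when stalled until a completed job wakes us."""

    def __init__(self, first_items):
        self.pending = list(first_items)   # stands in for the taskmaster
        self.jobs = 0                      # dispatched but not yet retired
        self.done = False
        self.results = []
        self.cv = threading.Condition()

    def _work(self):
        while True:
            with self.cv:
                while not self.pending and self.jobs and not self.done:
                    self.cv.wait()         # stalled: outstanding jobs may yield new work
                if self.done or (not self.pending and not self.jobs):
                    self.done = True
                    self.cv.notify_all()   # walk complete, wake any sleepers
                    return
                item = self.pending.pop()  # "turn the taskmaster crank"
                self.jobs += 1
            result = item * item           # "execute" outside the lock
            with self.cv:
                self.results.append(result)
                if item == 3:
                    self.pending.append(item - 1)   # a retired job unblocks new work
                self.jobs -= 1
                self.cv.notify_all()       # wake stalled searchers

    def run(self, num_workers=4):
        workers = [threading.Thread(target=self._work) for _ in range(num_workers)]
        for w in workers:
            w.start()
        for w in workers:
            w.join()
        return sorted(self.results)

print(TinyLeaderFollower(range(6)).run())
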
View File
@ -1,131 +0,0 @@
"""SCons.Scanner.C
This module implements the dependency scanner for C/C++ code.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/C.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.Node.FS
import SCons.Scanner
import SCons.Util
import SCons.cpp
class SConsCPPScanner(SCons.cpp.PreProcessor):
"""
SCons-specific subclass of the cpp.py module's processing.
We subclass this so that: 1) we can deal with files represented
by Nodes, not strings; 2) we can keep track of the files that are
missing.
"""
def __init__(self, *args, **kw):
SCons.cpp.PreProcessor.__init__(self, *args, **kw)
self.missing = []
def initialize_result(self, fname):
self.result = SCons.Util.UniqueList([fname])
def finalize_result(self, fname):
return self.result[1:]
def find_include_file(self, t):
keyword, quote, fname = t
result = SCons.Node.FS.find_file(fname, self.searchpath[quote])
if not result:
self.missing.append((fname, self.current_file))
return result
def read_file(self, file):
try:
with open(str(file.rfile())) as fp:
return fp.read()
except EnvironmentError as e:
self.missing.append((file, self.current_file))
return ''
def dictify_CPPDEFINES(env):
cppdefines = env.get('CPPDEFINES', {})
if cppdefines is None:
return {}
if SCons.Util.is_Sequence(cppdefines):
result = {}
for c in cppdefines:
if SCons.Util.is_Sequence(c):
result[c[0]] = c[1]
else:
result[c] = None
return result
if not SCons.Util.is_Dict(cppdefines):
return {cppdefines : None}
return cppdefines
class SConsCPPScannerWrapper(object):
"""
The SCons wrapper around a cpp.py scanner.
This is the actual glue between the calling conventions of generic
SCons scanners, and the (subclass of) cpp.py class that knows how
to look for #include lines with reasonably real C-preprocessor-like
evaluation of #if/#ifdef/#else/#elif lines.
"""
def __init__(self, name, variable):
self.name = name
self.path = SCons.Scanner.FindPathDirs(variable)
def __call__(self, node, env, path = ()):
cpp = SConsCPPScanner(current = node.get_dir(),
cpppath = path,
dict = dictify_CPPDEFINES(env))
result = cpp(node)
for included, includer in cpp.missing:
fmt = "No dependency generated for file: %s (included from: %s) -- file not found"
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
fmt % (included, includer))
return result
def recurse_nodes(self, nodes):
return nodes
def select(self, node):
return self
def CScanner():
"""Return a prototype Scanner instance for scanning source files
that use the C pre-processor"""
# Here's how we would (or might) use the CPP scanner code above that
# knows how to evaluate #if/#ifdef/#else/#elif lines when searching
# for #includes. This is commented out for now until we add the
# right configurability to let users pick between the scanners.
#return SConsCPPScannerWrapper("CScanner", "CPPPATH")
cs = SCons.Scanner.ClassicCPP("CScanner",
"$CPPSUFFIXES",
"CPPPATH",
'^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")')
return cs
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
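
The ClassicCPP pattern that CScanner is constructed with can be exercised on its own; the sample source text in this sketch is made up.

import re

# Same pattern as above; re.M makes '^' match at the start of every line.
INCLUDE_RE = re.compile(
    r'^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")', re.M)

sample = (
    '#include <stdio.h>\n'
    '  #  include "local/header.h"\n'
    '// #include <skipped.h>   no match: the line does not start with "#"\n'
    '#import "objc_style.h"\n'
)

for quote, fname, _ in INCLUDE_RE.findall(sample):
    # quote ('<' or '"') tells the scanner whether to search CPPPATH or the source dir first
    print(quote, fname)
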
View File
@ -1,281 +0,0 @@
"""SCons.Tool.FortranCommon
Stuff for processing Fortran, common to all fortran dialects.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
__revision__ = "src/engine/SCons/Tool/FortranCommon.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import re
import os.path
import SCons.Action
import SCons.Defaults
import SCons.Scanner.Fortran
import SCons.Tool
import SCons.Util
def isfortran(env, source):
"""Return 1 if any of code in source has fortran files in it, 0
otherwise."""
try:
fsuffixes = env['FORTRANSUFFIXES']
except KeyError:
# If no FORTRANSUFFIXES, no fortran tool, so there is no need to look
# for fortran sources.
return 0
if not source:
# Source might be None for unusual cases like SConf.
return 0
for s in source:
if s.sources:
ext = os.path.splitext(str(s.sources[0]))[1]
if ext in fsuffixes:
return 1
return 0
def _fortranEmitter(target, source, env):
node = source[0].rfile()
if not node.exists() and not node.is_derived():
print("Could not locate " + str(node.name))
return ([], [])
# This has to match the def_regex in the Fortran scanner
mod_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL)(\w+)"""
cre = re.compile(mod_regex,re.M)
# Retrieve all USE'd module names
modules = cre.findall(node.get_text_contents())
# Remove duplicate module names from the list
modules = SCons.Util.unique(modules)
# Convert module name to a .mod filename
suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source)
moddir = env.subst('$FORTRANMODDIR', target=target, source=source)
modules = [x.lower() + suffix for x in modules]
for m in modules:
target.append(env.fs.File(m, moddir))
return (target, source)
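
The mod_regex above is straightforward to try outside SCons; the Fortran snippet here is invented.

import re

mod_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL)(\w+)"""
cre = re.compile(mod_regex, re.M)

source = (
    "module constants\n"
    "end module constants\n"
    "MODULE PROCEDURE ignored_interface_body\n"
    "module Physics\n"
)

# Lower-cased and suffixed exactly as the emitter does: ['constants.mod', 'physics.mod']
print([name.lower() + '.mod' for name in cre.findall(source)])
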
def FortranEmitter(target, source, env):
target, source = _fortranEmitter(target, source, env)
return SCons.Defaults.StaticObjectEmitter(target, source, env)
def ShFortranEmitter(target, source, env):
target, source = _fortranEmitter(target, source, env)
return SCons.Defaults.SharedObjectEmitter(target, source, env)
def ComputeFortranSuffixes(suffixes, ppsuffixes):
"""suffixes are fortran source files, and ppsuffixes the ones to be
pre-processed. Both should be sequences, not strings."""
assert len(suffixes) > 0
s = suffixes[0]
sup = s.upper()
upper_suffixes = [_.upper() for _ in suffixes]
if SCons.Util.case_sensitive_suffixes(s, sup):
ppsuffixes.extend(upper_suffixes)
else:
suffixes.extend(upper_suffixes)
def CreateDialectActions(dialect):
"""Create dialect specific actions."""
CompAction = SCons.Action.Action('$%sCOM ' % dialect, '$%sCOMSTR' % dialect)
CompPPAction = SCons.Action.Action('$%sPPCOM ' % dialect, '$%sPPCOMSTR' % dialect)
ShCompAction = SCons.Action.Action('$SH%sCOM ' % dialect, '$SH%sCOMSTR' % dialect)
ShCompPPAction = SCons.Action.Action('$SH%sPPCOM ' % dialect, '$SH%sPPCOMSTR' % dialect)
return CompAction, CompPPAction, ShCompAction, ShCompPPAction
def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0):
"""Add dialect specific construction variables."""
ComputeFortranSuffixes(suffixes, ppsuffixes)
fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect)
for suffix in suffixes + ppsuffixes:
SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan)
env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes)
compaction, compppaction, shcompaction, shcompppaction = \
CreateDialectActions(dialect)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in suffixes:
static_obj.add_action(suffix, compaction)
shared_obj.add_action(suffix, shcompaction)
static_obj.add_emitter(suffix, FortranEmitter)
shared_obj.add_emitter(suffix, ShFortranEmitter)
for suffix in ppsuffixes:
static_obj.add_action(suffix, compppaction)
shared_obj.add_action(suffix, shcompppaction)
static_obj.add_emitter(suffix, FortranEmitter)
shared_obj.add_emitter(suffix, ShFortranEmitter)
if '%sFLAGS' % dialect not in env:
env['%sFLAGS' % dialect] = SCons.Util.CLVar('')
if 'SH%sFLAGS' % dialect not in env:
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)
# If a tool does not define fortran prefix/suffix for include path, use C ones
if 'INC%sPREFIX' % dialect not in env:
env['INC%sPREFIX' % dialect] = '$INCPREFIX'
if 'INC%sSUFFIX' % dialect not in env:
env['INC%sSUFFIX' % dialect] = '$INCSUFFIX'
env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect)
if support_module == 1:
env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
else:
env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
def add_fortran_to_env(env):
"""Add Builders and construction variables for Fortran to an Environment."""
try:
FortranSuffixes = env['FORTRANFILESUFFIXES']
except KeyError:
FortranSuffixes = ['.f', '.for', '.ftn']
#print("Adding %s to fortran suffixes" % FortranSuffixes)
try:
FortranPPSuffixes = env['FORTRANPPFILESUFFIXES']
except KeyError:
FortranPPSuffixes = ['.fpp', '.FPP']
DialectAddToEnv(env, "FORTRAN", FortranSuffixes,
FortranPPSuffixes, support_module = 1)
env['FORTRANMODPREFIX'] = '' # like $LIBPREFIX
env['FORTRANMODSUFFIX'] = '.mod' # like $LIBSUFFIX
env['FORTRANMODDIR'] = '' # where the compiler should place .mod files
env['FORTRANMODDIRPREFIX'] = '' # some prefix to $FORTRANMODDIR - similar to $INCPREFIX
env['FORTRANMODDIRSUFFIX'] = '' # some suffix to $FORTRANMODDIR - similar to $INCSUFFIX
env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
def add_f77_to_env(env):
"""Add Builders and construction variables for f77 to an Environment."""
try:
F77Suffixes = env['F77FILESUFFIXES']
except KeyError:
F77Suffixes = ['.f77']
#print("Adding %s to f77 suffixes" % F77Suffixes)
try:
F77PPSuffixes = env['F77PPFILESUFFIXES']
except KeyError:
F77PPSuffixes = []
DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes)
def add_f90_to_env(env):
"""Add Builders and construction variables for f90 to an Environment."""
try:
F90Suffixes = env['F90FILESUFFIXES']
except KeyError:
F90Suffixes = ['.f90']
#print("Adding %s to f90 suffixes" % F90Suffixes)
try:
F90PPSuffixes = env['F90PPFILESUFFIXES']
except KeyError:
F90PPSuffixes = []
DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes,
support_module = 1)
def add_f95_to_env(env):
"""Add Builders and construction variables for f95 to an Environment."""
try:
F95Suffixes = env['F95FILESUFFIXES']
except KeyError:
F95Suffixes = ['.f95']
#print("Adding %s to f95 suffixes" % F95Suffixes)
try:
F95PPSuffixes = env['F95PPFILESUFFIXES']
except KeyError:
F95PPSuffixes = []
DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes,
support_module = 1)
def add_f03_to_env(env):
"""Add Builders and construction variables for f03 to an Environment."""
try:
F03Suffixes = env['F03FILESUFFIXES']
except KeyError:
F03Suffixes = ['.f03']
#print("Adding %s to f95 suffixes" % F95Suffixes)
try:
F03PPSuffixes = env['F03PPFILESUFFIXES']
except KeyError:
F03PPSuffixes = []
DialectAddToEnv(env, "F03", F03Suffixes, F03PPSuffixes,
support_module = 1)
def add_f08_to_env(env):
"""Add Builders and construction variables for f08 to an Environment."""
try:
F08Suffixes = env['F08FILESUFFIXES']
except KeyError:
F08Suffixes = ['.f08']
try:
F08PPSuffixes = env['F08PPFILESUFFIXES']
except KeyError:
F08PPSuffixes = []
DialectAddToEnv(env, "F08", F08Suffixes, F08PPSuffixes,
support_module = 1)
def add_all_to_env(env):
"""Add builders and construction variables for all supported fortran
dialects."""
add_fortran_to_env(env)
add_f77_to_env(env)
add_f90_to_env(env)
add_f95_to_env(env)
add_f03_to_env(env)
add_f08_to_env(env)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
View File
@ -1,57 +0,0 @@
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/MSCommon/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """
Common functions for Microsoft Visual Studio and Visual C/C++.
"""
import copy
import os
import re
import subprocess
import SCons.Errors
import SCons.Platform.win32
import SCons.Util
from SCons.Tool.MSCommon.sdk import mssdk_exists, \
mssdk_setup_env
from SCons.Tool.MSCommon.vc import msvc_exists, \
msvc_setup_env, \
msvc_setup_env_once, \
msvc_version_to_maj_min
from SCons.Tool.MSCommon.vs import get_default_version, \
get_vs_by_version, \
merge_default_version, \
msvs_exists, \
query_versions
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
View File
@ -1,284 +0,0 @@
"""
Common helper functions for working with the Microsoft tool chain.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
__revision__ = "src/engine/SCons/Tool/MSCommon/common.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import copy
import json
import os
import re
import subprocess
import sys
import SCons.Util
# SCONS_MSCOMMON_DEBUG is internal-use so undocumented:
# set to '-' to print to console, else set to filename to log to
LOGFILE = os.environ.get('SCONS_MSCOMMON_DEBUG')
if LOGFILE == '-':
def debug(message):
print(message)
elif LOGFILE:
import logging
logging.basicConfig(
format='%(relativeCreated)05dms:pid%(process)05d:MSCommon/%(filename)s:%(message)s',
filename=LOGFILE,
level=logging.DEBUG)
debug = logging.getLogger(name=__name__).debug
else:
debug = lambda x: None
# SCONS_CACHE_MSVC_CONFIG is public, and is documented.
CONFIG_CACHE = os.environ.get('SCONS_CACHE_MSVC_CONFIG')
if CONFIG_CACHE in ('1', 'true', 'True'):
CONFIG_CACHE = os.path.join(os.path.expanduser('~'), '.scons_msvc_cache')
def read_script_env_cache():
""" fetch cached msvc env vars if requested, else return empty dict """
envcache = {}
if CONFIG_CACHE:
try:
with open(CONFIG_CACHE, 'r') as f:
envcache = json.load(f)
#TODO can use more specific FileNotFoundError when py2 dropped
except IOError:
# don't fail if no cache file, just proceed without it
pass
return envcache
def write_script_env_cache(cache):
""" write out cache of msvc env vars if requested """
if CONFIG_CACHE:
try:
with open(CONFIG_CACHE, 'w') as f:
json.dump(cache, f, indent=2)
except TypeError:
# data can't serialize to json, don't leave partial file
os.remove(CONFIG_CACHE)
except IOError:
# can't write the file, just skip
pass
_is_win64 = None
def is_win64():
"""Return true if running on windows 64 bits.
Works whether python itself runs in 64 bits or 32 bits."""
# Unfortunately, python does not provide a useful way to determine
# if the underlying Windows OS is 32-bit or 64-bit. Worse, whether
# the Python itself is 32-bit or 64-bit affects what it returns,
# so nothing in sys.* or os.* help.
# Apparently the best solution is to use env vars that Windows
# sets. If PROCESSOR_ARCHITECTURE is not x86, then the python
# process is running in 64 bit mode (on a 64-bit OS, 64-bit
# hardware, obviously).
# If this python is 32-bit but the OS is 64, Windows will set
# ProgramW6432 and PROCESSOR_ARCHITEW6432 to non-null.
# (Checking for HKLM\Software\Wow6432Node in the registry doesn't
# work, because some 32-bit installers create it.)
global _is_win64
if _is_win64 is None:
# I structured these tests to make it easy to add new ones or
# add exceptions in the future, because this is a bit fragile.
_is_win64 = False
if os.environ.get('PROCESSOR_ARCHITECTURE', 'x86') != 'x86':
_is_win64 = True
if os.environ.get('PROCESSOR_ARCHITEW6432'):
_is_win64 = True
if os.environ.get('ProgramW6432'):
_is_win64 = True
return _is_win64
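
The three environment probes can be read standalone; this sketch (looks_like_win64 is a made-up helper, not SCons API) touches no registry and simply reports what the variables say.

import os

def looks_like_win64(environ=os.environ):
    # Mirrors the checks above: any of these implies a 64-bit Windows OS,
    # whether this Python interpreter is itself 32- or 64-bit.
    return (environ.get('PROCESSOR_ARCHITECTURE', 'x86') != 'x86'
            or bool(environ.get('PROCESSOR_ARCHITEW6432'))
            or bool(environ.get('ProgramW6432')))

print(looks_like_win64())                                        # whatever the current host says
print(looks_like_win64({'PROCESSOR_ARCHITECTURE': 'AMD64'}))     # True
print(looks_like_win64({'PROCESSOR_ARCHITECTURE': 'x86'}))       # False
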
def read_reg(value, hkroot=SCons.Util.HKEY_LOCAL_MACHINE):
return SCons.Util.RegGetValue(hkroot, value)[0]
def has_reg(value):
"""Return True if the given key exists in HKEY_LOCAL_MACHINE, False
otherwise."""
try:
SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, value)
ret = True
except SCons.Util.WinError:
ret = False
return ret
# Functions for fetching environment variable settings from batch files.
def normalize_env(env, keys, force=False):
"""Given a dictionary representing a shell environment, add the variables
from os.environ needed for the processing of .bat files; the keys are
controlled by the keys argument.
It also makes sure the environment values are correctly encoded.
If force=True, then all of the key values that exist are copied
into the returned dictionary. If force=false, values are only
copied if the key does not already exist in the copied dictionary.
Note: the environment is copied."""
normenv = {}
if env:
for k in list(env.keys()):
normenv[k] = copy.deepcopy(env[k])
for k in keys:
if k in os.environ and (force or k not in normenv):
normenv[k] = os.environ[k]
# This shouldn't be necessary, since the default environment should include system32,
# but keep this here to be safe, since it's needed to find reg.exe which the MSVC
# bat scripts use.
sys32_dir = os.path.join(os.environ.get("SystemRoot",
os.environ.get("windir", r"C:\Windows\system32")),
"System32")
if sys32_dir not in normenv['PATH']:
normenv['PATH'] = normenv['PATH'] + os.pathsep + sys32_dir
# Without Wbem in PATH, vcvarsall.bat has a "'wmic' is not recognized"
# error starting with Visual Studio 2017, although the script still
# seems to work anyway.
sys32_wbem_dir = os.path.join(sys32_dir, 'Wbem')
if sys32_wbem_dir not in normenv['PATH']:
normenv['PATH'] = normenv['PATH'] + os.pathsep + sys32_wbem_dir
debug("PATH: %s"%normenv['PATH'])
return normenv
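
A reduced sketch of the same idea: copy the caller's mapping, pull selected keys from os.environ, and guarantee one directory is on PATH. The helper name and the paths are illustrative only.

import os

def tiny_normalize_env(env, keys, required_dir):
    normenv = dict(env)                       # copy; never mutate the caller's mapping
    for k in keys:
        if k in os.environ and k not in normenv:
            normenv[k] = os.environ[k]        # pull selected variables from os.environ
    path = normenv.get('PATH', '')
    if required_dir not in path.split(os.pathsep):
        normenv['PATH'] = (path + os.pathsep + required_dir) if path else required_dir
    return normenv

print(tiny_normalize_env({'PATH': '/usr/bin'}, ['COMSPEC'], '/opt/tools/bin'))
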
def get_output(vcbat, args = None, env = None):
"""Parse the output of given bat file, with given args."""
if env is None:
# Create a blank environment, for use in launching the tools
env = SCons.Environment.Environment(tools=[])
# TODO: This is a hard-coded list of the variables that (may) need
# to be imported from os.environ[] for v[sc]*vars*.bat file
# execution to work. This list should really be either directly
# controlled by vc.py, or else derived from the common_tools_var
# settings in vs.py.
vs_vc_vars = [
'COMSPEC',
# VS100 and VS110: Still set, but modern MSVC setup scripts will
# discard these if registry has values. However Intel compiler setup
# script still requires these as of 2013/2014.
'VS140COMNTOOLS',
'VS120COMNTOOLS',
'VS110COMNTOOLS',
'VS100COMNTOOLS',
'VS90COMNTOOLS',
'VS80COMNTOOLS',
'VS71COMNTOOLS',
'VS70COMNTOOLS',
'VS60COMNTOOLS',
]
env['ENV'] = normalize_env(env['ENV'], vs_vc_vars, force=False)
if args:
debug("Calling '%s %s'" % (vcbat, args))
popen = SCons.Action._subproc(env,
'"%s" %s & set' % (vcbat, args),
stdin='devnull',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
else:
debug("Calling '%s'" % vcbat)
popen = SCons.Action._subproc(env,
'"%s" & set' % vcbat,
stdin='devnull',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Use the .stdout and .stderr attributes directly because the
# .communicate() method uses the threading module on Windows
# and won't work under Pythons not built with threading.
with popen.stdout:
stdout = popen.stdout.read()
with popen.stderr:
stderr = popen.stderr.read()
# Extra debug logic, uncomment if necessary
# debug('get_output():stdout:%s'%stdout)
# debug('get_output():stderr:%s'%stderr)
if stderr:
# TODO: find something better to do with stderr;
# this at least prevents errors from getting swallowed.
sys.stderr.write(stderr)
if popen.wait() != 0:
raise IOError(stderr.decode("mbcs"))
output = stdout.decode("mbcs")
return output
KEEPLIST = ("INCLUDE", "LIB", "LIBPATH", "PATH", 'VSCMD_ARG_app_plat')
def parse_output(output, keep=KEEPLIST):
"""
Parse output from running visual c++/studios vcvarsall.bat and running set
To capture the values listed in keep
"""
# dkeep is a dict associating key: path_list, where key is one item from
# keep, and path_list the associated list of paths
dkeep = dict([(i, []) for i in keep])
# rdk will keep the regex to match the .bat file output line starts
rdk = {}
for i in keep:
rdk[i] = re.compile('%s=(.*)' % i, re.I)
def add_env(rmatch, key, dkeep=dkeep):
path_list = rmatch.group(1).split(os.pathsep)
for path in path_list:
# Do not add empty paths (when a var ends with ;)
if path:
# XXX: For some reason, VC98 .bat file adds "" around the PATH
# values, and it screws up the environment later, so we strip
# it.
path = path.strip('"')
dkeep[key].append(str(path))
for line in output.splitlines():
for k, value in rdk.items():
match = value.match(line)
if match:
add_env(match, k)
return dkeep
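
Feeding the same key/regex/split logic a fabricated "set" dump shows what ends up in each list; the variables below are invented, and the sketch splits on ';' explicitly so it also runs where os.pathsep is ':'.

import re

KEEP = ("INCLUDE", "LIB", "PATH")

# Fabricated sample of what "vcvarsall.bat & set" might print.
sample_output = (
    'INCLUDE=C:\\VC\\include;C:\\SDK\\include;\n'
    'PATH="C:\\VC\\bin";C:\\Windows\\system32\n'
    'UNRELATED=ignored\n'
)

dkeep = {k: [] for k in KEEP}
patterns = {k: re.compile('%s=(.*)' % k, re.I) for k in KEEP}
for line in sample_output.splitlines():
    for key, pat in patterns.items():
        m = pat.match(line)
        if m:
            for path in m.group(1).split(';'):   # the real code splits on os.pathsep
                if path:
                    dkeep[key].append(path.strip('"'))
print(dkeep)
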
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
View File
@ -1,842 +0,0 @@
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# TODO:
# * supported arch for versions: for old versions of batch file without
# argument, giving bogus argument cannot be detected, so we have to hardcode
# this here
# * print warning when msvc version specified but not found
# * find out why warnings do not print
# * test on 64 bits XP + VS 2005 (and VS 6 if possible)
# * SDK
# * Assembly
__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """Module for Visual C/C++ detection and configuration.
"""
import SCons.compat
import SCons.Util
import subprocess
import os
import platform
import sys
from string import digits as string_digits
if sys.version_info[0] == 2:
import collections
import SCons.Warnings
from SCons.Tool import find_program_path
from . import common
debug = common.debug
from . import sdk
get_installed_sdks = sdk.get_installed_sdks
class VisualCException(Exception):
pass
class UnsupportedVersion(VisualCException):
pass
class MSVCUnsupportedHostArch(VisualCException):
pass
class MSVCUnsupportedTargetArch(VisualCException):
pass
class MissingConfiguration(VisualCException):
pass
class NoVersionFound(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
# Dict to 'canonicalize' the arch
_ARCH_TO_CANONICAL = {
"amd64" : "amd64",
"emt64" : "amd64",
"i386" : "x86",
"i486" : "x86",
"i586" : "x86",
"i686" : "x86",
"ia64" : "ia64", # deprecated
"itanium" : "ia64", # deprecated
"x86" : "x86",
"x86_64" : "amd64",
"arm" : "arm",
"arm64" : "arm64",
"aarch64" : "arm64",
}
_HOST_TARGET_TO_CL_DIR_GREATER_THAN_14 = {
("amd64","amd64") : ("Hostx64","x64"),
("amd64","x86") : ("Hostx64","x86"),
("amd64","arm") : ("Hostx64","arm"),
("amd64","arm64") : ("Hostx64","arm64"),
("x86","amd64") : ("Hostx86","x64"),
("x86","x86") : ("Hostx86","x86"),
("x86","arm") : ("Hostx86","arm"),
("x86","arm64") : ("Hostx86","arm64"),
}
# get path to the cl.exe dir for older VS versions
# based off a tuple of (host, target) platforms
_HOST_TARGET_TO_CL_DIR = {
("amd64","amd64") : "amd64",
("amd64","x86") : "amd64_x86",
("amd64","arm") : "amd64_arm",
("amd64","arm64") : "amd64_arm64",
("x86","amd64") : "x86_amd64",
("x86","x86") : "",
("x86","arm") : "x86_arm",
("x86","arm64") : "x86_arm64",
}
# Given a (host, target) tuple, return the argument for the bat file.
# Both host and target should be canonicalized.
_HOST_TARGET_ARCH_TO_BAT_ARCH = {
("x86", "x86"): "x86",
("x86", "amd64"): "x86_amd64",
("x86", "x86_amd64"): "x86_amd64",
("amd64", "x86_amd64"): "x86_amd64", # This is present in (at least) VS2012 express
("amd64", "amd64"): "amd64",
("amd64", "x86"): "x86",
("x86", "ia64"): "x86_ia64", # gone since 14.0
("arm", "arm"): "arm", # since 14.0, maybe gone 14.1?
("x86", "arm"): "x86_arm", # since 14.0
("x86", "arm64"): "x86_arm64", # since 14.1
("amd64", "arm"): "amd64_arm", # since 14.0
("amd64", "arm64"): "amd64_arm64", # since 14.1
}
_CL_EXE_NAME = 'cl.exe'
def get_msvc_version_numeric(msvc_version):
"""Get the raw version numbers from a MSVC_VERSION string, so it
could be cast to float or other numeric values. For example, '14.0Exp'
would get converted to '14.0'.
Args:
msvc_version: str
string representing the version number, could contain non
digit characters
Returns:
str: the value converted to a numeric only string
"""
return ''.join([x for x in msvc_version if x in string_digits + '.'])
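
A quick check of the filtering rule, with numeric_only standing in for get_msvc_version_numeric().

from string import digits as string_digits

def numeric_only(msvc_version):
    # Same filtering as get_msvc_version_numeric() above.
    return ''.join(x for x in msvc_version if x in string_digits + '.')

print(numeric_only('14.0Exp'))   # -> '14.0'
print(numeric_only('9.0Exp'))    # -> '9.0'
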
def get_host_target(env):
debug('get_host_target()')
host_platform = env.get('HOST_ARCH')
if not host_platform:
host_platform = platform.machine()
# Solaris returns i86pc for both 32 and 64 bit architectures
if host_platform == "i86pc":
if platform.architecture()[0] == "64bit":
host_platform = "amd64"
else:
host_platform = "x86"
# Retain user requested TARGET_ARCH
req_target_platform = env.get('TARGET_ARCH')
debug('get_host_target() req_target_platform:%s'%req_target_platform)
if req_target_platform:
# If user requested a specific platform then only try that one.
target_platform = req_target_platform
else:
target_platform = host_platform
try:
host = _ARCH_TO_CANONICAL[host_platform.lower()]
except KeyError:
msg = "Unrecognized host architecture %s"
raise MSVCUnsupportedHostArch(msg % repr(host_platform))
try:
target = _ARCH_TO_CANONICAL[target_platform.lower()]
except KeyError:
all_archs = str(list(_ARCH_TO_CANONICAL.keys()))
raise MSVCUnsupportedTargetArch("Unrecognized target architecture %s\n\tValid architectures: %s" % (target_platform, all_archs))
return (host, target,req_target_platform)
# If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the
# MSVC_VERSION documentation in Tool/msvc.xml.
_VCVER = ["14.3", "14.2", "14.1", "14.0", "14.0Exp", "12.0", "12.0Exp", "11.0", "11.0Exp", "10.0", "10.0Exp", "9.0", "9.0Exp","8.0", "8.0Exp","7.1", "7.0", "6.0"]
# if using vswhere, a further mapping is needed
_VCVER_TO_VSWHERE_VER = {
'14.3': '[17.0, 18.0)',
'14.2' : '[16.0, 17.0)',
'14.1' : '[15.0, 16.0)',
}
_VCVER_TO_PRODUCT_DIR = {
'14.3': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'')], # not set by this version
'14.2' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'')], # VS 2019 doesn't set this key
'14.1' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'')], # VS 2017 doesn't set this key
'14.0' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir')],
'14.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\14.0\Setup\VC\ProductDir')],
'12.0' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'),
],
'12.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\12.0\Setup\VC\ProductDir'),
],
'11.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\11.0\Setup\VC\ProductDir'),
],
'11.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\11.0\Setup\VC\ProductDir'),
],
'10.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'),
],
'10.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\10.0\Setup\VC\ProductDir'),
],
'9.0': [
(SCons.Util.HKEY_CURRENT_USER, r'Microsoft\DevDiv\VCForPython\9.0\installdir',),
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir',),
],
'9.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'),
],
'8.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\8.0\Setup\VC\ProductDir'),
],
'8.0Exp': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\8.0\Setup\VC\ProductDir'),
],
'7.1': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\7.1\Setup\VC\ProductDir'),
],
'7.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\7.0\Setup\VC\ProductDir'),
],
'6.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual C++\ProductDir'),
]
}
def msvc_version_to_maj_min(msvc_version):
msvc_version_numeric = get_msvc_version_numeric(msvc_version)
t = msvc_version_numeric.split(".")
if not len(t) == 2:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
try:
maj = int(t[0])
min = int(t[1])
return maj, min
except ValueError as e:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
def is_host_target_supported(host_target, msvc_version):
"""Check if (host, target) pair is supported for a VC version.
:note: only checks whether a given version *may* support the given (host,
target), not that the toolchain is actually present on the machine.
:param tuple host_target: canonalized host-targets pair, e.g.
("x86", "amd64") for cross compilation from 32 bit Windows to 64 bits.
:param str msvc_version: Visual C++ version (major.minor), e.g. "10.0"
:returns: True or False
"""
# We assume that any Visual Studio version supports x86 as a target
if host_target[1] != "x86":
maj, min = msvc_version_to_maj_min(msvc_version)
if maj < 8:
return False
return True
def find_vc_pdir_vswhere(msvc_version):
"""
Find the MSVC product directory using the vswhere program.
:param msvc_version: MSVC version to search for
:return: MSVC install dir or None
:raises UnsupportedVersion: if the version is not known by this file
"""
try:
vswhere_version = _VCVER_TO_VSWHERE_VER[msvc_version]
except KeyError:
debug("Unknown version of MSVC: %s" % msvc_version)
raise UnsupportedVersion("Unknown version %s" % msvc_version)
# For bug 3333 - support default location of vswhere for both 64 and 32 bit windows
# installs.
for pf in ['Program Files (x86)', 'Program Files']:
vswhere_path = os.path.join(
'C:\\',
pf,
'Microsoft Visual Studio',
'Installer',
'vswhere.exe'
)
if os.path.exists(vswhere_path):
# If we found vswhere, then use it.
break
else:
# No vswhere on system, no install info available
return None
vswhere_cmd = [vswhere_path,
'-products', '*',
'-version', vswhere_version,
'-property', 'installationPath']
#TODO PY27 cannot use Popen as context manager
# try putting it back to the old way for now
sp = subprocess.Popen(vswhere_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
vsdir, err = sp.communicate()
if vsdir:
vsdir = vsdir.decode("mbcs").splitlines()
# vswhere could easily return multiple lines
# we could define a way to pick the one we prefer, but since
# this data is currently only used to make a check for existence,
# returning the first hit should be good enough for now.
vc_pdir = os.path.join(vsdir[0], 'VC')
return vc_pdir
else:
# No vswhere on system, no install info available
return None
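
The same vswhere query can be issued outside SCons. This sketch only runs it when vswhere.exe is actually present (so it degrades to None on non-Windows hosts); the default range is the 14.3 entry from _VCVER_TO_VSWHERE_VER above.

import os
import subprocess

def vswhere_install_path(version_range='[17.0, 18.0)'):
    for pf in ('Program Files (x86)', 'Program Files'):
        exe = os.path.join('C:\\', pf, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
        if not os.path.exists(exe):
            continue                           # no vswhere here (e.g. non-Windows host)
        out = subprocess.run(
            [exe, '-products', '*', '-version', version_range,
             '-property', 'installationPath'],
            capture_output=True, text=True).stdout
        lines = out.splitlines()
        return os.path.join(lines[0], 'VC') if lines else None
    return None

print(vswhere_install_path())
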
def find_vc_pdir(msvc_version):
"""Find the MSVC product directory for the given version.
Tries to look up the path using a registry key from the table
_VCVER_TO_PRODUCT_DIR; if there is no key, calls find_vc_pdir_vswhere
for help instead.
Args:
msvc_version: str
msvc version (major.minor, e.g. 10.0)
Returns:
str: Path found in registry, or None
Raises:
UnsupportedVersion: if the version is not known by this file.
MissingConfiguration: found version but the directory is missing.
Both exceptions inherit from VisualCException.
"""
root = 'Software\\'
try:
hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
except KeyError:
debug("Unknown version of MSVC: %s" % msvc_version)
raise UnsupportedVersion("Unknown version %s" % msvc_version)
for hkroot, key in hkeys:
try:
comps = None
if not key:
comps = find_vc_pdir_vswhere(msvc_version)
if not comps:
debug('find_vc_pdir_vswhere(): no VC found for version {}'.format(repr(msvc_version)))
raise SCons.Util.WinError
debug('find_vc_pdir_vswhere(): VC found: {}'.format(repr(msvc_version)))
return comps
else:
if common.is_win64():
try:
# ordinarily on win64, try Wow6432Node first.
comps = common.read_reg(root + 'Wow6432Node\\' + key, hkroot)
except SCons.Util.WinError as e:
# at Microsoft Visual Studio for Python 2.7, value is not in Wow6432Node
pass
if not comps:
# not Win64, or Microsoft Visual Studio for Python 2.7
comps = common.read_reg(root + key, hkroot)
except SCons.Util.WinError as e:
debug('find_vc_dir(): no VC registry key {}'.format(repr(key)))
else:
debug('find_vc_dir(): found VC in registry: {}'.format(comps))
if os.path.exists(comps):
return comps
else:
debug('find_vc_dir(): reg says dir is {}, but it does not exist. (ignoring)'.format(comps))
raise MissingConfiguration("registry dir {} not found on the filesystem".format(comps))
return None
def find_batch_file(env,msvc_version,host_arch,target_arch):
"""
Find the location of the batch script which should set up the compiler
for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress
"""
pdir = find_vc_pdir(msvc_version)
if pdir is None:
raise NoVersionFound("No version of Visual Studio found")
debug('find_batch_file() in {}'.format(pdir))
# filter out e.g. "Exp" from the version name
msvc_ver_numeric = get_msvc_version_numeric(msvc_version)
vernum = float(msvc_ver_numeric)
if 7 <= vernum < 8:
pdir = os.path.join(pdir, os.pardir, "Common7", "Tools")
batfilename = os.path.join(pdir, "vsvars32.bat")
elif vernum < 7:
pdir = os.path.join(pdir, "Bin")
batfilename = os.path.join(pdir, "vcvars32.bat")
elif 8 <= vernum <= 14:
batfilename = os.path.join(pdir, "vcvarsall.bat")
else: # vernum >= 14.1 VS2017 and above
batfilename = os.path.join(pdir, "Auxiliary", "Build", "vcvarsall.bat")
if not os.path.exists(batfilename):
debug("Not found: %s" % batfilename)
batfilename = None
installed_sdks = get_installed_sdks()
for _sdk in installed_sdks:
sdk_bat_file = _sdk.get_sdk_vc_script(host_arch,target_arch)
if not sdk_bat_file:
debug("find_batch_file() not found:%s"%_sdk)
else:
sdk_bat_file_path = os.path.join(pdir,sdk_bat_file)
if os.path.exists(sdk_bat_file_path):
debug('find_batch_file() sdk_bat_file_path:%s'%sdk_bat_file_path)
return (batfilename, sdk_bat_file_path)
return (batfilename, None)
__INSTALLED_VCS_RUN = None
_VC_TOOLS_VERSION_FILE_PATH = ['Auxiliary', 'Build', 'Microsoft.VCToolsVersion.default.txt']
_VC_TOOLS_VERSION_FILE = os.sep.join(_VC_TOOLS_VERSION_FILE_PATH)
def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
"""Find the cl.exe on the filesystem in the vc_dir depending on
TARGET_ARCH, HOST_ARCH and the msvc version. TARGET_ARCH and
HOST_ARCH can be extracted from the passed env; if env is None, the
native platform is assumed for both host and target.
Args:
env: Environment
a construction environment, usually if this is passed its
because there is a desired TARGET_ARCH to be used when searching
for a cl.exe
vc_dir: str
the path to the VC dir in the MSVC installation
msvc_version: str
msvc version (major.minor, e.g. 10.0)
Returns:
bool:
"""
# Determine whether a specific target platform was requested and use it to find
# the list of valid VCs; the default is host platform == target platform, which
# also applies when no env is supplied to extract a target platform from.
if env:
(host_platform, target_platform, req_target_platform) = get_host_target(env)
else:
host_platform = platform.machine().lower()
target_platform = host_platform
host_platform = _ARCH_TO_CANONICAL[host_platform]
target_platform = _ARCH_TO_CANONICAL[target_platform]
debug('_check_cl_exists_in_vc_dir(): host platform %s, target platform %s for version %s' % (host_platform, target_platform, msvc_version))
ver_num = float(get_msvc_version_numeric(msvc_version))
# make sure the cl.exe exists meaning the tool is installed
if ver_num > 14:
# 2017 and newer allowed multiple versions of the VC toolset to be installed at the same time.
# Just get the default tool version for now
#TODO: support setting a specific minor VC version
default_toolset_file = os.path.join(vc_dir, _VC_TOOLS_VERSION_FILE)
try:
with open(default_toolset_file) as f:
vc_specific_version = f.readlines()[0].strip()
except IOError:
debug('_check_cl_exists_in_vc_dir(): failed to read ' + default_toolset_file)
return False
except IndexError:
debug('_check_cl_exists_in_vc_dir(): failed to find MSVC version in ' + default_toolset_file)
return False
host_trgt_dir = _HOST_TARGET_TO_CL_DIR_GREATER_THAN_14.get((host_platform, target_platform), None)
if host_trgt_dir is None:
debug('_check_cl_exists_in_vc_dir(): unsupported host/target platform combo: (%s,%s)'%(host_platform, target_platform))
return False
cl_path = os.path.join(vc_dir, 'Tools','MSVC', vc_specific_version, 'bin', host_trgt_dir[0], host_trgt_dir[1], _CL_EXE_NAME)
debug('_check_cl_exists_in_vc_dir(): checking for ' + _CL_EXE_NAME + ' at ' + cl_path)
if os.path.exists(cl_path):
debug('_check_cl_exists_in_vc_dir(): found ' + _CL_EXE_NAME + '!')
return True
elif ver_num <= 14 and ver_num >= 8:
# The x86/x86 entry maps to "", which is falsy, so compare against None
# rather than testing `if not host_trgt_dir`.
host_trgt_dir = _HOST_TARGET_TO_CL_DIR.get((host_platform, target_platform), None)
if host_trgt_dir is None:
debug('_check_cl_exists_in_vc_dir(): unsupported host/target platform combo')
return False
cl_path = os.path.join(vc_dir, 'bin', host_trgt_dir, _CL_EXE_NAME)
debug('_check_cl_exists_in_vc_dir(): checking for ' + _CL_EXE_NAME + ' at ' + cl_path)
cl_path_exists = os.path.exists(cl_path)
if not cl_path_exists and host_platform == 'amd64':
# older versions of visual studio only had x86 binaries,
# so if the host platform is amd64, we need to check cross
# compile options (x86 binary compiles some other target on a 64 bit os)
# The x86/x86 entry maps to "", which is falsy, so compare against None
# rather than testing `if not host_trgt_dir`.
host_trgt_dir = _HOST_TARGET_TO_CL_DIR.get(('x86', target_platform), None)
if host_trgt_dir is None:
return False
cl_path = os.path.join(vc_dir, 'bin', host_trgt_dir, _CL_EXE_NAME)
debug('_check_cl_exists_in_vc_dir(): checking for ' + _CL_EXE_NAME + ' at ' + cl_path)
cl_path_exists = os.path.exists(cl_path)
if cl_path_exists:
debug('_check_cl_exists_in_vc_dir(): found ' + _CL_EXE_NAME + '!')
return True
elif ver_num < 8 and ver_num >= 6:
# not sure about these versions, so just walk the VC dir (could be slow)
for root, _, files in os.walk(vc_dir):
if _CL_EXE_NAME in files:
debug('get_installed_vcs ' + _CL_EXE_NAME + ' found %s' % os.path.join(root, _CL_EXE_NAME))
return True
return False
else:
# unsupported version: return False
debug('_check_cl_exists_in_vc_dir(): unsupported MSVC version: ' + str(ver_num))
return False
def cached_get_installed_vcs(env=None):
global __INSTALLED_VCS_RUN
if __INSTALLED_VCS_RUN is None:
ret = get_installed_vcs(env)
__INSTALLED_VCS_RUN = ret
return __INSTALLED_VCS_RUN
def get_installed_vcs(env=None):
installed_versions = []
for ver in _VCVER:
debug('trying to find VC %s' % ver)
try:
VC_DIR = find_vc_pdir(ver)
if VC_DIR:
debug('found VC %s' % ver)
if _check_cl_exists_in_vc_dir(env, VC_DIR, ver):
installed_versions.append(ver)
else:
debug('find_vc_pdir no compiler found %s' % ver)
else:
debug('find_vc_pdir return None for ver %s' % ver)
except (MSVCUnsupportedTargetArch, MSVCUnsupportedHostArch):
# Allow this exception to propagate further as it should cause
# SCons to exit with an error code
raise
except VisualCException as e:
debug('did not find VC %s: caught exception %s' % (ver, str(e)))
return installed_versions
def reset_installed_vcs():
"""Make it try again to find VC. This is just for the tests."""
global __INSTALLED_VCS_RUN
__INSTALLED_VCS_RUN = None
# Running these batch files isn't cheap: most of the time spent in
# msvs.generate() is due to vcvars*.bat. In a build that uses "tools='msvs'"
# in multiple environments, for example:
# env1 = Environment(tools='msvs')
# env2 = Environment(tools='msvs')
# we can greatly improve the speed of the second and subsequent Environment
# (or Clone) calls by memoizing the environment variables set by vcvars*.bat.
#
# Updated: by 2018, vcvarsall.bat had gotten so expensive (vs2017 era)
# it was breaking CI builds because the test suite starts scons so many
# times and the existing memo logic only helped with repeated calls
# within the same scons run. Windows builds on the CI system were split
# into chunks to get around single-build time limits.
# With VS2019 it got even slower and an optional persistent cache file
# was introduced. The cache now also stores only the parsed vars,
# not the entire output of running the batch file - saves a bit
# of time not parsing every time.
script_env_cache = None
def script_env(script, args=None):
global script_env_cache
if script_env_cache is None:
script_env_cache = common.read_script_env_cache()
cache_key = "{}--{}".format(script, args)
cache_data = script_env_cache.get(cache_key, None)
if cache_data is None:
stdout = common.get_output(script, args)
# Stupid batch files do not set return code: we take a look at the
# beginning of the output for an error message instead
olines = stdout.splitlines()
if olines[0].startswith("The specified configuration type is missing"):
raise BatchFileExecutionError("\n".join(olines[:2]))
cache_data = common.parse_output(stdout)
script_env_cache[cache_key] = cache_data
# once we updated cache, give a chance to write out if user wanted
common.write_script_env_cache(script_env_cache)
else:
#TODO: Python 2 cleanup
# If we "hit" data from the json file, we have a Py2 problem:
# keys & values will be unicode. don't detect, just convert.
if sys.version_info[0] == 2:
def convert(data):
if isinstance(data, basestring):
return str(data)
elif isinstance(data, collections.Mapping):
return dict(map(convert, data.iteritems()))
elif isinstance(data, collections.Iterable):
return type(data)(map(convert, data))
else:
return data
cache_data = convert(cache_data)
return cache_data
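
The memo shape of script_env, an in-memory dict keyed by "script--args" and mirrored to a JSON file, can be shown without running any batch file; the cache path and the canned variable values in this sketch are made up.

import json
import os
import tempfile

CACHE_FILE = os.path.join(tempfile.gettempdir(), 'toy_msvc_cache.json')  # hypothetical location
_cache = None

def cached_vars(script, args=None):
    global _cache
    if _cache is None:
        try:
            with open(CACHE_FILE) as f:
                _cache = json.load(f)         # warm start from the persistent file
        except (IOError, ValueError):
            _cache = {}
    key = '{}--{}'.format(script, args)       # same key shape script_env() uses
    if key not in _cache:
        # Stand-in for parse_output(get_output(script, args)); no batch file is run here.
        _cache[key] = {'PATH': ['C:\\fake\\VC\\bin'], 'INCLUDE': ['C:\\fake\\VC\\include']}
        with open(CACHE_FILE, 'w') as f:
            json.dump(_cache, f, indent=2)    # mirror to disk, like write_script_env_cache()
    return _cache[key]

print(cached_vars('vcvarsall.bat', 'amd64'))
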
def get_default_version(env):
debug('get_default_version()')
msvc_version = env.get('MSVC_VERSION')
msvs_version = env.get('MSVS_VERSION')
debug('get_default_version(): msvc_version:%s msvs_version:%s'%(msvc_version,msvs_version))
if msvs_version and not msvc_version:
SCons.Warnings.warn(
SCons.Warnings.DeprecatedWarning,
"MSVS_VERSION is deprecated: please use MSVC_VERSION instead ")
return msvs_version
elif msvc_version and msvs_version:
if not msvc_version == msvs_version:
SCons.Warnings.warn(
SCons.Warnings.VisualVersionMismatch,
"Requested msvc version (%s) and msvs version (%s) do " \
"not match: please use MSVC_VERSION only to request a " \
"visual studio version, MSVS_VERSION is deprecated" \
% (msvc_version, msvs_version))
return msvs_version
if not msvc_version:
installed_vcs = cached_get_installed_vcs(env)
debug('installed_vcs:%s' % installed_vcs)
if not installed_vcs:
#msg = 'No installed VCs'
#debug('msv %s' % repr(msg))
#SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, msg)
            debug('get_default_version: No installed VCs')
            return None
        msvc_version = installed_vcs[0]
        debug('get_default_version: using default installed MSVC version %s' % repr(msvc_version))
return msvc_version
def msvc_setup_env_once(env):
try:
has_run = env["MSVC_SETUP_RUN"]
except KeyError:
has_run = False
if not has_run:
msvc_setup_env(env)
env["MSVC_SETUP_RUN"] = True
def msvc_find_valid_batch_script(env, version):
debug('msvc_find_valid_batch_script()')
# Find the host platform, target platform, and if present the requested
# target platform
platforms = get_host_target(env)
debug(" msvs_find_valid_batch_script(): host_platform %s, target_platform %s req_target_platform:%s" % platforms)
host_platform, target_platform, req_target_platform = platforms
try_target_archs = [target_platform]
# VS2012 has a "cross compile" environment to build 64 bit
# with x86_amd64 as the argument to the batch setup script
if req_target_platform in ('amd64', 'x86_64'):
try_target_archs.append('x86_amd64')
elif not req_target_platform and target_platform in ['amd64', 'x86_64']:
# There may not be "native" amd64, but maybe "cross" x86_amd64 tools
try_target_archs.append('x86_amd64')
        # If the user hasn't specifically requested a TARGET_ARCH and the
        # TARGET_ARCH is amd64, then also try 32 bits in case there are no
        # viable 64-bit tools installed
try_target_archs.append('x86')
debug("msvs_find_valid_batch_script(): host_platform: %s try_target_archs:%s"%(host_platform, try_target_archs))
d = None
for tp in try_target_archs:
# Set to current arch.
env['TARGET_ARCH']=tp
debug("msvc_find_valid_batch_script() trying target_platform:%s"%tp)
host_target = (host_platform, tp)
if not is_host_target_supported(host_target, version):
warn_msg = "host, target = %s not supported for MSVC version %s" % \
(host_target, version)
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
arg = _HOST_TARGET_ARCH_TO_BAT_ARCH[host_target]
# Get just version numbers
maj, min = msvc_version_to_maj_min(version)
# VS2015+
if maj >= 14:
if env.get('MSVC_UWP_APP') == '1':
# Initialize environment variables with store/universal paths
arg += ' store'
# Try to locate a batch file for this host/target platform combo
try:
(vc_script, sdk_script) = find_batch_file(env, version, host_platform, tp)
debug('msvc_find_valid_batch_script() vc_script:%s sdk_script:%s'%(vc_script,sdk_script))
except VisualCException as e:
msg = str(e)
debug('Caught exception while looking for batch file (%s)' % msg)
warn_msg = "VC version %s not installed. " + \
"C/C++ compilers are most likely not set correctly.\n" + \
" Installed versions are: %s"
warn_msg = warn_msg % (version, cached_get_installed_vcs(env))
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
continue
# Try to use the located batch file for this host/target platform combo
debug('msvc_find_valid_batch_script() use_script 2 %s, args:%s' % (repr(vc_script), arg))
found = None
if vc_script:
try:
d = script_env(vc_script, args=arg)
found = vc_script
except BatchFileExecutionError as e:
debug('msvc_find_valid_batch_script() use_script 3: failed running VC script %s: %s: Error:%s'%(repr(vc_script),arg,e))
vc_script=None
continue
if not vc_script and sdk_script:
debug('msvc_find_valid_batch_script() use_script 4: trying sdk script: %s'%(sdk_script))
try:
d = script_env(sdk_script)
found = sdk_script
except BatchFileExecutionError as e:
debug('msvc_find_valid_batch_script() use_script 5: failed running SDK script %s: Error:%s'%(repr(sdk_script),e))
continue
elif not vc_script and not sdk_script:
debug('msvc_find_valid_batch_script() use_script 6: Neither VC script nor SDK script found')
continue
debug("msvc_find_valid_batch_script() Found a working script/target: %s/%s"%(repr(found),arg))
break # We've found a working target_platform, so stop looking
    # If we cannot find a viable installed compiler, reset the TARGET_ARCH
    # to its initial value
if not d:
env['TARGET_ARCH']=req_target_platform
return d
def msvc_setup_env(env):
debug('msvc_setup_env()')
version = get_default_version(env)
if version is None:
warn_msg = "No version of Visual Studio compiler found - C/C++ " \
"compilers most likely not set correctly"
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
debug('msvc_setup_env: using specified MSVC version %s' % repr(version))
    # XXX: we set up both the MSVC and MSVS versions for backward
    # compatibility with the msvs tool
env['MSVC_VERSION'] = version
env['MSVS_VERSION'] = version
env['MSVS'] = {}
use_script = env.get('MSVC_USE_SCRIPT', True)
if SCons.Util.is_String(use_script):
debug('msvc_setup_env() use_script 1 %s' % repr(use_script))
d = script_env(use_script)
elif use_script:
d = msvc_find_valid_batch_script(env,version)
debug('msvc_setup_env() use_script 2 %s' % d)
if not d:
return d
else:
debug('MSVC_USE_SCRIPT set to False')
warn_msg = "MSVC_USE_SCRIPT set to False, assuming environment " \
"set correctly."
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
for k, v in d.items():
debug('msvc_setup_env() env:%s -> %s'%(k,v))
env.PrependENVPath(k, v, delete_existing=True)
# final check to issue a warning if the compiler is not present
msvc_cl = find_program_path(env, 'cl')
if not msvc_cl:
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning,
"Could not find MSVC compiler 'cl', it may need to be installed separately with Visual Studio")
def msvc_exists(env=None, version=None):
vcs = cached_get_installed_vcs(env)
if version is None:
return len(vcs) > 0
return version in vcs
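As a hedged usage sketch (the version strings and values below are examples, not anything mandated by this change), this is how the detection code above is typically exercised from an SConstruct: pinning MSVC_VERSION bypasses the default selection in get_default_version(), and msvc_exists() gives an early, explicit failure when the requested toolchain is absent.

# SConstruct sketch; Environment and Exit come from the SCons.Script namespace.
from SCons.Tool.MSCommon import vc as msvc_vc  # the module shown above

env = Environment(MSVC_VERSION='14.2', TARGET_ARCH='x86_64')  # example values
if not msvc_vc.msvc_exists(env, version='14.2'):
    print('Requested Visual C++ 14.2 toolchain was not found')
    Exit(1)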

File diff suppressed because it is too large Load Diff

View File

@ -1,236 +0,0 @@
"""SCons.Tool.cyglink
Customization of gnulink for Cygwin (http://www.cygwin.com/)
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
from __future__ import absolute_import, print_function
import re
import os
import SCons.Action
import SCons.Util
import SCons.Tool
#MAYBE: from . import gnulink
from . import gnulink
from . import link
def _lib_generator(target, source, env, for_signature, **kw):
try: cmd = kw['cmd']
except KeyError: cmd = SCons.Util.CLVar(['$SHLINK'])
try: vp = kw['varprefix']
except KeyError: vp = 'SHLIB'
dll = env.FindIxes(target, '%sPREFIX' % vp, '%sSUFFIX' % vp)
if dll: cmd.extend(['-o', dll])
cmd.extend(['$SHLINKFLAGS', '$__%sVERSIONFLAGS' % vp, '$__RPATH'])
implib = env.FindIxes(target, 'IMPLIBPREFIX', 'IMPLIBSUFFIX')
if implib:
cmd.extend([
'-Wl,--out-implib='+implib.get_string(for_signature),
'-Wl,--export-all-symbols',
'-Wl,--enable-auto-import',
'-Wl,--whole-archive', '$SOURCES',
'-Wl,--no-whole-archive', '$_LIBDIRFLAGS', '$_LIBFLAGS'
])
else:
cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS'])
return [cmd]
def shlib_generator(target, source, env, for_signature):
return _lib_generator(target, source, env, for_signature,
varprefix='SHLIB',
cmd = SCons.Util.CLVar(['$SHLINK']))
def ldmod_generator(target, source, env, for_signature):
return _lib_generator(target, source, env, for_signature,
varprefix='LDMODULE',
cmd = SCons.Util.CLVar(['$LDMODULE']))
def _lib_emitter(target, source, env, **kw):
Verbose = False
if Verbose:
print("_lib_emitter: target[0]=%r" % target[0].get_path())
try: vp = kw['varprefix']
except KeyError: vp = 'SHLIB'
try: libtype = kw['libtype']
except KeyError: libtype = 'ShLib'
dll = env.FindIxes(target, '%sPREFIX' % vp, '%sSUFFIX' % vp)
no_import_lib = env.get('no_import_lib', 0)
if Verbose:
print("_lib_emitter: dll=%r" % dll.get_path())
if not dll or len(target) > 1:
raise SCons.Errors.UserError("A shared library should have exactly one target with the suffix: %s" % env.subst("$%sSUFFIX" % vp))
# Remove any "lib" after the prefix
pre = env.subst('$%sPREFIX' % vp)
if dll.name[len(pre):len(pre)+3] == 'lib':
dll.name = pre + dll.name[len(pre)+3:]
if Verbose:
print("_lib_emitter: dll.name=%r" % dll.name)
orig_target = target
target = [env.fs.File(dll)]
target[0].attributes.shared = 1
if Verbose:
print("_lib_emitter: after target=[env.fs.File(dll)]: target[0]=%r" % target[0].get_path())
# Append an import lib target
if not no_import_lib:
# Create list of target libraries as strings
target_strings = env.ReplaceIxes(orig_target[0],
'%sPREFIX' % vp, '%sSUFFIX' % vp,
'IMPLIBPREFIX', 'IMPLIBSUFFIX')
if Verbose:
print("_lib_emitter: target_strings=%r" % target_strings)
implib_target = env.fs.File(target_strings)
if Verbose:
print("_lib_emitter: implib_target=%r" % implib_target.get_path())
implib_target.attributes.shared = 1
target.append(implib_target)
symlinks = SCons.Tool.ImpLibSymlinkGenerator(env, implib_target,
implib_libtype=libtype,
generator_libtype=libtype+'ImpLib')
if Verbose:
print("_lib_emitter: implib symlinks=%r" % SCons.Tool.StringizeLibSymlinks(symlinks))
if symlinks:
SCons.Tool.EmitLibSymlinks(env, symlinks, implib_target, clean_targets = target[0])
implib_target.attributes.shliblinks = symlinks
return (target, source)
def shlib_emitter(target, source, env):
return _lib_emitter(target, source, env, varprefix='SHLIB', libtype='ShLib')
def ldmod_emitter(target, source, env):
return _lib_emitter(target, source, env, varprefix='LDMODULE', libtype='LdMod')
def _versioned_lib_suffix(env, suffix, version):
"""Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'"""
Verbose = False
if Verbose:
print("_versioned_lib_suffix: suffix= ", suffix)
print("_versioned_lib_suffix: version= ", version)
cygversion = re.sub(r'\.', '-', version)
if not suffix.startswith('-' + cygversion):
suffix = '-' + cygversion + suffix
if Verbose:
print("_versioned_lib_suffix: return suffix= ", suffix)
return suffix
def _versioned_implib_name(env, libnode, version, prefix, suffix, **kw):
return link._versioned_lib_name(env, libnode, version, prefix, suffix,
SCons.Tool.ImpLibPrefixGenerator,
SCons.Tool.ImpLibSuffixGenerator,
implib_libtype=kw['libtype'])
def _versioned_implib_symlinks(env, libnode, version, prefix, suffix, **kw):
"""Generate link names that should be created for a versioned shared library.
Returns a list in the form [ (link, linktarget), ... ]
"""
Verbose = False
if Verbose:
print("_versioned_implib_symlinks: libnode=%r" % libnode.get_path())
print("_versioned_implib_symlinks: version=%r" % version)
try: libtype = kw['libtype']
except KeyError: libtype = 'ShLib'
linkdir = os.path.dirname(libnode.get_path())
if Verbose:
print("_versioned_implib_symlinks: linkdir=%r" % linkdir)
name = SCons.Tool.ImpLibNameGenerator(env, libnode,
implib_libtype=libtype,
generator_libtype=libtype+'ImpLib')
if Verbose:
print("_versioned_implib_symlinks: name=%r" % name)
major = version.split('.')[0]
link0 = env.fs.File(os.path.join(linkdir, name))
symlinks = [(link0, libnode)]
if Verbose:
print("_versioned_implib_symlinks: return symlinks=%r" % SCons.Tool.StringizeLibSymlinks(symlinks))
return symlinks
shlib_action = SCons.Action.Action(shlib_generator, generator=1)
ldmod_action = SCons.Action.Action(ldmod_generator, generator=1)
def generate(env):
"""Add Builders and construction variables for cyglink to an Environment."""
gnulink.generate(env)
env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,-no-undefined')
env['SHLINKCOM'] = shlib_action
env['LDMODULECOM'] = ldmod_action
env.Append(SHLIBEMITTER = [shlib_emitter])
env.Append(LDMODULEEMITTER = [ldmod_emitter])
env['SHLIBPREFIX'] = 'cyg'
env['SHLIBSUFFIX'] = '.dll'
env['IMPLIBPREFIX'] = 'lib'
env['IMPLIBSUFFIX'] = '.dll.a'
# Variables used by versioned shared libraries
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS'
# SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink...
# LINKCALLBACKS are NOT inherited from gnulink
env['LINKCALLBACKS'] = {
'VersionedShLibSuffix' : _versioned_lib_suffix,
'VersionedLdModSuffix' : _versioned_lib_suffix,
'VersionedImpLibSuffix' : _versioned_lib_suffix,
'VersionedShLibName' : link._versioned_shlib_name,
'VersionedLdModName' : link._versioned_ldmod_name,
'VersionedShLibImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='ShLib'),
'VersionedLdModImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='LdMod'),
'VersionedShLibImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='ShLib'),
'VersionedLdModImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='LdMod'),
}
# these variables were set by gnulink but are not used in cyglink
try: del env['_SHLIBSONAME']
except KeyError: pass
try: del env['_LDMODULESONAME']
except KeyError: pass
def exists(env):
return gnulink.exists(env)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
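For orientation, a minimal SConstruct sketch (file names assumed, Cygwin host implied) of how the cyglink customizations above play out: with SHLIBVERSION set, the '-X-Y-Z.dll' suffix from _versioned_lib_suffix is applied and an import library plus its unversioned symlink are emitted next to the DLL.

# SConstruct sketch for the cyglink tool (names are examples).
env = Environment(tools=['default', 'cyglink'])
env.SharedLibrary('foo', ['foo.c'], SHLIBVERSION='0.1.2')
# Expected outputs, per the emitters and suffix generators above:
# cygfoo-0-1-2.dll, libfoo-0-1-2.dll.a and an unversioned libfoo.dll.a symlink.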

View File

@ -1,60 +0,0 @@
"""gettext tool
"""
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/gettext_tool.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
#############################################################################
def generate(env,**kw):
import sys
import os
import SCons.Tool
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
from SCons.Tool.GettextCommon \
import _translate, tool_list
for t in tool_list(env['PLATFORM'], env):
if sys.platform == 'win32':
tool = SCons.Tool.find_program_path(env, t, default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS )
if tool:
tool_bin_dir = os.path.dirname(tool)
env.AppendENVPath('PATH', tool_bin_dir)
else:
SCons.Warnings.Warning(t + ' tool requested, but binary not found in ENV PATH')
env.Tool(t)
env.AddMethod(_translate, 'Translate')
#############################################################################
#############################################################################
def exists(env):
from SCons.Tool.GettextCommon \
import _xgettext_exists, _msginit_exists, \
_msgmerge_exists, _msgfmt_exists
try:
return _xgettext_exists(env) and _msginit_exists(env) \
and _msgmerge_exists(env) and _msgfmt_exists(env)
except:
return False
#############################################################################
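A brief, hedged illustration (languages and source names are examples): the gettext umbrella tool above only loads the per-program tools for the current platform and exposes the Translate() method, which chains message extraction, catalog creation/update and compilation.

# SConstruct sketch for the gettext toolset (languages and sources are examples).
env = Environment(tools=['default', 'gettext'], POAUTOINIT=True)
# Extract strings from the sources, create or update en.po and pl.po, and
# compile the matching .mo catalogs via the chained builders.
env.Translate(['en', 'pl'], ['src/main.cpp'])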

View File

@ -1,141 +0,0 @@
"""SCons.Tool.lex
Tool-specific initialization for lex.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/lex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os.path
import sys
import SCons.Action
import SCons.Tool
import SCons.Util
import SCons.Warnings
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
from SCons.Platform.win32 import CHOCO_DEFAULT_PATH
LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR")
if sys.platform == 'win32':
BINS = ['flex', 'lex', 'win_flex']
else:
BINS = ["flex", "lex"]
def lexEmitter(target, source, env):
sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0]))
if sourceExt == ".lm": # If using Objective-C
target = [sourceBase + ".m"] # the extension is ".m".
# This emitter essentially tries to add to the target all extra
# files generated by flex.
# Different options that are used to trigger the creation of extra files.
fileGenOptions = ["--header-file=", "--tables-file="]
lexflags = env.subst("$LEXFLAGS", target=target, source=source)
for option in SCons.Util.CLVar(lexflags):
for fileGenOption in fileGenOptions:
l = len(fileGenOption)
if option[:l] == fileGenOption:
# A file generating option is present, so add the
# file name to the target list.
fileName = option[l:].strip()
target.append(fileName)
return (target, source)
def get_lex_path(env, append_paths=False):
"""
Find the path to the lex tool, searching several possible names
Only called in the Windows case, so the default_path
can be Windows-specific
:param env: current construction environment
:param append_paths: if set, add the path to the tool to PATH
:return: path to lex tool, if found
"""
for prog in BINS:
bin_path = SCons.Tool.find_program_path(
env,
prog,
default_paths=CHOCO_DEFAULT_PATH + MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS )
if bin_path:
if append_paths:
env.AppendENVPath('PATH', os.path.dirname(bin_path))
return bin_path
SCons.Warnings.Warning('lex tool requested, but lex or flex binary not found in ENV PATH')
def generate(env):
"""Add Builders and construction variables for lex to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action(".l", LexAction)
c_file.add_emitter(".l", lexEmitter)
c_file.add_action(".lex", LexAction)
c_file.add_emitter(".lex", lexEmitter)
# Objective-C
cxx_file.add_action(".lm", LexAction)
cxx_file.add_emitter(".lm", lexEmitter)
# C++
cxx_file.add_action(".ll", LexAction)
cxx_file.add_emitter(".ll", lexEmitter)
env["LEXFLAGS"] = SCons.Util.CLVar("")
if sys.platform == 'win32':
# ignore the return - we do not need the full path here
_ = get_lex_path(env, append_paths=True)
env["LEX"] = env.Detect(BINS)
if not env.get("LEXUNISTD"):
env["LEXUNISTD"] = SCons.Util.CLVar("")
env["LEXCOM"] = "$LEX $LEXUNISTD $LEXFLAGS -t $SOURCES > $TARGET"
else:
env["LEX"] = env.Detect(BINS)
env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET"
def exists(env):
if sys.platform == 'win32':
return get_lex_path(env)
else:
return env.Detect(BINS)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
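A short, hedged usage sketch (file names are examples): the emitter above is what turns a flex '--header-file=' option into an extra tracked target.

# SConstruct sketch for the lex tool (file names are examples).
env = Environment(tools=['default', 'lex'])
# Because '--header-file=' appears in LEXFLAGS, lexEmitter also registers
# scanner.h as a target, so it is rebuilt and cleaned along with scanner.c.
env.CFile(target='scanner.c', source='scanner.l',
          LEXFLAGS='--header-file=scanner.h')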

View File

@ -1,362 +0,0 @@
"""SCons.Tool.link
Tool-specific initialization for the generic Posix linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
__revision__ = "src/engine/SCons/Tool/link.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import sys
import re
import os
import SCons.Tool
import SCons.Util
import SCons.Warnings
from SCons.Tool.FortranCommon import isfortran
from SCons.Tool.DCommon import isD
from SCons.Tool.cxx import iscplusplus
issued_mixed_link_warning = False
def smart_link(source, target, env, for_signature):
has_cplusplus = iscplusplus(source)
has_fortran = isfortran(env, source)
has_d = isD(env, source)
if has_cplusplus and has_fortran and not has_d:
global issued_mixed_link_warning
if not issued_mixed_link_warning:
msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \
"This may generate a buggy executable if the '%s'\n\t" + \
"compiler does not know how to deal with Fortran runtimes."
SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning,
msg % env.subst('$CXX'))
issued_mixed_link_warning = True
return '$CXX'
elif has_d:
env['LINKCOM'] = env['DLINKCOM']
env['SHLINKCOM'] = env['SHDLINKCOM']
return '$DC'
elif has_fortran:
return '$FORTRAN'
elif has_cplusplus:
return '$CXX'
return '$CC'
def _lib_emitter(target, source, env, **kw):
Verbose = False
if Verbose:
print("_lib_emitter: target[0]={!r}".format(target[0].get_path()))
for tgt in target:
if SCons.Util.is_String(tgt):
tgt = env.File(tgt)
tgt.attributes.shared = 1
try:
symlink_generator = kw['symlink_generator']
except KeyError:
pass
else:
if Verbose:
print("_lib_emitter: symlink_generator={!r}".format(symlink_generator))
symlinks = symlink_generator(env, target[0])
if Verbose:
print("_lib_emitter: symlinks={!r}".format(symlinks))
if symlinks:
SCons.Tool.EmitLibSymlinks(env, symlinks, target[0])
target[0].attributes.shliblinks = symlinks
return (target, source)
def shlib_emitter(target, source, env):
return _lib_emitter(target, source, env, symlink_generator=SCons.Tool.ShLibSymlinkGenerator)
def ldmod_emitter(target, source, env):
return _lib_emitter(target, source, env, symlink_generator=SCons.Tool.LdModSymlinkGenerator)
# This is generic enough to be included here...
def _versioned_lib_name(env, libnode, version, prefix, suffix, prefix_generator, suffix_generator, **kw):
"""For libnode='/optional/dir/libfoo.so.X.Y.Z' it returns 'libfoo.so'"""
Verbose = False
if Verbose:
print("_versioned_lib_name: libnode={!r}".format(libnode.get_path()))
print("_versioned_lib_name: version={!r}".format(version))
print("_versioned_lib_name: prefix={!r}".format(prefix))
print("_versioned_lib_name: suffix={!r}".format(suffix))
print("_versioned_lib_name: suffix_generator={!r}".format(suffix_generator))
versioned_name = os.path.basename(libnode.get_path())
if Verbose:
print("_versioned_lib_name: versioned_name={!r}".format(versioned_name))
versioned_prefix = prefix_generator(env, **kw)
versioned_suffix = suffix_generator(env, **kw)
if Verbose:
print("_versioned_lib_name: versioned_prefix={!r}".format(versioned_prefix))
print("_versioned_lib_name: versioned_suffix={!r}".format(versioned_suffix))
versioned_prefix_re = '^' + re.escape(versioned_prefix)
versioned_suffix_re = re.escape(versioned_suffix) + '$'
name = re.sub(versioned_prefix_re, prefix, versioned_name)
name = re.sub(versioned_suffix_re, suffix, name)
if Verbose:
print("_versioned_lib_name: name={!r}".format(name))
return name
def _versioned_shlib_name(env, libnode, version, prefix, suffix, **kw):
prefix_generator = SCons.Tool.ShLibPrefixGenerator
suffix_generator = SCons.Tool.ShLibSuffixGenerator
return _versioned_lib_name(env, libnode, version, prefix, suffix, prefix_generator, suffix_generator, **kw)
def _versioned_ldmod_name(env, libnode, version, prefix, suffix, **kw):
prefix_generator = SCons.Tool.LdModPrefixGenerator
suffix_generator = SCons.Tool.LdModSuffixGenerator
return _versioned_lib_name(env, libnode, version, prefix, suffix, prefix_generator, suffix_generator, **kw)
def _versioned_lib_suffix(env, suffix, version):
"""For suffix='.so' and version='0.1.2' it returns '.so.0.1.2'"""
Verbose = False
if Verbose:
print("_versioned_lib_suffix: suffix={!r}".format(suffix))
print("_versioned_lib_suffix: version={!r}".format(version))
if not suffix.endswith(version):
suffix = suffix + '.' + version
if Verbose:
print("_versioned_lib_suffix: return suffix={!r}".format(suffix))
return suffix
def _versioned_lib_soname(env, libnode, version, prefix, suffix, name_func):
"""For libnode='/optional/dir/libfoo.so.X.Y.Z' it returns 'libfoo.so.X'"""
Verbose = False
if Verbose:
print("_versioned_lib_soname: version={!r}".format(version))
name = name_func(env, libnode, version, prefix, suffix)
if Verbose:
print("_versioned_lib_soname: name={!r}".format(name))
major = version.split('.')[0]
soname = name + '.' + major
if Verbose:
print("_versioned_lib_soname: soname={!r}".format(soname))
return soname
def _versioned_shlib_soname(env, libnode, version, prefix, suffix):
return _versioned_lib_soname(env, libnode, version, prefix, suffix, _versioned_shlib_name)
def _versioned_ldmod_soname(env, libnode, version, prefix, suffix):
return _versioned_lib_soname(env, libnode, version, prefix, suffix, _versioned_ldmod_name)
def _versioned_lib_symlinks(env, libnode, version, prefix, suffix, name_func, soname_func):
"""Generate link names that should be created for a versioned shared library.
    Returns a list in the form [ (link, linktarget), ... ]
"""
Verbose = False
if Verbose:
print("_versioned_lib_symlinks: libnode={!r}".format(libnode.get_path()))
print("_versioned_lib_symlinks: version={!r}".format(version))
if sys.platform.startswith('openbsd'):
# OpenBSD uses x.y shared library versioning numbering convention
# and doesn't use symlinks to backwards-compatible libraries
if Verbose:
print("_versioned_lib_symlinks: return symlinks={!r}".format(None))
return None
linkdir = libnode.get_dir()
if Verbose:
print("_versioned_lib_symlinks: linkdir={!r}".format(linkdir.get_path()))
name = name_func(env, libnode, version, prefix, suffix)
if Verbose:
print("_versioned_lib_symlinks: name={!r}".format(name))
soname = soname_func(env, libnode, version, prefix, suffix)
if Verbose:
print("_versioned_lib_symlinks: soname={!r}".format(soname))
link0 = env.fs.File(soname, linkdir)
link1 = env.fs.File(name, linkdir)
# We create direct symlinks, not daisy-chained.
if link0 == libnode:
# This enables SHLIBVERSION without periods (e.g. SHLIBVERSION=1)
symlinks = [(link1, libnode)]
else:
# This handles usual SHLIBVERSION, i.e. '1.2', '1.2.3', etc.
symlinks = [(link0, libnode), (link1, libnode)]
if Verbose:
print("_versioned_lib_symlinks: return symlinks={!r}".format(SCons.Tool.StringizeLibSymlinks(symlinks)))
return symlinks
def _versioned_shlib_symlinks(env, libnode, version, prefix, suffix):
name_func = env['LINKCALLBACKS']['VersionedShLibName']
soname_func = env['LINKCALLBACKS']['VersionedShLibSoname']
return _versioned_lib_symlinks(env, libnode, version, prefix, suffix, name_func, soname_func)
def _versioned_ldmod_symlinks(env, libnode, version, prefix, suffix):
    name_func = env['LINKCALLBACKS']['VersionedLdModName']
    soname_func = env['LINKCALLBACKS']['VersionedLdModSoname']
return _versioned_lib_symlinks(env, libnode, version, prefix, suffix, name_func, soname_func)
def _versioned_lib_callbacks():
return {
'VersionedShLibSuffix': _versioned_lib_suffix,
'VersionedLdModSuffix': _versioned_lib_suffix,
'VersionedShLibSymlinks': _versioned_shlib_symlinks,
'VersionedLdModSymlinks': _versioned_ldmod_symlinks,
'VersionedShLibName': _versioned_shlib_name,
'VersionedLdModName': _versioned_ldmod_name,
'VersionedShLibSoname': _versioned_shlib_soname,
'VersionedLdModSoname': _versioned_ldmod_soname,
}.copy()
def _setup_versioned_lib_variables(env, **kw):
"""
Setup all variables required by the versioning machinery
"""
tool = None
try:
tool = kw['tool']
except KeyError:
pass
use_soname = False
try:
use_soname = kw['use_soname']
except KeyError:
pass
# The $_SHLIBVERSIONFLAGS define extra commandline flags used when
# building VERSIONED shared libraries. It's always set, but used only
# when VERSIONED library is built (see __SHLIBVERSIONFLAGS in SCons/Defaults.py).
if use_soname:
        # If the linker uses SONAME, then we need this little automaton
if tool == 'sunlink':
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS -h $_SHLIBSONAME'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS -h $_LDMODULESONAME'
else:
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS -Wl,-soname=$_SHLIBSONAME'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS -Wl,-soname=$_LDMODULESONAME'
env['_SHLIBSONAME'] = '${ShLibSonameGenerator(__env__,TARGET)}'
env['_LDMODULESONAME'] = '${LdModSonameGenerator(__env__,TARGET)}'
env['ShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator
env['LdModSonameGenerator'] = SCons.Tool.LdModSonameGenerator
else:
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS'
    # LDMODULEVERSIONFLAGS should always default to $SHLIBVERSIONFLAGS
env['LDMODULEVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
def generate(env):
"""Add Builders and construction variables for gnulink to an Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $__SHLIBVERSIONFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
# don't set up the emitter, because AppendUnique will generate a list
# starting with None :-(
env.Append(SHLIBEMITTER=[shlib_emitter])
env['SMARTLINK'] = smart_link
env['LINK'] = "$SMARTLINK"
env['LINKFLAGS'] = SCons.Util.CLVar('')
# __RPATH is only set to something ($_RPATH typically) on platforms that support it.
env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LIBDIRPREFIX'] = '-L'
env['LIBDIRSUFFIX'] = ''
env['_LIBFLAGS'] = '${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}'
env['LIBLINKPREFIX'] = '-l'
env['LIBLINKSUFFIX'] = ''
if env['PLATFORM'] == 'hpux':
env['SHLIBSUFFIX'] = '.sl'
elif env['PLATFORM'] == 'aix':
env['SHLIBSUFFIX'] = '.a'
# For most platforms, a loadable module is the same as a shared
# library. Platforms which are different can override these, but
# setting them the same means that LoadableModule works everywhere.
SCons.Tool.createLoadableModuleBuilder(env)
env['LDMODULE'] = '$SHLINK'
env.Append(LDMODULEEMITTER=[ldmod_emitter])
env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
    env['LDMODULECOM'] = '$LDMODULE -o $TARGET $LDMODULEFLAGS $__LDMODULEVERSIONFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LDMODULEVERSION'] = '$SHLIBVERSION'
env['LDMODULENOVERSIONSYMLINKS'] = '$SHLIBNOVERSIONSYMLINKS'
def exists(env):
# This module isn't really a Tool on its own, it's common logic for
# other linkers.
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
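As a hedged illustration (library name and version are examples): on the gnulink-style platforms that build on this module, SHLIBVERSION drives both the soname flag machinery and the symlink callbacks defined above.

# SConstruct sketch for the generic link machinery (illustrative values).
env = Environment(tools=['default'])
# With SHLIBVERSION set, $_SHLIBVERSIONFLAGS expands to -Wl,-soname=libbar.so.1
# on soname-aware linkers, and the symlink callbacks create libbar.so and
# libbar.so.1 pointing at libbar.so.1.2.3.
env.SharedLibrary('bar', ['bar.c'], SHLIBVERSION='1.2.3')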

View File

@ -1,122 +0,0 @@
""" msgfmt tool """
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/msgfmt.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
from SCons.Builder import BuilderBase
#############################################################################
class _MOFileBuilder(BuilderBase):
""" The builder class for `MO` files.
The reason for this builder to exists and its purpose is quite simillar
as for `_POFileBuilder`. This time, we extend list of sources, not targets,
and call `BuilderBase._execute()` only once (as we assume single-target
here).
"""
def _execute(self, env, target, source, *args, **kw):
    # Here we add support for the 'LINGUAS_FILE' keyword. An emitter is not
    # suitable in this case, as it is called too late (after multiple sources
    # are handled by the single_source builder).
import SCons.Util
from SCons.Tool.GettextCommon import _read_linguas_from_files
linguas_files = None
if 'LINGUAS_FILE' in env and env['LINGUAS_FILE'] is not None:
linguas_files = env['LINGUAS_FILE']
# This should prevent from endless recursion.
env['LINGUAS_FILE'] = None
# We read only languages. Suffixes shall be added automatically.
linguas = _read_linguas_from_files(env, linguas_files)
if SCons.Util.is_List(source):
source.extend(linguas)
elif source is not None:
source = [source] + linguas
else:
source = linguas
result = BuilderBase._execute(self,env,target,source,*args, **kw)
if linguas_files is not None:
env['LINGUAS_FILE'] = linguas_files
return result
#############################################################################
#############################################################################
def _create_mo_file_builder(env, **kw):
""" Create builder object for `MOFiles` builder """
import SCons.Action
  # FIXME: Which factory should we use for source? Ours or theirs?
kw['action'] = SCons.Action.Action('$MSGFMTCOM','$MSGFMTCOMSTR')
kw['suffix'] = '$MOSUFFIX'
kw['src_suffix'] = '$POSUFFIX'
kw['src_builder'] = '_POUpdateBuilder'
kw['single_source'] = True
return _MOFileBuilder(**kw)
#############################################################################
#############################################################################
def generate(env,**kw):
""" Generate `msgfmt` tool """
import sys
import os
import SCons.Util
import SCons.Tool
from SCons.Tool.GettextCommon import _detect_msgfmt
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
if sys.platform == 'win32':
msgfmt = SCons.Tool.find_program_path(env, 'msgfmt', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS )
if msgfmt:
msgfmt_bin_dir = os.path.dirname(msgfmt)
env.AppendENVPath('PATH', msgfmt_bin_dir)
else:
SCons.Warnings.Warning('msgfmt tool requested, but binary not found in ENV PATH')
try:
env['MSGFMT'] = _detect_msgfmt(env)
except:
env['MSGFMT'] = 'msgfmt'
env.SetDefault(
MSGFMTFLAGS = [ SCons.Util.CLVar('-c') ],
MSGFMTCOM = '$MSGFMT $MSGFMTFLAGS -o $TARGET $SOURCE',
MSGFMTCOMSTR = '',
MOSUFFIX = ['.mo'],
POSUFFIX = ['.po']
)
env.Append( BUILDERS = { 'MOFiles' : _create_mo_file_builder(env) } )
#############################################################################
#############################################################################
def exists(env):
""" Check if the tool exists """
from SCons.Tool.GettextCommon import _msgfmt_exists
try:
return _msgfmt_exists(env)
except:
return False
#############################################################################
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
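A hedged usage sketch (locale names are examples; the .po catalogs are assumed to exist): the MOFiles builder registered above compiles portable-object catalogs into machine-object form.

# SConstruct sketch for the msgfmt tool (locales are examples).
env = Environment(tools=['default', 'msgfmt'])
# '.po' ($POSUFFIX) is appended automatically; each catalog is compiled into a
# '.mo' file with $MSGFMTCOM. LINGUAS_FILE, if set, extends the source list.
env.MOFiles(['pl', 'de'])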

View File

@ -1,134 +0,0 @@
""" msginit tool
Tool specific initialization of msginit tool.
"""
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/msginit.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.Warnings
import SCons.Builder
import re
#############################################################################
def _optional_no_translator_flag(env):
""" Return '--no-translator' flag if we run *msginit(1)* in non-interactive
mode."""
import SCons.Util
if 'POAUTOINIT' in env:
autoinit = env['POAUTOINIT']
else:
autoinit = False
if autoinit:
return [SCons.Util.CLVar('--no-translator')]
else:
return [SCons.Util.CLVar('')]
#############################################################################
#############################################################################
def _POInitBuilder(env, **kw):
""" Create builder object for `POInit` builder. """
import SCons.Action
from SCons.Tool.GettextCommon import _init_po_files, _POFileBuilder
action = SCons.Action.Action(_init_po_files, None)
return _POFileBuilder(env, action=action, target_alias='$POCREATE_ALIAS')
#############################################################################
#############################################################################
from SCons.Environment import _null
#############################################################################
def _POInitBuilderWrapper(env, target=None, source=_null, **kw):
""" Wrapper for _POFileBuilder. We use it to make user's life easier.
This wrapper checks for `$POTDOMAIN` construction variable (or override in
`**kw`) and treats it appropriatelly.
"""
if source is _null:
if 'POTDOMAIN' in kw:
domain = kw['POTDOMAIN']
elif 'POTDOMAIN' in env:
domain = env['POTDOMAIN']
else:
domain = 'messages'
source = [ domain ] # NOTE: Suffix shall be appended automatically
return env._POInitBuilder(target, source, **kw)
#############################################################################
#############################################################################
def generate(env,**kw):
""" Generate the `msginit` tool """
import sys
import os
import SCons.Util
import SCons.Tool
from SCons.Tool.GettextCommon import _detect_msginit
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
if sys.platform == 'win32':
msginit = SCons.Tool.find_program_path(env, 'msginit', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS )
if msginit:
msginit_bin_dir = os.path.dirname(msginit)
env.AppendENVPath('PATH', msginit_bin_dir)
else:
SCons.Warnings.Warning('msginit tool requested, but binary not found in ENV PATH')
try:
env['MSGINIT'] = _detect_msginit(env)
except:
env['MSGINIT'] = 'msginit'
msginitcom = '$MSGINIT ${_MSGNoTranslator(__env__)} -l ${_MSGINITLOCALE}' \
+ ' $MSGINITFLAGS -i $SOURCE -o $TARGET'
# NOTE: We set POTSUFFIX here, in case the 'xgettext' is not loaded
# (sometimes we really don't need it)
env.SetDefault(
POSUFFIX = ['.po'],
POTSUFFIX = ['.pot'],
_MSGINITLOCALE = '${TARGET.filebase}',
_MSGNoTranslator = _optional_no_translator_flag,
MSGINITCOM = msginitcom,
MSGINITCOMSTR = '',
MSGINITFLAGS = [ ],
POAUTOINIT = False,
POCREATE_ALIAS = 'po-create'
)
env.Append( BUILDERS = { '_POInitBuilder' : _POInitBuilder(env) } )
env.AddMethod(_POInitBuilderWrapper, 'POInit')
env.AlwaysBuild(env.Alias('$POCREATE_ALIAS'))
#############################################################################
#############################################################################
def exists(env):
""" Check if the tool exists """
from SCons.Tool.GettextCommon import _msginit_exists
try:
return _msginit_exists(env)
except:
return False
#############################################################################
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
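A hedged usage sketch (domain and locale names are examples): with no explicit source, the wrapper above falls back to the 'messages' domain, and POAUTOINIT makes msginit run without prompting for a translator.

# SConstruct sketch for the msginit tool (locales are examples).
env = Environment(tools=['default', 'msginit'], POAUTOINIT=True)
# Creates en.po and pl.po from messages.pot; the targets are also collected
# under the 'po-create' alias defined above.
env.POInit(['en', 'pl'])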

View File

@ -1,117 +0,0 @@
""" msgmerget tool
Tool specific initialization for `msgmerge` tool.
"""
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/msgmerge.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
#############################################################################
def _update_or_init_po_files(target, source, env):
""" Action function for `POUpdate` builder """
import SCons.Action
from SCons.Tool.GettextCommon import _init_po_files
for tgt in target:
if tgt.rexists():
action = SCons.Action.Action('$MSGMERGECOM', '$MSGMERGECOMSTR')
else:
action = _init_po_files
status = action([tgt], source, env)
if status : return status
return 0
#############################################################################
#############################################################################
def _POUpdateBuilder(env, **kw):
""" Create an object of `POUpdate` builder """
import SCons.Action
from SCons.Tool.GettextCommon import _POFileBuilder
action = SCons.Action.Action(_update_or_init_po_files, None)
return _POFileBuilder(env, action=action, target_alias='$POUPDATE_ALIAS')
#############################################################################
#############################################################################
from SCons.Environment import _null
#############################################################################
def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw):
""" Wrapper for `POUpdate` builder - make user's life easier """
if source is _null:
if 'POTDOMAIN' in kw:
domain = kw['POTDOMAIN']
elif 'POTDOMAIN' in env and env['POTDOMAIN']:
domain = env['POTDOMAIN']
else:
domain = 'messages'
source = [ domain ] # NOTE: Suffix shall be appended automatically
return env._POUpdateBuilder(target, source, **kw)
#############################################################################
#############################################################################
def generate(env,**kw):
""" Generate the `msgmerge` tool """
import sys
import os
import SCons.Tool
from SCons.Tool.GettextCommon import _detect_msgmerge
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
if sys.platform == 'win32':
msgmerge = SCons.Tool.find_program_path(env, 'msgmerge', default_paths=MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS )
if msgmerge:
msgmerge_bin_dir = os.path.dirname(msgmerge)
env.AppendENVPath('PATH', msgmerge_bin_dir)
else:
SCons.Warnings.Warning('msgmerge tool requested, but binary not found in ENV PATH')
try:
env['MSGMERGE'] = _detect_msgmerge(env)
except:
env['MSGMERGE'] = 'msgmerge'
env.SetDefault(
POTSUFFIX = ['.pot'],
POSUFFIX = ['.po'],
MSGMERGECOM = '$MSGMERGE $MSGMERGEFLAGS --update $TARGET $SOURCE',
MSGMERGECOMSTR = '',
MSGMERGEFLAGS = [ ],
POUPDATE_ALIAS = 'po-update'
)
env.Append(BUILDERS = { '_POUpdateBuilder':_POUpdateBuilder(env) })
env.AddMethod(_POUpdateBuilderWrapper, 'POUpdate')
env.AlwaysBuild(env.Alias('$POUPDATE_ALIAS'))
#############################################################################
#############################################################################
def exists(env):
""" Check if the tool exists """
from SCons.Tool.GettextCommon import _msgmerge_exists
try:
return _msgmerge_exists(env)
except:
return False
#############################################################################
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
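A hedged usage sketch (names are examples): POUpdate merges existing catalogs against the template via $MSGMERGECOM and falls back to _init_po_files for catalogs that do not exist yet.

# SConstruct sketch for the msgmerge tool (names are examples).
env = Environment(tools=['default', 'msgmerge'], POAUTOINIT=True)
# en.po and pl.po are updated from messages.pot; the targets are also
# reachable through the 'po-update' alias defined above.
env.POUpdate(['en', 'pl'], ['messages'])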

View File

@ -1,169 +0,0 @@
"""SCons.Tool.yacc
Tool-specific initialization for yacc.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/yacc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os.path
import sys
import SCons.Action
import SCons.Defaults
import SCons.Tool
import SCons.Util
import SCons.Warnings
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
from SCons.Platform.win32 import CHOCO_DEFAULT_PATH
YaccAction = SCons.Action.Action("$YACCCOM", "$YACCCOMSTR")
if sys.platform == 'win32':
BINS = ['bison', 'yacc', 'win_bison']
else:
BINS = ["bison", "yacc"]
def _yaccEmitter(target, source, env, ysuf, hsuf):
yaccflags = env.subst("$YACCFLAGS", target=target, source=source)
flags = SCons.Util.CLVar(yaccflags)
targetBase, targetExt = os.path.splitext(SCons.Util.to_String(target[0]))
if '.ym' in ysuf: # If using Objective-C
target = [targetBase + ".m"] # the extension is ".m".
# If -d is specified on the command line, yacc will emit a .h
# or .hpp file with the same name as the .c or .cpp output file.
if '-d' in flags:
target.append(targetBase + env.subst(hsuf, target=target, source=source))
# If -g is specified on the command line, yacc will emit a .vcg
# file with the same base name as the .y, .yacc, .ym or .yy file.
if "-g" in flags:
base, ext = os.path.splitext(SCons.Util.to_String(source[0]))
target.append(base + env.subst("$YACCVCGFILESUFFIX"))
# If -v is specified yacc will create the output debug file
# which is not really source for any process, but should
# be noted and also be cleaned
# Bug #2558
if "-v" in flags:
env.SideEffect(targetBase+'.output',target[0])
env.Clean(target[0],targetBase+'.output')
# With --defines and --graph, the name of the file is totally defined
# in the options.
fileGenOptions = ["--defines=", "--graph="]
for option in flags:
for fileGenOption in fileGenOptions:
l = len(fileGenOption)
if option[:l] == fileGenOption:
# A file generating option is present, so add the file
# name to the list of targets.
fileName = option[l:].strip()
target.append(fileName)
return (target, source)
def yEmitter(target, source, env):
return _yaccEmitter(target, source, env, ['.y', '.yacc'], '$YACCHFILESUFFIX')
def ymEmitter(target, source, env):
return _yaccEmitter(target, source, env, ['.ym'], '$YACCHFILESUFFIX')
def yyEmitter(target, source, env):
return _yaccEmitter(target, source, env, ['.yy'], '$YACCHXXFILESUFFIX')
def get_yacc_path(env, append_paths=False):
"""
Find the path to the yacc tool, searching several possible names
Only called in the Windows case, so the default_path
can be Windows-specific
:param env: current construction environment
:param append_paths: if set, add the path to the tool to PATH
:return: path to yacc tool, if found
"""
for prog in BINS:
bin_path = SCons.Tool.find_program_path(
env,
prog,
default_paths=CHOCO_DEFAULT_PATH + MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS )
if bin_path:
if append_paths:
env.AppendENVPath('PATH', os.path.dirname(bin_path))
return bin_path
SCons.Warnings.Warning('yacc tool requested, but yacc or bison binary not found in ENV PATH')
def generate(env):
"""Add Builders and construction variables for yacc to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action('.y', YaccAction)
c_file.add_emitter('.y', yEmitter)
c_file.add_action('.yacc', YaccAction)
c_file.add_emitter('.yacc', yEmitter)
# Objective-C
c_file.add_action('.ym', YaccAction)
c_file.add_emitter('.ym', ymEmitter)
# C++
cxx_file.add_action('.yy', YaccAction)
cxx_file.add_emitter('.yy', yyEmitter)
if sys.platform == 'win32':
# ignore the return, all we need is for the path to be added
_ = get_yacc_path(env, append_paths=True)
env["YACC"] = env.Detect(BINS)
env['YACCFLAGS'] = SCons.Util.CLVar('')
env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES'
env['YACCHFILESUFFIX'] = '.h'
env['YACCHXXFILESUFFIX'] = '.hpp'
env['YACCVCGFILESUFFIX'] = '.vcg'
def exists(env):
if sys.platform == 'win32':
return get_yacc_path(env)
else:
return env.Detect(BINS)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
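A short, hedged usage sketch (file names are examples): the emitter above is what makes the '-d' header and the '-v' report show up as tracked outputs.

# SConstruct sketch for the yacc tool (file names are examples).
env = Environment(tools=['default', 'yacc'])
# '-d' makes yEmitter add parser.h ($YACCHFILESUFFIX) to the targets; adding
# '-v' would also register parser.output as a side effect to be cleaned.
env.CFile(target='parser.c', source='parser.y', YACCFLAGS='-d')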

View File

@ -1,92 +0,0 @@
"""SCons.Tool.zip
Tool-specific initialization for zip.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/zip.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os.path
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Node.FS
import SCons.Util
import zipfile
zipcompression = zipfile.ZIP_DEFLATED
def zip(target, source, env):
compression = env.get('ZIPCOMPRESSION', 0)
zf = zipfile.ZipFile(str(target[0]), 'w', compression)
for s in source:
if s.isdir():
for dirpath, dirnames, filenames in os.walk(str(s)):
for fname in filenames:
path = os.path.join(dirpath, fname)
if os.path.isfile(path):
zf.write(path, os.path.relpath(path, str(env.get('ZIPROOT', ''))))
else:
zf.write(str(s), os.path.relpath(str(s), str(env.get('ZIPROOT', ''))))
zf.close()
zipAction = SCons.Action.Action(zip, varlist=['ZIPCOMPRESSION'])
ZipBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$ZIPCOM', '$ZIPCOMSTR'),
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$ZIPSUFFIX',
multi = 1)
def generate(env):
"""Add Builders and construction variables for zip to an Environment."""
try:
bld = env['BUILDERS']['Zip']
except KeyError:
bld = ZipBuilder
env['BUILDERS']['Zip'] = bld
env['ZIP'] = 'zip'
env['ZIPFLAGS'] = SCons.Util.CLVar('')
env['ZIPCOM'] = zipAction
env['ZIPCOMPRESSION'] = zipcompression
env['ZIPSUFFIX'] = '.zip'
env['ZIPROOT'] = SCons.Util.CLVar('')
def exists(env):
return True
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
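A hedged usage sketch (paths are examples): directories given as sources are walked recursively by the zip() action above, and ZIPROOT controls the path prefix stored inside the archive.

# SConstruct sketch for the zip tool (paths are examples).
env = Environment(tools=['default', 'zip'])
# Stores bin/... and README.txt inside release.zip, with the leading 'build/'
# component stripped because of ZIPROOT.
env.Zip('release.zip', ['build/bin', 'build/README.txt'], ZIPROOT='build')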

File diff suppressed because it is too large Load Diff

View File

@ -1,89 +0,0 @@
"""engine.SCons.Variables.BoolVariable
This file defines the option type for SCons implementing true/false values.
Usage example::
opts = Variables()
opts.Add(BoolVariable('embedded', 'build for an embedded system', 0))
...
if env['embedded'] == 1:
...
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/BoolVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__all__ = ['BoolVariable',]
import SCons.Errors
__true_strings = ('y', 'yes', 'true', 't', '1', 'on' , 'all' )
__false_strings = ('n', 'no', 'false', 'f', '0', 'off', 'none')
def _text2bool(val):
"""
Converts strings to True/False depending on the 'truth' expressed by
the string. If the string can't be converted, the original value
will be returned.
See '__true_strings' and '__false_strings' for values considered
    'true' or 'false' respectively.
This is usable as 'converter' for SCons' Variables.
"""
lval = val.lower()
if lval in __true_strings: return True
if lval in __false_strings: return False
raise ValueError("Invalid value for boolean option: %s" % val)
def _validator(key, val, env):
"""
Validates the given value to be either '0' or '1'.
This is usable as 'validator' for SCons' Variables.
"""
if not env[key] in (True, False):
raise SCons.Errors.UserError(
'Invalid value for boolean option %s: %s' % (key, env[key]))
def BoolVariable(key, help, default):
"""
The input parameters describe a boolean option, thus they are
returned with the correct converter and validator appended. The
    'help' text will be appended by '(yes|no)' to show the valid
    values. The result is usable for input to opts.Add().
"""
return (key, '%s (yes|no)' % help, default,
_validator, _text2bool)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
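For reference, the BoolVariable helper above is consumed from an SConstruct roughly like this; a minimal sketch in which the variable name 'use_sasl' and the define are purely illustrative (Variables, BoolVariable, Environment and ARGUMENTS are names SCons provides in SConstruct files):

opts = Variables(None, ARGUMENTS)
# _text2bool converts 'yes'/'no', 'on'/'off', 't'/'f', '1'/'0', etc. into True/False
opts.Add(BoolVariable('use_sasl', 'build with SASL support', False))
env = Environment(variables=opts)
if env['use_sasl']:
    env.Append(CPPDEFINES=['HAVE_SASL'])  # illustrative define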

View File

@ -1,101 +0,0 @@
"""engine.SCons.Variables.EnumVariable
This file defines the option type for SCons allowing only specified
input-values.
Usage example::
opts = Variables()
opts.Add(EnumVariable('debug', 'debug output and symbols', 'no',
allowed_values=('yes', 'no', 'full'),
map={}, ignorecase=2))
...
if env['debug'] == 'full':
...
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/EnumVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__all__ = ['EnumVariable',]
import SCons.Errors
def _validator(key, val, env, vals):
if val not in vals:
raise SCons.Errors.UserError(
'Invalid value for option %s: %s. Valid values are: %s' % (key, val, vals))
def EnumVariable(key, help, default, allowed_values, map={}, ignorecase=0):
"""
The input parameters describe an option with only certain values
allowed. They are returned with an appropriate converter and
validator appended. The result is usable for input to
Variables.Add().
'key' and 'default' are the values to be passed on to Variables.Add().
'help' will be appended by the allowed values automatically
'allowed_values' is a list of strings, which are allowed as values
for this option.
The 'map'-dictionary may be used for converting the input value
into canonical values (e.g. for aliases).
'ignorecase' defines the behaviour of the validator:
If ignorecase == 0, the validator/converter are case-sensitive.
If ignorecase == 1, the validator/converter are case-insensitive.
If ignorecase == 2, the validator/converter is case-insensitive and the converted value will always be lower-case.
The 'validator' tests whether the value is in the list of allowed values. The 'converter' converts input values
according to the given 'map'-dictionary (unmapped input values are returned unchanged).
"""
help = '%s (%s)' % (help, '|'.join(allowed_values))
# define validator
if ignorecase >= 1:
validator = lambda key, val, env: \
_validator(key, val.lower(), env, allowed_values)
else:
validator = lambda key, val, env: \
_validator(key, val, env, allowed_values)
# define converter
if ignorecase == 2:
converter = lambda val: map.get(val.lower(), val).lower()
elif ignorecase == 1:
converter = lambda val: map.get(val.lower(), val)
else:
converter = lambda val: map.get(val, val)
return (key, help, default, validator, converter)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
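A minimal sketch of how the EnumVariable helper above behaves with a map and ignorecase=2; the variable name and values are illustrative, and the SConstruct-provided names are assumed as before:

opts = Variables(None, ARGUMENTS)
# ignorecase=2: validation is case-insensitive and the stored value is lower-cased
opts.Add(EnumVariable('opt_level', 'optimization level', 'on',
                      allowed_values=('off', 'on', 'size'),
                      map={'0': 'off', '3': 'on'}, ignorecase=2))
env = Environment(variables=opts)
# `scons opt_level=ON` stores 'on', `scons opt_level=3` is mapped to 'on',
# and anything outside allowed_values is rejected during Update().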

View File

@ -1,135 +0,0 @@
"""engine.SCons.Variables.ListVariable
This file defines the option type for SCons implementing 'lists'.
A 'list' option may either be 'all', 'none' or a list of names
separated by comma. After the option has been processed, the option
value holds either the named list elements, all list elements or no
list elements at all.
Usage example::
list_of_libs = Split('x11 gl qt ical')
opts = Variables()
opts.Add(ListVariable('shared',
'libraries to build as shared libraries',
'all',
elems = list_of_libs))
...
for lib in list_of_libs:
if lib in env['shared']:
env.SharedObject(...)
else:
env.Object(...)
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Variables/ListVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
# Known Bug: This should behave like a Set-Type, but does not really,
# since elements can occur twice.
__all__ = ['ListVariable',]
import collections
import SCons.Util
class _ListVariable(collections.UserList):
def __init__(self, initlist=[], allowedElems=[]):
collections.UserList.__init__(self, [_f for _f in initlist if _f])
self.allowedElems = sorted(allowedElems)
def __cmp__(self, other):
raise NotImplementedError
def __eq__(self, other):
raise NotImplementedError
def __ge__(self, other):
raise NotImplementedError
def __gt__(self, other):
raise NotImplementedError
def __le__(self, other):
raise NotImplementedError
def __lt__(self, other):
raise NotImplementedError
def __str__(self):
if len(self) == 0:
return 'none'
self.data.sort()
if self.data == self.allowedElems:
return 'all'
else:
return ','.join(self)
def prepare_to_store(self):
return self.__str__()
def _converter(val, allowedElems, mapdict):
"""
"""
if val == 'none':
val = []
elif val == 'all':
val = allowedElems
else:
val = [_f for _f in val.split(',') if _f]
val = [mapdict.get(v, v) for v in val]
notAllowed = [v for v in val if v not in allowedElems]
if notAllowed:
raise ValueError("Invalid value(s) for option: %s" %
','.join(notAllowed))
return _ListVariable(val, allowedElems)
## def _validator(key, val, env):
## """
## """
## # todo: write validator for pgk list
## return 1
def ListVariable(key, help, default, names, map={}):
"""
The input parameters describe a 'package list' option, thus they
are returned with the correct converter and validator appended. The
result is usable for input to opts.Add() .
A 'package list' option may either be 'all', 'none' or a list of
package names (separated by space).
"""
names_str = 'allowed names: %s' % ' '.join(names)
if SCons.Util.is_List(default):
default = ','.join(default)
help = '\n '.join(
(help, '(all|none|comma-separated list of names)', names_str))
return (key, help, default,
None, #_validator,
lambda val: _converter(val, names, map))
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
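A minimal sketch of the ListVariable helper above, again with illustrative names:

debug_helpers = ['gdb', 'lldb', 'valgrind']
opts = Variables(None, ARGUMENTS)
opts.Add(ListVariable('debug_tools', 'debug helpers to enable', 'none',
                      names=debug_helpers))
env = Environment(variables=opts)
# `scons debug_tools=all` selects every name, `debug_tools=none` selects nothing,
# `debug_tools=gdb,valgrind` selects just those two, and unknown names are rejected.
if 'gdb' in env['debug_tools']:
    env.Append(CPPDEFINES=['ENABLE_GDB_HELPERS'])  # illustrative define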

View File

@ -1,106 +0,0 @@
"""engine.SCons.Variables.PackageVariable
This file defines the option type for SCons implementing 'package
activation'.
To be used whenever a 'package' may be enabled/disabled and the
package path may be specified.
Usage example:
Examples:
x11=no (disables X11 support)
x11=yes (will search for the package installation dir)
x11=/usr/local/X11 (will check this path for existence)
To replace autoconf's --with-xxx=yyy ::
opts = Variables()
opts.Add(PackageVariable('x11',
        'use X11 installed here (yes = search some places)',
'yes'))
...
if env['x11'] == True:
dir = ... search X11 in some standard places ...
env['x11'] = dir
if env['x11']:
... build with x11 ...
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PackageVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__all__ = ['PackageVariable',]
import SCons.Errors
__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search')
__disable_strings = ('0', 'no', 'false', 'off', 'disable')
def _converter(val):
"""
"""
lval = val.lower()
if lval in __enable_strings: return True
if lval in __disable_strings: return False
#raise ValueError("Invalid value for boolean option: %s" % val)
return val
def _validator(key, val, env, searchfunc):
# NB: searchfunc is currently undocumented and unsupported
"""
"""
# TODO write validator, check for path
import os
if env[key] is True:
if searchfunc:
env[key] = searchfunc(key, val)
elif env[key] and not os.path.exists(val):
raise SCons.Errors.UserError(
'Path does not exist for option %s: %s' % (key, val))
def PackageVariable(key, help, default, searchfunc=None):
# NB: searchfunc is currently undocumented and unsupported
"""
    The input parameters describe a 'package' option, thus they
    are returned with the correct converter and validator appended. The
    result is usable for input to opts.Add().
    A 'package' option may either be 'yes', 'no' or a path to the
    package installation.
"""
help = '\n '.join(
(help, '( yes | no | /path/to/%s )' % key))
return (key, help, default,
lambda k, v, e: _validator(k,v,e,searchfunc),
_converter)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,145 +0,0 @@
"""SCons.Variables.PathVariable
This file defines an option type for SCons implementing path settings.
To be used whenever a user-specified path override should be allowed.
Arguments to PathVariable are:
option-name = name of this option on the command line (e.g. "prefix")
option-help = help string for option
option-dflt = default value for this option
validator = [optional] validator for option value. Predefined validators are:
PathAccept -- accepts any path setting; no validation
PathIsDir -- path must be an existing directory
PathIsDirCreate -- path must be a dir; will create
PathIsFile -- path must be a file
PathExists -- path must exist (any type) [default]
The validator is a function that is called and which
should return True or False to indicate if the path
is valid. The arguments to the validator function
are: (key, val, env). The key is the name of the
option, the val is the path specified for the option,
and the env is the env to which the Options have been
added.
Usage example::
Examples:
prefix=/usr/local
opts = Variables()
opts = Variables()
opts.Add(PathVariable('qtdir',
'where the root of Qt is installed',
qtdir, PathIsDir))
opts.Add(PathVariable('qt_includes',
'where the Qt includes are installed',
'$qtdir/includes', PathIsDirCreate))
opts.Add(PathVariable('qt_libraries',
'where the Qt library is installed',
'$qtdir/lib'))
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PathVariable.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__all__ = ['PathVariable',]
import os
import os.path
import SCons.Errors
import SCons.Util
class _PathVariableClass(object):
def PathAccept(self, key, val, env):
"""Accepts any path, no checking done."""
pass
def PathIsDir(self, key, val, env):
"""Validator to check if Path is a directory."""
if not os.path.isdir(val):
if os.path.isfile(val):
m = 'Directory path for option %s is a file: %s'
else:
m = 'Directory path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def PathIsDirCreate(self, key, val, env):
"""Validator to check if Path is a directory,
creating it if it does not exist."""
if os.path.isfile(val):
m = 'Path for option %s is a file, not a directory: %s'
raise SCons.Errors.UserError(m % (key, val))
if not os.path.isdir(val):
os.makedirs(val)
def PathIsFile(self, key, val, env):
"""Validator to check if Path is a file"""
if not os.path.isfile(val):
if os.path.isdir(val):
m = 'File path for option %s is a directory: %s'
else:
m = 'File path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def PathExists(self, key, val, env):
"""Validator to check if Path exists"""
if not os.path.exists(val):
m = 'Path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def __call__(self, key, help, default, validator=None):
"""
        The input parameters describe a 'path' option, thus they
are returned with the correct converter and validator appended. The
result is usable for input to opts.Add() .
The 'default' option specifies the default path to use if the
user does not specify an override with this option.
validator is a validator, see this file for examples
"""
if validator is None:
validator = self.PathExists
if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
return (key, '%s ( /path/to/%s )' % (help, key[0]), default,
validator, None)
else:
return (key, '%s ( /path/to/%s )' % (help, key), default,
validator, None)
PathVariable = _PathVariableClass()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
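A minimal sketch of the PathVariable helper and its bundled validators above; the variable name and default path are illustrative:

opts = Variables(None, ARGUMENTS)
# PathIsDirCreate creates the directory if it is missing; PathAccept skips validation,
# and PathExists (the default) only requires that the path exist.
opts.Add(PathVariable('CACHE_DIR', 'where to keep the build cache',
                      '/tmp/build-cache', PathVariable.PathIsDirCreate))
env = Environment(variables=opts)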

View File

@ -1,327 +0,0 @@
"""engine.SCons.Variables
This file defines the Variables class that is used to add user-friendly
customizable variables to an SCons build.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Variables/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os.path
import sys
from functools import cmp_to_key
import SCons.Environment
import SCons.Errors
import SCons.Util
import SCons.Warnings
from .BoolVariable import BoolVariable # okay
from .EnumVariable import EnumVariable # okay
from .ListVariable import ListVariable # naja
from .PackageVariable import PackageVariable # naja
from .PathVariable import PathVariable # okay
class Variables(object):
instance=None
"""
Holds all the options, updates the environment with the variables,
and renders the help text.
"""
def __init__(self, files=None, args=None, is_global=1):
"""
files - [optional] List of option configuration files to load
(backward compatibility) If a single string is passed it is
automatically placed in a file list
"""
# initialize arguments
if files is None:
files = []
if args is None:
args = {}
self.options = []
self.args = args
if not SCons.Util.is_List(files):
if files:
files = [ files ]
else:
files = []
self.files = files
self.unknown = {}
# create the singleton instance
if is_global:
            if not Variables.instance:
                Variables.instance = self
def _do_add(self, key, help="", default=None, validator=None, converter=None):
class Variable(object):
pass
option = Variable()
# if we get a list or a tuple, we take the first element as the
# option key and store the remaining in aliases.
if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
option.key = key[0]
option.aliases = key[1:]
else:
option.key = key
option.aliases = [ key ]
option.help = help
option.default = default
option.validator = validator
option.converter = converter
self.options.append(option)
# options might be added after the 'unknown' dict has been set up,
# so we remove the key and all its aliases from that dict
for alias in list(option.aliases) + [ option.key ]:
if alias in self.unknown:
del self.unknown[alias]
def keys(self):
"""
Returns the keywords for the options
"""
return [o.key for o in self.options]
def Add(self, key, help="", default=None, validator=None, converter=None, **kw):
"""
Add an option.
@param key: the name of the variable, or a list or tuple of arguments
@param help: optional help text for the options
@param default: optional default value
@param validator: optional function that is called to validate the option's value
@type validator: Called with (key, value, environment)
@param converter: optional function that is called to convert the option's value before putting it in the environment.
"""
if SCons.Util.is_List(key) or isinstance(key, tuple):
self._do_add(*key)
return
if not SCons.Util.is_String(key) or \
not SCons.Environment.is_valid_construction_var(key):
raise SCons.Errors.UserError("Illegal Variables.Add() key `%s'" % str(key))
self._do_add(key, help, default, validator, converter)
def AddVariables(self, *optlist):
"""
Add a list of options.
Each list element is a tuple/list of arguments to be passed on
to the underlying method for adding options.
Example::
opt.AddVariables(
('debug', '', 0),
('CC', 'The C compiler'),
('VALIDATE', 'An option for testing validation', 'notset',
validator, None),
)
"""
for o in optlist:
self._do_add(*o)
def Update(self, env, args=None):
"""
Update an environment with the option variables.
env - the environment to update.
"""
values = {}
# first set the defaults:
for option in self.options:
if option.default is not None:
values[option.key] = option.default
# next set the value specified in the options file
for filename in self.files:
if os.path.exists(filename):
dir = os.path.split(os.path.abspath(filename))[0]
if dir:
sys.path.insert(0, dir)
try:
values['__name__'] = filename
with open(filename, 'r') as f:
contents = f.read()
exec(contents, {}, values)
finally:
if dir:
del sys.path[0]
del values['__name__']
# set the values specified on the command line
if args is None:
args = self.args
for arg, value in args.items():
added = False
for option in self.options:
if arg in list(option.aliases) + [ option.key ]:
values[option.key] = value
added = True
if not added:
self.unknown[arg] = value
# put the variables in the environment:
# (don't copy over variables that are not declared as options)
for option in self.options:
try:
env[option.key] = values[option.key]
except KeyError:
pass
# Call the convert functions:
for option in self.options:
if option.converter and option.key in values:
value = env.subst('${%s}'%option.key)
try:
try:
env[option.key] = option.converter(value)
except TypeError:
env[option.key] = option.converter(value, env)
except ValueError as x:
raise SCons.Errors.UserError('Error converting option: %s\n%s'%(option.key, x))
# Finally validate the values:
for option in self.options:
if option.validator and option.key in values:
option.validator(option.key, env.subst('${%s}'%option.key), env)
def UnknownVariables(self):
"""
Returns any options in the specified arguments lists that
were not known, declared options in this object.
"""
return self.unknown
def Save(self, filename, env):
"""
Saves all the options in the given file. This file can
then be used to load the options next run. This can be used
to create an option cache file.
filename - Name of the file to save into
env - the environment get the option values from
"""
# Create the file and write out the header
try:
fh = open(filename, 'w')
try:
# Make an assignment in the file for each option
# within the environment that was assigned a value
# other than the default.
for option in self.options:
try:
value = env[option.key]
try:
prepare = value.prepare_to_store
except AttributeError:
try:
eval(repr(value))
except KeyboardInterrupt:
raise
except:
# Convert stuff that has a repr() that
# cannot be evaluated into a string
value = SCons.Util.to_String(value)
else:
value = prepare()
defaultVal = env.subst(SCons.Util.to_String(option.default))
if option.converter:
defaultVal = option.converter(defaultVal)
if str(env.subst('${%s}' % option.key)) != str(defaultVal):
fh.write('%s = %s\n' % (option.key, repr(value)))
except KeyError:
pass
finally:
fh.close()
except IOError as x:
raise SCons.Errors.UserError('Error writing options to file: %s\n%s' % (filename, x))
def GenerateHelpText(self, env, sort=None):
"""
Generate the help text for the options.
env - an environment that is used to get the current values
of the options.
        sort - Either a comparison function that takes two arguments and
               returns -1, 0 or 1, or a boolean indicating whether the
               options should be sorted alphabetically by key.
"""
if callable(sort):
options = sorted(self.options, key=cmp_to_key(lambda x,y: sort(x.key,y.key)))
elif sort is True:
options = sorted(self.options, key=lambda x: x.key)
else:
options = self.options
def format(opt, self=self, env=env):
if opt.key in env:
actual = env.subst('${%s}' % opt.key)
else:
actual = None
return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default, actual, opt.aliases)
lines = [_f for _f in map(format, options) if _f]
return ''.join(lines)
format = '\n%s: %s\n default: %s\n actual: %s\n'
format_ = '\n%s: %s\n default: %s\n actual: %s\n aliases: %s\n'
def FormatVariableHelpText(self, env, key, help, default, actual, aliases=[]):
# Don't display the key name itself as an alias.
aliases = [a for a in aliases if a != key]
if len(aliases)==0:
return self.format % (key, help, default, actual)
else:
return self.format_ % (key, help, default, actual, aliases)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
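Putting the pieces together, the Variables class above is typically driven like this from an SConstruct; a minimal sketch in which the options-file name is illustrative (Environment(variables=...) applies Update(), and Help is an SConstruct-provided function):

opts = Variables('build_options.py', ARGUMENTS)  # precedence: defaults, then file, then command line
opts.Add('CC', 'The C compiler')
opts.Add(BoolVariable('verbose', 'print full command lines', False))
env = Environment(variables=opts)                # applies opts.Update(env)
unknown = opts.UnknownVariables()
if unknown:
    print("Unknown variables: %s" % ' '.join(unknown.keys()))
opts.Save('build_options.py', env)               # persist non-default values for the next run
Help(opts.GenerateHelpText(env))                 # shown by `scons -h`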

View File

@ -1,233 +0,0 @@
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""SCons.Warnings
This file implements the warnings framework for SCons.
"""
__revision__ = "src/engine/SCons/Warnings.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import sys
import SCons.Errors
class Warning(SCons.Errors.UserError):
pass
class WarningOnByDefault(Warning):
pass
# NOTE: If you add a new warning class, add it to the man page, too!
class TargetNotBuiltWarning(Warning): # Should go to OnByDefault
pass
class CacheVersionWarning(WarningOnByDefault):
pass
class CacheWriteErrorWarning(Warning):
pass
class CorruptSConsignWarning(WarningOnByDefault):
pass
class DependencyWarning(Warning):
pass
class DevelopmentVersionWarning(WarningOnByDefault):
pass
class DuplicateEnvironmentWarning(WarningOnByDefault):
pass
class FutureReservedVariableWarning(WarningOnByDefault):
pass
class LinkWarning(WarningOnByDefault):
pass
class MisleadingKeywordsWarning(WarningOnByDefault):
pass
class MissingSConscriptWarning(WarningOnByDefault):
pass
class NoObjectCountWarning(WarningOnByDefault):
pass
class NoParallelSupportWarning(WarningOnByDefault):
pass
class ReservedVariableWarning(WarningOnByDefault):
pass
class StackSizeWarning(WarningOnByDefault):
pass
class VisualCMissingWarning(WarningOnByDefault):
pass
# Used when MSVC_VERSION and MSVS_VERSION do not point to the
# same version (MSVS_VERSION is deprecated)
class VisualVersionMismatch(WarningOnByDefault):
pass
class VisualStudioMissingWarning(Warning):
pass
class FortranCxxMixWarning(LinkWarning):
pass
# Deprecation warnings
class FutureDeprecatedWarning(Warning):
pass
class DeprecatedWarning(Warning):
pass
class MandatoryDeprecatedWarning(DeprecatedWarning):
pass
# Special case; base always stays DeprecatedWarning
class PythonVersionWarning(DeprecatedWarning):
pass
class DeprecatedSourceCodeWarning(FutureDeprecatedWarning):
pass
class TaskmasterNeedsExecuteWarning(DeprecatedWarning):
pass
class DeprecatedOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedDebugOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedMissingSConscriptWarning(DeprecatedWarning):
pass
# The below is a list of 2-tuples. The first element is a class object.
# The second element is true if that class is enabled, false if it is disabled.
_enabled = []
# If set, raise the warning as an exception
_warningAsException = 0
# If not None, a function to call with the warning
_warningOut = None
def suppressWarningClass(clazz):
"""Suppresses all warnings that are of type clazz or
derived from clazz."""
_enabled.insert(0, (clazz, 0))
def enableWarningClass(clazz):
"""Enables all warnings that are of type clazz or
derived from clazz."""
_enabled.insert(0, (clazz, 1))
def warningAsException(flag=1):
"""Turn warnings into exceptions. Returns the old value of the flag."""
global _warningAsException
old = _warningAsException
_warningAsException = flag
return old
def warn(clazz, *args):
global _enabled, _warningAsException, _warningOut
warning = clazz(args)
for cls, flag in _enabled:
if isinstance(warning, cls):
if flag:
if _warningAsException:
raise warning
if _warningOut:
_warningOut(warning)
break
def process_warn_strings(arguments):
"""Process requests to enable/disable warnings.
The requests are strings passed to the --warn option or the
SetOption('warn') function.
An argument to this option should be of the form <warning-class>
or no-<warning-class>. The warning class is munged in order
to get an actual class name from the classes above, which we
need to pass to the {enable,disable}WarningClass() functions.
The supplied <warning-class> is split on hyphens, each element
is capitalized, then smushed back together. Then the string
"Warning" is appended to get the class name.
For example, 'deprecated' will enable the DeprecatedWarning
class. 'no-dependency' will disable the DependencyWarning class.
As a special case, --warn=all and --warn=no-all will enable or
disable (respectively) the base Warning class of all warnings.
"""
def _capitalize(s):
if s[:5] == "scons":
return "SCons" + s[5:]
else:
return s.capitalize()
for arg in arguments:
elems = arg.lower().split('-')
enable = 1
if elems[0] == 'no':
enable = 0
del elems[0]
if len(elems) == 1 and elems[0] == 'all':
class_name = "Warning"
else:
class_name = ''.join(map(_capitalize, elems)) + "Warning"
try:
clazz = globals()[class_name]
except KeyError:
sys.stderr.write("No warning type: '%s'\n" % arg)
else:
if enable:
enableWarningClass(clazz)
elif issubclass(clazz, MandatoryDeprecatedWarning):
fmt = "Can not disable mandataory warning: '%s'\n"
sys.stderr.write(fmt % arg)
else:
suppressWarningClass(clazz)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
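To illustrate the name munging that process_warn_strings() above performs, a minimal sketch (the warning text is illustrative; SCons.Script normally installs the output handler that warn() writes to):

import SCons.Warnings as W

# `scons --warn=no-duplicate-environment --warn=dependency` ends up calling:
W.process_warn_strings(['no-duplicate-environment', 'dependency'])
# 'no-duplicate-environment' -> suppressWarningClass(DuplicateEnvironmentWarning)
# 'dependency'               -> enableWarningClass(DependencyWarning)
W.warn(W.DependencyWarning, "illustrative dependency warning")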

View File

@ -1,4 +0,0 @@
import SCons.Script
# this does all the work, and calls sys.exit
# with the proper exit status when done.
SCons.Script.main()

View File

@ -1,203 +0,0 @@
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = """
SCons compatibility package for old Python versions
This subpackage holds modules that provide backwards-compatible
implementations of various things that we'd like to use in SCons but which
only show up in later versions of Python than the early, old version(s)
we still support.
Other code will not generally reference things in this package through
the SCons.compat namespace. The modules included here add things to
the builtins namespace or the global module list so that the rest
of our code can use the objects and names imported here regardless of
Python version.
The rest of the things here will be in individual compatibility modules
that are either: 1) suitably modified copies of the future modules that
we want to use; or 2) backwards compatible re-implementations of the
specific portions of a future module's API that we want to use.
GENERAL WARNINGS: Implementations of functions in the SCons.compat
modules are *NOT* guaranteed to be fully compliant with these functions in
later versions of Python. We are only concerned with adding functionality
that we actually use in SCons, so be wary if you lift this code for
other uses. (That said, making these more nearly the same as later,
official versions is still a desirable goal, we just don't need to be
obsessive about it.)
We name the compatibility modules with an initial '_scons_' (for example,
_scons_subprocess.py is our compatibility module for subprocess) so
that we can still try to import the real module name and fall back to
our compatibility module if we get an ImportError. The import_as()
function defined below loads the module as the "real" name (without the
'_scons'), after which all of the "import {module}" statements in the
rest of our code will find our pre-loaded compatibility module.
"""
__revision__ = "src/engine/SCons/compat/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os
import sys
import importlib
PYPY = hasattr(sys, 'pypy_translation_info')
def rename_module(new, old):
"""
Attempt to import the old module and load it under the new name.
Used for purely cosmetic name changes in Python 3.x.
"""
try:
sys.modules[new] = importlib.import_module(old)
return True
except ImportError:
return False
# TODO: FIXME
# In 3.x, 'pickle' automatically loads the fast version if available.
rename_module('pickle', 'cPickle')
# Default pickle protocol. Higher protocols are more efficient/featureful
# but incompatible with older Python versions. On Python 2.7 this is 2.
# Negative numbers choose the highest available protocol.
import pickle
# Was pickle.HIGHEST_PROTOCOL
# Changed to 2 so py3.5+'s pickle will be compatible with py2.7.
PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL
# TODO: FIXME
# In 3.x, 'profile' automatically loads the fast version if available.
rename_module('profile', 'cProfile')
# TODO: FIXME
# Before Python 3.0, the 'queue' module was named 'Queue'.
rename_module('queue', 'Queue')
# TODO: FIXME
# Before Python 3.0, the 'winreg' module was named '_winreg'
rename_module('winreg', '_winreg')
# Python 3 moved builtin intern() to sys package
# To make porting easier, make intern always live
# in sys package (for python 2.7.x)
try:
sys.intern
except AttributeError:
# We must be using python 2.7.x so monkey patch
# intern into the sys package
sys.intern = intern
# UserDict, UserList, UserString are in # collections for 3.x,
# but standalone in 2.7.x. Monkey-patch into collections for 2.7.
import collections
try:
collections.UserDict
except AttributeError:
from UserDict import UserDict as _UserDict
collections.UserDict = _UserDict
del _UserDict
try:
collections.UserList
except AttributeError:
from UserList import UserList as _UserList
collections.UserList = _UserList
del _UserList
try:
collections.UserString
except AttributeError:
from UserString import UserString as _UserString
collections.UserString = _UserString
del _UserString
import shutil
try:
shutil.SameFileError
except AttributeError:
class SameFileError(Exception):
pass
shutil.SameFileError = SameFileError
def with_metaclass(meta, *bases):
"""
Function from jinja2/_compat.py. License: BSD.
Use it like this::
class BaseForm(object):
pass
class FormType(type):
pass
class Form(with_metaclass(FormType, BaseForm)):
pass
This requires a bit of explanation: the basic idea is to make a
dummy metaclass for one level of class instantiation that replaces
itself with the actual metaclass. Because of internal type checks
we also need to make sure that we downgrade the custom metaclass
for one level to something closer to type (that's why __call__ and
__init__ comes back from type etc.).
This has the advantage over six.with_metaclass of not introducing
dummy classes into the final MRO.
"""
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
class NoSlotsPyPy(type):
"""
Workaround for PyPy not working well with __slots__ and __class__ assignment.
"""
def __new__(meta, name, bases, dct):
if PYPY and '__slots__' in dct:
dct.pop('__slots__')
return super(NoSlotsPyPy, meta).__new__(meta, name, bases, dct)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,290 +0,0 @@
# dblite.py module contributed by Ralf W. Grosse-Kunstleve.
# Extended for Unicode by Steven Knight.
from __future__ import print_function
import os
import pickle
import shutil
import time
from SCons.compat import PICKLE_PROTOCOL
keep_all_files = 00000
ignore_corrupt_dbfiles = 0
def corruption_warning(filename):
print("Warning: Discarding corrupt database:", filename)
try:
unicode
except NameError:
def is_string(s):
return isinstance(s, str)
else:
def is_string(s):
return type(s) in (str, unicode)
def is_bytes(s):
return isinstance(s, bytes)
try:
unicode('a')
except NameError:
def unicode(s):
return s
dblite_suffix = '.dblite'
# TODO: Does commenting this out break switching from py2/3?
# if bytes is not str:
# dblite_suffix += '.p3'
tmp_suffix = '.tmp'
class dblite(object):
"""
Squirrel away references to the functions in various modules
that we'll use when our __del__() method calls our sync() method
during shutdown. We might get destroyed when Python is in the midst
of tearing down the different modules we import in an essentially
arbitrary order, and some of the various modules's global attributes
may already be wiped out from under us.
See the discussion at:
http://mail.python.org/pipermail/python-bugs-list/2003-March/016877.html
"""
_open = open
_pickle_dump = staticmethod(pickle.dump)
_pickle_protocol = PICKLE_PROTOCOL
_os_chmod = os.chmod
try:
_os_chown = os.chown
except AttributeError:
_os_chown = None
_os_rename = os.rename
_os_unlink = os.unlink
_shutil_copyfile = shutil.copyfile
_time_time = time.time
def __init__(self, file_base_name, flag, mode):
assert flag in (None, "r", "w", "c", "n")
if flag is None:
flag = "r"
base, ext = os.path.splitext(file_base_name)
if ext == dblite_suffix:
# There's already a suffix on the file name, don't add one.
self._file_name = file_base_name
self._tmp_name = base + tmp_suffix
else:
self._file_name = file_base_name + dblite_suffix
self._tmp_name = file_base_name + tmp_suffix
self._flag = flag
self._mode = mode
self._dict = {}
self._needs_sync = 00000
if self._os_chown is not None and (os.geteuid() == 0 or os.getuid() == 0):
# running as root; chown back to current owner/group when done
try:
statinfo = os.stat(self._file_name)
self._chown_to = statinfo.st_uid
self._chgrp_to = statinfo.st_gid
except OSError as e:
# db file doesn't exist yet.
# Check os.environ for SUDO_UID, use if set
self._chown_to = int(os.environ.get('SUDO_UID', -1))
self._chgrp_to = int(os.environ.get('SUDO_GID', -1))
else:
self._chown_to = -1 # don't chown
self._chgrp_to = -1 # don't chgrp
if self._flag == "n":
with self._open(self._file_name, "wb", self._mode):
pass # just make sure it exists
else:
try:
f = self._open(self._file_name, "rb")
except IOError as e:
if self._flag != "c":
raise e
with self._open(self._file_name, "wb", self._mode):
pass # just make sure it exists
else:
p = f.read()
f.close()
if len(p) > 0:
try:
if bytes is not str:
self._dict = pickle.loads(p, encoding='bytes')
else:
self._dict = pickle.loads(p)
except (pickle.UnpicklingError, EOFError, KeyError):
# Note how we catch KeyErrors too here, which might happen
# when we don't have cPickle available (default pickle
# throws it).
if (ignore_corrupt_dbfiles == 0): raise
if (ignore_corrupt_dbfiles == 1):
corruption_warning(self._file_name)
def close(self):
if self._needs_sync:
self.sync()
def __del__(self):
self.close()
def sync(self):
self._check_writable()
f = self._open(self._tmp_name, "wb", self._mode)
self._pickle_dump(self._dict, f, self._pickle_protocol)
f.close()
# Windows doesn't allow renaming if the file exists, so unlink
# it first, chmod'ing it to make sure we can do so. On UNIX, we
# may not be able to chmod the file if it's owned by someone else
# (e.g. from a previous run as root). We should still be able to
# unlink() the file if the directory's writable, though, so ignore
# any OSError exception thrown by the chmod() call.
try:
self._os_chmod(self._file_name, 0o777)
except OSError:
pass
self._os_unlink(self._file_name)
self._os_rename(self._tmp_name, self._file_name)
if self._os_chown is not None and self._chown_to > 0: # don't chown to root or -1
try:
self._os_chown(self._file_name, self._chown_to, self._chgrp_to)
except OSError:
pass
self._needs_sync = 00000
if (keep_all_files):
self._shutil_copyfile(
self._file_name,
self._file_name + "_" + str(int(self._time_time())))
def _check_writable(self):
if (self._flag == "r"):
raise IOError("Read-only database: %s" % self._file_name)
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
self._check_writable()
if (not is_string(key)):
raise TypeError("key `%s' must be a string but is %s" % (key, type(key)))
if (not is_bytes(value)):
raise TypeError("value `%s' must be a bytes but is %s" % (value, type(value)))
self._dict[key] = value
self._needs_sync = 0o001
def keys(self):
return list(self._dict.keys())
def has_key(self, key):
return key in self._dict
def __contains__(self, key):
return key in self._dict
def iterkeys(self):
# Wrapping name in () prevents fixer from "fixing" this
return (self._dict.iterkeys)()
__iter__ = iterkeys
def __len__(self):
return len(self._dict)
def open(file, flag=None, mode=0o666):
return dblite(file, flag, mode)
def _exercise():
db = open("tmp", "n")
assert len(db) == 0
db["foo"] = "bar"
assert db["foo"] == "bar"
db[unicode("ufoo")] = unicode("ubar")
assert db[unicode("ufoo")] == unicode("ubar")
db.sync()
db = open("tmp", "c")
assert len(db) == 2, len(db)
assert db["foo"] == "bar"
db["bar"] = "foo"
assert db["bar"] == "foo"
db[unicode("ubar")] = unicode("ufoo")
assert db[unicode("ubar")] == unicode("ufoo")
db.sync()
db = open("tmp", "r")
assert len(db) == 4, len(db)
assert db["foo"] == "bar"
assert db["bar"] == "foo"
assert db[unicode("ufoo")] == unicode("ubar")
assert db[unicode("ubar")] == unicode("ufoo")
try:
db.sync()
except IOError as e:
assert str(e) == "Read-only database: tmp.dblite"
else:
raise RuntimeError("IOError expected.")
db = open("tmp", "w")
assert len(db) == 4
db["ping"] = "pong"
db.sync()
try:
db[(1, 2)] = "tuple"
except TypeError as e:
assert str(e) == "key `(1, 2)' must be a string but is <type 'tuple'>", str(e)
else:
raise RuntimeError("TypeError exception expected")
try:
db["list"] = [1, 2]
except TypeError as e:
assert str(e) == "value `[1, 2]' must be a string but is <type 'list'>", str(e)
else:
raise RuntimeError("TypeError exception expected")
db = open("tmp", "r")
assert len(db) == 5
db = open("tmp", "n")
assert len(db) == 0
dblite._open("tmp.dblite", "w")
db = open("tmp", "r")
dblite._open("tmp.dblite", "w").write("x")
try:
db = open("tmp", "r")
except pickle.UnpicklingError:
pass
else:
raise RuntimeError("pickle exception expected.")
global ignore_corrupt_dbfiles
ignore_corrupt_dbfiles = 2
db = open("tmp", "r")
assert len(db) == 0
os.unlink("tmp.dblite")
try:
db = open("tmp", "w")
except IOError as e:
assert str(e) == "[Errno 2] No such file or directory: 'tmp.dblite'", str(e)
else:
raise RuntimeError("IOError expected.")
if (__name__ == "__main__"):
_exercise()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
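A minimal sketch of the dblite API above, which backs SCons' .sconsign signature database; the file name and payload are illustrative:

import SCons.dblite

db = SCons.dblite.open('signatures', 'c')    # 'c' creates signatures.dblite if missing
db['node-key'] = b'pickled signature data'   # keys must be strings, values must be bytes
db.sync()                                    # writes a temp file, then renames it into place
print('node-key' in db, len(db))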

View File

@ -1,13 +0,0 @@
Metadata-Version: 1.0
Name: scons
Version: 3.1.2
Summary: Open Source next-generation build tool.
Home-page: http://www.scons.org/
Author: William Deegan
Author-email: bill@baddogconsulting.com
License: UNKNOWN
Description: Open Source next-generation build tool.
Improved, cross-platform substitute for the classic Make
utility. In short, SCons is an easier, more reliable
and faster way to build software.
Platform: UNKNOWN

View File

@ -1,210 +0,0 @@
#! /usr/bin/env python
#
# SCons - a Software Constructor
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
__revision__ = "src/script/scons.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__version__ = "3.1.2"
__build__ = "bee7caf9defd6e108fc2998a2520ddb36a967691"
__buildsys__ = "octodog"
__date__ = "2019-12-17 02:07:09"
__developer__ = "bdeegan"
# This is the entry point to the SCons program.
# The only job of this script is to work out where the guts of the program
# could be and import them, where the real work begins.
# SCons can be invoked several different ways
# - from an installed location
# - from a "local install" copy
# - from a source tree, which has a different dir struture than the other two
# Try to account for all those possibilities.
import os
import sys
##############################################################################
# BEGIN STANDARD SCons SCRIPT HEADER
#
# This is the cut-and-paste logic so that a self-contained script can
# interoperate correctly with different SCons versions and installation
# locations for the engine. If you modify anything in this section, you
# should also change other scripts that use this same header.
##############################################################################
# compatibility check
if (3,0,0) < sys.version_info < (3,5,0) or sys.version_info < (2,7,0):
msg = "scons: *** SCons version %s does not run under Python version %s.\n\
Python 2.7 or >= 3.5 is required.\n"
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.exit(1)
# Strip the script directory from sys.path so on case-insensitive
# (WIN32) systems Python doesn't think that the "scons" script is the
# "SCons" package.
script_dir = os.path.dirname(os.path.realpath(__file__))
script_path = os.path.realpath(os.path.dirname(__file__))
if script_path in sys.path:
sys.path.remove(script_path)
libs = []
if "SCONS_LIB_DIR" in os.environ:
libs.append(os.environ["SCONS_LIB_DIR"])
# running from source takes 2nd priority (since 2.3.2), following SCONS_LIB_DIR
source_path = os.path.join(script_path, os.pardir, 'engine')
if os.path.isdir(source_path):
libs.append(source_path)
# add local-install locations
local_version = 'scons-local-' + __version__
local = 'scons-local'
if script_dir:
local_version = os.path.join(script_dir, local_version)
local = os.path.join(script_dir, local)
if os.path.isdir(local_version):
libs.append(os.path.abspath(local_version))
if os.path.isdir(local):
libs.append(os.path.abspath(local))
scons_version = 'scons-%s' % __version__
# preferred order of scons lookup paths
prefs = []
# if we can find package information, use it
try:
import pkg_resources
except ImportError:
pass
else:
try:
d = pkg_resources.get_distribution('scons')
except pkg_resources.DistributionNotFound:
pass
else:
prefs.append(d.location)
if sys.platform == 'win32':
# Use only sys.prefix on Windows
prefs.append(sys.prefix)
prefs.append(os.path.join(sys.prefix, 'Lib', 'site-packages'))
else:
# On other (POSIX) platforms, things are more complicated due to
# the variety of path names and library locations.
# Build up some possibilities, then transform them into candidates
temp = []
if script_dir == 'bin':
# script_dir is `pwd`/bin;
# check `pwd`/lib/scons*.
temp.append(os.getcwd())
else:
if script_dir == '.' or script_dir == '':
script_dir = os.getcwd()
head, tail = os.path.split(script_dir)
if tail == "bin":
# script_dir is /foo/bin;
# check /foo/lib/scons*.
temp.append(head)
head, tail = os.path.split(sys.prefix)
if tail == "usr":
# sys.prefix is /foo/usr;
# check /foo/usr/lib/scons* first,
# then /foo/usr/local/lib/scons*.
temp.append(sys.prefix)
temp.append(os.path.join(sys.prefix, "local"))
elif tail == "local":
h, t = os.path.split(head)
if t == "usr":
# sys.prefix is /foo/usr/local;
# check /foo/usr/local/lib/scons* first,
# then /foo/usr/lib/scons*.
temp.append(sys.prefix)
temp.append(head)
else:
# sys.prefix is /foo/local;
# check only /foo/local/lib/scons*.
temp.append(sys.prefix)
else:
# sys.prefix is /foo (ends in neither /usr or /local);
# check only /foo/lib/scons*.
temp.append(sys.prefix)
# suffix these to add to our original prefs:
prefs.extend([os.path.join(x, 'lib') for x in temp])
prefs.extend([os.path.join(x, 'lib', 'python' + sys.version[:3],
'site-packages') for x in temp])
# Add the parent directory of the current python's library to the
# preferences. This picks up differences between, e.g., lib and lib64,
# and finds the base location in case of a non-copying virtualenv.
try:
libpath = os.__file__
except AttributeError:
pass
else:
# Split /usr/libfoo/python*/os.py to /usr/libfoo/python*.
libpath, tail = os.path.split(libpath)
# Split /usr/libfoo/python* to /usr/libfoo
libpath, tail = os.path.split(libpath)
# Check /usr/libfoo/scons*.
prefs.append(libpath)
# Look first for 'scons-__version__' in all of our preference libs,
# then for 'scons'. Skip paths that do not exist.
libs.extend([os.path.join(x, scons_version) for x in prefs if os.path.isdir(x)])
libs.extend([os.path.join(x, 'scons') for x in prefs if os.path.isdir(x)])
sys.path = libs + sys.path
##############################################################################
# END STANDARD SCons SCRIPT HEADER
##############################################################################
if __name__ == "__main__":
try:
import SCons.Script
except ImportError:
sys.stderr.write("SCons import failed. Unable to find engine files in:\n")
for path in libs:
sys.stderr.write(" {}\n".format(path))
raise
# this does all the work, and calls sys.exit
# with the proper exit status when done.
SCons.Script.main()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,38 +1,37 @@
@REM Copyright (c) 2001 - 2019 The SCons Foundation
@REM src/script/scons.bat bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan
@echo off
set SCONS_ERRORLEVEL=
if "%OS%" == "Windows_NT" goto WinNT
@REM for 9x/Me you better not have more than 9 args
python -c "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-3.1.2'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons-3.1.2'), join(sys.prefix, 'scons')] + sys.path; import SCons.Script; SCons.Script.main()" %1 %2 %3 %4 %5 %6 %7 %8 %9
@REM no way to set exit status of this script for 9x/Me
goto endscons
@REM Credit where credit is due: we return the exit code despite our
@REM use of setlocal+endlocal using a technique from Bear's Journal:
@REM http://code-bear.com/bearlog/2007/06/01/getting-the-exit-code-from-a-batch-file-that-is-run-from-a-python-program/
:WinNT
setlocal
@REM ensure the script will be executed with the Python it was installed for
pushd %~dp0..
set path=%~dp0;%CD%;%path%
popd
@REM try the script named as the .bat file in current dir, then in Scripts subdir
set scriptname=%~dp0%~n0.py
if not exist "%scriptname%" set scriptname=%~dp0Scripts\%~n0.py
@REM Handle when running from wheel where the script has no .py extension
if not exist "%scriptname%" set scriptname=%~dp0%~n0
python "%scriptname%" %*
endlocal & set SCONS_ERRORLEVEL=%ERRORLEVEL%
if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto returncode
if errorlevel 9009 echo you do not have python in your PATH
goto endscons
:returncode
exit /B %SCONS_ERRORLEVEL%
:endscons
call :returncode %SCONS_ERRORLEVEL%
@REM Copyright (c) 2001 - 2025 The SCons Foundation
@echo off
set SCONS_ERRORLEVEL=
if "%OS%" == "Windows_NT" goto WinNT
@REM for 9x/Me you better not have more than 9 args
python -c "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-4.9.1'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons-4.9.1'), join(sys.prefix, 'scons')] + sys.path; import SCons.Script; SCons.Script.main()" %*
@REM no way to set exit status of this script for 9x/Me
goto endscons
@REM Credit where credit is due: we return the exit code despite our
@REM use of setlocal+endlocal using a technique from Bear's Journal:
@REM http://code-bear.com/bearlog/2007/06/01/getting-the-exit-code-from-a-batch-file-that-is-run-from-a-python-program/
:WinNT
setlocal
@REM ensure the script will be executed with the Python it was installed for
pushd %~dp0..
set path=%~dp0;%CD%;%path%
popd
@REM try the script named as the .bat file in current dir, then in Scripts subdir
set scriptname=%~dp0%~n0.py
if not exist "%scriptname%" set scriptname=%~dp0Scripts\%~n0.py
@REM Handle when running from wheel where the script has no .py extension
if not exist "%scriptname%" set scriptname=%~dp0%~n0
python "%scriptname%" %*
endlocal & set SCONS_ERRORLEVEL=%ERRORLEVEL%
if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto returncode
if errorlevel 9009 echo you do not have python in your PATH
goto endscons
:returncode
exit /B %SCONS_ERRORLEVEL%
:endscons
call :returncode %SCONS_ERRORLEVEL%

View File

@ -5,7 +5,7 @@
MIT License
Copyright (c) 2001 - 2019 The SCons Foundation
Copyright (c) 2001 - 2025 The SCons Foundation
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the

View File

@ -1,4 +1,3 @@
# Copyright (c) 2001 - 2019 The SCons Foundation
SCons - a software construction tool
@ -38,14 +37,15 @@ LATEST VERSION
Before going further, you can check for the latest version of the
scons-local package, or any SCons package, at the SCons download page:
http://www.scons.org/download.html
https://scons.org/pages/download.html
EXECUTION REQUIREMENTS
======================
Running SCons requires either Python version 2.7.* or Python 3.5 or higher.
There should be no other dependencies or requirements to run SCons.
Running SCons requires Python 3.6 or higher. There should be no other
dependencies or requirements to run standard SCons.
The last release to support Python 3.5 was 4.2.0.
The default SCons configuration assumes use of the Microsoft Visual C++
compiler suite on WIN32 systems (either through the Visual Studio
@ -82,6 +82,11 @@ Or (if, for example, you installed this package in a subdirectory named
That should be all you have to do. (If it isn't that simple, please let
us know!)
Since 4.5, there is also an alternate form of scons-local available:
a zipapp. This is a single file with a .pyz suffix, which can be
downloaded and executed directly (e.g.: python scons-local-4.5.2.pyz)
without unpacking. This may be more convenient in a few cases.
CONTENTS OF THIS PACKAGE
========================
@ -189,8 +194,9 @@ You may subscribe to the scons-users mailing list at:
http://two.pairlist.net/mailman/listinfo/scons-users
An active mailing list for developers of SCons is available. You may
send questions or comments to the list at:
In addition to the scons-users list which is appropriate for almost any
question, there is a mailing list specifically for developers of SCons.
You may send questions or comments to the list at:
scons-dev@scons.org
@ -202,15 +208,11 @@ Subscription to the developer's mailing list is by approval. In practice, no
one is refused list membership, but we reserve the right to limit membership
in the future and/or weed out lurkers.
There is also a low-volume mailing list available for announcements about
SCons. Subscribe by sending email to:
announce-subscribe@scons.tigris.org
There are other mailing lists available for SCons users, for notification of
SCons code changes, and for notification of updated bug reports and project
documents. Please see our mailing lists page for details.
Note that while this list still exists, the number of different places you
can talk about SCons means it is no longer very active. GitHub has
support for discussions as well as for issues, and there is usually more
immediacy on the Discord chat, so these are probably now considered the
preferred places for "development" topics.
FOR MORE INFORMATION
@ -221,18 +223,29 @@ Check the SCons web site at:
http://www.scons.org/
AUTHOR INFO
Author Info
===========
Steven Knight
knight at baldmt dot com
http://www.baldmt.com/~knight/
With plenty of help from the SCons Development team:
Chad Austin
Charles Crain
Steve Leblanc
Anthony Roach
Terrel Shumway
SCons was originally written by Steven Knight, knight at baldmt dot com.
Since around 2010 it has been maintained by the SCons
development team, co-managed by Bill Deegan and Gary Oberbrunner, with
many contributors, including but not at all limited to:
- Chad Austin
- Dirk Baechle
- Charles Crain
- William Deegan
- Steve Leblanc
- Rob Managan
- Greg Noel
- Gary Oberbrunner
- Anthony Roach
- Greg Spencer
- Tom Tanner
- Anatoly Techtonik
- Christoph Wiedemann
- Mats Wichmann
- Russel Winder
... and many others.

View File

@ -0,0 +1,97 @@
#! /usr/bin/env python
#
# SCons - a Software Constructor
#
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Show or convert the configuration of an SCons cache directory.
A cache of derived files is stored by file signature.
The files are split into directories named by the first few
digits of the signature. The prefix length used for directory
names can be changed by this script.
"""
__revision__ = "scripts/scons-configure-cache.py 39a12f34d532ab2493e78a7b73aeab2250852790 Thu, 27 Mar 2025 11:44:24 -0700 bdbaddog"
__version__ = "4.9.1"
__build__ = "39a12f34d532ab2493e78a7b73aeab2250852790"
__buildsys__ = "M1Dog2021"
__date__ = "Thu, 27 Mar 2025 11:44:24 -0700"
__developer__ = "bdbaddog"
import os
import sys
# python compatibility check
if sys.version_info < (3, 7, 0):
msg = "scons: *** SCons version %s does not run under Python version %s.\n\
Python >= 3.7.0 is required.\n"
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.exit(1)
# Strip the script directory from sys.path so on case-insensitive
# (WIN32) systems Python doesn't think that the "scons" script is the
# "SCons" package.
script_dir = os.path.dirname(os.path.realpath(__file__))
script_path = os.path.realpath(os.path.dirname(__file__))
if script_path in sys.path:
sys.path.remove(script_path)
libs = []
if "SCONS_LIB_DIR" in os.environ:
libs.append(os.environ["SCONS_LIB_DIR"])
# running from source takes 2nd priority (since 2.3.2), following SCONS_LIB_DIR
source_path = os.path.join(script_path, os.pardir)
if os.path.isdir(source_path):
libs.append(source_path)
# add local-install locations
local_version = 'scons-local-' + __version__
local = 'scons-local'
if script_dir:
local_version = os.path.join(script_dir, local_version)
local = os.path.join(script_dir, local)
if os.path.isdir(local_version):
libs.append(os.path.abspath(local_version))
if os.path.isdir(local):
libs.append(os.path.abspath(local))
scons_version = 'scons-%s' % __version__
sys.path = libs + sys.path
##############################################################################
# END STANDARD SCons SCRIPT HEADER
##############################################################################
from SCons.Utilities.ConfigureCache import main
if __name__ == "__main__":
main()

View File

@ -1,3 +1,26 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
SCons.Builder
@ -76,42 +99,23 @@ There are the following methods for internal use within this module:
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import annotations
__revision__ = "src/engine/SCons/Builder.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import collections
import os
from collections import UserDict, UserList
from contextlib import suppress
import SCons.Action
import SCons.Debug
from SCons.Debug import logInstanceCreation
from SCons.Errors import InternalError, UserError
import SCons.Executor
import SCons.Memoize
import SCons.Util
import SCons.Warnings
from SCons.Debug import logInstanceCreation
from SCons.Errors import InternalError, UserError
from SCons.Executor import Executor
class _Null(object):
class _Null:
pass
_null = _Null
@ -131,14 +135,14 @@ class DictCmdGenerator(SCons.Util.Selector):
to return the proper action based on the file suffix of
the source file."""
def __init__(self, dict=None, source_ext_match=1):
SCons.Util.Selector.__init__(self, dict)
def __init__(self, mapping=None, source_ext_match: bool=True) -> None:
super().__init__(mapping)
self.source_ext_match = source_ext_match
def src_suffixes(self):
return list(self.keys())
def add_action(self, suffix, action):
def add_action(self, suffix, action) -> None:
"""Add a suffix-action pair to the mapping.
"""
self[suffix] = action
@ -197,7 +201,7 @@ class DictEmitter(SCons.Util.Selector):
target, source = emitter(target, source, env)
return (target, source)
class ListEmitter(collections.UserList):
class ListEmitter(UserList):
"""A callable list of emitters that calls each in sequence,
returning the result.
"""
@ -215,7 +219,7 @@ misleading_keywords = {
'sources' : 'source',
}
class OverrideWarner(collections.UserDict):
class OverrideWarner(UserDict):
"""A class for warning about keyword arguments that we use as
overrides in a Builder call.
@ -223,14 +227,15 @@ class OverrideWarner(collections.UserDict):
can actually invoke multiple builders. This class only emits the
warnings once, no matter how many Builders are invoked.
"""
def __init__(self, dict):
collections.UserDict.__init__(self, dict)
def __init__(self, mapping) -> None:
super().__init__(mapping)
if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.OverrideWarner')
self.already_warned = None
def warn(self):
def warn(self) -> None:
if self.already_warned:
return
for k in list(self.keys()):
for k in self.keys():
if k in misleading_keywords:
alt = misleading_keywords[k]
msg = "Did you mean to use `%s' instead of `%s'?" % (alt, k)
@ -246,7 +251,7 @@ def Builder(**kw):
kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
del kw['generator']
elif 'action' in kw:
source_ext_match = kw.get('source_ext_match', 1)
source_ext_match = kw.get('source_ext_match', True)
if 'source_ext_match' in kw:
del kw['source_ext_match']
if SCons.Util.is_Dict(kw['action']):
@ -328,14 +333,14 @@ def _node_errors(builder, env, tlist, slist):
if len(slist) > 1:
raise UserError("More than one source given for single-source builder: targets=%s sources=%s" % (list(map(str,tlist)), list(map(str,slist))))
class EmitterProxy(object):
class EmitterProxy:
"""This is a callable class that can act as a
Builder emitter. It holds on to a string that
is a key into an Environment dictionary, and will
look there at actual build time to see if it holds
a callable. If so, we will call that as the actual
emitter."""
def __init__(self, var):
def __init__(self, var) -> None:
self.var = SCons.Util.to_String(var)
def __call__(self, target, source, env):
@ -354,36 +359,44 @@ class EmitterProxy(object):
return (target, source)
def __eq__(self, other):
return self.var == other.var
def __lt__(self, other):
return self.var < other.var
class BuilderBase(object):
def __le__(self, other):
return self.var <= other.var
def __gt__(self, other):
return self.var > other.var
def __ge__(self, other):
return self.var >= other.var
class BuilderBase:
"""Base class for Builders, objects that create output
nodes (files) from input nodes (files).
"""
def __init__(self, action = None,
prefix = '',
suffix = '',
src_suffix = '',
prefix: str = '',
suffix: str = '',
src_suffix: str = '',
target_factory = None,
source_factory = None,
target_scanner = None,
source_scanner = None,
emitter = None,
multi = 0,
multi: bool = False,
env = None,
single_source = 0,
single_source: bool = False,
name = None,
chdir = _null,
is_explicit = 1,
is_explicit: bool = True,
src_builder = None,
ensure_suffix = False,
**overrides):
ensure_suffix: bool = False,
**overrides) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.BuilderBase')
self._memo = {}
self.action = action
@ -431,12 +444,9 @@ class BuilderBase(object):
src_builder = [ src_builder ]
self.src_builder = src_builder
def __nonzero__(self):
def __bool__(self) -> bool:
raise InternalError("Do not test for the Node.builder attribute directly; use Node.has_builder() instead")
def __bool__(self):
return self.__nonzero__()
def get_name(self, env):
"""Attempts to get the name of the Builder.
@ -466,7 +476,7 @@ class BuilderBase(object):
suffixes = []
return match_splitext(path, suffixes)
def _adjustixes(self, files, pre, suf, ensure_suffix=False):
def _adjustixes(self, files, pre, suf, ensure_suffix: bool=False):
if not files:
return []
result = []
@ -474,6 +484,11 @@ class BuilderBase(object):
files = [files]
for f in files:
# fspath() is to catch PathLike paths. We avoid the simpler
# str(f) so as not to "lose" files that are already Nodes:
# TypeError: expected str, bytes or os.PathLike object, not File
with suppress(TypeError):
f = os.fspath(f)
if SCons.Util.is_String(f):
f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix)
result.append(f)
@ -504,6 +519,7 @@ class BuilderBase(object):
splitext = lambda S: self.splitext(S,env)
tlist = [ t_from_s(pre, suf, splitext) ]
else:
# orig_target = target
target = self._adjustixes(target, pre, suf, self.ensure_suffix)
tlist = env.arg2nodes(target, target_factory, target=target, source=source)
@ -576,7 +592,7 @@ class BuilderBase(object):
# build this particular list of targets from this particular list of
# sources.
executor = None
executor: Executor | None = None
key = None
if self.multi:
@ -616,6 +632,10 @@ class BuilderBase(object):
t.set_executor(executor)
t.set_explicit(self.is_explicit)
if env.get("SCONF_NODE"):
for node in tlist:
node.attributes.conftest_node = 1
return SCons.Node.NodeList(tlist)
def __call__(self, env, target=None, source=None, chdir=_null, **kw):
@ -663,7 +683,7 @@ class BuilderBase(object):
prefix = prefix(env, sources)
return env.subst(prefix)
def set_suffix(self, suffix):
def set_suffix(self, suffix) -> None:
if not callable(suffix):
suffix = self.adjust_suffix(suffix)
self.suffix = suffix
@ -674,7 +694,7 @@ class BuilderBase(object):
suffix = suffix(env, sources)
return env.subst(suffix)
def set_src_suffix(self, src_suffix):
def set_src_suffix(self, src_suffix) -> None:
if not src_suffix:
src_suffix = []
elif not SCons.Util.is_List(src_suffix):
@ -688,7 +708,7 @@ class BuilderBase(object):
return ''
return ret[0]
def add_emitter(self, suffix, emitter):
def add_emitter(self, suffix, emitter) -> None:
"""Add a suffix-emitter mapping to this Builder.
This assumes that emitter has been initialized with an
@ -698,7 +718,7 @@ class BuilderBase(object):
"""
self.emitter[suffix] = emitter
def add_src_builder(self, builder):
def add_src_builder(self, builder) -> None:
"""
Add a new Builder to the list of src_builders.
@ -865,9 +885,9 @@ class CompositeBuilder(SCons.Util.Proxy):
to the DictCmdGenerator's add_action() method.
"""
def __init__(self, builder, cmdgen):
def __init__(self, builder, cmdgen) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.CompositeBuilder')
SCons.Util.Proxy.__init__(self, builder)
super().__init__(builder)
# cmdgen should always be an instance of DictCmdGenerator.
self.cmdgen = cmdgen
@ -875,11 +895,11 @@ class CompositeBuilder(SCons.Util.Proxy):
__call__ = SCons.Util.Delegate('__call__')
def add_action(self, suffix, action):
def add_action(self, suffix, action) -> None:
self.cmdgen.add_action(suffix, action)
self.set_src_suffix(self.cmdgen.src_suffixes())
def is_a_Builder(obj):
def is_a_Builder(obj) -> bool:
""""Returns True if the specified obj is one of our Builder classes.
The test is complicated a bit by the fact that CompositeBuilder

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -19,25 +20,29 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/CacheDir.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """
CacheDir support
"""CacheDir support
"""
import hashlib
import atexit
import json
import os
import shutil
import stat
import sys
import tempfile
import uuid
import SCons
import SCons.Action
import SCons.Errors
import SCons.Warnings
from SCons.Util import PY3
import SCons.Util
CACHE_PREFIX_LEN = 2 # first two characters used as subdirectory name
CACHE_TAG = (
b"Signature: 8a477f597d28d172789f06886806bc55\n"
b"# SCons cache directory - see https://bford.info/cachedir/\n"
)
cache_enabled = True
cache_debug = False
@ -46,7 +51,7 @@ cache_show = False
cache_readonly = False
cache_tmp_uuid = uuid.uuid4().hex
def CacheRetrieveFunc(target, source, env):
def CacheRetrieveFunc(target, source, env) -> int:
t = target[0]
fs = t.fs
cd = env.get_CacheDir()
@ -67,23 +72,22 @@ def CacheRetrieveFunc(target, source, env):
except OSError:
pass
st = fs.stat(cachefile)
fs.chmod(t.get_internal_path(), stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
fs.chmod(t.get_internal_path(), stat.S_IMODE(st.st_mode) | stat.S_IWRITE)
return 0
def CacheRetrieveString(target, source, env):
def CacheRetrieveString(target, source, env) -> str:
t = target[0]
fs = t.fs
cd = env.get_CacheDir()
cachedir, cachefile = cd.cachepath(t)
if t.fs.exists(cachefile):
return "Retrieved `%s' from cache" % t.get_internal_path()
return None
return ""
CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString)
CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None)
def CachePushFunc(target, source, env):
def CachePushFunc(target, source, env) -> None:
if cache_readonly:
return
@ -106,26 +110,22 @@ def CachePushFunc(target, source, env):
cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile)
tempfile = f"{cachefile}.tmp{cache_tmp_uuid}"
tempfile = "%s.tmp%s"%(cachefile,cache_tmp_uuid)
errfmt = "Unable to copy %s to cache. Cache file is %s"
if not fs.isdir(cachedir):
try:
fs.makedirs(cachedir)
except EnvironmentError:
# We may have received an exception because another process
# has beaten us creating the directory.
if not fs.isdir(cachedir):
msg = errfmt % (str(target), cachefile)
raise SCons.Errors.SConsEnvironmentError(msg)
try:
fs.makedirs(cachedir, exist_ok=True)
except OSError:
msg = errfmt % (str(target), cachefile)
raise SCons.Errors.SConsEnvironmentError(msg)
try:
if fs.islink(t.get_internal_path()):
fs.symlink(fs.readlink(t.get_internal_path()), tempfile)
else:
cd.copy_to_cache(env, t.get_internal_path(), tempfile)
fs.rename(tempfile, cachefile)
except EnvironmentError:
except OSError:
# It's possible someone else tried writing the file at the
# same time we did, or else that there was some problem like
# the CacheDir being on a separate file system that's full.
@ -137,15 +137,11 @@ def CachePushFunc(target, source, env):
CachePush = SCons.Action.Action(CachePushFunc, None)
# Nasty hack to cut down to one warning for each cachedir path that needs
# upgrading.
warned = dict()
class CacheDir(object):
class CacheDir:
def __init__(self, path):
"""
Initialize a CacheDir object.
def __init__(self, path) -> None:
"""Initialize a CacheDir object.
The cache configuration is stored in the object. It
is read from the config file in the supplied path if
@ -157,119 +153,131 @@ class CacheDir(object):
self.path = path
self.current_cache_debug = None
self.debugFP = None
self.config = dict()
if path is None:
return
self.config = {}
if path is not None:
self._readconfig(path)
if PY3:
self._readconfig3(path)
else:
self._readconfig2(path)
def _add_config(self, path: str) -> None:
"""Create the cache config file in *path*.
def _readconfig3(self, path):
"""
Python3 version of reading the cache config.
If directory or config file do not exist, create. Take advantage
of Py3 capability in os.makedirs() and in file open(): just try
the operation and handle failure appropriately.
Omit the check for old cache format, assume that's old enough
there will be none of those left to worry about.
:param path: path to the cache directory
Locking isn't necessary in the normal case - when the cachedir is
being created - because it's written to a unique directory first,
before the directory is renamed. But it is legal to call CacheDir
with an existing directory, which may be missing the config file,
and in that case we do need locking. Simpler to always lock.
"""
config_file = os.path.join(path, 'config')
# TODO: this breaks the "unserializable config object" test which
# does some crazy stuff, so for now don't use setdefault. It does
# seem like it would be better to preserve an existing value.
# self.config.setdefault('prefix_len', CACHE_PREFIX_LEN)
self.config['prefix_len'] = CACHE_PREFIX_LEN
with SCons.Util.FileLock(config_file, timeout=5, writer=True), open(
config_file, "x"
) as config:
try:
json.dump(self.config, config)
except Exception:
msg = "Failed to write cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg)
# Add the tag file "carelessly" - the contents are not used by SCons
# so we don't care about the chance of concurrent writes.
try:
os.makedirs(path, exist_ok=True)
tagfile = os.path.join(path, "CACHEDIR.TAG")
with open(tagfile, 'xb') as cachedir_tag:
cachedir_tag.write(CACHE_TAG)
except FileExistsError:
pass
except OSError:
msg = "Failed to create cache directory " + path
raise SCons.Errors.SConsEnvironmentError(msg)
def _mkdir_atomic(self, path: str) -> bool:
"""Create cache directory at *path*.
Uses directory renaming to avoid races. If we are actually
creating the dir, populate it with the metadata files at the
same time as that's the safest way. But it's not illegal to point
CacheDir at an existing directory that wasn't a cache previously,
so we may have to do that elsewhere, too.
Returns:
``True`` if it we created the dir, ``False`` if already existed,
Raises:
SConsEnvironmentError: if we tried and failed to create the cache.
"""
directory = os.path.abspath(path)
if os.path.exists(directory):
return False
try:
with open(config_file, 'x') as config:
self.config['prefix_len'] = 2
# TODO: Python 3.7. See comment below.
# tempdir = tempfile.TemporaryDirectory(dir=os.path.dirname(directory))
tempdir = tempfile.mkdtemp(dir=os.path.dirname(directory))
except OSError as e:
msg = "Failed to create cache directory " + path
raise SCons.Errors.SConsEnvironmentError(msg) from e
# TODO: Python 3.7: the context manager raises exception on cleanup
# if the temporary was moved successfully (File Not Found).
# Fixed in 3.8+. In the replacement below we manually clean up if
# the move failed as mkdtemp() does not. TemporaryDirectory's
# cleanup is more sophisticated so prefer when we can use it.
# self._add_config(tempdir.name)
# with tempdir:
# try:
# os.replace(tempdir.name, directory)
# return True
# except OSError as e:
# # did someone else get there first?
# if os.path.isdir(directory):
# return False # context manager cleans up
# msg = "Failed to create cache directory " + path
# raise SCons.Errors.SConsEnvironmentError(msg) from e
self._add_config(tempdir)
try:
os.replace(tempdir, directory)
return True
except OSError as e:
# did someone else get there first? attempt cleanup.
if os.path.isdir(directory):
try:
json.dump(self.config, config)
except Exception:
msg = "Failed to write cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg)
except FileExistsError:
try:
with open(config_file) as config:
self.config = json.load(config)
except ValueError:
msg = "Failed to read cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg)
shutil.rmtree(tempdir)
except Exception: # we tried, don't worry about it
pass
return False
msg = "Failed to create cache directory " + path
raise SCons.Errors.SConsEnvironmentError(msg) from e
def _readconfig(self, path: str) -> None:
"""Read the cache config from *path*.
def _readconfig2(self, path):
"""
Python2 version of reading cache config.
See if there is a config file in the cache directory. If there is,
use it. If there isn't, and the directory exists and isn't empty,
produce a warning. If the directory does not exist or is empty,
write a config file.
:param path: path to the cache directory
If directory or config file do not exist, create and populate.
"""
config_file = os.path.join(path, 'config')
if not os.path.exists(config_file):
# A note: There is a race hazard here if two processes start and
# attempt to create the cache directory at the same time. However,
# Python 2.x does not give you the option to do exclusive file
# creation (not even the option to error on opening an existing
# file for writing...). The ordering of events here is an attempt
# to alleviate this, on the basis that it's a pretty unlikely
# occurrence (would require two builds with a brand new cache
# directory)
if os.path.isdir(path) and any(f != "config" for f in os.listdir(path)):
self.config['prefix_len'] = 1
# When building the project I was testing this on, the warning
# was output over 20 times. That seems excessive
global warned
if self.path not in warned:
msg = "Please upgrade your cache by running " +\
"scons-configure-cache.py " + self.path
SCons.Warnings.warn(SCons.Warnings.CacheVersionWarning, msg)
warned[self.path] = True
else:
if not os.path.isdir(path):
try:
os.makedirs(path)
except OSError:
# If someone else is trying to create the directory at
# the same time as me, bad things will happen
msg = "Failed to create cache directory " + path
raise SCons.Errors.SConsEnvironmentError(msg)
created = self._mkdir_atomic(path)
if not created and not os.path.isfile(config_file):
# Could have been passed an empty directory
self._add_config(path)
try:
with SCons.Util.FileLock(config_file, timeout=5, writer=False), open(
config_file
) as config:
self.config = json.load(config)
except (ValueError, json.decoder.JSONDecodeError):
msg = "Failed to read cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg)
self.config['prefix_len'] = 2
if not os.path.exists(config_file):
try:
with open(config_file, 'w') as config:
json.dump(self.config, config)
except Exception:
msg = "Failed to write cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg)
else:
try:
with open(config_file) as config:
self.config = json.load(config)
except ValueError:
msg = "Failed to read cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg)
def CacheDebug(self, fmt, target, cachefile):
def CacheDebug(self, fmt, target, cachefile) -> None:
if cache_debug != self.current_cache_debug:
if cache_debug == '-':
self.debugFP = sys.stdout
elif cache_debug:
def debug_cleanup(debugFP) -> None:
debugFP.close()
self.debugFP = open(cache_debug, 'w')
atexit.register(debug_cleanup, self.debugFP)
else:
self.debugFP = None
self.current_cache_debug = cache_debug
@ -279,58 +287,70 @@ class CacheDir(object):
(self.requests, self.hits, self.misses, self.hit_ratio))
@classmethod
def copy_from_cache(cls, env, src, dst):
def copy_from_cache(cls, env, src, dst) -> str:
"""Copy a file from cache."""
if env.cache_timestamp_newer:
return env.fs.copy(src, dst)
else:
return env.fs.copy2(src, dst)
@classmethod
def copy_to_cache(cls, env, src, dst):
def copy_to_cache(cls, env, src, dst) -> str:
"""Copy a file to cache.
Just use the FS copy2 ("with metadata") method, except do an additional
check and if necessary a chmod to ensure the cachefile is writeable,
to forestall permission problems if the cache entry is later updated.
"""
try:
result = env.fs.copy2(src, dst)
fs = env.File(src).fs
st = fs.stat(src)
fs.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
st = stat.S_IMODE(os.stat(result).st_mode)
if not st | stat.S_IWRITE:
os.chmod(dst, st | stat.S_IWRITE)
return result
except AttributeError as ex:
raise EnvironmentError from ex
raise OSError from ex
@property
def hit_ratio(self):
def hit_ratio(self) -> float:
return (100.0 * self.hits / self.requests if self.requests > 0 else 100)
@property
def misses(self):
def misses(self) -> int:
return self.requests - self.hits
def is_enabled(self):
def is_enabled(self) -> bool:
return cache_enabled and self.path is not None
def is_readonly(self):
def is_readonly(self) -> bool:
return cache_readonly
def get_cachedir_csig(self, node):
def get_cachedir_csig(self, node) -> str:
cachedir, cachefile = self.cachepath(node)
if cachefile and os.path.exists(cachefile):
return SCons.Util.MD5filesignature(cachefile, \
SCons.Node.FS.File.md5_chunksize * 1024)
return SCons.Util.hash_file_signature(cachefile, SCons.Node.FS.File.hash_chunksize)
def cachepath(self, node):
"""
def cachepath(self, node) -> tuple:
"""Return where to cache a file.
Given a Node, obtain the configured cache directory and
the path to the cached file, which is generated from the
node's build signature. If caching is not enabled for the
node, return a tuple of None.
"""
if not self.is_enabled():
return None, None
sig = node.get_cachedir_bsig()
subdir = sig[:self.config['prefix_len']].upper()
cachedir = os.path.join(self.path, subdir)
return cachedir, os.path.join(cachedir, sig)
dir = os.path.join(self.path, subdir)
return dir, os.path.join(dir, sig)
def retrieve(self, node) -> bool:
"""Retrieve a node from cache.
Returns True if a successful retrieval resulted.
def retrieve(self, node):
"""
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
built().

View File

@ -1,9 +1,6 @@
"""SCons.Conftest
Autoconf-like configuration support; low level implementation of tests.
"""
# MIT License
#
# Copyright The SCons Foundation
# Copyright (c) 2003 Stichting NLnet Labs
# Copyright (c) 2001, 2002, 2003 Steven Knight
#
@ -25,80 +22,72 @@ Autoconf-like configuration support; low level implementation of tests.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#
# The purpose of this module is to define how a check is to be performed.
# Use one of the Check...() functions below.
#
r"""Autoconf-like configuration support
#
# A context class is used that defines functions for carrying out the tests,
# logging and messages. The following methods and members must be present:
#
# context.Display(msg) Function called to print messages that are normally
# displayed for the user. Newlines are explicitly used.
# The text should also be written to the logfile!
#
# context.Log(msg) Function called to write to a log file.
#
# context.BuildProg(text, ext)
# Function called to build a program, using "ext" for the
# file extention. Must return an empty string for
# success, an error message for failure.
# For reliable test results building should be done just
# like an actual program would be build, using the same
# command and arguments (including configure results so
# far).
#
# context.CompileProg(text, ext)
# Function called to compile a program, using "ext" for
# the file extention. Must return an empty string for
# success, an error message for failure.
# For reliable test results compiling should be done just
# like an actual source file would be compiled, using the
# same command and arguments (including configure results
# so far).
#
# context.AppendLIBS(lib_name_list)
# Append "lib_name_list" to the value of LIBS.
# "lib_namelist" is a list of strings.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.PrependLIBS(lib_name_list)
# Prepend "lib_name_list" to the value of LIBS.
# "lib_namelist" is a list of strings.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.SetLIBS(value)
# Set LIBS to "value". The type of "value" is what
# AppendLIBS() returned.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.headerfilename
# Name of file to append configure results to, usually
# "confdefs.h".
# The file must not exist or be empty when starting.
# Empty or None to skip this (some tests will not work!).
#
# context.config_h (may be missing). If present, must be a string, which
# will be filled with the contents of a config_h file.
#
# context.vardict Dictionary holding variables used for the tests and
# stores results from the tests, used for the build
# commands.
# Normally contains "CC", "LIBS", "CPPFLAGS", etc.
#
# context.havedict Dictionary holding results from the tests that are to
# be used inside a program.
# Names often start with "HAVE_". These are zero
# (feature not present) or one (feature present). Other
# variables may have any value, e.g., "PERLVERSION" can
# be a number and "SYSTEMNAME" a string.
#
The purpose of this module is to define how a check is to be performed.
A context class is used that defines functions for carrying out the tests,
logging and messages. The following methods and members must be present:
context.Display(msg)
Function called to print messages that are normally displayed
for the user. Newlines are explicitly used. The text should
also be written to the logfile!
context.Log(msg)
Function called to write to a log file.
context.BuildProg(text, ext)
Function called to build a program, using "ext" for the file
extension. Must return an empty string for success, an error
message for failure. For reliable test results building should
be done just like an actual program would be build, using the
same command and arguments (including configure results so far).
context.CompileProg(text, ext)
Function called to compile a program, using "ext" for the file
extension. Must return an empty string for success, an error
message for failure. For reliable test results compiling should be
done just like an actual source file would be compiled, using the
same command and arguments (including configure results so far).
context.AppendLIBS(lib_name_list)
Append "lib_name_list" to the value of LIBS. "lib_namelist" is
a list of strings. Return the value of LIBS before changing it
(any type can be used, it is passed to SetLIBS() later.)
context.PrependLIBS(lib_name_list)
Prepend "lib_name_list" to the value of LIBS. "lib_namelist" is
a list of strings. Return the value of LIBS before changing it
(any type can be used, it is passed to SetLIBS() later.)
context.SetLIBS(value)
Set LIBS to "value". The type of "value" is what AppendLIBS()
returned. Return the value of LIBS before changing it (any type
can be used, it is passed to SetLIBS() later.)
context.headerfilename
Name of file to append configure results to, usually "confdefs.h".
The file must not exist or be empty when starting. Empty or None
to skip this (some tests will not work!).
context.config_h (may be missing).
If present, must be a string, which will be filled with the
contents of a config_h file.
context.vardict
Dictionary holding variables used for the tests and stores results
from the tests, used for the build commands. Normally contains
"CC", "LIBS", "CPPFLAGS", etc.
context.havedict
Dictionary holding results from the tests that are to be used
inside a program. Names often start with "HAVE\_". These are zero
(feature not present) or one (feature present). Other variables
may have any value, e.g., "PERLVERSION" can be a number and
"SYSTEMNAME" a string.
"""
import re
@ -226,7 +215,7 @@ int main(void)
_YesNoResult(context, ret, None, text)
return ret
def _check_empty_program(context, comp, text, language, use_shared = False):
def _check_empty_program(context, comp, text, language, use_shared: bool = False):
"""Return 0 on success, 1 otherwise."""
if comp not in context.env or not context.env[comp]:
# The compiler construction variable is not set or empty
@ -242,17 +231,22 @@ def _check_empty_program(context, comp, text, language, use_shared = False):
return context.CompileProg(text, suffix)
def CheckFunc(context, function_name, header = None, language = None):
def CheckFunc(context, function_name, header = None, language = None, funcargs = None):
"""
Configure check for a function "function_name".
"language" should be "C" or "C++" and is used to select the compiler.
Default is "C".
Optional "header" can be defined to define a function prototype, include a
header file or anything else that comes before main().
Optional "funcargs" can be defined to define an argument list for the
generated function invocation.
Sets HAVE_function_name in context.havedict according to the result.
Note that this uses the current value of compiler and linker flags, make
sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
Returns an empty string for success, an error message for failure.
.. versionchanged:: 4.7.0
The ``funcargs`` parameter was added.
"""
# Remarks from autoconf:
@ -278,13 +272,16 @@ def CheckFunc(context, function_name, header = None, language = None):
#ifdef __cplusplus
extern "C"
#endif
char %s();""" % function_name
char %s(void);""" % function_name
lang, suffix, msg = _lang2suffix(language)
if msg:
context.Display("Cannot check for %s(): %s\n" % (function_name, msg))
return msg
if not funcargs:
funcargs = ''
text = """
%(include)s
#include <assert.h>
@ -296,16 +293,17 @@ char %s();""" % function_name
int main(void) {
#if defined (__stub_%(name)s) || defined (__stub___%(name)s)
fail fail fail
#error "%(name)s has a GNU stub, cannot check"
#else
%(name)s();
%(name)s(%(args)s);
#endif
return 0;
}
""" % { 'name': function_name,
'include': includetext,
'hdr': header }
'hdr': header,
'args': funcargs}
context.Display("Checking for %s function %s()... " % (lang, function_name))
ret = context.BuildProg(text, suffix)
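A usage sketch from an SConstruct, exercising the new funcargs parameter (the function, header and define below are arbitrary examples, not a recommendation):

env = Environment()
conf = Configure(env)
# link a call with explicit arguments instead of the default zero-argument call
if conf.CheckFunc('printf', header='#include <stdio.h>', funcargs='"%d", 1'):
    env.Append(CPPDEFINES=['HAVE_WORKING_PRINTF'])
env = conf.Finish()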
@ -579,9 +577,66 @@ int main(void)
"Set to 1 if %s is defined." % symbol)
return st
def CheckMember(context, aggregate_member, header = None, language = None):
"""
Configure check for a C or C++ member "aggregate_member".
Optional "header" can be defined to include a header file.
"language" should be "C" or "C++" and is used to select the compiler.
Default is "C".
Note that this uses the current value of compiler and linker flags, make
sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
Arguments:
aggregate_member : str
the member to check. For example, 'struct tm.tm_gmtoff'.
includes : str
Optional "header" can be defined to include a header file.
language : str
only C and C++ supported.
Returns the status (0 or False = Passed, True/non-zero = Failed).
"""
lang, suffix, msg = _lang2suffix(language)
if msg:
context.Display("Cannot check for member %s: %s\n" % (aggregate_member, msg))
return True
context.Display("Checking for %s member %s... " % (lang, aggregate_member))
fields = aggregate_member.split('.')
if len(fields) != 2:
msg = "shall contain just one dot, for example 'struct tm.tm_gmtoff'"
context.Display("Cannot check for member %s: %s\n" % (aggregate_member, msg))
return True
aggregate, member = fields[0], fields[1]
# Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
if context.headerfilename:
includetext = '#include "%s"' % context.headerfilename
else:
includetext = ''
if not header:
header = ''
text = '''
%(include)s
%(header)s
int main(void) {
if (sizeof ((%(aggregate)s *) 0)->%(member)s)
return 0;
}''' % {'include': includetext,
'header': header,
'aggregate': aggregate,
'member': member}
ret = context.BuildProg(text, suffix)
_YesNoResult(context, ret, "HAVE_" + aggregate_member, text,
"Define to 1 if the system has the member `%s`." % aggregate_member)
return ret
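A usage sketch from an SConstruct, assuming the matching high-level checker is exposed on the configure object (the member checked is the docstring's own example):

conf = Configure(env)
if conf.CheckMember('struct tm.tm_gmtoff', header='#include <time.h>'):
    env.Append(CPPDEFINES=['HAVE_STRUCT_TM_TM_GMTOFF'])
env = conf.Finish()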
def CheckLib(context, libs, func_name = None, header = None,
extra_libs = None, call = None, language = None, autoadd = 1,
append = True):
extra_libs = None, call = None, language = None, autoadd: int = 1,
append: bool=True, unique: bool=False):
"""
Configure check for a C or C++ libraries "libs". Searches through
the list of libraries, until one is found where the test succeeds.
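A usage sketch from an SConstruct (the library and symbol are arbitrary examples); autoadd=False probes for the library without modifying LIBS:

conf = Configure(env)
if conf.CheckLib('z', 'gzopen', autoadd=False):
    env.Append(LIBS=['z'])
env = conf.Finish()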
@ -630,8 +685,7 @@ char %s();
# if no function to test, leave main() blank
text = text + """
int
main() {
int main(void) {
%s
return 0;
}
@ -667,9 +721,9 @@ return 0;
if extra_libs:
l.extend(extra_libs)
if append:
oldLIBS = context.AppendLIBS(l)
oldLIBS = context.AppendLIBS(l, unique)
else:
oldLIBS = context.PrependLIBS(l)
oldLIBS = context.PrependLIBS(l, unique)
sym = "HAVE_LIB" + lib_name
else:
oldLIBS = -1
@ -707,7 +761,7 @@ def CheckProg(context, prog_name):
# END OF PUBLIC FUNCTIONS
#
def _YesNoResult(context, ret, key, text, comment = None):
def _YesNoResult(context, ret, key, text, comment = None) -> None:
r"""
Handle the result of a test with a "yes" or "no" result.
@ -726,7 +780,7 @@ def _YesNoResult(context, ret, key, text, comment = None):
context.Display("yes\n")
def _Have(context, key, have, comment = None):
def _Have(context, key, have, comment = None) -> None:
r"""
Store result of a test in context.havedict and context.headerfilename.
@ -769,7 +823,7 @@ def _Have(context, key, have, comment = None):
context.config_h = context.config_h + lines
def _LogFailed(context, text, msg):
def _LogFailed(context, text, msg) -> None:
"""
Write to the log about a failed program.
Add line numbers, so that error messages can be understood.

View File

@ -1,15 +1,6 @@
"""SCons.Debug
Code for debugging SCons internal things. Shouldn't be
needed by most users. Quick shortcuts:
from SCons.Debug import caller_trace
caller_trace()
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -29,10 +20,16 @@ caller_trace()
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Debug.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Code for debugging SCons internal things.
Shouldn't be needed by most users. Quick shortcuts::
from SCons.Debug import caller_trace
caller_trace()
"""
import atexit
import os
import sys
import time
@ -44,8 +41,11 @@ import inspect
track_instances = False
# List of currently tracked classes
tracked_classes = {}
# Global variable that gets set to 'True' by the Main script
# when SConscript call tracing should be enabled.
sconscript_trace = False
def logInstanceCreation(instance, name=None):
def logInstanceCreation(instance, name=None) -> None:
if name is None:
name = instance.__class__.__name__
if name not in tracked_classes:
@ -63,15 +63,15 @@ def string_to_classes(s):
else:
return s.split()
def fetchLoggedInstances(classes="*"):
def fetchLoggedInstances(classes: str="*"):
classnames = string_to_classes(classes)
return [(cn, len(tracked_classes[cn])) for cn in classnames]
def countLoggedInstances(classes, file=sys.stdout):
def countLoggedInstances(classes, file=sys.stdout) -> None:
for classname in string_to_classes(classes):
file.write("%s: %d\n" % (classname, len(tracked_classes[classname])))
def listLoggedInstances(classes, file=sys.stdout):
def listLoggedInstances(classes, file=sys.stdout) -> None:
for classname in string_to_classes(classes):
file.write('\n%s:\n' % classname)
for ref in tracked_classes[classname]:
@ -82,7 +82,7 @@ def listLoggedInstances(classes, file=sys.stdout):
if obj is not None:
file.write(' %s\n' % repr(obj))
def dumpLoggedInstances(classes, file=sys.stdout):
def dumpLoggedInstances(classes, file=sys.stdout) -> None:
for classname in string_to_classes(classes):
file.write('\n%s:\n' % classname)
for ref in tracked_classes[classname]:
@ -93,40 +93,34 @@ def dumpLoggedInstances(classes, file=sys.stdout):
file.write(' %20s : %s\n' % (key, value))
if sys.platform[:5] == "linux":
# Linux doesn't actually support memory usage stats from getrusage().
def memory():
def memory() -> int:
with open('/proc/self/stat') as f:
mstr = f.read()
mstr = mstr.split()[22]
return int(mstr)
elif sys.platform[:6] == 'darwin':
#TODO really get memory stats for OS X
def memory():
def memory() -> int:
return 0
elif sys.platform == 'win32':
from SCons.compat.win32 import get_peak_memory_usage
memory = get_peak_memory_usage
else:
try:
import resource
except ImportError:
try:
import win32process
import win32api
except ImportError:
def memory():
return 0
else:
def memory():
process_handle = win32api.GetCurrentProcess()
memory_info = win32process.GetProcessMemoryInfo( process_handle )
return memory_info['PeakWorkingSetSize']
def memory() -> int:
return 0
else:
def memory():
def memory() -> int:
res = resource.getrusage(resource.RUSAGE_SELF)
return res[4]
# returns caller's stack
def caller_stack():
"""return caller's stack"""
import traceback
tb = traceback.extract_stack()
# strip itself and the caller from the output
@ -141,7 +135,7 @@ def caller_stack():
caller_bases = {}
caller_dicts = {}
def caller_trace(back=0):
def caller_trace(back: int=0) -> None:
"""
Trace caller stack and save info into global dicts, which
are printed automatically at the end of SCons execution.
@ -162,7 +156,7 @@ def caller_trace(back=0):
callee = caller
# print a single caller and its callers, if any
def _dump_one_caller(key, file, level=0):
def _dump_one_caller(key, file, level: int=0) -> None:
leader = ' '*level
for v,c in sorted([(-v,c) for c,v in caller_dicts[key].items()]):
file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:])))
@ -170,7 +164,7 @@ def _dump_one_caller(key, file, level=0):
_dump_one_caller(c, file, level+1)
# print each call tree
def dump_caller_counts(file=sys.stdout):
def dump_caller_counts(file=sys.stdout) -> None:
for k in sorted(caller_bases.keys()):
file.write("Callers of %s:%d(%s), %d calls:\n"
% (func_shorten(k) + (caller_bases[k],)))
@ -201,40 +195,57 @@ if sys.platform == 'win32':
TraceDefault = 'con'
else:
TraceDefault = '/dev/tty'
TimeStampDefault = None
StartTime = time.time()
TimeStampDefault = False
StartTime = time.perf_counter()
PreviousTime = StartTime
def Trace(msg, file=None, mode='w', tstamp=None):
"""Write a trace message to a file. Whenever a file is specified,
it becomes the default for the next call to Trace()."""
def Trace(msg, tracefile=None, mode: str='w', tstamp: bool=False) -> None:
"""Write a trace message.
Write messages when debugging which do not interfere with stdout.
Useful in tests, which monitor stdout and would break with
unexpected output. Trace messages can go to the console (which is
opened as a file), or to a disk file; the tracefile argument persists
across calls unless overridden.
Args:
tracefile: file to write trace message to. If omitted,
write to the previous trace file (default: console).
mode: file open mode (default: 'w')
tstamp: write relative timestamps with trace. Outputs time since
scons was started, and time since last trace (default: False)
"""
global TraceDefault
global TimeStampDefault
global PreviousTime
if file is None:
file = TraceDefault
def trace_cleanup(traceFP) -> None:
traceFP.close()
if tracefile is None:
tracefile = TraceDefault
else:
TraceDefault = file
if tstamp is None:
TraceDefault = tracefile
if not tstamp:
tstamp = TimeStampDefault
else:
TimeStampDefault = tstamp
try:
fp = TraceFP[file]
fp = TraceFP[tracefile]
except KeyError:
try:
fp = TraceFP[file] = open(file, mode)
fp = TraceFP[tracefile] = open(tracefile, mode)
atexit.register(trace_cleanup, fp)
except TypeError:
# Assume we were passed an open file pointer.
fp = file
fp = tracefile
if tstamp:
now = time.time()
now = time.perf_counter()
fp.write('%8.4f %8.4f: ' % (now - StartTime, now - PreviousTime))
PreviousTime = now
fp.write(msg)
fp.flush()
fp.close()
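A usage sketch based on the parameters documented above (the log path is only an example):

from SCons.Debug import Trace

Trace('starting build step\n')                 # default: write to the console
Trace('more detail\n', tracefile='/tmp/scons-trace.log', tstamp=True)
Trace('still more detail\n')                   # keeps writing to the same file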
# Local Variables:
# tab-width:4

View File

@ -0,0 +1,759 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""Builders and other things for the local site.
Here's where we'll duplicate the functionality of autoconf until we
move it into the installation procedure or use something like qmconf.
The code that reads the registry to find MSVC components was borrowed
from distutils.msvccompiler.
"""
from __future__ import annotations
import os
import shutil
import stat
import sys
import time
from typing import Callable
import SCons.Action
import SCons.Builder
import SCons.CacheDir
import SCons.Environment
import SCons.Errors
import SCons.PathList
import SCons.Scanner.Dir
import SCons.Subst
import SCons.Tool
from SCons.Util import is_List, is_String, is_Sequence, is_Tuple, is_Dict, flatten
# A placeholder for a default Environment (for fetching source files
# from source code management systems and the like). This must be
# initialized later, after the top-level directory is set by the calling
# interface.
_default_env = None
# Lazily instantiate the default environment so the overhead of creating
# it doesn't apply when it's not needed.
def _fetch_DefaultEnvironment(*args, **kwargs):
"""Returns the already-created default construction environment."""
return _default_env
def DefaultEnvironment(*args, **kwargs):
"""Construct the global ("default") construction environment.
The environment is provisioned with the values from *kwargs*.
After the environment is created, this function is replaced with
a reference to :func:`_fetch_DefaultEnvironment` which efficiently
returns the initialized default construction environment without
checking for its existence.
Historically, some parts of the code held references to this function.
Thus it still has the existence check for :data:`_default_env` rather
than just blindly creating the environment and overwriting itself.
"""
global _default_env
if not _default_env:
_default_env = SCons.Environment.Environment(*args, **kwargs)
_default_env.Decider('content')
global DefaultEnvironment
DefaultEnvironment = _fetch_DefaultEnvironment
_default_env._CacheDir_path = None
return _default_env
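The same self-replacing pattern, reduced to a generic sketch (names below are hypothetical, not SCons API):

_cached = None

def _make_resource():
    return object()        # stand-in for an expensive construction step

def _fetch_resource():
    return _cached

def get_resource():
    global _cached, get_resource
    if _cached is None:
        _cached = _make_resource()
        get_resource = _fetch_resource   # later callers skip the existence check
    return _cached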
# Emitters for setting the shared attribute on object files,
# and an action for checking that all of the source files
# going into a shared library are, in fact, shared.
def StaticObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = False
return target, source
def SharedObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = 1
return target, source
def SharedFlagChecker(source, target, env):
same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME')
if same == '0' or same == '' or same == 'False':
for src in source:
try:
shared = src.attributes.shared
except AttributeError:
shared = False
if not shared:
raise SCons.Errors.UserError(
"Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]))
SharedCheck = SCons.Action.Action(SharedFlagChecker, None)
# Some people were using these variable name before we made
# SourceFileScanner part of the public interface. Don't break their
# SConscript files until we've given them some fair warning and a
# transition period.
CScan = SCons.Tool.CScanner
DScan = SCons.Tool.DScanner
LaTeXScan = SCons.Tool.LaTeXScanner
ObjSourceScan = SCons.Tool.SourceFileScanner
ProgScan = SCons.Tool.ProgramScanner
# These aren't really tool scanners, so they don't quite belong with
# the rest of those in Tool/__init__.py, but I'm not sure where else
# they should go. Leave them here for now.
DirScanner = SCons.Scanner.Dir.DirScanner()
DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner()
# Actions for common languages.
CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR")
ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR")
CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR")
ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR")
DAction = SCons.Action.Action("$DCOM", "$DCOMSTR")
ShDAction = SCons.Action.Action("$SHDCOM", "$SHDCOMSTR")
ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR")
ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR")
LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR")
ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR")
LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR")
# Common tasks that we allow users to perform in platform-independent
# ways by creating ActionFactory instances.
ActionFactory = SCons.Action.ActionFactory
def get_paths_str(dest) -> str:
"""Generates a string from *dest* for use in a strfunction.
If *dest* is a list, manually converts each elem to a string.
"""
def quote(arg) -> str:
return f'"{arg}"'
if is_List(dest):
elem_strs = [quote(d) for d in dest]
return f'[{", ".join(elem_strs)}]'
else:
return quote(dest)
permission_dic = {
'u': {
'r': stat.S_IRUSR,
'w': stat.S_IWUSR,
'x': stat.S_IXUSR
},
'g': {
'r': stat.S_IRGRP,
'w': stat.S_IWGRP,
'x': stat.S_IXGRP
},
'o': {
'r': stat.S_IROTH,
'w': stat.S_IWOTH,
'x': stat.S_IXOTH
}
}
def chmod_func(dest, mode) -> None:
"""Implementation of the Chmod action function.
*mode* can be either an integer (normally expressed in octal mode,
as in 0o755) or a string following the syntax of the POSIX chmod
command (for example "ugo+w"). The latter must be converted, since
the underlying Python only takes the numeric form.
"""
from string import digits
SCons.Node.FS.invalidate_node_memos(dest)
if not is_List(dest):
dest = [dest]
if is_String(mode) and 0 not in [i in digits for i in mode]:
mode = int(mode, 8)
if not is_String(mode):
for element in dest:
os.chmod(str(element), mode)
else:
mode = str(mode)
for operation in mode.split(","):
if "=" in operation:
operator = "="
elif "+" in operation:
operator = "+"
elif "-" in operation:
operator = "-"
else:
raise SyntaxError("Could not find +, - or =")
operation_list = operation.split(operator)
if len(operation_list) != 2:
raise SyntaxError("More than one operator found")
user = operation_list[0].strip().replace("a", "ugo")
permission = operation_list[1].strip()
new_perm = 0
for u in user:
for p in permission:
try:
new_perm = new_perm | permission_dic[u][p]
except KeyError:
raise SyntaxError("Unrecognized user or permission format")
for element in dest:
curr_perm = os.stat(str(element)).st_mode
if operator == "=":
os.chmod(str(element), new_perm)
elif operator == "+":
os.chmod(str(element), curr_perm | new_perm)
elif operator == "-":
os.chmod(str(element), curr_perm & ~new_perm)
def chmod_strfunc(dest, mode) -> str:
"""strfunction for the Chmod action function."""
if not is_String(mode):
return f'Chmod({get_paths_str(dest)}, {mode:#o})'
else:
return f'Chmod({get_paths_str(dest)}, "{mode}")'
Chmod = ActionFactory(chmod_func, chmod_strfunc)
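Typical SConscript usage of the factory, showing both mode forms accepted above (file names are examples):

# octal mode, as part of a Command's action list
env.Command('out/run.sh', 'run.sh.in',
            [Copy('$TARGET', '$SOURCE'), Chmod('$TARGET', 0o755)])
# POSIX-style symbolic mode, executed immediately
env.Execute(Chmod('scripts/helper.sh', 'u+x,go+rx'))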
def copy_func(dest, src, symlinks: bool=True) -> int:
"""Implementation of the Copy action function.
Copies *src* to *dest*. If *src* is a list, *dest* must be
a directory, or not exist (will be created).
Since Python :mod:`shutil` methods, which know nothing about
SCons Nodes, will be called to perform the actual copying,
args are converted to strings first.
If *symlinks* evaluates true, then a symbolic link will be
shallow copied and recreated as a symbolic link; otherwise, copying
a symbolic link will be equivalent to copying the symbolic link's
final target regardless of symbolic link depth.
"""
dest = str(dest)
src = [str(n) for n in src] if is_List(src) else str(src)
SCons.Node.FS.invalidate_node_memos(dest)
if is_List(src):
# this fails only if dest exists and is not a dir
try:
os.makedirs(dest, exist_ok=True)
except FileExistsError:
raise SCons.Errors.BuildError(
errstr=(
'Error: Copy() called with a list of sources, '
'which requires target to be a directory, '
f'but "{dest}" is not a directory.'
)
)
for file in src:
shutil.copy2(file, dest)
return 0
elif os.path.islink(src):
if symlinks:
try:
os.symlink(os.readlink(src), dest)
except FileExistsError:
raise SCons.Errors.BuildError(
errstr=(
f'Error: Copy() called to create symlink at "{dest}",'
' but a file already exists at that location.'
)
)
return 0
return copy_func(dest, os.path.realpath(src))
elif os.path.isfile(src):
shutil.copy2(src, dest)
return 0
else:
shutil.copytree(src, dest, symlinks)
return 0
def copy_strfunc(dest, src, symlinks: bool=True) -> str:
"""strfunction for the Copy action function."""
return f'Copy({get_paths_str(dest)}, {get_paths_str(src)})'
Copy = ActionFactory(copy_func, copy_strfunc)
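# Illustrative sketch, not part of the SCons source: the list-of-sources
# branch of copy_func above requires the destination to be a directory
# (created on demand), after which each source is copied with copy2.
import os
import shutil
import tempfile

work = tempfile.mkdtemp()
srcs = []
for name in ("a.txt", "b.txt"):
    p = os.path.join(work, name)
    with open(p, "w") as f:
        f.write(name)
    srcs.append(p)

dest = os.path.join(work, "bundle")   # does not exist yet
os.makedirs(dest, exist_ok=True)      # same call copy_func makes
for s in srcs:
    shutil.copy2(s, dest)             # per-file copy, metadata preserved

print(sorted(os.listdir(dest)))       # ['a.txt', 'b.txt']
shutil.rmtree(work)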
def delete_func(dest, must_exist: bool=False) -> None:
"""Implementation of the Delete action function.
Lets the Python :func:`os.unlink` raise an error if *dest* does not exist,
unless *must_exist* evaluates false (the default).
"""
SCons.Node.FS.invalidate_node_memos(dest)
if not is_List(dest):
dest = [dest]
for entry in dest:
entry = str(entry)
# os.path.exists returns False with broken links that exist
entry_exists = os.path.exists(entry) or os.path.islink(entry)
if not entry_exists and not must_exist:
continue
# os.path.isdir returns True when entry is a link to a dir
if os.path.isdir(entry) and not os.path.islink(entry):
shutil.rmtree(entry, True)
continue
os.unlink(entry)
def delete_strfunc(dest, must_exist: bool=False) -> str:
"""strfunction for the Delete action function."""
return f'Delete({get_paths_str(dest)})'
Delete = ActionFactory(delete_func, delete_strfunc)
def mkdir_func(dest) -> None:
"""Implementation of the Mkdir action function."""
SCons.Node.FS.invalidate_node_memos(dest)
if not is_List(dest):
dest = [dest]
for entry in dest:
os.makedirs(str(entry), exist_ok=True)
Mkdir = ActionFactory(mkdir_func, lambda _dir: f'Mkdir({get_paths_str(_dir)})')
def move_func(dest, src) -> None:
"""Implementation of the Move action function."""
SCons.Node.FS.invalidate_node_memos(dest)
SCons.Node.FS.invalidate_node_memos(src)
shutil.move(src, dest)
Move = ActionFactory(
move_func, lambda dest, src: f'Move("{dest}", "{src}")', convert=str
)
def touch_func(dest) -> None:
"""Implementation of the Touch action function."""
SCons.Node.FS.invalidate_node_memos(dest)
if not is_List(dest):
dest = [dest]
for file in dest:
file = str(file)
mtime = int(time.time())
if os.path.exists(file):
atime = os.path.getatime(file)
else:
with open(file, 'w'):
atime = mtime
os.utime(file, (atime, mtime))
Touch = ActionFactory(touch_func, lambda file: f'Touch({get_paths_str(file)})')
# Internal utility functions
# pylint: disable-msg=too-many-arguments
def _concat(prefix, items_iter, suffix, env, f=lambda x: x, target=None, source=None, affect_signature: bool=True):
"""
Creates a new list from 'items_iter' by first interpolating each element
in the list using the 'env' dictionary and then calling f on the
list, and finally calling _concat_ixes to concatenate 'prefix' and
'suffix' onto each element of the list.
"""
if not items_iter:
return items_iter
l = f(SCons.PathList.PathList(items_iter).subst_path(env, target, source))
if l is not None:
items_iter = l
if not affect_signature:
value = ['$(']
else:
value = []
value += _concat_ixes(prefix, items_iter, suffix, env)
if not affect_signature:
value += ["$)"]
return value
# pylint: enable-msg=too-many-arguments
def _concat_ixes(prefix, items_iter, suffix, env):
"""
Creates a new list from 'items_iter' by concatenating the 'prefix' and
'suffix' arguments onto each element of the list. A trailing space
on 'prefix' or leading space on 'suffix' will cause them to be put
into separate list elements rather than being concatenated.
"""
result = []
# ensure that prefix and suffix are strings
prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW))
suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW))
for x in flatten(items_iter):
if isinstance(x, SCons.Node.FS.File):
result.append(x)
continue
x = str(x)
if x:
if prefix:
if prefix[-1] == ' ':
result.append(prefix[:-1])
elif x[:len(prefix)] != prefix:
x = prefix + x
result.append(x)
if suffix:
if suffix[0] == ' ':
result.append(suffix[1:])
elif x[-len(suffix):] != suffix:
result[-1] = result[-1] + suffix
return result
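# Illustrative sketch, not part of the SCons source: a simplified mirror of
# the splicing rule above. A trailing space on the prefix keeps it as its
# own argument; otherwise the prefix is glued directly onto the item.
def splice(prefix, items, suffix=""):
    out = []
    for x in items:
        if prefix:
            if prefix[-1] == " ":
                out.append(prefix[:-1])    # separate list element
            elif not x.startswith(prefix):
                x = prefix + x             # glued onto the item
        out.append(x)
        if suffix and not x.endswith(suffix):
            out[-1] = out[-1] + suffix
    return out

print(splice("-L", ["/usr/lib"]))             # ['-L/usr/lib']
print(splice("-isystem ", ["/opt/include"]))  # ['-isystem', '/opt/include']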
def _stripixes(
prefix: str,
items,
suffix: str,
stripprefixes: list[str],
stripsuffixes: list[str],
env,
literal_prefix: str = "",
c: Callable[[list], list] = None,
) -> list:
"""Returns a list with text added to items after first stripping them.
A companion to :func:`_concat_ixes`, used by tools (like the GNU
linker) that need to turn something like ``libfoo.a`` into ``-lfoo``.
*stripprefixes* and *stripsuffixes* are stripped from *items*.
Calls function *c* to postprocess the result.
Args:
prefix: string to prepend to elements
items: string or iterable to transform
suffix: string to append to elements
stripprefixes: prefix string(s) to strip from elements
stripsuffixes: suffix string(s) to strip from elements
env: construction environment for variable interpolation
c: optional function to perform a transformation on the list.
The default is `None`, which will select :func:`_concat_ixes`.
"""
if not items:
return items
if not callable(c):
env_c = env['_concat']
if env_c != _concat and callable(env_c):
# There's a custom _concat() method in the construction
# environment, and we've allowed people to set that in
# the past (see test/custom-concat.py), so preserve the
# backwards compatibility.
c = env_c
else:
c = _concat_ixes
stripprefixes = list(map(env.subst, flatten(stripprefixes)))
stripsuffixes = list(map(env.subst, flatten(stripsuffixes)))
# This is a little funky: if literal_prefix is the same as os.pathsep
# (e.g. both ':'), the normal conversion to a PathList will drop the
# literal_prefix prefix. Tell it not to split in that case, which *should*
# be okay because if we come through here, we're normally processing
# library names and won't have strings like "path:secondpath:thirdpath"
# which is why PathList() otherwise wants to split strings.
do_split = not literal_prefix == os.pathsep
stripped = []
for l in SCons.PathList.PathList(items, do_split).subst_path(env, None, None):
if isinstance(l, SCons.Node.FS.File):
stripped.append(l)
continue
if not is_String(l):
l = str(l)
if literal_prefix and l.startswith(literal_prefix):
stripped.append(l)
continue
for stripprefix in stripprefixes:
lsp = len(stripprefix)
if l[:lsp] == stripprefix:
l = l[lsp:]
# Do not strip more than one prefix
break
for stripsuffix in stripsuffixes:
lss = len(stripsuffix)
if l[-lss:] == stripsuffix:
l = l[:-lss]
# Do not strip more than one suffix
break
stripped.append(l)
return c(prefix, stripped, suffix, env)
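# Illustrative sketch, not part of the SCons source: the strip-then-prefix
# transformation _stripixes performs for a GNU-style linker, where a library
# file name like "libfoo.a" must become "-lfoo" on the command line.
def to_link_flag(name, stripprefixes=("lib",), stripsuffixes=(".a", ".so")):
    for p in stripprefixes:
        if name.startswith(p):
            name = name[len(p):]
            break                    # strip at most one prefix
    for s in stripsuffixes:
        if name.endswith(s):
            name = name[:-len(s)]
            break                    # strip at most one suffix
    return "-l" + name

print(to_link_flag("libfoo.a"))      # -lfoo
print(to_link_flag("libm.so"))       # -lm
print(to_link_flag("foo"))           # -lfoo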
def processDefines(defs) -> list[str]:
"""Return list of strings for preprocessor defines from *defs*.
Resolves the different forms ``CPPDEFINES`` can be assembled in:
if the Append/Prepend routines are used beyond an initial setting it
will be a deque, but if written to only once (Environment initializer,
or direct write) it can be a multitude of types.
Any prefix/suffix is handled elsewhere (usually :func:`_concat_ixes`).
.. versionchanged:: 4.5.0
Bare tuples are now treated the same as tuple-in-sequence, assumed
to describe a valued macro. Bare strings are now split on space.
A dictionary is no longer sorted before handling.
"""
dlist = []
if is_List(defs):
for define in defs:
if define is None:
continue
elif is_Sequence(define):
if len(define) > 2:
raise SCons.Errors.UserError(
f"Invalid tuple in CPPDEFINES: {define!r}, "
"must be a tuple with only two elements"
)
name, *value = define
if value and value[0] is not None:
# TODO: do we need to quote value if it contains space?
dlist.append(f"{name}={value[0]}")
else:
dlist.append(str(define[0]))
elif is_Dict(define):
for macro, value in define.items():
if value is not None:
# TODO: do we need to quote value if it contains space?
dlist.append(f"{macro}={value}")
else:
dlist.append(str(macro))
elif is_String(define):
dlist.append(str(define))
else:
raise SCons.Errors.UserError(
f"CPPDEFINES entry {define!r} is not a tuple, list, "
"dict, string or None."
)
elif is_Tuple(defs):
if len(defs) > 2:
raise SCons.Errors.UserError(
f"Invalid tuple in CPPDEFINES: {defs!r}, "
"must be a tuple with only two elements"
)
name, *value = defs
if value and value[0] is not None:
# TODO: do we need to quote value if it contains space?
dlist.append(f"{name}={value[0]}")
else:
dlist.append(str(defs[0]))
elif is_Dict(defs):
for macro, value in defs.items():
if value is None:
dlist.append(str(macro))
else:
dlist.append(f"{macro}={value}")
elif is_String(defs):
return defs.split()
else:
dlist.append(str(defs))
return dlist
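# Illustrative sketch, not part of the SCons source: the flat strings that
# processDefines produces for the CPPDEFINES shapes documented above.
# Assumes SCons 4.x is importable (this is the module defined here).
from SCons.Defaults import processDefines

assert processDefines(["NDEBUG", ("VERSION", 3), {"DEBUG": None}]) == [
    "NDEBUG", "VERSION=3", "DEBUG"]
assert processDefines(("VERSION", 3)) == ["VERSION=3"]       # bare tuple
assert processDefines("NDEBUG _GNU_SOURCE") == ["NDEBUG", "_GNU_SOURCE"]
assert processDefines({"A": 1, "B": None}) == ["A=1", "B"]   # dict, unsorted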
def _defines(prefix, defs, suffix, env, target=None, source=None, c=_concat_ixes):
"""A wrapper around :func:`_concat_ixes` that turns a list or string
into a list of C preprocessor command-line definitions.
"""
return c(prefix, env.subst_list(processDefines(defs), target=target, source=source), suffix, env)
class NullCmdGenerator:
"""Callable class for use as a no-effect command generator.
The ``__call__`` method for this class simply returns the thing
you instantiated it with. Example usage::
env["DO_NOTHING"] = NullCmdGenerator
env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}"
"""
def __init__(self, cmd) -> None:
self.cmd = cmd
def __call__(self, target, source, env, for_signature=None):
return self.cmd
class Variable_Method_Caller:
"""A class for finding a construction variable on the stack and
calling one of its methods.
Used to support "construction variables" appearing in string
``eval``s that actually stand in for methods--specifically, the use
of "RDirs" in a call to :func:`_concat` that should actually execute the
``TARGET.RDirs`` method.
Historical note: This was formerly supported by creating a little
"build dictionary" that mapped RDirs to the method, but this got
in the way of Memoizing construction environments, because we had to
create new environment objects to hold the variables.
"""
def __init__(self, variable, method) -> None:
self.variable = variable
self.method = method
def __call__(self, *args, **kw):
try:
1 // 0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
variable = self.variable
while frame:
if variable in frame.f_locals:
v = frame.f_locals[variable]
if v:
method = getattr(v, self.method)
return method(*args, **kw)
frame = frame.f_back
return None
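# Illustrative sketch, not part of the SCons source: the stack-walking idea
# behind Variable_Method_Caller, using sys._getframe instead of the
# exception trick above. A callable searches enclosing frames for a local
# of a given name and invokes a method on it, which is how "RDirs" in a
# substituted string reaches TARGET.RDirs at expansion time.
import sys

def find_local_and_call(varname, methodname, *args, **kw):
    frame = sys._getframe(1)          # start at the caller's frame
    while frame:
        obj = frame.f_locals.get(varname)
        if obj:
            return getattr(obj, methodname)(*args, **kw)
        frame = frame.f_back
    return None

class FakeTarget:
    def Dir(self, name):
        return f"<Dir {name}>"

def expansion_site():
    target = FakeTarget()             # the local the lookup will find
    return find_local_and_call("target", "Dir", "build/out")

print(expansion_site())               # <Dir build/out>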
def __libversionflags(env, version_var, flags_var):
"""
if version_var is not empty, returns env[flags_var], otherwise returns None
:param env:
:param version_var:
:param flags_var:
:return:
"""
try:
if env.subst('$' + version_var):
return env[flags_var]
except KeyError:
pass
return None
def __lib_either_version_flag(env, version_var1, version_var2, flags_var):
"""
if $version_var1 or $version_var2 is not empty, returns env[flags_var], otherwise returns None
:param env:
:param version_var1:
:param version_var2:
:param flags_var:
:return:
"""
try:
if env.subst('$' + version_var1) or env.subst('$' + version_var2):
return env[flags_var]
except KeyError:
pass
return None
ConstructionEnvironment = {
'BUILDERS': {},
'SCANNERS': [SCons.Tool.SourceFileScanner],
'CONFIGUREDIR': '#/.sconf_temp',
'CONFIGURELOG': '#/config.log',
'CPPSUFFIXES': SCons.Tool.CSuffixes,
'DSUFFIXES': SCons.Tool.DSuffixes,
'ENV': {},
'IDLSUFFIXES': SCons.Tool.IDLSuffixes,
'_concat': _concat,
'_defines': _defines,
'_stripixes': _stripixes,
'_LIBFLAGS': '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}',
'_LIBDIRFLAGS': '${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE, affect_signature=False)}',
'_CPPINCFLAGS': '${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE, affect_signature=False)}',
'_CPPDEFFLAGS': '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__, TARGET, SOURCE)}',
'__libversionflags': __libversionflags,
'__SHLIBVERSIONFLAGS': '${__libversionflags(__env__,"SHLIBVERSION","_SHLIBVERSIONFLAGS")}',
'__LDMODULEVERSIONFLAGS': '${__libversionflags(__env__,"LDMODULEVERSION","_LDMODULEVERSIONFLAGS")}',
'__DSHLIBVERSIONFLAGS': '${__libversionflags(__env__,"DSHLIBVERSION","_DSHLIBVERSIONFLAGS")}',
'__lib_either_version_flag': __lib_either_version_flag,
'TEMPFILE': NullCmdGenerator,
'TEMPFILEARGJOIN': ' ',
'TEMPFILEARGESCFUNC': SCons.Subst.quote_spaces,
'Dir': Variable_Method_Caller('TARGET', 'Dir'),
'Dirs': Variable_Method_Caller('TARGET', 'Dirs'),
'File': Variable_Method_Caller('TARGET', 'File'),
'RDirs': Variable_Method_Caller('TARGET', 'RDirs'),
}
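# Illustrative sketch, not part of the SCons source: what the expansion
# variables registered above produce for ordinary Python values. Assumes
# SCons 4.x is importable; the exact prefixes come from the loaded tool
# chain, so the flags shown are for a GCC-style toolset.
from SCons.Environment import Environment

env = Environment(CPPDEFINES=["NDEBUG", ("VERSION", 3)], LIBS=["m"])
print(env.subst("$_CPPDEFFLAGS"))   # e.g. -DNDEBUG -DVERSION=3
print(env.subst("$_LIBFLAGS"))      # e.g. -lm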
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -0,0 +1,119 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import re
_is_valid_var = re.compile(r'[_a-zA-Z]\w*$')
_rm = re.compile(r'\$[()]')
_remove = re.compile(r'\$\([^$]*(\$[^)][^$]*)*\$\)')
# Regular expressions for splitting strings and handling substitutions,
# for use by the scons_subst() and scons_subst_list() functions:
#
# The first expression compiled matches all of the $-introduced tokens
# that we need to process in some way, and is used for substitutions.
# The expressions it matches are:
#
# "$$"
# "$("
# "$)"
# "$variable" [must begin with alphabetic or underscore]
# "${any stuff}"
#
# The second expression compiled is used for splitting strings into tokens
# to be processed, and it matches all of the tokens listed above, plus
# the following that affect how arguments do or don't get joined together:
#
# " " [white space]
# "non-white-space" [without any dollar signs]
# "$" [single dollar sign]
#
_dollar_exps_str = r'\$[\$\(\)]|\$[_a-zA-Z][\.\w]*|\${[^}]*}'
_dollar_exps = re.compile(r'(%s)' % _dollar_exps_str)
_separate_args = re.compile(r'(%s|\s+|[^\s$]+|\$)' % _dollar_exps_str)
# This regular expression is used to replace strings of multiple white
# space characters in the string result from the scons_subst() function.
_space_sep = re.compile(r'[\t ]+(?![^{]*})')
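# Illustrative sketch, not part of the SCons source: what the two regular
# expressions above pick out of a typical command string (local copies so
# the snippet stands alone).
import re

dollar_exps_str = r'\$[\$\(\)]|\$[_a-zA-Z][\.\w]*|\${[^}]*}'
dollar_exps = re.compile(r'(%s)' % dollar_exps_str)
separate_args = re.compile(r'(%s|\s+|[^\s$]+|\$)' % dollar_exps_str)

cmd = "$CC -o ${TARGET.dir}/out $( -v $)"
print(dollar_exps.findall(cmd))
# ['$CC', '${TARGET.dir}', '$(', '$)']
print([t for t in separate_args.findall(cmd) if t.strip()])
# ['$CC', '-o', '${TARGET.dir}', '/out', '$(', '-v', '$)']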
class ValueTypes:
"""
Enum to store what type of value the variable holds.
"""
UNKNOWN = 0
STRING = 1
CALLABLE = 2
VARIABLE = 3
class EnvironmentValue:
"""
Hold a single value. We're going to cache the parsed version of the value
and keep track of the variables which feed into this value's evaluation
"""
def __init__(self, value) -> None:
self.value = value
self.var_type = ValueTypes.UNKNOWN
if callable(self.value):
self.var_type = ValueTypes.CALLABLE
else:
self.parse_value()
def parse_value(self) -> None:
"""
Scan the string and break into component values
"""
try:
if '$' not in self.value:
self._parsed = self.value
self.var_type = ValueTypes.STRING
else:
# Now we need to parse the specified string into its
# component $-expression and literal tokens.
self._parsed = [t for t in _separate_args.findall(self.value) if t]
self.var_type = ValueTypes.VARIABLE
except TypeError:
# likely callable? either way we don't parse
self._parsed = self.value
def parse_trial(self) -> None:
"""
Try alternate parsing methods.
:return:
"""
parts = []
for c in self.value:
pass
class EnvironmentValues:
"""
A class to hold all the environment variables
"""
def __init__(self, **kw) -> None:
self._dict = {}
for k in kw:
self._dict[k] = EnvironmentValue(kw[k])

View File

@ -0,0 +1,39 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
from SCons.EnvironmentValues import EnvironmentValues
class MyTestCase(unittest.TestCase):
def test_simple_environmentValues(self) -> None:
"""Test comparing SubstitutionEnvironments
"""
env1 = EnvironmentValues(XXX='x')
env2 = EnvironmentValues(XXX='x',XX="$X", X1="${X}", X2="$($X$)")
if __name__ == '__main__':
unittest.main()

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -19,83 +20,71 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""SCons.Errors
This file contains the exception classes used to handle internal
and user errors in SCons.
"""SCons exception classes.
Used to handle internal and user errors in SCons.
"""
__revision__ = "src/engine/SCons/Errors.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
from __future__ import annotations
import shutil
import SCons.Util
from typing import TYPE_CHECKING
from SCons.Util.sctypes import to_String, is_String
if TYPE_CHECKING:
from SCons.Executor import Executor
# Note that not all Errors are defined here, some are at the point of use
class BuildError(Exception):
""" Errors occurring while building.
"""SCons Errors that can occur while building.
BuildError have the following attributes:
=========================================
A :class:`BuildError` exception contains information both
about the error itself, and what caused the error.
Information about the cause of the build error:
-----------------------------------------------
Attributes:
node: (*cause*) the error occurred while building this target node(s)
errstr: (*info*) a description of the error message
status: (*info*) the return code of the action that caused the build error.
Must be set to a non-zero value even if the build error is not due
to an action returning a non-zero returned code.
exitstatus: (*info*) SCons exit status due to this build error.
Must be nonzero unless due to an explicit :meth:`Exit` call.
Not always the same as ``status``, since actions return a status
code that should be respected, but SCons typically exits with 2
irrespective of the return value of the failed action.
filename: (*info*) The name of the file or directory that caused the
build error. Set to ``None`` if no files are associated with
this error. This might be different from the target
being built. For example, failure to create the
directory in which the target file will appear. It
can be ``None`` if the error is not due to a particular
filename.
executor: (*cause*) the executor that caused the build to fail (might
be ``None`` if the build failure is not due to the executor failing)
action: (*cause*) the action that caused the build to fail (might be
``None`` if the build failure is not due to an action failure)
command: (*cause*) the command line for the action that caused the
build to fail (might be ``None`` if the build failure
is not due to an action failure)
exc_info: (*info*) Info about exception that caused the build
error. Set to ``(None, None, None)`` if this build
error is not due to an exception.
errstr : a description of the error message
status : the return code of the action that caused the build error.
Must be set to a non-zero value even if the build error is not due
to an action returning a non-zero returned code.
exitstatus : SCons exit status due to this build error.
Must be nonzero unless due to an explicit Exit()
call. Not always the same as status, since
actions return a status code that should be
respected, but SCons typically exits with 2
irrespective of the return value of the failed
action.
filename : The name of the file or directory that caused the
build error. Set to None if no files are associated with
this error. This might be different from the target
being built. For example, failure to create the
directory in which the target file will appear. It
can be None if the error is not due to a particular
filename.
exc_info : Info about exception that caused the build
error. Set to (None, None, None) if this build
error is not due to an exception.
Information about the cause of the location of the error:
---------------------------------------------------------
node : the error occured while building this target node(s)
executor : the executor that caused the build to fail (might
be None if the build failures is not due to the
executor failing)
action : the action that caused the build to fail (might be
None if the build failures is not due to the an
action failure)
command : the command line for the action that caused the
build to fail (might be None if the build failures
is not due to the an action failure)
"""
def __init__(self,
node=None, errstr="Unknown error", status=2, exitstatus=2,
filename=None, executor=None, action=None, command=None,
exc_info=(None, None, None)):
node=None, errstr: str="Unknown error", status: int=2, exitstatus: int=2,
filename=None, executor: Executor | None = None, action=None, command=None,
exc_info=(None, None, None)) -> None:
# py3: errstr should be string and not bytes.
self.errstr = SCons.Util.to_String(errstr)
self.errstr = to_String(errstr)
self.status = status
self.exitstatus = exitstatus
self.filename = filename
@ -106,10 +95,10 @@ class BuildError(Exception):
self.action = action
self.command = command
Exception.__init__(self, node, errstr, status, exitstatus, filename,
executor, action, command, exc_info)
super().__init__(node, errstr, status, exitstatus, filename,
executor, action, command, exc_info)
def __str__(self):
def __str__(self) -> str:
if self.filename:
return self.filename + ': ' + self.errstr
else:
@ -131,21 +120,22 @@ class MSVCError(IOError):
pass
class ExplicitExit(Exception):
def __init__(self, node=None, status=None, *args):
def __init__(self, node=None, status=None, *args) -> None:
self.node = node
self.status = status
self.exitstatus = status
Exception.__init__(self, *args)
super().__init__(*args)
def convert_to_BuildError(status, exc_info=None):
"""
Convert any return code a BuildError Exception.
"""Convert a return code to a BuildError Exception.
:Parameters:
- `status`: can either be a return code or an Exception.
The buildError.status we set here will normally be
The `buildError.status` we set here will normally be
used as the exit status of the "scons" process.
Args:
status: can either be a return code or an Exception.
exc_info (tuple, optional): explicit exception information.
"""
if not exc_info and isinstance(status, Exception):
@ -191,8 +181,12 @@ def convert_to_BuildError(status, exc_info=None):
# (for example, failure to create the directory in which the
# target file will appear).
filename = getattr(status, 'filename', None)
strerror = getattr(status, 'strerror', str(status))
errno = getattr(status, 'errno', 2)
strerror = getattr(status, 'strerror', None)
if strerror is None:
strerror = str(status)
errno = getattr(status, 'errno', None)
if errno is None:
errno = 2
buildError = BuildError(
errstr=strerror,
@ -206,7 +200,7 @@ def convert_to_BuildError(status, exc_info=None):
status=2,
exitstatus=2,
exc_info=exc_info)
elif SCons.Util.is_String(status):
elif is_String(status):
buildError = BuildError(
errstr=status,
status=2,

View File

@ -1,12 +1,6 @@
"""SCons.Executor
A module for executing actions with specific lists of target and source
Nodes.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -26,27 +20,28 @@ Nodes.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
__revision__ = "src/engine/SCons/Executor.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Execute actions with specific lists of target and source Nodes."""
from __future__ import annotations
import collections
import SCons.Debug
from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Memoize
import SCons.Util
from SCons.compat import with_metaclass, NoSlotsPyPy
from SCons.compat import NoSlotsPyPy
import SCons.Debug
from SCons.Debug import logInstanceCreation
class Batch(object):
class Batch:
"""Remembers exact association between targets
and sources of executor."""
__slots__ = ('targets',
'sources')
def __init__(self, targets=[], sources=[]):
def __init__(self, targets=[], sources=[]) -> None:
self.targets = targets
self.sources = sources
@ -62,7 +57,7 @@ class TSList(collections.UserList):
a list during variable expansion. We're not really using any
collections.UserList methods in practice.
"""
def __init__(self, func):
def __init__(self, func) -> None:
self.func = func
def __getattr__(self, attr):
nl = self.func()
@ -70,32 +65,28 @@ class TSList(collections.UserList):
def __getitem__(self, i):
nl = self.func()
return nl[i]
def __getslice__(self, i, j):
nl = self.func()
i, j = max(i, 0), max(j, 0)
return nl[i:j]
def __str__(self):
def __str__(self) -> str:
nl = self.func()
return str(nl)
def __repr__(self):
def __repr__(self) -> str:
nl = self.func()
return repr(nl)
class TSObject(object):
class TSObject:
"""A class that implements $TARGET or $SOURCE expansions by wrapping
an Executor method.
"""
def __init__(self, func):
def __init__(self, func) -> None:
self.func = func
def __getattr__(self, attr):
n = self.func()
return getattr(n, attr)
def __str__(self):
def __str__(self) -> str:
n = self.func()
if n:
return str(n)
return ''
def __repr__(self):
def __repr__(self) -> str:
n = self.func()
if n:
return repr(n)
@ -115,7 +106,7 @@ def rfile(node):
return rfile()
def execute_nothing(obj, target, kw):
def execute_nothing(obj, target, kw) -> int:
return 0
def execute_action_list(obj, target, kw):
@ -149,14 +140,14 @@ def execute_actions_str(obj):
env)
for action in obj.get_action_list()])
def execute_null_str(obj):
def execute_null_str(obj) -> str:
return ''
_execute_str_map = {0 : execute_null_str,
1 : execute_actions_str}
class Executor(object, with_metaclass(NoSlotsPyPy)):
class Executor(metaclass=NoSlotsPyPy):
"""A class for controlling instances of executing an action.
This largely exists to hold a single association of an action,
@ -181,7 +172,7 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
'_execute_str')
def __init__(self, action, env=None, overridelist=[{}],
targets=[], sources=[], builder_kw={}):
targets=[], sources=[], builder_kw={}) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Executor.Executor')
self.set_action_list(action)
self.pre_actions = []
@ -213,7 +204,7 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
}
return self.lvars
def _get_changes(self):
def _get_changes(self) -> None:
cs = []
ct = []
us = []
@ -281,10 +272,8 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
return self.get_lvars()[targets_string]
def set_action_list(self, action):
import SCons.Util
if not SCons.Util.is_List(action):
if not action:
import SCons.Errors
raise SCons.Errors.UserError("Executor must have an action.")
action = [action]
self.action_list = action
@ -396,10 +385,10 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
def __call__(self, target, **kw):
return _do_execute_map[self._do_execute](self, target, kw)
def cleanup(self):
def cleanup(self) -> None:
self._memo = {}
def add_sources(self, sources):
def add_sources(self, sources) -> None:
"""Add source files to this Executor's list. This is necessary
for "multi" Builders that can be called repeatedly to build up
a source file list for a given target."""
@ -412,7 +401,7 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
def get_sources(self):
return self.batches[0].sources
def add_batch(self, targets, sources):
def add_batch(self, targets, sources) -> None:
"""Add pair of associated target and source to this Executor's list.
This is necessary for "batch" Builders that can be called repeatedly
to build up a list of matching target and source files that will be
@ -430,18 +419,18 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
msg = "Source `%s' not found, needed by target `%s'."
raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0]))
def add_pre_action(self, action):
def add_pre_action(self, action) -> None:
self.pre_actions.append(action)
def add_post_action(self, action):
def add_post_action(self, action) -> None:
self.post_actions.append(action)
# another extra indirection for new-style objects and nullify...
def __str__(self):
def __str__(self) -> str:
return _execute_str_map[self._execute_str](self)
def nullify(self):
def nullify(self) -> None:
self.cleanup()
self._do_execute = 0
self._execute_str = 0
@ -472,23 +461,23 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
self._memo['get_contents'] = result
return result
def get_timestamp(self):
def get_timestamp(self) -> int:
"""Fetch a time stamp for this Executor. We don't have one, of
course (only files do), but this is the interface used by the
timestamp module.
"""
return 0
def scan_targets(self, scanner):
def scan_targets(self, scanner) -> None:
# TODO(batch): scan by batches
self.scan(scanner, self.get_all_targets())
def scan_sources(self, scanner):
def scan_sources(self, scanner) -> None:
# TODO(batch): scan by batches
if self.batches[0].sources:
self.scan(scanner, self.get_all_sources())
def scan(self, scanner, node_list):
def scan(self, scanner, node_list) -> None:
"""Scan a list of this Executor's files (targets or sources) for
implicit dependencies and update all of the targets with them.
This essentially short-circuits an N*M scan of the sources for
@ -561,12 +550,12 @@ class Executor(object, with_metaclass(NoSlotsPyPy)):
_batch_executors = {}
_batch_executors: dict[str, Executor] = {}
def GetBatchExecutor(key):
def GetBatchExecutor(key: str) -> Executor:
return _batch_executors[key]
def AddBatchExecutor(key, executor):
def AddBatchExecutor(key: str, executor: Executor) -> None:
assert key not in _batch_executors
_batch_executors[key] = executor
@ -589,7 +578,7 @@ def get_NullEnvironment():
nullenv = NullEnvironment()
return nullenv
class Null(object, with_metaclass(NoSlotsPyPy)):
class Null(metaclass=NoSlotsPyPy):
"""A null Executor, with a null build Environment, that does
nothing when the rest of the methods call it.
@ -614,7 +603,7 @@ class Null(object, with_metaclass(NoSlotsPyPy)):
'_do_execute',
'_execute_str')
def __init__(self, *args, **kw):
def __init__(self, *args, **kw) -> None:
if SCons.Debug.track_instances:
logInstanceCreation(self, 'Executor.Null')
self.batches = [Batch(kw['targets'][:], [])]
@ -622,9 +611,9 @@ class Null(object, with_metaclass(NoSlotsPyPy)):
return get_NullEnvironment()
def get_build_scanner_path(self):
return None
def cleanup(self):
def cleanup(self) -> None:
pass
def prepare(self):
def prepare(self) -> None:
pass
def get_unignored_sources(self, *args, **kw):
return tuple(())
@ -642,11 +631,11 @@ class Null(object, with_metaclass(NoSlotsPyPy)):
return []
def get_action_side_effects(self):
return []
def __call__(self, *args, **kw):
def __call__(self, *args, **kw) -> int:
return 0
def get_contents(self):
def get_contents(self) -> str:
return ''
def _morph(self):
def _morph(self) -> None:
"""Morph this Null executor to a real Executor object."""
batches = self.batches
self.__class__ = Executor
@ -656,13 +645,13 @@ class Null(object, with_metaclass(NoSlotsPyPy)):
# The following methods require morphing this Null Executor to a
# real Executor object.
def add_pre_action(self, action):
def add_pre_action(self, action) -> None:
self._morph()
self.add_pre_action(action)
def add_post_action(self, action):
def add_post_action(self, action) -> None:
self._morph()
self.add_post_action(action)
def set_action_list(self, action):
def set_action_list(self, action) -> None:
self._morph()
self.set_action_list(action)

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -19,12 +20,8 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
__revision__ = "src/engine/SCons/Memoize.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """Memoizer
"""Decorator-based memoizer to count caching stats.
A decorator-based implementation to count hits and misses of the computed
values that various methods cache in memory.
@ -106,7 +103,7 @@ use_memoizer = None
# Global list of counter objects
CounterList = {}
class Counter(object):
class Counter:
"""
Base class for counting memoization hits and misses.
@ -114,7 +111,7 @@ class Counter(object):
fill in the correct class name and method name that represents
the name of the function being counted.
"""
def __init__(self, cls_name, method_name):
def __init__(self, cls_name, method_name) -> None:
"""
"""
self.cls_name = cls_name
@ -123,8 +120,8 @@ class Counter(object):
self.miss = 0
def key(self):
return self.cls_name+'.'+self.method_name
def display(self):
print(" {:7d} hits {:7d} misses {}()".format(self.hit, self.miss, self.key()))
def display(self) -> None:
print(f" {self.hit:7d} hits {self.miss:7d} misses {self.key()}()")
def __eq__(self, other):
try:
return self.key() == other.key()
@ -139,7 +136,7 @@ class CountValue(Counter):
the class's methods that memoizes its return value by simply storing
the return value in its _memo dictionary.
"""
def count(self, *args, **kw):
def count(self, *args, **kw) -> None:
""" Counts whether the memoized value has already been
set (a hit) or not (a miss).
"""
@ -159,12 +156,12 @@ class CountDict(Counter):
indexed by some key that can be computed from one or more of
its input arguments.
"""
def __init__(self, cls_name, method_name, keymaker):
def __init__(self, cls_name, method_name, keymaker) -> None:
"""
"""
Counter.__init__(self, cls_name, method_name)
super().__init__(cls_name, method_name)
self.keymaker = keymaker
def count(self, *args, **kw):
def count(self, *args, **kw) -> None:
""" Counts whether the computed key value is already present
in the memoization dictionary (a hit) or not (a miss).
"""
@ -180,7 +177,7 @@ class CountDict(Counter):
else:
self.miss = self.miss + 1
def Dump(title=None):
def Dump(title=None) -> None:
""" Dump the hit/miss count for all the counters
collected so far.
"""
@ -189,7 +186,7 @@ def Dump(title=None):
for counter in sorted(CounterList):
CounterList[counter].display()
def EnableMemoization():
def EnableMemoization() -> None:
global use_memoizer
use_memoizer = 1

View File

@ -1,14 +1,6 @@
"""scons.Node.Alias
Alias nodes.
This creates a hash of global Aliases (dummy targets).
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,15 +20,18 @@ This creates a hash of global Aliases (dummy targets).
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Node/Alias.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Alias nodes.
This creates a hash of global Aliases (dummy targets).
"""
import collections
import SCons.Errors
import SCons.Node
import SCons.Util
from SCons.Util import hash_signature
class AliasNameSpace(collections.UserDict):
def Alias(self, name, **kw):
@ -62,38 +57,6 @@ class AliasNodeInfo(SCons.Node.NodeInfoBase):
def str_to_node(self, s):
return default_ans.Alias(s)
def __getstate__(self):
"""
Return all fields that shall be pickled. Walk the slots in the class
hierarchy and add those to the state dictionary. If a '__dict__' slot is
available, copy all entries to the dictionary. Also include the version
id, which is fixed for all instances of a class.
"""
state = getattr(self, '__dict__', {}).copy()
for obj in type(self).mro():
for name in getattr(obj,'__slots__',()):
if hasattr(self, name):
state[name] = getattr(self, name)
state['_version_id'] = self.current_version_id
try:
del state['__weakref__']
except KeyError:
pass
return state
def __setstate__(self, state):
"""
Restore the attributes from a pickled state.
"""
# TODO check or discard version
del state['_version_id']
for key, value in state.items():
if key not in ('__weakref__',):
setattr(self, key, value)
class AliasBuildInfo(SCons.Node.BuildInfoBase):
__slots__ = ()
current_version_id = 2
@ -103,29 +66,29 @@ class Alias(SCons.Node.Node):
NodeInfo = AliasNodeInfo
BuildInfo = AliasBuildInfo
def __init__(self, name):
SCons.Node.Node.__init__(self)
def __init__(self, name) -> None:
super().__init__()
self.name = name
self.changed_since_last_build = 1
self.store_info = 0
def str_for_display(self):
return '"' + self.__str__() + '"'
def __str__(self):
def __str__(self) -> str:
return self.name
def make_ready(self):
def make_ready(self) -> None:
self.get_csig()
really_build = SCons.Node.Node.build
is_up_to_date = SCons.Node.Node.children_are_up_to_date
def is_under(self, dir):
def is_under(self, dir) -> bool:
# Make Alias nodes get built regardless of
# what directory scons was run from. Alias nodes
# are outside the filesystem:
return 1
return True
def get_contents(self):
"""The contents of an alias is the concatenation
@ -133,7 +96,7 @@ class Alias(SCons.Node.Node):
childsigs = [n.get_csig() for n in self.children()]
return ''.join(childsigs)
def sconsign(self):
def sconsign(self) -> None:
"""An Alias is not recorded in .sconsign files"""
pass
@ -141,11 +104,17 @@ class Alias(SCons.Node.Node):
#
#
def build(self):
def build(self, **kw) -> None:
"""A "builder" for aliases."""
pass
if len(self.executor.post_actions) + len(self.executor.pre_actions) > 0:
# Only actually call Node's build() if there are any
# pre or post actions.
# Alias nodes will get 1 action and Alias.build()
# This fixes GH Issue #2281
return self.really_build(**kw)
def convert(self):
def convert(self) -> None:
try: del self.builder
except AttributeError: pass
self.reset_executor()
@ -166,7 +135,7 @@ class Alias(SCons.Node.Node):
pass
contents = self.get_contents()
csig = SCons.Util.MD5signature(contents)
csig = hash_signature(contents)
self.get_ninfo().csig = csig
return csig

View File

@ -1,11 +1,6 @@
"""scons.Node.Python
Python nodes.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -25,12 +20,14 @@ Python nodes.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Node/Python.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Python nodes."""
import SCons.Node
_memo_lookup_map = {}
class ValueNodeInfo(SCons.Node.NodeInfoBase):
__slots__ = ('csig',)
current_version_id = 2
@ -38,82 +35,64 @@ class ValueNodeInfo(SCons.Node.NodeInfoBase):
field_list = ['csig']
def str_to_node(self, s):
return Value(s)
def __getstate__(self):
"""
Return all fields that shall be pickled. Walk the slots in the class
hierarchy and add those to the state dictionary. If a '__dict__' slot is
available, copy all entries to the dictionary. Also include the version
id, which is fixed for all instances of a class.
"""
state = getattr(self, '__dict__', {}).copy()
for obj in type(self).mro():
for name in getattr(obj,'__slots__',()):
if hasattr(self, name):
state[name] = getattr(self, name)
state['_version_id'] = self.current_version_id
try:
del state['__weakref__']
except KeyError:
pass
return state
def __setstate__(self, state):
"""
Restore the attributes from a pickled state.
"""
# TODO check or discard version
del state['_version_id']
for key, value in state.items():
if key not in ('__weakref__',):
setattr(self, key, value)
return ValueWithMemo(s)
class ValueBuildInfo(SCons.Node.BuildInfoBase):
__slots__ = ()
current_version_id = 2
class Value(SCons.Node.Node):
"""A class for Python variables, typically passed on the command line
or generated by a script, but not from a file or some other source.
"""A Node class for values represented by Python expressions.
Values are typically passed on the command line or generated
by a script, but not from a file or some other source.
.. versionchanged:: 4.0
the *name* parameter was added.
"""
NodeInfo = ValueNodeInfo
BuildInfo = ValueBuildInfo
def __init__(self, value, built_value=None):
SCons.Node.Node.__init__(self)
def __init__(self, value, built_value=None, name=None) -> None:
super().__init__()
self.value = value
self.changed_since_last_build = 6
self.store_info = 0
if built_value is not None:
self.built_value = built_value
# Set a name so it can be a child of a node and not break
# its parent's implementation of Node.get_contents.
if name:
self.name = name
else:
self.name = str(value)
def str_for_display(self):
return repr(self.value)
def __str__(self):
def __str__(self) -> str:
return str(self.value)
def make_ready(self):
def make_ready(self) -> None:
self.get_csig()
def build(self, **kw):
def build(self, **kw) -> None:
if not hasattr(self, 'built_value'):
SCons.Node.Node.build(self, **kw)
is_up_to_date = SCons.Node.Node.children_are_up_to_date
def is_under(self, dir):
def is_under(self, dir) -> bool:
# Make Value nodes get built regardless of
# what directory scons was run from. Value nodes
# are outside the filesystem:
return 1
return True
def write(self, built_value):
def write(self, built_value) -> None:
"""Set the value of the node."""
self.built_value = built_value
@ -124,7 +103,7 @@ class Value(SCons.Node.Node):
self.built_value = self.value
return self.built_value
def get_text_contents(self):
def get_text_contents(self) -> str:
"""By the assumption that the node.built_value is a
deterministic product of the sources, the contents of a Value
are the concatenation of all the contents of its sources. As
@ -133,28 +112,13 @@ class Value(SCons.Node.Node):
###TODO: something reasonable about universal newlines
contents = str(self.value)
for kid in self.children(None):
contents = contents + kid.get_contents().decode()
# Get csig() value of child as this is more efficient
contents = contents + kid.get_csig()
return contents
def get_contents(self):
"""
Get contents for signature calculations.
:return: bytes
"""
text_contents = self.get_text_contents()
try:
return text_contents.encode()
except UnicodeDecodeError:
# Already encoded as python2 str are bytes
return text_contents
def changed_since_last_build(self, target, prev_ni):
cur_csig = self.get_csig()
try:
return cur_csig != prev_ni.csig
except AttributeError:
return 1
def get_contents(self) -> bytes:
"""Get contents for signature calculations."""
return self.get_text_contents().encode()
def get_csig(self, calc=None):
"""Because we're a Python value node and don't have a real
@ -173,6 +137,33 @@ class Value(SCons.Node.Node):
self.get_ninfo().csig = contents
return contents
def ValueWithMemo(value, built_value=None, name=None):
"""Memoized :class:`Value` node factory.
.. versionchanged:: 4.0
the *name* parameter was added.
"""
global _memo_lookup_map
# No current support for memoizing a value that needs to be built.
if built_value:
return Value(value, built_value, name=name)
try:
memo_lookup_key = hash((value, name))
except TypeError:
# Non-primitive types will hit this codepath.
return Value(value, name=name)
try:
return _memo_lookup_map[memo_lookup_key]
except KeyError:
v = Value(value, built_value, name)
_memo_lookup_map[memo_lookup_key] = v
return v
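# Illustrative sketch, not part of the SCons source: what the memo buys.
# Repeated requests for the same hashable value return the same Value node,
# while unhashable values fall back to a fresh node each time. Assumes
# SCons 4.x is importable.
from SCons.Node.Python import Value, ValueWithMemo

a = ValueWithMemo("configure-flags")
b = ValueWithMemo("configure-flags")
assert a is b                        # cached: one shared node

c = ValueWithMemo(["not", "hashable"])
d = ValueWithMemo(["not", "hashable"])
assert c is not d and isinstance(c, Value)   # hash() fails, no memoization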
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -19,17 +20,13 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/PathList.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """SCons.PathList
A module for handling lists of directory paths (the sort of things
that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and
efficiency as we can, while still keeping the evaluation delayed so that we
Do the Right Thing (almost) regardless of how the variable is specified.
"""Handle lists of directory paths.
These are the path lists that get set as ``CPPPATH``, ``LIBPATH``,
etc., with as much caching of data and efficiency as we can, while
still keeping the evaluation delayed so that we Do the Right Thing
(almost) regardless of how the variable is specified.
"""
import os
@ -50,10 +47,10 @@ def node_conv(obj):
"""
This is the "string conversion" routine that we have our substitutions
use to return Nodes, not strings. This relies on the fact that an
EntryProxy object has a get() method that returns the underlying
Node that it wraps, which is a bit of architectural dependence
that we might need to break or modify in the future in response to
additional requirements.
:class:`~SCons.Node.FS.EntryProxy` object has a ``get()`` method that
returns the underlying Node that it wraps, which is a bit of
architectural dependence that we might need to break or modify in the
future in response to additional requirements.
"""
try:
get = obj.get
@ -66,38 +63,41 @@ def node_conv(obj):
result = get()
return result
class _PathList(object):
class _PathList:
"""An actual PathList object.
Initializes a :class:`PathList` object, canonicalizing the input and
pre-processing it for quicker substitution later.
The stored representation of the :class:`PathList` is a list of tuples
containing (type, value), where the "type" is one of the ``TYPE_*``
variables defined above. We distinguish between:
* Strings that contain no ``$`` and therefore need no
delayed-evaluation string substitution (we expect that there
will be many of these and that we therefore get a pretty
big win from avoiding string substitution)
* Strings that contain ``$`` and therefore need substitution
(the hard case is things like ``${TARGET.dir}/include``,
which require re-evaluation for every target + source)
* Other objects (which may be something like an
:class:`~SCons.Node.FS.EntryProxy`
that needs a method called to return a Node)
Pre-identifying the type of each element in the :class:`PathList`
up-front and storing the type in the list of tuples is intended to
reduce the amount of calculation when we actually do the substitution
over and over for each target.
"""
An actual PathList object.
"""
def __init__(self, pathlist):
"""
Initializes a PathList object, canonicalizing the input and
pre-processing it for quicker substitution later.
The stored representation of the PathList is a list of tuples
containing (type, value), where the "type" is one of the TYPE_*
variables defined above. We distinguish between:
strings that contain no '$' and therefore need no
delayed-evaluation string substitution (we expect that there
will be many of these and that we therefore get a pretty
big win from avoiding string substitution)
strings that contain '$' and therefore need substitution
(the hard case is things like '${TARGET.dir}/include',
which require re-evaluation for every target + source)
other objects (which may be something like an EntryProxy
that needs a method called to return a Node)
Pre-identifying the type of each element in the PathList up-front
and storing the type in the list of tuples is intended to reduce
the amount of calculation when we actually do the substitution
over and over for each target.
"""
def __init__(self, pathlist, split=True) -> None:
if SCons.Util.is_String(pathlist):
pathlist = pathlist.split(os.pathsep)
if split:
pathlist = pathlist.split(os.pathsep)
else: # no splitting, but still need a list
pathlist = [pathlist]
elif not SCons.Util.is_Sequence(pathlist):
pathlist = [pathlist]
@ -116,7 +116,7 @@ class _PathList(object):
self.pathlist = tuple(pl)
def __len__(self): return len(self.pathlist)
def __len__(self) -> int: return len(self.pathlist)
def __getitem__(self, i): return self.pathlist[i]
@ -143,9 +143,8 @@ class _PathList(object):
return tuple(result)
class PathListCache(object):
"""
A class to handle caching of PathList lookups.
class PathListCache:
"""A class to handle caching of PathList lookups.
This class gets instantiated once and then deleted from the namespace,
so it's used as a Singleton (although we don't enforce that in the
@ -154,34 +153,33 @@ class PathListCache(object):
use the same Memoizer pattern that we use elsewhere to count cache
hits and misses, which is very valuable.
Lookup keys in the cache are computed by the _PathList_key() method.
Lookup keys in the cache are computed by the :meth:`_PathList_key` method.
Cache lookup should be quick, so we don't spend cycles canonicalizing
all forms of the same lookup key. For example, 'x:y' and ['x',
'y'] logically represent the same list, but we don't bother to
all forms of the same lookup key. For example, ``x:y`` and ``['x', 'y']``
logically represent the same list, but we don't bother to
split string representations and treat those two equivalently.
(Note, however, that we do, treat lists and tuples the same.)
The main type of duplication we're trying to catch will come from
looking up the same path list from two different clones of the
same construction environment. That is, given
same construction environment. That is, given::
env2 = env1.Clone()
both env1 and env2 will have the same CPPPATH value, and we can
cheaply avoid re-parsing both values of CPPPATH by using the
both ``env1`` and ``env2`` will have the same ``CPPPATH`` value, and we can
cheaply avoid re-parsing both values of ``CPPPATH`` by using the
common value from this cache.
"""
def __init__(self):
def __init__(self) -> None:
self._memo = {}
def _PathList_key(self, pathlist):
"""
Returns the key for memoization of PathLists.
"""Returns the key for memoization of PathLists.
Note that we want this to be pretty quick, so we don't completely
canonicalize all forms of the same list. For example,
'dir1:$ROOT/dir2' and ['$ROOT/dir1', 'dir'] may logically
represent the same list if you're executing from $ROOT, but
``dir1:$ROOT/dir2`` and ``['$ROOT/dir1', 'dir']`` may logically
represent the same list if you're executing from ``$ROOT``, but
we're not going to bother splitting strings into path elements,
or massaging strings into Nodes, to identify that equivalence.
We just want to eliminate obvious redundancy from the normal
@ -192,10 +190,11 @@ class PathListCache(object):
return pathlist
@SCons.Memoize.CountDictCall(_PathList_key)
def PathList(self, pathlist):
"""
Returns the cached _PathList object for the specified pathlist,
creating and caching a new object as necessary.
def PathList(self, pathlist, split=True):
"""Entry point for getting PathLists.
Returns the cached :class:`_PathList` object for the specified
pathlist, creating and caching a new object as necessary.
"""
pathlist = self._PathList_key(pathlist)
try:
@ -209,7 +208,7 @@ class PathListCache(object):
except KeyError:
pass
result = _PathList(pathlist)
result = _PathList(pathlist, split)
memo_dict[pathlist] = result
@ -217,7 +216,8 @@ class PathListCache(object):
PathList = PathListCache().PathList
# TODO: removing the class object here means Sphinx doesn't pick up its
# docstrings: they're fine for reading here, but are not in API Docs.
del PathListCache
# Local Variables:

View File

@ -1,26 +1,6 @@
"""SCons.Platform
SCons platform selection.
This looks for modules that define a callable object that can modify a
construction environment as appropriate for a given platform.
Note that we take a more simplistic view of "platform" than Python does.
We're looking for a single string that determines a set of
tool-independent variables with which to initialize a construction
environment. Consequently, we'll examine both sys.platform and os.name
(and anything else that might come in to play) in order to return some
specification which is unique enough for our purposes.
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "platform specification" in an arbitrary callable function.
No one needs to use or tie in to this subsystem in order to roll
their own platform definition.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -40,13 +20,29 @@ their own platform definition.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
__revision__ = "src/engine/SCons/Platform/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""SCons platform selection.
Looks for modules that define a callable object that can modify a
construction environment as appropriate for a given platform.
Note that we take a more simplistic view of "platform" than Python does.
We're looking for a single string that determines a set of
tool-independent variables with which to initialize a construction
environment. Consequently, we'll examine both sys.platform and os.name
(and anything else that might come in to play) in order to return some
specification which is unique enough for our purposes.
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "platform specification" in an arbitrary callable function.
No one needs to use or tie in to this subsystem in order to roll
their own platform definition.
"""
import SCons.compat
import atexit
import importlib
import os
import sys
@ -58,10 +54,10 @@ import SCons.Tool
def platform_default():
"""Return the platform string for our execution environment.
r"""Return the platform string for our execution environment.
The returned value should map to one of the SCons/Platform/*.py
files. Since we're architecture independent, though, we don't
The returned value should map to one of the SCons/Platform/\*.py
files. Since scons is architecture independent, though, we don't
care about the machine architecture.
"""
osname = os.name
@ -88,7 +84,7 @@ def platform_default():
return sys.platform
def platform_module(name = platform_default()):
def platform_module(name=platform_default()):
"""Return the imported module for the platform.
This looks for a module name that matches the specified argument.
@ -96,65 +92,108 @@ def platform_module(name = platform_default()):
our execution environment.
"""
full_name = 'SCons.Platform.' + name
if full_name not in sys.modules:
if os.name == 'java':
eval(full_name)
else:
try:
return sys.modules[full_name]
except KeyError:
try:
# the specific platform module is a relative import
mod = importlib.import_module("." + name, __name__)
except ModuleNotFoundError:
try:
# the specific platform module is a relative import
mod = importlib.import_module("." + name, __name__)
except ImportError:
try:
import zipimport
importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] )
# This support was added to enable running inside
# a py2exe bundle a long time ago - unclear if it's
# still needed. It is *not* intended to load individual
# platform modules stored in a zipfile.
import zipimport
platform = sys.modules['SCons.Platform'].__path__[0]
importer = zipimport.zipimporter(platform)
if not hasattr(importer, 'find_spec'):
# zipimport only added find_spec, exec_module in 3.10,
# unlike importlib, where they've been around since 3.4.
# If we don't have 'em, use the old way.
mod = importer.load_module(full_name)
except ImportError:
raise SCons.Errors.UserError("No platform named '%s'" % name)
setattr(SCons.Platform, name, mod)
return sys.modules[full_name]
else:
spec = importer.find_spec(full_name)
mod = importlib.util.module_from_spec(spec)
importer.exec_module(mod)
sys.modules[full_name] = mod
except zipimport.ZipImportError:
raise SCons.Errors.UserError("No platform named '%s'" % name)
setattr(SCons.Platform, name, mod)
return mod
def DefaultToolList(platform, env):
"""Select a default tool list for the specified platform.
"""
"""Select a default tool list for the specified platform."""
return SCons.Tool.tool_list(platform, env)
class PlatformSpec(object):
def __init__(self, name, generate):
class PlatformSpec:
def __init__(self, name, generate) -> None:
self.name = name
self.generate = generate
def __call__(self, *args, **kw):
return self.generate(*args, **kw)
def __str__(self):
def __str__(self) -> str:
return self.name
class TempFileMunge(object):
"""A callable class. You can set an Environment variable to this,
then call it with a string argument, then it will perform temporary
file substitution on it. This is used to circumvent the long command
line limitation.
class TempFileMunge:
"""Convert long command lines to use a temporary file.
You can set an Environment variable (usually ``TEMPFILE``) to this,
then call it with a string argument, and it will perform temporary
file substitution on it. This is used to circumvent limitations on
the length of command lines. Example::
Example usage:
env["TEMPFILE"] = TempFileMunge
env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES','$LINKCOMSTR')}"
env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES', '$LINKCOMSTR')}"
By default, the name of the temporary file used begins with a
prefix of '@'. This may be configured for other tool chains by
setting '$TEMPFILEPREFIX':
setting the ``TEMPFILEPREFIX`` variable. Example::
env["TEMPFILEPREFIX"] = '-@' # diab compiler
env["TEMPFILEPREFIX"] = '-via' # arm tool chain
env["TEMPFILEPREFIX"] = '' # (the empty string) PC Lint
You can configure the extension of the temporary file through the
TEMPFILESUFFIX variable, which defaults to '.lnk' (see comments
in the code below):
``TEMPFILESUFFIX`` variable, which defaults to '.lnk' (see comments
in the code below). Example::
env["TEMPFILESUFFIX"] = '.lnt' # PC Lint
Entries in the temporary file are separated by the value of the
``TEMPFILEARGJOIN`` variable, which defaults to an OS-appropriate value.
A default argument escape function is ``SCons.Subst.quote_spaces``.
If you need to apply extra operations on a command argument before
writing it to a temporary file (fix Windows slashes, normalize paths, etc.),
please set the ``TEMPFILEARGESCFUNC`` variable to a custom function. Example::
import sys
import re
from SCons.Subst import quote_spaces
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
def tempfile_arg_esc_func(arg):
arg = quote_spaces(arg)
if sys.platform != "win32":
return arg
# GCC requires double Windows slashes, let's use UNIX separator
return WINPATHSEP_RE.sub(r"/\1", arg)
env["TEMPFILEARGESCFUNC"] = tempfile_arg_esc_func
"""
def __init__(self, cmd, cmdstr = None):
def __init__(self, cmd, cmdstr = None) -> None:
self.cmd = cmd
self.cmdstr = cmdstr
@ -186,48 +225,61 @@ class TempFileMunge(object):
# Check if we already created the temporary file for this target
# It should have been previously done by Action.strfunction() call
node = target[0] if SCons.Util.is_List(target) else target
cmdlist = getattr(node.attributes, 'tempfile_cmdlist', None) \
if node is not None else None
if SCons.Util.is_List(target):
node = target[0]
else:
node = target
cmdlist = None
if SCons.Util.is_List(self.cmd):
cmdlist_key = tuple(self.cmd)
else:
cmdlist_key = self.cmd
if node and hasattr(node.attributes, 'tempfile_cmdlist'):
cmdlist = node.attributes.tempfile_cmdlist.get(cmdlist_key, None)
if cmdlist is not None:
return cmdlist
# We do a normpath because mktemp() has what appears to be
# a bug in Windows that will use a forward slash as a path
# delimiter. Windows' link mistakes that for a command line
# switch and barfs.
#
# Default to the .lnk suffix for the benefit of the Phar Lap
# linkloc linker, which likes to append an .lnk suffix if
# none is given.
if env.has_key('TEMPFILESUFFIX'):
if 'TEMPFILESUFFIX' in env:
suffix = env.subst('$TEMPFILESUFFIX')
else:
suffix = '.lnk'
fd, tmp = tempfile.mkstemp(suffix, text=True)
native_tmp = SCons.Util.get_native_path(os.path.normpath(tmp))
if 'TEMPFILEDIR' in env:
tempfile_dir = env.subst('$TEMPFILEDIR')
os.makedirs(tempfile_dir, exist_ok=True)
else:
tempfile_dir = None
fd, tmp = tempfile.mkstemp(suffix, dir=tempfile_dir, text=True)
native_tmp = SCons.Util.get_native_path(tmp)
# arrange for cleanup on exit:
def tmpfile_cleanup(file) -> None:
os.remove(file)
atexit.register(tmpfile_cleanup, tmp)
if env.get('SHELL', None) == 'sh':
# The sh shell will try to escape the backslashes in the
# path, so unescape them.
native_tmp = native_tmp.replace('\\', r'\\\\')
# In Cygwin, we want to use rm to delete the temporary
# file, because del does not exist in the sh shell.
rm = env.Detect('rm') or 'del'
if 'TEMPFILEPREFIX' in env:
prefix = env.subst('$TEMPFILEPREFIX')
else:
# Don't use 'rm' if the shell is not sh, because rm won't
# work with the Windows shells (cmd.exe or command.com) or
# Windows path names.
rm = 'del'
prefix = "@"
prefix = env.subst('$TEMPFILEPREFIX')
if not prefix:
prefix = '@'
args = list(map(SCons.Subst.quote_spaces, cmd[1:]))
join_char = env.get('TEMPFILEARGJOIN',' ')
os.write(fd, bytearray(join_char.join(args) + "\n",'utf-8'))
tempfile_esc_func = env.get('TEMPFILEARGESCFUNC', SCons.Subst.quote_spaces)
args = [tempfile_esc_func(arg) for arg in cmd[1:]]
join_char = env.get('TEMPFILEARGJOIN', ' ')
os.write(fd, bytearray(join_char.join(args) + "\n", encoding="utf-8"))
os.close(fd)
# XXX Using the SCons.Action.print_actions value directly
@ -246,25 +298,34 @@ class TempFileMunge(object):
# purity get in the way of just being helpful, so we'll
# reach into SCons.Action directly.
if SCons.Action.print_actions:
cmdstr = env.subst(self.cmdstr, SCons.Subst.SUBST_RAW, target,
source) if self.cmdstr is not None else ''
cmdstr = (
env.subst(self.cmdstr, SCons.Subst.SUBST_RAW, target, source)
if self.cmdstr is not None
else ''
)
# Print our message only if XXXCOMSTR returns an empty string
if len(cmdstr) == 0 :
cmdstr = ("Using tempfile "+native_tmp+" for command line:\n"+
str(cmd[0]) + " " + " ".join(args))
if not cmdstr:
cmdstr = (
f"Using tempfile {native_tmp} for command line:\n"
f'{cmd[0]} {" ".join(args)}'
)
self._print_cmd_str(target, source, env, cmdstr)
cmdlist = [cmd[0], prefix + native_tmp]
# Store the temporary file command list into the target Node.attributes
# to avoid creating two temporary files one for print and one for execute.
cmdlist = [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ]
if node is not None:
try :
setattr(node.attributes, 'tempfile_cmdlist', cmdlist)
try:
# Storing in tempfile_cmdlist keyed by self.cmd provided when initializing
# $TEMPFILE{} fixes issue raised in PR #3140 and #3553
node.attributes.tempfile_cmdlist[cmdlist_key] = cmdlist
except AttributeError:
pass
node.attributes.tempfile_cmdlist = {cmdlist_key: cmdlist}
return cmdlist
def _print_cmd_str(self, target, source, env, cmdstr):
def _print_cmd_str(self, target, source, env, cmdstr) -> None:
# check if the user has specified a cmd line print function
print_func = None
try:
@ -283,8 +344,8 @@ class TempFileMunge(object):
def Platform(name = platform_default()):
"""Select a canned Platform specification.
"""
"""Select a canned Platform specification."""
module = platform_module(name)
spec = PlatformSpec(name, module.generate)
return spec
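
A minimal sketch (not part of this diff) of wiring the TempFileMunge mechanism described in the docstring above into a build, assuming an SConstruct context where Environment() is available; the MAXLINELENGTH value is an arbitrary illustration:

# Illustrative only: enable response-file handling for long link lines.
from SCons.Platform import TempFileMunge

env = Environment()                      # SConstruct context assumed
env["TEMPFILE"] = TempFileMunge          # route over-long command lines via a temp file
env["TEMPFILEPREFIX"] = "@"              # prefix the tool uses to read a response file
env["MAXLINELENGTH"] = 2048              # arbitrary threshold for this sketch
env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES', '$LINKCOMSTR')}"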

View File

@ -1,14 +1,6 @@
"""engine.SCons.Platform.aix
Platform-specific initialization for IBM AIX systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,12 +20,15 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/aix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for IBM AIX systems.
import os
import subprocess
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from subprocess import PIPE
from . import posix
@ -52,31 +47,31 @@ def get_xlc(env, xlc=None, packages=[]):
xlc = xlc[0]
for package in packages:
# find the installed filename, which may be a symlink as well
pipe = SCons.Action._subproc(env, ['lslpp', '-fc', package],
stdin = 'devnull',
stderr = 'devnull',
stdout = subprocess.PIPE)
cp = SCons.Action.scons_subproc_run(
env, ['lslpp', '-fc', package], universal_newlines=True, stdout=PIPE
)
# output of lslpp is something like this:
# #Path:Fileset:File
# /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/exe/xlCcpp
# /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/bin/xlc_r -> /usr/vac/bin/xlc
for line in pipe.stdout:
for line in cp.stdout.splitlines():
if xlcPath:
continue # read everything to let lslpp terminate
continue # read everything to let lslpp terminate
fileset, filename = line.split(':')[1:3]
filename = filename.split()[0]
if ('/' in xlc and filename == xlc) \
or ('/' not in xlc and filename.endswith('/' + xlc)):
if ('/' in xlc and filename == xlc) or (
'/' not in xlc and filename.endswith('/' + xlc)
):
xlcVersion = fileset.split()[1]
xlcPath, sep, xlc = filename.rpartition('/')
pass
pass
return (xlcPath, xlc, xlcVersion)
def generate(env):
def generate(env) -> None:
posix.generate(env)
#Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion
env['MAXLINELENGTH'] = 21576
env['SHLIBSUFFIX'] = '.a'
env['HOST_OS'] = 'aix'
# Local Variables:
# tab-width:4
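
The comment in get_xlc() above documents the colon-separated lslpp output it parses; a standalone sketch of that parsing step, using an invented sample line:

# Illustrative only: mirrors the field splitting done by get_xlc() above.
sample = "/usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/bin/xlc_r -> /usr/vac/bin/xlc"
fileset, filename = sample.split(':')[1:3]
filename = filename.split()[0]              # drop the "-> target" part of a symlink entry
xlcVersion = fileset.split()[1]             # "6.0.0.0"
xlcPath, _, xlc = filename.rpartition('/')  # ("/usr/vac/bin", "/", "xlc_r")
print(xlcPath, xlc, xlcVersion)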

View File

@ -1,14 +1,6 @@
"""SCons.Platform.cygwin
Platform-specific initialization for Cygwin systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,9 +20,13 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/cygwin.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for Cygwin systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
import sys
@ -44,18 +40,20 @@ if sys.platform == 'win32':
r'C:\cygwin\bin'
]
def generate(env):
def generate(env) -> None:
posix.generate(env)
env['PROGPREFIX'] = ''
env['PROGSUFFIX'] = '.exe'
env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX', '$IMPLIBPREFIX' ]
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX', '$IMPLIBSUFFIX' ]
env['LIBPREFIXES'] = ['$LIBPREFIX', '$SHLIBPREFIX', '$IMPLIBPREFIX']
env['LIBSUFFIXES'] = ['$LIBSUFFIX', '$SHLIBSUFFIX', '$IMPLIBSUFFIX']
env['LIBLITERAPPREFIX'] = ':'
env['TEMPFILE'] = TempFileMunge
env['TEMPFILEPREFIX'] = '@'
env['MAXLINELENGTH'] = 2048
env['HOST_OS'] = 'cygwin'
# Local Variables:
# tab-width:4

View File

@ -1,14 +1,6 @@
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,27 +20,33 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/darwin.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from . import posix
import os
def generate(env):
def generate(env) -> None:
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
env['HOST_OS'] = 'darwin'
# put macports paths at front to override Apple's versions, fink path is after
# For now let people who want Macports or Fink tools specify it!
# env['ENV']['PATH'] = '/opt/local/bin:/opt/local/sbin:' + env['ENV']['PATH'] + ':/sw/bin'
# Store extra system paths in env['ENV']['PATHOSX']
filelist = ['/etc/paths',]
# make sure this works on Macs with Tiger or earlier
try:
dirlist = os.listdir('/etc/paths.d')
except:
except (FileNotFoundError, PermissionError):
dirlist = []
for file in dirlist:
@ -56,7 +54,7 @@ def generate(env):
for file in filelist:
if os.path.isfile(file):
with open(file, 'r') as f:
with open(file) as f:
lines = f.readlines()
for line in lines:
if line:

View File

@ -1,14 +1,6 @@
"""engine.SCons.Platform.hpux
Platform-specific initialization for HP-UX systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,17 +20,24 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/hpux.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for HP-UX systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from . import posix
def generate(env):
def generate(env) -> None:
posix.generate(env)
#Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion
env['MAXLINELENGTH'] = 2045000
env['SHLIBSUFFIX'] = '.sl'
env['HOST_OS'] = 'hpux'
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil

View File

@ -1,14 +1,6 @@
"""SCons.Platform.irix
Platform-specific initialization for SGI IRIX systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,14 +20,19 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/irix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for SGI IRIX systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from . import posix
def generate(env):
def generate(env) -> None:
posix.generate(env)
env['HOST_OS'] = 'irix'
# Local Variables:
# tab-width:4

View File

@ -1,11 +1,6 @@
"""SCons.Platform.mingw
Platform-specific initialization for the MinGW system.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -25,9 +20,8 @@ Platform-specific initialization for the MinGW system.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/mingw.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for the MinGW system."""
import sys
@ -35,5 +29,7 @@ MINGW_DEFAULT_PATHS = []
if sys.platform == 'win32':
MINGW_DEFAULT_PATHS = [
r'C:\msys64',
r'C:\msys'
]
r'C:\msys64\usr\bin',
r'C:\msys',
r'C:\msys\usr\bin'
]

View File

@ -1,14 +1,6 @@
"""SCons.Platform.os2
Platform-specific initialization for OS/2 systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,12 +20,17 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/os2.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for OS/2 systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from . import win32
def generate(env):
def generate(env) -> None:
if 'ENV' not in env:
env['ENV'] = {}
env['OBJPREFIX'] = ''
@ -46,8 +43,9 @@ def generate(env):
env['LIBSUFFIX'] = '.lib'
env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = '$LIBPREFIX'
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
env['LIBPREFIXES'] = ['$LIBPREFIX']
env['LIBSUFFIXES'] = ['$LIBSUFFIX', '$SHLIBSUFFIX']
env['LIBLITERAPPREFIX'] = ''
env['HOST_OS'] = 'os2'
env['HOST_ARCH'] = win32.get_architecture().arch

View File

@ -1,14 +1,6 @@
"""SCons.Platform.posix
Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,18 +20,17 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/posix.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems.
import errno
import os
import os.path
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
import platform
import subprocess
import sys
import select
import SCons.Util
from SCons.Platform import TempFileMunge
from SCons.Platform.virtualenv import ImportVirtualenv
from SCons.Platform.virtualenv import ignore_virtualenv, enable_virtualenv
@ -83,7 +74,7 @@ def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr):
env, stdout, stderr)
def generate(env):
def generate(env) -> None:
# Bearing in mind we have python 2.4 as a baseline, we can just do this:
spawn = subprocess_spawn
pspawn = piped_env_spawn
@ -91,7 +82,7 @@ def generate(env):
if 'ENV' not in env:
env['ENV'] = {}
env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin'
env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin:/snap/bin'
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.o'
env['SHOBJPREFIX'] = '$OBJPREFIX'
@ -102,8 +93,11 @@ def generate(env):
env['LIBSUFFIX'] = '.a'
env['SHLIBPREFIX'] = '$LIBPREFIX'
env['SHLIBSUFFIX'] = '.so'
env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
env['LIBPREFIXES'] = ['$LIBPREFIX']
env['LIBSUFFIXES'] = ['$LIBSUFFIX', '$SHLIBSUFFIX']
env['LIBLITERALPREFIX'] = ''
env['HOST_OS'] = 'posix'
env['HOST_ARCH'] = platform.machine()
env['PSPAWN'] = pspawn
env['SPAWN'] = spawn
env['SHELL'] = 'sh'
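
A short sketch (not part of this diff) of selecting this platform module explicitly through SCons.Platform.Platform(), again assuming an SConstruct context where Environment() is available:

# Illustrative only: explicit platform selection applying the defaults set above.
from SCons.Platform import Platform, platform_default

print(platform_default())             # e.g. 'posix' on Linux
spec = Platform('posix')              # PlatformSpec wrapping SCons.Platform.posix.generate
env = Environment(platform=spec)      # applies generate(env) shown above
print(env['SHLIBSUFFIX'])             # '.so'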

View File

@ -1,14 +1,6 @@
"""engine.SCons.Platform.sunos
Platform-specific initialization for Sun systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,13 +20,17 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/sunos.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for Sun systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from . import posix
def generate(env):
def generate(env) -> None:
posix.generate(env)
# Based on sunSparc 8:32bit
# ARG_MAX=1048320 - 3000 for environment expansion
@ -42,6 +38,7 @@ def generate(env):
env['PKGINFO'] = 'pkginfo'
env['PKGCHK'] = '/usr/sbin/pkgchk'
env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin'
env['HOST_OS'] = 'sunos'
# Local Variables:
# tab-width:4

View File

@ -1,10 +1,6 @@
"""SCons.Platform.virtualenv
Support for virtualenv.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -24,9 +20,8 @@ Support for virtualenv.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/virtualenv.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""'Platform" support for a Python virtualenv."""
import os
import sys
@ -50,21 +45,21 @@ virtualenv_variables = ['VIRTUAL_ENV', 'PIPENV_ACTIVE']
def _running_in_virtualenv():
"""Returns True, if scons is executed within a virtualenv"""
"""Returns True if scons is executed within a virtualenv"""
# see https://stackoverflow.com/a/42580137
return (hasattr(sys, 'real_prefix') or
(hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))
def _is_path_in(path, base):
"""Returns true, if **path** is located under the **base** directory."""
def _is_path_in(path, base) -> bool:
"""Returns true if **path** is located under the **base** directory."""
if not path or not base: # empty path may happen, base too
return False
rp = os.path.relpath(path, base)
return ((not rp.startswith(os.path.pardir)) and (not rp == os.path.curdir))
return (not rp.startswith(os.path.pardir)) and (not rp == os.path.curdir)
def _inject_venv_variables(env):
def _inject_venv_variables(env) -> None:
if 'ENV' not in env:
env['ENV'] = {}
ENV = env['ENV']
@ -74,7 +69,7 @@ def _inject_venv_variables(env):
except KeyError:
pass
def _inject_venv_path(env, path_list=None):
def _inject_venv_path(env, path_list=None) -> None:
"""Modify environment such that SCons will take into account its virtualenv
when running external tools."""
if path_list is None:
@ -91,7 +86,7 @@ def select_paths_in_venv(path_list):
return [path for path in path_list if IsInVirtualenv(path)]
def ImportVirtualenv(env):
def ImportVirtualenv(env) -> None:
"""Copies virtualenv-related environment variables from OS environment
to ``env['ENV']`` and prepends virtualenv's PATH to ``env['ENV']['PATH']``.
"""

View File

@ -1,14 +1,6 @@
"""SCons.Platform.win32
Platform-specific initialization for Win32 systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,12 +20,17 @@ selection method.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/win32.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Platform-specific initialization for Win32 systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
import os
import os.path
import platform
import sys
import tempfile
@ -47,62 +44,6 @@ CHOCO_DEFAULT_PATH = [
r'C:\ProgramData\chocolatey\bin'
]
try:
import msvcrt
import win32api
import win32con
except ImportError:
parallel_msg = \
"you do not seem to have the pywin32 extensions installed;\n" + \
"\tparallel (-j) builds may not work reliably with open Python files."
except AttributeError:
parallel_msg = \
"your pywin32 extensions do not support file handle operations;\n" + \
"\tparallel (-j) builds may not work reliably with open Python files."
else:
parallel_msg = None
if sys.version_info.major == 2:
import __builtin__
_builtin_file = __builtin__.file
_builtin_open = __builtin__.open
def _scons_fixup_mode(mode):
"""Adjust 'mode' to mark handle as non-inheritable.
SCons is multithreaded, so allowing handles to be inherited by
children opens us up to races, where (e.g.) processes spawned by
the Taskmaster may inherit and retain references to files opened
by other threads. This may lead to sharing violations and,
ultimately, build failures.
By including 'N' as part of fopen's 'mode' parameter, all file
handles returned from these functions are atomically marked as
non-inheritable.
"""
if not mode:
# Python's default is 'r'.
# https://docs.python.org/2/library/functions.html#open
mode = 'rN'
elif 'N' not in mode:
mode += 'N'
return mode
class _scons_file(_builtin_file):
def __init__(self, name, mode=None, *args, **kwargs):
_builtin_file.__init__(self, name, _scons_fixup_mode(mode),
*args, **kwargs)
def _scons_open(name, mode=None, *args, **kwargs):
return _builtin_open(name, _scons_fixup_mode(mode),
*args, **kwargs)
__builtin__.file = _scons_file
__builtin__.open = _scons_open
if False:
# Now swap out shutil.filecopy and filecopy2 for win32 api native CopyFile
try:
@ -117,7 +58,7 @@ if False:
shutil.copy2 = CopyFile
def win_api_copyfile(src,dst):
def win_api_copyfile(src,dst) -> None:
CopyFile(src,dst)
os.utime(dst)
@ -140,9 +81,8 @@ try:
# This locked version of spawnve works around a Windows
# MSVCRT bug, because its spawnve is not thread-safe.
# Without this, python can randomly crash while using -jN.
# See the python bug at http://bugs.python.org/issue6476
# and SCons issue at
# https://github.com/SCons/scons/issues/2449
# See the python bug at https://github.com/python/cpython/issues/50725
# and SCons issue at https://github.com/SCons/scons/issues/2449
def spawnve(mode, file, args, env):
spawn_lock.acquire()
try:
@ -178,67 +118,76 @@ def piped_spawn(sh, escape, cmd, args, env, stdout, stderr):
# we redirect it into a temporary file tmpFileStdout
# (tmpFileStderr) and copy the contents of this file
# to stdout (stderr) given in the argument
# Note that because this will paste shell redirection syntax
# into the cmdline, we have to call a shell to run the command,
# even though that's a bit of a performance hit.
if not sh:
sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n")
return 127
else:
# one temporary file for stdout and stderr
tmpFileStdout = os.path.normpath(tempfile.mktemp())
tmpFileStderr = os.path.normpath(tempfile.mktemp())
# check if output is redirected
stdoutRedirected = 0
stderrRedirected = 0
for arg in args:
# are there more possibilities to redirect stdout ?
if arg.find( ">", 0, 1 ) != -1 or arg.find( "1>", 0, 2 ) != -1:
stdoutRedirected = 1
# are there more possibilities to redirect stderr ?
if arg.find( "2>", 0, 2 ) != -1:
stderrRedirected = 1
# one temporary file for stdout and stderr
tmpFileStdout, tmpFileStdoutName = tempfile.mkstemp(text=True)
os.close(tmpFileStdout) # don't need open until the subproc is done
tmpFileStderr, tmpFileStderrName = tempfile.mkstemp(text=True)
os.close(tmpFileStderr)
# redirect output of non-redirected streams to our tempfiles
if stdoutRedirected == 0:
args.append(">" + str(tmpFileStdout))
if stderrRedirected == 0:
args.append("2>" + str(tmpFileStderr))
# check if output is redirected
stdoutRedirected = False
stderrRedirected = False
for arg in args:
# are there more possibilities to redirect stdout ?
if arg.find(">", 0, 1) != -1 or arg.find("1>", 0, 2) != -1:
stdoutRedirected = True
# are there more possibilities to redirect stderr ?
if arg.find("2>", 0, 2) != -1:
stderrRedirected = True
# actually do the spawn
# redirect output of non-redirected streams to our tempfiles
if not stdoutRedirected:
args.append(">" + tmpFileStdoutName)
if not stderrRedirected:
args.append("2>" + tmpFileStderrName)
# actually do the spawn
try:
args = [sh, '/C', escape(' '.join(args))]
ret = spawnve(os.P_WAIT, sh, args, env)
except OSError as e:
# catch any error
try:
args = [sh, '/C', escape(' '.join(args))]
ret = spawnve(os.P_WAIT, sh, args, env)
except OSError as e:
# catch any error
try:
ret = exitvalmap[e.errno]
except KeyError:
sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e.errno, cmd, e.strerror))
if stderr is not None:
stderr.write("scons: %s: %s\n" % (cmd, e.strerror))
# copy child output from tempfiles to our streams
# and do clean up stuff
if stdout is not None and stdoutRedirected == 0:
try:
with open(tmpFileStdout, "r" ) as tmp:
stdout.write(tmp.read())
os.remove(tmpFileStdout)
except (IOError, OSError):
pass
ret = exitvalmap[e.errno]
except KeyError:
sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e.errno, cmd, e.strerror))
if stderr is not None:
stderr.write("scons: %s: %s\n" % (cmd, e.strerror))
if stderr is not None and stderrRedirected == 0:
try:
with open(tmpFileStderr, "r" ) as tmp:
stderr.write(tmp.read())
os.remove(tmpFileStderr)
except (IOError, OSError):
pass
return ret
# copy child output from tempfiles to our streams
# and do clean up stuff
if stdout is not None and not stdoutRedirected:
try:
with open(tmpFileStdoutName, "rb") as tmpFileStdout:
output = tmpFileStdout.read()
stdout.write(output.decode('oem', "replace").replace("\r\n", "\n"))
os.remove(tmpFileStdoutName)
except OSError:
pass
if stderr is not None and not stderrRedirected:
try:
with open(tmpFileStderrName, "rb") as tmpFileStderr:
errors = tmpFileStderr.read()
stderr.write(errors.decode('oem', "replace").replace("\r\n", "\n"))
os.remove(tmpFileStderrName)
except OSError:
pass
return ret
def exec_spawn(l, env):
try:
result = spawnve(os.P_WAIT, l[0], l, env)
except (OSError, EnvironmentError) as e:
except OSError as e:
try:
result = exitvalmap[e.errno]
sys.stderr.write("scons: %s: %s\n" % (l[0], e.strerror))
@ -300,9 +249,6 @@ def get_system_root():
except:
pass
# Ensure system root is a string and not unicode
# (This only matters for py27 were unicode in env passed to POpen fails)
val = str(val)
_system_root = val
return val
@ -324,7 +270,6 @@ def get_program_files_dir():
val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir')
except SCons.Util.RegError:
val = ''
pass
if val == '':
# A reasonable default if we can't read the registry
@ -334,12 +279,12 @@ def get_program_files_dir():
return val
class ArchDefinition(object):
class ArchDefinition:
"""
Determine which windows CPU were running on.
A class for defining architecture-specific settings and logic.
"""
def __init__(self, arch, synonyms=[]):
def __init__(self, arch, synonyms=[]) -> None:
self.arch = arch
self.synonyms = synonyms
@ -354,6 +299,11 @@ SupportedArchitectureList = [
['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'],
),
ArchDefinition(
'arm64',
['ARM64', 'aarch64', 'AARCH64', 'AArch64'],
),
ArchDefinition(
'ia64',
['IA64'],
@ -371,14 +321,25 @@ def get_architecture(arch=None):
"""Returns the definition for the specified architecture string.
If no string is specified, the system default is returned (as defined
by the PROCESSOR_ARCHITEW6432 or PROCESSOR_ARCHITECTURE environment
variables).
by the registry PROCESSOR_ARCHITECTURE value, PROCESSOR_ARCHITEW6432
environment variable, PROCESSOR_ARCHITECTURE environment variable, or
the platform machine).
"""
if arch is None:
if SCons.Util.can_read_reg:
try:
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment')
val, tok = SCons.Util.RegQueryValueEx(k, 'PROCESSOR_ARCHITECTURE')
except SCons.Util.RegError:
val = ''
if val and val in SupportedArchitectureMap:
arch = val
if arch is None:
arch = os.environ.get('PROCESSOR_ARCHITEW6432')
if not arch:
arch = os.environ.get('PROCESSOR_ARCHITECTURE')
return SupportedArchitectureMap.get(arch, ArchDefinition('', ['']))
return SupportedArchitectureMap.get(arch, ArchDefinition(platform.machine(), [platform.machine()]))
def generate(env):
@ -437,7 +398,7 @@ def generate(env):
# for SystemDrive because it's related.
#
# Weigh the impact carefully before adding other variables to this list.
import_env = ['SystemDrive', 'SystemRoot', 'TEMP', 'TMP' ]
import_env = ['SystemDrive', 'SystemRoot', 'TEMP', 'TMP', 'USERPROFILE']
for var in import_env:
v = os.environ.get(var)
if v:
@ -461,8 +422,9 @@ def generate(env):
env['LIBSUFFIX'] = '.lib'
env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ]
env['LIBPREFIXES'] = ['$LIBPREFIX']
env['LIBSUFFIXES'] = ['$LIBSUFFIX']
env['LIBLITERALPREFIX'] = ''
env['PSPAWN'] = piped_spawn
env['SPAWN'] = spawn
env['SHELL'] = cmd_interp
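
get_architecture() above resolves names through the SupportedArchitectureMap synonym table; a short sketch of an explicit lookup (requires scons to be importable as a library):

# Illustrative only: resolve an architecture synonym via get_architecture() above.
from SCons.Platform.win32 import get_architecture

arch = get_architecture('aarch64')   # matched through the 'arm64' synonym list
print(arch.arch, arch.synonyms)      # arm64 ['ARM64', 'aarch64', 'AARCH64', 'AArch64']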

View File

@ -1,18 +1,6 @@
"""SCons.SConf
Autoconf-like configuration support.
In other words, SConf allows to run tests on the build machine to detect
capabilities of system and do some things based on result: generate config
files, header files for C/C++, update variables in environment.
Tests on the build system can detect if compiler sees header files, if
libraries are installed, if some command line options are supported etc.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -32,13 +20,22 @@ libraries are installed, if some command line options are supported etc.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
__revision__ = "src/engine/SCons/SConf.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Autoconf-like configuration support.
In other words, SConf allows running tests on the build machine to detect
capabilities of the system and act on the results: generate config
files, header files for C/C++, or update variables in the environment.
Tests on the build system can detect whether the compiler sees header files,
whether libraries are installed, whether certain command line options are supported, etc.
"""
from __future__ import annotations
import SCons.compat
import atexit
import io
import os
import re
@ -48,7 +45,7 @@ import traceback
import SCons.Action
import SCons.Builder
import SCons.Errors
import SCons.Job
import SCons.Taskmaster.Job
import SCons.Node.FS
import SCons.Taskmaster
import SCons.Util
@ -66,9 +63,9 @@ SCons.Conftest.LogErrorMessages = 0
build_type = None
build_types = ['clean', 'help']
def SetBuildType(type):
def SetBuildType(buildtype) -> None:
global build_type
build_type = type
build_type = buildtype
# to be set, if we are in dry-run mode
dryrun = 0
@ -78,6 +75,9 @@ FORCE=1 # force all tests to be rebuilt
CACHE=2 # force all tests to be taken from cache (raise an error, if necessary)
cache_mode = AUTO
def _set_conftest_node(node) -> None:
node.attributes.conftest_node = 1
def SetCacheMode(mode):
"""Set the Configure cache mode. mode must be one of "auto", "force",
or "cache"."""
@ -92,7 +92,7 @@ def SetCacheMode(mode):
raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode)
progress_display = SCons.Util.display # will be overwritten by SCons.Script
def SetProgressDisplay(display):
def SetProgressDisplay(display) -> None:
"""Set the progress display to use (called from SCons.Script)"""
global progress_display
progress_display = display
@ -104,7 +104,7 @@ _ac_config_logs = {} # all config.log files created in this build
_ac_config_hs = {} # all config.h files created in this build
sconf_global = None # current sconf object
def _createConfigH(target, source, env):
def _createConfigH(target, source, env) -> None:
t = open(str(target[0]), "w")
defname = re.sub('[^A-Za-z0-9_]', '_', str(target[0]).upper())
t.write("""#ifndef %(DEFNAME)s_SEEN
@ -121,50 +121,50 @@ def _stringConfigH(target, source, env):
return "scons: Configure: creating " + str(target[0])
def NeedConfigHBuilder():
def NeedConfigHBuilder() -> bool:
if len(_ac_config_hs) == 0:
return False
else:
return True
def CreateConfigHBuilder(env):
def CreateConfigHBuilder(env) -> None:
"""Called if necessary just before the building targets phase begins."""
action = SCons.Action.Action(_createConfigH,
_stringConfigH)
sconfigHBld = SCons.Builder.Builder(action=action)
env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} )
for k in list(_ac_config_hs.keys()):
env.SConfigHBuilder(k, env.Value(_ac_config_hs[k]))
for k, v in _ac_config_hs.items():
env.SConfigHBuilder(k, env.Value(v))
class SConfWarning(SCons.Warnings.Warning):
class SConfWarning(SCons.Warnings.SConsWarning):
pass
SCons.Warnings.enableWarningClass(SConfWarning)
# some error definitions
class SConfError(SCons.Errors.UserError):
def __init__(self,msg):
SCons.Errors.UserError.__init__(self,msg)
def __init__(self,msg) -> None:
super().__init__(msg)
class ConfigureDryRunError(SConfError):
"""Raised when a file or directory needs to be updated during a Configure
process, but the user requested a dry-run"""
def __init__(self,target):
def __init__(self,target) -> None:
if not isinstance(target, SCons.Node.FS.File):
msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target)
else:
msg = 'Cannot update configure test "%s" within a dry-run.' % str(target)
SConfError.__init__(self,msg)
super().__init__(msg)
class ConfigureCacheError(SConfError):
"""Raised when a use explicitely requested the cache feature, but the test
is run the first time."""
def __init__(self,target):
SConfError.__init__(self, '"%s" is not yet built and cache is forced.' % str(target))
def __init__(self,target) -> None:
super().__init__('"%s" is not yet built and cache is forced.' % str(target))
# define actions for building text files
def _createSource(target, source, env):
def _createSource(target, source, env) -> None:
fd = open(str(target[0]), "w")
fd.write(source[0].get_contents().decode())
fd.close()
@ -182,24 +182,24 @@ class SConfBuildInfo(SCons.Node.FS.FileBuildInfo):
"""
__slots__ = ('result', 'string')
def __init__(self):
def __init__(self) -> None:
self.result = None # -> 0/None -> no error, != 0 error
self.string = None # the stdout / stderr output when building the target
def set_build_result(self, result, string):
def set_build_result(self, result, string) -> None:
self.result = result
self.string = string
class Streamer(object):
class Streamer:
"""
'Sniffer' for a file-like writable object. Similar to the unix tool tee.
"""
def __init__(self, orig):
def __init__(self, orig) -> None:
self.orig = orig
self.s = io.StringIO()
def write(self, str):
def write(self, str) -> None:
if self.orig:
self.orig.write(str)
try:
@ -208,7 +208,7 @@ class Streamer(object):
# "unicode argument expected" bug in IOStream (python 2.x)
self.s.write(str.decode())
def writelines(self, lines):
def writelines(self, lines) -> None:
for l in lines:
self.write(l + '\n')
@ -218,7 +218,7 @@ class Streamer(object):
"""
return self.s.getvalue()
def flush(self):
def flush(self) -> None:
if self.orig:
self.orig.flush()
self.s.flush()
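
Streamer above tees configure output so it can be echoed and logged at the same time; a tiny usage sketch (requires scons to be importable as a library):

# Illustrative only: Streamer echoes writes to the wrapped stream and keeps a copy.
import sys
from SCons.SConf import Streamer

s = Streamer(sys.stdout)
s.write("Checking for zlib... ")
s.writelines(["yes"])
print(repr(s.getvalue()))            # the captured copy of everything written above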
@ -229,18 +229,22 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
This is almost the same as SCons.Script.BuildTask. Handles SConfErrors
correctly and knows about the current cache_mode.
"""
def display(self, message):
non_sconf_nodes = set()
def display(self, message) -> None:
if sconf_global.logstream:
sconf_global.logstream.write("scons: Configure: " + message + "\n")
def display_cached_string(self, bi):
def display_cached_string(self, bi) -> None:
"""
Logs the original builder messages, given the SConfBuildInfo instance
bi.
"""
if not isinstance(bi, SConfBuildInfo):
SCons.Warnings.warn(SConfWarning,
"The stored build information has an unexpected class: %s" % bi.__class__)
SCons.Warnings.warn(
SConfWarning,
"The stored build information has an unexpected class: %s" % bi.__class__
)
else:
self.display("The original builder output was:\n" +
(" |" + str(bi.string)).replace("\n", "\n |"))
@ -248,10 +252,9 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
def failed(self):
# check, if the reason was a ConfigureDryRunError or a
# ConfigureCacheError and if yes, reraise the exception
exc_type = self.exc_info()[0]
exc_type, exc, _ = self.exc_info()
if issubclass(exc_type, SConfError):
# TODO pylint E0704: bare raise not inside except
raise
raise exc
elif issubclass(exc_type, SCons.Errors.BuildError):
# we ignore Build Errors (occurs, when a test doesn't pass)
# Clear the exception to prevent the contained traceback
@ -263,18 +266,18 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
sys.excepthook(*self.exc_info())
return SCons.Taskmaster.Task.failed(self)
def collect_node_states(self):
def collect_node_states(self) -> tuple[bool, bool, bool]:
# returns (is_up_to_date, cached_error, cachable)
# where is_up_to_date is 1, if the node(s) are up_to_date
# cached_error is 1, if the node(s) are up_to_date, but the
# build will fail
# cachable is 0, if some nodes are not in our cache
# where is_up_to_date is True if the node(s) are up_to_date
# cached_error is True if the node(s) are up_to_date, but the
# build will fail
# cachable is False if some nodes are not in our cache
T = 0
changed = False
cached_error = False
cachable = True
for t in self.targets:
if T: Trace('%s' % (t))
if T: Trace('%s' % t)
bi = t.get_stored_info().binfo
if isinstance(bi, SConfBuildInfo):
if T: Trace(': SConfBuildInfo')
@ -284,7 +287,7 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
else:
if T: Trace(': get_state() %s' % t.get_state())
if T: Trace(': changed() %s' % t.changed())
if (t.get_state() != SCons.Node.up_to_date and t.changed()):
if t.get_state() != SCons.Node.up_to_date and t.changed():
changed = True
if T: Trace(': changed %s' % changed)
cached_error = cached_error or bi.result
@ -309,7 +312,7 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
if cache_mode == CACHE and not cachable:
raise ConfigureCacheError(self.targets[0])
elif cache_mode == FORCE:
is_up_to_date = 0
is_up_to_date = False
if cached_error and is_up_to_date:
self.display("Building \"%s\" failed in a previous run and all "
@ -376,7 +379,26 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
sconsign.set_entry(t.name, sconsign_entry)
sconsign.merge()
class SConfBase(object):
def make_ready_current(self) -> None:
# We're overriding the make_ready_current() call to add to the list
# of nodes used by this task, filtering out any nodes created
# by the checker for its own purposes.
self.non_sconf_nodes.update([t for t in self.targets if not t.is_conftest()])
super().make_ready_current()
make_ready = make_ready_current
def postprocess(self) -> None:
# We're done executing this task, so now we'll go through all the
# nodes used by this task which aren't nodes created for
# Configure checkers, but rather are existing or built files
# and reset their node info.
# If we do not reset their node info, any changes in these
# nodes will not trigger builds in the normal build process
for node in self.non_sconf_nodes:
node.ninfo = node.new_ninfo()
super().postprocess()
class SConfBase:
"""This is simply a class to represent a configure context. After
creating a SConf object, you can call any tests. After finished with your
tests, be sure to call the Finish() method, which returns the modified
@ -389,8 +411,8 @@ class SConfBase(object):
SConf run, we need to explicitly cache this error.
"""
def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR',
log_file='$CONFIGURELOG', config_h = None, _depth = 0):
def __init__(self, env, custom_tests = {}, conf_dir: str='$CONFIGUREDIR',
log_file: str='$CONFIGURELOG', config_h = None, _depth: int = 0) -> None:
"""Constructor. Pass additional tests in the custom_tests-dictionary,
e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest
defines a custom test.
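
A minimal sketch (not part of this diff) of such a custom test in an SConstruct, built only on the CheckContext helpers (Message, TryLink, AppendLIBS, SetLIBS, Result); the library name 'foo' is purely illustrative:

# Illustrative only: a project-specific Configure check.
def CheckLibFoo(context):
    context.Message('Checking for libfoo... ')
    old_libs = context.AppendLIBS(['foo'])           # hypothetical library name
    ok = context.TryLink('int main(void) { return 0; }\n', '.c')
    if not ok:
        context.SetLIBS(old_libs)                    # roll back LIBS on failure
    context.Result(ok)
    return ok

env = Environment()                                  # SConstruct context assumed
conf = Configure(env, custom_tests={'CheckLibFoo': CheckLibFoo})
if conf.CheckLibFoo():
    conf.Define('HAVE_LIBFOO', 1)
env = conf.Finish()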
@ -411,7 +433,7 @@ class SConfBase(object):
# and keep the build state consistent.
def force_build(dependency, target, prev_ni,
repo_node=None,
env_decider=env.decide_source):
env_decider=env.decide_source) -> bool:
try:
env_decider(dependency, target, prev_ni, repo_node)
except Exception as e:
@ -430,7 +452,8 @@ class SConfBase(object):
SConfFS = SCons.Node.FS.default_fs or \
SCons.Node.FS.FS(env.fs.pathTop)
if sconf_global is not None:
raise SCons.Errors.UserError
raise SCons.Errors.UserError("""Configure() called while another Configure() exists.
Please call .Finish() before creating a second Configure() context""")
if log_file is not None:
log_file = SConfFS.File(env.subst(log_file))
@ -449,6 +472,7 @@ class SConfBase(object):
'CheckFunc' : CheckFunc,
'CheckType' : CheckType,
'CheckTypeSize' : CheckTypeSize,
'CheckMember' : CheckMember,
'CheckDeclaration' : CheckDeclaration,
'CheckHeader' : CheckHeader,
'CheckCHeader' : CheckCHeader,
@ -473,7 +497,7 @@ class SConfBase(object):
return self.env
def Define(self, name, value = None, comment = None):
def Define(self, name, value = None, comment = None) -> None:
"""
Define a pre processor symbol name, with the optional given value in the
current config header.
@ -513,13 +537,14 @@ class SConfBase(object):
# the engine assumes the current path is the SConstruct directory ...
old_fs_dir = SConfFS.getcwd()
old_os_dir = os.getcwd()
SConfFS.chdir(SConfFS.Top, change_os_dir=1)
SConfFS.chdir(SConfFS.Top, change_os_dir=True)
# Because we take responsibility here for writing out our
# own .sconsign info (see SConfBuildTask.execute(), above),
# we override the store_info() method with a null place-holder
# so we really control how it gets written.
for n in nodes:
_set_conftest_node(n)
n.store_info = 0
if not hasattr(n, 'attributes'):
n.attributes = SCons.Node.Node.Attrs()
@ -532,6 +557,7 @@ class SConfBase(object):
for c in n.children(scan=False):
# Keep debug code here.
# print("Checking [%s] for builders and then setting keep_targetinfo"%c)
_set_conftest_node(c)
if c.has_builder():
n.store_info = 0
if not hasattr(c, 'attributes'):
@ -547,7 +573,7 @@ class SConfBase(object):
SConfFS.set_max_drift(0)
tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask)
# we don't want to build tests in parallel
jobs = SCons.Job.Jobs(1, tm )
jobs = SCons.Taskmaster.Job.Jobs(1, tm)
jobs.run()
for n in nodes:
state = n.get_state()
@ -558,7 +584,7 @@ class SConfBase(object):
finally:
SConfFS.set_max_drift(save_max_drift)
os.chdir(old_os_dir)
SConfFS.chdir(old_fs_dir, change_os_dir=0)
SConfFS.chdir(old_fs_dir, change_os_dir=False)
if self.logstream is not None:
# restore stdout / stderr
sys.stdout = oldStdout
@ -577,7 +603,7 @@ class SConfBase(object):
"""
return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream)
def TryBuild(self, builder, text=None, extension=""):
def TryBuild(self, builder, text=None, extension: str=""):
"""Low level TryBuild implementation. Normally you don't need to
call that - you can use TryCompile / TryLink / TryRun instead
"""
@ -596,39 +622,43 @@ class SConfBase(object):
nodesToBeBuilt = []
sourcetext = self.env.Value(text)
_set_conftest_node(sourcetext)
f = "conftest"
if text is not None:
textSig = SCons.Util.MD5signature(sourcetext)
textSig = SCons.Util.hash_signature(sourcetext)
textSigCounter = str(_ac_build_counter[textSig])
_ac_build_counter[textSig] += 1
f = "_".join([f, textSig, textSigCounter])
textFile = self.confdir.File(f + extension)
_set_conftest_node(textFile)
textFileNode = self.env.SConfSourceBuilder(target=textFile,
source=sourcetext)
nodesToBeBuilt.extend(textFileNode)
source = textFile
target = textFile.File(f + "SConfActionsContentDummyTarget")
_set_conftest_node(target)
else:
source = None
target = None
action = builder.builder.action.get_contents(target=target, source=[source], env=self.env)
actionsig = SCons.Util.MD5signature(action)
actionsig = SCons.Util.hash_signature(action)
f = "_".join([f, actionsig])
pref = self.env.subst( builder.builder.prefix )
suff = self.env.subst( builder.builder.suffix )
target = self.confdir.File(pref + f + suff)
_set_conftest_node(target)
try:
# Slide our wrapper into the construction environment as
# the SPAWN function.
self.env['SPAWN'] = self.pspawn_wrapper
nodes = builder(target = target, source = source)
nodes = builder(target = target, source = source, SCONF_NODE=True)
if not SCons.Util.is_List(nodes):
nodes = [nodes]
nodesToBeBuilt.extend(nodes)
@ -644,7 +674,7 @@ class SConfBase(object):
return result
def TryAction(self, action, text = None, extension = ""):
def TryAction(self, action, text = None, extension: str = ""):
"""Tries to execute the given action with optional source file
contents <text> and optional source file extension <extension>,
Returns the status (0 : failed, 1 : ok) and the contents of the
@ -682,9 +712,15 @@ class SConfBase(object):
is saved in self.lastTarget (for further processing).
"""
ok = self.TryLink(text, extension)
if( ok ):
if ok:
prog = self.lastTarget
pname = prog.get_internal_path()
if sys.platform == "win32" and os.sep == "/":
# msys might have a Python where os.sep='/' on Windows.
# That builds a path in the env.Command below which breaks
# if the SHELL used is cmd because 'pname' will always have
# an os.sep in it.
pname = pname.replace(os.sep, os.altsep)
output = self.confdir.File(os.path.basename(pname)+'.out')
node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ])
ok = self.BuildNodes(node)
@ -693,9 +729,9 @@ class SConfBase(object):
return( 1, outputStr)
return (0, "")
class TestWrapper(object):
class TestWrapper:
"""A wrapper around Tests (to ensure sanity)"""
def __init__(self, test, sconf):
def __init__(self, test, sconf) -> None:
self.test = test
self.sconf = sconf
def __call__(self, *args, **kw):
@ -708,16 +744,16 @@ class SConfBase(object):
context.Result("error: no result")
return ret
def AddTest(self, test_name, test_instance):
def AddTest(self, test_name, test_instance) -> None:
"""Adds test_class to this SConf instance. It can be called with
self.test_name(...)"""
setattr(self, test_name, SConfBase.TestWrapper(test_instance, self))
def AddTests(self, tests):
def AddTests(self, tests) -> None:
"""Adds all the tests given in the tests dictionary to this SConf
instance
"""
for name in list(tests.keys()):
for name in tests.keys():
self.AddTest(name, tests[name])
def _createDir( self, node ):
@ -729,7 +765,7 @@ class SConfBase(object):
if not os.path.isdir( dirName ):
os.makedirs( dirName )
def _startup(self):
def _startup(self) -> None:
"""Private method. Set up logstream, and set the environment
variables necessary for a piped build
"""
@ -751,15 +787,20 @@ class SConfBase(object):
_ac_config_logs[self.logfile] = None
log_mode = "w"
fp = open(str(self.logfile), log_mode)
def conflog_cleanup(logf) -> None:
logf.close()
atexit.register(conflog_cleanup, fp)
self.logstream = SCons.Util.Unbuffered(fp)
# logfile may stay in a build directory, so we tell
# the build system not to override it with a eventually
# the build system not to override it with an eventually
# existing file with the same name in the source directory
self.logfile.dir.add_ignore( [self.logfile] )
self.logfile.dir.add_ignore([self.logfile])
tb = traceback.extract_stack()[-3-self.depth]
old_fs_dir = SConfFS.getcwd()
SConfFS.chdir(SConfFS.Top, change_os_dir=0)
SConfFS.chdir(SConfFS.Top, change_os_dir=False)
self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' %
(tb[0], tb[1], str(self.confdir)) )
SConfFS.chdir(old_fs_dir)
@ -803,7 +844,7 @@ class SConfBase(object):
_ac_config_hs[self.config_h] = self.config_h_text
self.env.fs = self.lastEnvFs
class CheckContext(object):
class CheckContext:
"""Provides a context for configure tests. Defines how a test writes to the
screen and log file.
@ -821,7 +862,7 @@ class CheckContext(object):
changed.
"""
def __init__(self, sconf):
def __init__(self, sconf) -> None:
"""Constructor. Pass the corresponding SConf instance."""
self.sconf = sconf
self.did_show_result = 0
@ -839,7 +880,7 @@ class CheckContext(object):
# correctly. Note that we can't use Conftest.py's support for config.h,
# cause we will need to specify a builder for the config.h file ...
def Message(self, text):
def Message(self, text) -> None:
"""Inform about what we are doing right now, e.g.
'Checking for SOMETHING ... '
"""
@ -847,7 +888,7 @@ class CheckContext(object):
self.sconf.cached = 1
self.did_show_result = 0
def Result(self, res):
def Result(self, res) -> None:
"""Inform about the result of the test. If res is not a string, displays
'yes' or 'no' depending on whether res is evaluated as true or false.
The result is only displayed when self.did_show_result is not set.
@ -880,26 +921,26 @@ class CheckContext(object):
return self.sconf.TryRun(*args, **kw)
def __getattr__( self, attr ):
if( attr == 'env' ):
if attr == 'env':
return self.sconf.env
elif( attr == 'lastTarget' ):
elif attr == 'lastTarget':
return self.sconf.lastTarget
else:
raise AttributeError("CheckContext instance has no attribute '%s'" % attr)
#### Stuff used by Conftest.py (look there for explanations).
def BuildProg(self, text, ext):
def BuildProg(self, text, ext) -> bool:
self.sconf.cached = 1
# TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
return not self.TryBuild(self.env.Program, text, ext)
def CompileProg(self, text, ext):
def CompileProg(self, text, ext) -> bool:
self.sconf.cached = 1
# TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
return not self.TryBuild(self.env.Object, text, ext)
def CompileSharedObject(self, text, ext):
def CompileSharedObject(self, text, ext) -> bool:
self.sconf.cached = 1
# TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc.
return not self.TryBuild(self.env.SharedObject, text, ext)
@ -910,14 +951,20 @@ class CheckContext(object):
st, out = self.TryRun(text, ext)
return not st, out
def AppendLIBS(self, lib_name_list):
def AppendLIBS(self, lib_name_list, unique: bool=False):
oldLIBS = self.env.get( 'LIBS', [] )
self.env.Append(LIBS = lib_name_list)
if unique:
self.env.AppendUnique(LIBS = lib_name_list)
else:
self.env.Append(LIBS = lib_name_list)
return oldLIBS
def PrependLIBS(self, lib_name_list):
def PrependLIBS(self, lib_name_list, unique: bool=False):
oldLIBS = self.env.get( 'LIBS', [] )
self.env.Prepend(LIBS = lib_name_list)
if unique:
self.env.PrependUnique(LIBS = lib_name_list)
else:
self.env.Prepend(LIBS = lib_name_list)
return oldLIBS
def SetLIBS(self, val):
@ -925,7 +972,7 @@ class CheckContext(object):
self.env.Replace(LIBS = val)
return oldLIBS
def Display(self, msg):
def Display(self, msg) -> None:
if self.sconf.cached:
# We assume that Display is called twice for each test here
# once for the Checking for ... message and once for the result.
@ -935,7 +982,7 @@ class CheckContext(object):
progress_display(msg, append_newline=0)
self.Log("scons: Configure: " + msg + "\n")
def Log(self, msg):
def Log(self, msg) -> None:
if self.sconf.logstream is not None:
self.sconf.logstream.write(msg)
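The AppendLIBS/PrependLIBS helpers above now take a unique flag, and SetLIBS restores a saved value; custom configure checks are their main consumers. A hedged SConstruct sketch, with a hypothetical libfoo:
env = Environment()

def CheckFoo(context):
    context.Message('Checking for libfoo... ')
    saved = context.AppendLIBS(['foo'], unique=True)   # add -lfoo at most once
    ok = context.TryLink('int main(void) { return 0; }\n', '.c')
    context.SetLIBS(saved)                             # restore LIBS either way
    context.Result(ok)
    return ok

conf = Configure(env, custom_tests={'CheckFoo': CheckFoo})
if conf.CheckFoo():
    env.Append(LIBS=['foo'])
env = conf.Finish()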
@ -955,32 +1002,39 @@ def SConf(*args, **kw):
return SCons.Util.Null()
def CheckFunc(context, function_name, header = None, language = None):
res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language)
def CheckFunc(context, function_name, header = None, language = None, funcargs = None) -> bool:
res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language, funcargs = funcargs)
context.did_show_result = 1
return not res
def CheckType(context, type_name, includes = "", language = None):
def CheckType(context, type_name, includes: str = "", language = None) -> bool:
res = SCons.Conftest.CheckType(context, type_name,
header = includes, language = language)
context.did_show_result = 1
return not res
def CheckTypeSize(context, type_name, includes = "", language = None, expect = None):
def CheckTypeSize(context, type_name, includes: str = "", language = None, expect = None):
res = SCons.Conftest.CheckTypeSize(context, type_name,
header = includes, language = language,
expect = expect)
context.did_show_result = 1
return res
def CheckDeclaration(context, declaration, includes = "", language = None):
def CheckDeclaration(context, declaration, includes: str = "", language = None) -> bool:
res = SCons.Conftest.CheckDeclaration(context, declaration,
includes = includes,
language = language)
context.did_show_result = 1
return not res
def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'):
def CheckMember(context, aggregate_member, header = None, language = None) -> bool:
'''Returns the status (False : failed, True : ok).'''
res = SCons.Conftest.CheckMember(context, aggregate_member, header=header, language=language)
context.did_show_result = 1
return not res
def createIncludesFromHeaders(headers, leaveLast, include_quotes: str = '""'):
# used by CheckHeader and CheckLibWithHeader to produce C - #include
# statements from the specified header (list)
if not SCons.Util.is_List(headers):
@ -996,7 +1050,7 @@ def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'):
% (include_quotes[0], s, include_quotes[1]))
return ''.join(l), lastHeader
def CheckHeader(context, header, include_quotes = '<>', language = None):
def CheckHeader(context, header, include_quotes: str = '<>', language = None) -> bool:
"""
A test for a C or C++ header file.
"""
@ -1008,29 +1062,29 @@ def CheckHeader(context, header, include_quotes = '<>', language = None):
context.did_show_result = 1
return not res
def CheckCC(context):
def CheckCC(context) -> bool:
res = SCons.Conftest.CheckCC(context)
context.did_show_result = 1
return not res
def CheckCXX(context):
def CheckCXX(context) -> bool:
res = SCons.Conftest.CheckCXX(context)
context.did_show_result = 1
return not res
def CheckSHCC(context):
def CheckSHCC(context) -> bool:
res = SCons.Conftest.CheckSHCC(context)
context.did_show_result = 1
return not res
def CheckSHCXX(context):
def CheckSHCXX(context) -> bool:
res = SCons.Conftest.CheckSHCXX(context)
context.did_show_result = 1
return not res
# Bram: Make this function obsolete? CheckHeader() is more generic.
def CheckCHeader(context, header, include_quotes = '""'):
def CheckCHeader(context, header, include_quotes: str = '""'):
"""
A test for a C header file.
"""
@ -1039,19 +1093,25 @@ def CheckCHeader(context, header, include_quotes = '""'):
# Bram: Make this function obsolete? CheckHeader() is more generic.
def CheckCXXHeader(context, header, include_quotes = '""'):
def CheckCXXHeader(context, header, include_quotes: str = '""'):
"""
A test for a C++ header file.
"""
return CheckHeader(context, header, include_quotes, language = "C++")
def CheckLib(context, library = None, symbol = "main",
header = None, language = None, autoadd = 1):
def CheckLib(context, library = None, symbol: str = "main",
header = None, language = None, extra_libs = None,
autoadd: bool=True, append: bool=True, unique: bool=False) -> bool:
"""
A test for a library. See also CheckLibWithHeader.
A test for a library. See also :func:`CheckLibWithHeader`.
Note that library may also be None to test whether the given symbol
compiles without flags.
.. versionchanged:: 4.9.0
Added the *extra_libs* keyword parameter. The actual implementation
is in :func:`SCons.Conftest.CheckLib` which already accepted this
parameter, so this is only exposing existing functionality.
"""
if not library:
@ -1061,34 +1121,41 @@ def CheckLib(context, library = None, symbol = "main",
library = [library]
# ToDo: accept path for the library
res = SCons.Conftest.CheckLib(context, library, symbol, header = header,
language = language, autoadd = autoadd)
context.did_show_result = 1
res = SCons.Conftest.CheckLib(context, library, symbol, header=header,
language=language, extra_libs=extra_libs,
autoadd=autoadd, append=append, unique=unique)
context.did_show_result = True
return not res
# XXX
# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H.
def CheckLibWithHeader(context, libs, header, language,
call = None, autoadd = 1):
# ToDo: accept path for library. Support system header files.
extra_libs = None, call = None, autoadd: bool=True,
append: bool=True, unique: bool=False) -> bool:
"""
Another (more sophisticated) test for a library.
Checks, if library and header is available for language (may be 'C'
or 'CXX'). Call maybe be a valid expression _with_ a trailing ';'.
As in CheckLib, we support library=None, to test if the call compiles
As in :func:`CheckLib`, we support library=None, to test if the call compiles
without extra link flags.
.. versionchanged:: 4.9.0
Added the *extra_libs* keyword parameter. The actual implementation
is in :func:`SCons.Conftest.CheckLib` which already accepted this
parameter, so this is only exposing existing functionality.
"""
prog_prefix, dummy = \
createIncludesFromHeaders(header, 0)
if libs == []:
# ToDo: accept path for library. Support system header files.
prog_prefix, dummy = createIncludesFromHeaders(header, 0)
if not libs:
libs = [None]
if not SCons.Util.is_List(libs):
libs = [libs]
res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix,
call = call, language = language, autoadd = autoadd)
extra_libs = extra_libs, call = call, language = language,
autoadd=autoadd, append=append, unique=unique)
context.did_show_result = 1
return not res
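A hedged SConstruct sketch of calling the reworked checks with the keyword-style arguments documented above; zlib and libm are only illustrative, and call must be a complete expression ending in ';':
env = Environment()
conf = Configure(env)
have_z = conf.CheckLibWithHeader(
    'z', ['zlib.h'], 'C',
    call='zlibVersion();',
    autoadd=False,                 # probe only, leave LIBS untouched
)
# With append=False and unique=True a successful check prepends -lm at most once.
have_m = conf.CheckLib('m', symbol='cos', append=False, unique=True)
env = conf.Finish()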

View File

@ -1,11 +1,6 @@
"""SCons.SConsign
Writing and reading information to the .sconsign file or files.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -25,28 +20,28 @@ Writing and reading information to the .sconsign file or files.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
"""Operations on signature database files (.sconsign). """
__revision__ = "src/engine/SCons/SConsign.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.compat
import SCons.compat # pylint: disable=wrong-import-order
import os
import pickle
import time
import SCons.dblite
import SCons.Warnings
from SCons.compat import PICKLE_PROTOCOL
from SCons.Util import print_time
def corrupt_dblite_warning(filename):
SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
"Ignoring corrupt .sconsign file: %s"%filename)
def corrupt_dblite_warning(filename) -> None:
SCons.Warnings.warn(
SCons.Warnings.CorruptSConsignWarning,
"Ignoring corrupt .sconsign file: %s" % filename,
)
SCons.dblite.ignore_corrupt_dbfiles = 1
SCons.dblite.IGNORE_CORRUPT_DBFILES = True
SCons.dblite.corruption_warning = corrupt_dblite_warning
# XXX Get rid of the global array so this becomes re-entrant.
@ -60,12 +55,28 @@ sig_files = []
# extension the underlying DB module will add).
DataBase = {}
DB_Module = SCons.dblite
DB_Name = ".sconsign"
DB_Name = None
DB_sync_list = []
def current_sconsign_filename():
hash_format = SCons.Util.get_hash_format()
current_hash_algorithm = SCons.Util.get_current_hash_algorithm_used()
# if the user left the options defaulted AND the default algorithm set by
# SCons is md5, then set the database name to be the special default name
#
# otherwise, if it defaults to something like 'sha1' or the user explicitly
# set 'md5' as the hash format, set the database name to .sconsign_<algorithm>
# eg .sconsign_sha1, etc.
if hash_format is None and current_hash_algorithm == 'md5':
return ".sconsign"
return ".sconsign_" + current_hash_algorithm
def Get_DataBase(dir):
global DataBase, DB_Module, DB_Name
global DB_Name
if DB_Name is None:
DB_Name = current_sconsign_filename()
top = dir.fs.Top
if not os.path.isabs(DB_Name) and top.repositories:
mode = "c"
@ -76,7 +87,7 @@ def Get_DataBase(dir):
except KeyError:
path = d.entry_abspath(DB_Name)
try: db = DataBase[d] = DB_Module.open(path, mode)
except (IOError, OSError):
except OSError:
pass
else:
if mode != "r":
@ -94,18 +105,21 @@ def Get_DataBase(dir):
raise
def Reset():
def Reset() -> None:
"""Reset global state. Used by unit tests that end up using
SConsign multiple times to get a clean slate for each test."""
global sig_files, DB_sync_list
sig_files = []
DB_sync_list = []
normcase = os.path.normcase
def write():
global sig_files
def write() -> None:
if print_time():
start_time = time.perf_counter()
for sig_file in sig_files:
sig_file.write(sync=0)
for db in DB_sync_list:
@ -122,8 +136,12 @@ def write():
else:
closemethod()
if print_time():
elapsed = time.perf_counter() - start_time
print('Total SConsign sync time: %f seconds' % elapsed)
class SConsignEntry(object):
class SConsignEntry:
"""
Wrapper class for the generic entry in a .sconsign file.
The Node subclass populates it with attributes as it pleases.
@ -134,22 +152,22 @@ class SConsignEntry(object):
__slots__ = ("binfo", "ninfo", "__weakref__")
current_version_id = 2
def __init__(self):
def __init__(self) -> None:
# Create an object attribute from the class attribute so it ends up
# in the pickled data in the .sconsign file.
#_version_id = self.current_version_id
pass
def convert_to_sconsign(self):
def convert_to_sconsign(self) -> None:
self.binfo.convert_to_sconsign()
def convert_from_sconsign(self, dir, name):
def convert_from_sconsign(self, dir, name) -> None:
self.binfo.convert_from_sconsign(dir, name)
def __getstate__(self):
state = getattr(self, '__dict__', {}).copy()
for obj in type(self).mro():
for name in getattr(obj,'__slots__',()):
for name in getattr(obj, '__slots__', ()):
if hasattr(self, name):
state[name] = getattr(self, name)
@ -160,13 +178,13 @@ class SConsignEntry(object):
pass
return state
def __setstate__(self, state):
def __setstate__(self, state) -> None:
for key, value in state.items():
if key not in ('_version_id','__weakref__'):
if key not in ('_version_id', '__weakref__'):
setattr(self, key, value)
class Base(object):
class Base:
"""
This is the controlling class for the signatures for the collection of
entries associated with a specific directory. The actual directory
@ -175,7 +193,7 @@ class Base(object):
methods for fetching and storing the individual bits of information
that make up signature entry.
"""
def __init__(self):
def __init__(self) -> None:
self.entries = {}
self.dirty = False
self.to_be_merged = {}
@ -186,26 +204,26 @@ class Base(object):
"""
return self.entries[filename]
def set_entry(self, filename, obj):
def set_entry(self, filename, obj) -> None:
"""
Set the entry.
"""
self.entries[filename] = obj
self.dirty = True
def do_not_set_entry(self, filename, obj):
def do_not_set_entry(self, filename, obj) -> None:
pass
def store_info(self, filename, node):
def store_info(self, filename, node) -> None:
entry = node.get_stored_info()
entry.binfo.merge(node.get_binfo())
self.to_be_merged[filename] = node
self.dirty = True
def do_not_store_info(self, filename, node):
def do_not_store_info(self, filename, node) -> None:
pass
def merge(self):
def merge(self) -> None:
for key, node in self.to_be_merged.items():
entry = node.get_stored_info()
try:
@ -227,8 +245,8 @@ class DB(Base):
from a global .sconsign.db* file--the actual file suffix is
determined by the database module.
"""
def __init__(self, dir):
Base.__init__(self)
def __init__(self, dir) -> None:
super().__init__()
self.dir = dir
@ -264,10 +282,9 @@ class DB(Base):
self.set_entry = self.do_not_set_entry
self.store_info = self.do_not_store_info
global sig_files
sig_files.append(self)
def write(self, sync=1):
def write(self, sync: int=1) -> None:
if not self.dirty:
return
@ -295,11 +312,9 @@ class DB(Base):
class Dir(Base):
def __init__(self, fp=None, dir=None):
"""
fp - file pointer to read entries from
"""
Base.__init__(self)
def __init__(self, fp=None, dir=None) -> None:
"""fp - file pointer to read entries from."""
super().__init__()
if not fp:
return
@ -315,24 +330,20 @@ class Dir(Base):
class DirFile(Dir):
"""
Encapsulates reading and writing a per-directory .sconsign file.
"""
def __init__(self, dir):
"""
dir - the directory for the file
"""
"""Encapsulates reading and writing a per-directory .sconsign file."""
def __init__(self, dir) -> None:
"""dir - the directory for the file."""
self.dir = dir
self.sconsign = os.path.join(dir.get_internal_path(), '.sconsign')
self.sconsign = os.path.join(dir.get_internal_path(), current_sconsign_filename())
try:
fp = open(self.sconsign, 'rb')
except IOError:
except OSError:
fp = None
try:
Dir.__init__(self, fp, dir)
super().__init__(fp, dir)
except KeyboardInterrupt:
raise
except Exception:
@ -344,12 +355,10 @@ class DirFile(Dir):
except AttributeError:
pass
global sig_files
sig_files.append(self)
def write(self, sync=1):
"""
Write the .sconsign file to disk.
def write(self, sync: int=1) -> None:
"""Write the .sconsign file to disk.
Try to write to a temporary file first, and rename it if we
succeed. If we can't write to the temporary file, it's
@ -369,11 +378,11 @@ class DirFile(Dir):
try:
file = open(temp, 'wb')
fname = temp
except IOError:
except OSError:
try:
file = open(self.sconsign, 'wb')
fname = self.sconsign
except IOError:
except OSError:
return
for key, entry in self.entries.items():
entry.convert_to_sconsign()
@ -384,7 +393,7 @@ class DirFile(Dir):
mode = os.stat(self.sconsign)[0]
os.chmod(self.sconsign, 0o666)
os.unlink(self.sconsign)
except (IOError, OSError):
except OSError:
# Try to carry on in the face of either OSError
# (things like permission issues) or IOError (disk
# or network issues). If there's a really dangerous
@ -405,13 +414,13 @@ class DirFile(Dir):
os.chmod(self.sconsign, mode)
try:
os.unlink(temp)
except (IOError, OSError):
except OSError:
pass
ForDirectory = DB
def File(name, dbm_module=None):
def File(name, dbm_module=None) -> None:
"""
Arrange for all signatures to be stored in a global .sconsign.db*
file.

View File

@ -0,0 +1,333 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Dependency scanner for C/C++ code.
Two scanners are defined here: the default CScanner, and the optional
CConditionalScanner, which must be explicitly selected by calling
add_scanner() for each affected suffix.
"""
from typing import Dict
import SCons.Node.FS
import SCons.cpp
import SCons.Util
from . import ClassicCPP, FindPathDirs
class SConsCPPScanner(SCons.cpp.PreProcessor):
"""SCons-specific subclass of the cpp.py module's processing.
We subclass this so that: 1) we can deal with files represented
by Nodes, not strings; 2) we can keep track of the files that are
missing.
"""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.missing = []
def initialize_result(self, fname) -> None:
self.result = SCons.Util.UniqueList([fname])
def finalize_result(self, fname):
return self.result[1:]
def find_include_file(self, t):
keyword, quote, fname = t
result = SCons.Node.FS.find_file(fname, self.searchpath[quote])
if not result:
self.missing.append((fname, self.current_file))
return result
def read_file(self, file) -> str:
try:
return file.rfile().get_text_contents()
except OSError as e:
self.missing.append((file, self.current_file))
return ''
def dictify_CPPDEFINES(env, replace: bool = False) -> dict:
"""Return CPPDEFINES converted to a dict for preprocessor emulation.
The concept is similar to :func:`~SCons.Defaults.processDefines`:
turn the values stored in an internal form in ``env['CPPDEFINES']``
into one needed for a specific context - in this case the cpp-like
work the C/C++ scanner will do. We can't reuse ``processDefines``
output as that's a list of strings for the command line. We also can't
pass the ``CPPDEFINES`` variable directly to the ``dict`` constructor,
as SCons allows it to be stored in several different ways - it's only
after ``Append`` and its relatives have been called that we know for sure it will
be a deque of tuples.
If requested (*replace* is true), simulate some of the macro
replacement that would take place if an actual preprocessor ran,
to avoid some conditional inclusions coming out wrong. A bit
of an edge case, but it does happen (GH #4623). See 6.10.5 in the C
standard and 15.6 in the C++ standard.
Args:
replace: if true, simulate macro replacement
.. versionchanged:: 4.9.0
Simple macro replacement added, and *replace* arg to enable it.
"""
def _replace(mapping: Dict) -> Dict:
"""Simplistic macro replacer for dictify_CPPDEFINES.
Scan *mapping* for a value that is the same as a key in the dict,
and replace with the value of that key; the process is repeated a few
times, but not forever in case someone left a case that can't be
fully resolved. This is a cheap approximation of the preprocessor's
macro replacement rules with no smarts - it doesn't "look inside"
the values, so only triggers on object-like macros, not on
function-like macros, and will not work on complex values, e.g.
a value like ``(1UL << PR_MTE_TCF_SHIFT)`` would not have
``PR_MTE_TCF_SHIFT`` replaced if that was also a key in ``CPPDEFINES``.
Args:
mapping: a dictionary representing macro names and replacements.
Returns:
a dictionary with replacements made.
"""
old_ns = mapping
loops = 0
while loops < 5: # don't recurse forever in case there's circular data
# this was originally written as a dict comprehension, but unrolling
# lets us add a finer-grained check for whether another loop is
# needed, rather than comparing two dicts to see if one changed.
again = False
ns = {}
for k, v in old_ns.items():
if v in old_ns:
ns[k] = old_ns[v]
if not again and ns[k] != v:
again = True
else:
ns[k] = v
if not again:
break
old_ns = ns
loops += 1
return ns
cppdefines = env.get('CPPDEFINES', {})
if not cppdefines:
return {}
if SCons.Util.is_Tuple(cppdefines):
# single macro defined in a tuple
try:
return {cppdefines[0]: cppdefines[1]}
except IndexError:
return {cppdefines[0]: None}
if SCons.Util.is_Sequence(cppdefines):
# multiple (presumably) macro defines in a deque, list, etc.
result = {}
for c in cppdefines:
if SCons.Util.is_Sequence(c):
try:
result[c[0]] = c[1]
except IndexError:
# could be a one-item sequence
result[c[0]] = None
elif SCons.Util.is_String(c):
try:
name, value = c.split('=')
result[name] = value
except ValueError:
result[c] = None
else:
# don't really know what to do here
result[c] = None
if replace:
return _replace(result)
return(result)
if SCons.Util.is_String(cppdefines):
# single macro define in a string
try:
name, value = cppdefines.split('=')
return {name: value}
except ValueError:
return {cppdefines: None}
if SCons.Util.is_Dict(cppdefines):
# already in the desired form
if replace:
return _replace(cppdefines)
return cppdefines
return {cppdefines: None}
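A standalone sketch of the conversion; the import path SCons.Scanner.C is an assumption about where this module is installed, and a plain dict stands in for an Environment since only .get() is used:
from SCons.Scanner.C import dictify_CPPDEFINES  # assumed module location

env = {'CPPDEFINES': ['NDEBUG', ('VERSION', 2), 'LIMIT=MAX', ('MAX', 64)]}
print(dictify_CPPDEFINES(env))
# -> {'NDEBUG': None, 'VERSION': 2, 'LIMIT': 'MAX', 'MAX': 64}
print(dictify_CPPDEFINES(env, replace=True))
# -> the object-like macro LIMIT now resolves through MAX to 64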
class SConsCPPScannerWrapper:
"""The SCons wrapper around a cpp.py scanner.
This is the actual glue between the calling conventions of generic
SCons scanners, and the (subclass of) cpp.py class that knows how
to look for #include lines with reasonably real C-preprocessor-like
evaluation of #if/#ifdef/#else/#elif lines.
"""
def __init__(self, name, variable) -> None:
self.name = name
self.path = FindPathDirs(variable)
def __call__(self, node, env, path=()):
cpp = SConsCPPScanner(
current=node.get_dir(),
cpppath=path,
dict=dictify_CPPDEFINES(env, replace=True),
)
result = cpp(node)
for included, includer in cpp.missing:
SCons.Warnings.warn(
SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s (included from: %s) "
"-- file not found" % (included, includer),
)
return result
def recurse_nodes(self, nodes):
return nodes
def select(self, node):
return self
def CScanner():
"""Return a prototype Scanner instance for scanning source files
that use the C pre-processor"""
# Here's how we would (or might) use the CPP scanner code above that
# knows how to evaluate #if/#ifdef/#else/#elif lines when searching
# for #includes. This is commented out for now until we add the
# right configurability to let users pick between the scanners.
# return SConsCPPScannerWrapper("CScanner", "CPPPATH")
cs = ClassicCPP(
"CScanner",
"$CPPSUFFIXES",
"CPPPATH",
r'^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")',
)
return cs
#
# ConditionalScanner
#
class SConsCPPConditionalScanner(SCons.cpp.PreProcessor):
"""SCons-specific subclass of the cpp.py module's processing.
We subclass this so that: 1) we can deal with files represented
by Nodes, not strings; 2) we can keep track of the files that are
missing.
"""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.missing = []
self._known_paths = []
def initialize_result(self, fname) -> None:
self.result = SCons.Util.UniqueList([fname])
def find_include_file(self, t):
keyword, quote, fname = t
paths = tuple(self._known_paths) + self.searchpath[quote]
if quote == '"':
paths = (self.current_file.dir,) + paths
result = SCons.Node.FS.find_file(fname, paths)
if result:
result_path = result.get_abspath()
for p in self.searchpath[quote]:
if result_path.startswith(p.get_abspath()):
self._known_paths.append(p)
break
else:
self.missing.append((fname, self.current_file))
return result
def read_file(self, file) -> str:
try:
return file.rfile().get_text_contents()
except OSError:
self.missing.append((file, self.current_file))
return ""
class SConsCPPConditionalScannerWrapper:
"""
The SCons wrapper around a cpp.py scanner.
This is the actual glue between the calling conventions of generic
SCons scanners, and the (subclass of) cpp.py class that knows how
to look for #include lines with reasonably real C-preprocessor-like
evaluation of #if/#ifdef/#else/#elif lines.
"""
def __init__(self, name, variable) -> None:
self.name = name
self.path = FindPathDirs(variable)
def __call__(self, node, env, path=(), depth=-1):
cpp = SConsCPPConditionalScanner(
current=node.get_dir(),
cpppath=path,
dict=dictify_CPPDEFINES(env),
depth=depth,
)
result = cpp(node)
for included, includer in cpp.missing:
fmt = "No dependency generated for file: %s (included from: %s) -- file not found"
SCons.Warnings.warn(
SCons.Warnings.DependencyWarning, fmt % (included, includer)
)
return result
def recurse_nodes(self, nodes):
return nodes
def select(self, node):
return self
def CConditionalScanner():
"""
Return an advanced conditional Scanner instance for scanning source files
Interprets C/C++ Preprocessor conditional syntax
(#ifdef, #if, defined, #else, #elif, etc.).
"""
return SConsCPPConditionalScannerWrapper("CConditionalScanner", "CPPPATH")
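A hedged sketch of the opt-in the module docstring describes ("must be explicitly selected by calling add_scanner() for each affected suffix"); registering on SCons.Tool.SourceFileScanner from an SConstruct is one plausible place to do it, not the only one:
import SCons.Tool
from SCons.Scanner.C import CConditionalScanner  # assumed module location

cond = CConditionalScanner()
for suffix in ('.c', '.h', '.cpp', '.hpp'):
    SCons.Tool.SourceFileScanner.add_scanner(suffix, cond)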
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,14 +1,6 @@
"""SCons.Scanner.D
Scanner for the Digital Mars "D" programming language.
Coded by Andy Friesen
17 Nov 2003
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,34 +20,39 @@ Coded by Andy Friesen
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/D.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Scanner for the Digital Mars "D" programming language.
import SCons.Scanner
Coded by Andy Friesen, 17 Nov 2003
"""
import SCons.Node.FS
from . import Classic
def DScanner():
"""Return a prototype Scanner instance for scanning D source files"""
ds = D()
return ds
class D(SCons.Scanner.Classic):
def __init__ (self):
SCons.Scanner.Classic.__init__ (
self,
name = "DScanner",
suffixes = '$DSUFFIXES',
path_variable = 'DPATH',
regex = r'(?:import\s+)([\w\s=,.]+)(?:\s*:[\s\w,=]+)?(?:;)'
class D(Classic):
def __init__(self) -> None:
super().__init__(
name="DScanner",
suffixes='$DSUFFIXES',
path_variable='DPATH',
regex=r'(?:import\s+)([\w\s=,.]+)(?:\s*:[\s\w,=]+)?(?:;)',
)
def find_include(self, include, source_dir, path):
@staticmethod
def find_include(include, source_dir, path):
# translate dots (package separators) to slashes
inc = include.replace('.', '/')
i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path)
# According to https://dlang.org/dmd-linux.html#interface-files
# Prefer .di files over .d files when processing includes (imports)
i = SCons.Node.FS.find_file(inc + '.di', (source_dir,) + path)
if i is None:
i = SCons.Node.FS.find_file (inc + '.di', (source_dir,) + path)
i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path)
return i, include
def find_include_names(self, node):

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -20,28 +21,26 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Scanner/Dir.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.Node.FS
import SCons.Scanner
from . import ScannerBase
def only_dirs(nodes):
is_Dir = lambda n: isinstance(n.disambiguate(), SCons.Node.FS.Dir)
return [node for node in nodes if is_Dir(node)]
def DirScanner(**kw):
def DirScanner(**kwargs):
"""Return a prototype Scanner instance for scanning
directories for on-disk files"""
kw['node_factory'] = SCons.Node.FS.Entry
kw['recursive'] = only_dirs
return SCons.Scanner.Base(scan_on_disk, "DirScanner", **kw)
kwargs['node_factory'] = SCons.Node.FS.Entry
kwargs['recursive'] = only_dirs
return ScannerBase(scan_on_disk, "DirScanner", **kwargs)
def DirEntryScanner(**kw):
def DirEntryScanner(**kwargs):
"""Return a prototype Scanner instance for "scanning"
directory Nodes for their in-memory entries"""
kw['node_factory'] = SCons.Node.FS.Entry
kw['recursive'] = None
return SCons.Scanner.Base(scan_in_memory, "DirEntryScanner", **kw)
kwargs['node_factory'] = SCons.Node.FS.Entry
kwargs['recursive'] = None
return ScannerBase(scan_in_memory, "DirEntryScanner", **kwargs)
skip_entry = {}
@ -60,6 +59,29 @@ skip_entry_list = [
'.sconsign.bak',
# Used by some dbm emulations using Berkeley DB.
'.sconsign.db',
# new filenames since multiple hash formats allowed:
'.sconsign_md5.dblite',
'.sconsign_sha1.dblite',
'.sconsign_sha256.dblite',
# and all the duplicate files for each sub-sconsfile type
'.sconsign_md5',
'.sconsign_md5.dir',
'.sconsign_md5.pag',
'.sconsign_md5.dat',
'.sconsign_md5.bak',
'.sconsign_md5.db',
'.sconsign_sha1',
'.sconsign_sha1.dir',
'.sconsign_sha1.pag',
'.sconsign_sha1.dat',
'.sconsign_sha1.bak',
'.sconsign_sha1.db',
'.sconsign_sha256',
'.sconsign_sha256.dir',
'.sconsign_sha256.pag',
'.sconsign_sha256.dat',
'.sconsign_sha256.bak',
'.sconsign_sha256.db',
]
for skip in skip_entry_list:
@ -78,7 +100,7 @@ def scan_on_disk(node, env, path=()):
"""
try:
flist = node.fs.listdir(node.get_abspath())
except (IOError, OSError):
except OSError:
return []
e = node.Entry
for f in filter(do_not_scan, flist):

View File

@ -1,11 +1,6 @@
"""SCons.Scanner.Fortran
This module implements the dependency scanner for Fortran code.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -26,17 +21,17 @@ This module implements the dependency scanner for Fortran code.
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Scanner/Fortran.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Dependency scanner for Fortran code."""
import re
import SCons.Node
import SCons.Node.FS
import SCons.Scanner
import SCons.Util
import SCons.Warnings
from . import Classic, Current, FindPathDirs
class F90Scanner(SCons.Scanner.Classic):
class F90Scanner(Classic):
"""
A Classic Scanner subclass for Fortran source files which takes
into account both USE and INCLUDE statements. This scanner will
@ -53,7 +48,7 @@ class F90Scanner(SCons.Scanner.Classic):
"""
def __init__(self, name, suffixes, path_variable,
use_regex, incl_regex, def_regex, *args, **kw):
use_regex, incl_regex, def_regex, *args, **kwargs) -> None:
self.cre_use = re.compile(use_regex, re.M)
self.cre_incl = re.compile(incl_regex, re.M)
@ -67,13 +62,14 @@ class F90Scanner(SCons.Scanner.Classic):
return self.scan(node, env, path)
kw['function'] = _scan
kw['path_function'] = SCons.Scanner.FindPathDirs(path_variable)
kw['recursive'] = 1
kw['skeys'] = suffixes
kw['name'] = name
kwargs['function'] = _scan
kwargs['path_function'] = FindPathDirs(path_variable)
kwargs['recursive'] = 1
kwargs['skeys'] = suffixes
kwargs['name'] = name
SCons.Scanner.Current.__init__(self, *args, **kw)
# bypasses the parent Classic initializer
Current.__init__(self, *args, **kwargs)
def scan(self, node, env, path=()):
@ -123,7 +119,7 @@ class F90Scanner(SCons.Scanner.Classic):
return [pair[1] for pair in sorted(nodes)]
def FortranScan(path_variable="FORTRANPATH"):
def FortranScan(path_variable: str="FORTRANPATH"):
"""Return a prototype Scanner instance for scanning source files
for Fortran USE & INCLUDE statements"""

View File

@ -1,12 +1,6 @@
"""SCons.Scanner.IDL
This module implements the dependency scanner for IDL (Interface
Definition Language) files.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -26,19 +20,19 @@ Definition Language) files.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/IDL.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Dependency scanner for IDL (Interface Definition Language) files."""
import SCons.Node.FS
import SCons.Scanner
from . import ClassicCPP
def IDLScan():
"""Return a prototype Scanner instance for scanning IDL source files"""
cs = SCons.Scanner.ClassicCPP("IDLScan",
"$IDLSUFFIXES",
"CPPPATH",
'^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")')
cs = ClassicCPP(
"IDLScan",
"$IDLSUFFIXES",
"CPPPATH",
r'^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")',
)
return cs
# Local Variables:

View File

@ -0,0 +1,111 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import SCons.Node
import SCons.Node.FS
import SCons.Scanner
from SCons.Util import flatten, is_String
def _subst_paths(env, paths) -> list:
"""Return a list of substituted path elements.
If *paths* is a string, it is split on the search-path separator.
Otherwise, substitution is done on string-valued list elements but
they are not split.
Note: this helps support behavior like pulling in the external ``CLASSPATH``
and setting it directly into ``JAVACLASSPATH``; however, splitting on
``os.pathsep`` makes the interpretation system-specific (this is
warned about in the manpage entry for ``JAVACLASSPATH``).
"""
if is_String(paths):
paths = env.subst(paths)
if SCons.Util.is_String(paths):
paths = paths.split(os.pathsep)
else:
# TODO: may want to revisit splitting list-element strings if requested
paths = flatten(paths)
paths = [env.subst(path) if is_String(path) else path for path in paths]
return paths
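A hedged SConstruct sketch of the trade-off noted above (paths are hypothetical): a string value is split on os.pathsep, so importing the external CLASSPATH wholesale is host-specific, while a list is substituted element by element and never split:
import os

env_str = Environment(JAVACLASSPATH=os.environ.get('CLASSPATH', ''))     # split form
env_list = Environment(JAVACLASSPATH=['build/classes', 'lib/util.jar'])  # portable form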
def _collect_classes(classlist, dirname, files) -> None:
for fname in files:
if fname.endswith(".class"):
classlist.append(os.path.join(str(dirname), fname))
def scan(node, env, libpath=()) -> list:
"""Scan for files both on JAVACLASSPATH and JAVAPROCESSORPATH.
JAVACLASSPATH/JAVAPROCESSORPATH path can contain:
- Explicit paths to JAR/Zip files
- Wildcards (*)
- Directories which contain classes in an unnamed package
- Parent directories of the root package for classes in a named package
Class path entries that are neither directories nor archives (.zip
or JAR files) nor the asterisk (*) wildcard character are ignored.
"""
classpath = []
for var in ['JAVACLASSPATH', 'JAVAPROCESSORPATH']:
classpath += _subst_paths(env, env.get(var, []))
result = []
for path in classpath:
if is_String(path) and "*" in path:
# This matches more than the Java docs describe: a '*' only
# matches jar files. The filter later should trim this down.
# TODO: should we filter here? use .endswith('*') rather than "in"?
libs = env.Glob(path)
else:
libs = [path]
for lib in libs:
if os.path.isdir(str(lib)):
# grab the in-memory nodes
env.Dir(lib).walk(_collect_classes, result)
# now the on-disk ones
for root, dirs, files in os.walk(str(lib)):
_collect_classes(result, root, files)
else:
result.append(lib)
return list(filter(lambda x: os.path.splitext(str(x))[1] in [".class", ".zip", ".jar"], result))
def JavaScanner():
"""Scanner for .java files.
.. versionadded:: 4.4
"""
return SCons.Scanner.Base(scan, 'JavaScanner', skeys=['.java'])
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,11 +1,6 @@
"""SCons.Scanner.LaTeX
This module implements the dependency scanner for LaTeX code.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -25,15 +20,16 @@ This module implements the dependency scanner for LaTeX code.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/LaTeX.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Dependency scanner for LaTeX code."""
import os.path
import re
import SCons.Scanner
import SCons.Node.FS
import SCons.Util
import SCons.Warnings
from . import ScannerBase, FindPathDirs
# list of graphics file extensions for TeX and LaTeX
TexGraphics = ['.eps', '.ps']
@ -42,7 +38,7 @@ LatexGraphics = [ '.png', '.jpg', '.gif', '.tif']
# Used as a return value of modify_env_var if the variable is not set.
class _Null(object):
class _Null:
pass
_null = _Null
@ -77,13 +73,14 @@ def modify_env_var(env, var, abspath):
return save
class FindENVPathDirs(object):
class FindENVPathDirs:
"""
A class to bind a specific E{*}PATH variable name to a function that
will return all of the E{*}path directories.
"""
def __init__(self, variable):
def __init__(self, variable) -> None:
self.variable = variable
def __call__(self, env, dir=None, target=None, source=None, argument=None):
import SCons.PathList
try:
@ -96,7 +93,6 @@ class FindENVPathDirs(object):
return tuple(dir.Rfindalldirs(path))
def LaTeXScanner():
"""
Return a prototype Scanner instance for scanning LaTeX source files
@ -109,6 +105,7 @@ def LaTeXScanner():
recursive = 0)
return ds
def PDFLaTeXScanner():
"""
Return a prototype Scanner instance for scanning LaTeX source files
@ -121,9 +118,9 @@ def PDFLaTeXScanner():
recursive = 0)
return ds
class LaTeX(SCons.Scanner.Base):
"""
Class for scanning LaTeX files for included files.
class LaTeX(ScannerBase):
"""Class for scanning LaTeX files for included files.
Unlike most scanners, which use regular expressions that just
return the included file name, this returns a tuple consisting
@ -172,13 +169,18 @@ class LaTeX(SCons.Scanner.Base):
'addsectionbib': 'BIBINPUTS',
'makeindex': 'INDEXSTYLE',
'usepackage': 'TEXINPUTS',
'usetheme': 'TEXINPUTS',
'usecolortheme': 'TEXINPUTS',
'usefonttheme': 'TEXINPUTS',
'useinnertheme': 'TEXINPUTS',
'useoutertheme': 'TEXINPUTS',
'lstinputlisting': 'TEXINPUTS'}
env_variables = SCons.Util.unique(list(keyword_paths.values()))
two_arg_commands = ['import', 'subimport',
'includefrom', 'subincludefrom',
'inputfrom', 'subinputfrom']
def __init__(self, name, suffixes, graphics_extensions, *args, **kw):
def __init__(self, name, suffixes, graphics_extensions, *args, **kwargs) -> None:
regex = r'''
\\(
include
@ -196,6 +198,7 @@ class LaTeX(SCons.Scanner.Base):
| addglobalbib
| addsectionbib
| usepackage
| use(?:|color|font|inner|outer)theme(?:\s*\[[^\]]+\])?
)
\s*{([^}]*)} # first arg
(?: \s*{([^}]*)} )? # maybe another arg
@ -211,7 +214,7 @@ class LaTeX(SCons.Scanner.Base):
return []
return self.scan_recurse(node, path)
class FindMultiPathDirs(object):
class FindMultiPathDirs:
"""The stock FindPathDirs function has the wrong granularity:
it is called once per target, while we need the path that depends
on what kind of included files is being searched. This wrapper
@ -222,11 +225,10 @@ class LaTeX(SCons.Scanner.Base):
back and uses a dictionary of tuples rather than a single tuple
of paths.
"""
def __init__(self, dictionary):
def __init__(self, dictionary) -> None:
self.dictionary = {}
for k,n in dictionary.items():
self.dictionary[k] = ( SCons.Scanner.FindPathDirs(n),
FindENVPathDirs(n) )
self.dictionary[k] = (FindPathDirs(n), FindENVPathDirs(n))
def __call__(self, env, dir=None, target=None, source=None,
argument=None):
@ -239,26 +241,29 @@ class LaTeX(SCons.Scanner.Base):
# To prevent "dict is not hashable error"
return tuple(di.items())
class LaTeXScanCheck(object):
"""Skip all but LaTeX source files, i.e., do not scan *.eps,
*.pdf, *.jpg, etc.
class LaTeXScanCheck:
"""Skip all but LaTeX source files.
Do not scan *.eps, *.pdf, *.jpg, etc.
"""
def __init__(self, suffixes):
def __init__(self, suffixes) -> None:
self.suffixes = suffixes
def __call__(self, node, env):
current = not node.has_builder() or node.is_up_to_date()
scannable = node.get_suffix() in env.subst_list(self.suffixes)[0]
# Returning false means that the file is not scanned.
return scannable and current
kw['function'] = _scan
kw['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths)
kw['recursive'] = 0
kw['skeys'] = suffixes
kw['scan_check'] = LaTeXScanCheck(suffixes)
kw['name'] = name
kwargs['function'] = _scan
kwargs['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths)
kwargs['recursive'] = 0
kwargs['skeys'] = suffixes
kwargs['scan_check'] = LaTeXScanCheck(suffixes)
kwargs['name'] = name
SCons.Scanner.Base.__init__(self, *args, **kw)
super().__init__(*args, **kwargs)
def _latex_names(self, include_type, filename):
if include_type == 'input':
@ -290,7 +295,8 @@ class LaTeX(SCons.Scanner.Base):
return [filename+e for e in self.graphics_extensions]
return [filename]
def sort_key(self, include):
@staticmethod
def sort_key(include):
return SCons.Node.FS._my_normcase(str(include))
def find_include(self, include, source_dir, path):
@ -332,7 +338,7 @@ class LaTeX(SCons.Scanner.Base):
line_continues_a_comment = len(comment) > 0
return '\n'.join(out).rstrip()+'\n'
def scan(self, node, subdir='.'):
def scan(self, node, subdir: str='.'):
# Modify the default scan function to allow for the regular
# expression to return a comma separated list of file names
# as can be the case with the bibliography keyword.
@ -362,6 +368,9 @@ class LaTeX(SCons.Scanner.Base):
if inc_type in self.two_arg_commands:
inc_subdir = os.path.join(subdir, include[1])
inc_list = include[2].split(',')
elif re.match('use(|color|font|inner|outer)theme', inc_type):
inc_list = [re.sub('use', 'beamer', inc_type) + _ + '.sty' for _ in
include[1].split(',')]
else:
inc_list = include[1].split(',')
for inc in inc_list:
@ -399,10 +408,10 @@ class LaTeX(SCons.Scanner.Base):
inc_type, inc_subdir, inc_filename = include
try:
if seen[inc_filename] == 1:
if seen[inc_filename]:
continue
except KeyError:
seen[inc_filename] = 1
seen[inc_filename] = True
#
# Handle multiple filenames in include[1]
@ -411,14 +420,17 @@ class LaTeX(SCons.Scanner.Base):
if n is None:
# Do not bother with 'usepackage' warnings, as they most
# likely refer to system-level files
if inc_type != 'usepackage':
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s (included from: %s) -- file not found" % (i, node))
if inc_type != 'usepackage' or re.match("use(|color|font|inner|outer)theme", inc_type):
SCons.Warnings.warn(
SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s "
"(included from: %s) -- file not found" % (i, node),
)
else:
sortkey = self.sort_key(n)
nodes.append((sortkey, n))
# recurse down
queue.extend( self.scan(n, inc_subdir) )
queue.extend(self.scan(n, inc_subdir))
return [pair[1] for pair in sorted(nodes)]
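A tiny sketch of the theme-name mapping the new beamer branches above perform; \usecolortheme{orchid} (theme name hypothetical) turns into a search for beamercolorthemeorchid.sty on TEXINPUTS:
import re

inc_type, arg = 'usecolortheme', 'orchid'
deps = [re.sub('use', 'beamer', inc_type) + name + '.sty'
        for name in arg.split(',')]
print(deps)   # ['beamercolorthemeorchid.sty']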

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -19,29 +20,26 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/Prog.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Dependency scanner for program files."""
import SCons.Node
import SCons.Node.FS
import SCons.Scanner
import SCons.Util
from . import ScannerBase, FindPathDirs
# global, set by --debug=findlibs
print_find_libs = None
def ProgramScanner(**kw):
def ProgramScanner(**kwargs):
"""Return a prototype Scanner instance for scanning executable
files for static-lib dependencies"""
kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH')
ps = SCons.Scanner.Base(scan, "ProgramScanner", **kw)
kwargs['path_function'] = FindPathDirs('LIBPATH')
ps = ScannerBase(scan, "ProgramScanner", **kwargs)
return ps
def _subst_libs(env, libs):
"""
Substitute environment variables and split into list.
"""
"""Substitute environment variables and split into list."""
if SCons.Util.is_String(libs):
libs = env.subst(libs)
if SCons.Util.is_String(libs):
@ -57,9 +55,9 @@ def _subst_libs(env, libs):
return libs
def scan(node, env, libpath = ()):
"""
This scanner scans program files for static-library
dependencies. It will search the LIBPATH environment variable
"""Scans program files for static-library dependencies.
It will search the LIBPATH environment variable
for libraries specified in the LIBS variable, returning any
files it finds as dependencies.
"""

View File

@ -0,0 +1,216 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Dependency scanner for Python code.
One important note about the design is that this does not take any dependencies
upon packages or binaries in the Python installation unless they are listed in
PYTHONPATH. To do otherwise would have required code to determine where the
Python installation is, which is outside of the scope of a scanner like this.
If consumers want to pick up dependencies upon these packages, they must put
those directories in PYTHONPATH.
"""
import itertools
import os
import re
import SCons.Node.FS
import SCons.Util
from . import ScannerBase
# Capture python "from a import b" and "import a" statements.
from_cre = re.compile(r'^\s*from\s+([^\s]+)\s+import\s+(.*)', re.M)
import_cre = re.compile(r'^\s*import\s+([^\s]+)', re.M)
def path_function(env, dir=None, target=None, source=None, argument=None):
"""Retrieves a tuple with all search paths."""
paths = env['ENV'].get('PYTHONPATH', '').split(os.pathsep)
if source:
paths.append(source[0].dir.abspath)
return tuple(paths)
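A hedged SConstruct sketch of feeding the scanner its search path, per the module note above (the src/ directory is hypothetical):
env = Environment()
env.AppendENVPath('PYTHONPATH', env.Dir('src').abspath)
# Builders consuming .py sources now record imports resolved under src/ as
# implicit dependencies; anything outside PYTHONPATH is deliberately ignored.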
def find_include_names(node):
"""Scans the node for all imports.
Returns a list of tuples. Each tuple has two elements:
1. The main import (e.g. module, module.file, module.module2)
2. Additional optional imports that could be functions or files
in the case of a "from X import Y" statement. In the case of a
normal "import" statement, this is None.
"""
text = node.get_text_contents()
all_matches = []
matches = from_cre.findall(text)
if matches:
for match in matches:
imports = [i.strip() for i in match[1].split(',')]
# Add some custom logic to strip out "as" because the regex
# includes it.
last_import_split = imports[-1].split()
if len(last_import_split) > 1:
imports[-1] = last_import_split[0]
all_matches.append((match[0], imports))
matches = import_cre.findall(text)
if matches:
for match in matches:
all_matches.append((match, None))
return all_matches
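A standalone sketch of what the two regexes above extract (they are re-created here only for illustration):
import re

from_cre = re.compile(r'^\s*from\s+([^\s]+)\s+import\s+(.*)', re.M)
import_cre = re.compile(r'^\s*import\s+([^\s]+)', re.M)

text = "import os\nfrom collections import deque as dq\nfrom pkg.sub import helper, util\n"
print(import_cre.findall(text))  # ['os']
print(from_cre.findall(text))    # [('collections', 'deque as dq'), ('pkg.sub', 'helper, util')]
# find_include_names() then strips the trailing 'as dq' from the last name.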
def find_import(import_path, search_paths):
"""
Finds the specified import in the various search paths.
For an import of "p", it could either result in a file named p.py or
p/__init__.py. We can't do two consecutive searches for p then p.py
because the first search could return a result that is lower in the
search_paths precedence order. As a result, it is safest to iterate over
search_paths and check whether p or p.py exists in each path. This allows
us to cleanly respect the precedence order.
If the import is found, returns a tuple containing:
1. Discovered dependency node (e.g. p/__init__.py or p.py)
2. True if the import was a package, False if the import was a module.
3. The Dir node in search_paths that the import is relative to.
If the import is not found, returns a tuple containing (None, False, None).
Callers should check for failure by checking whether the first entry in the
tuple is not None.
"""
for search_path in search_paths:
paths = [search_path]
# Note: if the same import is present as a package and a module, Python
# prefers the package. As a result, we always look for x/__init__.py
# before looking for x.py.
node = SCons.Node.FS.find_file(import_path + '/__init__.py', paths)
if node:
return node, True, search_path
else:
node = SCons.Node.FS.find_file(import_path + '.py', paths)
if node:
return node, False, search_path
return None, False, None
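A pure-Python analogue of the precedence rule described above, using os.path instead of SCons FS nodes, to make the package-before-module and first-path-wins behaviour concrete:
import os

def find_import_path(import_path, search_paths):
    """Like find_import, but on plain filesystem paths."""
    for search_path in search_paths:
        pkg = os.path.join(search_path, import_path, '__init__.py')
        if os.path.isfile(pkg):
            return pkg, True, search_path      # package beats module in this path
        mod = os.path.join(search_path, import_path + '.py')
        if os.path.isfile(mod):
            return mod, False, search_path
    return None, False, None                   # not found anywhere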
def scan(node, env, path=()):
# cache the includes list in node so we only scan it once:
if node.includes is not None:
includes = node.includes
else:
includes = find_include_names(node)
# Intern the names of the include files. Saves some memory
# if the same header is included many times.
node.includes = list(map(SCons.Util.silent_intern, includes))
nodes = []
if callable(path):
path = path()
for module, imports in includes:
is_relative = module.startswith('.')
if is_relative:
# This is a relative include, so we must ignore PYTHONPATH.
module_lstripped = module.lstrip('.')
# One dot is current directory, two is parent, three is
# grandparent, etc.
num_parents = len(module) - len(module_lstripped) - 1
current_dir = node.get_dir()
for i in itertools.repeat(None, num_parents):
current_dir = current_dir.up()
search_paths = [current_dir]
search_string = module_lstripped
else:
search_paths = [env.Dir(p) for p in path]
search_string = module
# If there are no paths, there is no point in parsing includes for this
# iteration of the loop.
if not search_paths:
continue
module_components = [x for x in search_string.split('.') if x]
package_dir = None
hit_dir = None
if not module_components:
# This is just a "from . import x".
package_dir = search_paths[0]
else:
# Translate something like "import x.y" to a call to find_import
# with 'x/y' as the path. find_import will then determine whether
# we can find 'x/y/__init__.py' or 'x/y.py'.
import_node, is_dir, hit_dir = find_import(
'/'.join(module_components), search_paths)
if import_node:
nodes.append(import_node)
if is_dir:
package_dir = import_node.dir
# If the statement was something like "from x import y, z", whether we
# iterate over imports depends on whether x was a package or module.
# If it was a module, y and z are just functions so we don't need to
# search for them. If it was a package, y and z are either packages or
# modules and we do need to search for them.
if package_dir and imports:
for i in imports:
import_node, _, _ = find_import(i, [package_dir])
if import_node:
nodes.append(import_node)
# Take a dependency on all __init__.py files from all imported
# packages unless it's a relative import. If it's a relative
# import, we don't need to take the dependency because Python
# requires that all referenced packages have already been imported,
# which means that the dependency has already been established.
if hit_dir and not is_relative:
import_dirs = module_components
for i in range(len(import_dirs)):
init_path = '/'.join(import_dirs[:i+1] + ['__init__.py'])
init_node = SCons.Node.FS.find_file(init_path, [hit_dir])
if init_node and init_node not in nodes:
nodes.append(init_node)
return sorted(nodes)
PythonSuffixes = ['.py']
PythonScanner = ScannerBase(
scan,
name='PythonScanner',
skeys=PythonSuffixes,
path_function=path_function,
recursive=True,
)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View File

@ -1,12 +1,6 @@
"""SCons.Scanner.RC
This module implements the dependency scanner for RC (Interface
Definition Language) files.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -26,20 +20,17 @@ Definition Language) files.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/RC.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Dependency scanner for RC (Interface Definition Language) files."""
import re
import SCons.Node.FS
import SCons.Scanner
from . import ClassicCPP
def no_tlb(nodes):
"""
Filter out .tlb files as they are binary and shouldn't be scanned
"""
"""Filter out .tlb files as they are binary and shouldn't be scanned."""
# print("Nodes:%s"%[str(n) for n in nodes])
return [n for n in nodes if str(n)[-4:] != '.tlb']
@ -47,16 +38,16 @@ def no_tlb(nodes):
def RCScan():
"""Return a prototype Scanner instance for scanning RC source files"""
res_re= r'^(?:\s*#\s*(?:include)|' \
r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \
r'\s*.*?)' \
r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$'
resScanner = SCons.Scanner.ClassicCPP("ResourceScanner",
"$RCSUFFIXES",
"CPPPATH",
res_re,
recursive=no_tlb)
res_re = (
r'^(?:\s*#\s*(?:include)|'
r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)'
r'\s*.*?)'
r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$'
)
resScanner = ClassicCPP(
"ResourceScanner", "$RCSUFFIXES", "CPPPATH", res_re, recursive=no_tlb
)
return resScanner
# Local Variables:

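As a quick check of the rewritten res_re above (illustrative only; the sample resource text is made up), the pattern picks up both preprocessor includes and resource statements that reference external files:

import re

res_re = (
    r'^(?:\s*#\s*(?:include)|'
    r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)'
    r'\s*.*?)'
    r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$'
)
sample = '#include "resource.h"\nIDI_APP ICON "app.ico"\n'
print(re.findall(res_re, sample, re.M))
# expected, roughly: [('"', 'resource.h'), ('"', 'app.ico')]

The no_tlb filter above then keeps any matched .tlb references out of the recursive scan, since type libraries are binary.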
View File

@ -1,11 +1,6 @@
"""SCons.Scanner.SWIG
This module implements the dependency scanner for SWIG code.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -25,17 +20,16 @@ This module implements the dependency scanner for SWIG code.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/SWIG.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""Dependency scanner for SWIG code."""
import SCons.Scanner
from . import ClassicCPP
SWIGSuffixes = [ '.i' ]
SWIGSuffixes = ['.i']
def SWIGScanner():
expr = r'^[ \t]*%[ \t]*(?:include|import|extern)[ \t]*(<|"?)([^>\s"]+)(?:>|"?)'
scanner = SCons.Scanner.ClassicCPP("SWIGScanner", ".i", "SWIGPATH", expr)
scanner = ClassicCPP("SWIGScanner", ".i", "SWIGPATH", expr)
return scanner
# Local Variables:

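The SWIG expression can be sanity-checked the same way (illustrative only; the sample input is made up):

import re

expr = r'^[ \t]*%[ \t]*(?:include|import|extern)[ \t]*(<|"?)([^>\s"]+)(?:>|"?)'
print(re.findall(expr, '%include "std_string.i"\n%import <typemaps.i>\n', re.M))
# expected, roughly: [('"', 'std_string.i'), ('<', 'typemaps.i')]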
View File

@ -1,11 +1,6 @@
"""SCons.Scanner
The Scanner package for the SCons software construction utility.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -25,51 +20,49 @@ The Scanner package for the SCons software construction utility.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""The Scanner package for the SCons software construction utility."""
import re
import SCons.Node.FS
import SCons.PathList
import SCons.Util
class _Null(object):
class _Null:
pass
# This is used instead of None as a default argument value so None can be
# used as an actual argument value.
_null = _Null
def Scanner(function, *args, **kw):
"""
Public interface factory function for creating different types
of Scanners based on the different types of "functions" that may
be supplied.
def Scanner(function, *args, **kwargs):
"""Factory function to create a Scanner Object.
Creates the appropriate Scanner based on the type of "function".
TODO: Deprecate this some day. We've moved the functionality
inside the Base class and really don't need this factory function
inside the ScannerBase class and really don't need this factory function
any more. It was, however, used by some of our Tool modules, so
the call probably ended up in various people's custom modules
patterned on SCons code.
"""
if SCons.Util.is_Dict(function):
return Selector(function, *args, **kw)
else:
return Base(function, *args, **kw)
return Selector(function, *args, **kwargs)
return ScannerBase(function, *args, **kwargs)
class FindPathDirs(object):
"""
A class to bind a specific E{*}PATH variable name to a function that
class FindPathDirs:
"""Class to bind a specific E{*}PATH variable name to a function that
will return all of the E{*}path directories.
"""
def __init__(self, variable):
def __init__(self, variable) -> None:
self.variable = variable
def __call__(self, env, dir=None, target=None, source=None, argument=None):
import SCons.PathList
try:
path = env[self.variable]
except KeyError:
@ -80,86 +73,94 @@ class FindPathDirs(object):
return tuple(dir.Rfindalldirs(path))
class ScannerBase:
"""Base class for dependency scanners.
class Base(object):
"""
The base class for dependency scanners. This implements
straightforward, single-pass scanning of a single file.
"""
Implements straightforward, single-pass scanning of a single file.
def __init__(self,
function,
name = "NONE",
argument = _null,
skeys = _null,
path_function = None,
# Node.FS.Base so that, by default, it's okay for a
# scanner to return a Dir, File or Entry.
node_class = SCons.Node.FS.Base,
node_factory = None,
scan_check = None,
recursive = None):
"""
Construct a new scanner object given a scanner function.
A Scanner is usually set up with a scanner function (and optionally
a path function), but can also be a kind of dispatcher which
passes control to other Scanners.
'function' - a scanner function taking two or three
arguments and returning a list of strings.
A scanner function takes three arguments: a Node to scan for
dependencies, the construction environment to use, and an optional
tuple of paths (as generated by the optional path function).
It must return a list containing the Nodes for all the direct
dependencies of the file.
'name' - a name for identifying this scanner object.
The optional path function is called to return paths that can be
searched for implicit dependency files. It takes five arguments:
a construction environment, a Node for the directory containing
the SConscript file that defined the primary target, a list of
target nodes, a list of source nodes, and the optional argument
for this instance.
'argument' - an optional argument that, if specified, will be
passed to both the scanner function and the path_function.
'skeys' - an optional list argument that can be used to determine
which scanner should be used for a given Node. In the case of File
nodes, for example, the 'skeys' would be file suffixes.
'path_function' - a function that takes four or five arguments
(a construction environment, Node for the directory containing
the SConscript file that defined the primary target, list of
target nodes, list of source nodes, and optional argument for
this instance) and returns a tuple of the directories that can
be searched for implicit dependency files. May also return a
callable() which is called with no args and returns the tuple
(supporting Bindable class).
'node_class' - the class of Nodes which this scan will return.
If node_class is None, then this scanner will not enforce any
Node conversion and will return the raw results from the
underlying scanner function.
'node_factory' - the factory function to be called to translate
the raw results returned by the scanner function into the
expected node_class objects.
'scan_check' - a function to be called to first check whether
this node really needs to be scanned.
'recursive' - specifies that this scanner should be invoked
recursively on all of the implicit dependencies it returns
(the canonical example being #include lines in C source files).
May be a callable, which will be called to filter the list
of nodes found to select a subset for recursive scanning
(the canonical example being only recursively scanning
subdirectories within a directory).
The scanner function's first argument will be a Node that should
be scanned for dependencies, the second argument will be an
Environment object, the third argument will be the tuple of paths
returned by the path_function, and the fourth argument will be
the value passed into 'argument', and the returned list should
contain the Nodes for all the direct dependencies of the file.
Examples:
Examples::
s = Scanner(my_scanner_function)
s = Scanner(function=my_scanner_function)
s = Scanner(function=my_scanner_function, argument='foo')
s = Scanner(function = my_scanner_function)
Args:
function: either a scanner function taking two or three arguments
and returning a list of File Nodes; or a mapping of keys to
other Scanner objects.
s = Scanner(function = my_scanner_function, argument = 'foo')
name: an optional name for identifying this scanner object
(defaults to "NONE").
"""
argument: an optional argument that will be passed to both
*function* and *path_function*.
skeys: an optional list argument that can be used
to determine if this scanner can be used for a given Node.
In the case of File nodes, for example, the *skeys*
would be file suffixes.
path_function: an optional function which returns a tuple
of the directories that can be searched for implicit
dependency files. May also return a callable which
is called with no args and returns the tuple (supporting
Bindable class).
node_class: optional class of Nodes which this scan will return.
If not specified, defaults to :class:`SCons.Node.FS.Base`.
If *node_class* is ``None``, then this scanner will not enforce
any Node conversion and will return the raw results from *function*.
node_factory: optional factory function to be called to
translate the raw results returned by *function*
into the expected *node_class* objects.
scan_check: optional function to be called to first check whether
this node really needs to be scanned.
recursive: optional specifier of whether this scanner should be
invoked recursively on all of the implicit dependencies it returns
(for example `#include` lines in C source files, which may refer
to header files which should themselves be scanned).
May be a callable, which will be called to filter
the list of nodes found to select a subset for recursive
scanning (the canonical example being only recursively
scanning subdirectories within a directory). The default
is to not do recursive scanning.
"""
def __init__(
self,
function,
name: str="NONE",
argument=_null,
skeys=_null,
path_function=None,
# Node.FS.Base so that, by default, it's okay for a
# scanner to return a Dir, File or Entry.
node_class=SCons.Node.FS.Base,
node_factory=None,
scan_check=None,
recursive=None,
) -> None:
"""Construct a new scanner object given a scanner function."""
# Note: this class could easily work with scanner functions that take
# something other than a filename as an argument (e.g. a database
# node) and a dependencies list that aren't file names. All that
@ -190,22 +191,29 @@ class Base(object):
def path(self, env, dir=None, target=None, source=None):
if not self.path_function:
return ()
if self.argument is not _null:
return self.path_function(env, dir, target, source, self.argument)
else:
return self.path_function(env, dir, target, source)
def __call__(self, node, env, path=()):
"""
This method scans a single object. 'node' is the node
that will be passed to the scanner function, and 'env' is the
environment that will be passed to the scanner function. A list of
direct dependency nodes for the specified node will be returned.
return self.path_function(env, dir, target, source)
def __call__(self, node, env, path=()) -> list:
"""Scans a single object.
Args:
node: the node that will be passed to the scanner function
env: the environment that will be passed to the scanner function.
path: tuple of paths from the `path_function`
Returns:
A list of direct dependency nodes for the specified node.
"""
if self.scan_check and not self.scan_check(node, env):
return []
self = self.select(node)
# here we may morph into a different Scanner instance:
self = self.select(node) # pylint: disable=self-cls-assignment
if self.argument is not _null:
node_list = self.function(node, env, path, self.argument)
@ -215,13 +223,10 @@ class Base(object):
kw = {}
if hasattr(node, 'dir'):
kw['directory'] = node.dir
node_factory = env.get_factory(self.node_factory)
nodes = []
for l in node_list:
if self.node_class and not isinstance(l, self.node_class):
l = node_factory(l, **kw)
nodes.append(l)
return nodes
conv = env.get_factory(self.node_factory)
cls = self.node_class
nl = [conv(n, **kw) if cls and not isinstance(n, cls) else n for n in node_list]
return nl
def __eq__(self, other):
try:
@ -233,10 +238,10 @@ class Base(object):
def __hash__(self):
return id(self)
def __str__(self):
def __str__(self) -> str:
return self.name
def add_skey(self, skey):
def add_skey(self, skey) -> None:
"""Add a skey to the list of skeys"""
self.skeys.append(skey)
@ -255,62 +260,70 @@ class Base(object):
else:
return self
def _recurse_all_nodes(self, nodes):
@staticmethod
def _recurse_all_nodes(nodes):
return nodes
def _recurse_no_nodes(self, nodes):
@staticmethod
def _recurse_no_nodes(nodes):
return []
# recurse_nodes = _recurse_no_nodes
def add_scanner(self, skey, scanner):
def add_scanner(self, skey, scanner) -> None:
self.function[skey] = scanner
self.add_skey(skey)
class Selector(Base):
# keep the old name for a while in case external users are using.
# there are no more internal uses of this class by the name "Base"
Base = ScannerBase
class Selector(ScannerBase):
"""
A class for selecting a more specific scanner based on the
scanner_key() (suffix) for a specific Node.
:func:`scanner_key` (suffix) for a specific Node.
TODO: This functionality has been moved into the inner workings of
the Base class, and this class will be deprecated at some point.
the ScannerBase class, and this class will be deprecated at some point.
(It was never exposed directly as part of the public interface,
although it is used by the Scanner() factory function that was
although it is used by the :func:`Scanner` factory function that was
used by various Tool modules and therefore was likely a template
for custom modules that may be out there.)
"""
def __init__(self, dict, *args, **kw):
Base.__init__(self, None, *args, **kw)
self.dict = dict
self.skeys = list(dict.keys())
def __init__(self, mapping, *args, **kwargs) -> None:
super().__init__(None, *args, **kwargs)
self.mapping = mapping
self.skeys = list(mapping.keys())
def __call__(self, node, env, path=()):
return self.select(node)(node, env, path)
def select(self, node):
try:
return self.dict[node.scanner_key()]
return self.mapping[node.scanner_key()]
except KeyError:
return None
def add_scanner(self, skey, scanner):
self.dict[skey] = scanner
def add_scanner(self, skey, scanner) -> None:
self.mapping[skey] = scanner
self.add_skey(skey)
class Current(Base):
class Current(ScannerBase):
"""
A class for scanning files that are source files (have no builder)
or are derived files and are current (which implies that they exist,
either locally or in a repository).
"""
def __init__(self, *args, **kw):
def __init__(self, *args, **kwargs) -> None:
def current_check(node, env):
return not node.has_builder() or node.is_up_to_date()
kw['scan_check'] = current_check
Base.__init__(self, *args, **kw)
kwargs['scan_check'] = current_check
super().__init__(*args, **kwargs)
class Classic(Current):
"""
@ -319,13 +332,12 @@ class Classic(Current):
regular expressions to find the includes.
Note that in order for this to work "out of the box" (without
overriding the find_include() and sort_key() methods), the regular
expression passed to the constructor must return the name of the
include file in group 0.
overriding the :meth:`find_include` and :meth:`sort_key` methods),
the regular expression passed to the constructor must return the
name of the include file in group 0.
"""
def __init__(self, name, suffixes, path_variable, regex, *args, **kw):
def __init__(self, name, suffixes, path_variable, regex, *args, **kwargs) -> None:
self.cre = re.compile(regex, re.M)
def _scan(node, _, path=(), self=self):
@ -334,31 +346,32 @@ class Classic(Current):
return []
return self.scan(node, path)
kw['function'] = _scan
kw['path_function'] = FindPathDirs(path_variable)
kwargs['function'] = _scan
kwargs['path_function'] = FindPathDirs(path_variable)
# Allow recursive to propagate if child class specifies.
# In this case resource scanner needs to specify a filter on which files
# get recursively processed. Previously was hardcoded to 1 instead of
# defaulted to 1.
kw['recursive'] = kw.get('recursive', 1)
kw['skeys'] = suffixes
kw['name'] = name
kwargs['recursive'] = kwargs.get('recursive', True)
kwargs['skeys'] = suffixes
kwargs['name'] = name
Current.__init__(self, *args, **kw)
super().__init__(*args, **kwargs)
def find_include(self, include, source_dir, path):
@staticmethod
def find_include(include, source_dir, path):
n = SCons.Node.FS.find_file(include, (source_dir,) + tuple(path))
return n, include
def sort_key(self, include):
@staticmethod
def sort_key(include):
return SCons.Node.FS._my_normcase(include)
def find_include_names(self, node):
return self.cre.findall(node.get_text_contents())
def scan(self, node, path=()):
# cache the includes list in node so we only scan it once:
if node.includes is not None:
includes = node.includes
@ -382,8 +395,11 @@ class Classic(Current):
n, i = self.find_include(include, source_dir, path)
if n is None:
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s (included from: %s) -- file not found" % (i, node))
SCons.Warnings.warn(
SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s "
"(included from: %s) -- file not found" % (i, node),
)
else:
nodes.append((self.sort_key(include), n))
@ -399,7 +415,8 @@ class ClassicCPP(Classic):
to the constructor must return the leading bracket in group 0, and
the contained filename in group 1.
"""
def find_include(self, include, source_dir, path):
@staticmethod
def find_include(include, source_dir, path):
include = list(map(SCons.Util.to_str, include))
if include[0] == '"':
paths = (source_dir,) + tuple(path)
@ -407,11 +424,11 @@ class ClassicCPP(Classic):
paths = tuple(path) + (source_dir,)
n = SCons.Node.FS.find_file(include[1], paths)
i = SCons.Util.silent_intern(include[1])
return n, i
def sort_key(self, include):
@staticmethod
def sort_key(include):
return SCons.Node.FS._my_normcase(' '.join(include))
# Local Variables:

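Because Base is kept as an alias for ScannerBase, tool code that builds scanners directly keeps working under either name; a minimal sketch (the suffix and scanner function are hypothetical, assuming SCons 4.x is importable):

import SCons.Scanner

def scan_foo(node, env, path, arg=None):
    # A real scanner would parse node.get_text_contents() and return Nodes.
    return []

s_new = SCons.Scanner.ScannerBase(scan_foo, name='FooScanner', skeys=['.foo'])
s_old = SCons.Scanner.Base(scan_foo, name='FooScanner', skeys=['.foo'])  # alias still works
s_sel = SCons.Scanner.Scanner({'.foo': s_new})                           # dict -> Selector

For a ClassicCPP-style scanner, the docstring's contract means the regex must capture the leading bracket in group 0 and the contained filename in group 1, exactly as res_re does in the RC scanner earlier in this diff.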
View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -19,13 +20,8 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
__revision__ = "src/engine/SCons/Script/Interactive.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """
SCons interactive mode
"""
"""SCons interactive mode. """
# TODO:
#
@ -33,7 +29,7 @@ SCons interactive mode
# of its own, which might or might not be a good thing. Nevertheless,
# here are some enhancements that will probably be requested some day
# and are worth keeping in mind (assuming this takes off):
#
#
# - A command to re-read / re-load the SConscript files. This may
# involve allowing people to specify command-line options (e.g. -f,
# -I, --no-site-dir) that affect how the SConscript files are read.
@ -116,7 +112,7 @@ version Prints SCons version information.
'sh' : 'shell',
}
def __init__(self, **kw):
def __init__(self, **kw) -> None:
cmd.Cmd.__init__(self)
for key, val in kw.items():
setattr(self, key, val)
@ -126,7 +122,7 @@ version Prints SCons version information.
else:
self.shell_variable = 'SHELL'
def default(self, argv):
def default(self, argv) -> None:
print("*** Unknown command: %s" % argv[0])
def onecmd(self, line):
@ -152,7 +148,7 @@ version Prints SCons version information.
return self.default(argv)
return func(argv)
def do_build(self, argv):
def do_build(self, argv) -> None:
"""\
build [TARGETS] Build the specified TARGETS and their
dependencies. 'b' is a synonym.
@ -217,11 +213,11 @@ version Prints SCons version information.
seen_nodes = {}
def get_unseen_children(node, parent, seen_nodes=seen_nodes):
def is_unseen(node, seen_nodes=seen_nodes):
def is_unseen(node, seen_nodes=seen_nodes) -> bool:
return node not in seen_nodes
return [child for child in node.children(scan=1) if is_unseen(child)]
def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes):
def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes) -> None:
seen_nodes[node] = 1
# If this file is in a VariantDir and has a
@ -247,7 +243,7 @@ version Prints SCons version information.
while n:
n = walker.get_next()
for node in list(seen_nodes.keys()):
for node in seen_nodes.keys():
# Call node.clear() to clear most of the state
node.clear()
# node.clear() doesn't reset node.state, so call
@ -261,7 +257,12 @@ version Prints SCons version information.
# from SCons.Debug import Trace
# Trace('node %s, ref_count %s !!!\n' % (node, node.ref_count))
SCons.SConsign.Reset()
# TODO: REMOVE WPD DEBUG 02/14/2022
# This call was clearing the list of sconsign files to be written, so it would
# only write the results of the first build command. All others wouldn't be written
# to .SConsign.
# Pretty sure commenting this out is the correct fix.
# SCons.SConsign.Reset()
SCons.Script.Main.progress_display("scons: done clearing node information.")
def do_clean(self, argv):
@ -271,11 +272,11 @@ version Prints SCons version information.
"""
return self.do_build(['build', '--clean'] + argv[1:])
def do_EOF(self, argv):
def do_EOF(self, argv) -> None:
print()
self.do_exit(argv)
def _do_one_help(self, arg):
def _do_one_help(self, arg) -> None:
try:
# If help_<arg>() exists, then call it.
func = getattr(self, 'help_' + arg)
@ -311,13 +312,13 @@ version Prints SCons version information.
lines = list(map(strip_spaces, lines))
return '\n'.join(lines)
def do_exit(self, argv):
def do_exit(self, argv) -> None:
"""\
exit Exit SCons interactive mode.
"""
sys.exit(0)
def do_help(self, argv):
def do_help(self, argv) -> None:
"""\
help [COMMAND] Prints help for the specified COMMAND. 'h'
and '?' are synonyms.
@ -334,7 +335,7 @@ version Prints SCons version information.
sys.stdout.write(doc + '\n')
sys.stdout.flush()
def do_shell(self, argv):
def do_shell(self, argv) -> None:
"""\
shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and
'!' are synonyms.
@ -345,22 +346,22 @@ version Prints SCons version information.
argv = os.environ[self.shell_variable]
try:
# Per "[Python-Dev] subprocess insufficiently platform-independent?"
# http://mail.python.org/pipermail/python-dev/2008-August/081979.html "+
# https://mail.python.org/pipermail/python-dev/2008-August/081979.html "+
# Doing the right thing with an argument list currently
# requires different shell= values on Windows and Linux.
p = subprocess.Popen(argv, shell=(sys.platform=='win32'))
except EnvironmentError as e:
except OSError as e:
sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror))
else:
p.wait()
def do_version(self, argv):
def do_version(self, argv) -> None:
"""\
version Prints SCons version information.
"""
sys.stdout.write(self.parser.version + '\n')
def interact(fs, parser, options, targets, target_top):
def interact(fs, parser, options, targets, target_top) -> None:
c = SConsInteractiveCmd(prompt = 'scons>>> ',
fs = fs,
parser = parser,

View File

@ -1,12 +1,6 @@
"""SCons.Script.SConscript
This module defines the Python API provided to SConscript and SConstruct
files.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -27,7 +21,9 @@ files.
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Script/SConscript.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""This module defines the Python API provided to SConscript files."""
from __future__ import annotations
import SCons
import SCons.Action
@ -40,13 +36,11 @@ import SCons.Node.Alias
import SCons.Node.FS
import SCons.Platform
import SCons.SConf
import SCons.Script.Main
import SCons.Tool
from SCons.Util import is_List, is_String, is_Dict, flatten
from SCons.Node import SConscriptNodes
from . import Main
import collections
import os
import os.path
import re
@ -65,7 +59,7 @@ GlobalDict = None
global_exports = {}
# chdir flag
sconscript_chdir = 1
sconscript_chdir: bool = True
def get_calling_namespaces():
"""Return the locals and globals for the function that called
@ -110,9 +104,9 @@ def compute_exports(exports):
return retval
class Frame(object):
class Frame:
"""A frame on the SConstruct/SConscript call stack"""
def __init__(self, fs, exports, sconscript):
def __init__(self, fs, exports, sconscript) -> None:
self.globals = BuildDefaultGlobals()
self.retval = None
self.prev_dir = fs.getcwd()
@ -153,34 +147,32 @@ def Return(*vars, **kw):
stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)
def handle_missing_SConscript(f, must_exist=None):
def handle_missing_SConscript(f: str, must_exist: bool = True) -> None:
"""Take appropriate action on missing file in SConscript() call.
Print a warning or raise an exception on missing file.
On first warning, print a deprecation message.
Print a warning or raise an exception on missing file, unless
missing is explicitly allowed by the *must_exist* parameter or by
a global flag.
Args:
f (str): path of missing configuration file
must_exist (bool): raise exception if file does not exist
f: path to missing configuration file
must_exist: if true (the default), fail. If false
do nothing, allowing a build to declare it's okay to be missing.
Raises:
UserError if 'must_exist' is True or if global
SCons.Script._no_missing_sconscript is True.
UserError: if *must_exist* is true or if global
:data:`SCons.Script._no_missing_sconscript` is true.
.. versionchanged:: 4.6.0
Changed default from False.
"""
if not must_exist: # explicitly set False: ok
return
if not SCons.Script._no_missing_sconscript: # system default changed: ok
return
msg = f"missing SConscript file {f.get_internal_path()!r}"
raise SCons.Errors.UserError(msg)
if must_exist or (SCons.Script._no_missing_sconscript and must_exist is not False):
msg = "Fatal: missing SConscript '%s'" % f.get_internal_path()
raise SCons.Errors.UserError(msg)
if SCons.Script._warn_missing_sconscript_deprecated:
msg = "Calling missing SConscript without error is deprecated.\n" + \
"Transition by adding must_exist=0 to SConscript calls.\n" + \
"Missing SConscript '%s'" % f.get_internal_path()
SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg)
SCons.Script._warn_missing_sconscript_deprecated = False
else:
msg = "Ignoring missing SConscript '%s'" % f.get_internal_path()
SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg)
def _SConscript(fs, *files, **kw):
top = fs.Top
@ -202,11 +194,12 @@ def _SConscript(fs, *files, **kw):
else:
f = fs.File(str(fn))
_file_ = None
SConscriptNodes.add(f)
# Change directory to the top of the source
# tree to make sure the os's cwd and the cwd of
# fs match so we can open the SConscript.
fs.chdir(top, change_os_dir=1)
fs.chdir(top, change_os_dir=True)
if f.rexists():
actual = f.rfile()
_file_ = open(actual.get_abspath(), "rb")
@ -255,7 +248,7 @@ def _SConscript(fs, *files, **kw):
# fs.chdir(), because we still need to
# interpret the stuff within the SConscript file
# relative to where we are logically.
fs.chdir(ldir, change_os_dir=0)
fs.chdir(ldir, change_os_dir=False)
os.chdir(actual.dir.get_abspath())
# Append the SConscript directory to the beginning
@ -279,22 +272,30 @@ def _SConscript(fs, *files, **kw):
try:
try:
if Main.print_time:
time1 = time.time()
start_time = time.perf_counter()
scriptdata = _file_.read()
scriptname = _file_.name
_file_.close()
if SCons.Debug.sconscript_trace:
print("scons: Entering "+str(scriptname))
exec(compile(scriptdata, scriptname, 'exec'), call_stack[-1].globals)
if SCons.Debug.sconscript_trace:
print("scons: Exiting "+str(scriptname))
except SConscriptReturn:
pass
if SCons.Debug.sconscript_trace:
print("scons: Exiting "+str(scriptname))
else:
pass
finally:
if Main.print_time:
time2 = time.time()
print('SConscript:%s took %0.3f ms' % (f.get_abspath(), (time2 - time1) * 1000.0))
elapsed = time.perf_counter() - start_time
print('SConscript:%s took %0.3f ms' % (f.get_abspath(), elapsed * 1000.0))
if old_file is not None:
call_stack[-1].globals.update({__file__:old_file})
else:
handle_missing_SConscript(f, kw.get('must_exist', None))
handle_missing_SConscript(f, kw.get('must_exist', True))
finally:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1
@ -306,7 +307,7 @@ def _SConscript(fs, *files, **kw):
# There was no local directory, so chdir to the
# Repository directory. Like above, we do this
# directly.
fs.chdir(frame.prev_dir, change_os_dir=0)
fs.chdir(frame.prev_dir, change_os_dir=False)
rdir = frame.prev_dir.rdir()
rdir._create() # Make sure there's a directory there.
try:
@ -332,7 +333,7 @@ def _SConscript(fs, *files, **kw):
else:
return tuple(results)
def SConscript_exception(file=sys.stderr):
def SConscript_exception(file=sys.stderr) -> None:
"""Print an exception stack trace just for the SConscript file(s).
This will show users who have Python errors where the problem is,
without cluttering the output with all of the internal calls leading
@ -385,12 +386,8 @@ class SConsEnvironment(SCons.Environment.Base):
#
# Private methods of an SConsEnvironment.
#
def _exceeds_version(self, major, minor, v_major, v_minor):
"""Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise."""
return (major > v_major or (major == v_major and minor > v_minor))
def _get_major_minor_revision(self, version_string):
@staticmethod
def _get_major_minor_revision(version_string: str) -> tuple[int, int, int]:
"""Split a version string into major, minor and (optionally)
revision parts.
@ -485,18 +482,26 @@ class SConsEnvironment(SCons.Environment.Base):
kw['_depth'] = kw.get('_depth', 0) + 1
return SCons.Environment.Base.Configure(self, *args, **kw)
def Default(self, *targets):
def Default(self, *targets) -> None:
SCons.Script._Set_Default_Targets(self, targets)
def EnsureSConsVersion(self, major, minor, revision=0):
@staticmethod
def GetSConsVersion() -> tuple[int, int, int]:
"""Return the current SCons version.
.. versionadded:: 4.8.0
"""
return SConsEnvironment._get_major_minor_revision(SCons.__version__)
@staticmethod
def EnsureSConsVersion(major: int, minor: int, revision: int = 0) -> None:
"""Exit abnormally if the SCons version is not late enough."""
# split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__':
SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
"EnsureSConsVersion is ignored for development version")
return
scons_ver = self._get_major_minor_revision(SCons.__version__)
if scons_ver < (major, minor, revision):
if SConsEnvironment.GetSConsVersion() < (major, minor, revision):
if revision:
scons_ver_string = '%d.%d.%d' % (major, minor, revision)
else:
@ -505,22 +510,25 @@ class SConsEnvironment(SCons.Environment.Base):
(scons_ver_string, SCons.__version__))
sys.exit(2)
def EnsurePythonVersion(self, major, minor):
@staticmethod
def EnsurePythonVersion(major, minor) -> None:
"""Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
sys.exit(2)
def Exit(self, value=0):
@staticmethod
def Exit(value: int=0) -> None:
sys.exit(value)
def Export(self, *vars, **kw):
def Export(self, *vars, **kw) -> None:
for var in vars:
global_exports.update(compute_exports(self.Split(var)))
global_exports.update(kw)
def GetLaunchDir(self):
@staticmethod
def GetLaunchDir():
global launch_dir
return launch_dir
@ -528,9 +536,27 @@ class SConsEnvironment(SCons.Environment.Base):
name = self.subst(name)
return SCons.Script.Main.GetOption(name)
def Help(self, text, append=False):
def Help(self, text, append: bool = False, local_only: bool = False) -> None:
"""Update the help text.
The previous help text has *text* appended to it, except on the
first call. On first call, the values of *append* and *local_only*
are considered to determine what is appended to.
Arguments:
text: string to add to the help text.
append: on first call, if true, keep the existing help text
(default False).
local_only: on first call, if true and *append* is also true,
keep only the help text from AddOption calls.
.. versionchanged:: 4.6.0
The *keep_local* parameter was added.
.. versionchanged:: 4.9.0
The *keep_local* parameter was renamed *local_only* to match the manpage.
"""
text = self.subst(text, raw=1)
SCons.Script.HelpFunction(text, append=append)
SCons.Script.HelpFunction(text, append=append, local_only=local_only)
def Import(self, *vars):
try:
@ -596,11 +622,12 @@ class SConsEnvironment(SCons.Environment.Base):
subst_kw['exports'] = exports
return _SConscript(self.fs, *files, **subst_kw)
def SConscriptChdir(self, flag):
@staticmethod
def SConscriptChdir(flag: bool) -> None:
global sconscript_chdir
sconscript_chdir = flag
def SetOption(self, name, value):
def SetOption(self, name, value) -> None:
name = self.subst(name)
SCons.Script.Main.SetOption(name, value)
@ -639,7 +666,7 @@ def get_DefaultEnvironmentProxy():
_DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env)
return _DefaultEnvironmentProxy
class DefaultEnvironmentCall(object):
class DefaultEnvironmentCall:
"""A class that implements "global function" calls of
Environment methods by fetching the specified method from the
DefaultEnvironment's class. Note that this uses an intermediate
@ -648,7 +675,7 @@ class DefaultEnvironmentCall(object):
thereby prevent expansion of construction variables (since from
the user's point of view this was called as a global function,
with no associated construction environment)."""
def __init__(self, method_name, subst=0):
def __init__(self, method_name, subst: int=0) -> None:
self.method_name = method_name
if subst:
self.factory = SCons.Defaults.DefaultEnvironment
@ -672,7 +699,7 @@ def BuildDefaultGlobals():
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
def not_a_module(m, d=d, mtype=type(SCons.Script)) -> bool:
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]

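The API changes in this file are easiest to see from an ordinary SConstruct; a short sketch (placeholder paths and help text, assuming SCons >= 4.8 so GetSConsVersion exists):

# SConstruct sketch -- placeholders only.
EnsureSConsVersion(4, 9, 1)
print("building with SCons %d.%d.%d" % GetSConsVersion())   # GetSConsVersion added in 4.8.0

# A missing SConscript is now a hard error by default; opt out explicitly:
SConscript('optional/SConscript', must_exist=False)

# On the first call, append=True keeps the existing help text; local_only=True
# limits that kept text to AddOption help.
Help("Project-specific options are documented here.\n", append=True, local_only=True)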
View File

@ -1,18 +1,6 @@
"""SCons.Script
This file implements the main() function used by the scons script.
Architecturally, this *is* the scons script, and will likely only be
called from the external "scons" wrapper. Consequently, anything here
should not be, or be considered, part of the build engine. If it's
something that we expect other software to want to use, it should go in
some other module. If it's specific to the "scons" script invocation,
it goes here.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -32,20 +20,24 @@ it goes here.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Script/__init__.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""The main() function used by the scons script.
Architecturally, this *is* the scons script, and will likely only be
called from the external "scons" wrapper. Consequently, anything here
should not be, or be considered, part of the build engine. If it's
something that we expect other software to want to use, it should go in
some other module. If it's specific to the "scons" script invocation,
it goes here.
"""
import time
start_time = time.time()
import collections
import itertools
import os
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from io import StringIO
import sys
@ -62,14 +54,22 @@ import sys
# to not add the shims. So we use a special-case, up-front check for
# the "--debug=memoizer" flag and enable Memoizer before we import any
# of the other modules that use it.
# Update: this breaks if the option isn't exactly "--debug=memoizer",
# like if there is more than one debug option as a csv. Do a bit more work.
_args = sys.argv + os.environ.get('SCONSFLAGS', '').split()
if "--debug=memoizer" in _args:
_args = sys.argv + os.environ.get("SCONSFLAGS", "").split()
_args = (
arg[len("--debug=") :].split(",")
for arg in _args
if arg.startswith("--debug=")
)
_args = list(itertools.chain.from_iterable(_args))
if "memoizer" in _args:
import SCons.Memoize
import SCons.Warnings
try:
SCons.Memoize.EnableMemoization()
except SCons.Warnings.Warning:
except SCons.Warnings.SConsWarning:
# Some warning was thrown. Arrange for it to be displayed
# or not after warnings are configured.
from . import Main
@ -109,15 +109,16 @@ main = Main.main
BuildTask = Main.BuildTask
CleanTask = Main.CleanTask
QuestionTask = Main.QuestionTask
#PrintHelp = Main.PrintHelp
#SConscriptSettableOptions = Main.SConscriptSettableOptions
AddOption = Main.AddOption
PrintHelp = Main.PrintHelp
GetOption = Main.GetOption
SetOption = Main.SetOption
ValidateOptions = Main.ValidateOptions
Progress = Main.Progress
GetBuildFailures = Main.GetBuildFailures
DebugOptions = Main.DebugOptions
#keep_going_on_error = Main.keep_going_on_error
#print_dtree = Main.print_dtree
@ -134,9 +135,7 @@ GetBuildFailures = Main.GetBuildFailures
#profiling = Main.profiling
#repositories = Main.repositories
#
from . import SConscript
_SConscript = SConscript
from . import SConscript as _SConscript
call_stack = _SConscript.call_stack
@ -152,7 +151,7 @@ FindPathDirs = SCons.Scanner.FindPathDirs
Platform = SCons.Platform.Platform
Virtualenv = SCons.Platform.virtualenv.Virtualenv
Return = _SConscript.Return
Scanner = SCons.Scanner.Base
Scanner = SCons.Scanner.ScannerBase
Tool = SCons.Tool.Tool
WhereIs = SCons.Util.WhereIs
@ -185,11 +184,11 @@ DefaultEnvironment = SCons.Defaults.DefaultEnvironment
# Other variables we provide.
class TargetList(collections.UserList):
def _do_nothing(self, *args, **kw):
def _do_nothing(self, *args, **kw) -> None:
pass
def _add_Default(self, list):
def _add_Default(self, list) -> None:
self.extend(list)
def _clear(self):
def _clear(self) -> None:
del self[:]
ARGUMENTS = {}
@ -209,13 +208,13 @@ DEFAULT_TARGETS = []
# own targets to BUILD_TARGETS.
_build_plus_default = TargetList()
def _Add_Arguments(alist):
def _Add_Arguments(alist) -> None:
for arg in alist:
a, b = arg.split('=', 1)
ARGUMENTS[a] = b
ARGLIST.append((a, b))
def _Add_Targets(tlist):
def _Add_Targets(tlist) -> None:
if tlist:
COMMAND_LINE_TARGETS.extend(tlist)
BUILD_TARGETS.extend(tlist)
@ -235,7 +234,7 @@ def _Set_Default_Targets_Has_Not_Been_Called(d, fs):
_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called
def _Set_Default_Targets(env, tlist):
def _Set_Default_Targets(env, tlist) -> None:
global DEFAULT_TARGETS
global _Get_Default_Targets
_Get_Default_Targets = _Set_Default_Targets_Has_Been_Called
@ -257,57 +256,74 @@ def _Set_Default_Targets(env, tlist):
BUILD_TARGETS._add_Default(nodes)
_build_plus_default._add_Default(nodes)
#
help_text = None
def HelpFunction(text, append=False):
def HelpFunction(text, append: bool = False, local_only: bool = False) -> None:
"""The implementaion of the the ``Help`` method.
See :meth:`~SCons.Script.SConscript.Help`.
.. versionchanged:: 4.6.0
The *keep_local* parameter was added.
.. versionchanged:: 4.9.0
The *keep_local* parameter was renamed *local_only* to match the manpage.
"""
global help_text
if help_text is None:
if append:
s = StringIO()
PrintHelp(s)
help_text = s.getvalue()
s.close()
with StringIO() as s:
PrintHelp(s, local_only=local_only)
help_text = s.getvalue()
else:
help_text = ""
help_text= help_text + text
help_text += text
#
# Will be non-zero if we are reading an SConscript file.
sconscript_reading = 0
sconscript_reading: int = 0
_no_missing_sconscript = False
_warn_missing_sconscript_deprecated = True
_no_missing_sconscript = True
_warn_missing_sconscript_deprecated = False # TODO: now unused
def set_missing_sconscript_error(flag=1):
"""Set behavior on missing file in SConscript() call. Returns previous value"""
def set_missing_sconscript_error(flag: bool = True) -> bool:
"""Set behavior on missing file in SConscript() call.
Returns:
previous value
"""
global _no_missing_sconscript
old = _no_missing_sconscript
_no_missing_sconscript = flag
return old
#
def Variables(files=[], args=ARGUMENTS):
def Variables(files=None, args=ARGUMENTS):
return SCons.Variables.Variables(files, args)
# The list of global functions to add to the SConscript name space
# that end up calling corresponding methods or Builders in the
# Adding global functions to the SConscript name space.
#
# Static functions that do not trigger initialization of
# DefaultEnvironment() and don't use its state.
GetSConsVersion = _SConscript.SConsEnvironment.GetSConsVersion
EnsureSConsVersion = _SConscript.SConsEnvironment.EnsureSConsVersion
EnsurePythonVersion = _SConscript.SConsEnvironment.EnsurePythonVersion
Exit = _SConscript.SConsEnvironment.Exit
GetLaunchDir = _SConscript.SConsEnvironment.GetLaunchDir
SConscriptChdir = _SConscript.SConsEnvironment.SConscriptChdir
# Functions that end up calling methods or Builders in the
# DefaultEnvironment().
GlobalDefaultEnvironmentFunctions = [
# Methods from the SConsEnvironment class, above.
'Default',
'EnsurePythonVersion',
'EnsureSConsVersion',
'Exit',
'Export',
'GetLaunchDir',
'Help',
'Import',
#'SConscript', is handled separately, below.
'SConscriptChdir',
# Methods from the Environment.Base class.
'AddPostAction',
@ -339,12 +355,12 @@ GlobalDefaultEnvironmentFunctions = [
'Local',
'ParseDepends',
'Precious',
'Pseudo',
'PyPackageDir',
'Repository',
'Requires',
'SConsignFile',
'SideEffect',
'SourceCode',
'Split',
'Tag',
'Value',
@ -382,6 +398,8 @@ GlobalDefaultBuilders = [
'Package',
]
# DefaultEnvironmentCall() initializes DefaultEnvironment() if it is not
# created yet.
for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders:
exec ("%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name)))
del name

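The early --debug handling above now copes with comma-separated debug options; an equivalent standalone sketch of just that parsing (illustrative only):

import itertools

def wants_memoizer(argv) -> bool:
    """Mirror of the early-startup check: is 'memoizer' among the --debug values?"""
    debug_values = (
        arg[len("--debug="):].split(",")
        for arg in argv
        if arg.startswith("--debug=")
    )
    return "memoizer" in itertools.chain.from_iterable(debug_values)

print(wants_memoizer(["scons", "--debug=memoizer"]))        # True
print(wants_memoizer(["scons", "--debug=count,memoizer"]))  # True (the case the old check missed)
print(wants_memoizer(["scons", "--debug=count"]))           # False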
View File

@ -1,11 +1,6 @@
"""SCons.Subst
SCons string substitution.
"""
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -26,28 +21,32 @@ SCons string substitution.
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Subst.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
"""SCons string substitution."""
from __future__ import annotations
import collections
import re
from inspect import signature, Parameter
import SCons.Errors
from SCons.Util import is_String, is_Sequence
# Indexed by the SUBST_* constants below.
_strconv = [SCons.Util.to_String_for_subst,
SCons.Util.to_String_for_subst,
SCons.Util.to_String_for_signature]
_strconv = [
SCons.Util.to_String_for_subst,
SCons.Util.to_String_for_subst,
SCons.Util.to_String_for_signature,
]
AllowableExceptions = (IndexError, NameError)
def SetAllowableExceptions(*excepts):
def SetAllowableExceptions(*excepts) -> None:
global AllowableExceptions
AllowableExceptions = [_f for _f in excepts if _f]
def raise_exception(exception, target, s):
name = exception.__class__.__name__
msg = "%s `%s' trying to evaluate `%s'" % (name, exception, s)
@ -57,16 +56,15 @@ def raise_exception(exception, target, s):
raise SCons.Errors.UserError(msg)
class Literal(object):
class Literal:
"""A wrapper for a string. If you use this object wrapped
around a string, then it will be interpreted as literal.
When passed to the command interpreter, all special
characters will be escaped."""
def __init__(self, lstr):
def __init__(self, lstr) -> None:
self.lstr = lstr
def __str__(self):
def __str__(self) -> str:
return self.lstr
def escape(self, escape_func):
@ -75,21 +73,21 @@ class Literal(object):
def for_signature(self):
return self.lstr
def is_literal(self):
return 1
def is_literal(self) -> bool:
return True
def __eq__(self, other):
if not isinstance(other, Literal):
return False
return self.lstr == other.lstr
def __neq__(self, other):
def __neq__(self, other) -> bool:
return not self.__eq__(other)
def __hash__(self):
return hash(self.lstr)
class SpecialAttrWrapper(object):
class SpecialAttrWrapper:
"""This is a wrapper for what we call a 'Node special attribute.'
This is any of the attributes of a Node that we can reference from
Environment variable substitution, such as $TARGET.abspath or
@ -98,7 +96,7 @@ class SpecialAttrWrapper(object):
such that we can return some canonical string during signature
calculation to avoid unnecessary rebuilds."""
def __init__(self, lstr, for_signature=None):
def __init__(self, lstr, for_signature=None) -> None:
"""The for_signature parameter, if supplied, will be the
canonical string we return from for_signature(). Else
we will simply return lstr."""
@ -108,7 +106,7 @@ class SpecialAttrWrapper(object):
else:
self.forsig = lstr
def __str__(self):
def __str__(self) -> str:
return self.lstr
def escape(self, escape_func):
@ -117,8 +115,8 @@ class SpecialAttrWrapper(object):
def for_signature(self):
return self.forsig
def is_literal(self):
return 1
def is_literal(self) -> bool:
return True
def quote_spaces(arg):
"""Generic function for putting double quotes around any string that
@ -135,11 +133,11 @@ class CmdStringHolder(collections.UserString):
particular platform, it will return the contained string with the
proper escape sequences inserted.
"""
def __init__(self, cmd, literal=None):
collections.UserString.__init__(self, cmd)
def __init__(self, cmd, literal=None) -> None:
super().__init__(cmd)
self.literal = literal
def is_literal(self):
def is_literal(self) -> bool:
return self.literal
def escape(self, escape_func, quote_func=quote_spaces):
@ -171,7 +169,7 @@ def escape_list(mylist, escape_func):
return e(escape_func)
return list(map(escape, mylist))
class NLWrapper(object):
class NLWrapper:
"""A wrapper class that delays turning a list of sources or targets
into a NodeList until it's needed. The specified function supplied
when the object is initialized is responsible for turning raw nodes
@ -184,7 +182,7 @@ class NLWrapper(object):
cleaner conceptually...
"""
def __init__(self, list, func):
def __init__(self, list, func) -> None:
self.list = list
self.func = func
def _return_nodelist(self):
@ -213,7 +211,7 @@ class Targets_or_Sources(collections.UserList):
a list during variable expansion. We're not really using any
collections.UserList methods in practice.
"""
def __init__(self, nl):
def __init__(self, nl) -> None:
self.nl = nl
def __getattr__(self, attr):
nl = self.nl._create_nodelist()
@ -221,24 +219,20 @@ class Targets_or_Sources(collections.UserList):
def __getitem__(self, i):
nl = self.nl._create_nodelist()
return nl[i]
def __getslice__(self, i, j):
nl = self.nl._create_nodelist()
i = max(i, 0); j = max(j, 0)
return nl[i:j]
def __str__(self):
def __str__(self) -> str:
nl = self.nl._create_nodelist()
return str(nl)
def __repr__(self):
def __repr__(self) -> str:
nl = self.nl._create_nodelist()
return repr(nl)
class Target_or_Source(object):
class Target_or_Source:
"""A class that implements $TARGET or $SOURCE expansions by in turn
wrapping a NLWrapper. This class handles the different methods used
to access an individual proxy Node, calling the NLWrapper to create
a proxy on demand.
"""
def __init__(self, nl):
def __init__(self, nl) -> None:
self.nl = nl
def __getattr__(self, attr):
nl = self.nl._create_nodelist()
@ -249,20 +243,20 @@ class Target_or_Source(object):
# pass through, so raise AttributeError for everything.
raise AttributeError("NodeList has no attribute: %s" % attr)
return getattr(nl0, attr)
def __str__(self):
def __str__(self) -> str:
nl = self.nl._create_nodelist()
if nl:
return str(nl[0])
return ''
def __repr__(self):
def __repr__(self) -> str:
nl = self.nl._create_nodelist()
if nl:
return repr(nl[0])
return ''
class NullNodeList(SCons.Util.NullSeq):
def __call__(self, *args, **kwargs): return ''
def __str__(self): return ''
def __call__(self, *args, **kwargs) -> str: return ''
def __str__(self) -> str: return ''
NullNodesList = NullNodeList()
@ -332,14 +326,18 @@ def subst_dict(target, source):
return dict
class StringSubber(object):
_callable_args_set = {'target', 'source', 'env', 'for_signature'}
class StringSubber:
"""A class to construct the results of a scons_subst() call.
This binds a specific construction environment, mode, target and
source with two methods (substitute() and expand()) that handle
the expansion.
"""
def __init__(self, env, mode, conv, gvars):
def __init__(self, env, mode, conv, gvars) -> None:
self.env = env
self.mode = mode
self.conv = conv
@ -420,12 +418,20 @@ class StringSubber(object):
return conv(substitute(l, lvars))
return list(map(func, s))
elif callable(s):
try:
# SCons has the unusual Null class, whose __getattr__ returns the object itself,
# which does not work with the signature module, and which returns an empty
# string when called, so we make an exception for the Null class here.
# Also allow callables whose only non-defaulted arguments match the expected
# subst arguments; this should also allow functools.partial objects to work.
if isinstance(s, SCons.Util.Null) or {k for k, v in signature(s).parameters.items() if
k in _callable_args_set or v.default == Parameter.empty} == _callable_args_set:
s = s(target=lvars['TARGETS'],
source=lvars['SOURCES'],
env=self.env,
for_signature=(self.mode != SUBST_CMD))
except TypeError:
for_signature=(self.mode == SUBST_SIG))
else:
# This probably indicates that it's a callable
# object that doesn't match our calling arguments
# (like an Action).
@ -444,11 +450,12 @@ class StringSubber(object):
This serves as a wrapper for splitting up a string into
separate tokens.
"""
def sub_match(match):
return self.conv(self.expand(match.group(1), lvars))
if is_String(args) and not isinstance(args, CmdStringHolder):
args = str(args) # In case it's a UserString.
try:
def sub_match(match):
return self.conv(self.expand(match.group(1), lvars))
result = _dollar_exps.sub(sub_match, args)
except TypeError:
# If the internal conversion routine doesn't return
@ -485,8 +492,8 @@ class ListSubber(collections.UserList):
and the rest of the object takes care of doing the right thing
internally.
"""
def __init__(self, env, mode, conv, gvars):
collections.UserList.__init__(self, [])
def __init__(self, env, mode, conv, gvars) -> None:
super().__init__([])
self.env = env
self.mode = mode
self.conv = conv
@ -499,7 +506,7 @@ class ListSubber(collections.UserList):
self.in_strip = None
self.next_line()
def expanded(self, s):
def expanded(self, s) -> bool:
"""Determines if the string s requires further expansion.
Due to the implementation of ListSubber expand will call
@ -590,12 +597,19 @@ class ListSubber(collections.UserList):
self.substitute(a, lvars, 1)
self.next_word()
elif callable(s):
try:
# SCons has the unusual Null class, whose __getattr__ returns the object itself,
# which does not work with the signature module, and which returns an empty
# string when called, so we make an exception for the Null class here.
# Also allow callables whose only non-defaulted arguments match the expected
# subst arguments; this should also allow functools.partial objects to work.
if isinstance(s, SCons.Util.Null) or {k for k, v in signature(s).parameters.items() if
k in _callable_args_set or v.default == Parameter.empty} == _callable_args_set:
s = s(target=lvars['TARGETS'],
source=lvars['SOURCES'],
env=self.env,
for_signature=(self.mode != SUBST_CMD))
except TypeError:
else:
# This probably indicates that it's a callable
# object that doesn't match our calling arguments
# (like an Action).
@ -609,7 +623,7 @@ class ListSubber(collections.UserList):
else:
self.append(s)
def substitute(self, args, lvars, within_list):
def substitute(self, args, lvars, within_list) -> None:
"""Substitute expansions in an argument or list of arguments.
This serves as a wrapper for splitting up a string into
@ -632,23 +646,23 @@ class ListSubber(collections.UserList):
else:
self.expand(args, lvars, within_list)
def next_line(self):
def next_line(self) -> None:
"""Arrange for the next word to start a new line. This
is like starting a new word, except that we have to append
another line to the result."""
collections.UserList.append(self, [])
self.next_word()
def this_word(self):
def this_word(self) -> None:
"""Arrange for the next word to append to the end of the
current last word in the result."""
self.append = self.add_to_current_word
def next_word(self):
def next_word(self) -> None:
"""Arrange for the next word to start a new word."""
self.append = self.add_new_word
def add_to_current_word(self, x):
def add_to_current_word(self, x) -> None:
"""Append the string x to the end of the current last word
in the result. If that is not possible, then just add
it as a new word. Make sure the entire concatenated string
@ -696,7 +710,7 @@ class ListSubber(collections.UserList):
y = CmdStringHolder(y, None)
self[-1][-1] = y
def add_new_word(self, x):
def add_new_word(self, x) -> None:
if not self.in_strip or self.mode != SUBST_SIG:
literal = self.literal(x)
x = self.conv(x)
@ -713,12 +727,12 @@ class ListSubber(collections.UserList):
else:
return l()
def open_strip(self, x):
def open_strip(self, x) -> None:
"""Handle the "open strip" $( token."""
self.add_strip(x)
self.in_strip = 1
def close_strip(self, x):
def close_strip(self, x) -> None:
"""Handle the "close strip" $) token."""
self.add_strip(x)
self.in_strip = None
@ -793,7 +807,8 @@ _separate_args = re.compile(r'(%s|\s+|[^\s$]+|\$)' % _dollar_exps_str)
# space characters in the string result from the scons_subst() function.
_space_sep = re.compile(r'[\t ]+(?![^{]*})')
def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None, overrides: dict | None = None):
"""Expand a string or list containing construction variable
substitutions.
@ -823,6 +838,10 @@ def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={
lvars = lvars.copy()
lvars.update(d)
# Allow last ditch chance to override lvars
if overrides:
lvars.update(overrides)
# We're (most likely) going to eval() things. If Python doesn't
# find a __builtins__ value in the global dictionary used for eval(),
# it copies the current global values for you. Avoid this by
@ -871,7 +890,7 @@ def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={
return result
def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None, overrides: dict | None = None):
"""Substitute construction variables in a string (or list or other
object) and separate the arguments into a command list.
@ -879,7 +898,6 @@ def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gv
substitutions within strings, so see that function instead
if that's what you're looking for.
"""
if conv is None:
conv = _strconv[mode]
@ -898,6 +916,10 @@ def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gv
lvars = lvars.copy()
lvars.update(d)
# Allow caller to specify last ditch override of lvars
if overrides:
lvars.update(overrides)
# We're (most likely) going to eval() things. If Python doesn't
# find a __builtins__ value in the global dictionary used for eval(),
# it copies the current global values for you. Avoid this by

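Two changes in this file are easiest to see with a concrete call. First, a callable construction variable is now invoked only when its non-defaulted parameters are exactly (target, source, env, for_signature); the check can be reproduced standalone (the function names here are made up):

from inspect import signature, Parameter
import functools

_callable_args_set = {'target', 'source', 'env', 'for_signature'}

def matches_subst_protocol(fn) -> bool:
    """Same test as above: required/known params must equal the subst set."""
    required = {
        name for name, p in signature(fn).parameters.items()
        if name in _callable_args_set or p.default == Parameter.empty
    }
    return required == _callable_args_set

def gen_flags(target, source, env, for_signature):
    return '-quick'

def gen_flags_extra(target, source, env, for_signature, mode):
    return '-quick' if mode else ''

print(matches_subst_protocol(gen_flags))                                   # True
print(matches_subst_protocol(gen_flags_extra))                             # False
print(matches_subst_protocol(functools.partial(gen_flags_extra, mode=1)))  # True

Second, the new overrides parameter on scons_subst() and scons_subst_list() is simply a dict merged into lvars after target/source are set up, giving callers a last-ditch way to force local values for a single expansion.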
View File

@ -0,0 +1,762 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Serial and Parallel classes to execute build tasks.
The Jobs class provides a higher level interface to start,
stop, and wait on jobs.
"""
import SCons.compat
import logging
import os
import queue
import signal
import sys
import threading
from enum import Enum
import SCons.Errors
import SCons.Warnings
# The default stack size (in kilobytes) of the threads used to execute
# jobs in parallel.
#
# We use a stack size of 256 kilobytes. The default on some platforms
# is too large and prevents us from creating enough threads to fully
# parallelize the build. For example, the default stack size on Linux
# is 8 MBytes.
explicit_stack_size = None
default_stack_size = 256
interrupt_msg = 'Build interrupted.'
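A brief usage note on the stack-size knobs above: explicit_stack_size defaults to None, in which case default_stack_size (256 KB) is handed to threading.stack_size() before worker threads are spawned. A hedged sketch of a wrapper that raises the worker stack size before creating jobs; the import path SCons.Taskmaster.Job reflects where upstream SCons 4.x ships this module and is an assumption here:

# Sketch only: request 512 KB worker stacks (value is illustrative; units are kilobytes).
import SCons.Taskmaster.Job as Job

def make_jobs(taskmaster, num_jobs):
    Job.explicit_stack_size = 512  # consulted by Jobs.__init__ before threads start
    return Job.Jobs(num_jobs, taskmaster)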
class InterruptState:
def __init__(self) -> None:
self.interrupted = False
def set(self) -> None:
self.interrupted = True
def __call__(self):
return self.interrupted
class Jobs:
"""An instance of this class initializes N jobs, and provides
methods for starting, stopping, and waiting on all N jobs.
"""
def __init__(self, num, taskmaster) -> None:
"""
Create 'num' jobs using the given taskmaster. The exact implementation
used varies with the number of jobs requested and whether the `legacy_sched` flag
was passed to `--experimental`.
"""
# Importing GetOption here instead of at top of file to avoid
# circular imports
# pylint: disable=import-outside-toplevel
from SCons.Script import GetOption
stack_size = explicit_stack_size
if stack_size is None:
stack_size = default_stack_size
experimental_option = GetOption('experimental') or []
if 'legacy_sched' in experimental_option:
if num > 1:
self.job = LegacyParallel(taskmaster, num, stack_size)
else:
self.job = Serial(taskmaster)
else:
self.job = NewParallel(taskmaster, num, stack_size)
self.num_jobs = num
def run(self, postfunc=lambda: None) -> None:
"""Run the jobs.
postfunc() will be invoked after the jobs have run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
def were_interrupted(self):
"""Returns whether the jobs were interrupted by a signal."""
return self.job.interrupted()
def _setup_sig_handler(self) -> None:
"""Setup an interrupt handler so that SCons can shutdown cleanly in
various conditions:
a) SIGINT: Keyboard interrupt
b) SIGTERM: kill or system shutdown
c) SIGHUP: Controlling shell exiting
We handle all of these cases by stopping the taskmaster. It
turns out that it's very difficult to stop the build process
by throwing asynchronously an exception such as
KeyboardInterrupt. For example, the python Condition
variables (threading.Condition) and queues do not seem to be
asynchronous-exception-safe. It would require adding a whole
bunch of try/finally blocks and except KeyboardInterrupt handlers all
over the place.
Note also that we have to be careful to handle the case when
SCons forks before executing another process. In that case, we
want the child to exit immediately.
"""
def handler(signum, stack, self=self, parentpid=os.getpid()) -> None:
if os.getpid() == parentpid:
self.job.taskmaster.stop()
self.job.interrupted.set()
else:
os._exit(2) # pylint: disable=protected-access
self.old_sigint = signal.signal(signal.SIGINT, handler)
self.old_sigterm = signal.signal(signal.SIGTERM, handler)
try:
self.old_sighup = signal.signal(signal.SIGHUP, handler)
except AttributeError:
pass
if (self.old_sigint is None) or (self.old_sigterm is None) or \
(hasattr(self, "old_sighup") and self.old_sighup is None):
msg = "Overwritting previous signal handler which was not installed from Python. " + \
"Will not be able to reinstate and so will return to default handler."
SCons.Warnings.warn(SCons.Warnings.SConsWarning, msg)
def _reset_sig_handler(self) -> None:
"""Restore the signal handlers to their previous state (before the
call to _setup_sig_handler())."""
sigint_to_use = self.old_sigint if self.old_sigint is not None else signal.SIG_DFL
sigterm_to_use = self.old_sigterm if self.old_sigterm is not None else signal.SIG_DFL
signal.signal(signal.SIGINT, sigint_to_use)
signal.signal(signal.SIGTERM, sigterm_to_use)
try:
sigterm_to_use = self.old_sighup if self.old_sighup is not None else signal.SIG_DFL
signal.signal(signal.SIGHUP, sigterm_to_use)
except AttributeError:
pass
class Serial:
"""This class is used to execute tasks in series, and is more efficient
than Parallel, but is only appropriate for non-parallel builds. Only
one instance of this class should be in existence at a time.
This class is not thread safe.
"""
def __init__(self, taskmaster) -> None:
"""Create a new serial job given a taskmaster.
The taskmaster's next_task() method should return the next task
that needs to be executed, or None if there are no more tasks. The
taskmaster's executed() method will be called for each task when it
is successfully executed, or failed() will be called if it failed to
execute (e.g. execute() raised an exception)."""
self.taskmaster = taskmaster
self.interrupted = InterruptState()
def start(self):
"""Start the job. This will begin pulling tasks from the taskmaster
and executing them, and return when there are no more tasks. If a task
fails to execute (i.e. execute() raises an exception), then the job will
stop."""
while True:
task = self.taskmaster.next_task()
if task is None:
break
try:
task.prepare()
if task.needs_execute():
task.execute()
except Exception:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
else:
task.exception_set()
# Let the failed() callback function arrange for the
# build to stop if that's appropriate.
task.failed()
else:
task.executed()
task.postprocess()
self.taskmaster.cleanup()
class Worker(threading.Thread):
"""A worker thread waits on a task to be posted to its request queue,
dequeues the task, executes it, and posts a tuple including the task
and a boolean indicating whether the task executed successfully. """
def __init__(self, requestQueue, resultsQueue, interrupted) -> None:
super().__init__()
self.daemon = True
self.requestQueue = requestQueue
self.resultsQueue = resultsQueue
self.interrupted = interrupted
self.start()
def run(self):
while True:
task = self.requestQueue.get()
if task is None:
# The "None" value is used as a sentinel by
# ThreadPool.cleanup(). This indicates that there
# are no more tasks, so we should quit.
break
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
task.exception_set()
ok = False
else:
ok = True
self.resultsQueue.put((task, ok))
class ThreadPool:
"""This class is responsible for spawning and managing worker threads."""
def __init__(self, num, stack_size, interrupted) -> None:
"""Create the request and reply queues, and 'num' worker threads.
One must specify the stack size of the worker threads. The
stack size is specified in kilobytes.
"""
self.requestQueue = queue.Queue(0)
self.resultsQueue = queue.Queue(0)
try:
prev_size = threading.stack_size(stack_size * 1024)
except RuntimeError as e:
# Only print a warning if the stack size has been explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
# Create worker threads
self.workers = []
for _ in range(num):
worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
self.workers.append(worker)
if 'prev_size' in locals():
threading.stack_size(prev_size)
def put(self, task) -> None:
"""Put task into request queue."""
self.requestQueue.put(task)
def get(self):
"""Remove and return a result tuple from the results queue."""
return self.resultsQueue.get()
def preparation_failed(self, task) -> None:
self.resultsQueue.put((task, False))
def cleanup(self) -> None:
"""
Shuts down the thread pool, giving each worker thread a
chance to shut down gracefully.
"""
# For each worker thread, put a sentinel "None" value
# on the requestQueue (indicating that there's no work
# to be done) so that each worker thread will get one and
# terminate gracefully.
for _ in self.workers:
self.requestQueue.put(None)
# Wait for all of the workers to terminate.
#
# If we don't do this, later Python versions (2.4, 2.5) often
# seem to raise exceptions during shutdown. This happens
# in requestQueue.get(), as an assertion failure that
# requestQueue.not_full is notified while not acquired,
# seemingly because the main thread has shut down (or is
# in the process of doing so) while the workers are still
# trying to pull sentinels off the requestQueue.
#
# Normally these terminations should happen fairly quickly,
# but we'll stick a one-second timeout on here just in case
# someone gets hung.
for worker in self.workers:
worker.join(1.0)
self.workers = []
class LegacyParallel:
"""This class is used to execute tasks in parallel, and is somewhat
less efficient than Serial, but is appropriate for parallel builds.
This class is thread safe.
"""
def __init__(self, taskmaster, num, stack_size) -> None:
"""Create a new parallel job given a taskmaster.
The taskmaster's next_task() method should return the next
task that needs to be executed, or None if there are no more
tasks. The taskmaster's executed() method will be called
for each task when it is successfully executed, or failed()
will be called if the task failed to execute (i.e. execute()
raised an exception).
Note: calls to taskmaster are serialized, but calls to
execute() on distinct tasks are not serialized, because
that is the whole point of parallel jobs: they can execute
multiple tasks simultaneously. """
self.taskmaster = taskmaster
self.interrupted = InterruptState()
self.tp = ThreadPool(num, stack_size, self.interrupted)
self.maxjobs = num
def start(self):
"""Start the job. This will begin pulling tasks from the
taskmaster and executing them, and return when there are no
more tasks. If a task fails to execute (i.e. execute() raises
an exception), then the job will stop."""
jobs = 0
while True:
# Start up as many available tasks as we're
# allowed to.
while jobs < self.maxjobs:
task = self.taskmaster.next_task()
if task is None:
break
try:
# prepare task for execution
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if task.needs_execute():
# dispatch task
self.tp.put(task)
jobs += 1
else:
task.executed()
task.postprocess()
if not task and not jobs:
break
# Let any/all completed tasks finish up before we go
# back and put the next batch of tasks on the queue.
while True:
task, ok = self.tp.get()
jobs -= 1
if ok:
task.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
task.failed()
task.postprocess()
if self.tp.resultsQueue.empty():
break
self.tp.cleanup()
self.taskmaster.cleanup()
# An experimental new parallel scheduler that uses a leaders/followers pattern.
class NewParallel:
class State(Enum):
READY = 0
SEARCHING = 1
STALLED = 2
COMPLETED = 3
class Worker(threading.Thread):
def __init__(self, owner) -> None:
super().__init__()
self.daemon = True
self.owner = owner
self.start()
def run(self) -> None:
self.owner._work()
class FakeLock(object):
def lock(self):
pass
def unlock(self):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
class FakeCondition(object):
def __init__(self, lock):
pass
def wait(self):
fatal();
def notify(self):
pass
def notify_all(self):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
def __init__(self, taskmaster, num, stack_size) -> None:
self.taskmaster = taskmaster
self.max_workers = num
self.stack_size = stack_size
self.interrupted = InterruptState()
self.workers = []
# The `tm_lock` is what ensures that we only have one
# thread interacting with the taskmaster at a time. It
# also protects access to our state that gets updated
# concurrently. The `can_search_cv` is associated with
# this mutex.
self.tm_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
# Guarded under `tm_lock`.
self.jobs = 0
self.state = NewParallel.State.READY
# The `can_search_cv` is used to manage a leader /
# follower pattern for access to the taskmaster, and to
# awaken from stalls.
self.can_search_cv = (threading.Condition if self.max_workers > 1 else NewParallel.FakeCondition)(self.tm_lock)
# The queue of tasks that have completed execution. The
# next thread to obtain `tm_lock` will retire them.
self.results_queue_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
self.results_queue = []
if self.taskmaster.trace:
self.trace = self._setup_logging()
else:
self.trace = False
def _setup_logging(self):
jl = logging.getLogger("Job")
jl.setLevel(level=logging.DEBUG)
jl.addHandler(self.taskmaster.trace.log_handler)
return jl
def trace_message(self, message) -> None:
# This grabs the name of the function which calls trace_message()
method_name = sys._getframe(1).f_code.co_name + "():"
thread_id=threading.get_ident()
self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message))
def start(self) -> None:
if self.max_workers == 1:
self._work()
else:
self._start_worker()
while len(self.workers) > 0:
self.workers[0].join()
self.workers.pop(0)
self.taskmaster.cleanup()
def _maybe_start_worker(self) -> None:
if self.max_workers > 1 and len(self.workers) < self.max_workers:
if self.jobs >= len(self.workers):
self._start_worker()
def _start_worker(self) -> None:
prev_size = self._adjust_stack_size()
if self.trace:
self.trace_message("Starting new worker thread")
self.workers.append(NewParallel.Worker(self))
self._restore_stack_size(prev_size)
def _adjust_stack_size(self):
try:
prev_size = threading.stack_size(self.stack_size * 1024)
return prev_size
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
return None
def _restore_stack_size(self, prev_size) -> None:
if prev_size is not None:
threading.stack_size(prev_size)
def _work(self):
task = None
while True:
# Obtain `tm_lock`, granting exclusive access to the taskmaster.
with self.can_search_cv:
if self.trace:
self.trace_message("Gained exclusive access")
# Capture whether we got here with `task` set,
# then drop our reference to the task as we are no
# longer interested in the actual object.
completed_task = (task is not None)
task = None
# We will only have `completed_task` set here if
# we have looped back after executing a task. If
# we have completed a task and find that we are
# stalled, we should speculatively indicate that
# we are no longer stalled by transitioning to the
# 'ready' state which will bypass the condition
# wait so that we immediately process the results
# queue and hopefully light up new
# work. Otherwise, stay stalled, and we will wait
# in the condvar. Some other thread will come back
# here with a completed task.
if self.state == NewParallel.State.STALLED and completed_task:
if self.trace:
self.trace_message("Detected stall with completed task, bypassing wait")
self.state = NewParallel.State.READY
# Wait until we are neither searching nor stalled.
while self.state == NewParallel.State.SEARCHING or self.state == NewParallel.State.STALLED:
if self.trace:
self.trace_message("Search already in progress, waiting")
self.can_search_cv.wait()
# If someone set the completed flag, bail.
if self.state == NewParallel.State.COMPLETED:
if self.trace:
self.trace_message("Completion detected, breaking from main loop")
break
# Set the searching flag to indicate that a thread
# is currently in the critical section for
# taskmaster work.
#
if self.trace:
self.trace_message("Starting search")
self.state = NewParallel.State.SEARCHING
# Bulk acquire the tasks in the results queue
# under the result queue lock, then process them
# all outside that lock. We need to process the
# tasks in the results queue before looking for
# new work because we might be unable to find new
# work if we don't.
results_queue = []
with self.results_queue_lock:
results_queue, self.results_queue = self.results_queue, results_queue
if self.trace:
self.trace_message(f"Found {len(results_queue)} completed tasks to process")
for (rtask, rresult) in results_queue:
if rresult:
rtask.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
rtask.targets[0], errstr=interrupt_msg)
except Exception:
rtask.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
rtask.failed()
rtask.postprocess()
self.jobs -= 1
# We are done with any task objects that were in
# the results queue.
results_queue.clear()
# Now, turn the crank on the taskmaster until we
# either run out of tasks, or find a task that
# needs execution. If we run out of tasks, go idle
# until results arrive if jobs are pending, or
# mark the walk as complete if not.
while self.state == NewParallel.State.SEARCHING:
if self.trace:
self.trace_message("Searching for new tasks")
task = self.taskmaster.next_task()
if task:
# We found a task. Walk it through the
# task lifecycle. If it does not need
# execution, just complete the task and
# look for the next one. Otherwise,
# indicate that we are no longer searching
# so we can drop out of this loop, execute
# the task outside the lock, and allow
# another thread in to search.
try:
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if not task.needs_execute():
if self.trace:
self.trace_message("Found internal task")
task.executed()
task.postprocess()
else:
self.jobs += 1
if self.trace:
self.trace_message("Found task requiring execution")
self.state = NewParallel.State.READY
self.can_search_cv.notify()
# This thread will be busy taking care of
# `execute`ing this task. If we haven't
# reached the limit, spawn a new thread to
# turn the crank and find the next task.
self._maybe_start_worker()
else:
# We failed to find a task, so this thread
# cannot continue turning the taskmaster
# crank. We must exit the loop.
if self.jobs:
# No task was found, but there are
# outstanding jobs executing that
# might unblock new tasks when they
# complete. Transition to the stalled
# state. We do not need a notify,
# because we know there are threads
# outstanding that will re-enter the
# loop.
#
if self.trace:
self.trace_message("Found no task requiring execution, but have jobs: marking stalled")
self.state = NewParallel.State.STALLED
else:
# We didn't find a task and there are
# no jobs outstanding, so there is
# nothing that will ever return
# results which might unblock new
# tasks. We can conclude that the walk
# is complete. Update our state to
# note completion and awaken anyone
# sleeping on the condvar.
#
if self.trace:
self.trace_message("Found no task requiring execution, and have no jobs: marking complete")
self.state = NewParallel.State.COMPLETED
self.can_search_cv.notify_all()
# We no longer hold `tm_lock` here. If we have a task,
# we can now execute it. If there are threads waiting
# to search, one of them can now begin turning the
# taskmaster crank in NewParallel.
if task:
if self.trace:
self.trace_message("Executing task")
ok = True
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
ok = False
task.exception_set()
# Grab the results queue lock and enqueue the
# executed task and state. The next thread into
# the searching loop will complete the
# postprocessing work under the taskmaster lock.
#
if self.trace:
self.trace_message("Enqueueing executed task results")
with self.results_queue_lock:
self.results_queue.append((task, ok))
# Tricky state "fallthrough" here. We are going back
# to the top of the loop, which behaves differently
# depending on whether `task` is set. Do not perturb
# the value of the `task` variable if you add new code
# after this comment.
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
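Taken together, the classes in this new file are driven through the small Jobs facade: it picks the NewParallel leaders/followers scheduler by default, and only falls back to Serial or the old thread-pool LegacyParallel when one job is requested or 'legacy_sched' is passed via --experimental. A hedged sketch of how a front end might drive that API, assuming `taskmaster` is an already-configured Taskmaster instance and that the module is importable as SCons.Taskmaster.Job:

# Sketch only: run a build with N worker threads and report whether it was interrupted.
from SCons.Taskmaster.Job import Jobs  # import path is an assumption (upstream 4.x layout)

def run_build(taskmaster, num_jobs: int) -> bool:
    jobs = Jobs(num_jobs, taskmaster)
    # postfunc always runs, even if the build is stopped by SIGINT/SIGTERM/SIGHUP.
    jobs.run(postfunc=lambda: print("job phase finished"))
    return not jobs.were_interrupted()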

View File

@ -1,5 +1,6 @@
# MIT License
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -20,50 +21,41 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
"""Generic Taskmaster module for the SCons build engine.
import sys
This module contains the primary interface(s) between a wrapping user
interface and the SCons build engine. There are two key classes here:
__doc__ = """
Generic Taskmaster module for the SCons build engine.
=====================================================
Taskmaster
This is the main engine for walking the dependency graph and
calling things to decide what does or doesn't need to be built.
This module contains the primary interface(s) between a wrapping user
interface and the SCons build engine. There are two key classes here:
Task
This is the base class for allowing a wrapping interface to
decide what does or doesn't actually need to be done. The
intention is for a wrapping interface to subclass this as
appropriate for different types of behavior it may need.
Taskmaster
----------
This is the main engine for walking the dependency graph and
calling things to decide what does or doesn't need to be built.
The canonical example is the SCons native Python interface,
which has Task subclasses that handle its specific behavior,
like printing "'foo' is up to date" when a top-level target
doesn't need to be built, and handling the -c option by removing
targets as its "build" action. There is also a separate subclass
for suppressing this output when the -q option is used.
Task
----
This is the base class for allowing a wrapping interface to
decide what does or doesn't actually need to be done. The
intention is for a wrapping interface to subclass this as
appropriate for different types of behavior it may need.
The canonical example is the SCons native Python interface,
which has Task subclasses that handle its specific behavior,
like printing "'foo' is up to date" when a top-level target
doesn't need to be built, and handling the -c option by removing
targets as its "build" action. There is also a separate subclass
for suppressing this output when the -q option is used.
The Taskmaster instantiates a Task object for each (set of)
target(s) that it decides need to be evaluated and/or built.
The Taskmaster instantiates a Task object for each (set of)
target(s) that it decides need to be evaluated and/or built.
"""
__revision__ = "src/engine/SCons/Taskmaster.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
from itertools import chain
import operator
import io
import sys
import traceback
from abc import ABC, abstractmethod
from itertools import chain
import logging
import SCons.Errors
import SCons.Node
import SCons.Warnings
from SCons.Util import DispatchingFormatter
StateString = SCons.Node.StateString
NODE_NO_STATE = SCons.Node.no_state
@ -72,8 +64,7 @@ NODE_EXECUTING = SCons.Node.executing
NODE_UP_TO_DATE = SCons.Node.up_to_date
NODE_EXECUTED = SCons.Node.executed
NODE_FAILED = SCons.Node.failed
print_prepare = 0 # set by option --debug=prepare
print_prepare = False # set by option --debug=prepare
# A subsystem for recording stats about how different Nodes are handled by
# the main Taskmaster loop. There's no external control here (no need for
@ -81,7 +72,8 @@ print_prepare = 0 # set by option --debug=prepare
CollectStats = None
class Stats(object):
class Stats:
"""
A simple class for holding statistics about the disposition of a
Node by the Taskmaster. If we're collecting statistics, each Node
@ -89,7 +81,7 @@ class Stats(object):
the Taskmaster records its decision each time it processes the Node.
(Ideally, that's just once per Node.)
"""
def __init__(self):
def __init__(self) -> None:
"""
Instantiates a Taskmaster.Stats object, initializing all
appropriate counters to zero.
@ -102,6 +94,7 @@ class Stats(object):
self.side_effects = 0
self.build = 0
StatsNodes = []
fmt = "%(considered)3d "\
@ -112,15 +105,14 @@ fmt = "%(considered)3d "\
"%(side_effects)3d " \
"%(build)3d "
def dump_stats():
def dump_stats() -> None:
for n in sorted(StatsNodes, key=lambda a: str(a)):
print((fmt % n.attributes.stats.__dict__) + str(n))
class Task(object):
"""
Default SCons build engine task.
class Task(ABC):
""" SCons build engine abstract task class.
This controls the interaction of the actual building of node
and the rest of the engine.
@ -137,18 +129,22 @@ class Task(object):
these methods explicitly to update state, etc., rather than
roll their own interaction with Taskmaster from scratch.
"""
def __init__(self, tm, targets, top, node):
LOGGER = None
def __init__(self, tm, targets, top, node) -> None:
self.tm = tm
self.targets = targets
self.top = top
self.node = node
self.exc_clear()
def trace_message(self, method, node, description='node'):
fmt = '%-20s %s %s\n'
return fmt % (method + ':', description, self.tm.trace_node(node))
def trace_message(self, node, description: str='node') -> None:
# This grabs the name of the function which calls trace_message()
method_name=sys._getframe(1).f_code.co_name+"():"
Task.LOGGER.debug('%-15s %s %s' % (method_name, description, self.tm.tm_trace_node(node)))
def display(self, message):
def display(self, message) -> None:
"""
Hook to allow the calling interface to display a message.
@ -161,7 +157,7 @@ class Task(object):
"""
pass
def prepare(self):
def prepare(self) -> None:
"""
Called just before the task is executed.
@ -171,7 +167,8 @@ class Task(object):
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
if T:
self.trace_message(self.node)
# Now that it's the appropriate time, give the TaskMaster a
# chance to raise any exceptions it encountered while preparing
@ -212,17 +209,9 @@ class Task(object):
"""
return self.node
@abstractmethod
def needs_execute(self):
# TODO(deprecate): "return True" is the old default behavior;
# change it to NotImplementedError (after running through the
# Deprecation Cycle) so the desired behavior is explicitly
# determined by which concrete subclass is used.
#raise NotImplementedError
msg = ('Taskmaster.Task is an abstract base class; instead of\n'
'\tusing it directly, '
'derive from it and override the abstract methods.')
SCons.Warnings.warn(SCons.Warnings.TaskmasterNeedsExecuteWarning, msg)
return True
return
def execute(self):
"""
@ -233,7 +222,8 @@ class Task(object):
prepare(), executed() or failed().
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
if T:
self.trace_message(self.node)
try:
cached_targets = []
@ -250,9 +240,12 @@ class Task(object):
for t in cached_targets:
try:
t.fs.unlink(t.get_internal_path())
except (IOError, OSError):
pass
except OSError as e:
SCons.Warnings.warn(SCons.Warnings.CacheCleanupErrorWarning,
"Failed copying all target files from cache, Error while attempting to remove file %s retrieved from cache: %s" % (t.get_internal_path(), e))
self.targets[0].build()
for t in self.targets:
t.push_to_cache()
else:
for t in cached_targets:
t.cached = 1
@ -269,15 +262,15 @@ class Task(object):
buildError.exc_info = sys.exc_info()
raise buildError
def executed_without_callbacks(self):
def executed_without_callbacks(self) -> None:
"""
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
if T:
self.trace_message(self.node)
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
@ -285,7 +278,7 @@ class Task(object):
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
def executed_with_callbacks(self):
def executed_with_callbacks(self) -> None:
"""
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
@ -300,16 +293,14 @@ class Task(object):
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
if T:
self.trace_message(self.node)
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
@ -320,7 +311,7 @@ class Task(object):
executed = executed_with_callbacks
def failed(self):
def failed(self) -> None:
"""
Default action when a task fails: stop the build.
@ -330,7 +321,7 @@ class Task(object):
"""
self.fail_stop()
def fail_stop(self):
def fail_stop(self) -> None:
"""
Explicit stop-the-build failure.
@ -342,7 +333,8 @@ class Task(object):
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_stop()', self.node))
if T:
self.trace_message(self.node)
# Invoke will_not_build() to clean-up the pending children
# list.
@ -357,7 +349,7 @@ class Task(object):
self.targets = [self.tm.current_top]
self.top = 1
def fail_continue(self):
def fail_continue(self) -> None:
"""
Explicit continue-the-build failure.
@ -369,11 +361,12 @@ class Task(object):
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_continue()', self.node))
if T:
self.trace_message(self.node)
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
def make_ready_all(self):
def make_ready_all(self) -> None:
"""
Marks all targets in a task ready for execution.
@ -381,7 +374,8 @@ class Task(object):
visited--the canonical example being the "scons -c" option.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
if T:
self.trace_message(self.node)
self.out_of_date = self.targets[:]
for t in self.targets:
@ -399,8 +393,9 @@ class Task(object):
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
if T:
T.log_handler.stream.write('\n') # Prefix message with new line. This is a hack
self.trace_message(self.node)
self.out_of_date = []
needs_executing = False
@ -409,7 +404,7 @@ class Task(object):
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError as e:
except OSError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
@ -436,7 +431,7 @@ class Task(object):
make_ready = make_ready_current
def postprocess(self):
def postprocess(self) -> None:
"""
Post-processes a task after it's been executed.
@ -447,7 +442,8 @@ class Task(object):
that can be put back on the candidates list.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
if T:
self.trace_message(self.node)
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
@ -464,9 +460,8 @@ class Task(object):
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
if T:
self.trace_message(t, 'removing')
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
@ -493,9 +488,8 @@ class Task(object):
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if T:
self.trace_message(p, 'adjusted parent ref count')
if p.ref_count == 0:
self.tm.candidates.append(p)
@ -517,7 +511,7 @@ class Task(object):
"""
return self.exception
def exc_clear(self):
def exc_clear(self) -> None:
"""
Clears any recorded exception.
@ -527,7 +521,7 @@ class Task(object):
self.exception = (None, None, None)
self.exception_raise = self._no_exception_to_raise
def exception_set(self, exception=None):
def exception_set(self, exception=None) -> None:
"""
Records an exception to be raised at the appropriate time.
@ -539,7 +533,7 @@ class Task(object):
self.exception = exception
self.exception_raise = self._exception_raise
def _no_exception_to_raise(self):
def _no_exception_to_raise(self) -> None:
pass
def _exception_raise(self):
@ -555,15 +549,12 @@ class Task(object):
exc_traceback = None
# raise exc_type(exc_value).with_traceback(exc_traceback)
if sys.version_info[0] == 2:
exec("raise exc_type, exc_value, exc_traceback")
else: # sys.version_info[0] == 3:
if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
exec("raise exc_value.with_traceback(exc_traceback)")
else:
# else we'll create an exception using the value and raise that
exec("raise exc_type(exc_value).with_traceback(exc_traceback)")
if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
raise exc_value.with_traceback(exc_traceback)
else:
# else we'll create an exception using the value and raise that
raise exc_type(exc_value).with_traceback(exc_traceback)
# raise e.__class__, e.__class__(e), sys.exc_info()[2]
@ -572,7 +563,7 @@ class Task(object):
class AlwaysTask(Task):
def needs_execute(self):
def needs_execute(self) -> bool:
"""
Always returns True (indicating this Task should always
be executed).
@ -582,7 +573,7 @@ class AlwaysTask(Task):
dependencies) can use this as follows:
class MyTaskSubclass(SCons.Taskmaster.Task):
needs_execute = SCons.Taskmaster.Task.execute_always
needs_execute = SCons.Taskmaster.AlwaysTask.needs_execute
"""
return True
@ -610,12 +601,12 @@ def find_cycle(stack, visited):
return None
class Taskmaster(object):
class Taskmaster:
"""
The Taskmaster for walking the dependency DAG.
"""
def __init__(self, targets=[], tasker=None, order=None, trace=None):
def __init__(self, targets=[], tasker=None, order=None, trace=None) -> None:
self.original_top = targets
self.top_targets_left = targets[:]
self.top_targets_left.reverse()
@ -627,9 +618,63 @@ class Taskmaster(object):
order = lambda l: l
self.order = order
self.message = None
self.trace = trace
self.next_candidate = self.find_next_candidate
self.pending_children = set()
self.trace = False
self.configure_trace(trace)
def configure_trace(self, trace=None) -> None:
"""
This handles the command line option --taskmastertrace=
It can be:
- : output to stdout
<filename> : output to a file
False/None : Do not trace
"""
if not trace:
self.trace = False
return
# TODO: May want to switch format to something like this.
# log_format = (
# '%(relativeCreated)05dms'
# ':%(relfilename)s'
# ':%(funcName)s'
# '#%(lineno)s'
# ': %(message)s'
# )
tm_formatter = logging.Formatter('Taskmaster: %(message)s')
if isinstance(trace, io.StringIO):
log_handler = logging.StreamHandler(trace)
elif trace == '-':
log_handler = logging.StreamHandler(sys.stdout)
elif trace:
log_handler = logging.FileHandler(filename=trace)
logger = logging.getLogger('Taskmaster')
logger.setLevel(level=logging.DEBUG)
logger.addHandler(log_handler)
self.trace = logger
logger.log_handler = log_handler
# Now setup Task's logger.
tl = logging.getLogger("Task")
tl.setLevel(level=logging.DEBUG)
tl.addHandler(log_handler)
task_formatter = logging.Formatter('%(name)s.%(message)s')
Task.LOGGER = tl
self.trace.log_handler = log_handler
log_handler.setFormatter(DispatchingFormatter(
formatters={
'Taskmaster': tm_formatter,
'Task': task_formatter,
'Job': task_formatter,
},
default_formatter=logging.Formatter('%(message)s')
))
def find_next_candidate(self):
"""
@ -681,7 +726,7 @@ class Taskmaster(object):
self.will_not_build(candidates)
return None
def _validate_pending_children(self):
def _validate_pending_children(self) -> None:
"""
Validate the content of the pending_children set. Assert if an
internal error is found.
@ -758,14 +803,10 @@ class Taskmaster(object):
for p in n.waiting_parents:
assert p.ref_count > 0, (str(n), str(p), p.ref_count)
def trace_message(self, message):
return 'Taskmaster: %s\n' % message
def trace_node(self, node):
return '<%-10s %-3s %s>' % (StateString[node.get_state()],
def tm_trace_node(self, node) -> str:
return('<%-10s %-3s %s>' % (StateString[node.get_state()],
node.ref_count,
repr(str(node)))
repr(str(node))))
def _find_next_ready_node(self):
"""
@ -792,12 +833,15 @@ class Taskmaster(object):
self.ready_exc = None
T = self.trace
if T: T.write(SCons.Util.UnicodeType('\n') + self.trace_message('Looking for a node to evaluate'))
if T:
T.log_handler.stream.write('\n') # Prefix message with new line. This is a hack
self.trace.debug('Looking for a node to evaluate')
while True:
node = self.next_candidate()
if node is None:
if T: T.write(self.trace_message('No candidate anymore.') + u'\n')
if T:
self.trace.debug('No candidate anymore.')
return None
node = node.disambiguate()
@ -820,7 +864,8 @@ class Taskmaster(object):
else:
S = None
if T: T.write(self.trace_message(u' Considering node %s and its children:' % self.trace_node(node)))
if T:
self.trace.debug(' Considering node %s and its children:' % self.tm_trace_node(node))
if state == NODE_NO_STATE:
# Mark this node as being on the execution stack:
@ -828,7 +873,8 @@ class Taskmaster(object):
elif state > NODE_PENDING:
# Skip this node if it has already been evaluated:
if S: S.already_handled = S.already_handled + 1
if T: T.write(self.trace_message(u' already handled (executed)'))
if T:
self.trace.debug(' already handled (executed)')
continue
executor = node.get_executor()
@ -839,7 +885,8 @@ class Taskmaster(object):
exc_value = sys.exc_info()[1]
e = SCons.Errors.ExplicitExit(node, exc_value.code)
self.ready_exc = (SCons.Errors.ExplicitExit, e)
if T: T.write(self.trace_message(' SystemExit'))
if T:
self.trace.debug(' SystemExit')
return node
except Exception as e:
# We had a problem just trying to figure out the
@ -848,7 +895,8 @@ class Taskmaster(object):
# raise the exception when the Task is "executed."
self.ready_exc = sys.exc_info()
if S: S.problem = S.problem + 1
if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e))
if T:
self.trace.debug(' exception %s while scanning children.' % e)
return node
children_not_visited = []
@ -859,7 +907,8 @@ class Taskmaster(object):
for child in chain(executor.get_all_prerequisites(), children):
childstate = child.get_state()
if T: T.write(self.trace_message(u' ' + self.trace_node(child)))
if T:
self.trace.debug(' ' + self.tm_trace_node(child))
if childstate == NODE_NO_STATE:
children_not_visited.append(child)
@ -880,8 +929,8 @@ class Taskmaster(object):
self.candidates.extend(self.order(children_not_visited))
# if T and children_not_visited:
# T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited)))
# T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates)))
# self.trace.debug(' adding to candidates: %s' % map(str, children_not_visited))
# self.trace.debug(' candidates now: %s\n' % map(str, self.candidates))
# Skip this node if any of its children have failed.
#
@ -906,7 +955,8 @@ class Taskmaster(object):
n.set_state(NODE_FAILED)
if S: S.child_failed = S.child_failed + 1
if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node)))
if T:
self.trace.debug('****** %s' % self.tm_trace_node(node))
continue
if children_not_ready:
@ -920,13 +970,14 @@ class Taskmaster(object):
# count so we can be put back on the list for
# re-evaluation when they've all finished.
node.ref_count = node.ref_count + child.add_to_waiting_parents(node)
if T: T.write(self.trace_message(u' adjusted ref count: %s, child %s' %
(self.trace_node(node), repr(str(child)))))
if T:
self.trace.debug(' adjusted ref count: %s, child %s' %
(self.tm_trace_node(node), repr(str(child))))
if T:
for pc in children_pending:
T.write(self.trace_message(' adding %s to the pending children set\n' %
self.trace_node(pc)))
self.trace.debug(' adding %s to the pending children set' %
self.tm_trace_node(pc))
self.pending_children = self.pending_children | children_pending
continue
@ -946,8 +997,8 @@ class Taskmaster(object):
# The default when we've gotten through all of the checks above:
# this node is ready to be built.
if S: S.build = S.build + 1
if T: T.write(self.trace_message(u'Evaluating %s\n' %
self.trace_node(node)))
if T:
self.trace.debug('Evaluating %s' % self.tm_trace_node(node))
# For debugging only:
#
@ -996,7 +1047,7 @@ class Taskmaster(object):
return task
def will_not_build(self, nodes, node_func=lambda n: None):
def will_not_build(self, nodes, node_func=lambda n: None) -> None:
"""
Perform clean-up about nodes that will never be built. Invokes
a user defined function on all of these nodes (including all
@ -1012,8 +1063,8 @@ class Taskmaster(object):
if T:
for n in nodes:
T.write(self.trace_message(' removing node %s from the pending children set\n' %
self.trace_node(n)))
self.trace.debug(' removing node %s from the pending children set\n' %
self.tm_trace_node(n))
try:
while len(to_visit):
node = to_visit.pop()
@ -1029,8 +1080,9 @@ class Taskmaster(object):
for p in parents:
p.ref_count = p.ref_count - 1
if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' %
self.trace_node(p)))
if T:
self.trace.debug(' removing parent %s from the pending children set\n' %
self.tm_trace_node(p))
except KeyError:
# The container to_visit has been emptied.
pass
@ -1040,7 +1092,7 @@ class Taskmaster(object):
# allow us to use in-place updates
self.pending_children = pending_children
def stop(self):
def stop(self) -> None:
"""
Stops the current build completely.
"""

View File

@ -10,7 +10,7 @@ selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -32,14 +32,14 @@ selection method.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/386asm.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
from SCons.Tool.PharLapCommon import addPharLapPaths
import SCons.Util
as_module = __import__('as', globals(), locals(), [], 1)
def generate(env):
def generate(env) -> None:
"""Add Builders and construction variables for ar to an Environment."""
as_module.generate(env)

Some files were not shown because too many files have changed in this diff.