mirror of https://github.com/mongodb/mongo
SERVER-103025 delete scons (#34270)
GitOrigin-RevId: 5b41cb76ab5930046a68021716a874ceda26f7ca
parent 6e26f553f5
commit 230828c095
5 .bazelrc
@@ -405,10 +405,7 @@ common:mod-scanner --aspects //modules_poc:mod_scanner.bzl%mod_scanner_aspect
common:mod-scanner --remote_download_regex=.*\.mod_scanner_decls.json$

# if you don't have access to the remote execution cluster above, use the local config
# described below.
# pass local config to SCons like:
# > buildscripts/scons.py BAZEL_FLAGS=--config=local <others args>
# or if invoking bazel directly pass "--config=local" on the bazel command line
# by passing "--config=local" on the bazel command line
--config=local
common:local --remote_executor=
common:local --remote_cache=
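For anyone without remote-execution access, the effect of the `local` config above can be exercised directly; a minimal sketch, assuming a `bazel` binary on PATH (the `install-mongod` target name is illustrative):

```
import subprocess

# Select the "local" config defined above, which blanks out
# --remote_executor/--remote_cache and so disables remote execution.
subprocess.run(["bazel", "build", "--config=local", "install-mongod"], check=True)
```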
@@ -6,16 +6,14 @@

# The following patterns are parsed from ./OWNERS.yml
OWNERS.yml @10gen/server-root-ownership @svc-auto-approve-bot
.bazelignore @10gen/devprod-build @svc-auto-approve-bot
.bazelrc @10gen/devprod-build @svc-auto-approve-bot
.bazelversion @10gen/devprod-build @svc-auto-approve-bot
.bazel* @10gen/devprod-build @svc-auto-approve-bot
.clang-format @10gen/server-programmability @svc-auto-approve-bot
.clang-tidy.in @10gen/server-programmability @svc-auto-approve-bot
.gitignore @10gen/devprod-build @svc-auto-approve-bot
.mypy.ini @10gen/devprod-build @10gen/devprod-correctness @svc-auto-approve-bot
.prettierignore @10gen/devprod-correctness @svc-auto-approve-bot
.prettierrc @10gen/devprod-correctness @svc-auto-approve-bot
/BUILD.bazel @10gen/devprod-build @svc-auto-approve-bot
BUILD.bazel @10gen/devprod-build @svc-auto-approve-bot
copy.bara.sky @IamXander @smcclure15 @svc-auto-approve-bot
copy.bara.staging.sky @10gen/devprod-correctness @svc-auto-approve-bot
eslint.config.mjs @10gen/devprod-correctness @svc-auto-approve-bot
@@ -25,7 +23,7 @@ pnpm-lock.yaml @10gen/devprod-correctness @svc-auto-approve-bot
poetry.lock @10gen/devprod-build @10gen/devprod-correctness @svc-auto-approve-bot
pyproject.toml @10gen/devprod-build @10gen/devprod-correctness @svc-auto-approve-bot
sbom.json @10gen/server-security @svc-auto-approve-bot
SConstruct @10gen/devprod-build @svc-auto-approve-bot
MODULE.bazel* @10gen/devprod-build @svc-auto-approve-bot
WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot

# The following patterns are parsed from ./bazel/OWNERS.yml
@@ -163,9 +161,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot
/buildscripts/tracing_profiler/**/* @10gen/query @svc-auto-approve-bot

# The following patterns are parsed from ./docs/OWNERS.yml
/docs/**/bazel.md @10gen/devprod-build @svc-auto-approve-bot
/docs/**/build_system_reference.md @10gen/devprod-build @svc-auto-approve-bot
/docs/**/build_system.md @10gen/devprod-build @svc-auto-approve-bot
/docs/**/building.md @10gen/devprod-build @svc-auto-approve-bot
/docs/**/poetry_execution.md @10gen/devprod-correctness @svc-auto-approve-bot
/docs/**/linting.md @10gen/devprod-build @svc-auto-approve-bot
@@ -1232,9 +1227,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot
# The following patterns are parsed from ./jstests/with_mongot/OWNERS.yml
/jstests/with_mongot/**/* @10gen/query-integration-search @svc-auto-approve-bot

# The following patterns are parsed from ./site_scons/OWNERS.yml
/site_scons/**/* @10gen/devprod-build @svc-auto-approve-bot

# The following patterns are parsed from ./src/mongo/OWNERS.yml
/src/mongo/**/config.h.in @10gen/server-programmability @svc-auto-approve-bot
@@ -1606,8 +1598,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot

# The following patterns are parsed from ./src/mongo/db/modules/enterprise/OWNERS.yml
/src/mongo/db/modules/enterprise/BUILD.bazel @10gen/devprod-build @svc-auto-approve-bot
/src/mongo/db/modules/enterprise/build.py @10gen/devprod-build @svc-auto-approve-bot
/src/mongo/db/modules/enterprise/SConscript @10gen/devprod-build @svc-auto-approve-bot
/src/mongo/db/modules/enterprise/.gitignore @10gen/devprod-build @svc-auto-approve-bot
/src/mongo/db/modules/enterprise/.git-blame-ignore-revs @10gen/devprod-build @svc-auto-approve-bot
/src/mongo/db/modules/enterprise/.clang-format @10gen/server-programmability @svc-auto-approve-bot
@@ -2620,7 +2610,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot
/src/third_party/**/SafeInt @10gen/server-programmability @svc-auto-approve-bot
/src/third_party/**/sasl @10gen/server-security @svc-auto-approve-bot
/src/third_party/**/schemastore.org @10gen/query-optimization @svc-auto-approve-bot
/src/third_party/**/scons* @10gen/devprod-build @svc-auto-approve-bot
/src/third_party/**/snappy @10gen/server-networking-and-observability @svc-auto-approve-bot
/src/third_party/**/tcmalloc @10gen/server-workload-scheduling @svc-auto-approve-bot
/src/third_party/**/timelib @10gen/query-execution @svc-auto-approve-bot
@@ -2631,7 +2620,6 @@ WORKSPACE.bazel @10gen/devprod-build @svc-auto-approve-bot
/src/third_party/**/yaml-cpp @10gen/server-security @svc-auto-approve-bot
/src/third_party/**/zlib @10gen/server-networking-and-observability @svc-auto-approve-bot
/src/third_party/**/zstandard @10gen/server-networking-and-observability @svc-auto-approve-bot
/src/third_party/**/SConscript @10gen/devprod-build @svc-auto-approve-bot
/src/third_party/**/*.bazel @10gen/devprod-build @svc-auto-approve-bot

# The following patterns are parsed from ./src/third_party/libmongocrypt/OWNERS.yml
@@ -19,20 +19,16 @@
        "javascript"
    ],
    "files.associations": {
        "SConstruct": "python",
        "SConscript": "python",
        "*.idl": "yaml",
    },
    "files.insertFinalNewline": true,
    "js/ts.implicitProjectConfig.target": "ES2020",
    "python.autoComplete.extraPaths": [
        "/opt/mongodbtoolchain/v4/share/gcc-11.3.0/python",
        "src/third_party/scons-3.1.2/scons-local-3.1.2"
    ],
    "python.defaultInterpreterPath": "python3-venv/bin/python",
    "python.analysis.extraPaths": [
        "/opt/mongodbtoolchain/v4/share/gcc-11.3.0/python",
        "src/third_party/scons-3.1.2/scons-local-3.1.2"
    ],
    "mypy-type-checker.path": [
        "${interpreter}",
12 OWNERS.yml
@@ -6,13 +6,7 @@ filters:
      metadata:
        emeritus_approvers:
          - visemet # TODO: add back to approvers once project work is finished
  - ".bazelignore":
      approvers:
        - 10gen/devprod-build
  - ".bazelrc":
      approvers:
        - 10gen/devprod-build
  - ".bazelversion":
  - ".bazel*":
      approvers:
        - 10gen/devprod-build
  - ".clang-format":
@@ -34,7 +28,7 @@ filters:
  - ".prettierrc":
      approvers:
        - 10gen/devprod-correctness
  - "/BUILD.bazel":
  - "BUILD.bazel":
      approvers:
        - 10gen/devprod-build
  - "copy.bara.sky":
@@ -67,7 +61,7 @@ filters:
  - "sbom.json":
      approvers:
        - 10gen/server-security
  - "SConstruct":
  - "MODULE.bazel*":
      approvers:
        - 10gen/devprod-build
  - "WORKSPACE.bazel":
@@ -64,7 +64,6 @@ a notice will be included in
| [pyiso8601] | MIT | 2.1.0 | unknown | |
| [RoaringBitmap/CRoaring] | Unknown License | v3.0.1 | | ✗ |
| [SchemaStore/schemastore] | Apache-2.0 | Unknown | | |
| [SCons - a Software Construction tool] | MIT | 3.1.2 | | ✗ |
| [smhasher] | Unknown License | Unknown | unknown | ✗ |
| [Snowball Stemming Algorithms] | BSD-3-Clause | 7b264ffa0f767c579d052fd8142558dc8264d795 | ✗ | ✗ |
| [subunit] | BSD-3-Clause, Apache-2.0 | 1.4.4 | unknown | |

@@ -90,7 +89,6 @@ a notice will be included in
[PCRE2]: http://www.pcre.org/
[Protobuf]: https://github.com/protocolbuffers/protobuf
[RoaringBitmap/CRoaring]: https://github.com/RoaringBitmap/CRoaring
[SCons - a Software Construction tool]: https://github.com/SCons/scons
[SchemaStore/schemastore]: https://www.schemastore.org/json/
[Snowball Stemming Algorithms]: https://github.com/snowballstem/snowball
[arximboldi/immer]: https://github.com/arximboldi/immer
7158 SConstruct
File diff suppressed because it is too large
@@ -2,7 +2,6 @@ package(default_visibility = ["//visibility:public"])

# Expose script for external usage through bazel.
exports_files([
    "scons_link_list.py",
    "install_rules.py",
])
@@ -2275,18 +2275,6 @@ config_setting(
    },
)

bool_flag(
    name = "scons_query",
    build_setting_default = False,
)

config_setting(
    name = "scons_query_enabled",
    flag_values = {
        "//bazel/config:scons_query": "True",
    },
)

# --------------------------------------
# crypto options
# --------------------------------------
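The deleted `scons_query` flag followed Bazel's usual flag-plus-config_setting pattern, where the setting is consumed via `select()`. A hypothetical Starlark sketch (the `example` target and `SCONS_QUERY` define are illustrative, not from the repo):

```
# BUILD.bazel sketch: how a config_setting like scons_query_enabled
# would typically have been consumed before its removal.
cc_library(
    name = "example",
    srcs = ["example.cpp"],
    defines = select({
        "//bazel/config:scons_query_enabled": ["SCONS_QUERY=1"],
        "//conditions:default": [],
    }),
)
```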
@@ -2321,9 +2309,8 @@ selects.config_setting_group(
# ssl_provider options
# --------------------------------------

# TODO(SERVER-94377): The `mongo_crypto` setting refers to the old
# `MONGO_CRYPTO` variable in SCons. The `ssl_provider` usually coincides with
# that, but if ssl is disabled it'll get overridden to `none`. That is,
# TODO(SERVER-94377): The `ssl_provider` usually coincides with
# mongo_crypto, but if ssl is disabled it'll get overridden to `none`. That is,
# regardless of operating system, you'll get `ssl_provider_none` here if ssl is
# disabled. We should figure out a more intuitive/robust solution to this.
@@ -409,12 +409,6 @@ detect_odr_violations = rule(
# build_enterprise_module
# =========

# Original documentation is:
# Comma-separated list of modules to build. Empty means none. Default is all.
# As Bazel will not support module building in the same way as SCons, the only
# module supported at present is the enterprise module.
# more: https://mongodb.slack.com/archives/C05V4F6GZ6J/p1705687513581639

build_enterprise_provider = provider(
    doc = """Build enterprise module""",
    fields = ["enabled"],
@@ -149,11 +149,6 @@ generate_config_header_rule = rule(
)

def generate_config_header(name, tags = [], **kwargs):
    write_target(
        name = name + "_gen_source_tag",
        target_name = name,
        tags = ["scons_link_lists"],
    )
    generate_config_header_rule(
        name = name,
        tags = tags + ["gen_source"],
@@ -50,11 +50,6 @@ render_template_rule = rule(
)

def render_template(name, tags = [], **kwargs):
    write_target(
        name = name + "_gen_source_tag",
        target_name = name,
        tags = ["scons_link_lists"],
    )
    render_template_rule(
        name = name,
        tags = tags + ["gen_source"],
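Both gen-source macros above share the same shape: a bookkeeping `write_target` plus the real rule. A hypothetical usage sketch showing the two targets one macro call creates (the `codes_gen` name is illustrative):

```
# Calling the macro once in a BUILD.bazel file creates two targets:
#   //src/mongo/foo:codes_gen                  (the render_template_rule itself, tagged "gen_source")
#   //src/mongo/foo:codes_gen_gen_source_tag   (the write_target, tagged "scons_link_lists")
render_template(
    name = "codes_gen",  # illustrative; real call sites pass rule-specific attributes too
)
```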
@@ -4,9 +4,7 @@ This document describes the Server Developer workflow for modifying Bazel build

# Creating a new BUILD.bazel file

Similar to SCons, a build target is defined in the directory where its source code exists. To create a target that compiles **src/mongo/hello_world.cpp**, you would create **src/mongo/BUILD.bazel**.

The Bazel equivalent of a SConscript file is a BUILD.bazel file.
A build target is defined in the directory where its source code exists. To create a target that compiles **src/mongo/hello_world.cpp**, you would create **src/mongo/BUILD.bazel**.

src/mongo/BUILD.bazel would contain:
@@ -33,8 +31,6 @@ The full target name is a combination between the directory of the BUILD.bazel f

Bazel makes use of static analysis wherever possible to improve execution and querying speed. As part of this, source and header files must not be declared dynamically (ex. glob, wildcard, etc). Instead, you'll need to manually add a reference to each header or source file you add into your build target.

The divergence from SCons is that now source files have to be declared in addition to header files.

    mongo_cc_binary(
        name = "hello_world",
        srcs = [
@@ -84,14 +80,6 @@ If a library or binary depends on another library, this must be declared in the
    ],
}

## Depending on a Bazel Library in a SCons Build Target

During migration from SCons to Bazel, the Build Team has created an integration layer between the two while working towards converting all SCons targets to Bazel targets.

Targets which are built by Bazel will be labeled as ThinTarget builder types. You can reference them by the same name you would use in SCons in LIBDEPS lists, as in the sketch below.

If adding a new library to the build, check to see if it should be added as a Bazel or SCons library. This will depend on how deep it is in the dependency tree. You can ask the build team at #ask-devprod-build for advice on whether a given library should be added to the Bazel or SCons part of the build.
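A minimal SConscript sketch of the LIBDEPS pattern described above (SConscript files are plain Python; the target and library names are hypothetical):

```
# A SCons target can list a Bazel-built ThinTarget in LIBDEPS by the same
# name it had in SCons; the integration layer resolves it to the Bazel output.
env.CppUnitTest(
    target="my_test",
    source=["my_test.cpp"],
    LIBDEPS=[
        "$BUILD_DIR/mongo/some_bazel_built_lib",  # hypothetical ThinTarget
    ],
)
```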

## Running clang-tidy via Bazel

Note: This feature is still in development; see https://jira.mongodb.org/browse/SERVER-80396 for details.
@@ -2,23 +2,6 @@

MongoDB uses EngFlow to enable remote execution with Bazel. This dramatically speeds up the build process, but is only available to internal MongoDB employees.

To install the necessary credentials to enable remote execution, run scons.py with any build command, then follow the setup instructions it prints out. Or:
Bazel uses a wrapper script to check the credentials on each invocation. If for some reason that's not working, you can also perform this process manually:

(Only if not in the Engineering org)

- Request access to the MANA group https://mana.corp.mongodbgov.com/resources/659ec4b9bccf3819e5608712

(For everyone)

- Go to https://sodalite.cluster.engflow.com/gettingstarted
- Login with OKTA, then click the "GENERATE AND DOWNLOAD MTLS CERTIFICATE" button
- (If logging in with OKTA doesn't work) Login with Google using your MongoDB email, then click the "GENERATE AND DOWNLOAD MTLS CERTIFICATE" button
- On your local system (usually your MacBook), open a shell terminal and, after setting the variables on the first three lines, run:

    REMOTE_USER=<SSH User from https://spruce.mongodb.com/spawn/host>
    REMOTE_HOST=<DNS Name from https://spruce.mongodb.com/spawn/host>
    ZIP_FILE=~/Downloads/engflow-mTLS.zip

    curl https://raw.githubusercontent.com/mongodb/mongo/master/buildscripts/setup_engflow_creds.sh -o setup_engflow_creds.sh
    chmod +x ./setup_engflow_creds.sh
    ./setup_engflow_creds.sh $REMOTE_USER $REMOTE_HOST $ZIP_FILE
python buildscripts/engflow_auth.py
@@ -1,5 +1,4 @@
HEADER_DEP_SUFFIX = "_header_dep"
LINK_DEP_SUFFIX = "_link_dep"

def create_header_dep_impl(ctx):
    compilation_context = cc_common.create_compilation_context(
@@ -67,7 +67,7 @@ mongo_pretty_printer_test = rule(
        # TODO have a way to get to gdb from inside bazel
        #"_gdb": attr.label(allow_single_file = True, default = "//:gdb"),
        "_pretty_printer_creation_script": attr.label(allow_single_file = True, default = "//bazel/install_rules:pretty_printer_test_creator.py"),
        "_pip_requirements_script": attr.label(allow_single_file = True, default = "//site_scons/mongo:pip_requirements.py"),
        "_pip_requirements_script": attr.label(allow_single_file = True, default = "//buildscripts:pip_requirements.py"),
        "_pretty_printer_launcher_infile": attr.label(allow_single_file = True, default = "//src/mongo/util:pretty_printer_test_launcher.py.in"),
    },
    doc = "Create pretty printer tests",
@@ -11,9 +11,7 @@ load("@rules_proto//proto:defs.bzl", "proto_library")
load(
    "//bazel:header_deps.bzl",
    "HEADER_DEP_SUFFIX",
    "LINK_DEP_SUFFIX",
    "create_header_dep",
    "create_link_deps",
)
load(
    "//bazel:separate_debug.bzl",
@@ -502,13 +500,6 @@ CLANG_WARNINGS_COPTS = select({
    # only) flag that turns it on.
    "-Wunused-exception-parameter",

    # TODO: Note that the following two flags are added to CCFLAGS even
    # though they are really C++ specific. We need to do this because SCons
    # passes CXXFLAGS *before* CCFLAGS, but CCFLAGS contains -Wall, which
    # re-enables the warnings we are trying to suppress. In the future, we
    # should move all warning flags to CCWARNFLAGS and CXXWARNFLAGS and add
    # these to CCOM and CXXCOM as appropriate.
    #
    # Clang likes to warn about unused private fields, but some of our
    # third_party libraries have such things.
    "-Wno-unused-private-field",
@@ -1195,9 +1186,6 @@ DETECT_ODR_VIOLATIONS_LINKFLAGS = select({
GDWARF_FEATURES = select({
    "//bazel/config:linux_clang": ["dwarf32"],
    "//bazel/config:linux_gcc_fission": ["dwarf32"],  # gdb crashes with -gsplit-dwarf and -gdwarf64
    # SCons implementation originally used a compiler check to verify that
    # -gdwarf64 was supported. If this creates incompatibility issues, we may
    # need to fallback to -gdwarf32 in certain cases.
    "//bazel/config:linux_gcc": ["dwarf64"],
    # SUSE15 builds system libraries with dwarf32; use dwarf32 to keep consistent
    "//bazel/config:suse15_gcc": ["dwarf32"],
@@ -1861,14 +1849,6 @@ def mongo_cc_library(
        header_deps = header_deps,
    )

    create_link_deps(
        name = name + LINK_DEP_SUFFIX,
        target_name = name,
        link_deps = [name] + deps + cc_deps,
        tags = ["scons_link_lists"],
        target_compatible_with = target_compatible_with + enterprise_compatible,
    )

    # Create a cc_library entry to generate a shared archive of the target.
    cc_library(
        name = name + SHARED_ARCHIVE_SUFFIX,
@@ -2188,21 +2168,6 @@ def _mongo_cc_binary_and_test(
        "env": env | SANITIZER_ENV,
    } | kwargs

    create_link_deps(
        name = name + LINK_DEP_SUFFIX,
        target_name = name,
        link_deps = all_deps,
        tags = ["scons_link_lists"],
        testonly = testonly,
        target_compatible_with = target_compatible_with + enterprise_compatible,
    )

    write_sources(
        name = name + "_sources_list",
        sources = srcs,
        tags = ["scons_link_lists"],
    )

    original_tags = list(args["tags"])
    if _program_type == "binary":
        args["tags"] += ["intermediate_target"]
@@ -2548,11 +2513,6 @@ write_target = rule(
)

def idl_generator(name, tags = [], **kwargs):
    write_target(
        name = name + "_gen_source_tag",
        target_name = name,
        tags = ["scons_link_lists"],
    )
    idl_generator_rule(
        name = name,
        tags = tags + ["gen_source"],
@@ -2648,12 +2608,6 @@ def mongo_proto_library(
        **kwargs
    )

    dummy_file(
        name = name + "_exclude_link",
        output = "lib" + name + ".so.exclude_lib",
        tags = ["scons_link_lists"],
    )

def mongo_cc_proto_library(
    name,
    deps,
@@ -10,55 +10,7 @@ REPO_ROOT = pathlib.Path(__file__).parent.parent.parent
sys.path.append(str(REPO_ROOT))


def check_for_missing_test_stubs():
    bazel_tests = (
        subprocess.check_output(
            [
                "bazel",
                "query",
                "attr(tags, 'mongo_unittest', //...) intersect attr(tags, 'final_target', //...)",
            ],
            stderr=subprocess.DEVNULL,
        )
        .decode("utf-8")
        .splitlines()
    )
    bazel_tests = [bazel_test.split(":")[1] for bazel_test in bazel_tests]

    scons_targets = (
        subprocess.check_output(
            ["grep -rPo 'target\s*=\s*\"\K\w*' ./src | awk -F: '{print $2}'"],
            stderr=subprocess.STDOUT,
            shell=True,
        )
        .decode("utf-8")
        .splitlines()
    )

    missing_tests = []
    for bazel_test in bazel_tests:
        if bazel_test not in scons_targets:
            missing_tests += [bazel_test]

    if len(missing_tests) == 0:
        print("All bazel tests have SConscript stubs")
        return True

    print("Tests found without SConscript stubs:")
    for missing_test in missing_tests:
        print(missing_test)
    print("\nPlease add a stub in the SConscript file in the directory of each test similar to:")
    print("""
env.CppUnitTest(
    target="test_name",
    source=[],
)
""")
    return False


def create_build_files_in_new_js_dirs():
def create_build_files_in_new_js_dirs() -> None:
    base_dirs = ["src/mongo/db/modules/enterprise/jstests", "jstests"]
    for base_dir in base_dirs:
        for root, dirs, _ in os.walk(base_dir):
@@ -86,7 +38,7 @@ js_library(
            print(f"Created BUILD.bazel in {full_dir}")


def list_files_with_targets(bazel_bin: str):
def list_files_with_targets(bazel_bin: str) -> List:
    return [
        line.strip()
        for line in subprocess.run(
@@ -103,7 +55,7 @@ def list_files_without_targets(
    type_name: str,
    ext: str,
    dirs: List[str],
):
) -> bool:
    # rules_lint only checks files that are in targets, verify that all files in the source tree
    # are contained within targets.
@@ -184,7 +136,7 @@ def list_files_without_targets(
    return True


def run_rules_lint(bazel_bin, args) -> bool:
def run_rules_lint(bazel_bin: str, args: List[str]) -> bool:
    if platform.system() == "Windows":
        print("eslint not supported on windows")
        return False
@@ -201,9 +153,6 @@ def run_rules_lint(bazel_bin, args) -> bool:
    ):
        return False

    if not check_for_missing_test_stubs():
        return False

    # Default to linting everything if no path was passed in
    if len([arg for arg in args if not arg.startswith("--")]) == 0:
        args = ["//..."] + args
@@ -1 +1 @@
This directory exists to manage a Buildfarm; see docs/bazel.md for more details.
This directory exists to manage a Buildfarm
@@ -5,7 +5,9 @@ exports_files([
    "candle_wrapper.py",
    "cheetah_source_generator.py",
    "clang_tidy_config_gen.py",
    "jstoh.py",
    "msitrim.py",
    "pip_requirements.py",
])

py_binary(
@@ -1,86 +0,0 @@
import datetime

import mongo_tooling_metrics.client as metrics_client
import pkg_resources
import pymongo
from mongo_tooling_metrics.lib.top_level_metrics import (
    NinjaToolingMetrics,
    ResmokeToolingMetrics,
    SConsToolingMetrics,
)
from pydantic import ValidationError

# Check cluster connectivity
try:
    client = pymongo.MongoClient(
        host=metrics_client.INTERNAL_TOOLING_METRICS_HOSTNAME,
        username=metrics_client.INTERNAL_TOOLING_METRICS_USERNAME,
        password=metrics_client.INTERNAL_TOOLING_METRICS_PASSWORD,
    )
    client.server_info()
except Exception as exc:
    print("Could not connect to Atlas cluster")
    raise exc

metrics_classes = {
    "ninja": NinjaToolingMetrics,
    "scons": SConsToolingMetrics,
    "resmoke": ResmokeToolingMetrics,
}


def get_metrics_data(source, lookback=30):
    try:
        # Get SCons metrics for the lookback period
        tooling_metrics_version = pkg_resources.get_distribution("mongo-tooling-metrics").version
        lookback_datetime = datetime.datetime.utcnow() - datetime.timedelta(days=lookback)
        last_week_metrics = client.metrics.tooling_metrics.find(
            {
                "source": source,
                "utc_starttime": {"$gt": lookback_datetime},
                "tooling_metrics_version": tooling_metrics_version,
            }
        )

        malformed_metrics = []
        invalid_metrics = []
        total_docs = 0

        # Find any malformed/invalid documents in the cluster
        for doc in last_week_metrics:
            total_docs += 1
            try:
                metrics = metrics_classes[source](**doc)
                if metrics.is_malformed():
                    malformed_metrics.append(doc["_id"])
            except ValidationError:
                invalid_metrics.append(doc["_id"])

        metrics_detailed = (
            f"METRICS DETAILED ({source}):\n"
            f"malformed_metrics_last_week: {malformed_metrics}\n"
            f"invalid_metrics_last_week: {invalid_metrics}\n"
            f"total_docs_last_week: {total_docs}\n"
            f"tooling_metrics_version: {tooling_metrics_version}\n"
        )
        metrics_overview = (
            f"METRICS OVERVIEW ({source}):\n"
            f"malformed_metrics_last_week: {len(malformed_metrics)} ({len(malformed_metrics)/total_docs*100:.2f}%)\n"
            f"invalid_metrics_last_week: {len(invalid_metrics)} ({len(invalid_metrics)/total_docs*100:.2f}%)\n"
            f"total_docs_last_week: {total_docs}\n"
            f"tooling_metrics_version: {tooling_metrics_version}\n"
        )

        print(metrics_overview)
        print(metrics_detailed)

        return metrics_overview

    except Exception as exc:
        print("Unexpected failure while getting metrics")
        raise exc


ninja_metrics_overview = get_metrics_data("ninja")
scons_metrics_overview = get_metrics_data("scons")
resmoke_metrics_overview = get_metrics_data("resmoke")
@@ -315,8 +315,8 @@ def get_list_from_lines(lines):


def _get_build_dir():
    """Return the location of the scons' build directory."""
    return os.path.join(git.get_base_dir(), "build")
    """Return the location of the default clang cache directory."""
    return os.path.join(git.get_base_dir(), ".clang_format_cache")


def _lint_files(clang_format, files):
@@ -183,7 +183,7 @@ def _run_tidy(args, parser_defaults):
    if args.compile_commands == parser_defaults.compile_commands:
        print(
            f"Could not find compile commands: '{args.compile_commands}', to generate it, use the build command:\n\n"
            + "python3 buildscripts/scons.py --build-profile=compiledb compiledb\n"
            + "bazel build compiledb\n"
        )
    else:
        print(f"Could not find compile commands: {args.compile_commands}")
@@ -196,7 +196,7 @@ def _run_tidy(args, parser_defaults):
    if args.clang_tidy_cfg == parser_defaults.clang_tidy_cfg:
        print(
            f"Could not find config file: '{args.clang_tidy_cfg}', to generate it, use the build command:\n\n"
            + "python3 buildscripts/scons.py --build-profile=compiledb compiledb\n"
            + "bazel build compiledb\n"
        )
    else:
        print(f"Could not find config file: {args.clang_tidy_cfg}")
@@ -1,380 +0,0 @@
import concurrent.futures
import json
import os
import platform
import shlex
import shutil
import subprocess
import sys
import traceback
from typing import Annotated, List

import typer


def work(target_library: str, silent: bool, cpu_count: int, cc: List[str]):
    headers = set()
    original_headers = set()

    def get_headers(line):
        nonlocal headers
        try:
            with open(target_library + ".bazel_headers") as f:
                bazel_headers = [line.strip() for line in f.readlines()]
                bazel_headers += [
                    "src/mongo/platform/basic.h",
                    "src/mongo/platform/windows_basic.h",
                ]

            with open(target_library + ".env_vars") as f:
                tmp_env_vars = json.load(f)
                env_vars = {}
                # subprocess requires only strings
                for k, v in tmp_env_vars.items():
                    env_vars[str(k)] = str(v)

            for command in cc:
                cmd_output = command["output"].replace("\\", "/").strip("'").strip('"')
                line_output = line.replace("\\", "/")

                if cmd_output == line_output:
                    os.makedirs(os.path.dirname(line), exist_ok=True)
                    if os.name == "nt":
                        header_arg = " /showIncludes"
                    else:
                        header_arg = " -H"

                    if not silent:
                        print(f"compiling {line}")

                    p = subprocess.run(
                        shlex.split((command["command"].replace("\\", "/") + header_arg)),
                        env=env_vars,
                        capture_output=True,
                        text=True,
                    )
                    if p.returncode != 0:
                        print(f"Error compiling, exitcode: {p.returncode}", file=sys.stderr)
                        print(f"STDOUT: {p.stdout}", file=sys.stderr)
                        print(f"STDERR: {p.stderr}", file=sys.stderr)
                        sys.exit(1)
                    if os.name == "nt":
                        for line in p.stdout.split("\n"):
                            line = (
                                line.replace("Note: including file:", "")
                                .strip(" ")
                                .replace("\\", "/")
                            )

                            if not line.startswith(os.getcwd().replace("\\", "/")):
                                continue

                            line = os.path.relpath(
                                line, start=os.getcwd().replace("\\", "/")
                            ).replace("\\", "/")
                            if line not in bazel_headers:
                                if line.startswith("src/") or line.startswith("bazel-out/"):
                                    original_headers.add(line)
                                line = "//" + line
                                line = ":".join(line.rsplit("/", 1))

                                headers.add(line)
                    else:
                        for line in p.stderr.split("\n"):
                            if ". src/" in line or ". bazel-out/" in line:
                                while line.startswith("."):
                                    line = line[1:]
                                line = line.replace("\\", "/")

                                if line[1:] not in bazel_headers:
                                    original_headers.add(line[1:])
                                line = "//" + line[1:]
                                line = ":".join(line.rsplit("/", 1))

                                headers.add(line)
        except Exception as exc:
            print(traceback.format_exc(), file=sys.stderr)
            raise exc

    sources = []
    with open(target_library + ".obj_files") as f:
        lines = f.readlines()
        for line in lines:
            line = line.strip()
            line = line.replace("build/opt", "//src")
            line = line[: line.find(".")] + ".cpp"
            src_header = os.path.splitext(line[2:])[0] + ".h"
            if os.path.exists(src_header):
                src_header = "//" + ":".join(src_header.rsplit("/", 1))
                headers.add(src_header)
            line = ":".join(line.rsplit("/", 1))
            if line.endswith("_gen.cpp"):
                line = line[:-4]
            sources.append(line)

    with concurrent.futures.ThreadPoolExecutor(max_workers=cpu_count) as executor:
        jobs = {executor.submit(get_headers, line.strip()): line.strip() for line in lines}
        for completed_job in concurrent.futures.as_completed(jobs):
            if not silent:
                print(f"finished {jobs[completed_job]}")

    with open(".bazel_include_info.json") as f:
        bazel_include_info = json.load(f)

    header_map = bazel_include_info["header_map"]
    gen_header_map = bazel_include_info["gen_header_map"]
    bazel_exec = bazel_include_info["bazel_exec"]
    bazel_config = bazel_include_info["config"]

    global_headers = (
        "src/mongo:config.h",
        "src/mongo/config.h",
        "src/mongo/platform/basic.h",
        "src/mongo/platform/windows_basic.h",
    )

    reverse_header_map = {}
    for k, v in header_map.items():
        for hdr in v:
            if not hdr or hdr.endswith(global_headers):
                continue
            bazel_header = "//" + hdr.replace("\\", "/")
            bazel_header = ":".join(bazel_header.rsplit("/", 1))
            if bazel_header.startswith("//src/third_party/SafeInt"):
                reverse_header_map[bazel_header] = ["//src/third_party/SafeInt:headers"]
            elif bazel_header.startswith("//src/third_party/immer"):
                reverse_header_map[bazel_header] = ["//src/third_party/immer:headers"]
            elif bazel_header in reverse_header_map:
                if bazel_header.startswith("//src/third_party/"):
                    continue
                reverse_header_map[bazel_header].append(k)
            else:
                reverse_header_map[bazel_header] = [k]

    for k, v in gen_header_map.items():
        for hdr in v:
            if not hdr or hdr.endswith(global_headers):
                continue
            bazel_header = "//" + hdr.replace("\\", "/")
            bazel_header = ":".join(bazel_header.rsplit("/", 1))
            if bazel_header not in reverse_header_map:
                reverse_header_map[bazel_header] = [k]
            else:
                reverse_header_map[bazel_header].append(k)

    recommended_deps = set()
    minimal_headers = set()

    basename_sources = [os.path.splitext(src.rsplit(":", 1)[1])[0] for src in sources]
    for header in headers:
        header_basename = os.path.splitext(header.rsplit(":", 1)[1])[0]
        if header_basename in basename_sources:
            minimal_headers.add(header)
            continue

        if header in reverse_header_map:
            found = False
            for lib in reverse_header_map[header]:
                recommended_deps.add(lib)
        else:
            if not header.endswith(global_headers):
                minimal_headers.add(header)

    deps_order_by_height = []
    deps_queries = {}

    with open(target_library + ".bazel_deps") as f:
        original_deps = [line.strip() for line in f.readlines()]

    for dep in recommended_deps | set(original_deps):
        p = subprocess.run(
            [bazel_exec, "cquery"]
            + bazel_config
            + [f'kind("extract_debuginfo|idl_generator|render_template", deps("@{dep}"))'],
            capture_output=True,
            text=True,
        )
        deps_queries[dep] = [
            line.split(" ")[0] for line in p.stdout.splitlines() if line.startswith("//")
        ]
        deps_order_by_height.append((dep, len(deps_queries[dep])))

    deps_order_by_height.sort(key=lambda x: x[1])

    deps_order_by_height = [dep[0] for dep in deps_order_by_height]
    optimal_header_deps = set()
    for header in headers:
        if header in minimal_headers:
            continue

        path_header = "/".join(header.rsplit(":", 1))
        path_header = path_header[2:]
        for dep in deps_order_by_height:
            if dep in header_map and path_header in header_map[dep]:
                optimal_header_deps.add(dep)
                break
            found = False
            for other_dep in deps_order_by_height:
                if other_dep in gen_header_map:
                    continue
                if dep in deps_queries[other_dep]:
                    optimal_header_deps.add(other_dep)
                    found = True
                    break
            if found:
                continue
            if dep in gen_header_map:
                minimal_headers.add(dep)
            else:
                raise Exception(
                    f"Should not happen, did not find way to add dep {dep} for {target_library}"
                )

    optimal_header_deps = list(optimal_header_deps)

    working_deps = optimal_header_deps.copy()
    for dep in optimal_header_deps:
        if dep in working_deps:
            for test_dep in optimal_header_deps:
                if test_dep == dep:
                    continue
                if test_dep in working_deps and test_dep in deps_queries[dep]:
                    working_deps.remove(test_dep)

    link_deps = []
    header_deps = []
    for dep in sorted(list(set(list(working_deps) + list(set(original_deps))))):
        if dep in original_deps:
            link_deps.append(dep)
        else:
            header_deps.append(dep)

    target_name = os.path.splitext(os.path.basename(target_library))[0]
    if target_name.startswith("lib"):
        target_name = target_name[3:]

    bazel_target = f"{target_library}\n"
    bazel_target += "=" * 50 + "\n"
    local_bazel_path = os.path.dirname(target_library.replace("build/opt", "//src")) + ":"
    bazel_target += "mongo_cc_library(\n"
    bazel_target += f'    name = "{target_name}",\n'
    if sources:
        bazel_target += "    srcs = [\n"
        for src in sorted([src.replace(local_bazel_path, "") for src in sources]):
            bazel_target += f'        "{src}",\n'
        bazel_target += "    ],\n"
    if minimal_headers:
        bazel_target += "    hdrs = [\n"
        for header in sorted([header.replace(local_bazel_path, "") for header in minimal_headers]):
            bazel_target += f'        "{header}",\n'
        bazel_target += "    ],\n"
    if header_deps:
        bazel_target += "    header_deps = [\n"
        for dep in sorted([dep.strip().replace(local_bazel_path, "") for dep in header_deps]):
            bazel_target += f'        "{dep}",\n'
        bazel_target += "    ],\n"
    if link_deps:
        bazel_target += "    deps = [\n"
        for dep in sorted([dep.strip().replace(local_bazel_path, "") for dep in link_deps]):
            bazel_target += f'        "{dep}",\n'
        bazel_target += "    ],\n"
    bazel_target += ")\n"
    return bazel_target


def main(
    target_libraries: Annotated[List[str], typer.Argument()],
    silent: Annotated[bool, typer.Option()] = False,
    skip_scons: Annotated[bool, typer.Option()] = False,
    debug_mode: Annotated[bool, typer.Option()] = False,
):
    extra_args = []
    if os.name == "nt":
        extra_args += [
            "CPPPATH=C:\sasl\include",
            "LIBPATH=C:\sasl\lib",
        ]
        target_library = os.path.join(
            os.path.dirname(target_library), os.path.basename(target_library)[3:-2] + "lib"
        )

    path = shutil.which("icecc")
    if path is None:
        extra_args += ["ICECC="]

    # Define separate functions instead of using lambdas
    def target_fmt_nt(target_library: str) -> str:
        return os.path.join(
            os.path.dirname(target_library), os.path.basename(target_library)[3:-2] + "lib"
        )

    def target_fmt_darwin(target_library: str) -> str:
        return target_library[:-2] + "a"

    def target_fmt_default(x: str) -> None:
        return None

    if os.name == "nt":
        target_fmt = target_fmt_nt
    elif platform.system() == "Darwin":
        target_fmt = target_fmt_darwin
    else:
        target_fmt = target_fmt_default

    map(target_fmt, target_libraries)

    cmd = [
        sys.executable,
        "buildscripts/scons.py",
        "--build-profile=opt",
        " ".join(
            [f"--bazel-includes-info={target_library}" for target_library in target_libraries]
        ),
        "--libdeps-linting=off",
        "--ninja=disabled",
        "compiledb",
    ] + extra_args

    if not skip_scons:
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)

        while True:
            line = p.stdout.readline()
            if not line:
                break
            print(line.strip(), file=sys.stderr)

        _, _ = p.communicate()

        if p.returncode != 0:
            print(f"SCons build failed, exit code {p.returncode}", file=sys.stderr)
            sys.exit(1)

    with open("compile_commands.json") as f:
        cc = json.load(f)
    if platform.system() == "Linux":
        cpu_count = len(os.sched_getaffinity(0)) + 4
    else:
        cpu_count = os.cpu_count() + 4

    # Process pool makes it harder to debug what is happening,
    # so for debug mode we disable the process pool; things happen in order and
    # you can just print from the process.
    if debug_mode:
        bazel_targets = []
        for target_library in target_libraries:
            bazel_targets += [work(target_library, silent, cpu_count, cc)]
    else:
        with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor:
            jobs = {
                executor.submit(work, target_library, silent, cpu_count, cc): target_library
                for target_library in target_libraries
            }
            bazel_targets = [job.result() for job in concurrent.futures.as_completed(jobs)]

    print("====== Bazel Targets ======\n")
    print("\n".join(bazel_targets))


if __name__ == "__main__":
    typer.run(main)
@@ -142,11 +142,7 @@ def is_terminated(lines):


def get_next_code(seen, server_ticket=0):
    """Find next unused assertion code.

    Called by: SConstruct and main()
    Since SConstruct calls us, codes[] must be global OR WE REPARSE EVERYTHING
    """
    """Find next unused assertion code."""
    if not codes:
        (_, _, seen) = read_error_codes()
@@ -169,12 +165,6 @@ def get_next_code(seen, server_ticket=0):
    return iter(range(highest + 1, MAXIMUM_CODE))


def check_error_codes():
    """Check error codes as SConstruct expects a boolean response from this function."""
    (_, errors, _) = read_error_codes()
    return len(errors) == 0


def read_error_codes(src_root="src/mongo"):
    """Define callback, call parse_source_files() with callback, save matches to global codes list."""
    seen = {}
@@ -1,221 +0,0 @@
#!/usr/bin/env python3
import os
import platform
import sys

from shrub.v2 import BuildVariant, FunctionCall, ShrubProject, Task, TaskGroup
from shrub.v2.command import BuiltInCommand


def main():
    tasks = {
        "windows_tasks": {},
        "linux_x86_64_tasks": {},
        "linux_arm64_tasks": {},
        "macos_tasks": {},
    }

    tasks_prefixes = {
        "windows_tasks": "build_metrics_msvc",
        "linux_x86_64_tasks": "build_metrics_x86_64",
        "linux_arm64_tasks": "build_metrics_arm64",
        "macos_tasks": "build_metrics_xcode",
    }

    task_group_targets = {
        "dynamic": [
            "install-devcore",
            "install-all-meta generate-libdeps-graph",
        ],
        "static": [
            "install-devcore",
            "install-all-meta-but-not-unittests",
        ],
    }

    def create_build_metric_task_steps(task_build_flags, task_targets, split_num):
        evg_flags = f"--debug=time,count,memory VARIANT_DIR=metrics BUILD_METRICS_EVG_TASK_ID={os.environ['task_id']} BUILD_METRICS_EVG_BUILD_VARIANT={os.environ['build_variant']}"
        cache_flags = "--cache-dir=$PWD/scons-cache-{split_num} --cache-signature-mode=validate"

        scons_task_steps = [
            f"{evg_flags} --build-metrics=build_metrics_{split_num}.json",
            f"{evg_flags} {cache_flags} --cache-populate --build-metrics=populate_cache_{split_num}.json",
            f"{evg_flags} --clean",
            f"{evg_flags} {cache_flags} --build-metrics=pull_cache_{split_num}.json",
        ]

        task_steps = [
            FunctionCall(
                "scons compile",
                {
                    "patch_compile_flags": f"{task_build_flags} {step_flags}",
                    "targets": task_targets,
                    "compiling_for_test": "true",
                },
            )
            for step_flags in scons_task_steps
        ]
        return task_steps

    def create_build_metric_task_list(task_list, link_model, build_flags):
        tasks[task_list][link_model] = []
        prefix = tasks_prefixes[task_list]
        index = 0
        for index, target in enumerate(task_group_targets[link_model]):
            tasks[task_list][link_model].append(
                Task(
                    f"{prefix}_{link_model}_build_split_{index}_{target.replace(' ', '_')}",
                    create_build_metric_task_steps(build_flags, target, index),
                )
            )
        tasks[task_list][link_model].append(
            Task(
                f"{prefix}_{link_model}_build_split_{index+1}_combine_metrics",
                [
                    FunctionCall("combine build metrics"),
                    FunctionCall("attach build metrics"),
                    FunctionCall("print top N metrics"),
                ],
            )
        )

    #############################
    if sys.platform == "win32":
        build_flags = "--cache=nolinked"

        create_build_metric_task_list(
            "windows_tasks",
            "static",
            build_flags,
        )

    ##############################
    elif sys.platform == "darwin":
        for link_model in ["dynamic", "static"]:
            build_flags = f"--link-model={link_model} --force-macos-dynamic-link" + (
                " --cache=nolinked" if link_model == "static" else " --cache=all"
            )

            create_build_metric_task_list(
                "macos_tasks",
                link_model,
                build_flags,
            )

    ##############################
    else:
        for toolchain in ["v4", "v5"]:
            # possibly we want to add clang to the mix here, so leaving as an easy drop in
            for compiler in ["gcc"]:
                for link_model in ["dynamic", "static"]:
                    build_flags = (
                        f"BUILD_METRICS_BLOATY=/opt/mongodbtoolchain/{toolchain}/bin/bloaty "
                        + f"--variables-files=etc/scons/mongodbtoolchain_{toolchain}_{compiler}.vars "
                        + f"--link-model={link_model}"
                        + (" --cache=nolinked" if link_model == "static" else " --cache=all")
                    )

                    create_build_metric_task_list(
                        "linux_x86_64_tasks",
                        link_model,
                        build_flags,
                    )

                    create_build_metric_task_list(
                        "linux_arm64_tasks",
                        link_model,
                        build_flags,
                    )

    def create_task_group(target_platform, tasks):
        task_group = TaskGroup(
            name=f"build_metrics_{target_platform}_task_group_gen",
            tasks=tasks,
            max_hosts=1,
            setup_group=[
                BuiltInCommand("manifest.load", {}),
                FunctionCall("git get project and add git tag"),
                FunctionCall("set task expansion macros"),
                FunctionCall("f_expansions_write"),
                FunctionCall("kill processes"),
                FunctionCall("cleanup environment"),
                FunctionCall("set up venv"),
                FunctionCall("set up libdeps venv"),
                FunctionCall("upload pip requirements"),
                FunctionCall("f_expansions_write"),
                FunctionCall("configure evergreen api credentials"),
                FunctionCall("get buildnumber"),
                FunctionCall("f_expansions_write"),
                FunctionCall("generate compile expansions"),
                FunctionCall("f_expansions_write"),
            ],
            setup_task=[
                FunctionCall("f_expansions_write"),
                FunctionCall("apply compile expansions"),
                FunctionCall("set task expansion macros"),
                FunctionCall("f_expansions_write"),
            ],
            teardown_group=[
                FunctionCall("f_expansions_write"),
                FunctionCall("cleanup environment"),
            ],
            teardown_task=[
                FunctionCall("f_expansions_write"),
                FunctionCall("attach scons logs"),
                FunctionCall("kill processes"),
                FunctionCall("save disk statistics"),
                FunctionCall("save system resource information"),
                FunctionCall(
                    "remove files", {"files": " ".join(["src/build", "src/scons-cache", "*.tgz"])}
                ),
            ],
            setup_group_can_fail_task=True,
        )
        return task_group

    if sys.platform == "win32":
        variant = BuildVariant(
            name="enterprise-windows-build-metrics",
            activate=True,
        )
        variant.add_task_group(
            create_task_group("windows", tasks["windows_tasks"]["static"]),
            ["windows-2022-xlarge"],
        )
    elif sys.platform == "darwin":
        variant = BuildVariant(
            name="macos-enterprise-build-metrics",
            activate=True,
        )
        for link_model, tasks in tasks["macos_tasks"].items():
            variant.add_task_group(
                create_task_group(f"macos_{link_model}", tasks), ["macos-14-arm64"]
            )
    else:
        if platform.machine() == "x86_64":
            variant = BuildVariant(
                name="enterprise-rhel-8-64-bit-build-metrics",
                activate=True,
            )
            for link_model, tasks in tasks["linux_x86_64_tasks"].items():
                variant.add_task_group(
                    create_task_group(f"linux_X86_64_{link_model}", tasks), ["rhel8.8-xlarge"]
                )
        else:
            variant = BuildVariant(
                name="enterprise-rhel-8-aarch64-build-metrics",
                activate=True,
            )
            for link_model, tasks in tasks["linux_arm64_tasks"].items():
                variant.add_task_group(
                    create_task_group(f"linux_arm64_{link_model}", tasks),
                    ["amazon2023-arm64-large"],
                )

    project = ShrubProject({variant})
    with open("build_metrics_task_gen.json", "w") as fout:
        fout.write(project.json())


if __name__ == "__main__":
    main()
@@ -84,22 +84,7 @@ def useful_print(fixes: Dict) -> None:

class HeaderFixer:
    def __init__(self):
        # TODO(SERVER-94781) Remove SCons dep
        subprocess.run(
            [
                sys.executable,
                "buildscripts/scons.py",
                "--build-profile=opt",
                "--bazel-includes-info=dummy",  # TODO Allow no library to be passed.
                "--libdeps-linting=off",
                "--ninja=disabled",
                "$BUILD_ROOT/scons/$VARIANT_DIR/sconf_temp",
            ]
        )
        with open(".bazel_include_info.json") as f:
            bazel_include_info = json.load(f)
        self.bazel_exec = bazel_include_info["bazel_exec"]
        self.bazel_config = bazel_include_info["config"]
        self.bazel_exec = "bazel"
        auth = JiraAuth()
        auth.pat = os.environ["JIRA_TOKEN"]
        self.jira_client = JiraClient(JIRA_SERVER, auth, dry_run=False)
@@ -110,13 +95,9 @@ class HeaderFixer:
        self, query: str, config: bool = False, args: List[str] = []
    ) -> subprocess.CompletedProcess:
        query_cmd = "cquery"
        config_args = self.bazel_config
        if not config:
            query_cmd = "query"
            config_args = []

        p = subprocess.run(
            [self.bazel_exec, query_cmd] + config_args + args + [query],
            [self.bazel_exec, query_cmd] + args + [query],
            capture_output=True,
            text=True,
            check=True,
@@ -125,7 +106,7 @@ class HeaderFixer:

    def _build(self, target: str) -> subprocess.CompletedProcess:
        p = subprocess.run(
            [self.bazel_exec, "build"] + self.bazel_config + [target],
            [self.bazel_exec, "build"] + [target],
            capture_output=True,
            text=True,
        )
@@ -19,7 +19,7 @@ VERSION_JSON = "version.json"
def generate_expansions():
    """Entry point for the script.

    This calls functions to generate version and scons cache expansions and
    This calls functions to generate version expansions and
    writes them to a file.
    """
    args = parse_args()
@@ -1,65 +0,0 @@
# IWYU Analysis tool

This tool will run
[include-what-you-use](https://github.com/include-what-you-use/include-what-you-use)
(IWYU) analysis across the codebase via `compile_commands.json`.

The `iwyu_config.yml` file consists of the current options and automatic
pragma marking. You can exclude files from the analysis here.

The tool has two main modes of operation, `fix` and `check` modes. `fix`
mode will attempt to make changes to the source files based off IWYU's
suggestions. The check mode will simply check if there are any suggestions
at all.

`fix` mode will take a long time to run, as the tool needs to rerun any
source in which an underlying header was changed to ensure things are not
broken, and so it ends up recompiling the codebase several times over.

For more information please refer to the script's `--help` option.

# Example usage:

First you must generate the `compile_commands.json` file via this command:

```
python3 buildscripts/scons.py --build-profile=compiledb compiledb
```

Next you can run the analysis:

```
python3 buildscripts/iwyu/run_iwyu_analysis.py
```

The default mode is fix mode, and it will start making changes to the code
if any changes are found.

# Debugging failures

Occasionally the IWYU tool will run into problems where it is unable to suggest
valid changes and the changes will cause things to break (not compile). When
it hits a failure it will copy the source and all the headers that were used
at the time of the compilation into a directory where the same command can be
run to reproduce the error.

You can examine the suggested changes in the source and headers and compare
them to the working source tree. Then you can make corrective changes to allow
IWYU to get past the failure.

IWYU is not perfect and it makes several mistakes that a human can understand
and fix appropriately.

# Running the tests

This tool includes its own end-to-end testing. The test directory includes
subdirectories which contain source and IWYU configs to run the tool against.
The tests will then compare the results to built-in expected results and fail
if the tests are not producing the expected results.

To run the tests use the command:

```
cd buildscripts/iwyu/test
python3 run_tests.py
```
@ -1,83 +0,0 @@
|
|||
# options passed to IWYU
iwyu_options:
  - "--mapping_file=etc/iwyu_mapping.imp"
  - "--no_fwd_decls"
  - "--prefix_header_includes=add"
  - "--transitive_includes_only"

# options passed to the fix script
fix_options:
  - "--blank_lines"
  - "--nocomments"
  - "--noreorder"
  - "--separate_project_includes=mongo"
  - "--safe_headers"
  - '--only_re=^src/mongo\/.*'
  # TODO SERVER-77051 we will eventually turn this on once our codebase is cleaned up.
  # - '--nosafe_headers'

# filename regex to swap no_include in place
# quotes and brackets not included in this config
# since this is targeting IWYU added headers
no_includes:
  # avoid boost craziness
  - "boost/.+/detail/.+"
  - "asio/impl/.+"
  - 'boost/.+\.ipp'
  # avoid stdlib detail headers
  - 'ext/alloc_traits\.h'
  - 'ext/type_traits\.h'
  - 'cxxabi\.h' # https://github.com/include-what-you-use/include-what-you-use/issues/909
  - "bits/.+"
  - 'syscall\.h'
  # arch specific
  - "boost/predef/hardware/simd/x86.+"
  - 'emmintrin\.h'
  # we use a third party format which confuses IWYU
  - 'format\.h'
  # this is a link time symbol overloading thing not meant to be included
  - 'libunwind-x86_64\.h'
  # abuse of preprocessor
  - 'mongo/db/namespace_string_reserved\.def\.h'

# path prefixes (non regex) to skip
skip_files:
  - "src/third_party"
  - "build/"
  - "src/mongo/tools/mongo_tidy_checks"
  - "src/mongo/util/net" # causes linkage issues
  - "src/mongo/util/text.cpp"
  # IWYU confused on forward declares
  - "src/mongo/db/exec/near.cpp"
  - "src/mongo/db/storage/wiredtiger/wiredtiger_index.cpp"
  # Asio is going to need some special treatment; the headers are very finicky
  - "src/mongo/transport/asio"
  # causes IWYU to crash:
  - "src/mongo/db/update/update_internal_node.cpp"
  - "src/mongo/db/update/update_array_node.cpp"
  - "src/mongo/db/update/update_object_node.cpp"
  - "src/mongo/db/update/update_array_node_test.cpp"
  - "src/mongo/db/update/update_object_node_test.cpp"
  - "src/mongo/util/options_parser/environment.cpp"
  - "src/mongo/util/options_parser/option_section.cpp"

# regex file paths to add keep pragma
# entries include quotes or angle brackets
keep_includes:
  - '".*\.cstruct"' # these are not true includes, but used for very large initializers
  - '<fmt/printf\.h>'
  - '<fmt/ranges\.h>'
  - '<fmt/chrono\.h>'
  - "<yaml-cpp/yaml.h>"
  - '<asio\.hpp>'
  - '<boost/utility/in_place_factory\.hpp>'
  - "<libunwind.h>"
  - "<fstream>" # IWYU messes up template instantiation
  - '"mongo/rpc/object_check\.h"'
  - '"mongo/base/init\.h"'
  - '"mongo/scripting/mozjs/wrapconstrainedmethod\.h"'
  - '"mongo/dbtests/dbtests\.h"' # this is due to using statements in the header
  - '"mongo/config\.h"'
  - '"mongo/util/overloaded_visitor\.h"'
  - '"mongo/db/query/optimizer/node\.h"'
  - '"mongo/util/text\.h"' # includes platform specific functions
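
Note that `no_includes` and `keep_includes` entries ultimately surface in the rewritten sources as trailing pragma comments such as `// IWYU pragma: keep` and `// IWYU pragma: no_include "b.h"`, as the test expectations below demonstrate.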

(File diff suppressed because it is too large.)

@@ -1 +0,0 @@
#include "b.h"
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
#include "a.h"
|
||||
|
||||
type_b return_b_function() {
|
||||
return type_b();
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
class type_b {};

@@ -1,16 +0,0 @@
import sys

EXPECTED_B_CPP = """
#include "b.h"

type_b return_b_function() {
    return type_b();
}
"""

with open("b.cpp") as f:
    content = f.read()
    if content != EXPECTED_B_CPP:
        print(f'Actual:\n"""{content}"""')
        print(f'Expected:\n"""{EXPECTED_B_CPP}"""')
        sys.exit(1)

@@ -1,25 +0,0 @@
# options passed to IWYU
iwyu_options:
  - "--max_line_length=100"
  - "--no_fwd_decls"
  - "--prefix_header_includes=add"
  - "--transitive_includes_only"

# options passed to the fix script
fix_options:
  - "--blank_lines"
  - "--nocomments"
  - "--noreorder"
  - "--safe_headers"

# filename regex to swap no_include in place
# quotes and brackets are not included; quotes are always assumed
# since this is targeting IWYU added headers
no_includes:

# prefixes (non regex) to skip
skip_files:

# regex file paths to add keep pragma
# entries include quotes or angle brackets
keep_includes:

@@ -1 +0,0 @@
#include "b.h"
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
#include "a.h"
|
||||
|
||||
type_b return_b_function() {
|
||||
return type_b();
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
class type_b {};

@@ -1,17 +0,0 @@
import sys

EXPECTED_B_CPP = """// IWYU pragma: no_include "b.h"

#include "a.h" // IWYU pragma: keep

type_b return_b_function() {
    return type_b();
}
"""

with open("b.cpp") as f:
    content = f.read()
    if content != EXPECTED_B_CPP:
        print(f'Actual:\n"""{content}"""')
        print(f'Expected:\n"""{EXPECTED_B_CPP}"""')
        sys.exit(1)

@@ -1,27 +0,0 @@
# options passed to IWYU
iwyu_options:
  - "--max_line_length=100"
  - "--no_fwd_decls"
  - "--prefix_header_includes=add"
  - "--transitive_includes_only"

# options passed to the fix script
fix_options:
  - "--blank_lines"
  - "--nocomments"
  - "--noreorder"
  - "--safe_headers"

# filename regex to swap no_include in place
# quotes and brackets are not included; quotes are always assumed
# since this is targeting IWYU added headers
no_includes:
  - "b.h"

# prefixes (non regex) to skip
skip_files:

# regex file paths to add keep pragma
# entries include quotes or angle brackets
keep_includes:
  - '"a.h"'

@@ -1,114 +0,0 @@
import argparse
import concurrent.futures
import glob
import json
import os
import pathlib
import shutil
import subprocess
import sys

parser = argparse.ArgumentParser(description="Run tests for the IWYU analysis script.")

parser.add_argument(
    "--mongo-toolchain-bin-dir",
    type=str,
    help="Which toolchain bin directory to use for this analysis.",
    default="/opt/mongodbtoolchain/v4/bin",
)

args = parser.parse_args()

# compare a Path to a Path (not a str to a Path) so the check actually works
if pathlib.Path(os.getcwd()) != pathlib.Path(__file__).parent.resolve():
    print(
        f"iwyu test script must run in the tests directory, changing dirs to {pathlib.Path(__file__).parent.resolve()}"
    )
    os.chdir(pathlib.Path(__file__).parent.resolve())

analysis_script = pathlib.Path(__file__).parent.parent / "run_iwyu_analysis.py"


def run_test(entry):
    print(f"Running test {pathlib.Path(entry)}...")
    test_dir = pathlib.Path(entry) / "test_run"
    if os.path.exists(test_dir):
        shutil.rmtree(test_dir)

    shutil.copytree(pathlib.Path(entry), test_dir)

    source_files = glob.glob("**/*.cpp", root_dir=test_dir, recursive=True)
    compile_commands = []

    for source_file in source_files:
        output = os.path.splitext(source_file)[0] + ".o"
        compile_commands.append(
            {
                "file": source_file,
                "command": f"{args.mongo_toolchain_bin_dir}/clang++ -o {output} -c {source_file}",
                "directory": os.path.abspath(test_dir),
                "output": output,
            }
        )

    with open(test_dir / "compile_commands.json", "w") as compdb:
        json.dump(compile_commands, compdb)

    os.makedirs(test_dir / "etc", exist_ok=True)
    with open(test_dir / "etc" / "iwyu_mapping.imp", "w") as mapping:
        mapping.write(
            '[{include: ["\\"placeholder.h\\"", "private", "\\"placeholder2.h\\"", "public"]}]'
        )

    iwyu_run = subprocess.run(
        [sys.executable, analysis_script, "--verbose", "--config-file=test_config.yml"],
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=test_dir,
    )

    results_run = subprocess.run(
        [sys.executable, pathlib.Path(entry) / "expected_results.py"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        cwd=test_dir,
    )

    msg = "\n".join([iwyu_run.stdout, results_run.stdout, f"FAILED!: {pathlib.Path(entry)}"])
    msg = "\n".join([f"[{pathlib.Path(entry).name}] {line}" for line in msg.split("\n")])

    if results_run.returncode != 0:
        return results_run.returncode, msg, pathlib.Path(entry).name
    else:
        return (
            results_run.returncode,
            f"[{pathlib.Path(entry).name}] PASSED!: {pathlib.Path(entry)}",
            pathlib.Path(entry).name,
        )


failed_tests = []
with concurrent.futures.ThreadPoolExecutor(
    max_workers=len(os.sched_getaffinity(0)) + 4
) as executor:
    # create and run the IWYU jobs
    future_cmd = {
        executor.submit(run_test, entry): entry
        for entry in pathlib.Path(__file__).parent.glob("*")
        if os.path.isdir(entry)
    }

    # process the results
    for future in concurrent.futures.as_completed(future_cmd):
        result, message, test_name = future.result()
        if result != 0:
            failed_tests += [test_name]
        print(message)

print("\n***Tests complete.***")
if failed_tests:
    print("The following tests failed:")
    for test in failed_tests:
        print(" - " + test)
    print("Please review the logs above for more information.")

@@ -56,14 +56,8 @@ MONGO_REVISION_ENV_VAR = "REVISION"

def _get_repos_and_revisions() -> Tuple[List[Repo], RevisionMap]:
    """Get the repo object and a map of revisions to compare against."""
    modules = git.get_module_paths()

    repos = [
        Repo(path)
        for path in modules
        # Exclude enterprise module; it's in the "modules" folder but does not correspond to a repo
        if "src/mongo/db/modules/enterprise" not in path
    ]
    repos = [Repo(git.get_base_dir())]

    revision_map = generate_revision_map(repos, {"mongo": os.environ.get(MONGO_REVISION_ENV_VAR)})
    return repos, revision_map

@@ -1,74 +0,0 @@
# Libdeps Graph Analysis Tools

The Libdeps Graph analysis tools perform analysis and queries on a graph representing the libdeps dependencies in the MongoDB server builds.

## Generating the graph file

The scons build can create the graph files for analysis. To build the graphml file, run the build with this minimal set of required args:

```
python3 buildscripts/scons.py --link-model=dynamic --build-tools=next generate-libdeps-graph --linker=gold --modules=
```

The target `generate-libdeps-graph` has special meaning and will turn on extra build items to generate the graph. This target will build everything so that the graph is fully representative of the build. The graph file by default will be found at `build/opt/libdeps/libdeps.graphml` (where `build/opt` is the `$BUILD_DIR`).

## Command Line Tool

The Command Line tool will process a single graph file based on a list of input args. To see the full list of args run the command:

```
python3 buildscripts/libdeps/gacli.py --help
```

By default it performs some basic operations and prints the output in a human-readable format:

```
python3 buildscripts/libdeps/gacli.py --graph-file build/opt/libdeps/libdeps.graphml
```

Which will give an output similar to this:

```
Loading graph data...Loaded!

Graph built from git hash:
19da729e2696bbf15d3a35c340281e4385069b88

Graph Schema version:
1

Build invocation:
"/usr/bin/python3.8" "buildscripts/scons.py" "--variables-files=etc/scons/mongodbtoolchain_stable_gcc.vars" "--dbg=on" "--opt=on" "--enable-free-mon=on" "--enable-http-client=on" "--cache=all" "--cache-dir=/home/ubuntu/scons-cache" "--install-action=hardlink" "--link-model=dynamic" "--build-tools=next" "--ssl" "--modules=enterprise" "CCACHE=ccache" "ICECC=icecc" "-j50" "generate-libdeps-graph"

Nodes in Graph: 859
Edges in Graph: 90843
Direct Edges in Graph: 5808
Transitive Edges in Graph: 85035
Direct Public Edges in Graph: 3511
Public Edges in Graph: 88546
Private Edges in Graph: 2272
Interface Edges in Graph: 25
Shim Nodes in Graph: 20
Program Nodes in Graph: 134
Library Nodes in Graph: 725

LibdepsLinter: PUBLIC libdeps that could be PRIVATE: 0
```
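
Specific queries can also be run against the same graph file. For example, to print every libdeps path between two nodes, using the `--graph-paths` arg described in `--help` (a sketch; the node names are placeholders for real library names from your build):

```
python3 buildscripts/libdeps/gacli.py --graph-file build/opt/libdeps/libdeps.graphml --graph-paths lib1.so lib6.so
```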

## Graph Visualizer Tool

The graph visualizer tool starts up a web service to provide a frontend GUI for navigating and examining the graph files. The visualizer uses a Python Flask backend and a React JavaScript frontend. You will need to install the libdeps requirements into Python to run the backend:

```
python3 -m poetry install --no-root --sync -E libdeps
```

To install the dependencies for the frontend, you will need node >= 12.0.0 and npm installed and in the PATH. To install the dependencies, navigate to the directory where package.json lives, and run:

```
cd buildscripts/libdeps/graph_visualizer_web_stack && npm install
```

Alternatively, if you are on Linux, you can use the setup_node_env.sh script to automatically download node 12 and npm, set up the local environment, and install the dependencies. Run the command:

```
buildscripts/libdeps/graph_visualizer_web_stack/setup_node_env.sh install
```

Assuming you are on a remote workstation and using defaults, you will need to create SSH tunnels to the web service so you can access it in your local browser. The frontend and backend each use a port (in this case 3000 for the frontend and 5000 for the backend), and the default host is localhost, so you will need to open two tunnels so the frontend running in your local web browser can communicate with the backend. If you are using the default host and ports, the tunnel command will look like this:

```
ssh -L 3000:localhost:3000 -L 5000:localhost:5000 ubuntu@workstation.hostname
```

Next we need to start the web service. It requires you to pass a directory where it will search for `.graphml` files which contain the graph data for various commits:

```
python3 buildscripts/libdeps/graph_visualizer.py --graphml-dir build/opt/libdeps
```

The script will download nodejs, use npm to install all required packages, launch the backend, and then build the optimized production frontend. You can supply the `--debug` argument to work in development mode, which allows real-time updates as files are modified.
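
For example, to iterate on the frontend with live updates, combining the two flags defined by the script:

```
python3 buildscripts/libdeps/graph_visualizer.py --graphml-dir build/opt/libdeps --debug
```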

After the server has started up, it should notify you via the terminal that you can access it at http://localhost:3000 locally in your browser.

@@ -1,3 +0,0 @@
3 removed shim node property
2 flipped edge direction in graph file data
1 initial schema

@@ -1,400 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2021 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""Unittests for the graph analyzer."""

import json
import unittest

import libdeps.analyzer
from generate_test_graphs import get_basic_mock_graph, get_double_diamond_mock_graph
from libdeps.graph import (
    CountTypes,
    DependsReportTypes,
    LibdepsGraph,
    LinterTypes,
)


class Tests(unittest.TestCase):
    """Common unittest for the libdeps graph analyzer module."""

    def run_analysis(self, expected, graph, algo, *args):
        """Check results of analysis generically."""

        analysis = [algo(graph, *args)]
        ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis)
        printer = libdeps.analyzer.GaJsonPrinter(ga)
        result = json.loads(printer.get_json())
        self.assertEqual(result, expected)

    def run_counts(self, expected, graph):
        """Check results of counts generically."""

        analysis = libdeps.analyzer.counter_factory(
            graph,
            [name[0] for name in CountTypes.__members__.items() if name[0] != CountTypes.ALL.name],
        )
        ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis)
        printer = libdeps.analyzer.GaJsonPrinter(ga)
        result = json.loads(printer.get_json())
        self.assertEqual(result, expected)

    def test_graph_paths_basic(self):
        """Test for the GraphPaths analyzer on a basic graph."""

        libdeps_graph = LibdepsGraph(get_basic_mock_graph())

        expected_result = {
            "GRAPH_PATHS": {
                "('lib1.so', 'lib6.so')": [
                    ["lib1.so", "lib2.so", "lib3.so", "lib6.so"],
                    ["lib1.so", "lib2.so", "lib4.so", "lib6.so"],
                ]
            }
        }
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib1.so", "lib6.so"
        )

        expected_result = {"GRAPH_PATHS": {"('lib4.so', 'lib5.so')": []}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib4.so", "lib5.so"
        )

        expected_result = {
            "GRAPH_PATHS": {"('lib2.so', 'lib5.so')": [["lib2.so", "lib3.so", "lib5.so"]]}
        }
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib2.so", "lib5.so"
        )

    def test_graph_paths_double_diamond(self):
        """Test path algorithm on the double diamond graph."""

        libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph())

        expected_result = {
            "GRAPH_PATHS": {
                "('lib1.so', 'lib9.so')": [
                    ["lib1.so", "lib2.so", "lib3.so", "lib5.so", "lib6.so", "lib7.so", "lib9.so"],
                    ["lib1.so", "lib2.so", "lib3.so", "lib5.so", "lib6.so", "lib8.so", "lib9.so"],
                    ["lib1.so", "lib2.so", "lib4.so", "lib5.so", "lib6.so", "lib7.so", "lib9.so"],
                    ["lib1.so", "lib2.so", "lib4.so", "lib5.so", "lib6.so", "lib8.so", "lib9.so"],
                ]
            }
        }
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib1.so", "lib9.so"
        )

        expected_result = {
            "GRAPH_PATHS": {
                "('lib5.so', 'lib9.so')": [
                    ["lib5.so", "lib6.so", "lib7.so", "lib9.so"],
                    ["lib5.so", "lib6.so", "lib8.so", "lib9.so"],
                ]
            }
        }
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib5.so", "lib9.so"
        )

        expected_result = {
            "GRAPH_PATHS": {
                "('lib2.so', 'lib6.so')": [
                    ["lib2.so", "lib3.so", "lib5.so", "lib6.so"],
                    ["lib2.so", "lib4.so", "lib5.so", "lib6.so"],
                ]
            }
        }
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.GraphPaths, "lib2.so", "lib6.so"
        )

    def test_critical_paths_basic(self):
        """Test for the CriticalPaths for basic graph."""

        libdeps_graph = LibdepsGraph(get_basic_mock_graph())

        expected_result = {"CRITICAL_EDGES": {"('lib1.so', 'lib6.so')": [["lib1.so", "lib2.so"]]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib1.so", "lib6.so"
        )

        expected_result = {"CRITICAL_EDGES": {"('lib1.so', 'lib5.so')": [["lib1.so", "lib2.so"]]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib1.so", "lib5.so"
        )

        expected_result = {"CRITICAL_EDGES": {"('lib5.so', 'lib6.so')": []}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib5.so", "lib6.so"
        )

    def test_critical_paths_double_diamond(self):
        """Test for the CriticalPaths for double diamond graph."""

        libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph())

        expected_result = {"CRITICAL_EDGES": {"('lib1.so', 'lib9.so')": [["lib1.so", "lib2.so"]]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib1.so", "lib9.so"
        )

        expected_result = {"CRITICAL_EDGES": {"('lib2.so', 'lib9.so')": [["lib5.so", "lib6.so"]]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib2.so", "lib9.so"
        )

        expected_result = {"CRITICAL_EDGES": {"('lib7.so', 'lib8.so')": []}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CriticalEdges, "lib7.so", "lib8.so"
        )

    def test_direct_depends_basic(self):
        """Test for the DirectDependents for basic graph."""

        libdeps_graph = LibdepsGraph(get_basic_mock_graph())

        expected_result = {"DIRECT_DEPENDS": {"lib6.so": ["lib3.so", "lib4.so"]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib6.so"
        )

        expected_result = {"DIRECT_DEPENDS": {"lib1.so": []}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib1.so"
        )

    def test_direct_depends_double_diamond(self):
        """Test for the DirectDependents for double diamond graph."""

        libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph())

        expected_result = {"DIRECT_DEPENDS": {"lib9.so": ["lib7.so", "lib8.so"]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib9.so"
        )

        expected_result = {"DIRECT_DEPENDS": {"lib6.so": ["lib5.so"]}}
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.DirectDependents, "lib6.so"
        )

    def test_common_depends_basic(self):
        """Test for the CommonDependents for basic graph."""

        libdeps_graph = LibdepsGraph(get_basic_mock_graph())

        expected_result = {
            "COMMON_DEPENDS": {
                "('lib6.so', 'lib5.so')": ["lib1.so", "lib2.so", "lib3.so", "lib4.so"]
            }
        }
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.CommonDependents,
            ["lib6.so", "lib5.so"],
        )

        expected_result = {
            "COMMON_DEPENDS": {
                "('lib5.so', 'lib6.so')": ["lib1.so", "lib2.so", "lib3.so", "lib4.so"]
            }
        }
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.CommonDependents,
            ["lib5.so", "lib6.so"],
        )

        expected_result = {"COMMON_DEPENDS": {"('lib5.so', 'lib6.so', 'lib2.so')": ["lib1.so"]}}
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.CommonDependents,
            ["lib5.so", "lib6.so", "lib2.so"],
        )

    def test_common_depends_double_diamond(self):
        """Test for the CommonDependents for double diamond graph."""

        libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph())

        expected_result = {
            "COMMON_DEPENDS": {
                "('lib9.so',)": [
                    "lib1.so",
                    "lib2.so",
                    "lib3.so",
                    "lib4.so",
                    "lib5.so",
                    "lib6.so",
                    "lib7.so",
                    "lib8.so",
                ]
            }
        }
        self.run_analysis(
            expected_result, libdeps_graph, libdeps.analyzer.CommonDependents, ["lib9.so"]
        )

        expected_result = {"COMMON_DEPENDS": {"('lib9.so', 'lib2.so')": ["lib1.so"]}}
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.CommonDependents,
            ["lib9.so", "lib2.so"],
        )

        expected_result = {"COMMON_DEPENDS": {"('lib1.so', 'lib4.so', 'lib3.so')": []}}
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.CommonDependents,
            ["lib1.so", "lib4.so", "lib3.so"],
        )

    def test_exclude_depends_basic(self):
        """Test for the ExcludeDependents for basic graph."""

        libdeps_graph = LibdepsGraph(get_basic_mock_graph())

        expected_result = {"EXCLUDE_DEPENDS": {"('lib6.so', 'lib5.so')": []}}
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.ExcludeDependents,
            ["lib6.so", "lib5.so"],
        )

        expected_result = {"EXCLUDE_DEPENDS": {"('lib3.so', 'lib1.so')": ["lib1.so", "lib2.so"]}}
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.ExcludeDependents,
            ["lib3.so", "lib1.so"],
        )

        expected_result = {
            "EXCLUDE_DEPENDS": {
                "('lib6.so', 'lib1.so', 'lib2.so')": ["lib2.so", "lib3.so", "lib4.so"]
            }
        }
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.ExcludeDependents,
            ["lib6.so", "lib1.so", "lib2.so"],
        )

    def test_exclude_depends_double_diamond(self):
        """Test for the ExcludeDependents for double diamond graph."""

        libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph())

        expected_result = {
            "EXCLUDE_DEPENDS": {"('lib6.so', 'lib4.so')": ["lib3.so", "lib4.so", "lib5.so"]}
        }
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.ExcludeDependents,
            ["lib6.so", "lib4.so"],
        )

        expected_result = {"EXCLUDE_DEPENDS": {"('lib2.so', 'lib9.so')": []}}
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.ExcludeDependents,
            ["lib2.so", "lib9.so"],
        )

        expected_result = {
            "EXCLUDE_DEPENDS": {
                "('lib8.so', 'lib1.so', 'lib2.so', 'lib3.so', 'lib4.so', 'lib5.so')": [
                    "lib5.so",
                    "lib6.so",
                ]
            }
        }
        self.run_analysis(
            expected_result,
            libdeps_graph,
            libdeps.analyzer.ExcludeDependents,
            ["lib8.so", "lib1.so", "lib2.so", "lib3.so", "lib4.so", "lib5.so"],
        )

    def test_counts_basic(self):
        """Test counts on basic graph."""

        libdeps_graph = LibdepsGraph(get_basic_mock_graph())

        expected_result = {
            "NODE": 6,
            "EDGE": 13,
            "DIR_EDGE": 7,
            "TRANS_EDGE": 6,
            "DIR_PUB_EDGE": 6,
            "PUB_EDGE": 12,
            "PRIV_EDGE": 1,
            "IF_EDGE": 0,
            "PROG": 0,
            "LIB": 6,
        }
        self.run_counts(expected_result, libdeps_graph)

    def test_counts_double_diamond(self):
        """Test counts on double diamond graph."""

        libdeps_graph = LibdepsGraph(get_double_diamond_mock_graph())

        expected_result = {
            "NODE": 9,
            "EDGE": 34,
            "DIR_EDGE": 10,
            "TRANS_EDGE": 24,
            "DIR_PUB_EDGE": 10,
            "PUB_EDGE": 34,
            "PRIV_EDGE": 0,
            "IF_EDGE": 0,
            "PROG": 0,
            "LIB": 9,
        }
        self.run_counts(expected_result, libdeps_graph)

    def test_unique_report_enums(self):
        """Ensure uniqueness of enums used as keys when generating reports."""

        enums = [enum.name for enum in LinterTypes]
        enums += [enum.name for enum in DependsReportTypes]
        enums_unique = set(enums)
        self.assertEqual(len(enums), len(enums_unique))


if __name__ == "__main__":
    unittest.main()

@@ -1,400 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2020 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Graph Analysis Command Line Interface.

A command line interface to the graph analysis module.
"""

import argparse
import copy
import sys
import textwrap
from pathlib import Path

import libdeps.analyzer as libdeps_analyzer
import networkx
from libdeps.graph import CountTypes, LibdepsGraph, LinterTypes


class LinterSplitArgs(argparse.Action):
    """Custom argument action for checking multiple choice comma separated list."""

    def __call__(self, parser, namespace, values, option_string=None):
        """Create a multi choice comma separated list."""

        selected_choices = [v.upper() for v in "".join(values).split(",") if v]
        invalid_choices = [
            choice for choice in selected_choices if choice not in self.valid_choices
        ]
        if invalid_choices:
            raise Exception(
                f"Invalid choices: {invalid_choices}\nMust use choices from {self.valid_choices}"
            )
        if CountTypes.ALL.name in selected_choices:
            selected_choices = copy.copy(self.valid_choices)
            selected_choices.remove(CountTypes.ALL.name)
        if selected_choices == []:
            selected_choices = copy.copy(self.default_choices)
        if values == [""]:
            selected_choices = []
        setattr(namespace, self.dest, [opt.replace("-", "_") for opt in selected_choices])


class CountSplitArgs(LinterSplitArgs):
    """Special case of common custom arg action for Count types."""

    valid_choices = [name[0].replace("_", "-") for name in CountTypes.__members__.items()]
    default_choices = [
        name[0] for name in CountTypes.__members__.items() if name[0] != CountTypes.ALL.name
    ]


class LintSplitArgs(LinterSplitArgs):
    """Special case of common custom arg action for Linter types."""

    valid_choices = [name[0].replace("_", "-") for name in LinterTypes.__members__.items()]
    default_choices = [LinterTypes.PUBLIC_UNUSED.name]


class CustomFormatter(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
    """Custom arg help formatter for modifying the defaults printed for the custom list action."""

    @staticmethod
    def _get_help_length(enum_type):
        max_length = max([len(name[0]) for name in enum_type.__members__.items()])
        help_text = {}
        for name in enum_type.__members__.items():
            help_text[name[0]] = name[0].lower() + ("-" * (max_length - len(name[0]))) + ": "
        return help_text

    def _get_help_string(self, action):
        if isinstance(action, CountSplitArgs):
            help_text = self._get_help_length(CountTypes)
            return textwrap.dedent(f"""\
                {action.help}
                default: all, choices:
                  {help_text[CountTypes.ALL.name]}perform all counts
                  {help_text[CountTypes.NODE.name]}count nodes
                  {help_text[CountTypes.EDGE.name]}count edges
                  {help_text[CountTypes.DIR_EDGE.name]}count edges declared directly on a node
                  {help_text[CountTypes.TRANS_EDGE.name]}count edges induced by direct public edges
                  {help_text[CountTypes.DIR_PUB_EDGE.name]}count edges that are directly public
                  {help_text[CountTypes.PUB_EDGE.name]}count edges that are public
                  {help_text[CountTypes.PRIV_EDGE.name]}count edges that are private
                  {help_text[CountTypes.IF_EDGE.name]}count edges that are interface
                  {help_text[CountTypes.LIB.name]}count library nodes
                  {help_text[CountTypes.PROG.name]}count program nodes
                """)
        elif isinstance(action, LintSplitArgs):
            help_text = self._get_help_length(LinterTypes)
            return textwrap.dedent(f"""\
                {action.help}
                default: all, choices:
                  {help_text[LinterTypes.ALL.name]}perform all linters
                  {help_text[LinterTypes.PUBLIC_UNUSED.name]}find unnecessary public libdeps
                """)
        return super()._get_help_string(action)


def setup_args_parser():
    """Add and parse the input args."""

    parser = argparse.ArgumentParser(formatter_class=CustomFormatter)

    parser.add_argument(
        "--graph-file",
        type=str,
        action="store",
        help="The LIBDEPS graph to load.",
        default="build/opt/libdeps/libdeps.graphml",
    )

    parser.add_argument(
        "--format", choices=["pretty", "json"], default="pretty", help="The output format type."
    )

    parser.add_argument(
        "--build-data",
        choices=["on", "off"],
        default="on",
        help="Print the invocation and git hash used to build the graph",
    )

    parser.add_argument(
        "--counts",
        metavar="COUNT,",
        nargs="*",
        action=CountSplitArgs,
        default=CountSplitArgs.default_choices,
        help="Output various counts from the graph. Comma separated list.",
    )

    parser.add_argument(
        "--lint",
        metavar="LINTER,",
        nargs="*",
        action=LintSplitArgs,
        default=LintSplitArgs.default_choices,
        help="Perform various linters on the graph. Comma separated list.",
    )

    parser.add_argument(
        "--direct-depends",
        action="append",
        default=[],
        help="Print the nodes which depend on a given node.",
    )

    parser.add_argument(
        "--program-depends",
        action="append",
        default=[],
        help="Print the programs which depend (transitively or directly) on a given node.",
    )

    parser.add_argument(
        "--common-depends",
        nargs="+",
        action="append",
        default=[],
        help="Print the nodes which have a common dependency on all N nodes.",
    )

    parser.add_argument(
        "--exclude-depends",
        nargs="+",
        action="append",
        default=[],
        help="Print nodes which depend on the first node of N nodes, but exclude all nodes listed thereafter.",
    )

    parser.add_argument(
        "--graph-paths",
        nargs="+",
        action="append",
        default=[],
        help="[from_node] [to_node]: Print all paths between 2 nodes.",
    )

    parser.add_argument(
        "--critical-edges",
        nargs="+",
        action="append",
        default=[],
        help="[from_node] [to_node]: Print edges between two nodes, which if removed would break the dependency between those "
        + "nodes.",
    )

    parser.add_argument(
        "--symbol-depends",
        nargs="+",
        action="append",
        default=[],
        help="[from_node] [to_node]: Print symbols defined in from_node used by to_node.",
    )

    parser.add_argument(
        "--efficiency",
        nargs="+",
        action="append",
        default=[],
        help="[from_node ...]: Print efficiencies of public direct edges off each from_node in a list of nodes.",
    )

    parser.add_argument(
        "--efficiency-lint",
        nargs="?",
        type=int,
        const=2,
        help="[threshold]: Analyze efficiency of all public direct edges, print those below efficiency threshold percentage.",
    )

    parser.add_argument(
        "--indegree-one",
        action="store_true",
        default=False,
        help="Find candidate nodes for merging by searching the graph for nodes with only one node which depends on them.",
    )

    parser.add_argument(
        "--bazel-conv-candidates",
        action="store_true",
        default=False,
        help="Find candidate nodes ready for bazel conversion. This effectively means the node is currently not being built "
        "with bazel and the node does not have any dependency nodes that are not being built in bazel.",
    )

    args = parser.parse_args()

    for arg_list in args.graph_paths:
        if len(arg_list) != 2:
            parser.error(
                f"Must pass two args for --graph-paths, [from_node] [to_node], not {arg_list}"
            )

    for arg_list in args.critical_edges:
        if len(arg_list) != 2:
            parser.error(
                f"Must pass two args for --critical-edges, [from_node] [to_node], not {arg_list}"
            )

    for arg_list in args.symbol_depends:
        if len(arg_list) != 2:
            parser.error(
                f"Must pass two args for --symbol-depends, [from_node] [to_node], not {arg_list}"
            )

    # the args were already parsed and validated above; return them directly
    # rather than parsing a second time
    return args


def strip_build_dir(build_dir, node):
    """Small util function for making args match the graph paths."""

    try:
        return str(Path(node).relative_to(build_dir))
    except ValueError:
        return node


def strip_build_dirs(build_dir, nodes):
    """Small util function for making a list of nodes match graph paths."""

    return [strip_build_dir(build_dir, node) for node in nodes]


def load_graph_data(graph_file, output_format):
    """Load a graphml file."""

    if output_format == "pretty":
        sys.stdout.write("Loading graph data...")
        sys.stdout.flush()
    graph = networkx.read_graphml(graph_file)
    if output_format == "pretty":
        sys.stdout.write("Loaded!\n\n")
    return graph


def main():
    """Perform graph analysis based on input args."""

    args = setup_args_parser()
    graph = load_graph_data(args.graph_file, args.format)
    libdeps_graph = LibdepsGraph(graph=graph)
    build_dir = libdeps_graph.graph["build_dir"]

    if libdeps_graph.graph["graph_schema_version"] == 1:
        libdeps_graph = networkx.reverse_view(libdeps_graph)

    analysis = libdeps_analyzer.counter_factory(libdeps_graph, args.counts)

    for analyzer_args in args.direct_depends:
        analysis.append(
            libdeps_analyzer.DirectDependents(
                libdeps_graph, strip_build_dir(build_dir, analyzer_args)
            )
        )

    for analyzer_args in args.program_depends:
        analysis.append(
            libdeps_analyzer.TransitiveProgramDependents(
                libdeps_graph, strip_build_dir(build_dir, analyzer_args)
            )
        )

    for analyzer_args in args.common_depends:
        analysis.append(
            libdeps_analyzer.CommonDependents(
                libdeps_graph, strip_build_dirs(build_dir, analyzer_args)
            )
        )

    for analyzer_args in args.exclude_depends:
        analysis.append(
            libdeps_analyzer.ExcludeDependents(
                libdeps_graph, strip_build_dirs(build_dir, analyzer_args)
            )
        )

    for analyzer_args in args.graph_paths:
        analysis.append(
            libdeps_analyzer.GraphPaths(
                libdeps_graph,
                strip_build_dir(build_dir, analyzer_args[0]),
                strip_build_dir(build_dir, analyzer_args[1]),
            )
        )

    for analyzer_args in args.symbol_depends:
        analysis.append(
            libdeps_analyzer.SymbolDependents(
                libdeps_graph,
                strip_build_dir(build_dir, analyzer_args[0]),
                strip_build_dir(build_dir, analyzer_args[1]),
            )
        )

    for analyzer_args in args.efficiency:
        nodes = []
        for arg in analyzer_args:
            nodes.append(strip_build_dir(build_dir, arg))
        analysis.append(libdeps_analyzer.Efficiency(libdeps_graph, nodes))

    if args.efficiency_lint:
        analysis.append(libdeps_analyzer.EfficiencyLinter(libdeps_graph, args.efficiency_lint))

    for analyzer_args in args.critical_edges:
        analysis.append(
            libdeps_analyzer.CriticalEdges(
                libdeps_graph,
                strip_build_dir(build_dir, analyzer_args[0]),
                strip_build_dir(build_dir, analyzer_args[1]),
            )
        )

    if args.indegree_one:
        analysis.append(libdeps_analyzer.InDegreeOne(libdeps_graph))

    if args.bazel_conv_candidates:
        analysis.append(libdeps_analyzer.BazelConversionCandidates(libdeps_graph))

    analysis += libdeps_analyzer.linter_factory(libdeps_graph, args.lint)

    if args.build_data:
        analysis.append(libdeps_analyzer.BuildDataReport(libdeps_graph))

    ga = libdeps_analyzer.LibdepsGraphAnalysis(analysis)

    if args.format == "pretty":
        ga_printer = libdeps_analyzer.GaPrettyPrinter(ga)
    elif args.format == "json":
        ga_printer = libdeps_analyzer.GaJsonPrinter(ga)
    else:
        return

    ga_printer.print()


if __name__ == "__main__":
    main()

@@ -1,491 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2022 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""Test graphs for the graph visualizer and analyzer."""

import argparse
import json
import os

import networkx
from libdeps.graph import EdgeProps, LibdepsGraph, NodeProps


def get_args():
    """Create the argparse and return passed args."""

    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--graph-output-dir",
        type=str,
        action="store",
        help="Directory test graphml files will be saved.",
        default="build/opt/libdeps",
    )

    parser.add_argument(
        "--generate-big-graphs",
        action="store_true",
        help="Makes graphs which are large for testing scale.",
        default=False,
    )

    return parser.parse_args()


def add_node(graph, node, builder):
    """Add a node to the graph."""

    graph.add_nodes_from([(node, {NodeProps.bin_type.name: builder})])


def add_edge(graph, from_node, to_node, **kwargs):
    """Add an edge to the graph."""

    edge_props = {
        EdgeProps.direct.name: kwargs[EdgeProps.direct.name],
        EdgeProps.visibility.name: int(kwargs[EdgeProps.visibility.name]),
    }
    if kwargs.get("symbols"):
        edge_props[EdgeProps.symbols.name] = kwargs.get("symbols")

    graph.add_edges_from([(from_node, to_node, edge_props)])


def get_big_graph(int_id):
    """Generate a big graph."""

    graph = LibdepsGraph()
    graph.graph["build_dir"] = "."
    graph.graph["graph_schema_version"] = 2
    graph.graph["deptypes"] = json.dumps(
        {
            "Global": 0,
            "Public": 1,
            "Private": 2,
            "Interface": 3,
        }
    )
    graph.graph["git_hash"] = f"BIG{int_id.zfill(4)}"
    num_nodes = 200
    for i in range(num_nodes):
        add_node(graph, f"lib{i}.so", "SharedLibrary")
        for j in range(num_nodes - i):
            add_edge(
                graph,
                f"lib{i}.so",
                f"lib{j}.so",
                direct=True,
                visibility=graph.get_deptype("Public"),
                symbols="\n".join([f"RandomString{i+j}" * 100 for i in range(10)]),
            )
    return graph


def get_double_diamond_mock_graph():
    """Construct a mock graph which covers a double diamond structure."""

    graph = LibdepsGraph()
    graph.graph["build_dir"] = "."
    graph.graph["graph_schema_version"] = 2
    graph.graph["deptypes"] = json.dumps(
        {
            "Global": 0,
            "Public": 1,
            "Private": 2,
            "Interface": 3,
        }
    )
    graph.graph["git_hash"] = "TEST001"

    # builds a graph of mostly public edges that looks like this:
    #
    #
    #          /lib3.so   /lib7.so
    #         |        \ |        \
    # <-lib1.so--lib2.so lib5.so--lib6.so lib9.so
    #         |        / |        /
    #          \lib4.so   \lib8.so
    #

    add_node(graph, "lib1.so", "SharedLibrary")
    add_node(graph, "lib2.so", "SharedLibrary")
    add_node(graph, "lib3.so", "SharedLibrary")
    add_node(graph, "lib4.so", "SharedLibrary")
    add_node(graph, "lib5.so", "SharedLibrary")
    add_node(graph, "lib6.so", "SharedLibrary")
    add_node(graph, "lib7.so", "SharedLibrary")
    add_node(graph, "lib8.so", "SharedLibrary")
    add_node(graph, "lib9.so", "SharedLibrary")

    add_edge(graph, "lib1.so", "lib2.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib3.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib4.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib5.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib5.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib5.so", "lib6.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib6.so", "lib7.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib6.so", "lib8.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib7.so", "lib9.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib8.so", "lib9.so", direct=True, visibility=graph.get_deptype("Public"))

    # trans for 3 and 4
    add_edge(graph, "lib1.so", "lib3.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib1.so", "lib4.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 5
    add_edge(graph, "lib1.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 6
    add_edge(graph, "lib1.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 7
    add_edge(graph, "lib1.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib5.so", "lib7.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 8
    add_edge(graph, "lib1.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib5.so", "lib8.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 9
    add_edge(graph, "lib1.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib5.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib6.so", "lib9.so", direct=False, visibility=graph.get_deptype("Public"))

    return graph


def get_basic_mock_graph():
    """Construct a mock graph which covers most cases and is easy to understand."""

    graph = LibdepsGraph()
    graph.graph["build_dir"] = "."
    graph.graph["graph_schema_version"] = 2
    graph.graph["deptypes"] = json.dumps(
        {
            "Global": 0,
            "Public": 1,
            "Private": 2,
            "Interface": 3,
        }
    )
    graph.graph["git_hash"] = "TEST002"

    # builds a graph of mostly public edges:
    #
    #                   /-lib5.so
    #          /lib3.so
    #         |         \-lib6.so
    # <-lib1.so--lib2.so
    #         |         /-lib5.so (private)
    #          \lib4.so
    #                   \-lib6.so

    # nodes
    add_node(graph, "lib1.so", "SharedLibrary")
    add_node(graph, "lib2.so", "SharedLibrary")
    add_node(graph, "lib3.so", "SharedLibrary")
    add_node(graph, "lib4.so", "SharedLibrary")
    add_node(graph, "lib5.so", "SharedLibrary")
    add_node(graph, "lib6.so", "SharedLibrary")

    # direct edges
    add_edge(graph, "lib1.so", "lib2.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib3.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib2.so", "lib4.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib6.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib5.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib3.so", "lib6.so", direct=True, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib4.so", "lib5.so", direct=True, visibility=graph.get_deptype("Private"))

    # trans for 3
    add_edge(graph, "lib1.so", "lib3.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 4
    add_edge(graph, "lib1.so", "lib4.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 5
    add_edge(graph, "lib2.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib1.so", "lib5.so", direct=False, visibility=graph.get_deptype("Public"))

    # trans for 6
    add_edge(graph, "lib2.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public"))
    add_edge(graph, "lib1.so", "lib6.so", direct=False, visibility=graph.get_deptype("Public"))

    return graph


def get_basic_mock_directory_graph():
    """Construct a mock graph which covers most cases and is easy to understand."""

    graph = LibdepsGraph()
    graph.graph["build_dir"] = "."
    graph.graph["graph_schema_version"] = 2
    graph.graph["deptypes"] = json.dumps(
        {
            "Global": 0,
            "Public": 1,
            "Private": 2,
            "Interface": 3,
        }
    )
    graph.graph["git_hash"] = "TEST003"

    # builds a graph of mostly public edges:
    #
    #                /-lib5.so
    #          /lib3
    #         |      \-lib6.so
    # <-lib1.so--lib2
    #         |      /-lib5.so (private)
    #          \lib4.so
    #                \-lib6.so

    # nodes
    add_node(graph, "dir1/lib1.so", "SharedLibrary")
    add_node(graph, "dir1/sub1/lib2", "Program")
    add_node(graph, "dir1/sub1/lib3", "Program")
    add_node(graph, "dir1/sub2/lib4.so", "SharedLibrary")
    add_node(graph, "dir2/lib5.so", "SharedLibrary")
    add_node(graph, "dir2/lib6.so", "SharedLibrary")

    # direct edges
    add_edge(
        graph, "dir1/lib1.so", "dir1/sub1/lib2", direct=True, visibility=graph.get_deptype("Public")
    )
    add_edge(
        graph,
        "dir1/sub1/lib2",
        "dir1/sub1/lib3",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "dir1/sub1/lib2",
        "dir1/sub2/lib4.so",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "dir1/sub2/lib4.so",
        "dir2/lib6.so",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph, "dir1/sub1/lib3", "dir2/lib5.so", direct=True, visibility=graph.get_deptype("Public")
    )
    add_edge(
        graph, "dir1/sub1/lib3", "dir2/lib6.so", direct=True, visibility=graph.get_deptype("Public")
    )
    add_edge(
        graph,
        "dir1/sub2/lib4.so",
        "dir2/lib5.so",
        direct=True,
        visibility=graph.get_deptype("Private"),
    )

    # trans for 3
    add_edge(
        graph,
        "dir1/lib1.so",
        "dir1/sub1/lib3",
        direct=False,
        visibility=graph.get_deptype("Public"),
    )

    # trans for 4
    add_edge(
        graph,
        "dir1/lib1.so",
        "dir1/sub2/lib4.so",
        direct=False,
        visibility=graph.get_deptype("Public"),
    )

    # trans for 5
    add_edge(
        graph,
        "dir1/sub1/lib2",
        "dir2/lib5.so",
        direct=False,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph, "dir1/lib1.so", "dir2/lib5.so", direct=False, visibility=graph.get_deptype("Public")
    )

    # trans for 6
    add_edge(
        graph,
        "dir1/sub1/lib2",
        "dir2/lib6.so",
        direct=False,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph, "dir1/lib1.so", "dir2/lib6.so", direct=False, visibility=graph.get_deptype("Public")
    )

    return graph


def get_simple_directory_graph():
    """Construct a mock graph which covers most cases and is easy to understand."""

    graph = LibdepsGraph()
    graph.graph["build_dir"] = "."
    graph.graph["graph_schema_version"] = 2
    graph.graph["deptypes"] = json.dumps(
        {
            "Global": 0,
            "Public": 1,
            "Private": 2,
            "Interface": 3,
        }
    )
    graph.graph["git_hash"] = "TEST004"

    # lib2.so <- lib4.so
    #   /∧  \∨
    # lib1.so   prog1 <- lib5.so
    #   \∨  /∧
    # lib3.so -> prog2

    # nodes
    add_node(graph, "mongo/base/lib1.so", "SharedLibrary")
    add_node(graph, "mongo/base/lib2.so", "SharedLibrary")
    add_node(graph, "mongo/db/lib3.so", "SharedLibrary")
    add_node(graph, "third_party/lib4.so", "SharedLibrary")
    add_node(graph, "third_party/lib5.so", "SharedLibrary")
    add_node(graph, "mongo/base/prog1", "Program")
    add_node(graph, "mongo/db/prog2", "Program")

    # direct edges
    add_edge(
        graph,
        "mongo/base/lib1.so",
        "mongo/base/lib2.so",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "mongo/base/lib1.so",
        "mongo/db/lib3.so",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "mongo/base/lib2.so",
        "mongo/base/prog1",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "mongo/db/lib3.so",
        "mongo/base/prog1",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "mongo/db/lib3.so",
        "mongo/db/prog2",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "third_party/lib4.so",
        "mongo/base/lib2.so",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )
    add_edge(
        graph,
        "third_party/lib5.so",
        "mongo/base/prog1",
        direct=True,
        visibility=graph.get_deptype("Public"),
    )

    return graph


def save_graph_file(graph, output_dir):
|
||||
"""Save a graph locally as a .graphml."""
|
||||
|
||||
filename = output_dir + "/libdeps_" + graph.graph["git_hash"] + ".graphml"
|
||||
networkx.write_graphml(graph, filename, named_key_ids=True)
|
||||
|
||||
|
||||
def main():
|
||||
"""Generate and save the test graphs as .graphml files."""
|
||||
|
||||
args = get_args()
|
||||
output_dir = args.graph_output_dir
|
||||
|
||||
os.makedirs(output_dir, exist_ok=True)
|
||||
|
||||
graph = get_double_diamond_mock_graph()
|
||||
save_graph_file(graph, output_dir)
|
||||
|
||||
graph = get_basic_mock_graph()
|
||||
save_graph_file(graph, output_dir)
|
||||
|
||||
graph = get_basic_mock_directory_graph()
|
||||
save_graph_file(graph, output_dir)
|
||||
|
||||
graph = get_simple_directory_graph()
|
||||
save_graph_file(graph, output_dir)
|
||||
|
||||
if args.generate_big_graphs:
|
||||
graph = get_big_graph("0")
|
||||
for i in range(1, 30):
|
||||
print(f"generating big graph {i}...")
|
||||
graph.graph["git_hash"] = f"BIG{str(i).zfill(4)}"
|
||||
save_graph_file(graph, output_dir)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@@ -1,246 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2020 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Libdeps Graph Visualization Tool.

Starts a web service which creates a UI for interacting with and examining the libdeps graph.
The web service front end consists of React+Redux for the framework, a flask API for backend
communication, and Material UI for the GUI. The web service back end uses flask.

This script will automatically install the npm modules, and build and run the production
web service if not in debug mode.
"""

import argparse
import os
import subprocess
import textwrap
import threading
from pathlib import Path

from graph_visualizer_web_stack.flask.flask_backend import BackendServer
from werkzeug.serving import is_running_from_reloader


def get_args():
    """Create the argparse parser and return the parsed args."""

    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--debug",
        action="store_true",
        help='Whether or not to run the debug server. Note that for non-debug, you must build the production frontend with "npm run build".',
    )
    parser.add_argument(
        "--graphml-dir",
        type=str,
        action="store",
        help="Directory where libdeps graphml files live. The UI will allow selecting different graphs from this location.",
        default="build/opt",
    )

    parser.add_argument(
        "--frontend-host",
        type=str,
        action="store",
        help="Hostname where the front end will run.",
        default="localhost",
    )

    parser.add_argument(
        "--backend-host",
        type=str,
        action="store",
        help="Hostname where the back end will run.",
        default="localhost",
    )

    parser.add_argument(
        "--frontend-port",
        type=str,
        action="store",
        help="Port where the front end will run.",
        default="3000",
    )

    parser.add_argument(
        "--backend-port",
        type=str,
        action="store",
        help="Port where the back end will run.",
        default="5000",
    )

    parser.add_argument(
        "--memory-limit",
        type=float,
        action="store",
        help="Limit in GB for backend memory usage.",
        default=8.0,
    )

    parser.add_argument(
        "--launch",
        choices=["frontend", "backend", "both"],
        default="both",
        help="Specifies which part of the web service to launch.",
    )

    return parser.parse_args()


def execute_and_read_stdout(cmd, cwd, env):
    """Execute the passed command and yield its output in real time."""

    popen = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, cwd=str(cwd), env=env, universal_newlines=True
    )
    for stdout_line in iter(popen.stdout.readline, ""):
        yield stdout_line
    popen.stdout.close()
    return_code = popen.wait()
    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd)
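
# Illustrative use of the generator above (the command shown is hypothetical):
#
#   for line in execute_and_read_stdout(["npm", "start"], cwd=Path("."), env=os.environ.copy()):
#       print(line, end="")
#
# A non-zero exit status surfaces as subprocess.CalledProcessError once the stream ends.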


def check_node(node_check, cwd):
    """Check the node version and install npm packages."""

    status, output = subprocess.getstatusoutput(node_check)
    if status != 0 or not output.split("\n")[-1].startswith("v14"):
        print(
            textwrap.dedent(f"""\
                Failed to get node version 14 from 'node -v':
                output: '{output}'
                Perhaps run 'source {cwd}/setup_node_env.sh install'""")
        )
        exit(1)

    node_modules = cwd / "node_modules"

    if not node_modules.exists():
        print(
            textwrap.dedent(f"""\
                {node_modules} not found, you need to run 'npm install' in {cwd}
                Perhaps run 'source {cwd}/setup_node_env.sh install'""")
        )
        exit(1)


def start_backend(web_service_info, debug):
    """Start the backend web server."""

    web_service_info["app"].run(
        host=web_service_info["backend_host"], port=web_service_info["backend_port"], debug=debug
    )


def start_frontend_thread(web_service_info, npm_command, debug):
    """Start the frontend, either the debug server or a production build."""
    env = os.environ.copy()
    backend_url = f"http://{web_service_info['backend_host']}:{web_service_info['backend_port']}"
    env["REACT_APP_API_URL"] = backend_url

    if debug:
        env["HOST"] = web_service_info["frontend_host"]
        env["PORT"] = web_service_info["frontend_port"]

        for output in execute_and_read_stdout(npm_command, cwd=web_service_info["cwd"], env=env):
            print(output, end="")
    else:
        for output in execute_and_read_stdout(npm_command, cwd=web_service_info["cwd"], env=env):
            print(output, end="")

        env["PATH"] = "node_modules/.bin:" + env["PATH"]
        react_frontend = subprocess.Popen(
            [
                "http-server",
                "build",
                "-a",
                web_service_info["frontend_host"],
                "-p",
                web_service_info["frontend_port"],
                f"--cors={backend_url}",
            ],
            env=env,
            cwd=str(web_service_info["cwd"]),
        )
        stdout, stderr = react_frontend.communicate()
        print(f"frontend stdout: '{stdout}'\n\nfrontend stderr: '{stderr}'")


def main():
    """Start up the server."""

    args = get_args()

    # TODO: add https command line option and support
    server = BackendServer(
        graphml_dir=args.graphml_dir,
        frontend_url=f"http://{args.frontend_host}:{args.frontend_port}",
        memory_limit=args.memory_limit,
    )

    app = server.get_app()
    cwd = Path(__file__).parent / "graph_visualizer_web_stack"

    web_service_info = {
        "app": app,
        "cwd": cwd,
        "frontend_host": args.frontend_host,
        "frontend_port": args.frontend_port,
        "backend_host": args.backend_host,
        "backend_port": args.backend_port,
    }

    node_check = "node -v"
    npm_start = ["npm", "start"]
    npm_build = ["npm", "run", "build"]

    if not is_running_from_reloader():
        check_node(node_check, cwd)

    frontend_thread = None
    if args.launch in ["frontend", "both"]:
        if args.debug:
            npm_command = npm_start
        else:
            npm_command = npm_build

        frontend_thread = threading.Thread(
            target=start_frontend_thread, args=(web_service_info, npm_command, args.debug)
        )
        frontend_thread.start()

    if args.launch in ["backend", "both"]:
        start_backend(web_service_info, args.debug)

    if not is_running_from_reloader():
        if frontend_thread:
            frontend_thread.join()


if __name__ == "__main__":
    main()
@@ -1,441 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2020 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Flask backend web server.

The backend interacts with the graph_analyzer to perform queries on various libdeps graphs.
"""

import gc
import threading
import time
from collections import OrderedDict, namedtuple
from pathlib import Path

import cxxfilt
import flask
import libdeps.analyzer
import libdeps.graph
import networkx
from flask import request
from flask_cors import CORS
from lxml import etree
from pympler.asizeof import asizeof


class BackendServer:
    """Create a small class for storing variables and state of the backend."""

    def __init__(self, graphml_dir, frontend_url, memory_limit):
        """Create and set up the state variables."""
        self.app = flask.Flask(__name__)
        self.app.config["CORS_HEADERS"] = "Content-Type"
        CORS(self.app, resources={r"/*": {"origins": frontend_url}})

        self.app.add_url_rule("/api/graphs", "return_graph_files", self.return_graph_files)
        self.app.add_url_rule(
            "/api/graphs/<git_hash>/nodes", "return_node_list", self.return_node_list
        )
        self.app.add_url_rule(
            "/api/graphs/<git_hash>/analysis", "return_analyze_counts", self.return_analyze_counts
        )
        self.app.add_url_rule(
            "/api/graphs/<git_hash>/d3", "return_d3", self.return_d3, methods=["POST"]
        )
        self.app.add_url_rule(
            "/api/graphs/<git_hash>/nodes/details",
            "return_node_infos",
            self.return_node_infos,
            methods=["POST"],
        )
        self.app.add_url_rule(
            "/api/graphs/<git_hash>/paths",
            "return_paths_between",
            self.return_paths_between,
            methods=["POST"],
        )

        self.loaded_graphs = {}
        self.total_graph_size = 0
        self.graphml_dir = Path(graphml_dir)
        self.frontend_url = frontend_url
        self.loading_locks = {}
        # Leave 20% headroom below the configured limit.
        self.memory_limit_bytes = memory_limit * (10**9) * 0.8
        self.unloading = False
        self.unloading_lock = threading.Lock()

        self.graph_file_tuple = namedtuple("GraphFile", ["version", "git_hash", "graph_file"])
        self.graph_files = self.get_graphml_files()

    @staticmethod
    def get_dependency_graph(graph):
        """Returns the dependency graph of a given graph."""

        if graph.graph["graph_schema_version"] == 1:
            return networkx.reverse_view(graph)
        else:
            return graph

    @staticmethod
    def get_dependents_graph(graph):
        """Returns the dependents graph of a given graph."""

        if graph.graph["graph_schema_version"] == 1:
            return graph
        else:
            return networkx.reverse_view(graph)
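
    # Orientation note, inferred from the two helpers above: a schema version 1
    # file stores the dependents graph on disk (reverse it to traverse toward
    # dependencies), while version 2+ stores the dependency graph (reverse it to
    # traverse toward dependents).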

    def get_app(self):
        """Return the app instance."""

        return self.app

    def get_graph_build_data(self, graph_file):
        """Fast method for extracting basic build data from the graph file."""

        version = ""
        git_hash = ""
        for _, element in etree.iterparse(
            str(graph_file), tag="{http://graphml.graphdrawing.org/xmlns}data"
        ):
            if element.get("key") == "graph_schema_version":
                version = element.text
            if element.get("key") == "git_hash":
                git_hash = element.text
            element.clear()
            if version and git_hash:
                break
        return self.graph_file_tuple(version, git_hash, graph_file)

    def get_graphml_files(self):
        """Find all graphml files in the target graphml dir."""

        graph_files = OrderedDict()
        for graph_file in self.graphml_dir.glob("**/*.graphml"):
            graph_file_tuple = self.get_graph_build_data(graph_file)
            graph_files[graph_file_tuple.git_hash[:7]] = graph_file_tuple
        return graph_files

    def return_graph_files(self):
        """Prepare the list of graph files for the frontend."""

        data = {"graph_files": []}
        for i, (_, graph_file_data) in enumerate(self.graph_files.items(), start=1):
            data["graph_files"].append(
                {
                    "id": i,
                    "version": graph_file_data.version,
                    "git": graph_file_data.git_hash[:7],
                    "selected": False,
                }
            )
        return data

    def return_node_infos(self, git_hash):
        """Returns details about a set of selected nodes."""

        req_body = request.get_json()
        if "selected_nodes" in req_body.keys():
            selected_nodes = req_body["selected_nodes"]

            if graph := self.load_graph(git_hash):
                dependents_graph = self.get_dependents_graph(graph)
                dependency_graph = self.get_dependency_graph(graph)

                nodeinfo_data = {"nodeInfos": []}

                for node in selected_nodes:
                    nodeinfo_data["nodeInfos"].append(
                        {
                            "id": len(nodeinfo_data["nodeInfos"]),
                            "node": str(node),
                            "name": Path(node).name,
                            "attribs": [
                                {"name": key, "value": value}
                                for key, value in dependents_graph.nodes(data=True)[
                                    str(node)
                                ].items()
                            ],
                            "dependers": [
                                {
                                    "node": depender,
                                    "symbols": dependents_graph[str(node)][depender].get("symbols"),
                                }
                                for depender in dependents_graph[str(node)]
                            ],
                            "dependencies": [
                                {
                                    "node": dependency,
                                    "symbols": dependents_graph[dependency][str(node)].get(
                                        "symbols"
                                    ),
                                }
                                for dependency in dependency_graph[str(node)]
                            ],
                        }
                    )

                return nodeinfo_data, 200
            return {
                "error": "Git commit hash (" + git_hash + ") does not have a matching graph file."
            }, 400
        return {"error": 'Request body does not contain "selected_nodes" attribute.'}, 400

    def return_d3(self, git_hash):
        """Convert the currently selected rows into a format for D3."""

        req_body = request.get_json()
        if "selected_nodes" in req_body.keys():
            selected_nodes = req_body["selected_nodes"]

            if graph := self.load_graph(git_hash):
                dependents_graph = self.get_dependents_graph(graph)
                dependency_graph = self.get_dependency_graph(graph)

                nodes = {}
                links = {}
                links_trans = {}

                def add_node_to_graph_data(node):
                    nodes[str(node)] = {
                        "id": str(node),
                        "name": Path(node).name,
                        "type": dependents_graph.nodes()[str(node)].get("bin_type", ""),
                    }

                def add_link_to_graph_data(source, target, data):
                    links[str(source) + str(target)] = {
                        "source": str(source),
                        "target": str(target),
                        "data": data,
                    }

                for node in selected_nodes:
                    add_node_to_graph_data(node)

                    for libdep in dependency_graph[str(node)]:
                        if dependents_graph[libdep][str(node)].get("direct"):
                            add_node_to_graph_data(libdep)
                            add_link_to_graph_data(
                                node, libdep, dependents_graph[libdep][str(node)]
                            )

                if "transitive_edges" in req_body.keys() and req_body["transitive_edges"] is True:
                    for node in selected_nodes:
                        for libdep in dependency_graph[str(node)]:
                            if str(libdep) in nodes:
                                add_link_to_graph_data(
                                    node, libdep, dependents_graph[libdep][str(node)]
                                )

                if "extra_nodes" in req_body.keys():
                    extra_nodes = req_body["extra_nodes"]
                    for node in extra_nodes:
                        add_node_to_graph_data(node)

                        for libdep in dependency_graph.get_direct_nonprivate_graph()[str(node)]:
                            add_node_to_graph_data(libdep)
                            add_link_to_graph_data(
                                node, libdep, dependents_graph[libdep][str(node)]
                            )

                node_data = {
                    "graphData": {
                        "nodes": [data for node, data in nodes.items()],
                        "links": [data for link, data in links.items()],
                        "links_trans": [data for link, data in links_trans.items()],
                    }
                }
                return node_data, 200
            return {
                "error": "Git commit hash (" + git_hash + ") does not have a matching graph file."
            }, 400
        return {"error": 'Request body does not contain "selected_nodes" attribute.'}, 400

    def return_analyze_counts(self, git_hash):
        """Perform count analysis and send the results back to the frontend."""

        with self.app.test_request_context():
            if graph := self.load_graph(git_hash):
                dependency_graph = self.get_dependency_graph(graph)

                analysis = libdeps.analyzer.counter_factory(
                    dependency_graph,
                    [name[0] for name in libdeps.analyzer.CountTypes.__members__.items()],
                )
                ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis)
                results = ga.get_results()

                graph_data = []
                for i, data in enumerate(results):
                    graph_data.append({"id": i, "type": data, "value": results[data]})
                return {"results": graph_data}, 200
            return {
                "error": "Git commit hash (" + git_hash + ") does not have a matching graph file."
            }, 400

    def return_paths_between(self, git_hash):
        """Gather all the paths in the graph between a fromNode and toNode."""

        message = request.get_json()
        if "fromNode" in message.keys() and "toNode" in message.keys():
            if graph := self.load_graph(git_hash):
                dependency_graph = self.get_dependency_graph(graph)
                analysis = [
                    libdeps.analyzer.GraphPaths(
                        dependency_graph, message["fromNode"], message["toNode"]
                    )
                ]
                ga = libdeps.analyzer.LibdepsGraphAnalysis(analysis=analysis)
                results = ga.get_results()

                paths = results[libdeps.analyzer.DependsReportTypes.GRAPH_PATHS.name][
                    (message["fromNode"], message["toNode"])
                ]
                paths.sort(key=len)
                nodes = set()
                for path in paths:
                    for node in path:
                        nodes.add(node)

                # Need to handle self.send_graph_data(extra_nodes=list(nodes))
                return {
                    "fromNode": message["fromNode"],
                    "toNode": message["toNode"],
                    "paths": paths,
                    "extraNodes": list(nodes),
                }, 200
            return {
                "error": "Git commit hash (" + git_hash + ") does not have a matching graph file."
            }, 400
        return {"error": "Body must contain toNode and fromNode"}, 400

    def return_node_list(self, git_hash):
        """Gather all the nodes in the graph for the node list."""

        with self.app.test_request_context():
            node_data = {"nodes": [], "links": []}
            if graph := self.load_graph(git_hash):
                for node in sorted(graph.nodes()):
                    node_path = Path(node)
                    node_data["nodes"].append(str(node_path))
                return node_data, 200
            return {
                "error": "Git commit hash (" + git_hash + ") does not have a matching graph file."
            }, 400

    def perform_unloading(self, git_hash):
        """Perform the unloading of a graph in a separate thread."""
        if self.total_graph_size > self.memory_limit_bytes:
            while self.total_graph_size > self.memory_limit_bytes:
                self.app.logger.info(
                    f"Current graph memory: {self.total_graph_size / (10**9)} GB, Unloading to get to {self.memory_limit_bytes / (10**9)} GB"
                )

                self.unloading_lock.acquire()

                lru_hash = min(
                    [graph_hash for graph_hash in self.loaded_graphs if graph_hash != git_hash],
                    key=lambda x: self.loaded_graphs[x]["atime"],
                )
                if lru_hash:
                    self.app.logger.info(
                        f"Unloading {[lru_hash]}, last used {round(time.time() - self.loaded_graphs[lru_hash]['atime'], 1)}s ago"
                    )
                    self.total_graph_size -= self.loaded_graphs[lru_hash]["size"]
                    del self.loaded_graphs[lru_hash]
                    del self.loading_locks[lru_hash]
                self.unloading_lock.release()
                gc.collect()
            self.app.logger.info(f"Memory limit satisfied: {self.total_graph_size / (10**9)} GB")
        self.unloading = False

    def unload_graphs(self, git_hash):
        """Unload the least recently used graph when hitting the application memory threshold."""

        if not self.unloading:
            self.unloading = True

            thread = threading.Thread(target=self.perform_unloading, args=(git_hash,))
            thread.daemon = True
            thread.start()
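
    # Descriptive note: self.unloading acts as a cheap guard so that at most one
    # background unloader thread runs at a time; the thread evicts least-recently
    # used graphs (never the one being requested) until usage drops below the limit.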

    def load_graph(self, git_hash):
        """Load the graph into application memory."""

        with self.app.test_request_context():
            self.unload_graphs(git_hash)

            loaded_graph = None

            self.unloading_lock.acquire()
            if git_hash in self.loaded_graphs:
                self.loaded_graphs[git_hash]["atime"] = time.time()
                loaded_graph = self.loaded_graphs[git_hash]["graph"]
            if git_hash not in self.loading_locks:
                self.loading_locks[git_hash] = threading.Lock()
            self.unloading_lock.release()

            self.loading_locks[git_hash].acquire()
            if git_hash not in self.loaded_graphs:
                if git_hash in self.graph_files:
                    file_path = self.graph_files[git_hash].graph_file
                    nx_graph = networkx.read_graphml(file_path)
                    if int(self.get_graph_build_data(file_path).version) > 3:
                        for source, target in nx_graph.edges:
                            try:
                                nx_graph[source][target]["symbols"] = list(
                                    nx_graph[source][target].get("symbols").split("\n")
                                )
                            except AttributeError:
                                nx_graph[source][target]["symbols"] = []
                    else:
                        for source, target in nx_graph.edges:
                            try:
                                nx_graph[source][target]["symbols"] = list(
                                    map(
                                        cxxfilt.demangle,
                                        nx_graph[source][target].get("symbols").split(),
                                    )
                                )
                            except AttributeError:
                                try:
                                    nx_graph[source][target]["symbols"] = list(
                                        nx_graph[source][target].get("symbols").split()
                                    )
                                except AttributeError:
                                    nx_graph[source][target]["symbols"] = []
                    loaded_graph = libdeps.graph.LibdepsGraph(nx_graph)

                    self.loaded_graphs[git_hash] = {
                        "graph": loaded_graph,
                        "size": asizeof(loaded_graph),
                        "atime": time.time(),
                    }
                    self.total_graph_size += self.loaded_graphs[git_hash]["size"]
            else:
                loaded_graph = self.loaded_graphs[git_hash]["graph"]
            self.loading_locks[git_hash].release()

            return loaded_graph
@@ -1,59 +0,0 @@
{
  "name": "graph_visualizer",
  "version": "4.0.0",
  "private": true,
  "engines": {
    "node": ">=14.0.0"
  },
  "engineStrict": true,
  "scripts": {
    "start": "react-scripts start",
    "build": "react-scripts build",
    "start-flask": "cd flask && flask run --no-debugger",
    "test": "react-scripts test",
    "eject": "react-scripts eject"
  },
  "dependencies": {
    "@emotion/react": "^11.11.0",
    "@emotion/styled": "^11.11.0",
    "@material-ui/core": "^5.0.0-alpha.22",
    "@material-ui/icons": "^5.0.0-alpha.22",
    "@material-ui/lab": "^5.0.0-alpha.22",
    "bezier-js": "6.1.3",
    "canvas": "^2.11.2",
    "date-fns": "^2.30.0",
    "dayjs": "^1.11.7",
    "force-graph": "^1.43.1",
    "http-proxy-middleware": "^2.0.6",
    "http-server": "^14.1.1",
    "luxon": "^3.3.0",
    "moment": "^2.29.4",
    "p-limit": "^4.0.0",
    "react": "^18.2",
    "react-dom": "^18.2.0",
    "react-force-graph-2d": "1.25.0",
    "react-force-graph-3d": "1.23.0",
    "react-indiana-drag-scroll": "^2.2.0",
    "react-redux": "^8.0.5",
    "react-resize-aware": "3.1.1",
    "react-resize-detector": "^9.1.0",
    "react-scripts": "^5.0.1",
    "react-split-pane": "^0.1.92",
    "react-virtualized": "^9.22.5",
    "react-window": "^1.8.9",
    "redux": "^4.2.1",
    "typescript": "^5.0.4"
  },
  "browserslist": {
    "production": [
      ">0.2%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 1 chrome version",
      "last 1 firefox version",
      "last 1 safari version"
    ]
  }
}
Binary file not shown. (Before: 7.7 KiB)
@@ -1,40 +0,0 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="utf-8" />
    <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico" />
    <meta name="viewport" content="minimum-scale=1, initial-scale=1, width=device-width" />
    <meta name="theme-color" content="#000000" />
    <!--
      manifest.json provides metadata used when your web app is installed on a
      user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
    -->
    <link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
    <!--
      Notice the use of %PUBLIC_URL% in the tags above.
      It will be replaced with the URL of the `public` folder during the build.
      Only files inside the `public` folder can be referenced from the HTML.

      Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
      work correctly both with client-side routing and a non-root public URL.
      Learn how to configure a non-root public URL by running `npm run build`.
    -->
    <title>Libdeps Graph</title>
    <!-- Fonts to support Material Design -->
    <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700&display=swap" />
  </head>
  <body>
    <noscript>You need to enable JavaScript to run this app.</noscript>
    <div id="root"></div>
    <!--
      This HTML file is a template.
      If you open it directly in the browser, you will see an empty page.

      You can add webfonts, meta tags, or analytics to this file.
      The build step will place the bundled scripts into the <body> tag.

      To begin the development, run `npm start` or `yarn start`.
      To create a production bundle, use `npm run build` or `yarn build`.
    -->
  </body>
</html>
@@ -1,15 +0,0 @@
{
  "short_name": "Libdeps Graph",
  "name": "Libdeps Graph Visualizer Service",
  "icons": [
    {
      "src": "favicon.ico",
      "sizes": "64x64 32x32 24x24 16x16",
      "type": "image/x-icon"
    }
  ],
  "start_url": ".",
  "display": "standalone",
  "theme_color": "#000000",
  "background_color": "#ffffff"
}
@@ -1,49 +0,0 @@
#!/bin/bash
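#
# Usage sketch (based on how the argument is checked below): source this script
# to put node v14 on PATH via nvm, optionally passing one action argument:
#
#   source setup_node_env.sh install   # nvm install + npm install
#   source setup_node_env.sh start     # npm start
#   source setup_node_env.sh build     # npm run build
#   source setup_node_env.sh update    # update nvm itself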

SCRIPTPATH="$( cd "$(dirname "$BASH_SOURCE")" >/dev/null 2>&1 ; pwd -P )"
pushd $SCRIPTPATH > /dev/null

function quit {
    popd > /dev/null
}
trap quit EXIT
trap quit SIGINT
trap quit SIGTERM

export NVM_DIR="$HOME/.nvm"
if [ -s "$NVM_DIR/nvm.sh" ]
then
    \. "$NVM_DIR/nvm.sh"
else
    curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | sh
    \. "$NVM_DIR/nvm.sh"
fi

nvm install 14

if [ "$1" = "install" ]
then
    npm install
fi

if [ "$1" = "start" ]
then
    npm start
fi

if [ "$1" = "build" ]
then
    npm run build
fi

if [ "$1" = "update" ]
then
    set -u
    git -C "$NVM_DIR" fetch --tags
    TAG=$(git -C "$NVM_DIR" describe --tags `git -C "$NVM_DIR" rev-list --tags --max-count=1`)
    echo "Checking out tag $TAG..."
    git -C "$NVM_DIR" checkout "$TAG"

    . "$NVM_DIR/nvm.sh"
fi
popd > /dev/null
@@ -1,105 +0,0 @@
import React from "react";
|
||||
import { connect } from "react-redux";
|
||||
import { makeStyles, withStyles } from "@material-ui/core/styles";
|
||||
import Typography from "@material-ui/core/Typography";
|
||||
import ExpandMoreIcon from "@material-ui/icons/ExpandMore";
|
||||
import Paper from "@material-ui/core/Paper";
|
||||
import MuiAccordion from "@material-ui/core/Accordion";
|
||||
import MuiAccordionSummary from "@material-ui/core/AccordionSummary";
|
||||
import MuiAccordionDetails from "@material-ui/core/AccordionDetails";
|
||||
|
||||
import { getSelected } from "./redux/store";
|
||||
|
||||
import GraphInfo from "./GraphInfo";
|
||||
import GraphPaths from "./GraphPaths";
|
||||
import LoadingBar from "./LoadingBar";
|
||||
|
||||
const useStyles = makeStyles((theme) => ({
|
||||
root: {
|
||||
width: "100%",
|
||||
},
|
||||
heading: {
|
||||
fontSize: theme.typography.pxToRem(15),
|
||||
fontWeight: theme.typography.fontWeightRegular,
|
||||
},
|
||||
}));
|
||||
|
||||
const Accordion = withStyles({
|
||||
root: {
|
||||
border: "1px solid rgba(0, 0, 0, .125)",
|
||||
boxShadow: "none",
|
||||
"&:not(:last-child)": {
|
||||
borderBottom: 0,
|
||||
},
|
||||
"&:before": {
|
||||
display: "none",
|
||||
},
|
||||
"&$expanded": {
|
||||
margin: "auto",
|
||||
},
|
||||
},
|
||||
expanded: {},
|
||||
})(MuiAccordion);
|
||||
|
||||
const AccordionSummary = withStyles({
|
||||
root: {
|
||||
backgroundColor: "rgba(0, 0, 0, .03)",
|
||||
borderBottom: "1px solid rgba(0, 0, 0, .125)",
|
||||
marginBottom: -1,
|
||||
minHeight: 56,
|
||||
"&$expanded": {
|
||||
minHeight: 56,
|
||||
},
|
||||
},
|
||||
content: {
|
||||
"&$expanded": {
|
||||
margin: "12px 0",
|
||||
},
|
||||
},
|
||||
expanded: {},
|
||||
})(MuiAccordionSummary);
|
||||
|
||||
const AccordionDetails = withStyles((theme) => ({
|
||||
root: {
|
||||
padding: theme.spacing(2),
|
||||
},
|
||||
}))(MuiAccordionDetails);
|
||||
|
||||
const AlgorithmExpander = ({ loading, width, transPathFrom, transPathTo }) => {
|
||||
const classes = useStyles();
|
||||
|
||||
return (
|
||||
<div className={classes.root}>
|
||||
<LoadingBar loading={loading} height={"100%"}>
|
||||
<Paper style={{ maxHeight: "82vh", overflow: "auto" }}>
|
||||
<Accordion>
|
||||
<AccordionSummary
|
||||
expandIcon={<ExpandMoreIcon />}
|
||||
aria-controls="panel1a-content"
|
||||
id="panel1a-header"
|
||||
>
|
||||
<Typography className={classes.heading}>Counts</Typography>
|
||||
</AccordionSummary>
|
||||
<AccordionDetails>
|
||||
<GraphInfo datawidth={width} />
|
||||
</AccordionDetails>
|
||||
</Accordion>
|
||||
<Accordion>
|
||||
<AccordionSummary
|
||||
expandIcon={<ExpandMoreIcon />}
|
||||
aria-controls="panel1a-content"
|
||||
id="panel1a-header"
|
||||
>
|
||||
<Typography className={classes.heading}>Graph Paths</Typography>
|
||||
</AccordionSummary>
|
||||
<AccordionDetails>
|
||||
<GraphPaths datawidth={width} transPathFrom={transPathFrom} transPathTo={transPathTo}/>
|
||||
</AccordionDetails>
|
||||
</Accordion>
|
||||
</Paper>
|
||||
</LoadingBar>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default connect(getSelected)(AlgorithmExpander);
|
||||
|
|
@@ -1,57 +0,0 @@
import React from "react";
|
||||
import SplitPane from "react-split-pane";
|
||||
|
||||
import theme from "./theme";
|
||||
|
||||
import GraphCommitDisplay from "./GraphCommitDisplay";
|
||||
import GraphInfoTabs from "./GraphInfoTabs";
|
||||
import DrawGraph from "./DrawGraph";
|
||||
|
||||
const resizerStyle = {
|
||||
background: theme.palette.text.secondary,
|
||||
width: "1px",
|
||||
cursor: "col-resize",
|
||||
margin: "1px",
|
||||
padding: "1px",
|
||||
height: "100%",
|
||||
};
|
||||
|
||||
const topPaneStyle = {
|
||||
height: "100vh",
|
||||
overflow: "visible",
|
||||
};
|
||||
|
||||
export default function App() {
|
||||
const [infosize, setInfosize] = React.useState(450);
|
||||
const [drawsize, setDrawsize] = React.useState(
|
||||
window.screen.width - infosize
|
||||
);
|
||||
|
||||
React.useEffect(() => {
|
||||
setInfosize(window.screen.width - drawsize);
|
||||
}, [drawsize]);
|
||||
|
||||
return (
|
||||
<SplitPane
|
||||
pane1Style={{ height: "12%" }}
|
||||
pane2Style={{ height: "88%" }}
|
||||
split="horizontal"
|
||||
style={topPaneStyle}
|
||||
>
|
||||
<GraphCommitDisplay />
|
||||
<SplitPane
|
||||
split="vertical"
|
||||
minSize={100}
|
||||
style={{ position: "relative" }}
|
||||
defaultSize={infosize}
|
||||
pane1Style={{ height: "100%" }}
|
||||
pane2Style={{ height: "100%", width: "100%" }}
|
||||
resizerStyle={resizerStyle}
|
||||
onChange={(size) => setDrawsize(window.screen.width - size)}
|
||||
>
|
||||
<GraphInfoTabs width={infosize} />
|
||||
<DrawGraph size={drawsize} />
|
||||
</SplitPane>
|
||||
</SplitPane>
|
||||
);
|
||||
}
|
||||
|
|
@@ -1,266 +0,0 @@
import React from "react";
|
||||
import { connect } from "react-redux";
|
||||
import clsx from "clsx";
|
||||
import { AutoSizer, Column, Table } from "react-virtualized";
|
||||
import "react-virtualized/styles.css"; // only needs to be imported once
|
||||
import { withStyles } from "@material-ui/core/styles";
|
||||
import TableCell from "@material-ui/core/TableCell";
|
||||
import { Checkbox } from "@material-ui/core";
|
||||
import Typography from "@material-ui/core/Typography";
|
||||
|
||||
import { getRows } from "./redux/store";
|
||||
import { updateSelected } from "./redux/nodes";
|
||||
import { setGraphData } from "./redux/graphData";
|
||||
import { setNodeInfos } from "./redux/nodeInfo";
|
||||
import { setLinks } from "./redux/links";
|
||||
import { setLinksTrans } from "./redux/linksTrans";
|
||||
|
||||
const {REACT_APP_API_URL} = process.env;
|
||||
|
||||
function componentToHex(c) {
|
||||
var hex = c.toString(16);
|
||||
return hex.length == 1 ? "0" + hex : hex;
|
||||
}
|
||||
|
||||
function rgbToHex(r, g, b) {
|
||||
return "#" + componentToHex(r) + componentToHex(g) + componentToHex(b);
|
||||
}
|
||||
|
||||
function hexToRgb(hex) {
|
||||
// Expand shorthand form (e.g. "03F") to full form (e.g. "0033FF")
|
||||
var shorthandRegex = /^#?([a-f\d])([a-f\d])([a-f\d])$/i;
|
||||
hex = hex.replace(shorthandRegex, function (m, r, g, b) {
|
||||
return r + r + g + g + b + b;
|
||||
});
|
||||
|
||||
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
|
||||
return result
|
||||
? {
|
||||
r: parseInt(result[1], 16),
|
||||
g: parseInt(result[2], 16),
|
||||
b: parseInt(result[3], 16),
|
||||
}
|
||||
: null;
|
||||
}
|
||||
|
||||
function incrementPallete(palleteColor, increment) {
|
||||
var rgb = hexToRgb(palleteColor);
|
||||
rgb.r += increment;
|
||||
rgb.g += increment;
|
||||
rgb.b += increment;
|
||||
return rgbToHex(rgb.r, rgb.g, rgb.b);
|
||||
}
|
||||
|
||||
const styles = (theme) => ({
|
||||
flexContainer: {
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
},
|
||||
table: {
|
||||
// temporary right-to-left patch, waiting for
|
||||
// https://github.com/bvaughn/react-virtualized/issues/454
|
||||
"& .ReactVirtualized__Table__headerRow": {
|
||||
flip: false,
|
||||
paddingRight: theme.direction === "rtl" ? "0 !important" : undefined,
|
||||
},
|
||||
},
|
||||
tableRowOdd: {
|
||||
backgroundColor: incrementPallete(theme.palette.grey[800], 10),
|
||||
},
|
||||
tableRowEven: {
|
||||
backgroundColor: theme.palette.grey[800],
|
||||
},
|
||||
tableRowHover: {
|
||||
"&:hover": {
|
||||
backgroundColor: theme.palette.grey[600],
|
||||
},
|
||||
},
|
||||
tableCell: {
|
||||
flex: 1,
|
||||
},
|
||||
noClick: {
|
||||
cursor: "initial",
|
||||
},
|
||||
});
|
||||
|
||||
const DataGrid = ({
|
||||
rowGetter,
|
||||
rowCount,
|
||||
nodes,
|
||||
rowHeight,
|
||||
headerHeight,
|
||||
columns,
|
||||
onNodeClicked,
|
||||
updateSelected,
|
||||
classes,
|
||||
setGraphData,
|
||||
setLinks,
|
||||
setLinksTrans,
|
||||
selectedGraph,
|
||||
setNodeInfos,
|
||||
selectedNodes,
|
||||
searchedNodes,
|
||||
showTransitive
|
||||
}) => {
|
||||
const [checkBoxes, setCheckBoxes] = React.useState([]);
|
||||
|
||||
React.useEffect(() => {
|
||||
setCheckBoxes(searchedNodes);
|
||||
}, [searchedNodes]);
|
||||
|
||||
function newGraphData() {
|
||||
let gitHash = selectedGraph;
|
||||
if (gitHash) {
|
||||
let postData = {
|
||||
"selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node),
|
||||
"transitive_edges": showTransitive
|
||||
};
|
||||
fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify(postData)
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
setGraphData(data.graphData);
|
||||
setLinks(data.graphData.links);
|
||||
setLinksTrans(data.graphData.links_trans);
|
||||
});
|
||||
fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify(postData)
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
setNodeInfos(data.nodeInfos);
|
||||
});
|
||||
}
|
||||
}
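
  // Shape of the request body both fetches above send (keys taken from postData;
  // the values shown are only an illustration):
  //
  //   {
  //     "selected_nodes": ["mongo/base/lib1.so"],
  //     "transitive_edges": false
  //   }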

  const getRowClassName = ({ index }) => {
    return clsx(
      index % 2 == 0 ? classes.tableRowEven : classes.tableRowOdd,
      classes.flexContainer,
      {
        [classes.tableRowHover]: index !== -1,
      }
    );
  };

  const cellRenderer = ({ cellData, columnIndex, rowIndex }) => {
    var finalCellData;
    var style = { height: rowHeight, padding: "0px" };
    if (cellData == "checkbox") {
      style["justifyContent"] = "space-evenly";
      finalCellData = (
        <Checkbox
          checked={checkBoxes[rowIndex] ? checkBoxes[rowIndex].selected : false}
          onChange={(event) => {
            setCheckBoxes(
              checkBoxes.map((checkbox, index) => {
                if (index == rowIndex) {
                  checkbox.selected = event.target.checked;
                }
                return checkbox;
              })
            );
            if (checkBoxes[rowIndex].selected != event.target.checked) {
              updateSelected({ index: rowIndex, value: event.target.checked });
            }
            newGraphData();
          }}
        />
      );
    } else {
      finalCellData = cellData;
    }

    return (
      <TableCell
        component="div"
        className={clsx(
          classes.tableCell,
          classes.flexContainer,
          classes.noClick
        )}
        variant="body"
        onClick={onNodeClicked}
        style={style}
      >
        {finalCellData}
      </TableCell>
    );
  };

  const headerRenderer = ({ label, columnIndex }) => {
    return (
      <TableCell
        component="div"
        className={clsx(
          classes.tableCell,
          classes.flexContainer,
          classes.noClick
        )}
        variant="head"
        style={{ height: headerHeight, padding: "0px" }}
      >
        <Typography
          style={{ width: "100%" }}
          align="left"
          variant="caption"
          component="h2"
        >
          {label}
        </Typography>
      </TableCell>
    );
  };

  return (
    <AutoSizer>
      {({ height, width }) => (
        <Table
          height={height}
          width={width}
          rowCount={rowCount}
          rowHeight={rowHeight}
          gridStyle={{
            direction: "inherit",
          }}
          size={"small"}
          rowGetter={rowGetter}
          className={clsx(classes.table, classes.noClick)}
          rowClassName={getRowClassName}
          headerHeight={headerHeight}
        >
          {columns.map(({ dataKey, ...other }, index) => {
            return (
              <Column
                key={dataKey}
                headerRenderer={(headerProps) =>
                  headerRenderer({
                    ...headerProps,
                    columnIndex: index,
                  })
                }
                className={classes.flexContainer}
                cellRenderer={cellRenderer}
                dataKey={dataKey}
                {...other}
              />
            );
          })}
        </Table>
      )}
    </AutoSizer>
  );
};

export default connect(getRows, { updateSelected, setGraphData, setNodeInfos, setLinks, setLinksTrans })(
  withStyles(styles)(DataGrid)
);
@@ -1,499 +0,0 @@
import React, { useRef, useEffect } from "react";
import * as THREE from "three";
import { connect } from "react-redux";
import ForceGraph2D from "react-force-graph-2d";
import ForceGraph3D from "react-force-graph-3d";
import SwitchComponents from "./SwitchComponent";
import Button from "@material-ui/core/Button";
import TextField from "@material-ui/core/TextField";
import FormControlLabel from "@material-ui/core/FormControlLabel";
import Checkbox from "@material-ui/core/Checkbox";

import theme from "./theme";
import { getGraphData } from "./redux/store";
import { updateCheckbox } from "./redux/nodes";
import { setFindNode } from "./redux/findNode";
import { setGraphData } from "./redux/graphData";
import { setNodeInfos } from "./redux/nodeInfo";
import { setLinks } from "./redux/links";
import { setLinksTrans } from "./redux/linksTrans";
import { setShowTransitive } from "./redux/showTransitive";
import LoadingBar from "./LoadingBar";

const { REACT_APP_API_URL } = process.env;

const handleFindNode = (node_value, graphData, activeComponent, forceRef) => {
  var targetNode = null;
  if (graphData) {
    for (var i = 0; i < graphData.nodes.length; i++) {
      var node = graphData.nodes[i];
      if (node.name == node_value || node.id == node_value) {
        targetNode = node;
        break;
      }
    }
    if (targetNode != null) {
      if (activeComponent == "3D") {
        if (forceRef.current != null) {
          forceRef.current.centerAt(targetNode.x, targetNode.y, 2000);
          forceRef.current.zoom(6, 1000);
        }
      } else {
        const distance = 100;
        const distRatio =
          1 + distance / Math.hypot(targetNode.x, targetNode.y, targetNode.z);
        if (forceRef.current != null) {
          forceRef.current.cameraPosition(
            {
              x: targetNode.x * distRatio,
              y: targetNode.y * distRatio,
              z: targetNode.z * distRatio,
            }, // new position
            targetNode, // lookAt ({ x, y, z })
            3000 // ms transition duration
          );
        }
      }
    }
  }
};
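
// Note on the mode names below (an inference from how this file fits together,
// not from any comment in the original): <ForceGraph2D> is registered with
// name="3D" and <ForceGraph3D> with name="2D", so the toggle Button's label,
// which renders the current activeComponent string, shows the renderer you
// would switch *to*. handleFindNode above is consistent with this: when
// activeComponent is "3D" the visible renderer is the 2D one, whose API is
// centerAt()/zoom(); otherwise it is the 3D one, whose API is cameraPosition().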

const DrawGraph = ({
  size,
  graphData,
  nodes,
  links,
  loading,
  graphPaths,
  updateCheckbox,
  findNode,
  setFindNode,
  setGraphData,
  setNodeInfos,
  selectedGraph,
  setLinks,
  setLinksTrans,
  setShowTransitive,
  showTransitive,
}) => {
  const [activeComponent, setActiveComponent] = React.useState("2D");
  const [pathNodes, setPathNodes] = React.useState({});
  const [pathEdges, setPathEdges] = React.useState([]);
  const forceRef = useRef(null);

  const PARTICLE_SIZE = 5;

  React.useEffect(() => {
    handleFindNode(findNode, graphData, activeComponent, forceRef);
    setFindNode("");
  }, [findNode, graphData, activeComponent, forceRef]);

  React.useEffect(() => {
    newGraphData();
  }, [showTransitive]);

  const selectedEdge = links.filter(link => link.selected == true)[0];
  const selectedNodes = nodes.filter(node => node.selected == true).map(node => node.node);

  React.useEffect(() => {
    setPathNodes({ fromNode: graphPaths.fromNode, toNode: graphPaths.toNode });
    var paths = Array();
    for (var path = 0; path < graphPaths.paths.length; path++) {
      var pathArr = Array();
      for (var i = 0; i < graphPaths.paths[path].length; i++) {
        if (i == 0) {
          continue;
        }
        pathArr.push({
          source: graphPaths.paths[path][i - 1],
          target: graphPaths.paths[path][i],
        });
      }
      paths.push(pathArr);
    }
    setPathEdges(paths);
  }, [graphPaths]);

  React.useEffect(() => {
    if (forceRef.current != null) {
      if (activeComponent == '3D') {
        forceRef.current.d3Force("charge").strength(-2000);
      }
      else {
        forceRef.current.d3Force("charge").strength(-10000);
      }
    }
  }, [forceRef.current, activeComponent]);

  function newGraphData() {
    let gitHash = selectedGraph;
    if (gitHash) {
      let postData = {
        "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node),
        "transitive_edges": showTransitive
      };
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          setGraphData(data.graphData);
          setLinks(data.graphData.links);
          setLinksTrans(data.graphData.links_trans);
        });
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          setNodeInfos(data.nodeInfos);
        });
    }
  }

  const paintRing = React.useCallback(
    (node, ctx) => {
      // add ring just for highlighted nodes
      ctx.beginPath();
      ctx.arc(node.x, node.y, 7 * 1.4, 0, 2 * Math.PI, false);
      if (node.id == pathNodes.fromNode) {
        ctx.fillStyle = "blue";
      } else if (node.id == pathNodes.toNode) {
        ctx.fillStyle = "red";
      } else {
        ctx.fillStyle = "green";
      }
      ctx.fill();
    },
    [pathNodes]
  );

  function colorNodes(node) {
    switch (node.type) {
      case "SharedLibrary":
        return "#e6ed11"; // yellow
      case "Program":
        return "#1120ed"; // blue
      case "shim":
        return "#800303"; // dark red
      default:
        return "#5a706f"; // grey
    }
  }

  function isSameEdge(edgeA, edgeB) {
    if (edgeA.source.id && edgeA.target.id) {
      if (edgeB.source.id && edgeB.target.id) {
        return (edgeA.source.id == edgeB.source.id &&
          edgeA.target.id == edgeB.target.id);
      }
    }
    if (edgeA.source == edgeB.source &&
      edgeA.target == edgeB.target) {
      return true;
    }
    return false;
  }

  return (
    <LoadingBar loading={loading} height={"100%"}>
      <Button
        onClick={() => {
          if (activeComponent == "2D") {
            setActiveComponent("3D");
          } else {
            setActiveComponent("2D");
          }
        }}
      >
        {activeComponent}
      </Button>
      <TextField
        size="small"
        label="Find Node"
        onChange={(event) => {
          handleFindNode(
            event.target.value,
            graphData,
            activeComponent,
            forceRef
          );
        }}
      />
      <FormControlLabel
        style={{ marginInline: 5 }}
        control={<Checkbox
          style={{ marginInline: 10 }}
          checked={showTransitive}
          onClick={() => setShowTransitive(!showTransitive)}
        />}
        label="Show Viewable Transitive Edges"
      />
      <SwitchComponents active={activeComponent}>
        <ForceGraph2D
          name="3D"
          width={size}
          dagMode="radialout"
          dagLevelDistance={50}
          graphData={graphData}
          ref={forceRef}
          nodeColor={colorNodes}
          nodeOpacity={1}
          backgroundColor={theme.palette.secondary.dark}
          linkDirectionalArrowLength={6}
          linkDirectionalArrowRelPos={1}
          linkDirectionalParticles={(d) => {
            if (graphPaths.selectedPath >= 0) {
              for (
                var i = 0;
                i < pathEdges[graphPaths.selectedPath].length;
                i++
              ) {
                if (
                  pathEdges[graphPaths.selectedPath][i].source == d.source.id &&
                  pathEdges[graphPaths.selectedPath][i].target == d.target.id
                ) {
                  return PARTICLE_SIZE;
                }
              }
            }
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, d)) {
                return PARTICLE_SIZE;
              }
            }
            return 0;
          }}
          linkDirectionalParticleSpeed={(d) => {
            return 0.01;
          }}
          nodeCanvasObjectMode={(node) => {
            if (selectedNodes.includes(node.id)) {
              return "before";
            }
          }}
          linkLineDash={(d) => {
            if (d.data.direct) {
              return [];
            }
            return [5, 3];
          }}
          linkColor={(d) => {
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, d)) {
                return "#ED7811";
              }
            }
            if (graphPaths.selectedPath >= 0) {
              for (
                var i = 0;
                i < pathEdges[graphPaths.selectedPath].length;
                i++
              ) {
                if (
                  pathEdges[graphPaths.selectedPath][i].source == d.source.id &&
                  pathEdges[graphPaths.selectedPath][i].target == d.target.id
                ) {
                  return "#12FF19";
                }
              }
            }
            return "#FAFAFA";
          }}
          linkDirectionalParticleWidth={6}
          linkWidth={(d) => {
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, d)) {
                return 2;
              }
            }
            if (graphPaths.selectedPath >= 0) {
              for (
                var i = 0;
                i < pathEdges[graphPaths.selectedPath].length;
                i++
              ) {
                if (
                  pathEdges[graphPaths.selectedPath][i].source == d.source.id &&
                  pathEdges[graphPaths.selectedPath][i].target == d.target.id
                ) {
                  return 2;
                }
              }
            }
            return 1;
          }}
          onLinkClick={(link, event) => {
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, link)) {
                setLinks(
                  links.map((temp_link) => {
                    temp_link.selected = false;
                    return temp_link;
                  })
                );
                return;
              }
            }
            setLinks(
              links.map((temp_link, index) => {
                if (index == link.index) {
                  temp_link.selected = true;
                } else {
                  temp_link.selected = false;
                }
                return temp_link;
              })
            );
          }}
          nodeRelSize={7}
          nodeCanvasObject={paintRing}
          onNodeClick={(node, event) => {
            updateCheckbox({ node: node.id, value: "flip" });
            newGraphData();
          }}
        />
        <ForceGraph3D
          name="2D"
          width={size}
          dagMode="radialout"
          graphData={graphData}
          nodeColor={colorNodes}
          nodeOpacity={1}
          nodeThreeObject={(node) => {
            if (!selectedNodes.includes(node.id)) {
              return new THREE.Mesh(
                new THREE.SphereGeometry(5, 5, 5),
                new THREE.MeshLambertMaterial({
                  color: colorNodes(node),
                  transparent: true,
                  opacity: 0.2,
                })
              );
            }
          }}
          onNodeClick={(node, event) => {
            updateCheckbox({ node: node.id, value: "flip" });
            newGraphData();
          }}
          linkColor={(d) => {
            if (graphPaths.selectedPath >= 0) {
              for (
                var i = 0;
                i < pathEdges[graphPaths.selectedPath].length;
                i++
              ) {
                if (
                  pathEdges[graphPaths.selectedPath][i].source == d.source.id &&
                  pathEdges[graphPaths.selectedPath][i].target == d.target.id
                ) {
                  return "#12FF19";
                }
              }
            }
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, d)) {
                return "#ED7811";
              }
            }
            if (d.data.direct == false) {
              return "#303030";
            }
            return "#FFFFFF";
          }}
          linkDirectionalParticleWidth={7}
          linkWidth={(d) => {
            if (graphPaths.selectedPath >= 0) {
              for (
                var i = 0;
                i < pathEdges[graphPaths.selectedPath].length;
                i++
              ) {
                if (
                  pathEdges[graphPaths.selectedPath][i].source == d.source.id &&
                  pathEdges[graphPaths.selectedPath][i].target == d.target.id
                ) {
                  return 3;
                }
              }
            }
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, d)) {
                return 3;
              }
            }
            return 1;
          }}
          linkDirectionalParticles={(d) => {
            if (graphPaths.selectedPath >= 0) {
              for (
                var i = 0;
                i < pathEdges[graphPaths.selectedPath].length;
                i++
              ) {
                if (
                  pathEdges[graphPaths.selectedPath][i].source == d.source.id &&
                  pathEdges[graphPaths.selectedPath][i].target == d.target.id
                ) {
                  return PARTICLE_SIZE;
                }
              }
            }
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, d)) {
                return PARTICLE_SIZE;
              }
            }
            return 0;
          }}
          linkDirectionalParticleSpeed={(d) => {
            return 0.01;
          }}
          linkDirectionalParticleResolution={10}
          linkOpacity={0.6}
          onLinkClick={(link, event) => {
            if (selectedEdge) {
              if (isSameEdge(selectedEdge, link)) {
                setLinks(
                  links.map((temp_link) => {
                    temp_link.selected = false;
                    return temp_link;
                  })
                );
                return;
              }
            }
            setLinks(
              links.map((temp_link, index) => {
                if (index == link.index) {
                  temp_link.selected = true;
                } else {
                  temp_link.selected = false;
                }
                return temp_link;
              })
            );
          }}
          nodeRelSize={7}
          backgroundColor={theme.palette.secondary.dark}
          linkDirectionalArrowLength={3.5}
          linkDirectionalArrowRelPos={1}
          ref={forceRef}
        />
      </SwitchComponents>
    </LoadingBar>
  );
};

export default connect(getGraphData, { setFindNode, updateCheckbox, setGraphData, setNodeInfos, setLinks, setLinksTrans, setShowTransitive })(
  DrawGraph
);
@@ -1,261 +0,0 @@
import React, { useState } from "react";
import clsx from "clsx";
import { connect } from "react-redux";
import { getEdges } from "./redux/store";
import { setFindNode } from "./redux/findNode";
import { setLinks } from "./redux/links";
import { setGraphData } from "./redux/graphData";
import { setSelectedPath } from "./redux/graphPaths";
import { AutoSizer, Column, Table } from "react-virtualized";
import TableCell from "@material-ui/core/TableCell";
import Typography from "@material-ui/core/Typography";
import Tooltip from '@material-ui/core/Tooltip';
import GraphPaths from "./GraphPaths";

import { makeStyles, withStyles } from "@material-ui/core/styles";

import LoadingBar from "./LoadingBar";
import TextField from "@material-ui/core/TextField";
import { List, ListItemText, Paper, Button } from "@material-ui/core";

const columns = [
  { dataKey: "type", label: "Type", width: 30 },
  { dataKey: "source", label: "From", width: 180 },
  { dataKey: "to", label: "➔", width: 40 },
  { dataKey: "target", label: "To", width: 180 },
];

const visibilityTypes = ['Global', 'Public', 'Private', 'Interface'];

function componentToHex(c) {
  var hex = c.toString(16);
  return hex.length == 1 ? "0" + hex : hex;
}

function rgbToHex(r, g, b) {
  return "#" + componentToHex(r) + componentToHex(g) + componentToHex(b);
}

function hexToRgb(hex) {
  // Expand shorthand form (e.g. "03F") to full form (e.g. "0033FF")
  var shorthandRegex = /^#?([a-f\d])([a-f\d])([a-f\d])$/i;
  hex = hex.replace(shorthandRegex, function (m, r, g, b) {
    return r + r + g + g + b + b;
  });

  var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
  return result
    ? {
        r: parseInt(result[1], 16),
        g: parseInt(result[2], 16),
        b: parseInt(result[3], 16),
      }
    : null;
}

function incrementPallete(palleteColor, increment) {
  var rgb = hexToRgb(palleteColor);
  rgb.r += increment;
  rgb.g += increment;
  rgb.b += increment;
  return rgbToHex(rgb.r, rgb.g, rgb.b);
}

const styles = (theme) => ({
  flexContainer: {
    display: "flex",
    alignItems: "center",
  },
  table: {
    // temporary right-to-left patch, waiting for
    // https://github.com/bvaughn/react-virtualized/issues/454
    "& .ReactVirtualized__Table__headerRow": {
      flip: false,
      paddingRight: theme.direction === "rtl" ? "0 !important" : undefined,
    },
  },
  tableRowOdd: {
    backgroundColor: incrementPallete(theme.palette.grey[800], 10),
  },
  tableRowEven: {
    backgroundColor: theme.palette.grey[800],
  },
  tableRowHover: {
    "&:hover": {
      backgroundColor: theme.palette.grey[600],
    },
  },
  tableCell: {
    flex: 1,
  },
  noClick: {
    cursor: "initial",
  },
});

const EdgeList = ({ selectedGraph, links, setLinks, linksTrans, loading, setFindNode, classes, setTransPath }) => {
  const [searchTerm, setSearchTerm] = useState('');

  const selectedLinks = links.filter(link => link.selected);

  function searchedLinks() {
    if (searchTerm == '') {
      return links;
    }
    return links.filter(link => {
      if (link.source.name && link.target.name) {
        return link.source.name.indexOf(searchTerm) > -1 || link.target.name.indexOf(searchTerm) > -1;
      }
    });
  }

  function handleRowClick(event) {
    setLinks(
      links.map((temp_link, index) => {
        if (index == searchedLinks()[event.index].index) {
          temp_link.selected = !temp_link.selected;
        } else {
          temp_link.selected = false;
        }
        return temp_link;
      })
    );
    setTransPath(event, '', '');
  }

  function handleSearchTermChange(event) {
    setSearchTerm(event.target.value);
|
||||
}
|
||||
|
||||
function reduceNodeName(node) {
|
||||
if (node.name) {
|
||||
return node.name;
|
||||
}
|
||||
return node.substring(node.lastIndexOf('/') + 1);
|
||||
}
|
||||
|
||||
const getRowClassName = ({ index }) => {
|
||||
return clsx(
|
||||
      index % 2 == 0 ? classes.tableRowEven : classes.tableRowOdd,
      classes.flexContainer,
      {
        [classes.tableRowHover]: index !== -1,
      }
    );
  };

  const cellRenderer = ({ cellData, columnIndex, rowIndex }) => {
    return (
      <TableCell
        component="div"
      >
        { columnIndex == 0 ?
          ( searchedLinks()[rowIndex].data?.direct ?
            <Tooltip title="DIRECT" placement="right" arrow><p>D</p></Tooltip>
            :
            <Tooltip title="TRANSITIVE" placement="right" arrow><p>T</p></Tooltip>
          )
          :
          ""
        }
        { columnIndex == 1 ? reduceNodeName(searchedLinks()[rowIndex].source) : "" }
        { columnIndex == 2 ? (searchedLinks()[rowIndex].selected ? <span style={{ color: "#ED7811" }}>➔</span> : "➔") : "" }
        { columnIndex == 3 ? reduceNodeName(searchedLinks()[rowIndex].target) : "" }
      </TableCell>
    );
  };

  const headerRenderer = ({ label, columnIndex }) => {
    return (
      <TableCell
        component="div"
      >
        <Typography
          style={{ width: "100%" }}
          align="left"
          variant="caption"
          component="h2"
        >
          {label}
        </Typography>
      </TableCell>
    );
  };

  return (
    <LoadingBar loading={loading} height={"95%"}>
      <TextField
        fullWidth
        onChange={handleSearchTermChange}
        onClick={(event) => event.target.select()}
        label="Search for Edge"
      />
      <div style={{ height: "30%" }}>
        <AutoSizer>
          {({ height, width }) => (
            <Table
              height={height}
              width={width}
              rowCount={searchedLinks().length}
              rowGetter={({ index }) => searchedLinks()[index]}
              rowHeight={25}
              onRowClick={handleRowClick}
              gridStyle={{
                direction: "inherit",
              }}
              size={"small"}
              rowClassName={getRowClassName}
              headerHeight={35}
            >
              {columns.map(({ dataKey, ...other }, index) => {
                return (
                  <Column
                    key={dataKey}
                    headerRenderer={(headerProps) =>
                      headerRenderer({
                        ...headerProps,
                        columnIndex: index,
                      })
                    }
                    cellRenderer={cellRenderer}
                    dataKey={dataKey}
                    {...other}
                  />
                );
              })}
            </Table>
          )}
        </AutoSizer>
      </div>
      <Paper style={{ border: "2px solid", height: "55%", padding: 5, overflow: 'auto' }} hidden={(selectedLinks.length <= 0)}>
        <List dense={true} style={{ padding: 5 }}>
          <Paper elevation={3} style={{ backgroundColor: "rgba(33, 33, 33)", padding: 15 }}>
            <h4 style={{ margin: 0 }}>{ selectedLinks[0]?.source.name } ➔ { selectedLinks[0]?.target.name }</h4>
            <ListItemText primary={ <span><strong>Type:</strong> { selectedLinks[0]?.data.direct ? "Direct" : "Transitive" }</span> }/>
            <ListItemText primary={ <span><strong>Visibility:</strong> { visibilityTypes[selectedLinks[0]?.data.visibility] }</span> }/>
            <ListItemText primary={ <span><strong>Source:</strong> { selectedLinks[0]?.source.id }</span> } secondary={ selectedLinks[0]?.source.type }/>
            <ListItemText primary={ <span><strong>Target:</strong> { selectedLinks[0]?.target.id }</span> } secondary={ selectedLinks[0]?.target.type }/>
            <div>
              <ListItemText primary={ <strong>Symbol Dependencies: { selectedLinks[0]?.data.symbols.length }</strong> }/>
              { selectedLinks[0]?.data.symbols.map((symbol, index) => {
                return (
                  <span key={index}>
                    <ListItemText secondary={ symbol } style={{ textIndent: "-1em", marginLeft: "1em", overflowWrap: "break-word" }}></ListItemText>
                    <hr style={{ border: "0px", borderTop: "0.5px solid rgba(255, 255, 255, .2)", marginTop: "2px", marginBottom: "2px" }}></hr>
                  </span>
                );
              })
              }
            </div>
            <div hidden={(selectedLinks[0]?.data.direct ? "Direct" : "Transitive") == "Direct"}>
              <br></br>
              <Button variant="contained" onClick={ (event) => setTransPath(event, selectedLinks[0]?.source.id, selectedLinks[0]?.target.id) }>View Paths</Button>
            </div>
          </Paper>
        </List>
      </Paper>
    </LoadingBar>
  );
};

export default connect(getEdges, { setGraphData, setFindNode, setLinks, setSelectedPath })(withStyles(styles)(EdgeList));
@ -1,93 +0,0 @@
import React from "react";
import { connect } from "react-redux";
import LoadingButton from "@material-ui/lab/LoadingButton";
import GitIcon from "@material-ui/icons/GitHub";
import { green, grey } from "@material-ui/core/colors";

import { getGraphFiles } from "./redux/store";
import { setLoading } from "./redux/loading";
import theme from "./theme";
import { selectGraphFile } from "./redux/graphFiles";
import { nodeInfo, setNodeInfos } from "./redux/nodeInfo";

const selectedStyle = {
  color: theme.palette.getContrastText(green[500]),
  backgroundColor: green[500],
  "&:hover": {
    backgroundColor: green[400],
  },
  "&:active": {
    backgroundColor: green[700],
  },
};

const unselectedStyle = {
  color: theme.palette.getContrastText(grey[100]),
  backgroundColor: grey[100],
  "&:hover": {
    backgroundColor: grey[200],
  },
  "&:active": {
    backgroundColor: grey[400],
  },
};

const GitHashButton = ({ loading, graphFiles, setLoading, selectGraphFile, setNodeInfos, text }) => {
  const [selected, setSelected] = React.useState(false);
  const [selfLoading, setSelfLoading] = React.useState(false);
  const [firstLoad, setFirstLoad] = React.useState(true);

  function handleClick() {
    const selectedGraphFiles = graphFiles.filter(
      (graphFile) => graphFile.selected == true
    );

    if (selectedGraphFiles.length > 0) {
      if (selectedGraphFiles[0]["git"] == text) {
        return;
      }
    }

    setSelfLoading(true);
    setLoading(true);
    selectGraphFile({
      hash: text,
      selected: true,
    });
  }

  React.useEffect(() => {
    const selectedGraphFile = graphFiles.filter(
      (graphFile) => graphFile.git == text
    );
    setSelected(selectedGraphFile[0].selected);

    if (firstLoad && graphFiles.length > 0) {
      if (graphFiles[0]["git"] == text) {
        handleClick();
      }
      setFirstLoad(false);
    }
  }, [graphFiles]);

  React.useEffect(() => {
    if (!loading) {
      setSelfLoading(false);
    }
  }, [loading]);

  return (
    <LoadingButton
      pending={selfLoading}
      pendingPosition="start"
      startIcon={<GitIcon />}
      variant="contained"
      style={selected ? selectedStyle : unselectedStyle}
      onClick={handleClick}
    >
      {text}
    </LoadingButton>
  );
};

export default connect(getGraphFiles, { setLoading, selectGraphFile, setNodeInfos })(GitHashButton);
@ -1,78 +0,0 @@
import React from "react";
import ScrollContainer from "react-indiana-drag-scroll";
import { connect } from "react-redux";
import Table from "@material-ui/core/Table";
import TableBody from "@material-ui/core/TableBody";
import TableCell from "@material-ui/core/TableCell";
import Paper from "@material-ui/core/Paper";
import TableRow from "@material-ui/core/TableRow";
import List from "@material-ui/core/List";
import ListItem from "@material-ui/core/ListItem";
import TextField from "@material-ui/core/TextField";

import { getGraphFiles } from "./redux/store";
import { setGraphFiles } from "./redux/graphFiles";

import GitHashButton from "./GitHashButton";

const { REACT_APP_API_URL } = process.env;

const flexContainer = {
  display: "flex",
  flexDirection: "row",
  padding: 0,
  width: "50%",
  height: "50%",
};

const textFields = [
  "Scroll to commit",
  "Commit Range Begin",
  "Commit Range End",
];

const GraphCommitDisplay = ({ graphFiles, setGraphFiles }) => {
  React.useEffect(() => {
    fetch(REACT_APP_API_URL + "/api/graphs")
      .then((res) => res.json())
      .then((data) => {
        setGraphFiles(data.graph_files);
      })
      .catch((err) => {
        /* eslint-disable no-console */
        console.log("Error Reading data " + err);
      });
  }, []);

  return (
    <Paper style={{ height: "100%", width: "100%" }}>
      <List style={flexContainer}>
        {textFields.map((text) => (
          <ListItem key={text}>
            <TextField size="small" label={text} />
          </ListItem>
        ))}
      </List>
      <ScrollContainer
        vertical={false}
        style={{ height: "50%" }}
        className="scroll-container"
        hideScrollbars={true}
      >
        <Table style={{ height: "100%" }}>
          <TableBody>
            <TableRow>
              {graphFiles.map((file) => (
                <TableCell key={file.id}>
                  <GitHashButton text={file.git} />
                </TableCell>
              ))}
            </TableRow>
          </TableBody>
        </Table>
      </ScrollContainer>
    </Paper>
  );
};

export default connect(getGraphFiles, { setGraphFiles })(GraphCommitDisplay);
@ -1,66 +0,0 @@
import React from "react";
import { makeStyles } from "@material-ui/core/styles";
import Table from "@material-ui/core/Table";
import TableBody from "@material-ui/core/TableBody";
import TableCell from "@material-ui/core/TableCell";
import TableContainer from "@material-ui/core/TableContainer";
import TableHead from "@material-ui/core/TableHead";
import TableRow from "@material-ui/core/TableRow";
import Paper from "@material-ui/core/Paper";
import { connect } from "react-redux";
import { getCounts } from "./redux/store";
import { setCounts } from "./redux/counts";

const {REACT_APP_API_URL} = process.env;

const columns = [
  { id: "ID", field: "type", headerName: "Count Type", width: 50 },
  { field: "value", headerName: "Value", width: 50 },
];

const useStyles = makeStyles({
  table: {
    minWidth: 50,
  },
});

const GraphInfo = ({ selectedGraph, counts, datawidth, setCounts }) => {
  React.useEffect(() => {
    let gitHash = selectedGraph;
    if (gitHash) {
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/analysis')
        .then(response => response.json())
        .then(data => {
          setCounts(data.results);
        });
    }
  }, [selectedGraph]);

  const classes = useStyles();

  return (
    <TableContainer component={Paper}>
      <Table className={classes.table} size="small" aria-label="simple table">
        <TableHead>
          <TableRow>
            {columns.map((column, index) => {
              return <TableCell key={index}>{column.headerName}</TableCell>;
            })}
          </TableRow>
        </TableHead>
        <TableBody>
          {counts.map((row) => (
            <TableRow key={row.id}>
              <TableCell component="th" scope="row">
                {row.type}
              </TableCell>
              <TableCell>{row.value}</TableCell>
            </TableRow>
          ))}
        </TableBody>
      </Table>
    </TableContainer>
  );
};

export default connect(getCounts, { setCounts })(GraphInfo);
@ -1,78 +0,0 @@
import React from "react";
import { makeStyles } from "@material-ui/core/styles";
import AppBar from "@material-ui/core/AppBar";
import Tabs from "@material-ui/core/Tabs";
import Tab from "@material-ui/core/Tab";

import NodeList from "./NodeList";
import EdgeList from "./EdgeList";
import InfoExpander from "./InfoExpander";
import AlgorithmExpander from "./AlgorithmExpander";

function a11yProps(index) {
  return {
    id: `scrollable-auto-tab-${index}`,
    "aria-controls": `scrollable-auto-tabpanel-${index}`,
  };
}

const useStyles = makeStyles((theme) => ({
  root: {
    flexGrow: 1,
    width: "100%",
    height: "100%",
    backgroundColor: theme.palette.background.paper,
  },
}));

export default function GraphInfoTabs({ nodes, width }) {
  const classes = useStyles();
  const [tab, setTab] = React.useState(1);
  const [transPathFrom, setTransPathFrom] = React.useState('');
  const [transPathTo, setTransPathTo] = React.useState('');

  const handleChange = (event, newValue) => {
    setTab(newValue);
  };

  const handleTransPath = (event, fromNode, toNode) => {
    setTransPathFrom(fromNode);
    setTransPathTo(toNode);
    if (fromNode != '' && toNode != '') {
      setTab(3);
    }
  };

  return (
    <div className={classes.root}>
      <AppBar position="static" color="default">
        <Tabs
          value={tab}
          onChange={handleChange}
          indicatorColor="primary"
          textColor="primary"
          variant="scrollable"
          scrollButtons="auto"
          aria-label="scrollable auto tabs example"
        >
          <Tab label="Selected Info" {...a11yProps(0)} />
          <Tab label="Node List" {...a11yProps(1)} />
          <Tab label="Edge List" {...a11yProps(2)} />
          <Tab label="Algorithms" {...a11yProps(3)} />
        </Tabs>
      </AppBar>
      <div style={{ height: "100%" }} hidden={tab != 0}>
        <InfoExpander width={width}></InfoExpander>
      </div>
      <div style={{ height: "100%" }} hidden={tab != 1}>
        <NodeList nodes={nodes}></NodeList>
      </div>
      <div style={{ height: "100%" }} hidden={tab != 2}>
        <EdgeList nodes={nodes} setTransPath={handleTransPath}></EdgeList>
      </div>
      <div style={{ height: "100%" }} hidden={tab != 3}>
        <AlgorithmExpander width={width} transPathFrom={transPathFrom} transPathTo={transPathTo}></AlgorithmExpander>
      </div>
    </div>
  );
}
@ -1,370 +0,0 @@
import React from "react";
import { connect } from "react-redux";
import { FixedSizeList } from "react-window";
import SplitPane from "react-split-pane";
import { makeStyles, withStyles } from "@material-ui/core/styles";
import ListItem from "@material-ui/core/ListItem";
import ListItemText from "@material-ui/core/ListItemText";
import Paper from "@material-ui/core/Paper";
import Typography from "@material-ui/core/Typography";
import Box from "@material-ui/core/Box";
import ExpandMoreIcon from "@material-ui/icons/ExpandMore";
import MuiAccordion from "@material-ui/core/Accordion";
import MuiAccordionSummary from "@material-ui/core/AccordionSummary";
import MuiAccordionDetails from "@material-ui/core/AccordionDetails";
import useResizeAware from "react-resize-aware";

import { getSelected } from "./redux/store";
import { selectedGraphPaths, setSelectedPath } from "./redux/graphPaths";
import { setGraphData } from "./redux/graphData";
import { setLinks } from "./redux/links";
import { setLinksTrans } from "./redux/linksTrans";

import OverflowTooltip from "./OverflowTooltip";

const {REACT_APP_API_URL} = process.env;

const rowHeight = 25;

const Accordion = withStyles({
  root: {
    border: "1px solid rgba(0, 0, 0, .125)",
    boxShadow: "none",
    "&:not(:last-child)": {
      borderBottom: 0,
    },
    "&:before": {
      display: "none",
    },
    "&$expanded": {
      margin: "auto",
    },
  },
  expanded: {},
})(MuiAccordion);

const AccordionSummary = withStyles({
  root: {
    backgroundColor: "rgba(0, 0, 0, .03)",
    borderBottom: "1px solid rgba(0, 0, 0, .125)",
    marginBottom: -1,
    minHeight: 56,
    "&$expanded": {
      minHeight: 56,
    },
  },
  content: {
    "&$expanded": {
      margin: "12px 0",
    },
  },
  expanded: {},
})(MuiAccordionSummary);

const AccordionDetails = withStyles((theme) => ({
  root: {
    padding: theme.spacing(2),
  },
}))(MuiAccordionDetails);

const GraphPaths = ({
  nodes,
  selectedGraph,
  selectedNodes,
  graphPaths,
  setSelectedPath,
  width,
  selectedGraphPaths,
  setGraphData,
  setLinks,
  setLinksTrans,
  showTransitive,
  transPathFrom,
  transPathTo
}) => {
  const [fromNode, setFromNode] = React.useState("");
  const [toNode, setToNode] = React.useState("");
  const [fromNodeId, setFromNodeId] = React.useState(0);
  const [toNodeId, setToNodeId] = React.useState(0);
  const [fromNodeExpanded, setFromNodeExpanded] = React.useState(false);
  const [toNodeExpanded, setToNodeExpanded] = React.useState(false);
  const [paneSize, setPaneSize] = React.useState("50%");

  const [fromResizeListener, fromSizes] = useResizeAware();
  const [toResizeListener, toSizes] = useResizeAware();

  const useStyles = makeStyles((theme) => ({
    root: {
      width: "100%",
      maxWidth: width,
      backgroundColor: theme.palette.background.paper,
    },
    nested: {
      paddingLeft: theme.spacing(4),
    },
    listItem: {
      width: width,
    },
  }));
  const classes = useStyles();

  React.useEffect(() => {
    setFromNode(transPathFrom);
    setFromNodeExpanded(false);
    setToNode(transPathTo);
    setToNodeExpanded(false);
    setPaneSize("50%");
    if (transPathFrom != '' && transPathTo != '') {
      getGraphPaths(transPathFrom, transPathTo);
    } else {
      selectedGraphPaths({
        fromNode: '',
        toNode: '',
        paths: [],
        selectedPath: -1
      });
    }
  }, [transPathFrom, transPathTo]);

  function getGraphPaths(fromNode, toNode) {
    let gitHash = selectedGraph;
    if (gitHash) {
      let postData = {
        "fromNode": fromNode,
        "toNode": toNode
      };
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/paths', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          selectedGraphPaths(data);
          let postData = {
            "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node),
            "extra_nodes": data.extraNodes,
            "transitive_edges": showTransitive
          };
          fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json'
            },
            body: JSON.stringify(postData)
          })
            .then(response => response.json())
            .then(data => {
              setGraphData(data.graphData);
              setLinks(
                data.graphData.links.map((link) => {
                  if (link.source == fromNode && link.target == toNode) {
                    link.selected = true;
                  } else {
                    link.selected = false;
                  }
                  return link;
                })
              );
              setLinksTrans(data.graphData.links_trans);
            });
        });
    }
  }

  function toNodeRow({ index, style, data }) {
    return (
      <ListItem
        button
        style={style}
        key={index}
        onClick={() => {
          setToNode(data[index].name);
          setToNodeId(index);
          setToNodeExpanded(false);
          setPaneSize("50%");
          if (fromNode != "" && data[fromNodeId]) {
            getGraphPaths(data[fromNodeId].node, data[index].node);
          }
        }}
      >
        <ListItemText primary={data[index].name} />
      </ListItem>
    );
  }

  function fromNodeRow({ index, style, data }) {
    return (
      <ListItem
        button
        style={style}
        key={index}
        onClick={() => {
          setFromNode(data[index].name);
          setFromNodeId(index);
          setFromNodeExpanded(false);
          setPaneSize("50%");

          if (toNode != "" && data[toNodeId]) {
            getGraphPaths(data[index].node, data[toNodeId].node);
          }
        }}
      >
        <ListItemText primary={data[index].name} />
      </ListItem>
    );
  }

  function pathRow({ index, style, data }) {
    return (
      <ListItem
        button
        style={style}
        key={index}
        onClick={() => {
          setSelectedPath(index);
        }}
      >
        <ListItemText
          primary={
            "Path #" +
            (index + 1).toString() +
            " - Hops: " +
            (data[index].length - 1).toString()
          }
        />
      </ListItem>
    );
  }

  function listHeight(numItems, minHeight, maxHeight) {
    const size = numItems * rowHeight;
    if (size > maxHeight) {
      return maxHeight;
    }
    if (size < minHeight) {
      return minHeight;
    }
    return size;
  }

  const handleToChange = (panel) => (event, newExpanded) => {
    setPaneSize(newExpanded ? "0%" : "50%");
    setToNodeExpanded(newExpanded ? panel : false);
  };

  const handleFromChange = (panel) => (event, newExpanded) => {
    setPaneSize(newExpanded ? "100%" : "50%");
    setFromNodeExpanded(newExpanded ? panel : false);
  };

  return (
    <Paper elevation={3} style={{ backgroundColor: "rgba(0, 0, 0, .03)" }}>
      <SplitPane
        split="vertical"
        minSize={"50%"}
        size={paneSize}
        style={{ position: "relative" }}
        defaultSize={"50%"}
        pane1Style={{ height: "100%" }}
        pane2Style={{ height: "100%", width: "100%" }}
      >
        <Accordion
          expanded={fromNodeExpanded}
          onChange={handleFromChange(!fromNodeExpanded)}
        >
          <AccordionSummary
            expandIcon={<ExpandMoreIcon />}
            aria-controls="panel1a-content"
            id="panel1a-header"
          >
            <Box
              style={{
                display: "flex",
                flexDirection: "column",
              }}
            >
              <Typography className={classes.heading}>From Node:</Typography>
              <Typography
                className={classes.heading}
                style={{ width: fromSizes.width - 50 }}
                noWrap={true}
                display={"block"}
              >
                {fromResizeListener}
                {fromNode}
              </Typography>
            </Box>
          </AccordionSummary>
          <AccordionDetails>
            <FixedSizeList
              height={listHeight(selectedNodes.length, 100, 200)}
              width={width}
              itemSize={rowHeight}
              itemCount={selectedNodes.length}
              itemData={selectedNodes}
            >
              {fromNodeRow}
            </FixedSizeList>
          </AccordionDetails>
        </Accordion>

        <Accordion
          expanded={toNodeExpanded}
          onChange={handleToChange(!toNodeExpanded)}
        >
          <AccordionSummary
            expandIcon={<ExpandMoreIcon />}
            aria-controls="panel1a-content"
            id="panel1a-header"
          >
            <Box style={{ display: "flex", flexDirection: "column" }}>
              <Typography className={classes.heading}>To Node:</Typography>
              <Typography
                className={classes.heading}
                style={{ width: toSizes.width - 50 }}
                noWrap={true}
                display={"block"}
              >
                {toResizeListener}
                {toNode}
              </Typography>
            </Box>
          </AccordionSummary>
          <AccordionDetails>
            <FixedSizeList
              height={listHeight(selectedNodes.length, 100, 200)}
              width={width}
              itemSize={rowHeight}
              itemCount={selectedNodes.length}
              itemData={selectedNodes}
            >
              {toNodeRow}
            </FixedSizeList>
          </AccordionDetails>
        </Accordion>
      </SplitPane>
      <Paper elevation={2} style={{ backgroundColor: "rgba(0, 0, 0, .03)" }}>
        <Typography className={classes.heading} style={{ margin: "10px" }}>
          Num Paths: {graphPaths.paths.length}{" "}
        </Typography>
      </Paper>
      <FixedSizeList
        height={listHeight(graphPaths.paths.length, 100, 200)}
        width={width}
        itemSize={rowHeight}
        itemCount={graphPaths.paths.length}
        itemData={graphPaths.paths}
        style={{ margin: "10px" }}
      >
        {pathRow}
      </FixedSizeList>
    </Paper>
  );
};

export default connect(getSelected, { selectedGraphPaths, setSelectedPath, setGraphData, setLinks, setLinksTrans })(
  GraphPaths
);
@ -1,95 +0,0 @@
import React from "react";
import { connect } from "react-redux";
import { makeStyles, withStyles } from "@material-ui/core/styles";
import Typography from "@material-ui/core/Typography";
import ExpandMoreIcon from "@material-ui/icons/ExpandMore";
import Paper from "@material-ui/core/Paper";
import MuiAccordion from "@material-ui/core/Accordion";
import MuiAccordionSummary from "@material-ui/core/AccordionSummary";
import MuiAccordionDetails from "@material-ui/core/AccordionDetails";

import { getSelected } from "./redux/store";

import GraphInfo from "./GraphInfo";
import NodeInfo from "./NodeInfo";
import LoadingBar from "./LoadingBar";

const useStyles = makeStyles((theme) => ({
  root: {
    width: "100%",
  },
  heading: {
    fontSize: theme.typography.pxToRem(15),
    fontWeight: theme.typography.fontWeightRegular,
  },
}));

const Accordion = withStyles({
  root: {
    border: "1px solid rgba(0, 0, 0, .125)",
    boxShadow: "none",
    "&:not(:last-child)": {
      borderBottom: 0,
    },
    "&:before": {
      display: "none",
    },
    "&$expanded": {
      margin: "auto",
    },
  },
  expanded: {},
})(MuiAccordion);

const AccordionSummary = withStyles({
  root: {
    backgroundColor: "rgba(0, 0, 0, .03)",
    borderBottom: "1px solid rgba(0, 0, 0, .125)",
    marginBottom: -1,
    minHeight: 56,
    "&$expanded": {
      minHeight: 56,
    },
  },
  content: {
    "&$expanded": {
      margin: "12px 0",
    },
  },
  expanded: {},
})(MuiAccordionSummary);

const AccordionDetails = withStyles((theme) => ({
  root: {
    padding: theme.spacing(2),
  },
}))(MuiAccordionDetails);

const InfoExpander = ({ selectedNodes, selectedEdges, loading, width }) => {
  const classes = useStyles();

  return (
    <div className={classes.root}>
      <LoadingBar loading={loading} height={"100%"}>
        <Paper style={{ maxHeight: "82vh", overflow: "auto" }}>
          {selectedNodes.map((node) => (
            <Accordion key={node.node}>
              <AccordionSummary
                expandIcon={<ExpandMoreIcon />}
                aria-controls="panel1a-content"
                id="panel1a-header"
              >
                <Typography className={classes.heading}>{node.name}</Typography>
              </AccordionSummary>
              <AccordionDetails>
                <NodeInfo node={node} width={width} />
              </AccordionDetails>
            </Accordion>
          ))}
        </Paper>
      </LoadingBar>
    </div>
  );
};

export default connect(getSelected)(InfoExpander);
@ -1,25 +0,0 @@
import React from "react";
import LinearProgress from "@material-ui/core/LinearProgress";
import Fade from "@material-ui/core/Fade";

export default function LoadingBar({ loading, height, children }) {
  const dimOnTrue = (flag) => {
    return {
      opacity: flag ? 0.15 : 1,
      height: "100%",
    };
  };

  return (
    <div style={{ height: height }}>
      <Fade
        in={loading}
        style={{ transitionDelay: loading ? "300ms" : "0ms" }}
        unmountOnExit
      >
        <LinearProgress />
      </Fade>
      <div style={dimOnTrue(loading)}>{children}</div>
    </div>
  );
}
@ -1,187 +0,0 @@
import React from "react";
import { connect } from "react-redux";
import { FixedSizeList } from "react-window";
import { AutoSizer } from "react-virtualized";
import { makeStyles } from "@material-ui/core/styles";
import List from "@material-ui/core/List";
import ListItem from "@material-ui/core/ListItem";
import ListItemText from "@material-ui/core/ListItemText";
import Collapse from "@material-ui/core/Collapse";
import ExpandLess from "@material-ui/icons/ExpandLess";
import ExpandMore from "@material-ui/icons/ExpandMore";
import Paper from "@material-ui/core/Paper";
import Box from "@material-ui/core/Box";

import { getNodeInfos } from "./redux/store";

import theme from "./theme";

import OverflowTooltip from "./OverflowTooltip";

const NodeInfo = ({ nodeInfos, node, width }) => {
  const useStyles = makeStyles((theme) => ({
    root: {
      width: "100%",
      maxWidth: width,
      backgroundColor: theme.palette.background.paper,
    },
    nested: {
      paddingLeft: theme.spacing(4),
    },
    listItem: {
      width: width,
    },
  }));

  const rowHeight = 25;
  const classes = useStyles();
  const [openDependers, setOpenDependers] = React.useState(false);
  const [openDependencies, setOpenDependencies] = React.useState(false);
  const [openNodeAttribs, setOpenNodeAttribs] = React.useState(false);

  const [nodeInfo, setNodeInfo] = React.useState({
    id: 0,
    node: "test/test.so",
    name: "test",
    attribs: [{ name: "test", value: "test" }],
    dependers: [{ node: "test/test3.so", symbols: [] }],
    dependencies: [{ node: "test/test2.so", symbols: [] }],
  });

  React.useEffect(() => {
    setNodeInfo(nodeInfos.filter((nodeInfo) => nodeInfo.node == node.node)[0]);
  }, [nodeInfos]);

  function renderAttribRow({ index, style, data }) {
    return (
      <ListItem style={style} key={index}>
        <Box style={{ margin: "5px" }}>
          <OverflowTooltip
            value={data[index].name}
            text={String(data[index].name) + ":"}
          />
        </Box>
        <OverflowTooltip
          value={String(data[index].value)}
          text={String(data[index].value)}
        />
      </ListItem>
    );
  }

  function renderNodeRow({ index, style, data }) {
    return (
      <ListItem style={style} key={index}>
        <OverflowTooltip
          button
          name={data[index].name}
          value={data[index].node}
          text={data[index].node}
        />
      </ListItem>
    );
  }

  function listHeight(numItems) {
    const size = numItems * rowHeight;
    if (size > 350) {
      return 350;
    }
    return size;
  }

  if (nodeInfo == undefined) {
    return "";
  }
  return (
    <List
      component="nav"
      aria-labelledby="nested-list-subheader"
      className={classes.root}
      dense={true}
    >
      <Paper elevation={3} style={{ backgroundColor: "rgba(0, 0, 0, .03)" }}>
        <ListItem button>
          <ListItemText primary={nodeInfo.node} />
        </ListItem>
        <ListItem button>
          <ListItemText primary={nodeInfo.name} />
        </ListItem>

        <ListItem button onClick={() => setOpenNodeAttribs(!openNodeAttribs)}>
          <ListItemText primary="Attributes" />
          {openNodeAttribs ? <ExpandLess /> : <ExpandMore />}
        </ListItem>
        <Collapse in={openNodeAttribs} timeout="auto" unmountOnExit>
          <Paper
            elevation={2}
            style={{
              width: "100%",
              backgroundColor: theme.palette.background.paper,
            }}
          >
            <AutoSizer disableHeight={true}>
              {({ height, width }) => (
                <FixedSizeList
                  height={listHeight(nodeInfo.attribs.length)}
                  width={width}
                  itemSize={rowHeight}
                  itemCount={nodeInfo.attribs.length}
                  itemData={nodeInfo.attribs}
                >
                  {renderAttribRow}
                </FixedSizeList>
              )}
            </AutoSizer>
          </Paper>
        </Collapse>

        <ListItem button onClick={() => setOpenDependers(!openDependers)}>
          <ListItemText primary="Dependers" />
          {openDependers ? <ExpandLess /> : <ExpandMore />}
        </ListItem>
        <Collapse in={openDependers} timeout="auto" unmountOnExit>
          <Paper elevation={4}>
            <AutoSizer disableHeight={true}>
              {({ height, width }) => (
                <FixedSizeList
                  height={listHeight(nodeInfo.dependers.length)}
                  width={width}
                  itemSize={rowHeight}
                  itemCount={nodeInfo.dependers.length}
                  itemData={nodeInfo.dependers}
                >
                  {renderNodeRow}
                </FixedSizeList>
              )}
            </AutoSizer>
          </Paper>
        </Collapse>

        <ListItem button onClick={() => setOpenDependencies(!openDependencies)}>
          <ListItemText primary="Dependencies" />
          {openDependencies ? <ExpandLess /> : <ExpandMore />}
        </ListItem>
        <Collapse in={openDependencies} timeout="auto" unmountOnExit>
          <Paper elevation={4}>
            <AutoSizer disableHeight={true}>
              {({ height, width }) => (
                <FixedSizeList
                  height={listHeight(nodeInfo.dependencies.length)}
                  width={width}
                  itemSize={rowHeight}
                  itemCount={nodeInfo.dependencies.length}
                  itemData={nodeInfo.dependencies}
                >
                  {renderNodeRow}
                </FixedSizeList>
              )}
            </AutoSizer>
          </Paper>
        </Collapse>
      </Paper>
    </List>
  );
};

export default connect(getNodeInfos)(NodeInfo);
@ -1,179 +0,0 @@
import React from "react";

import { connect } from "react-redux";
import { getNodes } from "./redux/store";
import { setFindNode } from "./redux/findNode";

import DataGrid from "./DataGrid";
import LoadingBar from "./LoadingBar";
import TextField from "@material-ui/core/TextField";

import { setNodes, updateCheckbox, updateSelected } from "./redux/nodes";
import { setNodeInfos } from "./redux/nodeInfo";
import { setGraphData } from "./redux/graphData";
import { setLinks } from "./redux/links";
import { setLinksTrans } from "./redux/linksTrans";
import { setLoading } from "./redux/loading";
import { setListSearchTerm } from "./redux/listSearchTerm";
import { Button, Autocomplete, Grid } from "@material-ui/core";

const {REACT_APP_API_URL} = process.env;

const columns = [
  { dataKey: "check", label: "Selected", width: 70 },
  { dataKey: "name", label: "Name", width: 200 },
  { id: "ID", dataKey: "node", label: "Node", width: 200 },
];

const NodeList = ({ selectedGraph, nodes, searchedNodes, loading, setFindNode, setNodeInfos, setNodes, setLinks, setLinksTrans, setLoading, setListSearchTerm, updateCheckbox, updateSelected, setGraphData, showTransitive }) => {
  const [searchPath, setSearchPath] = React.useState('');

  React.useEffect(() => {
    let gitHash = selectedGraph;
    if (gitHash) {
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes')
        .then(response => response.json())
        .then(data => {
          setNodes(data.nodes.map((node, index) => {
            return {
              id: index,
              node: node,
              name: node.substring(node.lastIndexOf('/') + 1),
              check: "checkbox",
              selected: false,
            };
          }));
          setLoading(false);
        });
      setSearchPath(null);
      setListSearchTerm('');
    }
  }, [selectedGraph]);

  function newGraphData() {
    let gitHash = selectedGraph;
    if (gitHash) {
      let postData = {
        "selected_nodes": nodes.filter(node => node.selected == true).map(node => node.node),
        "transitive_edges": showTransitive
      };
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          setGraphData(data.graphData);
          setLinks(data.graphData.links);
          setLinksTrans(data.graphData.links_trans);
        });
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          setNodeInfos(data.nodeInfos);
        });
    }
  }

  function nodePaths() {
    const paths = nodes.map(node => node.node.substring(0, node.node.lastIndexOf('/') + 1));
    return [...new Set(paths)];
  }

  function handleRowClick(event) {
    setFindNode(event.target.textContent);
  }

  function handleSelectAll(event) {
    searchedNodes.forEach(node => {
      updateCheckbox({ node: node.id, value: "flip" });
      updateSelected({ index: node.id, value: true });
    });
    newGraphData();
  }

  function handleDeselectAll(event) {
    searchedNodes.forEach(node => {
      updateCheckbox({ node: node.id, value: "flip" });
      updateSelected({ index: node.id, value: false });
    });
    newGraphData();
  }

  function handleSearchTermChange(event, newTerm) {
    if (newTerm == null) {
      setSearchPath('');
      setListSearchTerm('');
    } else {
      setSearchPath(newTerm);
      setListSearchTerm(newTerm);
    }
  }

  return (
    <LoadingBar loading={loading} height={"95%"}>
      <Grid container spacing={2}>
        <Grid item xs={12}/>
        <Grid item xs={12}>
          <Autocomplete
            fullWidth
            freeSolo
            ListboxProps={{ style: { maxHeight: "9rem" } }}
            value={searchPath}
            onInputChange={handleSearchTermChange}
            onChange={handleSearchTermChange}
            options={nodePaths()}
            renderInput={(params) => <TextField {...params}
              label="Search by Path or Name"
              variant="outlined"
            />}
          />
        </Grid>
        <Grid item xs={12}>
          <Grid
            container
            direction="row"
            justifyContent="center"
            spacing={4}
          >
            <Grid item>
              <Button
                variant="contained"
                onClick={handleSelectAll}
              >
                Select All
              </Button>
            </Grid>
            <Grid item>
              <Button
                variant="contained"
                onClick={handleDeselectAll}
              >
                Deselect All
              </Button>
            </Grid>
          </Grid>
        </Grid>
        <Grid item xs={12}/>
      </Grid>
      <DataGrid
        rows={nodes}
        columns={columns}
        rowHeight={30}
        headerHeight={35}
        onNodeClicked={handleRowClick}
      />
    </LoadingBar>
  );
};

export default connect(getNodes, { setFindNode, setNodes, setNodeInfos, setLinks, setLinksTrans, setLoading, setListSearchTerm, updateCheckbox, updateSelected, setGraphData })(NodeList);
@ -1,110 +0,0 @@
import React, { useRef, useEffect, useState } from "react";
import { connect } from "react-redux";
import Tooltip from "@material-ui/core/Tooltip";
import Fade from "@material-ui/core/Fade";
import Box from "@material-ui/core/Box";
import IconButton from "@material-ui/core/IconButton";
import AddCircleOutline from "@material-ui/icons/AddCircleOutline";
import Typography from "@material-ui/core/Typography";

import { updateCheckbox } from "./redux/nodes";
import { setGraphData } from "./redux/graphData";
import { setNodeInfos } from "./redux/nodeInfo";
import { getGraphData } from "./redux/store";
import { setLinks } from "./redux/links";
import { setLinksTrans } from "./redux/linksTrans";

const {REACT_APP_API_URL} = process.env;

const OverflowTip = (props) => {
  const textElementRef = useRef(null);
  const [hoverStatus, setHover] = useState(false);
  // Close over the ref (instead of taking it as a parameter) so this also
  // works when registered directly as a window resize listener.
  const compareSize = () => {
    if (textElementRef.current != null) {
      const compare =
        textElementRef.current.scrollWidth > textElementRef.current.offsetWidth;
      setHover(compare);
    }
  };
  function newGraphData() {
    let gitHash = props.selectedGraph;
    if (gitHash) {
      let postData = {
        "selected_nodes": props.nodes.filter(node => node.selected == true).map(node => node.node),
        "transitive_edges": props.showTransitive
      };
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/d3', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          props.setGraphData(data.graphData);
          props.setLinks(data.graphData.links);
          props.setLinksTrans(data.graphData.links_trans);
        });
      fetch(REACT_APP_API_URL + '/api/graphs/' + gitHash + '/nodes/details', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(postData)
      })
        .then(response => response.json())
        .then(data => {
          props.setNodeInfos(data.nodeInfos);
        });
    }
  }

  useEffect(() => {
    compareSize();
    window.addEventListener("resize", compareSize);
    return function () {
      window.removeEventListener("resize", compareSize);
    };
  }, [props, textElementRef.current]);

  return (
    <Tooltip
      title={props.value}
interactive="true"
|
||||
      disableHoverListener={!hoverStatus}
      style={{ fontSize: "1em" }}
      enterDelay={500}
      TransitionComponent={Fade}
    >
      <Box
        style={{
          fontSize: "1em",
          whiteSpace: "nowrap",
          overflow: "hidden",
          textOverflow: "ellipsis",
        }}
      >
        <Typography noWrap variant={"body2"} gutterBottom>
          {props.button && (
            <IconButton
              size="small"
              color="secondary"
              onClick={(event) => {
                props.updateCheckbox({ node: props.text, value: "flip" });
                newGraphData();
              }}
            >
              <AddCircleOutline style={{ height: "15px", width: "15px" }} />
            </IconButton>
          )}
          <span ref={textElementRef}>{props.text}</span>
        </Typography>
      </Box>
    </Tooltip>
  );
};

export default connect(getGraphData, { updateCheckbox, setGraphData, setNodeInfos, setLinks, setLinksTrans })(OverflowTip);
@ -1,4 +0,0 @@
export default function SwitchComponents({ active, children }) {
  // Switch all children and return the "active" one
  return children.filter((child) => child.props.name == active);
}
@ -1,19 +0,0 @@
import React from "react";
import ReactDOM from "react-dom/client";
import { Provider } from "react-redux";
import CssBaseline from "@material-ui/core/CssBaseline";
import { ThemeProvider } from "@material-ui/core/styles";

import theme from "./theme";
import store from "./redux/store";

import App from "./App";

const root = ReactDOM.createRoot(document.getElementById("root"));
root.render(
  <Provider store={store}>
    <ThemeProvider theme={theme}>
      <CssBaseline />
      <App />
    </ThemeProvider>
  </Provider>
);
@ -1,16 +0,0 @@
import { initialState } from "./store";

export const counts = (state = initialState, action) => {
  switch (action.type) {
    case "setCounts":
      return action.payload;

    default:
      return state;
  }
};

export const setCounts = (counts) => ({
  type: "setCounts",
  payload: counts,
});
@ -1,16 +0,0 @@
import { initialState } from "./store";

export const findNode = (state = initialState, action) => {
  switch (action.type) {
    case "setFindNode":
      return action.payload;

    default:
      return state;
  }
};

export const setFindNode = (node) => ({
  type: "setFindNode",
  payload: node,
});
@ -1,16 +0,0 @@
import { initialState } from "./store";

export const graphData = (state = initialState, action) => {
  switch (action.type) {
    case "setGraphData":
      return action.payload;

    default:
      return state;
  }
};

export const setGraphData = (graphData) => ({
  type: "setGraphData",
  payload: graphData,
});
@ -1,30 +0,0 @@
import { initialState } from "./store";

export const graphFiles = (state = initialState, action) => {
  switch (action.type) {
    case "setGraphFiles":
      return action.payload;
    case "selectGraphFile":
      const newState = state.map((graphFile, index) => {
        if (action.payload.hash == graphFile.git) {
          graphFile.selected = action.payload.selected;
        } else {
          graphFile.selected = false;
        }
        return graphFile;
      });
      return newState;
    default:
      return state;
  }
};

export const setGraphFiles = (graphFiles) => ({
  type: "setGraphFiles",
  payload: graphFiles,
});

export const selectGraphFile = (graphFiles) => ({
  type: "selectGraphFile",
  payload: graphFiles,
});
@ -1,23 +0,0 @@
import { initialState } from "./store";

export const graphPaths = (state = initialState, action) => {
  switch (action.type) {
    case "selectedGraphPaths":
      return action.payload;
    case "setSelectedPath":
      const newState = { ...state, selectedPath: action.payload };
      return newState;
    default:
      return state;
  }
};

export const selectedGraphPaths = (pathData) => ({
  type: "selectedGraphPaths",
  payload: pathData,
});

export const setSelectedPath = (path) => ({
  type: "setSelectedPath",
  payload: path,
});
@ -1,32 +0,0 @@
import { initialState } from "./store";

export const links = (state = initialState, action) => {
  switch (action.type) {
    case "addLink":
      return [...state, action.payload];
    case "setLinks":
      return action.payload;
    case "updateSelectedLinks":
      // Copy the array so the store sees a new reference.
      var newState = [...state];
      newState[action.payload.index].selected = action.payload.value;
      return newState;
    default:
      return state;
  }
};

export const addLink = (link) => ({
  type: "addLink",
  payload: link,
});

export const setLinks = (links) => ({
  type: "setLinks",
  payload: links,
});

export const updateSelectedLinks = (newValue) => ({
  type: "updateSelectedLinks",
  payload: newValue,
});
@ -1,32 +0,0 @@
import { initialState } from "./store";

export const linksTrans = (state = initialState, action) => {
  switch (action.type) {
    case "addLinkTrans":
      return [...state, action.payload];
    case "setLinksTrans":
      return action.payload;
    case "updateSelectedLinksTrans":
      // Copy the array so the store sees a new reference.
      var newState = [...state];
      newState[action.payload.index].selected = action.payload.value;
      return newState;
    default:
      return state;
  }
};

export const addLinkTrans = (link) => ({
  type: "addLinkTrans",
  payload: link,
});

export const setLinksTrans = (links) => ({
  type: "setLinksTrans",
  payload: links,
});

export const updateSelectedLinksTrans = (newValue) => ({
  type: "updateSelectedLinksTrans",
  payload: newValue,
});
@ -1,16 +0,0 @@
import { initialState } from "./store";

export const listSearchTerm = (state = initialState, action) => {
  switch (action.type) {
    case "setListSearchTerm":
      return action.payload;

    default:
      return state;
  }
};

export const setListSearchTerm = (listSearchTerm) => ({
  type: "setListSearchTerm",
  payload: listSearchTerm,
});
@ -1,16 +0,0 @@
import { initialState } from "./store";

export const loading = (state = initialState, action) => {
  switch (action.type) {
    case "setLoading":
      return action.payload;

    default:
      return state;
  }
};

export const setLoading = (loading) => ({
  type: "setLoading",
  payload: loading,
});
@ -1,18 +0,0 @@
import { initialState } from "./store";

export const nodeInfo = (state = initialState, action) => {
  switch (action.type) {
    case "setNodeInfos":
      return action.payload;
    case "addNodeInfo":
      return [...state, action.payload];

    default:
      return state;
  }
};

export const setNodeInfos = (nodeInfos) => ({
  type: "setNodeInfos",
  payload: nodeInfos,
});
@ -1,66 +0,0 @@
import { initialState } from "./store";

export const nodes = (state = initialState, action) => {
  switch (action.type) {
    case "addNode":
      return [...state, action.payload];
    case "setNodes":
      return action.payload;
    case "updateSelected":
      // Copy the array so the store sees a new reference.
      var newState = [...state];
      newState[action.payload.index].selected = action.payload.value;
      return newState;
    case "updateCheckbox":
      newState = state.map((stateNode) => {
        if (stateNode.node == action.payload.node) {
          if (action.payload.value == "flip") {
            stateNode.selected = !stateNode.selected;
          } else {
            stateNode.selected = action.payload.value;
          }
        }
        return stateNode;
      });
      return newState;
    case "updateCheckboxes":
      newState = state.map((stateNode, index) => {
        const nodeToUpdate = action.payload.filter(
          (node) => stateNode.node == node.node
        );
        if (nodeToUpdate.length > 0) {
          stateNode.selected = nodeToUpdate[0].value;
        }
        return stateNode;
      });
      return newState;
    default:
      return state;
  }
};

export const addNode = (node) => ({
  type: "addNode",
  payload: node,
});

export const setNodes = (nodes) => ({
  type: "setNodes",
  payload: nodes,
});

export const updateSelected = (newValue) => ({
  type: "updateSelected",
  payload: newValue,
});

export const updateCheckbox = (newValue) => ({
  type: "updateCheckbox",
  payload: newValue,
});

export const updateCheckboxes = (newValue) => ({
  type: "updateCheckboxes",
  payload: newValue,
});
@ -1,16 +0,0 @@
import { initialState } from "./store";

export const showTransitive = (state = initialState, action) => {
  switch (action.type) {
    case "setShowTransitive":
      return action.payload;

    default:
      return state;
  }
};

export const setShowTransitive = (showTransitive) => ({
  type: "setShowTransitive",
  payload: showTransitive,
});
@@ -1,180 +0,0 @@
import { createStore, combineReducers } from "redux";
import { nodes } from "./nodes";
import { graphFiles } from "./graphFiles";
import { counts } from "./counts";
import { nodeInfo } from "./nodeInfo";
import { loading } from "./loading";
import { links } from "./links";
import { linksTrans } from "./linksTrans";
import { showTransitive } from "./showTransitive";
import { graphData } from "./graphData";
import { findNode } from "./findNode";
import { graphPaths } from "./graphPaths";
import { listSearchTerm } from "./listSearchTerm";

export const initialState = {
    loading: false,
    graphFiles: [
        // { id: 0, value: 'graphfile.graphml', version: 1, git: '1234567', selected: false }
    ],
    nodes: [
        // { id: 0, node: "test/test1.so", name: "test1", check: "checkbox", selected: false }
    ],
    links: [
        // { source: "test/test1.so", target: "test/test2.so" }
    ],
    linksTrans: [
        // { source: "test/test1.so", target: "test/test2.so" }
    ],
    showTransitive: false,
    graphData: {
        nodes: [
            // {id: 'test/test1.so', name: 'test1.so'},
            // {id: 'test/test2.so', name: 'test2.so'}
        ],
        links: [
            // {source: 'test/test1.so', target: 'test/test2.so'}
        ],
    },
    graphPaths: {
        fromNode: "test",
        toNode: "test",
        paths: [
            ["test1", "test2"],
            ["test1", "test3", "test2"],
        ],
        selectedPath: -1,
    },
    counts: [{ id: 0, type: "node2", value: 0 }],
    findNode: "",
    nodeInfo: [
        {
            id: 0,
            node: "test/test.so",
            name: "test",
            attribs: [{ name: "test", value: "test" }],
            dependers: [{ node: "test/test3.so", symbols: [] }],
            dependencies: [{ node: "test/test2.so", symbols: [] }],
        },
    ],
    listSearchTerm: "",
};

export const getCurrentGraphHash = (state) => {
    let selectedGraphFiles = state.graphFiles.filter(x => x.selected == true);
    let selectedGraph = undefined;
    if (selectedGraphFiles.length > 0) {
        selectedGraph = selectedGraphFiles[0].git;
    }
    return selectedGraph;
};

export const getLoading = (state) => {
    return { loading: state };
};

export const getGraphFiles = (state) => {
    return {
        loading: state.loading,
        graphFiles: state.graphFiles,
    };
};

export const getNodeInfos = (state) => {
    return {
        nodeInfos: state.nodeInfo,
    };
};

export const getCounts = (state) => {
    const counts = state.counts;
    return {
        selectedGraph: getCurrentGraphHash(state),
        counts: state.counts,
    };
};

export const getRows = (state) => {
    let searchedNodes = state.nodes.filter(node => node.node.indexOf(state.listSearchTerm) > -1);
    return {
        selectedGraph: getCurrentGraphHash(state),
        rowCount: searchedNodes.length,
        rowGetter: ({ index }) => searchedNodes[index],
        checkBox: ({ index }) => searchedNodes[index].selected,
        nodes: state.nodes,
        searchedNodes: searchedNodes,
        showTransitive: state.showTransitive,
    };
};

export const getSelected = (state) => {
    return {
        selectedGraph: getCurrentGraphHash(state),
        selectedNodes: state.nodes.filter((node) => node.selected),
        nodes: state.nodes,
        links: state.links,
        selectedEdges: [],
        loading: state.loading,
        graphPaths: state.graphPaths,
        showTransitive: state.showTransitive,
    };
};

export const getNodes = (state) => {
    return {
        selectedGraph: getCurrentGraphHash(state),
        nodes: state.nodes,
        loading: state.loading,
        listSearchTerm: state.listSearchTerm,
        searchedNodes: state.nodes.filter(node => node.node.indexOf(state.listSearchTerm) > -1),
        showTransitive: state.showTransitive
    };
};

export const getEdges = (state) => {
    return {
        selectedGraph: getCurrentGraphHash(state),
        nodes: state.nodes,
        links: state.links,
        linksTrans: state.linksTrans,
        selectedLinks: state.links.filter(link => link.selected == true),
        searchedNodes: state.nodes.filter(node => node.node.indexOf(state.listSearchTerm) > -1),
        showTransitive: state.showTransitive,
    };
};

export const getGraphData = (state) => {
    return {
        selectedGraph: getCurrentGraphHash(state),
        nodes: state.nodes,
        links: state.links,
        graphData: state.graphData,
        loading: state.loading,
        findNode: state.findNode,
        graphPaths: state.graphPaths,
        showTransitive: state.showTransitive,
    };
};

export const getFullState = (state) => {
    return { state };
};

const store = createStore(
    combineReducers({
        nodes,
        counts,
        nodeInfo,
        graphFiles,
        loading,
        links,
        linksTrans,
        graphData,
        findNode,
        graphPaths,
        listSearchTerm,
        showTransitive
    }),
    initialState
);
export default store;
@@ -1,16 +0,0 @@
/**
 * This proxy is intended to allow the visualizer to run in a development environment
 * which includes SSH tunnels communicating with private remote hosts.
 */

const { createProxyMiddleware } = require('http-proxy-middleware');

module.exports = function(app) {
    app.use(
        createProxyMiddleware('/api', {
            target: 'http://localhost:5000',
            changeOrigin: true,
            secure: false,
        })
    );
};
@@ -1,22 +0,0 @@
import { green, red, grey } from "@material-ui/core/colors";
import { createMuiTheme } from "@material-ui/core/styles";

// A custom theme for this app
const theme = createMuiTheme({
    palette: {
        primary: {
            light: green[300],
            main: green[500],
            dark: green[700],
        },
        secondary: {
            light: grey[300],
            main: grey[500],
            dark: grey[800],
            darkAccent: "#4d4d4d",
        },
        mode: "dark",
    },
});

export default theme;
File diff suppressed because it is too large
@@ -1,182 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2020 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Libdeps Graph Enums.

These are used for attributing data across the build scripts and analyzer scripts.
"""

import json
from enum import Enum, auto

import networkx

try:
    import progressbar
except ImportError:
    pass


class CountTypes(Enum):
    """Enums for the different types of counts to perform on a graph."""

    ALL = auto()
    NODE = auto()
    EDGE = auto()
    DIR_EDGE = auto()
    TRANS_EDGE = auto()
    DIR_PUB_EDGE = auto()
    PUB_EDGE = auto()
    PRIV_EDGE = auto()
    IF_EDGE = auto()
    PROG = auto()
    LIB = auto()


class DependsReportTypes(Enum):
    """Enums for the different type of depends reports to perform on a graph."""

    DIRECT_DEPENDS = auto()
    PROGRAM_DEPENDS = auto()
    COMMON_DEPENDS = auto()
    EXCLUDE_DEPENDS = auto()
    GRAPH_PATHS = auto()
    CRITICAL_EDGES = auto()
    IN_DEGREE_ONE = auto()
    SYMBOL_DEPENDS = auto()
    EFFICIENCY = auto()
    BAZEL_CONV_CANDIDATES = auto()


class LinterTypes(Enum):
    """Enums for the different types of counts to perform on a graph."""

    ALL = auto()
    PUBLIC_UNUSED = auto()
    EFFICIENCY_LINT = auto()


class EdgeProps(Enum):
    """Enums for edge properties."""

    direct = auto()
    visibility = auto()
    symbols = auto()


class NodeProps(Enum):
    """Enums for node properties."""

    bin_type = auto()


def null_progressbar(items):
    """Fake stand-in for normal progressbar."""
    for item in items:
        yield item


class LibdepsGraph(networkx.DiGraph):
    """Class for analyzing the graph."""

    def __init__(self, graph=networkx.DiGraph()):
        """Load the graph data."""
        super().__init__(incoming_graph_data=graph)
        self._progressbar = None
        self._deptypes = None

    def get_deptype(self, deptype):
        """Convert graphs deptypes from json string to dict, and return requested value."""

        if not self._deptypes:
            self._deptypes = json.loads(self.graph.get("deptypes", "{}"))
            if self.graph["graph_schema_version"] == 1:
                # get and set the legacy values
                self._deptypes["Global"] = self._deptypes.get("Global", 0)
                self._deptypes["Public"] = self._deptypes.get("Public", 1)
                self._deptypes["Private"] = self._deptypes.get("Private", 2)
                self._deptypes["Interface"] = self._deptypes.get("Interface", 3)

        return self._deptypes[deptype]

    def get_direct_nonprivate_graph(self):
        """Get a graph view of direct nonprivate edges."""

        def filter_direct_nonprivate_edges(n1, n2):
            return self[n1][n2].get(EdgeProps.direct.name) and (
                self[n1][n2].get(EdgeProps.visibility.name) == self.get_deptype("Public")
                or self[n1][n2].get(EdgeProps.visibility.name) == self.get_deptype("Interface")
            )

        return networkx.subgraph_view(self, filter_edge=filter_direct_nonprivate_edges)

    def get_node_tree(self, node):
        """Get a tree with the passed node as the single root."""

        direct_nonprivate_graph = self.get_direct_nonprivate_graph()
        substree_set = networkx.descendants(direct_nonprivate_graph, node)

        def subtree(n1):
            return n1 in substree_set or n1 == node

        return networkx.subgraph_view(direct_nonprivate_graph, filter_node=subtree)

    def get_progress(self, value=None):
        """
        Set if a progress bar should be used or not.

        No args means use progress bar if available.
        """

        if value is None:
            value = "progressbar" in globals()

        if self._progressbar:
            return self._progressbar

        if value:

            def get_progress_bar(title, *args):
                custom_bar = progressbar.ProgressBar(
                    widgets=[
                        title,
                        progressbar.Counter(format="[%(value)d/%(max_value)d]"),
                        progressbar.Timer(format=" Time: %(elapsed)s "),
                        progressbar.Bar(marker=">", fill=" ", left="|", right="|"),
                    ]
                )
                return custom_bar(*args)

            self._progressbar = get_progress_bar
        else:
            self._progressbar = null_progressbar

        return self._progressbar


def load_libdeps_graph(graph_file):
    """Load a graphml file and create a LibdepGraph."""

    graph = networkx.read_graphml(graph_file)
    return LibdepsGraph(graph=graph)
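For context on the view-filtering pattern used in the deleted LibdepsGraph class above: networkx.subgraph_view builds a read-only filtered view without copying any nodes or edges. The following minimal sketch is not part of this commit; node names and the integer visibility values are invented for illustration.

    import networkx

    g = networkx.DiGraph()
    g.add_edge("mongod", "libquery", direct=True, visibility=1)    # direct Public
    g.add_edge("mongod", "libutil", direct=True, visibility=2)     # direct Private
    g.add_edge("libquery", "libutil", direct=False, visibility=1)  # transitive

    def direct_nonprivate(n1, n2):
        # mirrors filter_direct_nonprivate_edges: keep direct, non-Private edges
        edge = g[n1][n2]
        return edge["direct"] and edge["visibility"] != 2

    view = networkx.subgraph_view(g, filter_edge=direct_nonprivate)
    print(sorted(view.edges()))  # [('mongod', 'libquery')]

Because the view is lazy, later mutations of the underlying graph are reflected in it, which is why the deleted code could cheaply derive one view from another in get_node_tree().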
@@ -25,13 +25,8 @@ MONGO_REVISION_ENV_VAR = "REVISION"

 def _get_repos_and_revisions() -> Tuple[List[Repo], RevisionMap]:
     """Get the repo object and a map of revisions to compare against."""
-    modules = [
-        path
-        for path in git.get_module_paths()
-        # Exclude enterprise module; it's in the "modules" folder but does not correspond to a repo
-        if "src/mongo/db/modules/enterprise" not in path
-    ]
-    repos = [Repo(path) for path in modules]
+    repos = [Repo(git.get_base_dir())]

     revision_map = generate_revision_map(repos, {"mongo": os.environ.get(MONGO_REVISION_ENV_VAR)})
     return repos, revision_map
@@ -8,11 +8,9 @@ import re
 from pathlib import Path
 from typing import Callable, List

-from buildscripts import moduleconfig
 from buildscripts.linter import git_base as _git

 # Path to the modules in the mongodb source tree
-# Has to match the string in SConstruct
 MODULE_DIR = "src/mongo/db/modules"

@@ -31,25 +29,9 @@ def get_base_dir():
     return os.path.dirname(os.path.dirname(os.path.realpath(__file__)))


-def get_module_paths() -> List[str]:
-    """Get a list of paths that contain modules."""
-    base_dir = get_base_dir()
-
-    # Get a list of modules
-    mongo_modules = moduleconfig.discover_module_directories(
-        os.path.join(base_dir, MODULE_DIR), None
-    )
-
-    paths = [os.path.join(base_dir, MODULE_DIR, m) for m in mongo_modules]
-    paths.append(base_dir)
-
-    return paths
-
-
 def get_repos() -> List[Repo]:
     """Get a list of Repos to check linters for."""
-    paths = get_module_paths()
-    return [Repo(p) for p in paths]
+    return [Repo(get_base_dir())]


 class Repo(_git.Repository):
@@ -1,225 +0,0 @@
#!/usr/bin/env python3
"""Helper script for constructing an archive (zip or tar) from a list of files.

The output format (tar, tgz, zip) is determined from the file name, unless the user specifies
--format on the command line.

This script simplifies the specification of filename transformations, so that, e.g.,
src/mongo/foo.cpp and build/linux2/normal/buildinfo.cpp can get put into the same
directory in the archive, perhaps mongodb-2.0.2/src/mongo.

Usage:

make_archive.py -o <output-file> [--format (tar|tgz|zip)] \\
    [--transform match1=replacement1 [--transform match2=replacement2 [...]]] \\
    <input file 1> [...]

If the input file names start with "@", the file is expected to contain a list of
whitespace-separated file names to include in the archive. This helps get around the Windows
command line length limit.

Transformations are processed in command-line order and are short-circuiting. So, if a file matches
match1, it is never compared against match2 or later. Matches are just python startswith()
comparisons.

For a detailed usage example, see src/SConscript.client or src/mongo/SConscript.
"""

import optparse
import os
import shlex
import shutil
import sys
import tempfile
import zipfile
from subprocess import PIPE, STDOUT, Popen


def main(argv):
    """Execute Main program."""
    args = []
    for arg in argv[1:]:
        if arg.startswith("@"):
            file_name = arg[1:]
            f_handle = open(file_name, "r")
            args.extend(s1.strip('"') for s1 in shlex.split(f_handle.readline(), posix=False))
            f_handle.close()
        else:
            args.append(arg)

    opts = parse_options(args)
    if opts.archive_format in ("tar", "tgz"):
        make_tar_archive(opts)
    elif opts.archive_format == "zip":
        make_zip_archive(opts)
    else:
        raise ValueError('Unsupported archive format "%s"' % opts.archive_format)


def delete_directory(directory):
    """Recursively deletes a directory and its contents."""
    try:
        shutil.rmtree(directory)
    except Exception:
        pass


def make_tar_archive(opts):
    """Generate tar archive.

    Given the parsed options, generates the 'opt.output_filename'
    tarball containing all the files in 'opt.input_filename' renamed
    according to the mappings in 'opts.transformations'.

    e.g. for an input file named "a/mongo/build/DISTSRC", and an
    existing transformation {"a/mongo/build": "release"}, the input
    file will be written to the tarball as "release/DISTSRC"

    All files to be compressed are copied into new directories as
    required by 'opts.transformations'. Once the tarball has been
    created, all temporary directory structures created for the
    purposes of compressing, are removed.
    """
    tar_options = "cvf"
    if opts.archive_format == "tgz":
        tar_options += "z"

    # clean and create a temp directory to copy files to
    enclosing_archive_directory = tempfile.mkdtemp(prefix="archive_", dir=os.path.abspath("build"))
    output_tarfile = os.path.join(os.getcwd(), opts.output_filename)

    tar_command = ["tar", tar_options, output_tarfile]

    for input_filename in opts.input_filenames:
        preferred_filename = get_preferred_filename(input_filename, opts.transformations)
        temp_file_location = os.path.join(enclosing_archive_directory, preferred_filename)
        enclosing_file_directory = os.path.dirname(temp_file_location)
        if not os.path.exists(enclosing_file_directory):
            os.makedirs(enclosing_file_directory)
        print("copying %s => %s" % (input_filename, temp_file_location))
        if os.path.isdir(input_filename):
            shutil.copytree(input_filename, temp_file_location)
        else:
            shutil.copy2(input_filename, temp_file_location)
        tar_command.append(preferred_filename)

    print(" ".join(tar_command))
    # execute the full tar command
    run_directory = os.path.join(os.getcwd(), enclosing_archive_directory)
    proc = Popen(tar_command, stdout=PIPE, stderr=STDOUT, bufsize=0, cwd=run_directory)
    proc.wait()

    # delete temp directory
    delete_directory(enclosing_archive_directory)


def make_zip_archive(opts):
    """Generate the zip archive.

    Given the parsed options, generates the 'opt.output_filename'
    zipfile containing all the files in 'opt.input_filename' renamed
    according to the mappings in 'opts.transformations'.

    All files in 'opt.output_filename' are renamed before being
    written into the zipfile.
    """
    archive = open_zip_archive_for_write(opts.output_filename)
    try:
        for input_filename in opts.input_filenames:
            archive.add(
                input_filename, arcname=get_preferred_filename(input_filename, opts.transformations)
            )
    finally:
        archive.close()


def parse_options(args):
    """Parse program options."""
    parser = optparse.OptionParser()
    parser.add_option(
        "-o",
        dest="output_filename",
        default=None,
        help="Name of the archive to output.",
        metavar="FILE",
    )
    parser.add_option(
        "--format",
        dest="archive_format",
        default=None,
        choices=("zip", "tar", "tgz"),
        help=(
            "Format of archive to create. "
            "If omitted, use the suffix of the output filename to decide."
        ),
    )
    parser.add_option("--transform", action="append", dest="transformations", default=[])

    (opts, input_filenames) = parser.parse_args(args)
    opts.input_filenames = []

    for input_filename in input_filenames:
        if input_filename.startswith("@"):
            opts.input_filenames.extend(open(input_filename[1:], "r").read().split())
        else:
            opts.input_filenames.append(input_filename)

    if opts.output_filename is None:
        parser.error("-o switch is required")

    if opts.archive_format is None:
        if opts.output_filename.endswith(".zip"):
            opts.archive_format = "zip"
        elif opts.output_filename.endswith("tar.gz") or opts.output_filename.endswith(".tgz"):
            opts.archive_format = "tgz"
        elif opts.output_filename.endswith(".tar"):
            opts.archive_format = "tar"
        else:
            parser.error(
                'Could not deduce archive format from output filename "%s"' % opts.output_filename
            )

    try:
        opts.transformations = [
            xform.replace(os.path.altsep or os.path.sep, os.path.sep).split("=", 1)
            for xform in opts.transformations
        ]
    except Exception as err:
        parser.error(err)

    return opts


def open_zip_archive_for_write(filename):
    """Open a zip archive for writing and return it."""

    # Infuriatingly, Zipfile calls the "add" method "write", but they're otherwise identical,
    # for our purposes. WrappedZipFile is a minimal adapter class.
    class WrappedZipFile(zipfile.ZipFile):
        """WrappedZipFile class."""

        def add(self, filename, arcname):
            """Add filename to zip."""
            return self.write(filename, arcname)

    return WrappedZipFile(filename, "w", zipfile.ZIP_DEFLATED)


def get_preferred_filename(input_filename, transformations):
    """Return preferred filename.

    Perform a prefix subsitution on 'input_filename' for the
    first matching transformation in 'transformations' and
    returns the substituted string.
    """
    for match, replace in transformations:
        match_lower = match.lower()
        input_filename_lower = input_filename.lower()
        if input_filename_lower.startswith(match_lower):
            return replace + input_filename[len(match) :]
    return input_filename


if __name__ == "__main__":
    main(sys.argv)
    sys.exit(0)
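To make the short-circuiting --transform rule from the deleted docstring concrete, here is a self-contained sketch (not from the repository; the paths are invented) of the same first-match-wins startswith substitution:

    def apply_transform(input_filename, transformations):
        # transformations is an ordered list of (match, replace) prefix pairs
        for match, replace in transformations:
            if input_filename.lower().startswith(match.lower()):
                return replace + input_filename[len(match):]  # first match wins
        return input_filename

    transforms = [
        ("build/linux2/normal", "mongodb-2.0.2/src/mongo"),
        ("src", "mongodb-2.0.2/src"),
    ]
    print(apply_transform("build/linux2/normal/buildinfo.cpp", transforms))
    # mongodb-2.0.2/src/mongo/buildinfo.cpp
    print(apply_transform("src/mongo/foo.cpp", transforms))
    # mongodb-2.0.2/src/mongo/foo.cpp

Both inputs land under the same archive directory, which is exactly the scenario the docstring describes.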
@@ -1,11 +1,11 @@
 """Generate vcxproj and vcxproj.filters files for browsing code in Visual Studio 2015.

-To build mongodb, you must use scons. You can use this project to navigate code during debugging.
+To build mongodb, you must use bazel. You can use this project to navigate code during debugging.

 HOW TO USE

 First, you need a compile_commands.json file, to generate run the following command:
-    scons compiledb
+    bazel build compiledb

 Next, run the following command
     python buildscripts/make_vcxproj.py FILE_NAME
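For reference, compile_commands.json follows the standard clang compilation-database shape (directory/command/file entries), which is what make_vcxproj.py walks. A hedged sketch with a single fabricated entry:

    import json

    # One fabricated entry in the standard compilation-database format.
    compile_commands = json.loads("""
    [
      {
        "directory": "/repo",
        "command": "cl.exe /c src/mongo/db/example.cpp",
        "file": "src/mongo/db/example.cpp"
      }
    ]
    """)
    for entry in compile_commands:
        # the generator inspects each command line to classify the source file
        print(entry["file"], "compiled by", entry["command"].split()[0])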
@@ -257,10 +257,6 @@ class ProjFileGenerator(object):
                 break
             prev_arg = arg

-        # Skip files made by scons for configure testing
-        if "sconf_temp" in file_name:
-            return
-
         if file_name not in self.files:
             self.files.add(file_name)
@@ -300,7 +296,7 @@ class ProjFileGenerator(object):
         # 3. Output these lists of files to vcxproj and vcxproj.headers
         # Note: order of these lists does not matter, VS will sort them anyway
         dirs = set()
-        scons_files = set()
+        bazel_files = set()

         for file_name in self.files:
             dirs.add(os.path.dirname(file_name))
@@ -331,13 +327,12 @@ class ProjFileGenerator(object):

         dirs = dirs.union(base_dirs)

-        # Get all the scons files
+        # Get all the bazel files
         for directory in dirs:
             if os.path.exists(directory):
                 for file_name in os.listdir(directory):
-                    if file_name == "SConstruct" or "SConscript" in file_name:
-                        scons_files.add(directory + "\\" + file_name)
-        scons_files.add("SConstruct")
+                    if file_name == "BUILD.bazel" or ".bazel" in file_name:
+                        bazel_files.add(directory + "\\" + file_name)

         # Write a list of directory entries with unique guids
         self.filters.write("  <ItemGroup>\n")
@@ -365,9 +360,9 @@ class ProjFileGenerator(object):
             self.filters.write("    </ClInclude>\n")
         self.filters.write("  </ItemGroup>\n")

-        # Write a list of scons files
+        # Write a list of bazel files
         self.filters.write("  <ItemGroup>\n")
-        for file_name in sorted(scons_files):
+        for file_name in sorted(bazel_files):
             self.filters.write("    <None Include='%s'>\n" % file_name)
             self.filters.write("      <Filter>%s</Filter>\n" % os.path.dirname(file_name))
             self.filters.write("    </None>\n")
@@ -380,9 +375,9 @@ class ProjFileGenerator(object):
             self.vcxproj.write("    <ClInclude Include='%s' />\n" % file_name)
         self.vcxproj.write("  </ItemGroup>\n")

-        # Write a list of scons files into the vcxproj
+        # Write a list of bazel files into the vcxproj
         self.vcxproj.write("  <ItemGroup>\n")
-        for file_name in sorted(scons_files):
+        for file_name in sorted(bazel_files):
             self.vcxproj.write("    <None Include='%s' />\n" % file_name)
         self.vcxproj.write("  </ItemGroup>\n")
@@ -1,223 +0,0 @@
"""Utility functions for SCons to discover and configure MongoDB modules.

A MongoDB module is an organized collection of source code and build rules that can be provided at
compile-time to alter or extend the behavior of MongoDB. The files comprising a single MongoDB
module are arranged in a directory hierarchy, rooted in a directory whose name is by convention the
module name, and containing in that root directory at least two files: a build.py file and a
SConscript file.

MongoDB modules are discovered by a call to the discover_modules() function, whose sole parameter is
the directory which is the immediate parent of all module directories. The exact directory is
chosen by the SConstruct file, which is the direct consumer of this python module. The only rule is
that it must be a subdirectory of the src/ directory, to correctly work with the SCons variant
directory system that separates build products for source.

Once discovered, modules are configured by the configure_modules() function, and the build system
integrates their SConscript files into the rest of the build.

MongoDB module build.py files implement a single function, configure(conf, env), which they may use
to configure the supplied "env" object. The configure functions may add extra LIBDEPS to mongod,
mongos and the mongo shell (TODO: other mongo tools and the C++ client), and through those libraries
alter those programs' behavior.

MongoDB module SConscript files can describe libraries, programs and unit tests, just as other
MongoDB SConscript files do.
"""

__all__ = (
    "discover_modules",
    "discover_module_directories",
    "configure_modules",
    "register_module_test",
)

import imp
import inspect
import os


def discover_modules(module_root, allowed_modules):
    """Scan module_root for subdirectories that look like MongoDB modules.

    Return a list of imported build.py module objects.
    """
    found_modules = []
    found_module_names = []

    if allowed_modules is not None:
        allowed_modules = allowed_modules.split(",")
        # When `--modules=` is passed, the split on empty string is represented
        # in memory as ['']
        if allowed_modules == [""]:
            allowed_modules = []

    if not os.path.isdir(module_root):
        if allowed_modules:
            raise RuntimeError(
                f"Requested the following modules: {allowed_modules}, but the module root '{module_root}' could not be found. Check the module root, or remove the module from the scons invocation."
            )
        return found_modules

    for name in os.listdir(module_root):
        root = os.path.join(module_root, name)
        if name.startswith(".") or not os.path.isdir(root):
            continue

        build_py = os.path.join(root, "build.py")
        module = None

        if allowed_modules is not None and name not in allowed_modules:
            print("skipping module: %s" % (name))
            continue

        try:
            print("adding module: %s" % (name))
            fp = open(build_py, "r")
            try:
                module = imp.load_module(
                    "module_" + name, fp, build_py, (".py", "r", imp.PY_SOURCE)
                )
                if getattr(module, "name", None) is None:
                    module.name = name
                found_modules.append(module)
                found_module_names.append(name)
            finally:
                fp.close()
        except (FileNotFoundError, IOError):
            pass

    if allowed_modules is not None:
        missing_modules = set(allowed_modules) - set(found_module_names)
        if missing_modules:
            raise RuntimeError(f"Failed to locate all modules. Could not find: {missing_modules}")

    return found_modules


def discover_module_directories(module_root, allowed_modules):
    """Scan module_root for subdirectories that look like MongoDB modules.

    Return a list of directory names.
    """
    if not os.path.isdir(module_root):
        return []

    found_modules = []

    if allowed_modules is not None:
        allowed_modules = allowed_modules.split(",")

    for name in os.listdir(module_root):
        root = os.path.join(module_root, name)
        if name.startswith(".") or not os.path.isdir(root):
            continue

        build_py = os.path.join(root, "build.py")

        if allowed_modules is not None and name not in allowed_modules:
            print("skipping module: %s" % (name))
            continue

        if os.path.isfile(build_py):
            print("adding module: %s" % (name))
            found_modules.append(name)

    return found_modules


def configure_modules(modules, conf):
    """Run the configure() function in the build.py python modules for each module in "modules".

    The modules were created by discover_modules.

    The configure() function should prepare the Mongo build system for building the module.
    """
    env = conf.env
    env["MONGO_MODULES"] = []
    for module in modules:
        name = module.name
        print("configuring module: %s" % (name))
        modules_configured = module.configure(conf, env)
        if modules_configured:
            for module_name in modules_configured:
                env["MONGO_MODULES"].append(module_name)
        else:
            env["MONGO_MODULES"].append(name)


def get_module_sconscripts(modules):
    """Return all modules' sconscripts."""
    sconscripts = []
    for mod in modules:
        module_dir_path = __get_src_relative_path(os.path.join(os.path.dirname(mod.__file__)))
        sconscripts.append(os.path.join(module_dir_path, "SConscript"))
    return sconscripts


def __get_src_relative_path(path):
    """Return a path relative to ./src.

    The src directory is important because of its relationship to BUILD_DIR,
    established in the SConstruct file. For variant directories to work properly
    in SCons, paths relative to the src or BUILD_DIR must often be generated.
    """
    src_dir = os.path.abspath("src")
    path = os.path.abspath(os.path.normpath(path))
    if not path.startswith(src_dir):
        raise ValueError('Path "%s" is not relative to the src directory "%s"' % (path, src_dir))
    result = path[len(src_dir) + 1 :]
    return result


def __get_module_path(module_frame_depth):
    """Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames.

    This is above this function, relative to the "src" directory.
    """
    module_filename = inspect.stack()[module_frame_depth + 1][1]
    return os.path.dirname(__get_src_relative_path(module_filename))


def __get_module_src_path(module_frame_depth):
    """Return the path relative to the SConstruct file of the MongoDB module's source tree.

    module_frame_depth is the number of frames above the current one in which one can find a
    function from the MongoDB module's build.py function.
    """
    return os.path.join("src", __get_module_path(module_frame_depth + 1))


def __get_module_build_path(module_frame_depth):
    """Return the path relative to the SConstruct file of the MongoDB module's build tree.

    module_frame_depth is the number of frames above the current one in which one can find a
    function from the MongoDB module's build.py function.
    """
    return os.path.join("$BUILD_DIR", __get_module_path(module_frame_depth + 1))


def get_current_module_src_path():
    """Return the path relative to the SConstruct file of the current MongoDB module's source tree.

    May only meaningfully be called from within build.py
    """
    return __get_module_src_path(1)


def get_current_module_build_path():
    """Return the path relative to the SConstruct file of the current MongoDB module's build tree.

    May only meaningfully be called from within build.py
    """

    return __get_module_build_path(1)


def get_current_module_libdep_name(libdep_rel_path):
    """Return a $BUILD_DIR relative path to a "libdep_rel_path".

    The "libdep_rel_path" is relative to the MongoDB module's build.py file.

    May only meaningfully be called from within build.py
    """
    return os.path.join(__get_module_build_path(1), libdep_rel_path)
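The configure(conf, env) contract the deleted docstring describes can be illustrated with a hypothetical build.py for a module rooted at src/mongo/db/modules/<name>. Nothing below is a real MongoDB module; env.Append is standard SCons API, and the macro name is invented.

    # build.py (hypothetical module)
    name = "example_module"

    def configure(conf, env):
        # A module may alter the construction environment, e.g. define a
        # macro so core code can detect that the module is compiled in.
        env.Append(CPPDEFINES=["MONGO_EXAMPLE_MODULE"])
        # Returning a list of names is optional; configure_modules() above
        # falls back to the module's directory name on a falsy return.
        return [name]

This matches the truthiness check in configure_modules(): a module returning a list registers those names in env["MONGO_MODULES"], otherwise its directory name is registered.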
@@ -88,7 +88,7 @@ def create_test_membership_map(fail_on_missing_selector=False, test_kind=None):
             # We ignore errors from missing files referenced in the test suite's "selector"
             # section. Certain test suites (e.g. unittests.yml) have a dedicated text file to
             # capture the list of tests they run; the text file may not be available if the
-            # associated SCons target hasn't been built yet.
+            # associated bazel target hasn't been built yet.
             if err.filename in _config.EXTERNAL_SUITE_SELECTORS:
                 if not fail_on_missing_selector:
                     continue
@@ -1,43 +0,0 @@
#!/usr/bin/env python3
"""Scons module."""

import os
import sys

SCONS_VERSION = os.environ.get("SCONS_VERSION", "3.1.2")

MONGODB_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SCONS_DIR = os.path.join(
    MONGODB_ROOT, "src", "third_party", "scons-" + SCONS_VERSION, "scons-local-" + SCONS_VERSION
)

if not os.path.exists(SCONS_DIR):
    print("Could not find SCons in '%s'" % (SCONS_DIR))
    sys.exit(1)

SITE_TOOLS_DIR = os.path.join(MONGODB_ROOT, "site_scons")

sys.path = [SCONS_DIR, SITE_TOOLS_DIR] + sys.path

from mongo.pip_requirements import MissingRequirements, verify_requirements

try:
    verify_requirements()
except MissingRequirements as ex:
    print(ex)
    sys.exit(1)

try:
    import SCons.Script
except ImportError as import_err:
    print("Could not import SCons from '%s'" % (SCONS_DIR))
    print("ImportError:", import_err)
    sys.exit(1)


def entrypoint():
    SCons.Script.main()


if __name__ == "__main__":
    entrypoint()
@@ -1,192 +0,0 @@
#!/USSR/bin/python
# encoding: utf-8
"""
Prune the scons cache.

This script, borrowed from some waf code, with a stand alone interface, provides a way to
remove files from the cache on an LRU (least recently used) basis to prevent the scons cache
from outgrowing the storage capacity.
"""

# Inspired by: https://github.com/krig/waf/blob/master/waflib/extras/lru_cache.py
# Thomas Nagy 2011

import argparse
import collections
import logging
import os
import shutil

LOGGER = logging.getLogger("scons.cache.prune.lru")  # type: ignore

GIGBYTES = 1024 * 1024 * 1024

CacheItem = collections.namedtuple("CacheContents", ["path", "time", "size"])


def get_cachefile_size(file_path, is_cksum):
    """Get the size of the cachefile."""
    if is_cksum:
        size = 0
        for cksum_path in os.listdir(file_path):
            cksum_path = os.path.join(file_path, cksum_path)
            size += os.stat(cksum_path).st_size
    else:
        size = os.stat(file_path).st_size
    return size


def collect_cache_contents(cache_path):
    """Collect the cache contents."""
    # map folder names to timestamps
    contents = []
    total = 0

    # collect names of directories and creation times
    for name in os.listdir(cache_path):
        path = os.path.join(cache_path, name)

        if os.path.isdir(path):
            for file_name in os.listdir(path):
                file_path = os.path.join(path, file_name)
                # Cache prune script is allowing only directories with this extension
                # which comes from the validate_cache_dir.py tool in SCons, it must match
                # the extension set in that file.
                cksum_type = False
                if os.path.isdir(file_path):
                    hash_length = -32
                    tmp_length = -len(".cksum.tmp") + hash_length
                    cksum_type = (
                        file_path.lower().endswith(".cksum")
                        or file_path.lower().endswith(".del")
                        or file_path.lower()[tmp_length:hash_length] == ".cksum.tmp"
                    )

                    if not cksum_type:
                        LOGGER.warning(
                            "cache item %s is a directory and not a file. "
                            "The cache may be corrupt.",
                            file_path,
                        )
                        continue

                try:
                    item = CacheItem(
                        path=file_path,
                        time=os.stat(file_path).st_atime,
                        size=get_cachefile_size(file_path, cksum_type),
                    )

                    total += item.size

                    contents.append(item)
                except OSError as err:
                    LOGGER.warning("Ignoring error querying file %s : %s", file_path, err)

    return (total, contents)


def prune_cache(cache_path, cache_size_gb, clean_ratio):
    """Prune the cache."""
    # This function is taken as is from waf, with the interface cleaned up and some minor
    # stylistic changes.

    cache_size = cache_size_gb * GIGBYTES

    (total_size, contents) = collect_cache_contents(cache_path)

    LOGGER.info("cache size %d, quota %d", total_size, cache_size)

    if total_size >= cache_size:
        LOGGER.info("trimming the cache since %d > %d", total_size, cache_size)

        # make a list to sort the folders' by timestamp
        contents.sort(key=lambda x: x.time, reverse=True)  # sort by timestamp

        # now that the contents of things to delete is sorted by timestamp in reverse order, we
        # just delete things until the total_size falls below the target cache size ratio.
        while total_size >= cache_size * clean_ratio:
            if not contents:
                LOGGER.error(
                    "cache size is over quota, and there are no files in " "the queue to delete."
                )
                return False

            cache_item = contents.pop()

            # check the atime again just to make sure something wasn't accessed while
            # we pruning other files.
            try:
                if cache_item.time < os.stat(cache_item.path).st_atime:
                    continue
            except FileNotFoundError as err:
                LOGGER.warning("Unable to find file %s : %s", cache_item, err)
                continue

            to_remove = cache_item.path + ".del"
            try:
                os.rename(cache_item.path, to_remove)
            except Exception as err:
                # another process may have already cleared the file.
                LOGGER.warning("Unable to rename %s : %s", cache_item, err)
            else:
                try:
                    if os.path.isdir(to_remove):
                        shutil.rmtree(to_remove)
                    else:
                        os.remove(to_remove)
                    total_size -= cache_item.size
                except Exception as err:
                    # this should not happen, but who knows?
                    LOGGER.error(
                        "error [%s, %s] removing file '%s', " "please report this error",
                        err,
                        type(err),
                        to_remove,
                    )

        LOGGER.info("total cache size at the end of pruning: %d", total_size)
        return True
    LOGGER.info("cache size (%d) is currently within boundaries", total_size)
    return True


def main():
    """Execute Main entry."""

    logging.basicConfig(level=logging.INFO)

    parser = argparse.ArgumentParser(description="SCons cache pruning tool")

    parser.add_argument("--cache-dir", "-d", default=None, help="path to the cache directory.")
    parser.add_argument(
        "--cache-size", "-s", default=200, type=int, help="maximum size of cache in GB."
    )
    parser.add_argument(
        "--prune-ratio",
        "-p",
        default=0.8,
        type=float,
        help=(
            "ratio (as 1.0 > x > 0) of total cache size to prune " "to when cache exceeds quota."
        ),
    )
    parser.add_argument("--print-cache-dir", default=False, action="store_true")

    args = parser.parse_args()

    if args.cache_dir is None or not os.path.isdir(args.cache_dir):
        LOGGER.error("must specify a valid cache path, [%s]", args.cache_dir)
        exit(1)

    ok = prune_cache(
        cache_path=args.cache_dir, cache_size_gb=args.cache_size, clean_ratio=args.prune_ratio
    )

    if not ok:
        LOGGER.error("encountered error cleaning the cache. exiting.")
        exit(1)


if __name__ == "__main__":
    main()
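The core of the LRU policy in the deleted prune_cache() can be isolated into a few lines: sort by access time (newest first), then pop oldest entries until usage drops below quota * clean_ratio. A minimal sketch, not from the repository, with fabricated sizes and timestamps:

    from collections import namedtuple

    CacheItem = namedtuple("CacheItem", ["path", "time", "size"])

    def select_for_deletion(items, total, quota, clean_ratio=0.8):
        # newest first, so pop() yields the least recently used item
        items = sorted(items, key=lambda x: x.time, reverse=True)
        doomed = []
        while total >= quota * clean_ratio and items:
            victim = items.pop()
            doomed.append(victim.path)
            total -= victim.size
        return doomed

    items = [CacheItem("a", 1, 40), CacheItem("b", 2, 40), CacheItem("c", 3, 40)]
    print(select_for_deletion(items, total=120, quota=100))  # ['a', 'b']

Pruning to a ratio below the quota (rather than exactly to it) gives the cache headroom so it is not re-pruned on every subsequent build.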
@@ -28,26 +28,9 @@ class Report(TypedDict):
     results: List[Result]


-def _open_and_truncate_log_lines(log_file: pathlib.Path) -> List[str]:
-    with open(log_file) as fh:
-        lines = fh.read().splitlines()
-    for i, line in enumerate(lines):
-        if line == "scons: done reading SConscript files.":
-            offset = i
-            # if possible, also shave off the current and next line
-            # as they contain:
-            # scons: done reading SConscript files.
-            # scons: Building targets ...
-            # which is superfluous.
-            if len(lines) > i + 2:
-                offset = i + 2
-            return lines[offset:]
-
-    return lines
-
-
 def _clean_log_file(log_file: pathlib.Path, dedup_lines: bool) -> str:
-    lines = _open_and_truncate_log_lines(log_file)
+    with open(log_file) as fh:
+        lines = fh.readlines()
     if dedup_lines:
         lines = _dedup_lines(lines)
     return os.linesep.join(lines)
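_dedup_lines is referenced but not shown in this hunk, so its real implementation is unknown here. A plausible sketch of such a helper, collapsing consecutive duplicate log lines, would be:

    from itertools import groupby
    from typing import List

    def dedup_lines(lines: List[str]) -> List[str]:
        # keep one representative of each run of identical lines
        return [line for line, _ in groupby(lines)]

    print(dedup_lines(["a", "a", "b", "a"]))  # ['a', 'b', 'a']

This is only an illustration of the idea; the actual helper in the repository may dedup differently.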
@@ -16,7 +16,7 @@ class TestSuitesConfigurations(unittest.TestCase):
                 # We ignore errors from missing files referenced in the test suite's "selector"
                 # section. Certain test suites (e.g. unittests.yml) have a dedicated text file to
                 # capture the list of tests they run; the text file may not be available if the
-                # associated SCons target hasn't been built yet.
+                # associated bazel target hasn't been built yet.
                 if err.filename in config.EXTERNAL_SUITE_SELECTORS:
                     continue
             except Exception as ex:
Some files were not shown because too many files have changed in this diff