mirror of https://github.com/astral-sh/ruff

Clean up Python implementation; enable all projects

commit 1c820b0200, parent 1f4e87f043
@@ -1,3 +1,6 @@
+"""
+Execution, comparison, and summary of `ruff check` ecosystem checks.
+"""
 from __future__ import annotations
 
 import asyncio
@@ -11,7 +14,7 @@ from subprocess import PIPE
 from typing import TYPE_CHECKING, Iterator, Self, Sequence
 
 from ruff_ecosystem import logger
-from ruff_ecosystem.markdown import markdown_project_section, markdown_details
+from ruff_ecosystem.markdown import markdown_details, markdown_project_section
 from ruff_ecosystem.types import (
     Comparison,
     Diff,
@@ -33,40 +36,7 @@ CHECK_DIFF_LINE_RE = re.compile(
 )
 
 
-async def compare_check(
-    ruff_baseline_executable: Path,
-    ruff_comparison_executable: Path,
-    options: CheckOptions,
-    cloned_repo: ClonedRepository,
-) -> Comparison:
-    async with asyncio.TaskGroup() as tg:
-        baseline_task = tg.create_task(
-            ruff_check(
-                executable=ruff_baseline_executable.resolve(),
-                path=cloned_repo.path,
-                name=cloned_repo.fullname,
-                options=options,
-            ),
-        )
-        comparison_task = tg.create_task(
-            ruff_check(
-                executable=ruff_comparison_executable.resolve(),
-                path=cloned_repo.path,
-                name=cloned_repo.fullname,
-                options=options,
-            ),
-        )
-
-    baseline_output, comparison_output = (
-        baseline_task.result(),
-        comparison_task.result(),
-    )
-    diff = Diff.new(baseline_output, comparison_output)
-
-    return Comparison(diff=diff, repo=cloned_repo)
-
-
-def summarize_check_result(result: Result) -> str:
+def markdown_check_result(result: Result) -> str:
     # Calculate the total number of rule changes
     all_rule_changes = RuleChanges()
     for _, comparison in result.completed:
@@ -154,82 +124,6 @@ def summarize_check_result(result: Result) -> str:
     return "\n".join(lines)
 
 
-def add_permalink_to_diagnostic_line(repo: ClonedRepository, line: str) -> str:
-    match = CHECK_DIFF_LINE_RE.match(line)
-    if match is None:
-        return line
-
-    pre, inner, path, lnum, post = match.groups()
-    url = repo.url_for(path, int(lnum))
-    return f"{pre} <a href='{url}'>{inner}</a> {post}"
-
-
-async def ruff_check(
-    *, executable: Path, path: Path, name: str, options: CheckOptions
-) -> Sequence[str]:
-    """Run the given ruff binary against the specified path."""
-    logger.debug(f"Checking {name} with {executable}")
-    ruff_args = options.to_cli_args()
-
-    start = time.time()
-    proc = await create_subprocess_exec(
-        executable.absolute(),
-        *ruff_args,
-        ".",
-        stdout=PIPE,
-        stderr=PIPE,
-        cwd=path,
-    )
-    result, err = await proc.communicate()
-    end = time.time()
-
-    logger.debug(f"Finished checking {name} with {executable} in {end - start:.2f}s")
-
-    if proc.returncode != 0:
-        raise RuffError(err.decode("utf8"))
-
-    # Strip summary lines so the diff is only diagnostic lines
-    lines = [
-        line
-        for line in result.decode("utf8").splitlines()
-        if not CHECK_SUMMARY_LINE_RE.match(line)
-    ]
-
-    return lines
-
-
-@dataclass(frozen=True)
-class CheckOptions(Serializable):
-    """
-    Ruff check options
-    """
-
-    select: str = ""
-    ignore: str = ""
-    exclude: str = ""
-
-    # Generating fixes is slow and verbose
-    show_fixes: bool = False
-
-    # Limit the number of reported lines per rule
-    max_lines_per_rule: int | None = 50
-
-    def markdown(self) -> str:
-        return f"select {self.select} ignore {self.ignore} exclude {self.exclude}"
-
-    def to_cli_args(self) -> list[str]:
-        args = ["check", "--no-cache", "--exit-zero"]
-        if self.select:
-            args.extend(["--select", self.select])
-        if self.ignore:
-            args.extend(["--ignore", self.ignore])
-        if self.exclude:
-            args.extend(["--exclude", self.exclude])
-        if self.show_fixes:
-            args.extend(["--show-fixes", "--ecosystem-ci"])
-        return args
-
-
 @dataclass(frozen=True)
 class RuleChanges:
     """
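The relocated `add_permalink_to_diagnostic_line` (re-added in the next hunk) relies on `CHECK_DIFF_LINE_RE` capturing five groups. The regex itself is truncated out of this diff, so the pattern below is only a hypothetical stand-in to illustrate the rewrite, not the real expression:

```python
import re

# Hypothetical stand-in for CHECK_DIFF_LINE_RE (the real pattern is not shown in
# this diff). It captures: the diff marker, the "path:line:col:" prefix, the path,
# the line number, and the rest of the diagnostic.
DIFF_LINE_RE = re.compile(
    r"^(?P<pre>[+-]) (?P<inner>(?P<path>[^:]+):(?P<lnum>\d+):\d+:) (?P<post>.*)$"
)


def add_permalink(line: str, url_for) -> str:
    match = DIFF_LINE_RE.match(line)
    if match is None:
        return line
    pre, inner, path, lnum, post = match.groups()
    url = url_for(path, int(lnum))
    return f"{pre} <a href='{url}'>{inner}</a> {post}"


def fake_url_for(path: str, line: int) -> str:
    # Placeholder for ClonedRepository.url_for
    return f"https://example.com/blob/main/{path}#L{line}"


print(add_permalink("+ foo/bar.py:10:4: F401 `os` imported but unused", fake_url_for))
```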
@@ -357,3 +251,112 @@ def limit_rule_lines(diff: Diff, max_per_rule: int | None = 100) -> list[str]:
             reduced.append(f"... {hidden_count} changes omitted for rule {code}")
 
     return reduced
+
+
+def add_permalink_to_diagnostic_line(repo: ClonedRepository, line: str) -> str:
+    match = CHECK_DIFF_LINE_RE.match(line)
+    if match is None:
+        return line
+
+    pre, inner, path, lnum, post = match.groups()
+    url = repo.url_for(path, int(lnum))
+    return f"{pre} <a href='{url}'>{inner}</a> {post}"
+
+
+async def compare_check(
+    ruff_baseline_executable: Path,
+    ruff_comparison_executable: Path,
+    options: CheckOptions,
+    cloned_repo: ClonedRepository,
+) -> Comparison:
+    async with asyncio.TaskGroup() as tg:
+        baseline_task = tg.create_task(
+            ruff_check(
+                executable=ruff_baseline_executable.resolve(),
+                path=cloned_repo.path,
+                name=cloned_repo.fullname,
+                options=options,
+            ),
+        )
+        comparison_task = tg.create_task(
+            ruff_check(
+                executable=ruff_comparison_executable.resolve(),
+                path=cloned_repo.path,
+                name=cloned_repo.fullname,
+                options=options,
+            ),
+        )
+
+    baseline_output, comparison_output = (
+        baseline_task.result(),
+        comparison_task.result(),
+    )
+    diff = Diff.from_pair(baseline_output, comparison_output)
+
+    return Comparison(diff=diff, repo=cloned_repo)
+
+
+async def ruff_check(
+    *, executable: Path, path: Path, name: str, options: CheckOptions
+) -> Sequence[str]:
+    """Run the given ruff binary against the specified path."""
+    logger.debug(f"Checking {name} with {executable}")
+    ruff_args = options.to_cli_args()
+
+    start = time.time()
+    proc = await create_subprocess_exec(
+        executable.absolute(),
+        *ruff_args,
+        ".",
+        stdout=PIPE,
+        stderr=PIPE,
+        cwd=path,
+    )
+    result, err = await proc.communicate()
+    end = time.time()
+
+    logger.debug(f"Finished checking {name} with {executable} in {end - start:.2f}s")
+
+    if proc.returncode != 0:
+        raise RuffError(err.decode("utf8"))
+
+    # Strip summary lines so the diff is only diagnostic lines
+    lines = [
+        line
+        for line in result.decode("utf8").splitlines()
+        if not CHECK_SUMMARY_LINE_RE.match(line)
+    ]
+
+    return lines
+
+
+@dataclass(frozen=True)
+class CheckOptions(Serializable):
+    """
+    Ruff check options
+    """
+
+    select: str = ""
+    ignore: str = ""
+    exclude: str = ""
+
+    # Generating fixes is slow and verbose
+    show_fixes: bool = False
+
+    # Limit the number of reported lines per rule
+    max_lines_per_rule: int | None = 50
+
+    def markdown(self) -> str:
+        return f"select {self.select} ignore {self.ignore} exclude {self.exclude}"
+
+    def to_cli_args(self) -> list[str]:
+        args = ["check", "--no-cache", "--exit-zero"]
+        if self.select:
+            args.extend(["--select", self.select])
+        if self.ignore:
+            args.extend(["--ignore", self.ignore])
+        if self.exclude:
+            args.extend(["--exclude", self.exclude])
+        if self.show_fixes:
+            args.extend(["--show-fixes", "--ecosystem-ci"])
+        return args
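For reference, the relocated `compare_check` runs the baseline and comparison binaries concurrently with `asyncio.TaskGroup` (Python 3.11+). A minimal standalone sketch of that pattern, with a placeholder coroutine in place of `ruff_check`:

```python
import asyncio


async def run_check(name: str) -> list[str]:
    # Placeholder for the real `ruff_check` coroutine; it just sleeps and
    # returns a fake list of diagnostic lines.
    await asyncio.sleep(0.1)
    return [f"{name}: example.py:1:1: F401 unused import"]


async def main() -> None:
    # Both tasks run concurrently; leaving the `async with` block waits for
    # both, and their values are read back with `.result()`.
    async with asyncio.TaskGroup() as tg:
        baseline_task = tg.create_task(run_check("baseline"))
        comparison_task = tg.create_task(run_check("comparison"))

    baseline, comparison = baseline_task.result(), comparison_task.result()
    print(baseline, comparison)


asyncio.run(main())
```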
@@ -117,21 +117,3 @@ def parse_args() -> argparse.Namespace:
     )
 
     return parser.parse_args()
-
-
-def get_executable_path(name: str) -> str | None:
-    # Add suffix for Windows executables
-    name += ".exe" if sys.platform == "win32" else ""
-
-    path = os.path.join(sysconfig.get_path("scripts"), name)
-
-    # The executable in the current interpreter's scripts directory.
-    if os.path.exists(path):
-        return path
-
-    # The executable in the global environment.
-    environment_path = shutil.which("ruff")
-    if environment_path:
-        return environment_path
-
-    return None
@@ -1,64 +1,67 @@
+"""
+Default projects for ecosystem checks
+"""
 from ruff_ecosystem.projects import CheckOptions, Project, Repository
 
-# TODO: Consider exporting this as JSON
+# TODO(zanieb): Consider exporting this as JSON and loading from there instead
 DEFAULT_TARGETS = [
-    # Project(repo=Repository(owner="DisnakeDev", name="disnake", ref="master")),
+    Project(repo=Repository(owner="DisnakeDev", name="disnake", ref="master")),
-    # Project(repo=Repository(owner="PostHog", name="HouseWatch", ref="main")),
+    Project(repo=Repository(owner="PostHog", name="HouseWatch", ref="main")),
-    # Project(repo=Repository(owner="RasaHQ", name="rasa", ref="main")),
+    Project(repo=Repository(owner="RasaHQ", name="rasa", ref="main")),
-    # Project(repo=Repository(owner="Snowflake-Labs", name="snowcli", ref="main")),
+    Project(repo=Repository(owner="Snowflake-Labs", name="snowcli", ref="main")),
-    # Project(repo=Repository(owner="aiven", name="aiven-client", ref="main")),
+    Project(repo=Repository(owner="aiven", name="aiven-client", ref="main")),
-    # Project(repo=Repository(owner="alteryx", name="featuretools", ref="main")),
+    Project(repo=Repository(owner="alteryx", name="featuretools", ref="main")),
-    # Project(
+    Project(
-    # repo=Repository(owner="apache", name="airflow", ref="main"),
+        repo=Repository(owner="apache", name="airflow", ref="main"),
-    # check_options=CheckOptions(select="ALL"),
+        check_options=CheckOptions(select="ALL"),
-    # ),
+    ),
-    # Project(repo=Repository(owner="aws", name="aws-sam-cli", ref="develop")),
+    Project(repo=Repository(owner="aws", name="aws-sam-cli", ref="develop")),
-    # Project(repo=Repository(owner="bloomberg", name="pytest-memray", ref="main")),
+    Project(repo=Repository(owner="bloomberg", name="pytest-memray", ref="main")),
     Project(
         repo=Repository(owner="bokeh", name="bokeh", ref="branch-3.3"),
         check_options=CheckOptions(select="ALL"),
     ),
-    # Project(repo=Repository(owner="commaai", name="openpilot", ref="master")),
+    Project(repo=Repository(owner="commaai", name="openpilot", ref="master")),
-    # Project(repo=Repository(owner="demisto", name="content", ref="master")),
+    Project(repo=Repository(owner="demisto", name="content", ref="master")),
-    # Project(repo=Repository(owner="docker", name="docker-py", ref="main")),
+    Project(repo=Repository(owner="docker", name="docker-py", ref="main")),
-    # Project(repo=Repository(owner="freedomofpress", name="securedrop", ref="develop")),
+    Project(repo=Repository(owner="freedomofpress", name="securedrop", ref="develop")),
-    # Project(repo=Repository(owner="fronzbot", name="blinkpy", ref="dev")),
+    Project(repo=Repository(owner="fronzbot", name="blinkpy", ref="dev")),
-    # Project(repo=Repository(owner="ibis-project", name="ibis", ref="master")),
+    Project(repo=Repository(owner="ibis-project", name="ibis", ref="master")),
-    # Project(repo=Repository(owner="ing-bank", name="probatus", ref="main")),
+    Project(repo=Repository(owner="ing-bank", name="probatus", ref="main")),
-    # Project(repo=Repository(owner="jrnl-org", name="jrnl", ref="develop")),
+    Project(repo=Repository(owner="jrnl-org", name="jrnl", ref="develop")),
-    # Project(repo=Repository(owner="latchbio", name="latch", ref="main")),
+    Project(repo=Repository(owner="latchbio", name="latch", ref="main")),
-    # Project(repo=Repository(owner="lnbits", name="lnbits", ref="main")),
+    Project(repo=Repository(owner="lnbits", name="lnbits", ref="main")),
-    # Project(repo=Repository(owner="milvus-io", name="pymilvus", ref="master")),
+    Project(repo=Repository(owner="milvus-io", name="pymilvus", ref="master")),
-    # Project(repo=Repository(owner="mlflow", name="mlflow", ref="master")),
+    Project(repo=Repository(owner="mlflow", name="mlflow", ref="master")),
-    # Project(repo=Repository(owner="model-bakers", name="model_bakery", ref="main")),
+    Project(repo=Repository(owner="model-bakers", name="model_bakery", ref="main")),
-    # Project(repo=Repository(owner="pandas-dev", name="pandas", ref="main")),
+    Project(repo=Repository(owner="pandas-dev", name="pandas", ref="main")),
-    # Project(repo=Repository(owner="prefecthq", name="prefect", ref="main")),
+    Project(repo=Repository(owner="prefecthq", name="prefect", ref="main")),
-    # Project(repo=Repository(owner="pypa", name="build", ref="main")),
+    Project(repo=Repository(owner="pypa", name="build", ref="main")),
-    # Project(repo=Repository(owner="pypa", name="cibuildwheel", ref="main")),
+    Project(repo=Repository(owner="pypa", name="cibuildwheel", ref="main")),
-    # Project(repo=Repository(owner="pypa", name="pip", ref="main")),
+    Project(repo=Repository(owner="pypa", name="pip", ref="main")),
-    # Project(repo=Repository(owner="pypa", name="setuptools", ref="main")),
+    Project(repo=Repository(owner="pypa", name="setuptools", ref="main")),
-    # Project(repo=Repository(owner="python", name="mypy", ref="master")),
+    Project(repo=Repository(owner="python", name="mypy", ref="master")),
-    # Project(
+    Project(
-    # repo=Repository(
+        repo=Repository(
-    # owner="python",
+            owner="python",
-    # name="typeshed",
+            name="typeshed",
-    # ref="main",
+            ref="main",
-    # ),
+        ),
-    # check_options=CheckOptions(select="PYI"),
+        check_options=CheckOptions(select="PYI"),
-    # ),
+    ),
-    # Project(repo=Repository(owner="python-poetry", name="poetry", ref="master")),
+    Project(repo=Repository(owner="python-poetry", name="poetry", ref="master")),
-    # Project(repo=Repository(owner="reflex-dev", name="reflex", ref="main")),
+    Project(repo=Repository(owner="reflex-dev", name="reflex", ref="main")),
-    # Project(repo=Repository(owner="rotki", name="rotki", ref="develop")),
+    Project(repo=Repository(owner="rotki", name="rotki", ref="develop")),
-    # Project(repo=Repository(owner="scikit-build", name="scikit-build", ref="main")),
+    Project(repo=Repository(owner="scikit-build", name="scikit-build", ref="main")),
-    # Project(
+    Project(
-    # repo=Repository(owner="scikit-build", name="scikit-build-core", ref="main")
+        repo=Repository(owner="scikit-build", name="scikit-build-core", ref="main")
-    # ),
+    ),
-    # Project(repo=Repository(owner="sphinx-doc", name="sphinx", ref="master")),
+    Project(repo=Repository(owner="sphinx-doc", name="sphinx", ref="master")),
-    # Project(repo=Repository(owner="spruceid", name="siwe-py", ref="main")),
+    Project(repo=Repository(owner="spruceid", name="siwe-py", ref="main")),
-    # Project(repo=Repository(owner="tiangolo", name="fastapi", ref="master")),
+    Project(repo=Repository(owner="tiangolo", name="fastapi", ref="master")),
-    # Project(repo=Repository(owner="yandex", name="ch-backup", ref="main")),
+    Project(repo=Repository(owner="yandex", name="ch-backup", ref="main")),
-    # Project(
+    Project(
-    # repo=Repository(owner="zulip", name="zulip", ref="main"),
+        repo=Repository(owner="zulip", name="zulip", ref="main"),
-    # check_options=CheckOptions(select="ALL"),
+        check_options=CheckOptions(select="ALL"),
-    # ),
+    ),
 ]
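The updated TODO suggests exporting these targets as JSON. A rough sketch of what that could look like, using simplified stand-in dataclasses rather than the real `ruff_ecosystem.projects` classes:

```python
import json
from dataclasses import asdict, dataclass


# Simplified stand-ins for the real Repository/Project dataclasses; the actual
# classes live in ruff_ecosystem.projects and carry more options.
@dataclass(frozen=True)
class Repository:
    owner: str
    name: str
    ref: str


@dataclass(frozen=True)
class Project:
    repo: Repository


targets = [
    Project(repo=Repository(owner="DisnakeDev", name="disnake", ref="master")),
    Project(repo=Repository(owner="PostHog", name="HouseWatch", ref="main")),
]

# Nested dataclasses serialize cleanly through `asdict`, so the whole list can
# be dumped (and later reloaded) as plain JSON.
print(json.dumps([asdict(project) for project in targets], indent=2))
```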
@@ -1,3 +1,7 @@
+"""
+Execution, comparison, and summary of `ruff format` ecosystem checks.
+"""
+
 from __future__ import annotations
 
 import re
@@ -6,7 +10,7 @@ from asyncio import create_subprocess_exec
 from dataclasses import dataclass
 from pathlib import Path
 from subprocess import PIPE
-from typing import TYPE_CHECKING, Self, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from unidiff import PatchSet
 
@@ -15,13 +19,13 @@ from ruff_ecosystem.markdown import markdown_project_section
 from ruff_ecosystem.types import Comparison, Diff, Result, RuffError
 
 if TYPE_CHECKING:
-    from ruff_ecosystem.projects import ClonedRepository, Project
+    from ruff_ecosystem.projects import ClonedRepository
 
 
 FORMAT_IGNORE_LINES = re.compile("^warning: `ruff format` is a work-in-progress.*")
 
 
-def summarize_format_result(result: Result) -> str:
+def markdown_format_result(result: Result) -> str:
     lines = []
     total_lines_removed = total_lines_added = 0
     total_files_modified = 0
@@ -58,7 +62,7 @@ def summarize_format_result(result: Result) -> str:
        lines.extend(
            markdown_project_section(
                title=title,
-               content=patch_set_with_permalinks(patch_set, comparison.repo),
+               content=format_patchset(patch_set, comparison.repo),
                options=project.format_options,
                project=project,
            )
@@ -77,6 +81,57 @@ def summarize_format_result(result: Result) -> str:
     return "\n".join(lines)
 
 
+def format_patchset(patch_set: PatchSet, repo: ClonedRepository) -> str:
+    """
+    Convert a patchset to markdown, adding permalinks to the start of each hunk.
+    """
+    lines = []
+    for file_patch in patch_set:
+        for hunk in file_patch:
+            # Note: When used for `format` checks, the line number is not exact because
+            # we formatted the repository for a baseline; we can't know the exact
+            # line number in the original
+            # source file.
+            hunk_link = repo.url_for(file_patch.path, hunk.source_start)
+            hunk_lines = str(hunk).splitlines()
+
+            # Add a link before the hunk
+            link_title = file_patch.path + "~L" + str(hunk.source_start)
+            lines.append(f"<a href='{hunk_link}'>{link_title}</a>")
+
+            # Wrap the contents of the hunk in a diff code block
+            lines.append("```diff")
+            lines.extend(hunk_lines[1:])
+            lines.append("```")
+
+    return "\n".join(lines)
+
+
+async def compare_format(
+    ruff_baseline_executable: Path,
+    ruff_comparison_executable: Path,
+    options: FormatOptions,
+    cloned_repo: ClonedRepository,
+):
+    # Run format without diff to get the baseline
+    await ruff_format(
+        executable=ruff_baseline_executable.resolve(),
+        path=cloned_repo.path,
+        name=cloned_repo.fullname,
+        options=options,
+    )
+    # Then get the diff from stdout
+    diff = await ruff_format(
+        executable=ruff_comparison_executable.resolve(),
+        path=cloned_repo.path,
+        name=cloned_repo.fullname,
+        options=options,
+        diff=True,
+    )
+
+    return Comparison(diff=Diff(diff), repo=cloned_repo)
+
+
 async def ruff_format(
     *,
     executable: Path,
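The new `format_patchset` walks a `unidiff.PatchSet` by file and hunk. A small sketch of that traversal on a hard-coded diff, using the same `path` and `source_start` fields the permalinks above are built from:

```python
from unidiff import PatchSet

DIFF_TEXT = """\
--- a/example.py
+++ b/example.py
@@ -1,3 +1,3 @@
 import os
-x = 1
+x = 2
 print(x)
"""

# Parse a unified diff and walk it the same way `format_patchset` does.
patch_set = PatchSet.from_string(DIFF_TEXT)
for file_patch in patch_set:
    for hunk in file_patch:
        # `path` and `source_start` identify where the hunk begins in the source.
        print(file_patch.path, hunk.source_start)
        # str(hunk) starts with the "@@ ... @@" header, so skip the first line
        # to get only the changed content.
        for line in str(hunk).splitlines()[1:]:
            print(line)
```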
@@ -113,66 +168,6 @@ async def ruff_format(
     return lines
 
 
-async def black_format(
-    *,
-    executable: Path,
-    path: Path,
-    name: str,
-) -> Sequence[str]:
-    """Run the given black binary against the specified path."""
-    logger.debug(f"Formatting {name} with {executable}")
-    black_args = []
-
-    start = time.time()
-    proc = await create_subprocess_exec(
-        executable.absolute(),
-        *black_args,
-        ".",
-        stdout=PIPE,
-        stderr=PIPE,
-        cwd=path,
-    )
-    result, err = await proc.communicate()
-    end = time.time()
-
-    logger.debug(f"Finished formatting {name} with {executable} in {end - start:.2f}s")
-
-    if proc.returncode != 0:
-        raise RuffError(err.decode("utf8"))
-
-    lines = result.decode("utf8").splitlines()
-    return [line for line in lines if not FORMAT_IGNORE_LINES.match(line)]
-
-
-async def compare_format(
-    ruff_baseline_executable: Path,
-    ruff_comparison_executable: Path,
-    options: FormatOptions,
-    cloned_repo: ClonedRepository,
-):
-    # Run format without diff to get the baseline
-    await ruff_format(
-        executable=ruff_baseline_executable.resolve(),
-        path=cloned_repo.path,
-        name=cloned_repo.fullname,
-        options=options,
-    )
-    # Then get the diff from stdout
-    diff = await ruff_format(
-        executable=ruff_comparison_executable.resolve(),
-        path=cloned_repo.path,
-        name=cloned_repo.fullname,
-        options=options,
-        diff=True,
-    )
-
-    return create_format_comparison(cloned_repo, FormatDiff(lines=diff))
-
-
-def create_format_comparison(repo: ClonedRepository, diff: str) -> FormatComparison:
-    return FormatComparison(diff=diff, repo=repo)
-
-
 @dataclass(frozen=True)
 class FormatOptions:
     """
@@ -182,55 +177,3 @@ class FormatOptions:
     def to_cli_args(self) -> list[str]:
         args = ["format", "--diff"]
         return args
-
-
-@dataclass(frozen=True)
-class FormatDiff(Diff):
-    """A diff from ruff format."""
-
-    lines: list[str]
-
-    def __bool__(self: Self) -> bool:
-        """Return true if this diff is non-empty."""
-        return bool(self.lines)
-
-    @property
-    def added(self) -> set[str]:
-        return set(line for line in self.lines if line.startswith("+"))
-
-    @property
-    def removed(self) -> set[str]:
-        return set(line for line in self.lines if line.startswith("-"))
-
-
-@dataclass(frozen=True)
-class FormatComparison(Comparison):
-    diff: FormatDiff
-    repo: ClonedRepository
-
-
-@dataclass(frozen=True)
-class FormatResult(Result):
-    comparisons: tuple[Project, FormatComparison]
-
-
-def patch_set_with_permalinks(patch_set: PatchSet, repo: ClonedRepository) -> str:
-    lines = []
-    for file_patch in patch_set:
-        for hunk in file_patch:
-            # Note: The line number is not exact because we formatted the repository for
-            # a baseline; we can't know the exact line number in the original
-            # source file.
-            hunk_link = repo.url_for(file_patch.path, hunk.source_start)
-            hunk_lines = str(hunk).splitlines()
-
-            # Add a link before the hunk
-            link_title = file_patch.path + "~L" + str(hunk.source_start)
-            lines.append(f"<a href='{hunk_link}'>{link_title}</a>")
-
-            # Wrap the contents of the hunk in a diff code block
-            lines.append("```diff")
-            lines.extend(hunk_lines[1:])
-            lines.append("```")
-
-    return "\n".join(lines)
@@ -6,8 +6,8 @@ from pathlib import Path
 from typing import TypeVar
 
 from ruff_ecosystem import logger
-from ruff_ecosystem.check import compare_check, summarize_check_result
+from ruff_ecosystem.check import compare_check, markdown_check_result
-from ruff_ecosystem.format import compare_format, summarize_format_result
+from ruff_ecosystem.format import compare_format, markdown_format_result
 from ruff_ecosystem.projects import (
     Project,
     RuffCommand,
@@ -38,6 +38,7 @@ async def main(
     logger.debug("Using cache directory %s", cache)
     logger.debug("Checking %s targets", len(targets))
 
+    # Limit parallelism to avoid high memory consumption
     semaphore = asyncio.Semaphore(max_parallelism)
 
     async def limited_parallelism(coroutine: T) -> T:
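The added comment documents why parallelism is capped. A standalone sketch of the same semaphore pattern that `limited_parallelism` uses, assuming a limit of two concurrent tasks and a placeholder coroutine:

```python
import asyncio


async def fake_comparison(name: str) -> str:
    # Placeholder for a real per-project comparison coroutine.
    await asyncio.sleep(0.1)
    return f"compared {name}"


async def main() -> None:
    # At most two coroutines run at once; the rest wait on the semaphore.
    semaphore = asyncio.Semaphore(2)

    async def limited(coroutine):
        async with semaphore:
            return await coroutine

    results = await asyncio.gather(
        *(limited(fake_comparison(f"repo-{i}")) for i in range(5))
    )
    print(results)


asyncio.run(main())
```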
@@ -61,15 +62,15 @@ async def main(
     )
     comparisons_by_target = dict(zip(targets, comparisons, strict=True))
 
-    errors, successes = [], []
+    # Split comparisons into errored / completed
+    errored, completed = [], []
     for target, comparison in comparisons_by_target.items():
         if isinstance(comparison, Exception):
-            errors.append((target, comparison))
-            continue
-
-        successes.append((target, comparison))
-
-    result = Result(completed=successes, errored=errors)
+            errored.append((target, comparison))
+        else:
+            completed.append((target, comparison))
+
+    result = Result(completed=completed, errored=errored)
 
     match format:
         case OutputFormat.json:
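The reworked loop assumes `comparisons` was gathered with `return_exceptions=True`, so failures arrive as `Exception` objects and can be partitioned instead of raised. A self-contained sketch of that split:

```python
import asyncio


async def check(name: str) -> str:
    # Placeholder comparison that fails for one target.
    if name == "bad":
        raise RuntimeError(f"{name} failed")
    await asyncio.sleep(0)
    return f"{name} ok"


async def main() -> None:
    targets = ["good", "bad", "also-good"]
    # With return_exceptions=True, exceptions are returned in the result list
    # rather than propagated, so they can be separated afterwards.
    comparisons = await asyncio.gather(
        *(check(target) for target in targets), return_exceptions=True
    )

    errored, completed = [], []
    for target, comparison in zip(targets, comparisons):
        if isinstance(comparison, Exception):
            errored.append((target, comparison))
        else:
            completed.append((target, comparison))

    print(errored, completed)


asyncio.run(main())
```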
@@ -77,9 +78,9 @@ async def main(
         case OutputFormat.markdown:
             match command:
                 case RuffCommand.check:
-                    print(summarize_check_result(result))
+                    print(markdown_check_result(result))
                 case RuffCommand.format:
-                    print(summarize_format_result(result))
+                    print(markdown_format_result(result))
                 case _:
                     raise ValueError(f"Unknown target Ruff command {command}")
         case _:
@@ -3,7 +3,9 @@ from __future__ import annotations
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from ruff_ecosystem.projects import Project
+    from unidiff import PatchSet
+
+    from ruff_ecosystem.projects import ClonedRepository, Project
 
 
 def markdown_project_section(
@@ -1,3 +1,7 @@
+"""
+Abstractions and utilities for working with projects to run ecosystem checks on.
+"""
+
 from __future__ import annotations
 
 from asyncio import create_subprocess_exec
@@ -36,7 +40,7 @@ class ProjectSetupError(Exception):
 @dataclass(frozen=True)
 class Repository(Serializable):
     """
-    A remote GitHub repository
+    A remote GitHub repository.
     """
 
     owner: str
@@ -75,13 +79,7 @@ class Repository(Serializable):
                     f"Failed to checkout {self.ref}: {stderr.decode()}"
                 )
 
-            return ClonedRepository(
-                name=self.name,
-                owner=self.owner,
-                ref=self.ref,
-                path=checkout_dir,
-                commit_hash=await self._get_head_commit(checkout_dir),
-            )
+            return await ClonedRepository.from_path(checkout_dir, self)
 
         logger.debug(f"Cloning {self.owner}:{self.name} to {checkout_dir}")
         command = [
@@ -113,35 +111,13 @@ class Repository(Serializable):
         logger.debug(
             f"Finished cloning {self.fullname} with status {status_code}",
         )
-        return ClonedRepository(
-            name=self.name,
-            owner=self.owner,
-            ref=self.ref,
-            path=checkout_dir,
-            commit_hash=await self._get_head_commit(checkout_dir),
-        )
-
-    @staticmethod
-    async def _get_head_commit(checkout_dir: Path) -> str:
-        """
-        Return the commit sha for the repository in the checkout directory.
-        """
-        process = await create_subprocess_exec(
-            *["git", "rev-parse", "HEAD"],
-            cwd=checkout_dir,
-            stdout=PIPE,
-        )
-        stdout, _ = await process.communicate()
-        if await process.wait() != 0:
-            raise ProjectSetupError(f"Failed to retrieve commit sha at {checkout_dir}")
-
-        return stdout.decode().strip()
+        return await ClonedRepository.from_path(checkout_dir, self)
 
 
 @dataclass(frozen=True)
 class ClonedRepository(Repository, Serializable):
     """
-    A cloned GitHub repository, which includes the hash of the cloned commit.
+    A cloned GitHub repository, which includes the hash of the current commit.
     """
 
     commit_hash: str
@@ -166,3 +142,29 @@ class ClonedRepository(Repository, Serializable):
     @property
     def url(self: Self) -> str:
         return f"https://github.com/{self.owner}/{self.name}@{self.commit_hash}"
+
+    @classmethod
+    async def from_path(cls, path: Path, repo: Repository):
+        return cls(
+            name=repo.name,
+            owner=repo.owner,
+            ref=repo.ref,
+            path=path,
+            commit_hash=await cls._get_head_commit(path),
+        )
+
+    @staticmethod
+    async def _get_head_commit(checkout_dir: Path) -> str:
+        """
+        Return the commit sha for the repository in the checkout directory.
+        """
+        process = await create_subprocess_exec(
+            *["git", "rev-parse", "HEAD"],
+            cwd=checkout_dir,
+            stdout=PIPE,
+        )
+        stdout, _ = await process.communicate()
+        if await process.wait() != 0:
+            raise ProjectSetupError(f"Failed to retrieve commit sha at {checkout_dir}")
+
+        return stdout.decode().strip()
|
||||||
return len(self.removed)
|
return len(self.removed)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def new(cls, baseline: Sequence[str], comparison: Sequence[str]):
|
def from_pair(cls, baseline: Sequence[str], comparison: Sequence[str]):
|
||||||
|
"""
|
||||||
|
Construct a diff from before and after.
|
||||||
|
"""
|
||||||
return cls(difflib.ndiff(baseline, comparison))
|
return cls(difflib.ndiff(baseline, comparison))
|
||||||
|
|
||||||
def jsonable(self) -> Any:
|
def jsonable(self) -> Any:
|
||||||
|
|
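The renamed `Diff.from_pair` builds on `difflib.ndiff`, which emits each line prefixed with "  ", "- ", or "+ " (and occasionally "? " hint lines for near-matching pairs). A quick illustration with made-up diagnostic lines:

```python
import difflib

baseline = [
    "a.py:1:1: F401 `os` imported but unused",
    "b.py:2:1: E711 comparison to None",
]
comparison = [
    "b.py:2:1: E711 comparison to None",
    "c.py:3:1: F841 local variable unused",
]

# Lines only in the baseline come out as "- ", lines only in the comparison as
# "+ ", and shared lines as "  " -- which is the raw material Diff stores.
for line in difflib.ndiff(baseline, comparison):
    print(line)
```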
@@ -55,15 +58,23 @@ class Diff(Serializable):
 
 @dataclass(frozen=True)
 class Result(Serializable):
+    """
+    The result of an ecosystem check for a collection of projects.
+    """
+
     errored: list[tuple[Project, Exception]]
     completed: list[tuple[Project, Comparison]]
 
 
 @dataclass(frozen=True)
 class Comparison(Serializable):
+    """
+    The result of a completed ecosystem comparison for a single project.
+    """
+
     diff: Diff
     repo: ClonedRepository
 
 
 class RuffError(Exception):
-    """An error reported by ruff."""
+    """An error reported by Ruff."""