mirror of https://github.com/astral-sh/ruff
Use datatest for formatter tests (#21933)
This commit is contained in:
parent
b413a6dec4
commit
e2ec2bc306
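This change moves the formatter and parser fixture tests from insta::glob! loops onto the datatest-stable crate, so each fixture file is discovered at runtime and becomes its own test case. Below is a minimal sketch of that pattern, not the commit's actual test bodies: the `fixture` function name and the root path are illustrative, and it assumes datatest-stable 0.3 as added to the lockfile in this diff.

use datatest_stable::Utf8Path;

// Each file under `root` whose path matches `pattern` becomes one test case;
// the harness reads the file and hands the path plus contents to the function.
fn fixture(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
    assert!(!content.is_empty(), "fixture {input_path} is empty");
    Ok(())
}

// The [[test]] target must set `harness = false` in Cargo.toml, as done in this commit.
datatest_stable::harness! {
    { test = fixture, root = "./resources/test/fixtures", pattern = r".+\.pyi?$" }
}

With `harness = false` the generated tests run through libtest-mimic, so `cargo test --test fixtures` and name-based filtering by fixture path should keep working as before.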
Cargo.lock

@@ -254,6 +254,21 @@ dependencies = [
"syn",
]

[[package]]
name = "bit-set"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
"bit-vec",
]

[[package]]
name = "bit-vec"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"

[[package]]
name = "bitflags"
version = "1.3.2"

@@ -944,6 +959,18 @@ dependencies = [
"parking_lot_core",
]

[[package]]
name = "datatest-stable"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a867d7322eb69cf3a68a5426387a25b45cb3b9c5ee41023ee6cea92e2afadd82"
dependencies = [
"camino",
"fancy-regex",
"libtest-mimic 0.8.1",
"walkdir",
]

[[package]]
name = "derive-where"
version = "1.6.0"

@@ -1138,6 +1165,17 @@ dependencies = [
"windows-sys 0.61.0",
]

[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set",
"regex-automata",
"regex-syntax",
]

[[package]]
name = "fastrand"
version = "2.3.0"

@@ -1625,7 +1663,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
dependencies = [
"console 0.15.11",
"globset",
"once_cell",
"pest",
"pest_derive",

@@ -1633,7 +1670,6 @@ dependencies = [
"ron",
"serde",
"similar",
"walkdir",
]

[[package]]

@@ -1919,6 +1955,18 @@ dependencies = [
"threadpool",
]

[[package]]
name = "libtest-mimic"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5297962ef19edda4ce33aaa484386e0a5b3d7f2f4e037cbeee00503ef6b29d33"
dependencies = [
"anstream",
"anstyle",
"clap",
"escape8259",
]

[[package]]
name = "linux-raw-sys"
version = "0.11.0"

@@ -3278,6 +3326,7 @@ dependencies = [
"anyhow",
"clap",
"countme",
"datatest-stable",
"insta",
"itertools 0.14.0",
"memchr",

@@ -3347,6 +3396,7 @@ dependencies = [
"bitflags 2.10.0",
"bstr",
"compact_str",
"datatest-stable",
"get-size2",
"insta",
"itertools 0.14.0",

@@ -4311,7 +4361,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fe242ee9e646acec9ab73a5c540e8543ed1b107f0ce42be831e0775d423c396"
dependencies = [
"ignore",
"libtest-mimic",
"libtest-mimic 0.7.3",
"snapbox",
]

Cargo.toml

@@ -81,6 +81,7 @@ compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
datatest-stable = { version = "0.3.3" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }

crates/ruff_python_formatter/Cargo.toml

@@ -43,7 +43,8 @@ tracing = { workspace = true }
[dev-dependencies]
ruff_formatter = { workspace = true }

insta = { workspace = true, features = ["glob"] }
datatest-stable = { workspace = true }
insta = { workspace = true }
regex = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

@@ -54,8 +55,8 @@ similar = { workspace = true }
ignored = ["ruff_cache"]

[[test]]
name = "ruff_python_formatter_fixtures"
path = "tests/fixtures.rs"
name = "fixtures"
harness = false
test = true
required-features = ["serde"]


crates/ruff_python_formatter/tests/fixtures.rs

@@ -1,4 +1,7 @@
use crate::normalizer::Normalizer;
use anyhow::anyhow;
use datatest_stable::Utf8Path;
use insta::assert_snapshot;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
DisplayDiagnostics, DummyFileResolver, Severity, Span, SubDiagnostic, SubDiagnosticSeverity,

@@ -24,26 +27,27 @@ use std::{fmt, fs};

mod normalizer;

#[test]
fn black_compatibility() {
let test_file = |input_path: &Path| {
let content = fs::read_to_string(input_path).unwrap();
#[expect(clippy::needless_pass_by_value)]
fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
let test_name = input_path
.strip_prefix("./resources/test/fixtures/black")
.unwrap_or(input_path)
.as_str();

let options_path = input_path.with_extension("options.json");
let options_path = input_path.with_extension("options.json");

let options: PyFormatOptions = if let Ok(options_file) = fs::File::open(&options_path) {
let reader = BufReader::new(options_file);
serde_json::from_reader(reader).unwrap_or_else(|_| {
panic!("Expected option file {options_path:?} to be a valid Json file")
})
} else {
PyFormatOptions::from_extension(input_path)
};
let options: PyFormatOptions = if let Ok(options_file) = fs::File::open(&options_path) {
let reader = BufReader::new(options_file);
serde_json::from_reader(reader).map_err(|err| {
anyhow!("Expected option file {options_path:?} to be a valid Json file: {err}")
})?
} else {
PyFormatOptions::from_extension(input_path.as_std_path())
};

let first_line = content.lines().next().unwrap_or_default();
let formatted_code = if first_line.starts_with("# flags:")
&& first_line.contains("--line-ranges=")
{
let first_line = content.lines().next().unwrap_or_default();
let formatted_code =
if first_line.starts_with("# flags:") && first_line.contains("--line-ranges=") {
let line_index = LineIndex::from_source_text(&content);

let ranges = first_line
@@ -69,13 +73,9 @@ fn black_compatibility() {
let mut formatted_code = content.clone();

for range in ranges {
let formatted =
format_range(&content, range, options.clone()).unwrap_or_else(|err| {
panic!(
"Range-formatting of {} to succeed but encountered error {err}",
input_path.display()
)
});
let formatted = format_range(&content, range, options.clone()).map_err(|err| {
anyhow!("Range-formatting to succeed but encountered error {err}")
})?;

let range = formatted.source_range();


@@ -86,12 +86,8 @@

formatted_code
} else {
let printed = format_module_source(&content, options.clone()).unwrap_or_else(|err| {
panic!(
"Formatting of {} to succeed but encountered error {err}",
input_path.display()
)
});
let printed = format_module_source(&content, options.clone())
.map_err(|err| anyhow!("Formatting to succeed but encountered error {err}"))?;

let formatted_code = printed.into_code();

@@ -100,191 +96,133 @@ fn black_compatibility() {
formatted_code
};

let extension = input_path
.extension()
.expect("Test file to have py or pyi extension")
.to_string_lossy();
let expected_path = input_path.with_extension(format!("{extension}.expect"));
let expected_output = fs::read_to_string(&expected_path)
.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));
let extension = input_path
.extension()
.expect("Test file to have py or pyi extension");
let expected_path = input_path.with_extension(format!("{extension}.expect"));
let expected_output = fs::read_to_string(&expected_path)
.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));

let unsupported_syntax_errors =
ensure_unchanged_ast(&content, &formatted_code, &options, input_path);
let unsupported_syntax_errors =
ensure_unchanged_ast(&content, &formatted_code, &options, input_path);

if formatted_code == expected_output {
// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
// already perfectly captures the expected output.
// The following code mimics insta's logic generating the snapshot name for a test.
let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
// already perfectly captures the expected output.
// The following code mimics insta's logic generating the snapshot name for a test.
let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();

let mut components = input_path.components().rev();
let file_name = components.next().unwrap();
let test_suite = components.next().unwrap();
let full_snapshot_name = format!("black_compatibility@{test_name}.snap",);

let snapshot_name = format!(
"black_compatibility@{}__{}.snap",
test_suite.as_os_str().to_string_lossy(),
file_name.as_os_str().to_string_lossy()
);
let snapshot_path = Path::new(&workspace_path)
.join("tests/snapshots")
.join(full_snapshot_name);

let snapshot_path = Path::new(&workspace_path)
.join("tests/snapshots")
.join(snapshot_name);
if snapshot_path.exists() && snapshot_path.is_file() {
// SAFETY: This is a convenience feature. That's why we don't want to abort
// when deleting a no longer needed snapshot fails.
fs::remove_file(&snapshot_path).ok();
}

let new_snapshot_path = snapshot_path.with_extension("snap.new");
if new_snapshot_path.exists() && new_snapshot_path.is_file() {
// SAFETY: This is a convenience feature. That's why we don't want to abort
// when deleting a no longer needed snapshot fails.
fs::remove_file(&new_snapshot_path).ok();
}
} else {
// Black and Ruff have different formatting. Write out a snapshot that covers the differences
// today.
let mut snapshot = String::new();
write!(snapshot, "{}", Header::new("Input")).unwrap();
write!(snapshot, "{}", CodeFrame::new("python", &content)).unwrap();

write!(snapshot, "{}", Header::new("Black Differences")).unwrap();

let diff = TextDiff::from_lines(expected_output.as_str(), &formatted_code)
.unified_diff()
.header("Black", "Ruff")
.to_string();

write!(snapshot, "{}", CodeFrame::new("diff", &diff)).unwrap();

write!(snapshot, "{}", Header::new("Ruff Output")).unwrap();
write!(snapshot, "{}", CodeFrame::new("python", &formatted_code)).unwrap();

write!(snapshot, "{}", Header::new("Black Output")).unwrap();
write!(snapshot, "{}", CodeFrame::new("python", &expected_output)).unwrap();

if !unsupported_syntax_errors.is_empty() {
write!(snapshot, "{}", Header::new("New Unsupported Syntax Errors")).unwrap();
writeln!(
snapshot,
"{}",
DisplayDiagnostics::new(
&DummyFileResolver,
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
&unsupported_syntax_errors
)
)
.unwrap();
}

insta::with_settings!({
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
}, {
insta::assert_snapshot!(snapshot);
});
if formatted_code == expected_output {
if snapshot_path.exists() && snapshot_path.is_file() {
// SAFETY: This is a convenience feature. That's why we don't want to abort
// when deleting a no longer needed snapshot fails.
fs::remove_file(&snapshot_path).ok();
}
};

insta::glob!(
"../resources",
"test/fixtures/black/**/*.{py,pyi}",
test_file
);
let new_snapshot_path = snapshot_path.with_extension("snap.new");
if new_snapshot_path.exists() && new_snapshot_path.is_file() {
// SAFETY: This is a convenience feature. That's why we don't want to abort
// when deleting a no longer needed snapshot fails.
fs::remove_file(&new_snapshot_path).ok();
}
} else {
// Black and Ruff have different formatting. Write out a snapshot that covers the differences
// today.
let mut snapshot = String::new();
write!(snapshot, "{}", Header::new("Input")).unwrap();
write!(snapshot, "{}", CodeFrame::new("python", &content)).unwrap();

write!(snapshot, "{}", Header::new("Black Differences")).unwrap();

let diff = TextDiff::from_lines(expected_output.as_str(), &formatted_code)
.unified_diff()
.header("Black", "Ruff")
.to_string();

write!(snapshot, "{}", CodeFrame::new("diff", &diff)).unwrap();

write!(snapshot, "{}", Header::new("Ruff Output")).unwrap();
write!(snapshot, "{}", CodeFrame::new("python", &formatted_code)).unwrap();

write!(snapshot, "{}", Header::new("Black Output")).unwrap();
write!(snapshot, "{}", CodeFrame::new("python", &expected_output)).unwrap();

if !unsupported_syntax_errors.is_empty() {
write!(snapshot, "{}", Header::new("New Unsupported Syntax Errors")).unwrap();
writeln!(
snapshot,
"{}",
DisplayDiagnostics::new(
&DummyFileResolver,
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
&unsupported_syntax_errors
)
)
.unwrap();
}

let mut settings = insta::Settings::clone_current();
settings.set_omit_expression(true);
settings.set_input_file(input_path);
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix(test_name);
let _settings = settings.bind_to_scope();

assert_snapshot!(snapshot);
}
Ok(())
}

#[test]
fn format() {
let test_file = |input_path: &Path| {
let content = fs::read_to_string(input_path).unwrap();
#[expect(clippy::needless_pass_by_value)]
fn format(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
let test_name = input_path
.strip_prefix("./resources/test/fixtures/ruff")
.unwrap_or(input_path)
.as_str();

let mut snapshot = format!("## Input\n{}", CodeFrame::new("python", &content));
let options_path = input_path.with_extension("options.json");
let mut snapshot = format!("## Input\n{}", CodeFrame::new("python", &content));
let options_path = input_path.with_extension("options.json");

if let Ok(options_file) = fs::File::open(&options_path) {
let reader = BufReader::new(options_file);
let options: Vec<PyFormatOptions> =
serde_json::from_reader(reader).unwrap_or_else(|_| {
panic!("Expected option file {options_path:?} to be a valid Json file")
});
if let Ok(options_file) = fs::File::open(&options_path) {
let reader = BufReader::new(options_file);
let options: Vec<PyFormatOptions> = serde_json::from_reader(reader).map_err(|_| {
anyhow!("Expected option file {options_path:?} to be a valid Json file")
})?;

writeln!(snapshot, "## Outputs").unwrap();
writeln!(snapshot, "## Outputs").unwrap();

for (i, options) in options.into_iter().enumerate() {
let (formatted_code, unsupported_syntax_errors) =
format_file(&content, &options, input_path);

writeln!(
snapshot,
"### Output {}\n{}{}",
i + 1,
CodeFrame::new("", &DisplayPyOptions(&options)),
CodeFrame::new("python", &formatted_code)
)
.unwrap();

if options.preview().is_enabled() {
continue;
}

// We want to capture the differences in the preview style in our fixtures
let options_preview = options.with_preview(PreviewMode::Enabled);
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);

if formatted_code != formatted_preview {
// Having both snapshots makes it hard to see the difference, so we're keeping only
// diff.
writeln!(
snapshot,
"#### Preview changes\n{}",
CodeFrame::new(
"diff",
TextDiff::from_lines(&formatted_code, &formatted_preview)
.unified_diff()
.header("Stable", "Preview")
)
)
.unwrap();
}

if !unsupported_syntax_errors.is_empty() {
writeln!(
snapshot,
"### Unsupported Syntax Errors\n{}",
DisplayDiagnostics::new(
&DummyFileResolver,
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
&unsupported_syntax_errors
)
)
.unwrap();
}
}
} else {
// We want to capture the differences in the preview style in our fixtures
let options = PyFormatOptions::from_extension(input_path);
for (i, options) in options.into_iter().enumerate() {
let (formatted_code, unsupported_syntax_errors) =
format_file(&content, &options, input_path);

writeln!(
snapshot,
"### Output {}\n{}{}",
i + 1,
CodeFrame::new("", &DisplayPyOptions(&options)),
CodeFrame::new("python", &formatted_code)
)
.unwrap();

if options.preview().is_enabled() {
continue;
}

// We want to capture the differences in the preview style in our fixtures
let options_preview = options.with_preview(PreviewMode::Enabled);
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);

if formatted_code == formatted_preview {
writeln!(
snapshot,
"## Output\n{}",
CodeFrame::new("python", &formatted_code)
)
.unwrap();
} else {
if formatted_code != formatted_preview {
// Having both snapshots makes it hard to see the difference, so we're keeping only
// diff.
writeln!(
snapshot,
"## Output\n{}\n## Preview changes\n{}",
CodeFrame::new("python", &formatted_code),
"#### Preview changes\n{}",
CodeFrame::new(
"diff",
TextDiff::from_lines(&formatted_code, &formatted_preview)

@@ -298,7 +236,7 @@ fn format() {
if !unsupported_syntax_errors.is_empty() {
writeln!(
snapshot,
"## Unsupported Syntax Errors\n{}",
"### Unsupported Syntax Errors\n{}",
DisplayDiagnostics::new(
&DummyFileResolver,
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),

@@ -308,27 +246,74 @@ fn format() {
.unwrap();
}
}
} else {
// We want to capture the differences in the preview style in our fixtures
let options = PyFormatOptions::from_extension(input_path.as_std_path());
let (formatted_code, unsupported_syntax_errors) =
format_file(&content, &options, input_path);

insta::with_settings!({
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
}, {
insta::assert_snapshot!(snapshot);
});
};
let options_preview = options.with_preview(PreviewMode::Enabled);
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);

insta::glob!(
"../resources",
"test/fixtures/ruff/**/*.{py,pyi}",
test_file
);
if formatted_code == formatted_preview {
writeln!(
snapshot,
"## Output\n{}",
CodeFrame::new("python", &formatted_code)
)
.unwrap();
} else {
// Having both snapshots makes it hard to see the difference, so we're keeping only
// diff.
writeln!(
snapshot,
"## Output\n{}\n## Preview changes\n{}",
CodeFrame::new("python", &formatted_code),
CodeFrame::new(
"diff",
TextDiff::from_lines(&formatted_code, &formatted_preview)
.unified_diff()
.header("Stable", "Preview")
)
)
.unwrap();
}

if !unsupported_syntax_errors.is_empty() {
writeln!(
snapshot,
"## Unsupported Syntax Errors\n{}",
DisplayDiagnostics::new(
&DummyFileResolver,
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
&unsupported_syntax_errors
)
)
.unwrap();
}
}

let mut settings = insta::Settings::clone_current();
settings.set_omit_expression(true);
settings.set_input_file(input_path);
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix(test_name);
let _settings = settings.bind_to_scope();

assert_snapshot!(snapshot);

Ok(())
}

datatest_stable::harness! {
{ test = black_compatibility, root = "./resources/test/fixtures/black", pattern = r".+\.pyi?$" },
{ test = format, root="./resources/test/fixtures/ruff", pattern = r".+\.pyi?$" }
}

fn format_file(
source: &str,
options: &PyFormatOptions,
input_path: &Path,
input_path: &Utf8Path,
) -> (String, Vec<Diagnostic>) {
let (unformatted, formatted_code) = if source.contains("<RANGE_START>") {
let mut content = source.to_string();

@@ -363,8 +348,7 @@ fn format_file(
let formatted =
format_range(&format_input, range, options.clone()).unwrap_or_else(|err| {
panic!(
"Range-formatting of {} to succeed but encountered error {err}",
input_path.display()
"Range-formatting of {input_path} to succeed but encountered error {err}",
)
});

@@ -377,10 +361,7 @@ fn format_file(
(Cow::Owned(without_markers), content)
} else {
let printed = format_module_source(source, options.clone()).unwrap_or_else(|err| {
panic!(
"Formatting `{input_path} was expected to succeed but it failed: {err}",
input_path = input_path.display()
)
panic!("Formatting `{input_path} was expected to succeed but it failed: {err}",)
});
let formatted_code = printed.into_code();

@@ -399,22 +380,20 @@
fn ensure_stability_when_formatting_twice(
formatted_code: &str,
options: &PyFormatOptions,
input_path: &Path,
input_path: &Utf8Path,
) {
let reformatted = match format_module_source(formatted_code, options.clone()) {
Ok(reformatted) => reformatted,
Err(err) => {
let mut diag = Diagnostic::from(&err);
if let Some(range) = err.range() {
let file =
SourceFileBuilder::new(input_path.to_string_lossy(), formatted_code).finish();
let file = SourceFileBuilder::new(input_path.as_str(), formatted_code).finish();
let span = Span::from(file).with_range(range);
diag.annotate(Annotation::primary(span));
}
panic!(
"Expected formatted code of {} to be valid syntax: {err}:\
"Expected formatted code of {input_path} to be valid syntax: {err}:\
\n---\n{formatted_code}---\n{}",
input_path.display(),
diag.display(&DummyFileResolver, &DisplayDiagnosticConfig::default()),
);
}
@@ -440,7 +419,6 @@ Formatted once:
Formatted twice:
---
{reformatted}---"#,
input_path = input_path.display(),
options = &DisplayPyOptions(options),
reformatted = reformatted.as_code(),
);
@@ -467,7 +445,7 @@ fn ensure_unchanged_ast(
unformatted_code: &str,
formatted_code: &str,
options: &PyFormatOptions,
input_path: &Path,
input_path: &Utf8Path,
) -> Vec<Diagnostic> {
let source_type = options.source_type();

@@ -499,11 +477,7 @@ fn ensure_unchanged_ast(
formatted_unsupported_syntax_errors
.retain(|fingerprint, _| !unformatted_unsupported_syntax_errors.contains_key(fingerprint));

let file = SourceFileBuilder::new(
input_path.file_name().unwrap().to_string_lossy(),
formatted_code,
)
.finish();
let file = SourceFileBuilder::new(input_path.file_name().unwrap(), formatted_code).finish();
let diagnostics = formatted_unsupported_syntax_errors
.values()
.map(|error| {
@@ -533,11 +507,10 @@ fn ensure_unchanged_ast(
.header("Unformatted", "Formatted")
.to_string();
panic!(
r#"Reformatting the unformatted code of {} resulted in AST changes.
r#"Reformatting the unformatted code of {input_path} resulted in AST changes.
---
{diff}
"#,
input_path.display(),
);
}


crates/ruff_python_parser/Cargo.toml

@@ -12,6 +12,10 @@ license = { workspace = true }

[lib]

[[test]]
name = "fixtures"
harness = false

[dependencies]
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_trivia = { workspace = true }

@@ -34,7 +38,8 @@ ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_source_file = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true, features = ["glob"] }
datatest-stable = { workspace = true }
insta = { workspace = true }
itertools = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

crates/ruff_python_parser/tests/fixtures.rs

@@ -1,9 +1,8 @@
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt::{Formatter, Write};
use std::fs;
use std::path::Path;

use datatest_stable::Utf8Path;
use itertools::Itertools;
use ruff_annotate_snippets::{Level, Renderer, Snippet};
use ruff_python_ast::token::{Token, Tokens};

@@ -17,38 +16,49 @@ use ruff_python_parser::{Mode, ParseErrorType, ParseOptions, Parsed, parse_unche
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

#[test]
fn valid_syntax() {
insta::glob!("../resources", "valid/**/*.py", test_valid_syntax);
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn valid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_valid_syntax(path, &content, "./resources/valid");
Ok(())
}

#[test]
fn invalid_syntax() {
insta::glob!("../resources", "invalid/**/*.py", test_invalid_syntax);
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn invalid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_invalid_syntax(path, &content, "./resources/invalid");
Ok(())
}

#[test]
fn inline_ok() {
insta::glob!("../resources/inline", "ok/**/*.py", test_valid_syntax);
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn inline_ok(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_valid_syntax(path, &content, "./resources/inline/ok");
Ok(())
}

#[test]
fn inline_err() {
insta::glob!("../resources/inline", "err/**/*.py", test_invalid_syntax);
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn inline_err(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_invalid_syntax(path, &content, "./resources/inline/err");
Ok(())
}

datatest_stable::harness! {
{ test = valid_syntax, root = "./resources/valid", pattern = r"\.pyi?$" },
{ test = inline_ok, root = "./resources/inline/ok", pattern = r"\.pyi?$" },
{ test = invalid_syntax, root = "./resources/invalid", pattern = r"\.pyi?$" },
{ test = inline_err, root="./resources/inline/err", pattern = r"\.pyi?$" }
}

/// Asserts that the parser generates no syntax errors for a valid program.
/// Snapshots the AST.
fn test_valid_syntax(input_path: &Path) {
let source = fs::read_to_string(input_path).expect("Expected test file to exist");
let options = extract_options(&source).unwrap_or_else(|| {
fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
let options = extract_options(source).unwrap_or_else(|| {
ParseOptions::from(Mode::Module).with_target_version(PythonVersion::latest_preview())
});
let parsed = parse_unchecked(&source, options.clone());
let parsed = parse_unchecked(source, options.clone());

if parsed.has_syntax_errors() {
let line_index = LineIndex::from_source_text(&source);
let source_code = SourceCode::new(&source, &line_index);
let line_index = LineIndex::from_source_text(source);
let source_code = SourceCode::new(source, &line_index);

let mut message = "Expected no syntax errors for a valid program but the parser generated the following errors:\n".to_string();

@@ -81,8 +91,8 @@ fn test_valid_syntax(input_path: &Path) {
panic!("{input_path:?}: {message}");
}

validate_tokens(parsed.tokens(), source.text_len(), input_path);
validate_ast(&parsed, source.text_len(), input_path);
validate_tokens(parsed.tokens(), source.text_len());
validate_ast(&parsed, source.text_len());

let mut output = String::new();
writeln!(&mut output, "## AST").unwrap();

@@ -91,7 +101,7 @@ fn test_valid_syntax(input_path: &Path) {
let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");

let mut visitor =
SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());

for stmt in parsed.suite() {
visitor.visit_stmt(stmt);

@@ -102,8 +112,8 @@ fn test_valid_syntax(input_path: &Path) {
if !semantic_syntax_errors.is_empty() {
let mut message = "Expected no semantic syntax errors for a valid program:\n".to_string();

let line_index = LineIndex::from_source_text(&source);
let source_code = SourceCode::new(&source, &line_index);
let line_index = LineIndex::from_source_text(source);
let source_code = SourceCode::new(source, &line_index);

for error in semantic_syntax_errors {
writeln!(

@@ -125,6 +135,7 @@ fn test_valid_syntax(input_path: &Path) {
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
snapshot_suffix => test_name
}, {
insta::assert_snapshot!(output);
});

@@ -132,22 +143,23 @@ fn test_valid_syntax(input_path: &Path) {

/// Assert that the parser generates at least one syntax error for the given input file.
/// Snapshots the AST and the error messages.
fn test_invalid_syntax(input_path: &Path) {
let source = fs::read_to_string(input_path).expect("Expected test file to exist");
let options = extract_options(&source).unwrap_or_else(|| {
fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();

let options = extract_options(source).unwrap_or_else(|| {
ParseOptions::from(Mode::Module).with_target_version(PythonVersion::PY314)
});
let parsed = parse_unchecked(&source, options.clone());
let parsed = parse_unchecked(source, options.clone());

validate_tokens(parsed.tokens(), source.text_len(), input_path);
validate_ast(&parsed, source.text_len(), input_path);
validate_tokens(parsed.tokens(), source.text_len());
validate_ast(&parsed, source.text_len());

let mut output = String::new();
writeln!(&mut output, "## AST").unwrap();
writeln!(&mut output, "\n```\n{:#?}\n```", parsed.syntax()).unwrap();

let line_index = LineIndex::from_source_text(&source);
let source_code = SourceCode::new(&source, &line_index);
let line_index = LineIndex::from_source_text(source);
let source_code = SourceCode::new(source, &line_index);

if !parsed.errors().is_empty() {
writeln!(&mut output, "## Errors\n").unwrap();

@@ -186,7 +198,7 @@ fn test_invalid_syntax(input_path: &Path) {
let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");

let mut visitor =
SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());

for stmt in parsed.suite() {
visitor.visit_stmt(stmt);

@@ -196,7 +208,7 @@ fn test_invalid_syntax(input_path: &Path) {

assert!(
parsed.has_syntax_errors() || !semantic_syntax_errors.is_empty(),
"{input_path:?}: Expected parser to generate at least one syntax error for a program containing syntax errors."
"Expected parser to generate at least one syntax error for a program containing syntax errors."
);

if !semantic_syntax_errors.is_empty() {

@@ -220,6 +232,7 @@ fn test_invalid_syntax(input_path: &Path) {
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
snapshot_suffix => test_name
}, {
insta::assert_snapshot!(output);
});

@@ -372,26 +385,24 @@ impl std::fmt::Display for CodeFrame<'_> {
/// Verifies that:
/// * the ranges are strictly increasing when loop the tokens in insertion order
/// * all ranges are within the length of the source code
fn validate_tokens(tokens: &[Token], source_length: TextSize, test_path: &Path) {
fn validate_tokens(tokens: &[Token], source_length: TextSize) {
let mut previous: Option<&Token> = None;

for token in tokens {
assert!(
token.end() <= source_length,
"{path}: Token range exceeds the source code length. Token: {token:#?}",
path = test_path.display()
"Token range exceeds the source code length. Token: {token:#?}",
);

if let Some(previous) = previous {
assert_eq!(
previous.range().ordering(token.range()),
Ordering::Less,
"{path}: Token ranges are not in increasing order
"Token ranges are not in increasing order
Previous token: {previous:#?}
Current token: {token:#?}
Tokens: {tokens:#?}
",
path = test_path.display(),
);
}

@@ -403,9 +414,9 @@
/// * the range of the parent node fully encloses all its child nodes
/// * the ranges are strictly increasing when traversing the nodes in pre-order.
/// * all ranges are within the length of the source code.
fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize, test_path: &Path) {
fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize) {
walk_module(
&mut ValidateAstVisitor::new(parsed.tokens(), source_len, test_path),
&mut ValidateAstVisitor::new(parsed.tokens(), source_len),
parsed.syntax(),
);
}

@@ -416,17 +427,15 @@ struct ValidateAstVisitor<'a> {
parents: Vec<AnyNodeRef<'a>>,
previous: Option<AnyNodeRef<'a>>,
source_length: TextSize,
test_path: &'a Path,
}

impl<'a> ValidateAstVisitor<'a> {
fn new(tokens: &'a Tokens, source_length: TextSize, test_path: &'a Path) -> Self {
fn new(tokens: &'a Tokens, source_length: TextSize) -> Self {
Self {
tokens: tokens.iter().peekable(),
parents: Vec::new(),
previous: None,
source_length,
test_path,
}
}
}

@@ -444,8 +453,7 @@ impl ValidateAstVisitor<'_> {
// At this point, next_token.end() > node.start()
assert!(
next.start() >= node.start(),
"{path}: The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
"The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
root = self.parents.first()
);
}

@@ -464,8 +472,7 @@
// At this point, `next_token.end() > node.end()`
assert!(
next.start() >= node.end(),
"{path}: The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
"The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
root = self.parents.first()
);
}

@@ -476,16 +483,14 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
fn enter_node(&mut self, node: AnyNodeRef<'ast>) -> TraversalSignal {
assert!(
node.end() <= self.source_length,
"{path}: The range of the node exceeds the length of the source code. Node: {node:#?}",
path = self.test_path.display()
"The range of the node exceeds the length of the source code. Node: {node:#?}",
);

if let Some(previous) = self.previous {
assert_ne!(
previous.range().ordering(node.range()),
Ordering::Greater,
"{path}: The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
"The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
root = self.parents.first()
);
}

@@ -493,8 +498,7 @@
if let Some(parent) = self.parents.last() {
assert!(
parent.range().contains_range(node.range()),
"{path}: The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
"The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
root = self.parents.first()
);
}
|
||||
|
|
|
|||
Loading…
Reference in New Issue