mirror of https://github.com/astral-sh/ruff
Use datatest for formatter tests (#21933)
This commit is contained in:
parent b413a6dec4
commit e2ec2bc306
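The commit replaces the `insta::glob!`-driven `#[test]` functions with file-driven tests generated by datatest-stable: each test target turns off the default libtest harness (`harness = false` in its Cargo.toml) and registers its fixture tests through the `datatest_stable::harness!` macro, which calls the test function once per file matching a root directory and a pattern, passing the file's path and contents. A minimal sketch of that pattern, with a placeholder test name, fixture root, and body (not taken from the commit):

    use datatest_stable::Utf8Path;

    // Each test receives the fixture's path and contents and returns a Result,
    // so failures surface as test errors instead of panics.
    fn my_fixture_test(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
        assert!(!content.is_empty(), "expected {path} to contain source code");
        Ok(())
    }

    // Generates the test binary's `main`; requires `harness = false` for this target.
    datatest_stable::harness! {
        { test = my_fixture_test, root = "./resources/test/fixtures", pattern = r".+\.pyi?$" }
    }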
Cargo.lock

@@ -254,6 +254,21 @@ dependencies = [
 "syn",
 ]
 
+[[package]]
+name = "bit-set"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
+dependencies = [
+"bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"

@@ -944,6 +959,18 @@ dependencies = [
 "parking_lot_core",
 ]
 
+[[package]]
+name = "datatest-stable"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a867d7322eb69cf3a68a5426387a25b45cb3b9c5ee41023ee6cea92e2afadd82"
+dependencies = [
+"camino",
+"fancy-regex",
+"libtest-mimic 0.8.1",
+"walkdir",
+]
+
 [[package]]
 name = "derive-where"
 version = "1.6.0"

@@ -1138,6 +1165,17 @@ dependencies = [
 "windows-sys 0.61.0",
 ]
 
+[[package]]
+name = "fancy-regex"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
+dependencies = [
+"bit-set",
+"regex-automata",
+"regex-syntax",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.3.0"

@@ -1625,7 +1663,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
 dependencies = [
 "console 0.15.11",
-"globset",
 "once_cell",
 "pest",
 "pest_derive",

@@ -1633,7 +1670,6 @@ dependencies = [
 "ron",
 "serde",
 "similar",
-"walkdir",
 ]
 
 [[package]]

@@ -1919,6 +1955,18 @@ dependencies = [
 "threadpool",
 ]
 
+[[package]]
+name = "libtest-mimic"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5297962ef19edda4ce33aaa484386e0a5b3d7f2f4e037cbeee00503ef6b29d33"
+dependencies = [
+"anstream",
+"anstyle",
+"clap",
+"escape8259",
+]
+
 [[package]]
 name = "linux-raw-sys"
 version = "0.11.0"

@@ -3278,6 +3326,7 @@ dependencies = [
 "anyhow",
 "clap",
 "countme",
+"datatest-stable",
 "insta",
 "itertools 0.14.0",
 "memchr",

@@ -3347,6 +3396,7 @@ dependencies = [
 "bitflags 2.10.0",
 "bstr",
 "compact_str",
+"datatest-stable",
 "get-size2",
 "insta",
 "itertools 0.14.0",

@@ -4311,7 +4361,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5fe242ee9e646acec9ab73a5c540e8543ed1b107f0ce42be831e0775d423c396"
 dependencies = [
 "ignore",
-"libtest-mimic",
+"libtest-mimic 0.7.3",
 "snapbox",
 ]
 
Cargo.toml

@@ -81,6 +81,7 @@ compact_str = "0.9.0"
 criterion = { version = "0.7.0", default-features = false }
 crossbeam = { version = "0.8.4" }
 dashmap = { version = "6.0.1" }
+datatest-stable = { version = "0.3.3" }
 dir-test = { version = "0.4.0" }
 dunce = { version = "1.0.5" }
 drop_bomb = { version = "0.1.5" }
crates/ruff_python_formatter/Cargo.toml

@@ -43,7 +43,8 @@ tracing = { workspace = true }
 [dev-dependencies]
 ruff_formatter = { workspace = true }
 
-insta = { workspace = true, features = ["glob"] }
+datatest-stable = { workspace = true }
+insta = { workspace = true }
 regex = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }

@@ -54,8 +55,8 @@ similar = { workspace = true }
 ignored = ["ruff_cache"]
 
 [[test]]
-name = "ruff_python_formatter_fixtures"
-path = "tests/fixtures.rs"
+name = "fixtures"
+harness = false
 test = true
 required-features = ["serde"]
 
crates/ruff_python_formatter/tests/fixtures.rs

@@ -1,4 +1,7 @@
 use crate::normalizer::Normalizer;
+use anyhow::anyhow;
+use datatest_stable::Utf8Path;
+use insta::assert_snapshot;
 use ruff_db::diagnostic::{
 Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
 DisplayDiagnostics, DummyFileResolver, Severity, Span, SubDiagnostic, SubDiagnosticSeverity,

@@ -24,26 +27,27 @@ use std::{fmt, fs};
 
 mod normalizer;
 
-#[test]
-fn black_compatibility() {
-let test_file = |input_path: &Path| {
-let content = fs::read_to_string(input_path).unwrap();
+#[expect(clippy::needless_pass_by_value)]
+fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
+let test_name = input_path
+.strip_prefix("./resources/test/fixtures/black")
+.unwrap_or(input_path)
+.as_str();
 
 let options_path = input_path.with_extension("options.json");
 
 let options: PyFormatOptions = if let Ok(options_file) = fs::File::open(&options_path) {
 let reader = BufReader::new(options_file);
-serde_json::from_reader(reader).unwrap_or_else(|_| {
-panic!("Expected option file {options_path:?} to be a valid Json file")
-})
+serde_json::from_reader(reader).map_err(|err| {
+anyhow!("Expected option file {options_path:?} to be a valid Json file: {err}")
+})?
 } else {
-PyFormatOptions::from_extension(input_path)
+PyFormatOptions::from_extension(input_path.as_std_path())
 };
 
 let first_line = content.lines().next().unwrap_or_default();
-let formatted_code = if first_line.starts_with("# flags:")
-&& first_line.contains("--line-ranges=")
-{
+let formatted_code =
+if first_line.starts_with("# flags:") && first_line.contains("--line-ranges=") {
 let line_index = LineIndex::from_source_text(&content);
 
 let ranges = first_line

@@ -69,13 +73,9 @@ fn black_compatibility() {
 let mut formatted_code = content.clone();
 
 for range in ranges {
-let formatted =
-format_range(&content, range, options.clone()).unwrap_or_else(|err| {
-panic!(
-"Range-formatting of {} to succeed but encountered error {err}",
-input_path.display()
-)
-});
+let formatted = format_range(&content, range, options.clone()).map_err(|err| {
+anyhow!("Range-formatting to succeed but encountered error {err}")
+})?;
 
 let range = formatted.source_range();
 

@@ -86,12 +86,8 @@
 
 formatted_code
 } else {
-let printed = format_module_source(&content, options.clone()).unwrap_or_else(|err| {
-panic!(
-"Formatting of {} to succeed but encountered error {err}",
-input_path.display()
-)
-});
+let printed = format_module_source(&content, options.clone())
+.map_err(|err| anyhow!("Formatting to succeed but encountered error {err}"))?;
 
 let formatted_code = printed.into_code();
 

@@ -100,191 +96,133 @@ fn black_compatibility() {
 formatted_code
 };
 
 let extension = input_path
 .extension()
-.expect("Test file to have py or pyi extension")
-.to_string_lossy();
-let expected_path = input_path.with_extension(format!("{extension}.expect"));
-let expected_output = fs::read_to_string(&expected_path)
-.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));
+.expect("Test file to have py or pyi extension");
+let expected_path = input_path.with_extension(format!("{extension}.expect"));
+let expected_output = fs::read_to_string(&expected_path)
+.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));
 
 let unsupported_syntax_errors =
 ensure_unchanged_ast(&content, &formatted_code, &options, input_path);
 
-if formatted_code == expected_output {
-// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
-// already perfectly captures the expected output.
-// The following code mimics insta's logic generating the snapshot name for a test.
-let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
+// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
+// already perfectly captures the expected output.
+// The following code mimics insta's logic generating the snapshot name for a test.
+let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
 
-let mut components = input_path.components().rev();
-let file_name = components.next().unwrap();
-let test_suite = components.next().unwrap();
+let full_snapshot_name = format!("black_compatibility@{test_name}.snap",);
 
-let snapshot_name = format!(
-"black_compatibility@{}__{}.snap",
-test_suite.as_os_str().to_string_lossy(),
-file_name.as_os_str().to_string_lossy()
-);
+let snapshot_path = Path::new(&workspace_path)
+.join("tests/snapshots")
+.join(full_snapshot_name);
 
-let snapshot_path = Path::new(&workspace_path)
-.join("tests/snapshots")
-.join(snapshot_name);
-if snapshot_path.exists() && snapshot_path.is_file() {
-// SAFETY: This is a convenience feature. That's why we don't want to abort
-// when deleting a no longer needed snapshot fails.
-fs::remove_file(&snapshot_path).ok();
-}
-
-let new_snapshot_path = snapshot_path.with_extension("snap.new");
-if new_snapshot_path.exists() && new_snapshot_path.is_file() {
-// SAFETY: This is a convenience feature. That's why we don't want to abort
-// when deleting a no longer needed snapshot fails.
-fs::remove_file(&new_snapshot_path).ok();
-}
-} else {
-// Black and Ruff have different formatting. Write out a snapshot that covers the differences
-// today.
-let mut snapshot = String::new();
-write!(snapshot, "{}", Header::new("Input")).unwrap();
-write!(snapshot, "{}", CodeFrame::new("python", &content)).unwrap();
-
-write!(snapshot, "{}", Header::new("Black Differences")).unwrap();
-
-let diff = TextDiff::from_lines(expected_output.as_str(), &formatted_code)
-.unified_diff()
-.header("Black", "Ruff")
-.to_string();
-
-write!(snapshot, "{}", CodeFrame::new("diff", &diff)).unwrap();
-
-write!(snapshot, "{}", Header::new("Ruff Output")).unwrap();
-write!(snapshot, "{}", CodeFrame::new("python", &formatted_code)).unwrap();
-
-write!(snapshot, "{}", Header::new("Black Output")).unwrap();
-write!(snapshot, "{}", CodeFrame::new("python", &expected_output)).unwrap();
-
-if !unsupported_syntax_errors.is_empty() {
-write!(snapshot, "{}", Header::new("New Unsupported Syntax Errors")).unwrap();
-writeln!(
-snapshot,
-"{}",
-DisplayDiagnostics::new(
-&DummyFileResolver,
-&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
-&unsupported_syntax_errors
-)
-)
-.unwrap();
-}
-
-insta::with_settings!({
-omit_expression => true,
-input_file => input_path,
-prepend_module_to_snapshot => false,
-}, {
-insta::assert_snapshot!(snapshot);
-});
+if formatted_code == expected_output {
+if snapshot_path.exists() && snapshot_path.is_file() {
+// SAFETY: This is a convenience feature. That's why we don't want to abort
+// when deleting a no longer needed snapshot fails.
+fs::remove_file(&snapshot_path).ok();
 }
-};
 
-insta::glob!(
-"../resources",
-"test/fixtures/black/**/*.{py,pyi}",
-test_file
-);
+let new_snapshot_path = snapshot_path.with_extension("snap.new");
+if new_snapshot_path.exists() && new_snapshot_path.is_file() {
+// SAFETY: This is a convenience feature. That's why we don't want to abort
+// when deleting a no longer needed snapshot fails.
+fs::remove_file(&new_snapshot_path).ok();
+}
+} else {
+// Black and Ruff have different formatting. Write out a snapshot that covers the differences
+// today.
+let mut snapshot = String::new();
+write!(snapshot, "{}", Header::new("Input")).unwrap();
+write!(snapshot, "{}", CodeFrame::new("python", &content)).unwrap();
+
+write!(snapshot, "{}", Header::new("Black Differences")).unwrap();
+
+let diff = TextDiff::from_lines(expected_output.as_str(), &formatted_code)
+.unified_diff()
+.header("Black", "Ruff")
+.to_string();
+
+write!(snapshot, "{}", CodeFrame::new("diff", &diff)).unwrap();
+
+write!(snapshot, "{}", Header::new("Ruff Output")).unwrap();
+write!(snapshot, "{}", CodeFrame::new("python", &formatted_code)).unwrap();
+
+write!(snapshot, "{}", Header::new("Black Output")).unwrap();
+write!(snapshot, "{}", CodeFrame::new("python", &expected_output)).unwrap();
+
+if !unsupported_syntax_errors.is_empty() {
+write!(snapshot, "{}", Header::new("New Unsupported Syntax Errors")).unwrap();
+writeln!(
+snapshot,
+"{}",
+DisplayDiagnostics::new(
+&DummyFileResolver,
+&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
+&unsupported_syntax_errors
+)
+)
+.unwrap();
+}
+
+let mut settings = insta::Settings::clone_current();
+settings.set_omit_expression(true);
+settings.set_input_file(input_path);
+settings.set_prepend_module_to_snapshot(false);
+settings.set_snapshot_suffix(test_name);
+let _settings = settings.bind_to_scope();
+
+assert_snapshot!(snapshot);
+}
+Ok(())
 }
 
-#[test]
-fn format() {
-let test_file = |input_path: &Path| {
-let content = fs::read_to_string(input_path).unwrap();
+#[expect(clippy::needless_pass_by_value)]
+fn format(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
+let test_name = input_path
+.strip_prefix("./resources/test/fixtures/ruff")
+.unwrap_or(input_path)
+.as_str();
 
 let mut snapshot = format!("## Input\n{}", CodeFrame::new("python", &content));
 let options_path = input_path.with_extension("options.json");
 
 if let Ok(options_file) = fs::File::open(&options_path) {
 let reader = BufReader::new(options_file);
-let options: Vec<PyFormatOptions> =
-serde_json::from_reader(reader).unwrap_or_else(|_| {
-panic!("Expected option file {options_path:?} to be a valid Json file")
-});
+let options: Vec<PyFormatOptions> = serde_json::from_reader(reader).map_err(|_| {
+anyhow!("Expected option file {options_path:?} to be a valid Json file")
+})?;
 
 writeln!(snapshot, "## Outputs").unwrap();
 
 for (i, options) in options.into_iter().enumerate() {
-let (formatted_code, unsupported_syntax_errors) =
-format_file(&content, &options, input_path);
-
-writeln!(
-snapshot,
-"### Output {}\n{}{}",
-i + 1,
-CodeFrame::new("", &DisplayPyOptions(&options)),
-CodeFrame::new("python", &formatted_code)
-)
-.unwrap();
-
-if options.preview().is_enabled() {
-continue;
-}
-
-// We want to capture the differences in the preview style in our fixtures
-let options_preview = options.with_preview(PreviewMode::Enabled);
-let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
-
-if formatted_code != formatted_preview {
-// Having both snapshots makes it hard to see the difference, so we're keeping only
-// diff.
-writeln!(
-snapshot,
-"#### Preview changes\n{}",
-CodeFrame::new(
-"diff",
-TextDiff::from_lines(&formatted_code, &formatted_preview)
-.unified_diff()
-.header("Stable", "Preview")
-)
-)
-.unwrap();
-}
-
-if !unsupported_syntax_errors.is_empty() {
-writeln!(
-snapshot,
-"### Unsupported Syntax Errors\n{}",
-DisplayDiagnostics::new(
-&DummyFileResolver,
-&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
-&unsupported_syntax_errors
-)
-)
-.unwrap();
-}
-}
-} else {
-// We want to capture the differences in the preview style in our fixtures
-let options = PyFormatOptions::from_extension(input_path);
 let (formatted_code, unsupported_syntax_errors) =
 format_file(&content, &options, input_path);
 
+writeln!(
+snapshot,
+"### Output {}\n{}{}",
+i + 1,
+CodeFrame::new("", &DisplayPyOptions(&options)),
+CodeFrame::new("python", &formatted_code)
+)
+.unwrap();
+
+if options.preview().is_enabled() {
+continue;
+}
+
+// We want to capture the differences in the preview style in our fixtures
 let options_preview = options.with_preview(PreviewMode::Enabled);
 let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
 
-if formatted_code == formatted_preview {
-writeln!(
-snapshot,
-"## Output\n{}",
-CodeFrame::new("python", &formatted_code)
-)
-.unwrap();
-} else {
+if formatted_code != formatted_preview {
 // Having both snapshots makes it hard to see the difference, so we're keeping only
 // diff.
 writeln!(
 snapshot,
-"## Output\n{}\n## Preview changes\n{}",
-CodeFrame::new("python", &formatted_code),
+"#### Preview changes\n{}",
 CodeFrame::new(
 "diff",
 TextDiff::from_lines(&formatted_code, &formatted_preview)

@@ -298,7 +236,7 @@ fn format()
 if !unsupported_syntax_errors.is_empty() {
 writeln!(
 snapshot,
-"## Unsupported Syntax Errors\n{}",
+"### Unsupported Syntax Errors\n{}",
 DisplayDiagnostics::new(
 &DummyFileResolver,
 &DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),

@@ -308,27 +246,74 @@
 .unwrap();
 }
 }
+} else {
+// We want to capture the differences in the preview style in our fixtures
+let options = PyFormatOptions::from_extension(input_path.as_std_path());
+let (formatted_code, unsupported_syntax_errors) =
+format_file(&content, &options, input_path);
 
-insta::with_settings!({
-omit_expression => true,
-input_file => input_path,
-prepend_module_to_snapshot => false,
-}, {
-insta::assert_snapshot!(snapshot);
-});
-};
+let options_preview = options.with_preview(PreviewMode::Enabled);
+let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
 
-insta::glob!(
-"../resources",
-"test/fixtures/ruff/**/*.{py,pyi}",
-test_file
-);
+if formatted_code == formatted_preview {
+writeln!(
+snapshot,
+"## Output\n{}",
+CodeFrame::new("python", &formatted_code)
+)
+.unwrap();
+} else {
+// Having both snapshots makes it hard to see the difference, so we're keeping only
+// diff.
+writeln!(
+snapshot,
+"## Output\n{}\n## Preview changes\n{}",
+CodeFrame::new("python", &formatted_code),
+CodeFrame::new(
+"diff",
+TextDiff::from_lines(&formatted_code, &formatted_preview)
+.unified_diff()
+.header("Stable", "Preview")
+)
+)
+.unwrap();
+}
+
+if !unsupported_syntax_errors.is_empty() {
+writeln!(
+snapshot,
+"## Unsupported Syntax Errors\n{}",
+DisplayDiagnostics::new(
+&DummyFileResolver,
+&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
+&unsupported_syntax_errors
+)
+)
+.unwrap();
+}
+}
+
+let mut settings = insta::Settings::clone_current();
+settings.set_omit_expression(true);
+settings.set_input_file(input_path);
+settings.set_prepend_module_to_snapshot(false);
+settings.set_snapshot_suffix(test_name);
+let _settings = settings.bind_to_scope();
+
+assert_snapshot!(snapshot);
+
+Ok(())
+}
+
+datatest_stable::harness! {
+{ test = black_compatibility, root = "./resources/test/fixtures/black", pattern = r".+\.pyi?$" },
+{ test = format, root="./resources/test/fixtures/ruff", pattern = r".+\.pyi?$" }
 }
 
 fn format_file(
 source: &str,
 options: &PyFormatOptions,
-input_path: &Path,
+input_path: &Utf8Path,
 ) -> (String, Vec<Diagnostic>) {
 let (unformatted, formatted_code) = if source.contains("<RANGE_START>") {
 let mut content = source.to_string();

@@ -363,8 +348,7 @@
 let formatted =
 format_range(&format_input, range, options.clone()).unwrap_or_else(|err| {
 panic!(
-"Range-formatting of {} to succeed but encountered error {err}",
-input_path.display()
+"Range-formatting of {input_path} to succeed but encountered error {err}",
 )
 });
 

@@ -377,10 +361,7 @@
 (Cow::Owned(without_markers), content)
 } else {
 let printed = format_module_source(source, options.clone()).unwrap_or_else(|err| {
-panic!(
-"Formatting `{input_path} was expected to succeed but it failed: {err}",
-input_path = input_path.display()
-)
+panic!("Formatting `{input_path} was expected to succeed but it failed: {err}",)
 });
 let formatted_code = printed.into_code();
 

@@ -399,22 +380,20 @@
 fn ensure_stability_when_formatting_twice(
 formatted_code: &str,
 options: &PyFormatOptions,
-input_path: &Path,
+input_path: &Utf8Path,
 ) {
 let reformatted = match format_module_source(formatted_code, options.clone()) {
 Ok(reformatted) => reformatted,
 Err(err) => {
 let mut diag = Diagnostic::from(&err);
 if let Some(range) = err.range() {
-let file =
-SourceFileBuilder::new(input_path.to_string_lossy(), formatted_code).finish();
+let file = SourceFileBuilder::new(input_path.as_str(), formatted_code).finish();
 let span = Span::from(file).with_range(range);
 diag.annotate(Annotation::primary(span));
 }
 panic!(
-"Expected formatted code of {} to be valid syntax: {err}:\
+"Expected formatted code of {input_path} to be valid syntax: {err}:\
 \n---\n{formatted_code}---\n{}",
-input_path.display(),
 diag.display(&DummyFileResolver, &DisplayDiagnosticConfig::default()),
 );
 }

@@ -440,7 +419,6 @@ Formatted once:
 Formatted twice:
 ---
 {reformatted}---"#,
-input_path = input_path.display(),
 options = &DisplayPyOptions(options),
 reformatted = reformatted.as_code(),
 );

@@ -467,7 +445,7 @@ fn ensure_unchanged_ast(
 unformatted_code: &str,
 formatted_code: &str,
 options: &PyFormatOptions,
-input_path: &Path,
+input_path: &Utf8Path,
 ) -> Vec<Diagnostic> {
 let source_type = options.source_type();
 

@@ -499,11 +477,7 @@ fn ensure_unchanged_ast(
 formatted_unsupported_syntax_errors
 .retain(|fingerprint, _| !unformatted_unsupported_syntax_errors.contains_key(fingerprint));
 
-let file = SourceFileBuilder::new(
-input_path.file_name().unwrap().to_string_lossy(),
-formatted_code,
-)
-.finish();
+let file = SourceFileBuilder::new(input_path.file_name().unwrap(), formatted_code).finish();
 let diagnostics = formatted_unsupported_syntax_errors
 .values()
 .map(|error| {

@@ -533,11 +507,10 @@ fn ensure_unchanged_ast(
 .header("Unformatted", "Formatted")
 .to_string();
 panic!(
-r#"Reformatting the unformatted code of {} resulted in AST changes.
+r#"Reformatting the unformatted code of {input_path} resulted in AST changes.
 ---
 {diff}
 "#,
-input_path.display(),
 );
 }
 
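With insta's `glob` feature dropped, the per-fixture snapshot naming that `insta::glob!` used to provide is reproduced above by binding insta settings inside each datatest-driven test and using the fixture-relative path as a snapshot suffix. Distilled into a standalone sketch (the helper name and arguments are illustrative, not part of the commit):

    use datatest_stable::Utf8Path;
    use insta::assert_snapshot;

    fn assert_fixture_snapshot(input_path: &Utf8Path, test_name: &str, output: &str) {
        let mut settings = insta::Settings::clone_current();
        settings.set_omit_expression(true);
        settings.set_input_file(input_path);
        settings.set_prepend_module_to_snapshot(false);
        // One snapshot file per fixture, e.g. `black_compatibility@<fixture>.snap`.
        settings.set_snapshot_suffix(test_name);
        let _guard = settings.bind_to_scope();

        assert_snapshot!(output);
    }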
crates/ruff_python_parser/Cargo.toml

@@ -12,6 +12,10 @@ license = { workspace = true }
 
 [lib]
 
+[[test]]
+name = "fixtures"
+harness = false
+
 [dependencies]
 ruff_python_ast = { workspace = true, features = ["get-size"] }
 ruff_python_trivia = { workspace = true }

@@ -34,7 +38,8 @@ ruff_python_ast = { workspace = true, features = ["serde"] }
 ruff_source_file = { workspace = true }
 
 anyhow = { workspace = true }
-insta = { workspace = true, features = ["glob"] }
+datatest-stable = { workspace = true }
+insta = { workspace = true }
 itertools = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
crates/ruff_python_parser/tests/fixtures.rs

@@ -1,9 +1,8 @@
 use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::fmt::{Formatter, Write};
-use std::fs;
-use std::path::Path;
 
+use datatest_stable::Utf8Path;
 use itertools::Itertools;
 use ruff_annotate_snippets::{Level, Renderer, Snippet};
 use ruff_python_ast::token::{Token, Tokens};

@@ -17,38 +16,49 @@ use ruff_python_parser::{Mode, ParseErrorType, ParseOptions, Parsed, parse_unche
 use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
-#[test]
-fn valid_syntax() {
-insta::glob!("../resources", "valid/**/*.py", test_valid_syntax);
+#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
+fn valid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
+test_valid_syntax(path, &content, "./resources/valid");
+Ok(())
 }
 
-#[test]
-fn invalid_syntax() {
-insta::glob!("../resources", "invalid/**/*.py", test_invalid_syntax);
+#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
+fn invalid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
+test_invalid_syntax(path, &content, "./resources/invalid");
+Ok(())
 }
 
-#[test]
-fn inline_ok() {
-insta::glob!("../resources/inline", "ok/**/*.py", test_valid_syntax);
+#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
+fn inline_ok(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
+test_valid_syntax(path, &content, "./resources/inline/ok");
+Ok(())
 }
 
-#[test]
-fn inline_err() {
-insta::glob!("../resources/inline", "err/**/*.py", test_invalid_syntax);
+#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
+fn inline_err(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
+test_invalid_syntax(path, &content, "./resources/inline/err");
+Ok(())
+}
+
+datatest_stable::harness! {
+{ test = valid_syntax, root = "./resources/valid", pattern = r"\.pyi?$" },
+{ test = inline_ok, root = "./resources/inline/ok", pattern = r"\.pyi?$" },
+{ test = invalid_syntax, root = "./resources/invalid", pattern = r"\.pyi?$" },
+{ test = inline_err, root="./resources/inline/err", pattern = r"\.pyi?$" }
 }
 
 /// Asserts that the parser generates no syntax errors for a valid program.
 /// Snapshots the AST.
-fn test_valid_syntax(input_path: &Path) {
-let source = fs::read_to_string(input_path).expect("Expected test file to exist");
-let options = extract_options(&source).unwrap_or_else(|| {
+fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
+let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
+let options = extract_options(source).unwrap_or_else(|| {
 ParseOptions::from(Mode::Module).with_target_version(PythonVersion::latest_preview())
 });
-let parsed = parse_unchecked(&source, options.clone());
+let parsed = parse_unchecked(source, options.clone());
 
 if parsed.has_syntax_errors() {
-let line_index = LineIndex::from_source_text(&source);
-let source_code = SourceCode::new(&source, &line_index);
+let line_index = LineIndex::from_source_text(source);
+let source_code = SourceCode::new(source, &line_index);
 
 let mut message = "Expected no syntax errors for a valid program but the parser generated the following errors:\n".to_string();
 

@@ -81,8 +91,8 @@ fn test_valid_syntax(input_path: &Path) {
 panic!("{input_path:?}: {message}");
 }
 
-validate_tokens(parsed.tokens(), source.text_len(), input_path);
-validate_ast(&parsed, source.text_len(), input_path);
+validate_tokens(parsed.tokens(), source.text_len());
+validate_ast(&parsed, source.text_len());
 
 let mut output = String::new();
 writeln!(&mut output, "## AST").unwrap();

@@ -91,7 +101,7 @@ fn test_valid_syntax(input_path: &Path) {
 let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");
 
 let mut visitor =
-SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
+SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());
 
 for stmt in parsed.suite() {
 visitor.visit_stmt(stmt);

@@ -102,8 +112,8 @@ fn test_valid_syntax(input_path: &Path) {
 if !semantic_syntax_errors.is_empty() {
 let mut message = "Expected no semantic syntax errors for a valid program:\n".to_string();
 
-let line_index = LineIndex::from_source_text(&source);
-let source_code = SourceCode::new(&source, &line_index);
+let line_index = LineIndex::from_source_text(source);
+let source_code = SourceCode::new(source, &line_index);
 
 for error in semantic_syntax_errors {
 writeln!(

@@ -125,6 +135,7 @@ fn test_valid_syntax(input_path: &Path) {
 omit_expression => true,
 input_file => input_path,
 prepend_module_to_snapshot => false,
+snapshot_suffix => test_name
 }, {
 insta::assert_snapshot!(output);
 });

@@ -132,22 +143,23 @@ fn test_valid_syntax(input_path: &Path) {
 
 /// Assert that the parser generates at least one syntax error for the given input file.
 /// Snapshots the AST and the error messages.
-fn test_invalid_syntax(input_path: &Path) {
-let source = fs::read_to_string(input_path).expect("Expected test file to exist");
-let options = extract_options(&source).unwrap_or_else(|| {
+fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
+let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
+
+let options = extract_options(source).unwrap_or_else(|| {
 ParseOptions::from(Mode::Module).with_target_version(PythonVersion::PY314)
 });
-let parsed = parse_unchecked(&source, options.clone());
+let parsed = parse_unchecked(source, options.clone());
 
-validate_tokens(parsed.tokens(), source.text_len(), input_path);
-validate_ast(&parsed, source.text_len(), input_path);
+validate_tokens(parsed.tokens(), source.text_len());
+validate_ast(&parsed, source.text_len());
 
 let mut output = String::new();
 writeln!(&mut output, "## AST").unwrap();
 writeln!(&mut output, "\n```\n{:#?}\n```", parsed.syntax()).unwrap();
 
-let line_index = LineIndex::from_source_text(&source);
-let source_code = SourceCode::new(&source, &line_index);
+let line_index = LineIndex::from_source_text(source);
+let source_code = SourceCode::new(source, &line_index);
 
 if !parsed.errors().is_empty() {
 writeln!(&mut output, "## Errors\n").unwrap();

@@ -186,7 +198,7 @@ fn test_invalid_syntax(input_path: &Path) {
 let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");
 
 let mut visitor =
-SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
+SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());
 
 for stmt in parsed.suite() {
 visitor.visit_stmt(stmt);

@@ -196,7 +208,7 @@ fn test_invalid_syntax(input_path: &Path) {
 
 assert!(
 parsed.has_syntax_errors() || !semantic_syntax_errors.is_empty(),
-"{input_path:?}: Expected parser to generate at least one syntax error for a program containing syntax errors."
+"Expected parser to generate at least one syntax error for a program containing syntax errors."
 );
 
 if !semantic_syntax_errors.is_empty() {

@@ -220,6 +232,7 @@ fn test_invalid_syntax(input_path: &Path) {
 omit_expression => true,
 input_file => input_path,
 prepend_module_to_snapshot => false,
+snapshot_suffix => test_name
 }, {
 insta::assert_snapshot!(output);
 });

@@ -372,26 +385,24 @@ impl std::fmt::Display for CodeFrame<'_> {
 /// Verifies that:
 /// * the ranges are strictly increasing when loop the tokens in insertion order
 /// * all ranges are within the length of the source code
-fn validate_tokens(tokens: &[Token], source_length: TextSize, test_path: &Path) {
+fn validate_tokens(tokens: &[Token], source_length: TextSize) {
 let mut previous: Option<&Token> = None;
 
 for token in tokens {
 assert!(
 token.end() <= source_length,
-"{path}: Token range exceeds the source code length. Token: {token:#?}",
-path = test_path.display()
+"Token range exceeds the source code length. Token: {token:#?}",
 );
 
 if let Some(previous) = previous {
 assert_eq!(
 previous.range().ordering(token.range()),
 Ordering::Less,
-"{path}: Token ranges are not in increasing order
+"Token ranges are not in increasing order
 Previous token: {previous:#?}
 Current token: {token:#?}
 Tokens: {tokens:#?}
 ",
-path = test_path.display(),
 );
 }
 

@@ -403,9 +414,9 @@ Tokens: {tokens:#?}
 /// * the range of the parent node fully encloses all its child nodes
 /// * the ranges are strictly increasing when traversing the nodes in pre-order.
 /// * all ranges are within the length of the source code.
-fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize, test_path: &Path) {
+fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize) {
 walk_module(
-&mut ValidateAstVisitor::new(parsed.tokens(), source_len, test_path),
+&mut ValidateAstVisitor::new(parsed.tokens(), source_len),
 parsed.syntax(),
 );
 }

@@ -416,17 +427,15 @@ struct ValidateAstVisitor<'a> {
 parents: Vec<AnyNodeRef<'a>>,
 previous: Option<AnyNodeRef<'a>>,
 source_length: TextSize,
-test_path: &'a Path,
 }
 
 impl<'a> ValidateAstVisitor<'a> {
-fn new(tokens: &'a Tokens, source_length: TextSize, test_path: &'a Path) -> Self {
+fn new(tokens: &'a Tokens, source_length: TextSize) -> Self {
 Self {
 tokens: tokens.iter().peekable(),
 parents: Vec::new(),
 previous: None,
 source_length,
-test_path,
 }
 }
 }

@@ -444,8 +453,7 @@ impl ValidateAstVisitor<'_> {
 // At this point, next_token.end() > node.start()
 assert!(
 next.start() >= node.start(),
-"{path}: The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
-path = self.test_path.display(),
+"The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
 root = self.parents.first()
 );
 }

@@ -464,8 +472,7 @@ impl ValidateAstVisitor<'_> {
 // At this point, `next_token.end() > node.end()`
 assert!(
 next.start() >= node.end(),
-"{path}: The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
-path = self.test_path.display(),
+"The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
 root = self.parents.first()
 );
 }

@@ -476,16 +483,14 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
 fn enter_node(&mut self, node: AnyNodeRef<'ast>) -> TraversalSignal {
 assert!(
 node.end() <= self.source_length,
-"{path}: The range of the node exceeds the length of the source code. Node: {node:#?}",
-path = self.test_path.display()
+"The range of the node exceeds the length of the source code. Node: {node:#?}",
 );
 
 if let Some(previous) = self.previous {
 assert_ne!(
 previous.range().ordering(node.range()),
 Ordering::Greater,
-"{path}: The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
-path = self.test_path.display(),
+"The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
 root = self.parents.first()
 );
 }

@@ -493,8 +498,7 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
 if let Some(parent) = self.parents.last() {
 assert!(
 parent.range().contains_range(node.range()),
-"{path}: The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
-path = self.test_path.display(),
+"The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
 root = self.parents.first()
 );
 }