Enable `pycodestyle` rules under new "nursery" category (#4407)

This commit is contained in:
Charlie Marsh 2023-05-16 17:21:58 -04:00 committed by GitHub
parent 39fa38cb35
commit 6b1062ccc3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 882 additions and 722 deletions

File diff suppressed because it is too large Load Diff

View File

@ -119,8 +119,7 @@ impl Visitor<'_> for SelectorVisitor {
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str( formatter.write_str(
"expected a string code identifying a linter or specific rule, or a partial rule code \ "expected a string code identifying a linter or specific rule, or a partial rule code or ALL to refer to all rules",
or ALL to refer to all rules",
) )
} }
@ -141,13 +140,22 @@ impl From<RuleCodePrefix> for RuleSelector {
} }
} }
/// Returns `true` if the given rule should be selected by the `RuleSelector::All` selector.
fn select_all(rule: Rule) -> bool {
    // Nursery rules have to be explicitly selected, so we ignore them when looking at
    // prefixes.
    !rule.is_nursery()
}
impl IntoIterator for &RuleSelector { impl IntoIterator for &RuleSelector {
type IntoIter = RuleSelectorIter;
type Item = Rule; type Item = Rule;
type IntoIter = RuleSelectorIter;
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
match self { match self {
RuleSelector::All => RuleSelectorIter::All(Rule::iter()), RuleSelector::All => {
RuleSelectorIter::All(Rule::iter().filter(|rule| select_all(*rule)))
}
RuleSelector::C => RuleSelectorIter::Chain( RuleSelector::C => RuleSelectorIter::Chain(
Linter::Flake8Comprehensions Linter::Flake8Comprehensions
.into_iter() .into_iter()
@ -165,7 +173,7 @@ impl IntoIterator for &RuleSelector {
} }
pub enum RuleSelectorIter { pub enum RuleSelectorIter {
All(RuleIter), All(std::iter::Filter<RuleIter, fn(&Rule) -> bool>),
Chain(std::iter::Chain<std::vec::IntoIter<Rule>, std::vec::IntoIter<Rule>>), Chain(std::iter::Chain<std::vec::IntoIter<Rule>, std::vec::IntoIter<Rule>>),
Vec(std::vec::IntoIter<Rule>), Vec(std::vec::IntoIter<Rule>),
} }

View File

@ -43,6 +43,17 @@ pub(crate) fn rule(rule: Rule, format: HelpFormat) -> Result<()> {
output.push('\n'); output.push('\n');
} }
if rule.is_nursery() {
output.push_str(&format!(
r#"This rule is part of the **nursery**, a collection of newer lints that are
still under development. As such, it must be enabled by explicitly selecting
{}."#,
rule.noqa_code()
));
output.push('\n');
output.push('\n');
}
if let Some(explanation) = rule.explanation() { if let Some(explanation) = rule.explanation() {
output.push_str(explanation.trim()); output.push_str(explanation.trim());
} else { } else {

View File

@ -12,7 +12,7 @@ use path_absolutize::path_dedot;
const BIN_NAME: &str = "ruff"; const BIN_NAME: &str = "ruff";
#[test] #[test]
fn test_stdin_success() -> Result<()> { fn stdin_success() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
cmd.args(["-", "--format", "text", "--isolated"]) cmd.args(["-", "--format", "text", "--isolated"])
.write_stdin("") .write_stdin("")
@ -22,7 +22,7 @@ fn test_stdin_success() -> Result<()> {
} }
#[test] #[test]
fn test_stdin_error() -> Result<()> { fn stdin_error() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args(["-", "--format", "text", "--isolated"]) .args(["-", "--format", "text", "--isolated"])
@ -40,7 +40,7 @@ Found 1 error.
} }
#[test] #[test]
fn test_stdin_filename() -> Result<()> { fn stdin_filename() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args([ .args([
@ -66,7 +66,7 @@ Found 1 error.
#[cfg(unix)] #[cfg(unix)]
#[test] #[test]
fn test_stdin_json() -> Result<()> { fn stdin_json() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args([ .args([
@ -127,7 +127,7 @@ fn test_stdin_json() -> Result<()> {
} }
#[test] #[test]
fn test_stdin_autofix() -> Result<()> { fn stdin_autofix() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args(["-", "--format", "text", "--fix", "--isolated"]) .args(["-", "--format", "text", "--fix", "--isolated"])
@ -142,7 +142,7 @@ fn test_stdin_autofix() -> Result<()> {
} }
#[test] #[test]
fn test_stdin_autofix_when_not_fixable_should_still_print_contents() -> Result<()> { fn stdin_autofix_when_not_fixable_should_still_print_contents() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args(["-", "--format", "text", "--fix", "--isolated"]) .args(["-", "--format", "text", "--fix", "--isolated"])
@ -157,7 +157,7 @@ fn test_stdin_autofix_when_not_fixable_should_still_print_contents() -> Result<(
} }
#[test] #[test]
fn test_stdin_autofix_when_no_issues_should_still_print_contents() -> Result<()> { fn stdin_autofix_when_no_issues_should_still_print_contents() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args(["-", "--format", "text", "--fix", "--isolated"]) .args(["-", "--format", "text", "--fix", "--isolated"])
@ -172,7 +172,7 @@ fn test_stdin_autofix_when_no_issues_should_still_print_contents() -> Result<()>
} }
#[test] #[test]
fn test_show_source() -> Result<()> { fn show_source() -> Result<()> {
let mut cmd = Command::cargo_bin(BIN_NAME)?; let mut cmd = Command::cargo_bin(BIN_NAME)?;
let output = cmd let output = cmd
.args(["-", "--format", "text", "--show-source", "--isolated"]) .args(["-", "--format", "text", "--show-source", "--isolated"])
@ -217,3 +217,63 @@ fn show_statistics() -> Result<()> {
); );
Ok(()) Ok(())
} }
#[test]
fn nursery_prefix() -> Result<()> {
    // Selecting the `E` prefix should detect E741, but not E225, which is in the
    // nursery and therefore excluded from prefix selection.
    let mut cmd = Command::cargo_bin(BIN_NAME)?;
    let assertion = cmd
        .args(["-", "--format", "text", "--isolated", "--select", "E"])
        .write_stdin("I=42\n")
        .assert()
        .failure();
    let stdout = str::from_utf8(&assertion.get_output().stdout)?;
    assert_eq!(
        stdout,
        r#"-:1:1: E741 Ambiguous variable name: `I`
Found 1 error.
"#
    );
    Ok(())
}
#[test]
fn nursery_all() -> Result<()> {
    let mut cmd = Command::cargo_bin(BIN_NAME)?;
    // `--select ALL` should detect E741, but not E225, which is in the nursery.
    // NOTE: this previously passed `--select E`, which duplicated `nursery_prefix`
    // and left the `ALL` selector untested. With `ALL`, D100 (missing module
    // docstring) also fires on the docstring-less stdin module.
    let output = cmd
        .args(["-", "--format", "text", "--isolated", "--select", "ALL"])
        .write_stdin("I=42\n")
        .assert()
        .failure();
    assert_eq!(
        str::from_utf8(&output.get_output().stdout)?,
        r#"-:1:1: D100 Missing docstring in public module
-:1:1: E741 Ambiguous variable name: `I`
Found 2 errors.
"#
    );
    Ok(())
}
#[test]
fn nursery_direct() -> Result<()> {
    // Selecting the full code `E225` opts into the nursery rule directly.
    let mut cmd = Command::cargo_bin(BIN_NAME)?;
    let assertion = cmd
        .args(["-", "--format", "text", "--isolated", "--select", "E225"])
        .write_stdin("I=42\n")
        .assert()
        .failure();
    let stdout = str::from_utf8(&assertion.get_output().stdout)?;
    assert_eq!(
        stdout,
        r#"-:1:2: E225 Missing whitespace around operator
Found 1 error.
"#
    );
    Ok(())
}

View File

@ -43,6 +43,17 @@ pub(crate) fn main(args: &Args) -> Result<()> {
output.push('\n'); output.push('\n');
} }
if rule.is_nursery() {
output.push_str(&format!(
r#"This rule is part of the **nursery**, a collection of newer lints that are
still under development. As such, it must be enabled by explicitly selecting
{}."#,
rule.noqa_code()
));
output.push('\n');
output.push('\n');
}
process_documentation(explanation.trim(), &mut output); process_documentation(explanation.trim(), &mut output);
let filename = PathBuf::from(ROOT_DIR) let filename = PathBuf::from(ROOT_DIR)
@ -116,7 +127,7 @@ mod tests {
#[test] #[test]
fn test_process_documentation() { fn test_process_documentation() {
let mut out = String::new(); let mut output = String::new();
process_documentation( process_documentation(
" "
See also [`mccabe.max-complexity`]. See also [`mccabe.max-complexity`].
@ -127,10 +138,10 @@ Something [`else`][other].
- `mccabe.max-complexity` - `mccabe.max-complexity`
[other]: http://example.com.", [other]: http://example.com.",
&mut out, &mut output,
); );
assert_eq!( assert_eq!(
out, output,
" "
See also [`mccabe.max-complexity`][mccabe.max-complexity]. See also [`mccabe.max-complexity`][mccabe.max-complexity].
Something [`else`][other]. Something [`else`][other].

View File

@ -1,6 +1,6 @@
use std::fmt::{Debug, Display}; use std::fmt::{Debug, Display};
#[derive(Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub enum AutofixKind { pub enum AutofixKind {
Sometimes, Sometimes,
Always, Always,
@ -30,7 +30,7 @@ pub trait Violation: Debug + PartialEq + Eq {
None None
} }
// TODO micha: Move `autofix_title` to `Fix`, add new `advice` method that is shown as an advice. // TODO(micha): Move `autofix_title` to `Fix`, add new `advice` method that is shown as an advice.
// Change the `Diagnostic` renderer to show the advice, and render the fix message after the `Suggested fix: <here>` // Change the `Diagnostic` renderer to show the advice, and render the fix message after the `Suggested fix: <here>`
/// Returns the title for the autofix. The message is also shown as an advice as part of the diagnostics. /// Returns the title for the autofix. The message is also shown as an advice as part of the diagnostics.

View File

@ -15,7 +15,7 @@ mod rule_namespace;
mod violation; mod violation;
#[proc_macro_derive(ConfigurationOptions, attributes(option, doc, option_group))] #[proc_macro_derive(ConfigurationOptions, attributes(option, doc, option_group))]
pub fn derive_config(input: proc_macro::TokenStream) -> proc_macro::TokenStream { pub fn derive_config(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput); let input = parse_macro_input!(input as DeriveInput);
config::derive_impl(input) config::derive_impl(input)
@ -43,13 +43,12 @@ pub fn cache_key(input: TokenStream) -> TokenStream {
} }
#[proc_macro] #[proc_macro]
pub fn register_rules(item: proc_macro::TokenStream) -> proc_macro::TokenStream { pub fn register_rules(item: TokenStream) -> TokenStream {
let mapping = parse_macro_input!(item as register_rules::Input); let mapping = parse_macro_input!(item as register_rules::Input);
register_rules::register_rules(&mapping).into() register_rules::register_rules(&mapping).into()
} }
/// Adds an `explanation()` method from the doc comment and /// Adds an `explanation()` method from the doc comment.
/// `#[derive(Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]`
#[proc_macro_attribute] #[proc_macro_attribute]
pub fn violation(_attr: TokenStream, item: TokenStream) -> TokenStream { pub fn violation(_attr: TokenStream, item: TokenStream) -> TokenStream {
let violation = parse_macro_input!(item as ItemStruct); let violation = parse_macro_input!(item as ItemStruct);
@ -59,7 +58,7 @@ pub fn violation(_attr: TokenStream, item: TokenStream) -> TokenStream {
} }
#[proc_macro_derive(RuleNamespace, attributes(prefix))] #[proc_macro_derive(RuleNamespace, attributes(prefix))]
pub fn derive_rule_namespace(input: proc_macro::TokenStream) -> proc_macro::TokenStream { pub fn derive_rule_namespace(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput); let input = parse_macro_input!(input as DeriveInput);
rule_namespace::derive_impl(input) rule_namespace::derive_impl(input)

View File

@ -8,21 +8,25 @@ use syn::{
Ident, ItemFn, LitStr, Pat, Path, Stmt, Token, Ident, ItemFn, LitStr, Pat, Path, Stmt, Token,
}; };
use crate::rule_code_prefix::{get_prefix_ident, if_all_same}; use crate::rule_code_prefix::{get_prefix_ident, if_all_same, is_nursery};
pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> { pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
let Some(last_stmt) = func.block.stmts.last() else { let Some(last_stmt) = func.block.stmts.last() else {
return Err(Error::new(func.block.span(), "expected body to end in an expression")); return Err(Error::new(func.block.span(), "expected body to end in an expression"));
}; };
let Stmt::Expr(Expr::Call(ExprCall{args: some_args, ..}), _) = last_stmt else { let Stmt::Expr(Expr::Call(ExprCall{args: some_args, ..}), _) = last_stmt else {
return Err(Error::new(last_stmt.span(), "expected last expression to be Some(match (..) { .. })")) return Err(Error::new(last_stmt.span(), "expected last expression to be `Some(match (..) { .. })`"))
}; };
let mut some_args = some_args.into_iter(); let mut some_args = some_args.into_iter();
let (Some(Expr::Match(ExprMatch { arms, .. })), None) = (some_args.next(), some_args.next()) else { let (Some(Expr::Match(ExprMatch { arms, .. })), None) = (some_args.next(), some_args.next()) else {
return Err(Error::new(last_stmt.span(), "expected last expression to be Some(match (..) { .. })")) return Err(Error::new(last_stmt.span(), "expected last expression to be `Some(match (..) { .. })`"))
}; };
let mut linters: BTreeMap<Ident, BTreeMap<String, (Path, Vec<Attribute>)>> = BTreeMap::new(); // Map from: linter (e.g., `Flake8Bugbear`) to rule code (e.g.,`"002"`) to rule data (e.g.,
// `(Rule::UnaryPrefixIncrement, RuleGroup::Unspecified, vec![])`).
#[allow(clippy::type_complexity)]
let mut linter_to_rules: BTreeMap<Ident, BTreeMap<String, (Path, Path, Vec<Attribute>)>> =
BTreeMap::new();
for arm in arms { for arm in arms {
if matches!(arm.pat, Pat::Wild(..)) { if matches!(arm.pat, Pat::Wild(..)) {
@ -30,15 +34,15 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
} }
let entry = syn::parse::<Entry>(arm.into_token_stream().into())?; let entry = syn::parse::<Entry>(arm.into_token_stream().into())?;
linters linter_to_rules
.entry(entry.linter) .entry(entry.linter)
.or_default() .or_default()
.insert(entry.code.value(), (entry.rule, entry.attrs)); .insert(entry.code.value(), (entry.rule, entry.group, entry.attrs));
} }
let linter_idents: Vec<_> = linters.keys().collect(); let linter_idents: Vec<_> = linter_to_rules.keys().collect();
let mut out = quote! { let mut output = quote! {
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RuleCodePrefix { pub enum RuleCodePrefix {
#(#linter_idents(#linter_idents),)* #(#linter_idents(#linter_idents),)*
@ -47,7 +51,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
impl RuleCodePrefix { impl RuleCodePrefix {
pub fn linter(&self) -> &'static Linter { pub fn linter(&self) -> &'static Linter {
match self { match self {
#(Self::#linter_idents(..) => &crate::registry::Linter::#linter_idents,)* #(Self::#linter_idents(..) => &Linter::#linter_idents,)*
} }
} }
@ -59,13 +63,15 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
} }
}; };
for (linter, map) in &linters { for (linter, rules) in &linter_to_rules {
out.extend(super::rule_code_prefix::expand( output.extend(super::rule_code_prefix::expand(
linter, linter,
map.iter().map(|(k, v)| (k.as_str(), &v.1)), rules
.iter()
.map(|(code, (.., group, attrs))| (code.as_str(), group, attrs)),
)); ));
out.extend(quote! { output.extend(quote! {
impl From<#linter> for RuleCodePrefix { impl From<#linter> for RuleCodePrefix {
fn from(linter: #linter) -> Self { fn from(linter: #linter) -> Self {
Self::#linter(linter) Self::#linter(linter)
@ -81,32 +87,44 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
let mut all_codes = Vec::new(); let mut all_codes = Vec::new();
for (linter, map) in &linters { for (linter, rules) in &linter_to_rules {
let mut full_map: HashMap<_, _> = map // Group the rules by their common prefixes.
.iter() // TODO(charlie): Why do we do this here _and_ in `rule_code_prefix::expand`?
.map(|(code, rule)| (code.clone(), vec![rule.clone()])) let mut rules_by_prefix = BTreeMap::new();
.collect();
for code in map.keys() { for (code, (rule, group, attrs)) in rules {
// Nursery rules have to be explicitly selected, so we ignore them when looking at
// prefixes.
if is_nursery(group) {
rules_by_prefix.insert(code.clone(), vec![(rule.clone(), attrs.clone())]);
continue;
}
for i in 1..=code.len() { for i in 1..=code.len() {
let prefix = code[..i].to_string(); let prefix = code[..i].to_string();
let rules: Vec<_> = map let rules: Vec<_> = rules
.iter() .iter()
.filter_map(|(code, rules)| { .filter_map(|(code, (rule, group, attrs))| {
// Nursery rules have to be explicitly selected, so we ignore them when
// looking at prefixes.
if is_nursery(group) {
return None;
}
if code.starts_with(&prefix) { if code.starts_with(&prefix) {
Some(rules) Some((rule.clone(), attrs.clone()))
} else { } else {
None None
} }
}) })
.cloned()
.collect(); .collect();
full_map.insert(prefix, rules); rules_by_prefix.insert(prefix, rules);
} }
} }
for (code, names) in &full_map { for (prefix, rules) in &rules_by_prefix {
let prefix_ident = get_prefix_ident(code); let prefix_ident = get_prefix_ident(prefix);
let attr = match if_all_same(names.iter().map(|(_, attrs)| attrs)) { let attr = match if_all_same(rules.iter().map(|(.., attrs)| attrs)) {
Some(attr) => quote!(#(#attr)*), Some(attr) => quote!(#(#attr)*),
None => quote!(), None => quote!(),
}; };
@ -117,10 +135,12 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
let mut prefix_into_iter_match_arms = quote!(); let mut prefix_into_iter_match_arms = quote!();
for (code, rules) in full_map { for (prefix, rules) in rules_by_prefix {
let rule_paths = rules.iter().map(|(path, attrs)| quote!(#(#attrs)* #path)); let rule_paths = rules
let prefix_ident = get_prefix_ident(&code); .iter()
let attr = match if_all_same(rules.iter().map(|(_, attrs)| attrs)) { .map(|(path, .., attrs)| quote!(#(#attrs)* #path));
let prefix_ident = get_prefix_ident(&prefix);
let attr = match if_all_same(rules.iter().map(|(.., attrs)| attrs)) {
Some(attr) => quote!(#(#attr)*), Some(attr) => quote!(#(#attr)*),
None => quote!(), None => quote!(),
}; };
@ -129,7 +149,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
}); });
} }
out.extend(quote! { output.extend(quote! {
impl IntoIterator for &#linter { impl IntoIterator for &#linter {
type Item = Rule; type Item = Rule;
type IntoIter = ::std::vec::IntoIter<Self::Item>; type IntoIter = ::std::vec::IntoIter<Self::Item>;
@ -141,7 +161,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
}); });
} }
out.extend(quote! { output.extend(quote! {
impl IntoIterator for &RuleCodePrefix { impl IntoIterator for &RuleCodePrefix {
type Item = Rule; type Item = Rule;
type IntoIter = ::std::vec::IntoIter<Self::Item>; type IntoIter = ::std::vec::IntoIter<Self::Item>;
@ -154,7 +174,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
} }
}); });
out.extend(quote! { output.extend(quote! {
impl RuleCodePrefix { impl RuleCodePrefix {
pub fn parse(linter: &Linter, code: &str) -> Result<Self, crate::registry::FromCodeError> { pub fn parse(linter: &Linter, code: &str) -> Result<Self, crate::registry::FromCodeError> {
use std::str::FromStr; use std::str::FromStr;
@ -166,16 +186,22 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
} }
}); });
// Map from rule to codes that can be used to select it.
// This abstraction exists to support a one-to-many mapping, whereby a single rule could map
// to multiple codes (e.g., if it existed in multiple linters, like Pylint and Flake8, under
// different codes). We haven't actually activated this functionality yet, but some work was
// done to support it, so the logic exists here.
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
let mut rule_to_codes: HashMap<&Path, Vec<(&Ident, &str, &[Attribute])>> = HashMap::new(); let mut rule_to_codes: HashMap<&Path, Vec<(&Ident, &str, &Path, &[Attribute])>> =
HashMap::new();
let mut linter_code_for_rule_match_arms = quote!(); let mut linter_code_for_rule_match_arms = quote!();
for (linter, map) in &linters { for (linter, map) in &linter_to_rules {
for (code, (rule, attrs)) in map { for (code, (rule, group, attrs)) in map {
rule_to_codes rule_to_codes
.entry(rule) .entry(rule)
.or_default() .or_default()
.push((linter, code, attrs)); .push((linter, code, group, attrs));
linter_code_for_rule_match_arms.extend(quote! { linter_code_for_rule_match_arms.extend(quote! {
#(#attrs)* (Self::#linter, #rule) => Some(#code), #(#attrs)* (Self::#linter, #rule) => Some(#code),
}); });
@ -183,50 +209,66 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
} }
let mut rule_noqa_code_match_arms = quote!(); let mut rule_noqa_code_match_arms = quote!();
let mut rule_group_match_arms = quote!();
for (rule, codes) in rule_to_codes { for (rule, codes) in rule_to_codes {
assert!( assert_eq!(
codes.len() == 1, codes.len(),
1,
" "
The mapping of multiple codes to one rule has been disabled due to UX concerns (it would {} is mapped to multiple codes.
be confusing if violations were reported under a different code than the code you selected).
We firstly want to allow rules to be selected by their names (and report them by name), The mapping of multiple codes to one rule has been disabled due to UX concerns (it would
and before we can do that we have to rename all our rules to match our naming convention be confusing if violations were reported under a different code than the code you selected).
(see CONTRIBUTING.md) because after that change every rule rename will be a breaking change.
See also https://github.com/charliermarsh/ruff/issues/2186. We firstly want to allow rules to be selected by their names (and report them by name),
and before we can do that we have to rename all our rules to match our naming convention
(see CONTRIBUTING.md) because after that change every rule rename will be a breaking change.
(this was triggered by {} being mapped to multiple codes) See also https://github.com/charliermarsh/ruff/issues/2186.
", ",
rule.segments.last().unwrap().ident rule.segments.last().unwrap().ident
); );
let (linter, code, attrs) = codes let (linter, code, group, attrs) = codes
.iter() .iter()
.sorted_by_key(|(l, ..)| *l == "Pylint") // TODO: more sophisticated sorting .sorted_by_key(|(l, ..)| *l == "Pylint")
.next() .next()
.unwrap(); .unwrap();
rule_noqa_code_match_arms.extend(quote! { rule_noqa_code_match_arms.extend(quote! {
#(#attrs)* #rule => NoqaCode(crate::registry::Linter::#linter.common_prefix(), #code), #(#attrs)* #rule => NoqaCode(crate::registry::Linter::#linter.common_prefix(), #code),
}); });
rule_group_match_arms.extend(quote! {
#(#attrs)* #rule => #group,
});
} }
out.extend(quote! { output.extend(quote! {
impl crate::registry::Rule { impl Rule {
pub fn noqa_code(&self) -> NoqaCode { pub fn noqa_code(&self) -> NoqaCode {
use crate::registry::RuleNamespace; use crate::registry::RuleNamespace;
match self { match self {
#rule_noqa_code_match_arms #rule_noqa_code_match_arms
// TODO: support rules without codes
// rule => rule.as_ref()
} }
} }
pub fn group(&self) -> RuleGroup {
use crate::registry::RuleNamespace;
match self {
#rule_group_match_arms
}
}
pub fn is_nursery(&self) -> bool {
matches!(self.group(), RuleGroup::Nursery)
}
} }
impl crate::registry::Linter { impl Linter {
pub fn code_for_rule(&self, rule: Rule) -> Option<&'static str> { pub fn code_for_rule(&self, rule: Rule) -> Option<&'static str> {
match (self, rule) { match (self, rule) {
#linter_code_for_rule_match_arms #linter_code_for_rule_match_arms
@ -237,16 +279,17 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
}); });
let mut linter_into_iter_match_arms = quote!(); let mut linter_into_iter_match_arms = quote!();
for (linter, map) in &linters { for (linter, map) in &linter_to_rules {
let rule_paths = map.values().map(|(path, attrs)| quote!(#(#attrs)* #path)); let rule_paths = map
.values()
.map(|(path, .., attrs)| quote!(#(#attrs)* #path));
linter_into_iter_match_arms.extend(quote! { linter_into_iter_match_arms.extend(quote! {
crate::registry::Linter::#linter => vec![#(#rule_paths,)*].into_iter(), Linter::#linter => vec![#(#rule_paths,)*].into_iter(),
}); });
} }
out.extend(quote! { output.extend(quote! {
impl IntoIterator for &Linter {
impl IntoIterator for &crate::registry::Linter {
type Item = Rule; type Item = Rule;
type IntoIter = ::std::vec::IntoIter<Self::Item>; type IntoIter = ::std::vec::IntoIter<Self::Item>;
@ -259,7 +302,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
}); });
out.extend(quote! { output.extend(quote! {
impl RuleCodePrefix { impl RuleCodePrefix {
pub fn iter() -> ::std::vec::IntoIter<RuleCodePrefix> { pub fn iter() -> ::std::vec::IntoIter<RuleCodePrefix> {
vec![ #(#all_codes,)* ].into_iter() vec![ #(#all_codes,)* ].into_iter()
@ -267,18 +310,19 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
} }
}); });
Ok(out) Ok(output)
} }
struct Entry { struct Entry {
linter: Ident, linter: Ident,
code: LitStr, code: LitStr,
group: Path,
rule: Path, rule: Path,
attrs: Vec<Attribute>, attrs: Vec<Attribute>,
} }
impl Parse for Entry { impl Parse for Entry {
/// Parses a match arm such as `(Pycodestyle, "E101") => Rule::MixedSpacesAndTabs,` /// Parses a match arm such as `(Pycodestyle, "E112") => (RuleGroup::Nursery, Rule::NoIndentedBlock),`
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
let attrs = Attribute::parse_outer(input)?; let attrs = Attribute::parse_outer(input)?;
let pat_tuple; let pat_tuple;
@ -287,11 +331,16 @@ impl Parse for Entry {
let _: Token!(,) = pat_tuple.parse()?; let _: Token!(,) = pat_tuple.parse()?;
let code: LitStr = pat_tuple.parse()?; let code: LitStr = pat_tuple.parse()?;
let _: Token!(=>) = input.parse()?; let _: Token!(=>) = input.parse()?;
let rule: Path = input.parse()?; let pat_tuple;
parenthesized!(pat_tuple in input);
let group: Path = pat_tuple.parse()?;
let _: Token!(,) = pat_tuple.parse()?;
let rule: Path = pat_tuple.parse()?;
let _: Token!(,) = input.parse()?; let _: Token!(,) = input.parse()?;
Ok(Entry { Ok(Entry {
linter, linter,
code, code,
group,
rule, rule,
attrs, attrs,
}) })

View File

@ -2,35 +2,34 @@ use std::collections::{BTreeMap, BTreeSet};
use proc_macro2::Span; use proc_macro2::Span;
use quote::quote; use quote::quote;
use syn::{Attribute, Ident}; use syn::{Attribute, Ident, Path};
pub(crate) fn get_prefix_ident(prefix: &str) -> Ident {
let prefix = if prefix.as_bytes()[0].is_ascii_digit() {
// Identifiers in Rust may not start with a number.
format!("_{prefix}")
} else {
prefix.to_string()
};
Ident::new(&prefix, Span::call_site())
}
pub(crate) fn expand<'a>( pub(crate) fn expand<'a>(
prefix_ident: &Ident, prefix_ident: &Ident,
variants: impl Iterator<Item = (&'a str, &'a Vec<Attribute>)>, variants: impl Iterator<Item = (&'a str, &'a Path, &'a Vec<Attribute>)>,
) -> proc_macro2::TokenStream { ) -> proc_macro2::TokenStream {
// Build up a map from prefix to matching RuleCodes. // Build up a map from prefix to matching RuleCodes.
let mut prefix_to_codes: BTreeMap<String, BTreeSet<String>> = BTreeMap::default(); let mut prefix_to_codes: BTreeMap<String, BTreeSet<String>> = BTreeMap::default();
let mut code_to_attributes: BTreeMap<String, &[Attribute]> = BTreeMap::default(); let mut code_to_attributes: BTreeMap<String, &[Attribute]> = BTreeMap::default();
for (variant, attr) in variants { for (variant, group, attr) in variants {
let code_str = variant.to_string(); let code_str = variant.to_string();
for i in 1..=code_str.len() { // Nursery rules have to be explicitly selected, so we ignore them when looking at prefixes.
let prefix = code_str[..i].to_string(); if is_nursery(group) {
prefix_to_codes prefix_to_codes
.entry(prefix) .entry(code_str.clone())
.or_default() .or_default()
.insert(code_str.clone()); .insert(code_str.clone());
} else {
for i in 1..=code_str.len() {
let prefix = code_str[..i].to_string();
prefix_to_codes
.entry(prefix)
.or_default()
.insert(code_str.clone());
}
} }
code_to_attributes.insert(code_str, attr); code_to_attributes.insert(code_str, attr);
} }
@ -115,3 +114,25 @@ pub(crate) fn if_all_same<T: PartialEq>(iter: impl Iterator<Item = T>) -> Option
None None
} }
} }
/// Returns an identifier for the given prefix.
pub(crate) fn get_prefix_ident(prefix: &str) -> Ident {
    // Rust identifiers may not begin with a digit, so numeric prefixes
    // (e.g., `101`) are prefixed with an underscore (`_101`).
    let name = match prefix.as_bytes()[0] {
        b'0'..=b'9' => format!("_{prefix}"),
        _ => prefix.to_string(),
    };
    Ident::new(&name, Span::call_site())
}
/// Returns true if the given group is the "nursery" group.
pub(crate) fn is_nursery(group: &Path) -> bool {
    // Render the path (e.g., `RuleGroup::Nursery`) by joining its segment
    // identifiers with `::`, then compare against the nursery variant.
    let mut rendered = String::new();
    for (index, segment) in group.segments.iter().enumerate() {
        if index > 0 {
            rendered.push_str("::");
        }
        rendered.push_str(&segment.ident.to_string());
    }
    rendered == "RuleGroup::Nursery"
}

9
ruff.schema.json generated
View File

@ -1688,7 +1688,6 @@
"E1", "E1",
"E10", "E10",
"E101", "E101",
"E11",
"E111", "E111",
"E112", "E112",
"E113", "E113",
@ -1696,14 +1695,10 @@
"E115", "E115",
"E116", "E116",
"E117", "E117",
"E2",
"E20",
"E201", "E201",
"E202", "E202",
"E203", "E203",
"E21",
"E211", "E211",
"E22",
"E221", "E221",
"E222", "E222",
"E223", "E223",
@ -1712,17 +1707,13 @@
"E226", "E226",
"E227", "E227",
"E228", "E228",
"E23",
"E231", "E231",
"E25",
"E251", "E251",
"E252", "E252",
"E26",
"E261", "E261",
"E262", "E262",
"E265", "E265",
"E266", "E266",
"E27",
"E271", "E271",
"E272", "E272",
"E273", "E273",

View File

@ -181,9 +181,10 @@ pub fn {rule_name_snake}(checker: &mut Checker) {{}}
while (line := next(fp)).strip() != "": while (line := next(fp)).strip() != "":
lines.append(line) lines.append(line)
linter_variant = pascal_case(linter) variant = pascal_case(linter)
lines.append( lines.append(
" " * 8 + f"""({linter_variant}, "{code}") => Rule::{name},\n""", " " * 8
+ f"""({variant}, "{code}") => (RuleGroup::Unspecified, Rule::{name}),\n""",
) )
lines.sort() lines.sort()

View File

@ -11,13 +11,12 @@ line-length = 88
line-length = 88 line-length = 88
select = ["ALL"] select = ["ALL"]
ignore = [ ignore = [
"C901", # McCabe complexity "C901", # McCabe complexity
"PL", # pylint "PL", # pylint
"S", # bandit "S", # bandit
"G", # flake8-logging "G", # flake8-logging
"T", # flake8-print "T", # flake8-print
"FBT", # flake8-boolean-trap "FBT", # flake8-boolean-trap
"E203",
] ]
[tool.ruff.pydocstyle] [tool.ruff.pydocstyle]