refactor: Generate Linter -> RuleSelector mapping via macro

To enable ruff_dev to automatically generate the rule Markdown tables in
the README, the ruff library contained the following function:

    Linter::codes() -> Vec<RuleSelector>

which was slightly changed to `fn prefixes(&self) -> Prefixes` in
9dc66b5a65 to enable ruff_dev to split
up the Markdown tables for linters that have multiple prefixes
(pycodestyle has E & W, Pylint has PLC, PLE, PLR & PLW).

The definition of this method was, however, largely redundant with the
#[prefix] macro attributes in the Linter enum, which are used to
derive the Linter::parse_code function that the --explain command relies on.
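
For illustration only (this snippet is not part of the change), parse_code
splits a full rule code into the owning linter and the remaining suffix,
based on the declared prefixes:

    // Hypothetical usage sketch; E/W and C90 are the prefixes declared on
    // Pycodestyle and McCabe in src/registry.rs. Requires the RuleNamespace
    // trait to be in scope.
    if let Some((linter, rest)) = Linter::parse_code("E501") {
        assert!(linter == Linter::Pycodestyle && rest == "501");
    }
    if let Some((linter, rest)) = Linter::parse_code("C901") {
        assert!(linter == Linter::McCabe && rest == "1");
    }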

This commit removes the redundant Linter::prefixes by introducing a
method of the same name (but with a different signature) on the RuleNamespace trait:

     fn prefixes(&self) -> &'static [&'static str];

as well as by implementing IntoIterator<Item = Rule> for &Linter. We extend
the existing RuleNamespace proc macro to automatically derive both
implementations from the Linter enum definition.
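
Purely as a sketch (not part of the diff), the two derived implementations
can then be used like this, assuming the E & W prefixes declared on
Pycodestyle:

    // prefixes() returns the literals from the #[prefix] attributes
    // (the RuleNamespace trait must be in scope):
    assert!(Linter::Pycodestyle.prefixes() == ["E", "W"]);

    // &Linter yields every Rule registered under the linter's prefixes:
    for rule in &Linter::Pycodestyle {
        let _ = rule; // e.g. render one Markdown table row per rule
    }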

To support the previously mentioned Markdown table splitting, we
introduce a very simple hand-written method on the Linter impl:

    fn categories(&self) -> Option<&'static [LinterCategory]>;
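
As a rough sketch (mirroring the ruff_dev change below), a consumer can use
categories() to split a linter's rule table into per-category subsections:

    // Illustrative only; field order is (prefix, name, selector).
    if let Some(categories) = Linter::Pylint.categories() {
        for LinterCategory(prefix, name, selector) in categories {
            println!("#### {name} ({prefix})");
            for rule in selector {
                let _ = rule; // emit one table row per rule
            }
        }
    }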

Authored by Martin Fischer on 2023-01-21 07:19:10 +01:00; committed by Charlie Marsh.
Parent: c3dd1b0e3c
Commit: 4758ee6ac4
6 changed files with 77 additions and 87 deletions


@@ -134,7 +134,7 @@ developer of [Zulip](https://github.com/zulip/zulip):
 1. [eradicate (ERA)](#eradicate-era)
 1. [pandas-vet (PD)](#pandas-vet-pd)
 1. [pygrep-hooks (PGH)](#pygrep-hooks-pgh)
-1. [Pylint (PLC, PLE, PLR, PLW)](#pylint-plc-ple-plr-plw)
+1. [Pylint (PL)](#pylint-pl)
 1. [flake8-pie (PIE)](#flake8-pie-pie)
 1. [flake8-commas (COM)](#flake8-commas-com)
 1. [flake8-no-pep420 (INP)](#flake8-no-pep420-inp)

@@ -1108,7 +1108,7 @@ For more, see [pygrep-hooks](https://github.com/pre-commit/pygrep-hooks) on GitHub.
 | PGH003 | blanket-type-ignore | Use specific rule codes when ignoring type issues | |
 | PGH004 | blanket-noqa | Use specific rule codes when using `noqa` | |
 
-### Pylint (PLC, PLE, PLR, PLW)
+### Pylint (PL)
 
 For more, see [Pylint](https://pypi.org/project/pylint/2.15.7/) on PyPI.


@@ -13,7 +13,7 @@ use assert_cmd::Command;
 use itertools::Itertools;
 use log::info;
 use ruff::logging::{set_up_logging, LogLevel};
-use ruff::registry::Linter;
+use ruff::registry::{Linter, RuleNamespace};
 use strum::IntoEnumIterator;
 use walkdir::WalkDir;

@@ -180,7 +180,7 @@ fn test_ruff_black_compatibility() -> Result<()> {
         // problem. Ruff would add a `# noqa: W292` after the first run, black introduces a
         // newline, and ruff removes the `# noqa: W292` again.
         .filter(|linter| *linter != Linter::Ruff)
-        .map(|linter| linter.prefixes().as_list(","))
+        .map(|linter| linter.prefixes().join(","))
         .join(",");
     let ruff_args = [
         "-",


@@ -2,7 +2,7 @@
 use anyhow::Result;
 use clap::Args;
-use ruff::registry::{Linter, Prefixes, RuleSelector};
+use ruff::registry::{Linter, LinterCategory, Rule, RuleNamespace};
 use strum::IntoEnumIterator;
 
 use crate::utils::replace_readme_section;

@@ -20,12 +20,12 @@ pub struct Cli {
     pub(crate) dry_run: bool,
 }
 
-fn generate_table(table_out: &mut String, selector: &RuleSelector) {
+fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>) {
     table_out.push_str("| Code | Name | Message | Fix |");
     table_out.push('\n');
     table_out.push_str("| ---- | ---- | ------- | --- |");
     table_out.push('\n');
-    for rule in selector {
+    for rule in rules {
         let fix_token = match rule.autofixable() {
             None => "",
             Some(_) => "🛠",

@@ -48,8 +48,7 @@ pub fn main(cli: &Cli) -> Result<()> {
     let mut table_out = String::new();
     let mut toc_out = String::new();
     for linter in Linter::iter() {
-        let prefixes = linter.prefixes();
-        let codes_csv: String = prefixes.as_list(", ");
+        let codes_csv: String = linter.prefixes().join(", ");
         table_out.push_str(&format!("### {} ({codes_csv})", linter.name()));
         table_out.push('\n');
         table_out.push('\n');

@@ -86,15 +85,14 @@ pub fn main(cli: &Cli) -> Result<()> {
             table_out.push('\n');
         }
 
-        match prefixes {
-            Prefixes::Single(prefix) => generate_table(&mut table_out, &prefix),
-            Prefixes::Multiple(entries) => {
-                for (selector, category) in entries {
-                    table_out.push_str(&format!("#### {category} ({})", selector.as_ref()));
-                    table_out.push('\n');
-                    generate_table(&mut table_out, &selector);
-                }
-            }
-        }
+        if let Some(categories) = linter.categories() {
+            for LinterCategory(prefix, name, selector) in categories {
+                table_out.push_str(&format!("#### {name} ({prefix})"));
+                table_out.push('\n');
+                generate_table(&mut table_out, selector);
+            }
+        } else {
+            generate_table(&mut table_out, &linter);
+        }
     }


@@ -1,3 +1,4 @@
+use proc_macro2::{Ident, Span};
 use quote::quote;
 use syn::spanned::Spanned;
 use syn::{Data, DataEnum, DeriveInput, Error, Lit, Meta, MetaNameValue};

@@ -11,6 +12,8 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
     let mut parsed = Vec::new();
 
+    let mut prefix_match_arms = quote!();
+
     for variant in variants {
         let prefix_attrs: Vec<_> = variant
             .attrs

@@ -25,23 +28,48 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
             ));
         }
 
+        let mut prefix_literals = Vec::new();
+
         for attr in prefix_attrs {
             let Ok(Meta::NameValue(MetaNameValue{lit: Lit::Str(lit), ..})) = attr.parse_meta() else {
                 return Err(Error::new(attr.span(), r#"expected attribute to be in the form of [#prefix = "..."]"#))
             };
-            parsed.push((lit, variant.ident.clone()));
+            parsed.push((lit.clone(), variant.ident.clone()));
+            prefix_literals.push(lit);
         }
+
+        let variant_ident = variant.ident;
+
+        prefix_match_arms.extend(quote! {
+            Self::#variant_ident => &[#(#prefix_literals),*],
+        });
     }
 
     parsed.sort_by_key(|(prefix, _)| prefix.value().len());
 
     let mut if_statements = quote!();
+    let mut into_iter_match_arms = quote!();
 
     for (prefix, field) in parsed {
         if_statements.extend(quote! {if let Some(rest) = code.strip_prefix(#prefix) {
             return Some((#ident::#field, rest));
         }});
+
+        let prefix_ident = Ident::new(&prefix.value(), Span::call_site());
+
+        if field != "Pycodestyle" {
+            into_iter_match_arms.extend(quote! {
+                #ident::#field => RuleSelector::#prefix_ident.into_iter(),
+            });
+        }
     }
 
+    into_iter_match_arms.extend(quote! {
+        #ident::Pycodestyle => {
+            let rules: Vec<_> = (&RuleSelector::E).into_iter().chain(&RuleSelector::W).collect();
+            rules.into_iter()
+        }
+    });
+
     Ok(quote! {
         impl crate::registry::RuleNamespace for #ident {

@@ -49,6 +77,24 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
                 #if_statements
                 None
             }
+
+            fn prefixes(&self) -> &'static [&'static str] {
+                match self { #prefix_match_arms }
+            }
+        }
+
+        impl IntoIterator for &#ident {
+            type Item = Rule;
+            type IntoIter = ::std::vec::IntoIter<Self::Item>;
+
+            fn into_iter(self) -> Self::IntoIter {
+                use colored::Colorize;
+                match self {
+                    #into_iter_match_arms
+                }
+            }
         }
     })
 }


@@ -89,12 +89,6 @@ mod tests {
             fp.write(f"{indent}{pascal_case(plugin)},")
             fp.write("\n")
-        elif line.strip() == "Linter::Ruff => Prefixes::Single(RuleSelector::RUF),":
-            fp.write(
-                f"{indent}Linter::{pascal_case(plugin)} => Prefixes::Single(RuleSelector::{prefix_code}),"
-            )
-            fp.write("\n")
-
         fp.write(line)
         fp.write("\n")


@@ -1,6 +1,5 @@
 //! Registry of [`Rule`] to [`DiagnosticKind`] mappings.
 
-use itertools::Itertools;
 use once_cell::sync::Lazy;
 use ruff_macros::RuleNamespace;
 use rustc_hash::FxHashMap;

@@ -446,7 +445,7 @@ pub enum Linter {
     #[prefix = "E"]
     #[prefix = "W"]
     Pycodestyle,
-    #[prefix = "C9"]
+    #[prefix = "C90"]
     McCabe,
     #[prefix = "I"]
     Isort,

@@ -522,76 +521,29 @@ pub enum Linter {
 pub trait RuleNamespace: Sized {
     fn parse_code(code: &str) -> Option<(Self, &str)>;
-}
 
-pub enum Prefixes {
-    Single(RuleSelector),
-    Multiple(Vec<(RuleSelector, &'static str)>),
-}
-
-impl Prefixes {
-    pub fn as_list(&self, separator: &str) -> String {
-        match self {
-            Prefixes::Single(prefix) => prefix.as_ref().to_string(),
-            Prefixes::Multiple(entries) => entries
-                .iter()
-                .map(|(prefix, _)| prefix.as_ref())
-                .join(separator),
-        }
-    }
+    fn prefixes(&self) -> &'static [&'static str];
 }
 
 include!(concat!(env!("OUT_DIR"), "/linter.rs"));
 
+/// The prefix, name and selector for an upstream linter category.
+pub struct LinterCategory(pub &'static str, pub &'static str, pub RuleSelector);
+
 impl Linter {
-    pub fn prefixes(&self) -> Prefixes {
+    pub fn categories(&self) -> Option<&'static [LinterCategory]> {
         match self {
-            Linter::Eradicate => Prefixes::Single(RuleSelector::ERA),
-            Linter::Flake82020 => Prefixes::Single(RuleSelector::YTT),
-            Linter::Flake8Annotations => Prefixes::Single(RuleSelector::ANN),
-            Linter::Flake8Bandit => Prefixes::Single(RuleSelector::S),
-            Linter::Flake8BlindExcept => Prefixes::Single(RuleSelector::BLE),
-            Linter::Flake8BooleanTrap => Prefixes::Single(RuleSelector::FBT),
-            Linter::Flake8Bugbear => Prefixes::Single(RuleSelector::B),
-            Linter::Flake8Builtins => Prefixes::Single(RuleSelector::A),
-            Linter::Flake8Comprehensions => Prefixes::Single(RuleSelector::C4),
-            Linter::Flake8Datetimez => Prefixes::Single(RuleSelector::DTZ),
-            Linter::Flake8Debugger => Prefixes::Single(RuleSelector::T10),
-            Linter::Flake8ErrMsg => Prefixes::Single(RuleSelector::EM),
-            Linter::Flake8ImplicitStrConcat => Prefixes::Single(RuleSelector::ISC),
-            Linter::Flake8ImportConventions => Prefixes::Single(RuleSelector::ICN),
-            Linter::Flake8Print => Prefixes::Single(RuleSelector::T20),
-            Linter::Flake8PytestStyle => Prefixes::Single(RuleSelector::PT),
-            Linter::Flake8Quotes => Prefixes::Single(RuleSelector::Q),
-            Linter::Flake8Return => Prefixes::Single(RuleSelector::RET),
-            Linter::Flake8Simplify => Prefixes::Single(RuleSelector::SIM),
-            Linter::Flake8TidyImports => Prefixes::Single(RuleSelector::TID),
-            Linter::Flake8UnusedArguments => Prefixes::Single(RuleSelector::ARG),
-            Linter::Isort => Prefixes::Single(RuleSelector::I),
-            Linter::McCabe => Prefixes::Single(RuleSelector::C90),
-            Linter::PEP8Naming => Prefixes::Single(RuleSelector::N),
-            Linter::PandasVet => Prefixes::Single(RuleSelector::PD),
-            Linter::Pycodestyle => Prefixes::Multiple(vec![
-                (RuleSelector::E, "Error"),
-                (RuleSelector::W, "Warning"),
+            Linter::Pycodestyle => Some(&[
+                LinterCategory("E", "Error", RuleSelector::E),
+                LinterCategory("W", "Warning", RuleSelector::W),
             ]),
-            Linter::Pydocstyle => Prefixes::Single(RuleSelector::D),
-            Linter::Pyflakes => Prefixes::Single(RuleSelector::F),
-            Linter::PygrepHooks => Prefixes::Single(RuleSelector::PGH),
-            Linter::Pylint => Prefixes::Multiple(vec![
-                (RuleSelector::PLC, "Convention"),
-                (RuleSelector::PLE, "Error"),
-                (RuleSelector::PLR, "Refactor"),
-                (RuleSelector::PLW, "Warning"),
+            Linter::Pylint => Some(&[
+                LinterCategory("PLC", "Convention", RuleSelector::PLC),
+                LinterCategory("PLE", "Error", RuleSelector::PLE),
+                LinterCategory("PLR", "Refactor", RuleSelector::PLR),
+                LinterCategory("PLW", "Warning", RuleSelector::PLW),
             ]),
-            Linter::Pyupgrade => Prefixes::Single(RuleSelector::UP),
-            Linter::Flake8Pie => Prefixes::Single(RuleSelector::PIE),
-            Linter::Flake8Commas => Prefixes::Single(RuleSelector::COM),
-            Linter::Flake8NoPep420 => Prefixes::Single(RuleSelector::INP),
-            Linter::Flake8Executable => Prefixes::Single(RuleSelector::EXE),
-            Linter::Flake8TypeChecking => Prefixes::Single(RuleSelector::TYP),
-            Linter::Tryceratops => Prefixes::Single(RuleSelector::TRY),
-            Linter::Ruff => Prefixes::Single(RuleSelector::RUF),
+            _ => None,
         }
     }
 }