mirror of https://github.com/astral-sh/ruff

refactor: Drop RuleSelector::codes in favor of IntoIterator impl

This commit is contained in:
parent 9f14e7c830
commit 4f12b31dc8
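The commit replaces the generated `RuleSelector::codes()` method, which returned a `Vec<Rule>`, with an `IntoIterator` implementation on `&RuleSelector`, so call sites can iterate a selector directly (`for rule in selector`). A minimal sketch of the shape of the change, with a hypothetical two-rule selector standing in for ruff's generated enums:

    // Sketch only: `Rule` and `RuleSelector` stand in for the generated enums.
    #[derive(Debug, Clone, Copy)]
    enum Rule {
        F401,
        F841,
    }

    enum RuleSelector {
        F,
    }

    impl IntoIterator for &RuleSelector {
        type Item = Rule;
        type IntoIter = std::vec::IntoIter<Self::Item>;

        fn into_iter(self) -> Self::IntoIter {
            // Each arm builds a Vec, so every arm yields the same concrete
            // iterator type and `IntoIter` stays nameable.
            match self {
                RuleSelector::F => vec![Rule::F401, Rule::F841].into_iter(),
            }
        }
    }

    fn main() {
        // Before: for rule in selector.codes() { ... }
        // After: the selector itself is the iterable.
        for rule in &RuleSelector::F {
            println!("{rule:?}");
        }
    }
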
@@ -25,7 +25,7 @@ fn generate_table(table_out: &mut String, selector: &RuleSelector) {
     table_out.push('\n');
     table_out.push_str("| ---- | ---- | ------- | --- |");
     table_out.push('\n');
-    for rule in selector.codes() {
+    for rule in selector {
         let fix_token = match rule.autofixable() {
             None => "",
             Some(_) => "🛠",
@@ -184,7 +184,7 @@ fn generate_impls<'a>(
     prefix_to_codes: &BTreeMap<Ident, BTreeSet<String>>,
     variant_name: impl Fn(&str) -> &'a Ident,
 ) -> proc_macro2::TokenStream {
-    let codes_match_arms = prefix_to_codes.iter().map(|(prefix, codes)| {
+    let into_iter_match_arms = prefix_to_codes.iter().map(|(prefix, codes)| {
         let codes = codes.iter().map(|code| {
             let rule_variant = variant_name(code);
             quote! {
@@ -198,12 +198,12 @@ fn generate_impls<'a>(
                 crate::warn_user_once!(
                     "`{}` has been remapped to `{}`", #prefix_str, #target
                 );
-                vec![#(#codes),*]
+                vec![#(#codes),*].into_iter()
             }
         }
     } else {
         quote! {
-            #prefix_ident::#prefix => vec![#(#codes),*],
+            #prefix_ident::#prefix => vec![#(#codes),*].into_iter(),
         }
     }
 });
@@ -248,15 +248,6 @@ fn generate_impls<'a>(
 
     quote! {
         impl #prefix_ident {
-            pub fn codes(&self) -> Vec<#rule_type> {
-                use colored::Colorize;
-
-                #[allow(clippy::match_same_arms)]
-                match self {
-                    #(#codes_match_arms)*
-                }
-            }
-
             pub fn specificity(&self) -> SuffixLength {
                 #[allow(clippy::match_same_arms)]
                 match self {
@@ -265,6 +256,20 @@ fn generate_impls<'a>(
             }
         }
 
+        impl IntoIterator for &#prefix_ident {
+            type Item = #rule_type;
+            type IntoIter = ::std::vec::IntoIter<Self::Item>;
+
+            fn into_iter(self) -> Self::IntoIter {
+                use colored::Colorize;
+
+                #[allow(clippy::match_same_arms)]
+                match self {
+                    #(#into_iter_match_arms)*
+                }
+            }
+        }
+
         pub const CATEGORIES: &[#prefix_ident] = &[#(#categories)*];
     }
 }
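For a concrete prefix with a deprecated remapping, the code this `quote!` block generates would expand to roughly the following. This is a hand-written approximation, not actual macro output: the `U001`/`UP001` names are illustrative, and ruff's `warn_user_once!` is stubbed with a plain `eprintln!` so the sketch is self-contained.

    // Stub for the crate's warn-once macro, just for this sketch.
    macro_rules! warn_user_once {
        ($($arg:tt)*) => {
            eprintln!("warning: {}", format!($($arg)*))
        };
    }

    #[derive(Debug)]
    enum Rule {
        UP001,
    }

    enum RuleSelector {
        U001,
    }

    impl IntoIterator for &RuleSelector {
        type Item = Rule;
        type IntoIter = ::std::vec::IntoIter<Self::Item>;

        fn into_iter(self) -> Self::IntoIter {
            match self {
                // A remapped prefix warns once, then yields the new rule(s).
                RuleSelector::U001 => {
                    warn_user_once!("`{}` has been remapped to `{}`", "U001", "UP001");
                    vec![Rule::UP001].into_iter()
                }
            }
        }
    }

    fn main() {
        for rule in &RuleSelector::U001 {
            println!("{rule:?}");
        }
    }
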
@@ -275,9 +275,8 @@ pub fn infer_plugins_from_codes(selectors: &BTreeSet<RuleSelector>) -> Vec<Plugi
         .filter(|plugin| {
             for selector in selectors {
                 if selector
-                    .codes()
-                    .iter()
-                    .any(|rule| plugin.selector().codes().contains(rule))
+                    .into_iter()
+                    .any(|rule| plugin.selector().into_iter().any(|r| r == rule))
                 {
                     return true;
                 }
@@ -19,7 +19,7 @@ mod tests {
 
     fn rule_code(contents: &str, expected: &[Rule]) -> Result<()> {
         let contents = dedent(contents);
-        let settings = settings::Settings::for_rules(RuleSelector::PD.codes());
+        let settings = settings::Settings::for_rules(&RuleSelector::PD);
         let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
         let locator = Locator::new(&contents);
         let stylist = Stylist::from_contents(&contents, &locator);
@@ -209,7 +209,7 @@ mod tests {
     /// Note that all tests marked with `#[ignore]` should be considered TODOs.
     fn flakes(contents: &str, expected: &[Rule]) -> Result<()> {
         let contents = dedent(contents);
-        let settings = settings::Settings::for_rules(RuleSelector::F.codes());
+        let settings = settings::Settings::for_rules(&RuleSelector::F);
         let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
         let locator = Locator::new(&contents);
         let stylist = Stylist::from_contents(&contents, &locator);
@@ -52,7 +52,7 @@ pub static EXCLUDE: Lazy<Vec<FilePattern>> = Lazy::new(|| {
 impl Default for Settings {
     fn default() -> Self {
         Self {
-            rules: PREFIXES.iter().flat_map(RuleSelector::codes).into(),
+            rules: PREFIXES.iter().flat_map(IntoIterator::into_iter).into(),
             allowed_confusables: FxHashSet::from_iter([]).into(),
             builtins: vec![],
             dummy_variable_rgx: DUMMY_VARIABLE_RGX.clone().into(),
@@ -210,9 +210,9 @@ impl Settings {
     }
 
     #[cfg(test)]
-    pub fn for_rules(rule_codes: Vec<Rule>) -> Self {
+    pub fn for_rules(rules: impl IntoIterator<Item = Rule>) -> Self {
         Self {
-            rules: rule_codes.into(),
+            rules: rules.into(),
             ..Settings::default()
         }
     }
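With the relaxed bound, the test constructor accepts anything iterable over `Rule`, which is what lets the tests above pass `&RuleSelector::PD` and `&RuleSelector::F` directly. A self-contained sketch of the pattern; here the rule table is just a `Vec<Rule>` built with `collect()`, whereas the diff's `rules.into()` goes through the crate's own conversion into its rule table type:

    // Sketch only: names and rule codes are illustrative.
    #[derive(Debug, Clone, Copy)]
    enum Rule {
        PD002,
        PD015,
    }

    enum RuleSelector {
        PD,
    }

    impl IntoIterator for &RuleSelector {
        type Item = Rule;
        type IntoIter = std::vec::IntoIter<Rule>;

        fn into_iter(self) -> Self::IntoIter {
            match self {
                RuleSelector::PD => vec![Rule::PD002, Rule::PD015].into_iter(),
            }
        }
    }

    struct Settings {
        rules: Vec<Rule>,
    }

    impl Settings {
        // Accept any source of rules instead of a concrete Vec<Rule>.
        fn for_rules(rules: impl IntoIterator<Item = Rule>) -> Self {
            Self {
                rules: rules.into_iter().collect(),
            }
        }
    }

    fn main() {
        let from_vec = Settings::for_rules(vec![Rule::PD002]); // still compiles
        let from_selector = Settings::for_rules(&RuleSelector::PD);
        assert_eq!(from_vec.rules.len(), 1);
        assert_eq!(from_selector.rules.len(), 2);
    }
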
@@ -323,12 +323,12 @@ fn resolve_codes<'a>(specs: impl IntoIterator<Item = RuleCodeSpec<'a>>) -> FxHas
     ] {
         for selector in spec.select {
             if selector.specificity() == specificity {
-                rules.extend(selector.codes());
+                rules.extend(selector);
             }
         }
         for selector in spec.ignore {
             if selector.specificity() == specificity {
-                for rule in selector.codes() {
+                for rule in selector {
                     rules.remove(&rule);
                 }
             }
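Both changed call sites in this hunk lean on the same property: `Extend::extend` and `for` loops take any `IntoIterator`, so a selector can be passed where a `Vec<Rule>` used to be materialized. A minimal demonstration with std's `HashSet` standing in for `FxHashSet`:

    use std::collections::HashSet;

    // Sketch only: stand-ins for the generated enums.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    enum Rule {
        F401,
        F841,
    }

    enum RuleSelector {
        F,
    }

    impl IntoIterator for &RuleSelector {
        type Item = Rule;
        type IntoIter = std::vec::IntoIter<Rule>;

        fn into_iter(self) -> Self::IntoIter {
            match self {
                RuleSelector::F => vec![Rule::F401, Rule::F841].into_iter(),
            }
        }
    }

    fn main() {
        let mut rules: HashSet<Rule> = HashSet::new();
        // `extend` accepts any IntoIterator<Item = Rule>.
        rules.extend(&RuleSelector::F);
        // So does a `for` loop, which desugars through IntoIterator.
        for rule in &RuleSelector::F {
            rules.remove(&rule);
        }
        assert!(rules.is_empty());
    }
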
@@ -94,7 +94,7 @@ pub struct PerFileIgnore {
 
 impl PerFileIgnore {
     pub fn new(pattern: String, prefixes: &[RuleSelector], project_root: Option<&Path>) -> Self {
-        let rules: FxHashSet<_> = prefixes.iter().flat_map(RuleSelector::codes).collect();
+        let rules: FxHashSet<_> = prefixes.iter().flat_map(IntoIterator::into_iter).collect();
         let path = Path::new(&pattern);
         let absolute = match project_root {
             Some(project_root) => fs::normalize_path_to(path, project_root),