Fix cargo audit issues (#1137)

* run cargo outdated and cargo audit

* run cargo upgrade -i && cargo update

* apply fixes due to updating dependencies and running cargo clippy -- -D clippy::all

* clean up; run cargo outdated/audit again; update; build; test

* cargo fmt

---------

Co-authored-by: qtfkwk <qtfkwk+tokei@gmail.com>
This commit is contained in:
qtfkwk 2024-08-19 15:16:15 -04:00 committed by GitHub
parent 9ad137f943
commit 9627a24c9c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 658 additions and 643 deletions

1020
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -32,37 +32,37 @@ lto = "thin"
panic = "abort"
[build-dependencies]
tera = "1.19.0"
ignore = "0.4.20"
serde_json = "1.0.104"
tera = "1.20.0"
ignore = "0.4.22"
serde_json = "1.0.125"
json5 = "0.4.1"
[dependencies]
aho-corasick = "1.0.2"
arbitrary = { version = "1.3.0", features = ["derive"] }
clap = { version = "3", features = ["cargo", "wrap_help"] }
colored = "2.0.4"
crossbeam-channel = "0.5.8"
aho-corasick = "1.1.3"
arbitrary = { version = "1.3.2", features = ["derive"] }
clap = { version = "4", features = ["cargo", "string", "wrap_help"] }
colored = "2.1.0"
crossbeam-channel = "0.5.13"
encoding_rs_io = "0.1.7"
grep-searcher = "0.1.11"
ignore = "0.4.20"
log = "0.4.19"
rayon = "1.7.0"
serde = { version = "1.0.180", features = ["derive", "rc"] }
grep-searcher = "0.1.13"
ignore = "0.4.22"
log = "0.4.22"
rayon = "1.10.0"
serde = { version = "1.0.208", features = ["derive", "rc"] }
term_size = "0.3.2"
toml = "0.7.6"
parking_lot = "0.12.1"
dashmap = { version = "5.5.0", features = ["serde"] }
toml = "0.8.19"
parking_lot = "0.12.3"
dashmap = { version = "6.0.1", features = ["serde"] }
num-format = "0.4.4"
once_cell = "1.18.0"
regex = "1.9.1"
serde_json = "1.0.104"
once_cell = "1.19.0"
regex = "1.10.6"
serde_json = "1.0.125"
etcetera = "0.8.0"
table_formatter = "0.6.1"
[dependencies.env_logger]
features = []
version = "0.10.0"
version = "0.11.5"
[dependencies.hex]
optional = true
@ -74,11 +74,11 @@ version = "0.11.2"
[dependencies.serde_yaml]
optional = true
version = "0.9.25"
version = "0.9.34"
[dev-dependencies]
proptest = "1.2.0"
strum = "0.25.0"
strum_macros = "0.25.1"
tempfile = "3.7.0"
git2 = { version = "0.17.2", default-features = false, features = [] }
proptest = "1.5.0"
strum = "0.26.3"
strum_macros = "0.26.4"
tempfile = "3.12.0"
git2 = { version = "0.19.0", default-features = false, features = [] }

View File

@ -1,6 +1,6 @@
extern crate ignore;
extern crate serde_json;
extern crate json5;
extern crate serde_json;
use std::ffi::OsStr;
use std::fs;

View File

@ -1,8 +1,6 @@
use std::mem;
use std::process;
use clap::Arg;
use clap::{crate_description, ArgMatches};
use clap::{crate_description, value_parser, Arg, ArgAction, ArgMatches};
use colored::Colorize;
use tokei::{Config, LanguageType, Sort};
@ -59,8 +57,8 @@ pub struct Cli {
impl Cli {
pub fn from_args() -> Self {
let matches = clap::App::new("tokei")
.version(&*crate_version())
let matches = clap::Command::new("tokei")
.version(crate_version())
.author("Erin P. <xampprocky@gmail.com> + Contributors")
.about(concat!(
crate_description!(),
@ -71,7 +69,7 @@ impl Cli {
Arg::new("columns")
.long("columns")
.short('c')
.takes_value(true)
.value_parser(value_parser!(usize))
.conflicts_with("output")
.help(
"Sets a strict column width of the output, only available for \
@ -82,8 +80,7 @@ impl Cli {
Arg::new("exclude")
.long("exclude")
.short('e')
.takes_value(true)
.multiple_values(true)
.num_args(0..)
.help("Ignore all files & directories matching the pattern."),
)
.arg(
@ -96,7 +93,6 @@ impl Cli {
Arg::new("file_input")
.long("input")
.short('i')
.takes_value(true)
.help(
"Gives statistics from a previous tokei run. Can be given a file path, \
or \"stdin\" to read from stdin.",
@ -109,7 +105,7 @@ impl Cli {
)
.arg(
Arg::new("input")
.min_values(1)
.num_args(1..)
.conflicts_with("languages")
.help("The path(s) to the file or directory to be counted.(default current directory)"),
)
@ -140,8 +136,13 @@ impl Cli {
Arg::new("output")
.long("output")
.short('o')
.takes_value(true)
.possible_values(Format::all())
.value_parser(|x: &str| {
if Format::all().contains(&x) {
Ok(x.to_string())
} else {
Err(format!("Invalid output format: {x:?}"))
}
})
.help(
"Outputs Tokei in a specific format. Compile with additional features for \
more format support.",
@ -150,8 +151,7 @@ impl Cli {
.arg(
Arg::new("streaming")
.long("streaming")
.takes_value(true)
.possible_values(["simple", "json"])
.value_parser(["simple", "json"])
.ignore_case(true)
.help(
"prints the (language, path, lines, blanks, code, comments) records as \
@ -162,8 +162,7 @@ impl Cli {
Arg::new("sort")
.long("sort")
.short('s')
.takes_value(true)
.possible_values(["files", "lines", "blanks", "code", "comments"])
.value_parser(["files", "lines", "blanks", "code", "comments"])
.ignore_case(true)
.conflicts_with("rsort")
.help("Sort languages based on column"),
@ -172,8 +171,7 @@ impl Cli {
Arg::new("rsort")
.long("rsort")
.short('r')
.takes_value(true)
.possible_values(["files", "lines", "blanks", "code", "comments"])
.value_parser(["files", "lines", "blanks", "code", "comments"])
.ignore_case(true)
.conflicts_with("sort")
.help("Reverse sort languages based on column"),
@ -182,7 +180,7 @@ impl Cli {
Arg::new("types")
.long("types")
.short('t')
.takes_value(true)
.action(ArgAction::Append)
.help(
"Filters output by language type, separated by a comma. i.e. \
-t=Rust,Markdown",
@ -198,8 +196,7 @@ impl Cli {
Arg::new("num_format_style")
.long("num-format")
.short('n')
.takes_value(true)
.possible_values(NumberFormatStyle::all())
.value_parser(["commas", "dots", "plain", "underscores"])
.conflicts_with("output")
.help(
"Format of printed numbers, i.e., plain (1234, default), \
@ -211,7 +208,7 @@ impl Cli {
Arg::new("verbose")
.long("verbose")
.short('v')
.multiple_occurrences(true)
.action(ArgAction::Count)
.help(
"Set log output level:
1: to show unknown file extensions,
@ -221,26 +218,29 @@ impl Cli {
)
.get_matches();
let columns = matches.value_of("columns").map(parse_or_exit::<usize>);
let files = matches.is_present("files");
let hidden = matches.is_present("hidden");
let no_ignore = matches.is_present("no_ignore");
let no_ignore_parent = matches.is_present("no_ignore_parent");
let no_ignore_dot = matches.is_present("no_ignore_dot");
let no_ignore_vcs = matches.is_present("no_ignore_vcs");
let print_languages = matches.is_present("languages");
let verbose = matches.occurrences_of("verbose");
let compact = matches.is_present("compact");
let types = matches.value_of("types").map(|e| {
e.split(',')
.map(str::parse::<LanguageType>)
.filter_map(Result::ok)
.collect()
let columns = matches.get_one::<usize>("columns").cloned();
let files = matches.get_flag("files");
let hidden = matches.get_flag("hidden");
let no_ignore = matches.get_flag("no_ignore");
let no_ignore_parent = matches.get_flag("no_ignore_parent");
let no_ignore_dot = matches.get_flag("no_ignore_dot");
let no_ignore_vcs = matches.get_flag("no_ignore_vcs");
let print_languages = matches.get_flag("languages");
let verbose = matches.get_count("verbose") as u64;
let compact = matches.get_flag("compact");
let types = matches.get_many("types").map(|e| {
e.flat_map(|x: &String| {
x.split(',')
.map(str::parse::<LanguageType>)
.filter_map(Result::ok)
.collect::<Vec<_>>()
})
.collect()
});
let num_format_style: NumberFormatStyle = matches
.value_of("num_format_style")
.map(parse_or_exit::<NumberFormatStyle>)
.get_one::<NumberFormatStyle>("num_format_style")
.cloned()
.unwrap_or_default();
let number_format = match num_format_style.get_format() {
@ -253,18 +253,20 @@ impl Cli {
// Sorting category should be restricted by clap but parse before we do
// work just in case.
let sort = matches
.value_of("sort")
.or_else(|| matches.value_of("rsort"))
.map(parse_or_exit::<Sort>);
let sort_reverse = matches.value_of("rsort").is_some();
let (sort, sort_reverse) = if let Some(sort) = matches.get_one::<Sort>("sort") {
(Some(*sort), false)
} else {
let sort = matches.get_one::<Sort>("rsort");
(sort.cloned(), sort.is_some())
};
// Format category is overly accepting by clap (so the user knows what
// is supported) but this will fail if support is not compiled in and
// give a useful error to the user.
let output = matches.value_of("output").map(parse_or_exit::<Format>);
let output = matches.get_one("output").cloned();
let streaming = matches
.value_of("streaming")
.get_one("streaming")
.cloned()
.map(parse_or_exit::<Streaming>);
crate::cli_utils::setup_logger(verbose);
@ -295,20 +297,20 @@ impl Cli {
}
pub fn file_input(&self) -> Option<&str> {
self.matches.value_of("file_input")
self.matches.get_one("file_input").cloned()
}
pub fn ignored_directories(&self) -> Vec<&str> {
let mut ignored_directories: Vec<&str> = Vec::new();
if let Some(user_ignored) = self.matches.values_of("exclude") {
if let Some(user_ignored) = self.matches.get_many::<&str>("exclude") {
ignored_directories.extend(user_ignored);
}
ignored_directories
}
pub fn input(&self) -> Vec<&str> {
match self.matches.values_of("input") {
Some(vs) => vs.collect(),
match self.matches.get_many::<&str>("input") {
Some(vs) => vs.cloned().collect(),
None => vec!["."],
}
}
@ -330,14 +332,18 @@ impl Cli {
padding = Padding::NONE,
width = Some(lang_w)
)
.with_formatter(vec![table_formatter::table::FormatterFunc::Normal(Colorize::bold)]),
.with_formatter(vec![table_formatter::table::FormatterFunc::Normal(
Colorize::bold,
)]),
cell!(
"Extensions",
align = Align::Left,
padding = Padding::new(3, 0),
width = Some(suffix_w)
)
.with_formatter(vec![table_formatter::table::FormatterFunc::Normal(Colorize::bold)]),
.with_formatter(vec![table_formatter::table::FormatterFunc::Normal(
Colorize::bold,
)]),
];
let content = LanguageType::list()
.iter()
@ -429,7 +435,7 @@ impl Cli {
_ => None,
};
config.types = mem::replace(&mut self.types, None).or(config.types);
config.types = self.types.take().or(config.types);
config
}

View File

@ -348,8 +348,7 @@ impl<W: Write> Printer<W> {
if self.list_files {
self.print_subrow()?;
let mut reports: Vec<&Report> =
language.reports.iter().map(|report| &*report).collect();
let mut reports: Vec<&Report> = language.reports.iter().collect();
if !is_sorted {
reports.sort_by(|&a, &b| a.name.cmp(&b.name));
}

View File

@ -135,7 +135,6 @@ impl Config {
no_ignore_vcs: current_dir
.no_ignore_vcs
.or(home_dir.no_ignore_vcs.or(conf_dir.no_ignore_vcs)),
..Self::default()
}
}
}

View File

@ -29,7 +29,7 @@ macro_rules! supported_formats {
///
/// To enable all formats compile with the `all` feature.
#[cfg_attr(test, derive(strum_macros::EnumIter))]
#[derive(Debug)]
#[derive(Debug, Clone)]
pub enum Format {
Json,
$(

View File

@ -79,7 +79,6 @@ pub(crate) struct SimpleCapture<'a> {
starts: Option<Box<[Capture<'a>]>>,
}
impl<'a> HtmlLike<'a> {
pub fn start_script_in_range(
&'a self,
@ -111,7 +110,12 @@ impl<'a> SimpleCapture<'a> {
filter_range(self.starts.as_ref()?, start, end).and_then(|mut it| it.next())
}
fn make_capture(regex: &Regex, lines: &'a [u8], start: usize, end: usize) -> Option<SimpleCapture<'a>> {
fn make_capture(
regex: &Regex,
lines: &'a [u8],
start: usize,
end: usize,
) -> Option<SimpleCapture<'a>> {
let capture = SimpleCapture {
starts: save_captures(regex, lines, start, end),
};
@ -156,12 +160,14 @@ impl<'a> RegexCache<'a> {
pub(crate) fn build(lang: LanguageType, lines: &'a [u8], start: usize, end: usize) -> Self {
let inner = match lang {
LanguageType::Markdown | LanguageType::UnrealDeveloperMarkdown => {
SimpleCapture::make_capture(&STARTING_MARKDOWN_REGEX, lines, start, end).map(RegexFamily::Markdown)
SimpleCapture::make_capture(&STARTING_MARKDOWN_REGEX, lines, start, end)
.map(RegexFamily::Markdown)
}
LanguageType::Rust => Some(RegexFamily::Rust),
LanguageType::LinguaFranca => {
SimpleCapture::make_capture(&STARTING_LF_BLOCK_REGEX, lines, start, end).map(RegexFamily::LinguaFranca)
},
SimpleCapture::make_capture(&STARTING_LF_BLOCK_REGEX, lines, start, end)
.map(RegexFamily::LinguaFranca)
}
LanguageType::Html
| LanguageType::RubyHtml
| LanguageType::Svelte

View File

@ -26,8 +26,9 @@ include!(concat!(env!("OUT_DIR"), "/language_type.rs"));
impl Serialize for LanguageType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer {
where
S: serde::Serializer,
{
serializer.serialize_str(self.name())
}
}
@ -69,7 +70,7 @@ impl LanguageType {
if self == Jupyter {
return self
.parse_jupyter(text.as_ref(), config)
.unwrap_or_else(CodeStats::new);
.unwrap_or_default();
}
let syntax = {
@ -80,21 +81,16 @@ impl LanguageType {
syntax_mut
};
if let Some(end) = syntax
.shared
.important_syntax
.find(text)
.and_then(|m| {
// Get the position of the last line before the important
// syntax.
text[..=m.start()]
.iter()
.rev()
.position(|&c| c == b'\n')
.filter(|&p| p != 0)
.map(|p| m.start() - p)
})
{
if let Some(end) = syntax.shared.important_syntax.find(text).and_then(|m| {
// Get the position of the last line before the important
// syntax.
text[..=m.start()]
.iter()
.rev()
.position(|&c| c == b'\n')
.filter(|&p| p != 0)
.map(|p| m.start() - p)
}) {
let (skippable_text, rest) = text.split_at(end + 1);
let is_fortran = syntax.shared.is_fortran;
let is_literate = syntax.shared.is_literate;
@ -291,22 +287,22 @@ impl LanguageType {
/// This is the first thing in the file (although there may be comments before).
fn find_lf_target_language(&self, bytes: &[u8]) -> Option<LanguageType> {
use regex::bytes::Regex;
static LF_TARGET_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)\btarget\s+(\w+)\s*($|;|\{)"#).unwrap());
LF_TARGET_REGEX.captures(bytes)
.and_then(|captures| {
let name = captures.get(1).unwrap().as_bytes();
if name == b"CCpp" {
// this is a special alias for the C target in LF
Some(C)
} else {
let name_str = &String::from_utf8_lossy(name);
let by_name = LanguageType::from_name(&name_str);
if by_name.is_none() {
trace!("LF target not recognized: {}", name_str);
}
by_name
static LF_TARGET_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"(?m)\btarget\s+(\w+)\s*($|;|\{)"#).unwrap());
LF_TARGET_REGEX.captures(bytes).and_then(|captures| {
let name = captures.get(1).unwrap().as_bytes();
if name == b"CCpp" {
// this is a special alias for the C target in LF
Some(C)
} else {
let name_str = &String::from_utf8_lossy(name);
let by_name = LanguageType::from_name(name_str);
if by_name.is_none() {
trace!("LF target not recognized: {}", name_str);
}
})
by_name
}
})
}
}
@ -321,11 +317,14 @@ mod tests {
assert!(LanguageType::Rust.allows_nested());
}
fn assert_stats(stats: &CodeStats, blanks: usize, code: usize, comments: usize) {
assert_eq!(stats.blanks, blanks, "expected {} blank lines", blanks);
assert_eq!(stats.code, code, "expected {} code lines", code);
assert_eq!(stats.comments, comments, "expected {} comment lines", comments);
assert_eq!(
stats.comments, comments,
"expected {} comment lines",
comments
);
}
#[test]
@ -345,8 +344,7 @@ mod tests {
let file_text =
fs::read_to_string(Path::new("tests").join("data").join("linguafranca.lf")).unwrap();
let stats = LinguaFranca
.parse_from_str(file_text, &Config::default());
let stats = LinguaFranca.parse_from_str(file_text, &Config::default());
assert_stats(&stats, 9, 11, 8);

View File

@ -294,7 +294,7 @@ impl LanguageType {
match fsutils::get_extension(entry) {
Some(extension) => LanguageType::from_file_extension(extension.as_str()),
None => LanguageType::from_shebang(&entry),
None => LanguageType::from_shebang(entry),
}
}

View File

@ -7,10 +7,11 @@ use log::Level::Trace;
use once_cell::sync::Lazy;
use super::embedding::{
RegexCache, RegexFamily, ENDING_MARKDOWN_REGEX, ENDING_LF_BLOCK_REGEX, END_SCRIPT, END_STYLE, END_TEMPLATE
RegexCache, RegexFamily, ENDING_LF_BLOCK_REGEX, ENDING_MARKDOWN_REGEX, END_SCRIPT, END_STYLE,
END_TEMPLATE,
};
use crate::{stats::CodeStats, utils::ext::SliceExt, Config, LanguageType};
use crate::LanguageType::LinguaFranca;
use crate::{stats::CodeStats, utils::ext::SliceExt, Config, LanguageType};
/// Tracks the syntax of the language as well as the current state in the file.
/// Current has what could be considered three types of mode.
@ -30,7 +31,7 @@ pub(crate) struct SyntaxCounter {
pub(crate) quote_is_doc_quote: bool,
pub(crate) stack: Vec<&'static str>,
pub(crate) quote_is_verbatim: bool,
pub(crate) lf_embedded_language: Option<LanguageType>
pub(crate) lf_embedded_language: Option<LanguageType>,
}
#[derive(Clone, Debug)]
@ -449,16 +450,14 @@ impl SyntaxCounter {
let start_of_code = opening_fence.end();
let closing_fence = ENDING_LF_BLOCK_REGEX.find(&lines[start_of_code..]);
let end_of_code = closing_fence
.map_or_else(|| lines.len(),
|fence| start_of_code + fence.start());
.map_or_else(|| lines.len(), |fence| start_of_code + fence.start());
let block_contents = &lines[start_of_code..end_of_code];
trace!(
"LF block: {:?}",
String::from_utf8_lossy(block_contents)
trace!("LF block: {:?}", String::from_utf8_lossy(block_contents));
let stats = self.get_lf_target_language().parse_from_slice(
block_contents.trim_first_and_last_line_of_whitespace(),
config,
);
let stats =
self.get_lf_target_language().parse_from_slice(block_contents.trim_first_and_last_line_of_whitespace(), config);
trace!("-> stats: {:?}", stats);
Some(FileContext::new(