Mirror of https://github.com/RGBCube/uutils-coreutils, synced 2025-07-27 19:17:43 +00:00

Merge pull request #8201 from gabelluardo/fix-clippy-rules: Prepare for clippy pedantic rules

Commit 0e564ea5da
34 changed files with 237 additions and 247 deletions
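Most of the hunks below apply the same handful of clippy pedantic fixes: removing a redundant `else` after an early `return`/`break`/`continue`, inlining variables into format strings, and consolidating match arms. A minimal illustrative sketch of the `redundant_else` rewrite (the function and names here are hypothetical, not taken from this diff):

fn check(required: bool) -> Result<(), String> {
    // Before: the `else` is redundant because the `if` arm returns early.
    // if required {
    //     return Err("flag is required".to_string());
    // } else {
    //     println!("flag not required");
    // }

    // After: early return, then the fallthrough path at the same nesting level.
    if required {
        return Err("flag is required".to_string());
    }
    println!("flag not required");
    Ok(())
}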
@@ -1137,9 +1137,8 @@ impl Options {
CpError::Error("SELinux was not enabled during the compile time!".to_owned());
if required {
return Err(selinux_disabled_error);
} else {
show_error_if_needed(&selinux_disabled_error);
}
show_error_if_needed(&selinux_disabled_error);
}

// Extract the SELinux related flags and options
@@ -1912,10 +1911,9 @@ fn handle_existing_dest(
source.quote()
)
.into());
} else {
is_dest_removed = dest.is_symlink();
backup_dest(dest, &backup_path, is_dest_removed)?;
}
is_dest_removed = dest.is_symlink();
backup_dest(dest, &backup_path, is_dest_removed)?;
}
if !is_dest_removed {
delete_dest_if_needed_and_allowed(
@@ -2182,21 +2180,21 @@ fn handle_copy_mode(
let dest_time = dest_metadata.modified()?;
if src_time <= dest_time {
return Ok(PerformedAction::Skipped);
} else {
options.overwrite.verify(dest, options.debug)?;

copy_helper(
source,
dest,
options,
context,
source_is_symlink,
source_is_fifo,
symlinked_files,
#[cfg(unix)]
source_is_stream,
)?;
}

options.overwrite.verify(dest, options.debug)?;

copy_helper(
source,
dest,
options,
context,
source_is_symlink,
source_is_fifo,
symlinked_files,
#[cfg(unix)]
source_is_stream,
)?;
}
}
} else {
@@ -429,9 +429,8 @@ fn get_delimiters(matches: &ArgMatches) -> UResult<(Delimiter, Option<&[u8]>)> {
1,
get_message("cut-error-delimiter-must-be-single-character"),
));
} else {
Delimiter::from(os_string)
}
Delimiter::from(os_string)
}
}
None => {
@@ -34,9 +34,8 @@ impl Matcher for ExactMatcher<'_> {
|| haystack[match_idx + 1..].starts_with(&self.needle[1..])
{
return Some((match_idx, match_idx + self.needle.len()));
} else {
pos = match_idx + 1;
}
pos = match_idx + 1;
}
None => {
return None;
@@ -204,81 +204,81 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
};

return set_system_datetime(date);
} else {
// Get the current time, either in the local time zone or UTC.
let now = if settings.utc {
Timestamp::now().to_zoned(TimeZone::UTC)
} else {
Zoned::now()
};
}

// Iterate over all dates - whether it's a single date or a file.
let dates: Box<dyn Iterator<Item = _>> = match settings.date_source {
DateSource::Custom(ref input) => {
let date = parse_date(input);
let iter = std::iter::once(date);
Box::new(iter)
}
DateSource::Human(relative_time) => {
// Double check the result is overflow or not of the current_time + relative_time
// it may cause a panic of chrono::datetime::DateTime add
match now.checked_add(relative_time) {
Ok(date) => {
let iter = std::iter::once(Ok(date));
Box::new(iter)
}
Err(_) => {
return Err(USimpleError::new(
1,
format!("invalid date {relative_time}"),
));
}
// Get the current time, either in the local time zone or UTC.
let now = if settings.utc {
Timestamp::now().to_zoned(TimeZone::UTC)
} else {
Zoned::now()
};

// Iterate over all dates - whether it's a single date or a file.
let dates: Box<dyn Iterator<Item = _>> = match settings.date_source {
DateSource::Custom(ref input) => {
let date = parse_date(input);
let iter = std::iter::once(date);
Box::new(iter)
}
DateSource::Human(relative_time) => {
// Double check the result is overflow or not of the current_time + relative_time
// it may cause a panic of chrono::datetime::DateTime add
match now.checked_add(relative_time) {
Ok(date) => {
let iter = std::iter::once(Ok(date));
Box::new(iter)
}
}
DateSource::Stdin => {
let lines = BufReader::new(std::io::stdin()).lines();
let iter = lines.map_while(Result::ok).map(parse_date);
Box::new(iter)
}
DateSource::File(ref path) => {
if path.is_dir() {
Err(_) => {
return Err(USimpleError::new(
2,
format!("expected file, got directory {}", path.quote()),
1,
format!("invalid date {relative_time}"),
));
}
let file = File::open(path)
.map_err_context(|| path.as_os_str().to_string_lossy().to_string())?;
let lines = BufReader::new(file).lines();
let iter = lines.map_while(Result::ok).map(parse_date);
Box::new(iter)
}
DateSource::Now => {
let iter = std::iter::once(Ok(now));
Box::new(iter)
}
DateSource::Stdin => {
let lines = BufReader::new(std::io::stdin()).lines();
let iter = lines.map_while(Result::ok).map(parse_date);
Box::new(iter)
}
DateSource::File(ref path) => {
if path.is_dir() {
return Err(USimpleError::new(
2,
format!("expected file, got directory {}", path.quote()),
));
}
};
let file = File::open(path)
.map_err_context(|| path.as_os_str().to_string_lossy().to_string())?;
let lines = BufReader::new(file).lines();
let iter = lines.map_while(Result::ok).map(parse_date);
Box::new(iter)
}
DateSource::Now => {
let iter = std::iter::once(Ok(now));
Box::new(iter)
}
};

let format_string = make_format_string(&settings);
let format_string = make_format_string(&settings);

// Format all the dates
for date in dates {
match date {
// TODO: Switch to lenient formatting.
Ok(date) => match strtime::format(format_string, &date) {
Ok(s) => println!("{s}"),
Err(e) => {
return Err(USimpleError::new(
1,
format!("invalid format {} ({e})", format_string),
));
}
},
Err((input, _err)) => show!(USimpleError::new(
1,
format!("invalid date {}", input.quote())
)),
}
// Format all the dates
for date in dates {
match date {
// TODO: Switch to lenient formatting.
Ok(date) => match strtime::format(format_string, &date) {
Ok(s) => println!("{s}"),
Err(e) => {
return Err(USimpleError::new(
1,
format!("invalid format {format_string} ({e})"),
));
}
},
Err((input, _err)) => show!(USimpleError::new(
1,
format!("invalid date {}", input.quote())
)),
}
}
@@ -437,7 +437,7 @@ impl Read for Input<'_> {
}
}
Ok(len) => return Ok(len),
Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
Err(e) if e.kind() == io::ErrorKind::Interrupted => (),
Err(_) if self.settings.iconv.noerror => return Ok(base_idx),
Err(e) => return Err(e),
}

@@ -861,7 +861,7 @@ impl<'a> Output<'a> {
return Ok(base_idx);
}
}
Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
Err(e) if e.kind() == io::ErrorKind::Interrupted => (),
Err(e) => return Err(e),
}
}
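The `continue` to `()` changes in these read loops drop a `continue` that was the last statement of a match arm at the end of the loop body (clippy's `needless_continue`). A minimal sketch of that shape, assuming nothing beyond a plain retry loop over `std::io::Read` (names are illustrative, not from the diff):

use std::io::{ErrorKind, Read, Result};

fn read_retrying(reader: &mut impl Read, buf: &mut [u8]) -> Result<usize> {
    loop {
        match reader.read(buf) {
            Ok(n) => return Ok(n),
            // `()` is enough here: falling off the end of the match re-enters the loop,
            // so an explicit `continue` adds nothing.
            Err(e) if e.kind() == ErrorKind::Interrupted => (),
            Err(e) => return Err(e),
        }
    }
}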
@@ -33,27 +33,27 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {

if dirnames.is_empty() {
return Err(UUsageError::new(1, get_message("dirname-missing-operand")));
} else {
for path in &dirnames {
let p = Path::new(path);
match p.parent() {
Some(d) => {
if d.components().next().is_none() {
print!(".");
} else {
print_verbatim(d).unwrap();
}
}
None => {
if p.is_absolute() || path == "/" {
print!("/");
} else {
print!(".");
}
}

for path in &dirnames {
let p = Path::new(path);
match p.parent() {
Some(d) => {
if d.components().next().is_none() {
print!(".");
} else {
print_verbatim(d).unwrap();
}
}
None => {
if p.is_absolute() || path == "/" {
print!("/");
} else {
print!(".");
}
}
print!("{line_ending}");
}
print!("{line_ending}");
}

Ok(())
src/uu/env/src/env.rs (vendored, 8 changed lines)
@@ -594,9 +594,11 @@ impl EnvAppData {
match cmd.status() {
Ok(exit) if !exit.success() => {
#[cfg(unix)]
if let Some(exit_code) = exit.code() {
return Err(exit_code.into());
} else {
{
if let Some(exit_code) = exit.code() {
return Err(exit_code.into());
}

// `exit.code()` returns `None` on Unix when the process is terminated by a signal.
// See std::os::unix::process::ExitStatusExt for more information. This prints out
// the interrupted process and the signal it received.
@@ -464,7 +464,6 @@ fn expand(options: &Options) -> UResult<()> {
Err(e) => {
show_error!("{e}");
set_exit_code(1);
continue;
}
}
}
@@ -175,9 +175,8 @@ fn break_knuth_plass<'a, T: Clone + Iterator<Item = &'a WordInfo<'a>>>(
fresh = true;
}
break;
} else {
write_with_spaces(word, slen, args.ostream)?;
}
write_with_spaces(word, slen, args.ostream)?;
}
Ok((prev_punct, fresh))
},
@@ -26,7 +26,6 @@ pub fn parse_obsolete(src: &str) -> Option<Result<Vec<OsString>, ParseError>> {
} else if c == '+' && plus_possible {
plus_possible = false;
num_start += 1;
continue;
} else {
num_end = n;
last_char = c;
@@ -31,7 +31,7 @@ impl TakeAllBuffer {
self.buffer.truncate(n);
return Ok(n);
}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.kind() == ErrorKind::Interrupted => (),
Err(e) => return Err(e),
}
}
@@ -1091,13 +1091,13 @@ impl Config {
Dereference::DirArgs
};

let tab_size = if !needs_color {
let tab_size = if needs_color {
Some(0)
} else {
options
.get_one::<String>(options::format::TAB_SIZE)
.and_then(|size| size.parse::<usize>().ok())
.or_else(|| std::env::var("TABSIZE").ok().and_then(|s| s.parse().ok()))
} else {
Some(0)
}
.unwrap_or(SPACES_IN_TAB);

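The `ls` hunk above inverts a negated condition so the positive case comes first, which is what clippy's `if_not_else` lint asks for. A small, purely illustrative example (names and defaults are made up, not from the diff):

fn tab_width(use_tabs: bool, configured: Option<usize>) -> usize {
    // Prefer the positive condition; `if !use_tabs { .. } else { .. }`
    // reads backwards and is the form clippy::if_not_else flags.
    if use_tabs {
        configured.unwrap_or(8)
    } else {
        0
    }
}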
@@ -2366,7 +2366,6 @@ fn enter_directory(
err,
e.command_line
));
continue;
}
Ok(rd) => {
if listed_ancestors
@@ -745,7 +745,7 @@ impl<'a> Pager<'a> {
&format!("{}{pattern}{}", Attribute::Reverse, Attribute::Reset),
);
};
self.stdout.write_all(format!("\r{}\n", line).as_bytes())?;
self.stdout.write_all(format!("\r{line}\n").as_bytes())?;
lines_printed += 1;
index += 1;
}

@@ -792,7 +792,7 @@ impl<'a> Pager<'a> {
if percentage >= 100 {
" (END)".to_string()
} else {
format!(" ({}%)", percentage)
format!(" ({percentage}%)")
}
}
} else {
@@ -390,7 +390,7 @@ fn format_and_print_whitespace(s: &str, options: &NumfmtOptions) -> Result<()> {
}

let eol = if options.zero_terminated { '\0' } else { '\n' };
print!("{}", eol);
print!("{eol}");

Ok(())
}
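Several hunks, like the `numfmt` and `more` ones above, switch `format!`/`print!` calls from positional arguments to identifiers captured inline in the format string (clippy's `uninlined_format_args`). A quick self-contained illustration with made-up values:

fn main() {
    let eol = '\n';
    let percentage = 42;
    // Before: positional arguments.
    print!("{}", eol);
    println!(" ({}%)", percentage);
    // After: the identifier is captured directly in the format string.
    print!("{eol}");
    println!(" ({percentage}%)");
}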
@@ -1086,9 +1086,8 @@ fn write_columns(
}
if not_found_break && feed_line_present {
break;
} else {
out.write_all(line_separator)?;
}
out.write_all(line_separator)?;
}

Ok(lines_printed)
@@ -736,16 +736,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
.flatten()
.cloned();

if !config.gnu_ext {
input_files = vec![files.next().unwrap_or("-".to_string())];
output_file = files.next().unwrap_or("-".to_string());
if let Some(file) = files.next() {
return Err(UUsageError::new(
1,
format!("extra operand {}", file.quote()),
));
}
} else {
if config.gnu_ext {
input_files = {
let mut files = files.collect::<Vec<_>>();
if files.is_empty() {

@@ -754,6 +745,15 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
files
};
output_file = "-".to_string();
} else {
input_files = vec![files.next().unwrap_or("-".to_string())];
output_file = files.next().unwrap_or("-".to_string());
if let Some(file) = files.next() {
return Err(UUsageError::new(
1,
format!("extra operand {}", file.quote()),
));
}
}

let word_filter = WordFilter::new(&matches, &config)?;
@@ -138,7 +138,6 @@ impl Iterator for FilenameIter {
if *index == NAME_CHARSET.len() - 1 {
// Carry the 1
*index = 0;
continue;
} else {
*index += 1;
return Some(ret);
@@ -287,13 +287,13 @@ fn read_to_buffer<T: Read>(
let end = last_line_end.unwrap();
// We want to include the separator here, because it shouldn't be carried over.
return Ok((end + 1, true));
} else {
// We need to read more lines
let len = buffer.len();
// resize the vector to 10 KB more
buffer.resize(len + 1024 * 10, 0);
read_target = &mut buffer[len..];
}

// We need to read more lines
let len = buffer.len();
// resize the vector to 10 KB more
buffer.resize(len + 1024 * 10, 0);
read_target = &mut buffer[len..];
} else {
// This file has been fully read.
let mut leftover_len = read_target.len();
@@ -1808,9 +1808,11 @@ fn general_bd_parse(a: &str) -> GeneralBigDecimalParseResult {
// Parse digits, and fold in recoverable errors
let ebd = match ExtendedBigDecimal::extended_parse(a) {
Err(ExtendedParserError::NotNumeric) => return GeneralBigDecimalParseResult::Invalid,
Err(ExtendedParserError::PartialMatch(ebd, _))
| Err(ExtendedParserError::Overflow(ebd))
| Err(ExtendedParserError::Underflow(ebd))
Err(
ExtendedParserError::PartialMatch(ebd, _)
| ExtendedParserError::Overflow(ebd)
| ExtendedParserError::Underflow(ebd),
)
| Ok(ebd) => ebd,
};

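The `seq` hunk above folds three separate `Err(..)` arms into one arm whose or-pattern is nested inside `Err(..)`. A self-contained sketch of the same shape, using a hypothetical error enum rather than the real `ExtendedParserError`:

enum ParseIssue {
    PartialMatch(f64),
    Overflow(f64),
    Underflow(f64),
}

fn recover(res: Result<f64, ParseIssue>) -> f64 {
    match res {
        // One arm with the or-pattern nested inside `Err(..)`,
        // instead of `Err(A(v)) | Err(B(v)) | Err(C(v))`.
        Err(ParseIssue::PartialMatch(v) | ParseIssue::Overflow(v) | ParseIssue::Underflow(v))
        | Ok(v) => v,
    }
}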
@@ -763,28 +763,28 @@ impl Write for ByteChunkWriter<'_> {
let num_bytes_written = custom_write(buf, &mut self.inner, self.settings)?;
self.num_bytes_remaining_in_current_chunk -= num_bytes_written as u64;
return Ok(carryover_bytes_written + num_bytes_written);
} else {
// Write enough bytes to fill the current chunk.
//
// Conversion to usize is safe because we checked that
// self.num_bytes_remaining_in_current_chunk is lower than
// n, which is already usize.
let i = self.num_bytes_remaining_in_current_chunk as usize;
let num_bytes_written = custom_write(&buf[..i], &mut self.inner, self.settings)?;
self.num_bytes_remaining_in_current_chunk -= num_bytes_written as u64;

// It's possible that the underlying writer did not
// write all the bytes.
if num_bytes_written < i {
return Ok(carryover_bytes_written + num_bytes_written);
} else {
// Move the window to look at only the remaining bytes.
buf = &buf[i..];

// Remember for the next iteration that we wrote these bytes.
carryover_bytes_written += num_bytes_written;
}
}

// Write enough bytes to fill the current chunk.
//
// Conversion to usize is safe because we checked that
// self.num_bytes_remaining_in_current_chunk is lower than
// n, which is already usize.
let i = self.num_bytes_remaining_in_current_chunk as usize;
let num_bytes_written = custom_write(&buf[..i], &mut self.inner, self.settings)?;
self.num_bytes_remaining_in_current_chunk -= num_bytes_written as u64;

// It's possible that the underlying writer did not
// write all the bytes.
if num_bytes_written < i {
return Ok(carryover_bytes_written + num_bytes_written);
}

// Move the window to look at only the remaining bytes.
buf = &buf[i..];

// Remember for the next iteration that we wrote these bytes.
carryover_bytes_written += num_bytes_written;
}
}
fn flush(&mut self) -> io::Result<()> {
@@ -29,7 +29,7 @@ fn bsd_sum(mut reader: impl Read) -> std::io::Result<(usize, u16)> {
rotated.wrapping_add(u16::from(byte))
});
}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.kind() == ErrorKind::Interrupted => (),
Err(e) => return Err(e),
}
}

@@ -53,7 +53,7 @@ fn sysv_sum(mut reader: impl Read) -> std::io::Result<(usize, u16)> {
.iter()
.fold(ret, |acc, &byte| acc.wrapping_add(u32::from(byte)));
}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.kind() == ErrorKind::Interrupted => (),
Err(e) => return Err(e),
}
}
@@ -81,7 +81,7 @@ impl FilterMode {
1,
get_message_with_args(
"tail-error-invalid-number-of-bytes",
HashMap::from([("arg".to_string(), format!("'{}'", e))]),
HashMap::from([("arg".to_string(), format!("'{e}'"))]),
),
));
}
@@ -582,13 +582,13 @@ impl LinesChunkBuffer {
if self.chunks.is_empty() {
// chunks is empty when a file is empty so quitting early here
return Ok(());
} else {
let length = &self.chunks.len();
let last = &mut self.chunks[length - 1];
if !last.get_buffer().ends_with(&[self.delimiter]) {
last.lines += 1;
self.lines += 1;
}
}

let length = &self.chunks.len();
let last = &mut self.chunks[length - 1];
if !last.get_buffer().ends_with(&[self.delimiter]) {
last.lines += 1;
self.lines += 1;
}

// skip unnecessary chunks and save the first chunk which may hold some lines we have to
@@ -284,7 +284,7 @@ impl Observer {
if let Some(watcher_rx) = &mut self.watcher_rx {
for input in inputs {
match input.kind() {
InputKind::Stdin => continue,
InputKind::Stdin => (),
InputKind::File(path) => {
#[cfg(all(unix, not(target_os = "linux")))]
if !path.is_file() {
@@ -393,7 +393,7 @@ fn forwards_thru_file(
}
total += n;
}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.kind() == ErrorKind::Interrupted => (),
Err(e) => return Err(e),
}
}
@@ -72,7 +72,7 @@ impl Display for BadSequence {
"{}",
get_message_with_args(
"tr-error-invalid-repeat-count",
HashMap::from([("count".to_string(), format!("'{}'", count))])
HashMap::from([("count".to_string(), format!("'{count}'"))])
)
)
}
@@ -218,7 +218,7 @@ impl<'input> Graph<'input> {
let cycle = self.detect_cycle();
show!(TsortError::Loop(self.name.clone()));
for node in &cycle {
show!(TsortError::LoopNode(node.to_string()));
show!(TsortError::LoopNode((*node).to_string()));
}
let u = cycle[0];
let v = cycle[1];
@@ -192,7 +192,7 @@ pub(crate) fn count_bytes_fast<T: WordCountable>(handle: &mut T) -> (usize, Opti
Ok(n) => {
byte_count += n;
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(ref e) if e.kind() == ErrorKind::Interrupted => (),
Err(e) => return (byte_count, Some(e)),
}
}

@@ -246,7 +246,7 @@ pub(crate) fn count_bytes_chars_and_lines_fast<
total.lines += bytecount::count(&buf[..n], b'\n');
}
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(ref e) if e.kind() == ErrorKind::Interrupted => (),
Err(e) => return (total, Some(e)),
}
}
@@ -671,7 +671,7 @@ impl LineInfo {
match cached_format {
LineFormat::Untagged => LineFormat::parse_untagged(line_bytes),
LineFormat::SingleSpace => LineFormat::parse_single_space(line_bytes),
_ => unreachable!("we never catch the algo based format"),
LineFormat::AlgoBased => unreachable!("we never catch the algo based format"),
}
} else if let Some(info) = LineFormat::parse_untagged(line_bytes) {
*cached_line_format = Some(LineFormat::Untagged);

@@ -1063,7 +1063,7 @@ fn process_checksum_file(
}
Err(CantOpenFile | FileIsDirectory) => res.failed_open_file += 1,
Err(FileNotFound) if !opts.ignore_missing => res.failed_open_file += 1,
_ => continue,
_ => (),
};
}

@@ -1132,7 +1132,7 @@ where
match process_checksum_file(filename_input, algo_name_input, length_input, opts) {
Err(UError(e)) => return Err(e),
Err(Failed | CantOpenChecksumFile) => failed = true,
Ok(_) => continue,
Ok(_) => (),
}
}

@@ -82,7 +82,6 @@ pub fn get_groups() -> IOResult<Vec<gid_t>> {
let err = IOError::last_os_error();
if err.raw_os_error() == Some(libc::EINVAL) {
// Number of groups has increased, retry
continue;
} else {
return Err(err);
}
@@ -154,7 +154,7 @@ impl Formatter<u64> for UnsignedInt {
};

s = format!("{prefix}{s:0>width$}", width = self.precision);
write_output(writer, "".to_string(), s, self.width, self.alignment)
write_output(writer, String::new(), s, self.width, self.alignment)
}

fn try_from_spec(s: Spec) -> Result<Self, FormatError> {

@@ -328,14 +328,14 @@ impl Formatter<&ExtendedBigDecimal> for Float {
}

fn get_sign_indicator(sign: PositiveSign, negative: bool) -> String {
if !negative {
if negative {
String::from("-")
} else {
match sign {
PositiveSign::None => String::new(),
PositiveSign::Plus => String::from("+"),
PositiveSign::Space => String::from(" "),
}
} else {
String::from("-")
}
}

@@ -363,7 +363,7 @@ fn parse_special_value<'a>(
("nan", ExtendedBigDecimal::Nan),
];

for (str, ebd) in MATCH_TABLE.iter() {
for (str, ebd) in MATCH_TABLE {
if input_lc.starts_with(str) {
let mut special = ebd.clone();
if negative {

@@ -516,11 +516,8 @@ fn construct_extended_big_decimal<'a>(

// pow_with_context "only" supports i64 values. Just overflow/underflow if the value provided
// is > 2**64 or < 2**-64.
let exponent = match exponent.to_i64() {
Some(exp) => exp,
None => {
return Err(make_error(exponent.is_positive(), negative));
}
let Some(exponent) = exponent.to_i64() else {
return Err(make_error(exponent.is_positive(), negative));
};

// Confusingly, exponent is in base 2 for hex floating point numbers.

@@ -638,13 +635,13 @@ pub(crate) fn parse<'a>(

// Return what has been parsed so far. If there are extra characters, mark the
// parsing as a partial match.
if !rest.is_empty() {
if rest.is_empty() {
ebd_result
} else {
Err(ExtendedParserError::PartialMatch(
ebd_result.unwrap_or_else(|e| e.extract()),
rest,
))
} else {
ebd_result
}
}

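The `construct_extended_big_decimal` hunk above replaces a two-arm `match` on an `Option` with a `let ... else` binding. A minimal sketch of that rewrite, using a standalone function and `i64::try_from` rather than the real parser types:

fn exponent_as_i64(exponent: i128) -> Result<i64, String> {
    // `let ... else` binds the success case and must diverge otherwise,
    // replacing the `match { Some(x) => x, None => return ... }` shape.
    let Ok(exp) = i64::try_from(exponent) else {
        return Err(format!("exponent {exponent} out of range"));
    };
    Ok(exp)
}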
@@ -113,51 +113,52 @@ pub fn wrap_chown<P: AsRef<Path>>(
}
}
return Err(out);
} else {
let changed = dest_uid != meta.uid() || dest_gid != meta.gid();
if changed {
match verbosity.level {
VerbosityLevel::Changes | VerbosityLevel::Verbose => {
let gid = meta.gid();
out = if verbosity.groups_only {
format!(
"changed group of {} from {} to {}",
path.quote(),
entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
)
} else {
let gid = meta.gid();
let uid = meta.uid();
format!(
"changed ownership of {} from {}:{} to {}:{}",
path.quote(),
entries::uid2usr(uid).unwrap_or_else(|_| uid.to_string()),
entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
)
};
}
_ => (),
};
} else if verbosity.level == VerbosityLevel::Verbose {
out = if verbosity.groups_only {
format!(
"group of {} retained as {}",
path.quote(),
entries::gid2grp(dest_gid).unwrap_or_default()
)
} else {
format!(
"ownership of {} retained as {}:{}",
path.quote(),
entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
)
};
}
}

let changed = dest_uid != meta.uid() || dest_gid != meta.gid();
if changed {
match verbosity.level {
VerbosityLevel::Changes | VerbosityLevel::Verbose => {
let gid = meta.gid();
out = if verbosity.groups_only {
format!(
"changed group of {} from {} to {}",
path.quote(),
entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
)
} else {
let gid = meta.gid();
let uid = meta.uid();
format!(
"changed ownership of {} from {}:{} to {}:{}",
path.quote(),
entries::uid2usr(uid).unwrap_or_else(|_| uid.to_string()),
entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
)
};
}
_ => (),
};
} else if verbosity.level == VerbosityLevel::Verbose {
out = if verbosity.groups_only {
format!(
"group of {} retained as {}",
path.quote(),
entries::gid2grp(dest_gid).unwrap_or_default()
)
} else {
format!(
"ownership of {} retained as {}:{}",
path.quote(),
entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
)
};
}

Ok(out)
}
@@ -174,8 +174,7 @@ fn create_bundle(

bundle.add_resource(resource).map_err(|errs| {
LocalizationError::Bundle(format!(
"Failed to add resource to bundle for {}: {:?}",
locale, errs
"Failed to add resource to bundle for {locale}: {errs:?}",
))
})?;

@@ -276,7 +275,7 @@ fn detect_system_locale() -> Result<LanguageIdentifier, LocalizationError> {
.unwrap_or(DEFAULT_LOCALE)
.to_string();
LanguageIdentifier::from_str(&locale_str).map_err(|_| {
LocalizationError::ParseLocale(format!("Failed to parse locale: {}", locale_str))
LocalizationError::ParseLocale(format!("Failed to parse locale: {locale_str}"))
})
}