Mirror of https://github.com/RGBCube/uutils-coreutils, synced 2025-07-28 03:27:44 +00:00
Merge pull request #8201 from gabelluardo/fix-clippy-rules
Prepare for clippy pedantic rules
Commit 0e564ea5da
34 changed files with 237 additions and 247 deletions
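Most of the hunks below prepare the tree for clippy's pedantic lint group: `else` blocks that follow an early `return`, `break`, or `continue` are removed and their bodies de-indented (redundant_else / needless_continue), and format arguments are inlined into the format string (uninlined_format_args). A minimal, illustrative sketch of the redundant_else rewrite — not code from this PR; the `required` flag and messages are made up:

// Illustrative sketch of clippy::redundant_else, not code from this PR.
fn check(required: bool) -> Result<(), String> {
    // Before: `if required { return Err(...); } else { warn(); }`
    // After: the early return makes the `else` wrapper unnecessary.
    if required {
        return Err("missing required input".to_string());
    }
    warn();
    Ok(())
}

fn warn() {
    eprintln!("continuing without the optional input");
}

fn main() {
    println!("{:?}", check(false));
}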
@@ -1137,9 +1137,8 @@ impl Options {
                 CpError::Error("SELinux was not enabled during the compile time!".to_owned());
             if required {
                 return Err(selinux_disabled_error);
-            } else {
-                show_error_if_needed(&selinux_disabled_error);
             }
+            show_error_if_needed(&selinux_disabled_error);
         }

         // Extract the SELinux related flags and options
@@ -1912,10 +1911,9 @@ fn handle_existing_dest(
                     source.quote()
                 )
                 .into());
-            } else {
-                is_dest_removed = dest.is_symlink();
-                backup_dest(dest, &backup_path, is_dest_removed)?;
             }
+            is_dest_removed = dest.is_symlink();
+            backup_dest(dest, &backup_path, is_dest_removed)?;
         }
         if !is_dest_removed {
             delete_dest_if_needed_and_allowed(
@@ -2182,21 +2180,21 @@ fn handle_copy_mode(
                 let dest_time = dest_metadata.modified()?;
                 if src_time <= dest_time {
                     return Ok(PerformedAction::Skipped);
-                } else {
-                    options.overwrite.verify(dest, options.debug)?;
-
-                    copy_helper(
-                        source,
-                        dest,
-                        options,
-                        context,
-                        source_is_symlink,
-                        source_is_fifo,
-                        symlinked_files,
-                        #[cfg(unix)]
-                        source_is_stream,
-                    )?;
                 }
+                options.overwrite.verify(dest, options.debug)?;
+
+                copy_helper(
+                    source,
+                    dest,
+                    options,
+                    context,
+                    source_is_symlink,
+                    source_is_fifo,
+                    symlinked_files,
+                    #[cfg(unix)]
+                    source_is_stream,
+                )?;
             }
         }
     } else {
@@ -429,9 +429,8 @@ fn get_delimiters(matches: &ArgMatches) -> UResult<(Delimiter, Option<&[u8]>)> {
                         1,
                         get_message("cut-error-delimiter-must-be-single-character"),
                     ));
-                } else {
-                    Delimiter::from(os_string)
                 }
+                Delimiter::from(os_string)
             }
         }
         None => {
@@ -34,9 +34,8 @@ impl Matcher for ExactMatcher<'_> {
                         || haystack[match_idx + 1..].starts_with(&self.needle[1..])
                     {
                         return Some((match_idx, match_idx + self.needle.len()));
-                    } else {
-                        pos = match_idx + 1;
                     }
+                    pos = match_idx + 1;
                 }
                 None => {
                     return None;
@@ -204,81 +204,81 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
         };

         return set_system_datetime(date);
-    } else {
+    }
+
     // Get the current time, either in the local time zone or UTC.
     let now = if settings.utc {
         Timestamp::now().to_zoned(TimeZone::UTC)
     } else {
         Zoned::now()
     };

     // Iterate over all dates - whether it's a single date or a file.
     let dates: Box<dyn Iterator<Item = _>> = match settings.date_source {
         DateSource::Custom(ref input) => {
             let date = parse_date(input);
             let iter = std::iter::once(date);
             Box::new(iter)
         }
         DateSource::Human(relative_time) => {
             // Double check the result is overflow or not of the current_time + relative_time
             // it may cause a panic of chrono::datetime::DateTime add
             match now.checked_add(relative_time) {
                 Ok(date) => {
                     let iter = std::iter::once(Ok(date));
                     Box::new(iter)
                 }
                 Err(_) => {
                     return Err(USimpleError::new(
                         1,
                         format!("invalid date {relative_time}"),
                     ));
                 }
             }
         }
         DateSource::Stdin => {
             let lines = BufReader::new(std::io::stdin()).lines();
             let iter = lines.map_while(Result::ok).map(parse_date);
             Box::new(iter)
         }
         DateSource::File(ref path) => {
             if path.is_dir() {
                 return Err(USimpleError::new(
                     2,
                     format!("expected file, got directory {}", path.quote()),
                 ));
             }
             let file = File::open(path)
                 .map_err_context(|| path.as_os_str().to_string_lossy().to_string())?;
             let lines = BufReader::new(file).lines();
             let iter = lines.map_while(Result::ok).map(parse_date);
             Box::new(iter)
         }
         DateSource::Now => {
             let iter = std::iter::once(Ok(now));
             Box::new(iter)
         }
     };

     let format_string = make_format_string(&settings);

     // Format all the dates
     for date in dates {
         match date {
             // TODO: Switch to lenient formatting.
             Ok(date) => match strtime::format(format_string, &date) {
                 Ok(s) => println!("{s}"),
                 Err(e) => {
                     return Err(USimpleError::new(
                         1,
-                        format!("invalid format {} ({e})", format_string),
+                        format!("invalid format {format_string} ({e})"),
                     ));
                 }
             },
             Err((input, _err)) => show!(USimpleError::new(
                 1,
                 format!("invalid date {}", input.quote())
             )),
         }
     }
-    }
@@ -437,7 +437,7 @@ impl Read for Input<'_> {
                     }
                 }
                 Ok(len) => return Ok(len),
-                Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
+                Err(e) if e.kind() == io::ErrorKind::Interrupted => (),
                 Err(_) if self.settings.iconv.noerror => return Ok(base_idx),
                 Err(e) => return Err(e),
             }
@@ -861,7 +861,7 @@ impl<'a> Output<'a> {
                         return Ok(base_idx);
                     }
                 }
-                Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
+                Err(e) if e.kind() == io::ErrorKind::Interrupted => (),
                 Err(e) => return Err(e),
             }
         }
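The `continue` → `()` rewrites here, and in several later hunks, address clippy's needless_continue lint: when the `match` is the last statement of the loop body, falling through already re-enters the loop. A minimal sketch of that retry-on-interrupt pattern, using a generic reader rather than dd's real `Input`/`Output` types:

// Illustrative sketch of clippy::needless_continue, not code from this PR.
use std::io::{ErrorKind, Read, Result};

fn read_retry(reader: &mut impl Read, buf: &mut [u8]) -> Result<usize> {
    loop {
        match reader.read(buf) {
            Ok(n) => return Ok(n),
            // `()` instead of `continue`: this arm is the last statement in the
            // loop body, so falling through loops again anyway.
            Err(e) if e.kind() == ErrorKind::Interrupted => (),
            Err(e) => return Err(e),
        }
    }
}

fn main() -> Result<()> {
    let mut buf = [0u8; 16];
    let n = read_retry(&mut std::io::empty(), &mut buf)?;
    println!("read {n} bytes");
    Ok(())
}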
@@ -33,27 +33,27 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {

     if dirnames.is_empty() {
         return Err(UUsageError::new(1, get_message("dirname-missing-operand")));
-    } else {
-        for path in &dirnames {
-            let p = Path::new(path);
-            match p.parent() {
-                Some(d) => {
-                    if d.components().next().is_none() {
-                        print!(".");
-                    } else {
-                        print_verbatim(d).unwrap();
-                    }
-                }
-                None => {
-                    if p.is_absolute() || path == "/" {
-                        print!("/");
-                    } else {
-                        print!(".");
-                    }
-                }
-            }
-            print!("{line_ending}");
-        }
     }
+
+    for path in &dirnames {
+        let p = Path::new(path);
+        match p.parent() {
+            Some(d) => {
+                if d.components().next().is_none() {
+                    print!(".");
+                } else {
+                    print_verbatim(d).unwrap();
+                }
+            }
+            None => {
+                if p.is_absolute() || path == "/" {
+                    print!("/");
+                } else {
+                    print!(".");
+                }
+            }
+        }
+        print!("{line_ending}");
+    }

     Ok(())
src/uu/env/src/env.rs (8 changed lines)
@@ -594,9 +594,11 @@ impl EnvAppData {
         match cmd.status() {
             Ok(exit) if !exit.success() => {
                 #[cfg(unix)]
-                if let Some(exit_code) = exit.code() {
-                    return Err(exit_code.into());
-                } else {
+                {
+                    if let Some(exit_code) = exit.code() {
+                        return Err(exit_code.into());
+                    }
+
                     // `exit.code()` returns `None` on Unix when the process is terminated by a signal.
                     // See std::os::unix::process::ExitStatusExt for more information. This prints out
                     // the interrupted process and the signal it received.
@@ -464,7 +464,6 @@ fn expand(options: &Options) -> UResult<()> {
             Err(e) => {
                 show_error!("{e}");
                 set_exit_code(1);
-                continue;
             }
         }
     }
@@ -175,9 +175,8 @@ fn break_knuth_plass<'a, T: Clone + Iterator<Item = &'a WordInfo<'a>>>(
                     fresh = true;
                 }
                 break;
-            } else {
-                write_with_spaces(word, slen, args.ostream)?;
             }
+            write_with_spaces(word, slen, args.ostream)?;
         }
         Ok((prev_punct, fresh))
     },
@@ -26,7 +26,6 @@ pub fn parse_obsolete(src: &str) -> Option<Result<Vec<OsString>, ParseError>> {
         } else if c == '+' && plus_possible {
             plus_possible = false;
             num_start += 1;
-            continue;
         } else {
             num_end = n;
             last_char = c;
@@ -31,7 +31,7 @@ impl TakeAllBuffer {
                     self.buffer.truncate(n);
                     return Ok(n);
                 }
-                Err(e) if e.kind() == ErrorKind::Interrupted => continue,
+                Err(e) if e.kind() == ErrorKind::Interrupted => (),
                 Err(e) => return Err(e),
             }
         }
@@ -1091,13 +1091,13 @@ impl Config {
             Dereference::DirArgs
         };

-        let tab_size = if !needs_color {
+        let tab_size = if needs_color {
+            Some(0)
+        } else {
             options
                 .get_one::<String>(options::format::TAB_SIZE)
                 .and_then(|size| size.parse::<usize>().ok())
                 .or_else(|| std::env::var("TABSIZE").ok().and_then(|s| s.parse().ok()))
-        } else {
-            Some(0)
         }
         .unwrap_or(SPACES_IN_TAB);

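Inverting `if !needs_color { ... } else { Some(0) }` so the positive condition comes first matches clippy's if_not_else style. An illustrative sketch with stand-in values — the real SPACES_IN_TAB constant and option plumbing live in ls.rs and are assumed here:

// Illustrative sketch of clippy::if_not_else, not code from this PR.
const SPACES_IN_TAB: usize = 8; // stand-in default, assumed for the example

fn tab_size(needs_color: bool, configured: Option<usize>) -> usize {
    // Before: `if !needs_color { configured } else { Some(0) }`.
    // After: the positive condition is tested first.
    if needs_color { Some(0) } else { configured }.unwrap_or(SPACES_IN_TAB)
}

fn main() {
    assert_eq!(tab_size(true, Some(4)), 0);
    assert_eq!(tab_size(false, None), SPACES_IN_TAB);
}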
@@ -2366,7 +2366,6 @@ fn enter_directory(
                     err,
                     e.command_line
                 ));
-                continue;
             }
             Ok(rd) => {
                 if listed_ancestors
@@ -745,7 +745,7 @@ impl<'a> Pager<'a> {
                     &format!("{}{pattern}{}", Attribute::Reverse, Attribute::Reset),
                 );
             };
-            self.stdout.write_all(format!("\r{}\n", line).as_bytes())?;
+            self.stdout.write_all(format!("\r{line}\n").as_bytes())?;
             lines_printed += 1;
             index += 1;
         }
@@ -792,7 +792,7 @@ impl<'a> Pager<'a> {
             if percentage >= 100 {
                 " (END)".to_string()
             } else {
-                format!(" ({}%)", percentage)
+                format!(" ({percentage}%)")
             }
         }
     } else {
@@ -390,7 +390,7 @@ fn format_and_print_whitespace(s: &str, options: &NumfmtOptions) -> Result<()> {
     }

     let eol = if options.zero_terminated { '\0' } else { '\n' };
-    print!("{}", eol);
+    print!("{eol}");

     Ok(())
 }
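This hunk, like the more, tail, tr, and localization hunks around it, is clippy's uninlined_format_args: variables are captured directly inside the format string instead of being passed as positional arguments. A small illustrative sketch, not code from this PR:

// Illustrative sketch of clippy::uninlined_format_args.
fn main() {
    let eol = '\n';
    let percentage = 42;
    // Before: print!("{}", eol) and format!(" ({}%)", percentage).
    // After: the variables are captured directly in the format string.
    let status = format!(" ({percentage}%)");
    print!("{status}{eol}");
}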
@@ -1086,9 +1086,8 @@ fn write_columns(
         }
         if not_found_break && feed_line_present {
             break;
-        } else {
-            out.write_all(line_separator)?;
         }
+        out.write_all(line_separator)?;
     }

     Ok(lines_printed)
@@ -736,16 +736,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
         .flatten()
         .cloned();

-    if !config.gnu_ext {
-        input_files = vec![files.next().unwrap_or("-".to_string())];
-        output_file = files.next().unwrap_or("-".to_string());
-        if let Some(file) = files.next() {
-            return Err(UUsageError::new(
-                1,
-                format!("extra operand {}", file.quote()),
-            ));
-        }
-    } else {
+    if config.gnu_ext {
         input_files = {
             let mut files = files.collect::<Vec<_>>();
             if files.is_empty() {
@@ -754,6 +745,15 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
             files
         };
         output_file = "-".to_string();
+    } else {
+        input_files = vec![files.next().unwrap_or("-".to_string())];
+        output_file = files.next().unwrap_or("-".to_string());
+        if let Some(file) = files.next() {
+            return Err(UUsageError::new(
+                1,
+                format!("extra operand {}", file.quote()),
+            ));
+        }
     }

     let word_filter = WordFilter::new(&matches, &config)?;
@@ -138,7 +138,6 @@ impl Iterator for FilenameIter {
             if *index == NAME_CHARSET.len() - 1 {
                 // Carry the 1
                 *index = 0;
-                continue;
             } else {
                 *index += 1;
                 return Some(ret);
@@ -287,13 +287,13 @@ fn read_to_buffer<T: Read>(
                 let end = last_line_end.unwrap();
                 // We want to include the separator here, because it shouldn't be carried over.
                 return Ok((end + 1, true));
-            } else {
-                // We need to read more lines
-                let len = buffer.len();
-                // resize the vector to 10 KB more
-                buffer.resize(len + 1024 * 10, 0);
-                read_target = &mut buffer[len..];
             }
+
+            // We need to read more lines
+            let len = buffer.len();
+            // resize the vector to 10 KB more
+            buffer.resize(len + 1024 * 10, 0);
+            read_target = &mut buffer[len..];
         } else {
             // This file has been fully read.
             let mut leftover_len = read_target.len();
@@ -1808,9 +1808,11 @@ fn general_bd_parse(a: &str) -> GeneralBigDecimalParseResult {
     // Parse digits, and fold in recoverable errors
     let ebd = match ExtendedBigDecimal::extended_parse(a) {
         Err(ExtendedParserError::NotNumeric) => return GeneralBigDecimalParseResult::Invalid,
-        Err(ExtendedParserError::PartialMatch(ebd, _))
-        | Err(ExtendedParserError::Overflow(ebd))
-        | Err(ExtendedParserError::Underflow(ebd))
+        Err(
+            ExtendedParserError::PartialMatch(ebd, _)
+            | ExtendedParserError::Overflow(ebd)
+            | ExtendedParserError::Underflow(ebd),
+        )
         | Ok(ebd) => ebd,
     };

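Grouping the three recoverable parser errors into a single or-pattern inside `Err(..)` keeps one match arm per outcome. A self-contained sketch of the same pattern with a stand-in error enum instead of the crate's ExtendedParserError:

// Illustrative sketch of or-patterns inside a constructor, not code from this PR.
enum ParseIssue {
    PartialMatch(f64),
    Overflow(f64),
    Underflow(f64),
    NotNumeric,
}

fn best_effort(value: Result<f64, ParseIssue>) -> Option<f64> {
    match value {
        Err(ParseIssue::NotNumeric) => None,
        // One arm instead of three: the alternatives live inside `Err(..)`, and
        // `Ok(x)` joins the same arm because every alternative binds `x`.
        Err(
            ParseIssue::PartialMatch(x) | ParseIssue::Overflow(x) | ParseIssue::Underflow(x),
        )
        | Ok(x) => Some(x),
    }
}

fn main() {
    assert_eq!(best_effort(Ok(1.5)), Some(1.5));
    assert_eq!(best_effort(Err(ParseIssue::Overflow(99.0))), Some(99.0));
    assert_eq!(best_effort(Err(ParseIssue::NotNumeric)), None);
}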
@@ -763,28 +763,28 @@ impl Write for ByteChunkWriter<'_> {
                 let num_bytes_written = custom_write(buf, &mut self.inner, self.settings)?;
                 self.num_bytes_remaining_in_current_chunk -= num_bytes_written as u64;
                 return Ok(carryover_bytes_written + num_bytes_written);
-            } else {
-                // Write enough bytes to fill the current chunk.
-                //
-                // Conversion to usize is safe because we checked that
-                // self.num_bytes_remaining_in_current_chunk is lower than
-                // n, which is already usize.
-                let i = self.num_bytes_remaining_in_current_chunk as usize;
-                let num_bytes_written = custom_write(&buf[..i], &mut self.inner, self.settings)?;
-                self.num_bytes_remaining_in_current_chunk -= num_bytes_written as u64;
-                // It's possible that the underlying writer did not
-                // write all the bytes.
-                if num_bytes_written < i {
-                    return Ok(carryover_bytes_written + num_bytes_written);
-                } else {
-                    // Move the window to look at only the remaining bytes.
-                    buf = &buf[i..];
-
-                    // Remember for the next iteration that we wrote these bytes.
-                    carryover_bytes_written += num_bytes_written;
-                }
             }

+            // Write enough bytes to fill the current chunk.
+            //
+            // Conversion to usize is safe because we checked that
+            // self.num_bytes_remaining_in_current_chunk is lower than
+            // n, which is already usize.
+            let i = self.num_bytes_remaining_in_current_chunk as usize;
+            let num_bytes_written = custom_write(&buf[..i], &mut self.inner, self.settings)?;
+            self.num_bytes_remaining_in_current_chunk -= num_bytes_written as u64;
+
+            // It's possible that the underlying writer did not
+            // write all the bytes.
+            if num_bytes_written < i {
+                return Ok(carryover_bytes_written + num_bytes_written);
+            }
+
+            // Move the window to look at only the remaining bytes.
+            buf = &buf[i..];
+
+            // Remember for the next iteration that we wrote these bytes.
+            carryover_bytes_written += num_bytes_written;
         }
     }
     fn flush(&mut self) -> io::Result<()> {
@@ -29,7 +29,7 @@ fn bsd_sum(mut reader: impl Read) -> std::io::Result<(usize, u16)> {
                     rotated.wrapping_add(u16::from(byte))
                 });
             }
-            Err(e) if e.kind() == ErrorKind::Interrupted => continue,
+            Err(e) if e.kind() == ErrorKind::Interrupted => (),
             Err(e) => return Err(e),
         }
     }
@@ -53,7 +53,7 @@ fn sysv_sum(mut reader: impl Read) -> std::io::Result<(usize, u16)> {
                 .iter()
                 .fold(ret, |acc, &byte| acc.wrapping_add(u32::from(byte)));
             }
-            Err(e) if e.kind() == ErrorKind::Interrupted => continue,
+            Err(e) if e.kind() == ErrorKind::Interrupted => (),
             Err(e) => return Err(e),
         }
     }
@@ -81,7 +81,7 @@ impl FilterMode {
                     1,
                     get_message_with_args(
                         "tail-error-invalid-number-of-bytes",
-                        HashMap::from([("arg".to_string(), format!("'{}'", e))]),
+                        HashMap::from([("arg".to_string(), format!("'{e}'"))]),
                     ),
                 ));
             }
@@ -582,13 +582,13 @@ impl LinesChunkBuffer {
         if self.chunks.is_empty() {
             // chunks is empty when a file is empty so quitting early here
             return Ok(());
-        } else {
-            let length = &self.chunks.len();
-            let last = &mut self.chunks[length - 1];
-            if !last.get_buffer().ends_with(&[self.delimiter]) {
-                last.lines += 1;
-                self.lines += 1;
-            }
         }
+
+        let length = &self.chunks.len();
+        let last = &mut self.chunks[length - 1];
+        if !last.get_buffer().ends_with(&[self.delimiter]) {
+            last.lines += 1;
+            self.lines += 1;
+        }

         // skip unnecessary chunks and save the first chunk which may hold some lines we have to
@@ -284,7 +284,7 @@ impl Observer {
         if let Some(watcher_rx) = &mut self.watcher_rx {
             for input in inputs {
                 match input.kind() {
-                    InputKind::Stdin => continue,
+                    InputKind::Stdin => (),
                     InputKind::File(path) => {
                         #[cfg(all(unix, not(target_os = "linux")))]
                         if !path.is_file() {
@@ -393,7 +393,7 @@ fn forwards_thru_file(
                 }
                 total += n;
             }
-            Err(e) if e.kind() == ErrorKind::Interrupted => continue,
+            Err(e) if e.kind() == ErrorKind::Interrupted => (),
             Err(e) => return Err(e),
         }
     }
@@ -72,7 +72,7 @@ impl Display for BadSequence {
                     "{}",
                     get_message_with_args(
                         "tr-error-invalid-repeat-count",
-                        HashMap::from([("count".to_string(), format!("'{}'", count))])
+                        HashMap::from([("count".to_string(), format!("'{count}'"))])
                     )
                 )
             }
@@ -218,7 +218,7 @@ impl<'input> Graph<'input> {
         let cycle = self.detect_cycle();
         show!(TsortError::Loop(self.name.clone()));
         for node in &cycle {
-            show!(TsortError::LoopNode(node.to_string()));
+            show!(TsortError::LoopNode((*node).to_string()));
         }
         let u = cycle[0];
         let v = cycle[1];
@@ -192,7 +192,7 @@ pub(crate) fn count_bytes_fast<T: WordCountable>(handle: &mut T) -> (usize, Opti
             Ok(n) => {
                 byte_count += n;
             }
-            Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
+            Err(ref e) if e.kind() == ErrorKind::Interrupted => (),
             Err(e) => return (byte_count, Some(e)),
         }
     }
@@ -246,7 +246,7 @@ pub(crate) fn count_bytes_chars_and_lines_fast<
                 total.lines += bytecount::count(&buf[..n], b'\n');
             }
         }
-        Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
+        Err(ref e) if e.kind() == ErrorKind::Interrupted => (),
         Err(e) => return (total, Some(e)),
     }
 }
@@ -671,7 +671,7 @@ impl LineInfo {
             match cached_format {
                 LineFormat::Untagged => LineFormat::parse_untagged(line_bytes),
                 LineFormat::SingleSpace => LineFormat::parse_single_space(line_bytes),
-                _ => unreachable!("we never catch the algo based format"),
+                LineFormat::AlgoBased => unreachable!("we never catch the algo based format"),
             }
         } else if let Some(info) = LineFormat::parse_untagged(line_bytes) {
             *cached_line_format = Some(LineFormat::Untagged);
@@ -1063,7 +1063,7 @@ fn process_checksum_file(
             }
             Err(CantOpenFile | FileIsDirectory) => res.failed_open_file += 1,
             Err(FileNotFound) if !opts.ignore_missing => res.failed_open_file += 1,
-            _ => continue,
+            _ => (),
         };
     }

@@ -1132,7 +1132,7 @@ where
         match process_checksum_file(filename_input, algo_name_input, length_input, opts) {
             Err(UError(e)) => return Err(e),
             Err(Failed | CantOpenChecksumFile) => failed = true,
-            Ok(_) => continue,
+            Ok(_) => (),
         }
     }

@@ -82,7 +82,6 @@ pub fn get_groups() -> IOResult<Vec<gid_t>> {
             let err = IOError::last_os_error();
             if err.raw_os_error() == Some(libc::EINVAL) {
                 // Number of groups has increased, retry
-                continue;
             } else {
                 return Err(err);
             }
@@ -154,7 +154,7 @@ impl Formatter<u64> for UnsignedInt {
         };

         s = format!("{prefix}{s:0>width$}", width = self.precision);
-        write_output(writer, "".to_string(), s, self.width, self.alignment)
+        write_output(writer, String::new(), s, self.width, self.alignment)
     }

     fn try_from_spec(s: Spec) -> Result<Self, FormatError> {
@@ -328,14 +328,14 @@ impl Formatter<&ExtendedBigDecimal> for Float {
 }

 fn get_sign_indicator(sign: PositiveSign, negative: bool) -> String {
-    if !negative {
+    if negative {
+        String::from("-")
+    } else {
         match sign {
             PositiveSign::None => String::new(),
             PositiveSign::Plus => String::from("+"),
             PositiveSign::Space => String::from(" "),
         }
-    } else {
-        String::from("-")
     }
 }

@@ -363,7 +363,7 @@ fn parse_special_value<'a>(
         ("nan", ExtendedBigDecimal::Nan),
     ];

-    for (str, ebd) in MATCH_TABLE.iter() {
+    for (str, ebd) in MATCH_TABLE {
         if input_lc.starts_with(str) {
             let mut special = ebd.clone();
             if negative {
@@ -516,11 +516,8 @@ fn construct_extended_big_decimal<'a>(

     // pow_with_context "only" supports i64 values. Just overflow/underflow if the value provided
     // is > 2**64 or < 2**-64.
-    let exponent = match exponent.to_i64() {
-        Some(exp) => exp,
-        None => {
-            return Err(make_error(exponent.is_positive(), negative));
-        }
+    let Some(exponent) = exponent.to_i64() else {
+        return Err(make_error(exponent.is_positive(), negative));
     };

     // Confusingly, exponent is in base 2 for hex floating point numbers.
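The `match` on `to_i64()` becomes a `let ... else`, which reads better when the failure arm does nothing but return. A minimal sketch of that rewrite, where `i64::try_from` stands in for the crate's big-number conversion:

// Illustrative sketch of the `let ... else` rewrite, not code from this PR.
fn to_small_exponent(exponent: i128) -> Result<i64, String> {
    // `let ... else` replaces a two-arm match whose failure arm only returns.
    let Ok(exp) = i64::try_from(exponent) else {
        return Err(format!("exponent {exponent} out of range"));
    };
    Ok(exp)
}

fn main() {
    assert_eq!(to_small_exponent(12), Ok(12));
    assert!(to_small_exponent(i128::MAX).is_err());
}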
@@ -638,13 +635,13 @@ pub(crate) fn parse<'a>(

     // Return what has been parsed so far. If there are extra characters, mark the
     // parsing as a partial match.
-    if !rest.is_empty() {
+    if rest.is_empty() {
+        ebd_result
+    } else {
         Err(ExtendedParserError::PartialMatch(
             ebd_result.unwrap_or_else(|e| e.extract()),
             rest,
         ))
-    } else {
-        ebd_result
     }
 }

@@ -113,51 +113,52 @@ pub fn wrap_chown<P: AsRef<Path>>(
             }
         }
         return Err(out);
-    } else {
-        let changed = dest_uid != meta.uid() || dest_gid != meta.gid();
-        if changed {
-            match verbosity.level {
-                VerbosityLevel::Changes | VerbosityLevel::Verbose => {
-                    let gid = meta.gid();
-                    out = if verbosity.groups_only {
-                        format!(
-                            "changed group of {} from {} to {}",
-                            path.quote(),
-                            entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
-                            entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
-                        )
-                    } else {
-                        let gid = meta.gid();
-                        let uid = meta.uid();
-                        format!(
-                            "changed ownership of {} from {}:{} to {}:{}",
-                            path.quote(),
-                            entries::uid2usr(uid).unwrap_or_else(|_| uid.to_string()),
-                            entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
-                            entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
-                            entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
-                        )
-                    };
-                }
-                _ => (),
-            };
-        } else if verbosity.level == VerbosityLevel::Verbose {
-            out = if verbosity.groups_only {
-                format!(
-                    "group of {} retained as {}",
-                    path.quote(),
-                    entries::gid2grp(dest_gid).unwrap_or_default()
-                )
-            } else {
-                format!(
-                    "ownership of {} retained as {}:{}",
-                    path.quote(),
-                    entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
-                    entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
-                )
-            };
-        }
     }
+
+    let changed = dest_uid != meta.uid() || dest_gid != meta.gid();
+    if changed {
+        match verbosity.level {
+            VerbosityLevel::Changes | VerbosityLevel::Verbose => {
+                let gid = meta.gid();
+                out = if verbosity.groups_only {
+                    format!(
+                        "changed group of {} from {} to {}",
+                        path.quote(),
+                        entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
+                        entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
+                    )
+                } else {
+                    let gid = meta.gid();
+                    let uid = meta.uid();
+                    format!(
+                        "changed ownership of {} from {}:{} to {}:{}",
+                        path.quote(),
+                        entries::uid2usr(uid).unwrap_or_else(|_| uid.to_string()),
+                        entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string()),
+                        entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
+                        entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
+                    )
+                };
+            }
+            _ => (),
+        };
+    } else if verbosity.level == VerbosityLevel::Verbose {
+        out = if verbosity.groups_only {
+            format!(
+                "group of {} retained as {}",
+                path.quote(),
+                entries::gid2grp(dest_gid).unwrap_or_default()
+            )
+        } else {
+            format!(
+                "ownership of {} retained as {}:{}",
+                path.quote(),
+                entries::uid2usr(dest_uid).unwrap_or_else(|_| dest_uid.to_string()),
+                entries::gid2grp(dest_gid).unwrap_or_else(|_| dest_gid.to_string())
+            )
+        };
+    }

     Ok(out)
 }
@@ -174,8 +174,7 @@ fn create_bundle(

     bundle.add_resource(resource).map_err(|errs| {
         LocalizationError::Bundle(format!(
-            "Failed to add resource to bundle for {}: {:?}",
-            locale, errs
+            "Failed to add resource to bundle for {locale}: {errs:?}",
         ))
     })?;

@@ -276,7 +275,7 @@ fn detect_system_locale() -> Result<LanguageIdentifier, LocalizationError> {
         .unwrap_or(DEFAULT_LOCALE)
         .to_string();
     LanguageIdentifier::from_str(&locale_str).map_err(|_| {
-        LocalizationError::ParseLocale(format!("Failed to parse locale: {}", locale_str))
+        LocalizationError::ParseLocale(format!("Failed to parse locale: {locale_str}"))
     })
 }
