Mirror of https://github.com/RGBCube/uutils-coreutils (synced 2025-07-27 11:07:44 +00:00)

Parent: 704421bd6b
Commit: 5d6a04ab71

21 changed files with 106 additions and 169 deletions
@@ -576,6 +576,7 @@ semicolon_if_nothing_returned = "warn"
 single_char_pattern = "warn"
 explicit_iter_loop = "warn"
 if_not_else = "warn"
+manual_let_else = "warn"
 
 all = { level = "deny", priority = -1 }
 cargo = { level = "warn", priority = -1 }
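
The lint added above drives most of this diff: clippy's manual_let_else flags match blocks whose only job is to bind a value or bail out early, and suggests let-else instead. A minimal illustration of the before/after shape (hypothetical parse_port helper, not from this repository):

    // Before: the shape clippy::manual_let_else warns about.
    fn parse_port_old(s: &str) -> Result<u16, String> {
        let port = match s.parse::<u16>() {
            Ok(p) => p,
            Err(_) => return Err(format!("invalid port: {s}")),
        };
        Ok(port)
    }

    // After: the same early return written with let-else (Rust 1.65+).
    fn parse_port_new(s: &str) -> Result<u16, String> {
        let Ok(port) = s.parse::<u16>() else {
            return Err(format!("invalid port: {s}"));
        };
        Ok(port)
    }
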
@@ -125,16 +125,13 @@ fn basename(fullname: &str, suffix: &str) -> String {
     // Convert to path buffer and get last path component
     let pb = PathBuf::from(path);
-    match pb.components().last() {
-        Some(c) => {
-            let name = c.as_os_str().to_str().unwrap();
-            if name == suffix {
-                name.to_string()
-            } else {
-                name.strip_suffix(suffix).unwrap_or(name).to_string()
-            }
-        }
-
-        None => String::new(),
-    }
+    pb.components().next_back().map_or_else(String::new, |c| {
+        let name = c.as_os_str().to_str().unwrap();
+        if name == suffix {
+            name.to_string()
+        } else {
+            name.strip_suffix(suffix).unwrap_or(name).to_string()
+        }
+    })
 }
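
Two idioms show up in the basename hunk: Components is a double-ended iterator, so next_back() fetches the final path component directly instead of driving the iterator all the way through as last() does, and map_or_else replaces the explicit Some/None match. A standalone sketch of both (illustrative only, not the uutils code):

    use std::path::PathBuf;

    fn last_component(path: &str) -> String {
        let pb = PathBuf::from(path);
        // next_back() walks in from the end; last() would consume the whole iterator.
        pb.components()
            .next_back()
            .map_or_else(String::new, |c| c.as_os_str().to_string_lossy().into_owned())
    }

    fn main() {
        assert_eq!(last_component("/usr/bin/ls"), "ls");
        assert_eq!(last_component(""), "");
    }
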
@@ -54,7 +54,7 @@ fn is_over_mounted(mounts: &[MountInfo], mount: &MountInfo) -> bool {
     let last_mount_for_dir = mounts
         .iter()
         .filter(|m| m.mount_dir == mount.mount_dir)
-        .last();
+        .next_back();
 
     if let Some(lmi) = last_mount_for_dir {
         lmi.dev_name != mount.dev_name
src/uu/env/src/env.rs (vendored, 13 changes)
@@ -130,14 +130,11 @@ fn parse_signal_opt<'a>(opts: &mut Options<'a>, opt: &'a OsStr) -> UResult<()> {
         }
     });
     for sig in sig_vec {
-        let sig_str = match sig.to_str() {
-            Some(s) => s,
-            None => {
-                return Err(USimpleError::new(
-                    1,
-                    format!("{}: invalid signal", sig.quote()),
-                ))
-            }
+        let Some(sig_str) = sig.to_str() else {
+            return Err(USimpleError::new(
+                1,
+                format!("{}: invalid signal", sig.quote()),
+            ));
         };
         let sig_val = parse_signal_value(sig_str)?;
         if !opts.ignore_signal.contains(&sig_val) {
@@ -255,9 +255,8 @@ impl ParagraphStream<'_> {
             if l_slice.starts_with("From ") {
                 true
             } else {
-                let colon_posn = match l_slice.find(':') {
-                    Some(n) => n,
-                    None => return false,
+                let Some(colon_posn) = l_slice.find(':') else {
+                    return false;
                 };
 
                 // header field must be nonzero length
@@ -560,12 +559,11 @@ impl<'a> Iterator for WordSplit<'a> {
 
         // find the start of the next word, and record if we find a tab character
         let (before_tab, after_tab, word_start) =
-            match self.analyze_tabs(&self.string[old_position..]) {
-                (b, a, Some(s)) => (b, a, s + old_position),
-                (_, _, None) => {
-                    self.position = self.length;
-                    return None;
-                }
+            if let (b, a, Some(s)) = self.analyze_tabs(&self.string[old_position..]) {
+                (b, a, s + old_position)
+            } else {
+                self.position = self.length;
+                return None;
             };
 
         // find the beginning of the next whitespace
@@ -91,9 +91,8 @@ fn process_num_block(
     }
     if let Some(n) = multiplier {
         options.push(OsString::from("-c"));
-        let num = match num.checked_mul(n) {
-            Some(n) => n,
-            None => return Some(Err(ParseError::Overflow)),
+        let Some(num) = num.checked_mul(n) else {
+            return Some(Err(ParseError::Overflow));
         };
         options.push(OsString::from(format!("{num}")));
     } else {
@@ -652,7 +652,7 @@ fn copy_files_into_dir(files: &[PathBuf], target_dir: &Path, b: &Behavior) -> UResult<()> {
         }
 
         let mut targetpath = target_dir.to_path_buf();
-        let filename = sourcepath.components().last().unwrap();
+        let filename = sourcepath.components().next_back().unwrap();
         targetpath.push(filename);
 
         show_if_err!(copy(sourcepath, &targetpath, b));
@@ -379,8 +379,8 @@ fn parse_time_style(options: &clap::ArgMatches) -> Result<TimeStyle, LsError> {
     //If both FULL_TIME and TIME_STYLE are present
     //The one added last is dominant
     if options.get_flag(options::FULL_TIME)
-        && options.indices_of(options::FULL_TIME).unwrap().last()
-            > options.indices_of(options::TIME_STYLE).unwrap().last()
+        && options.indices_of(options::FULL_TIME).unwrap().next_back()
+            > options.indices_of(options::TIME_STYLE).unwrap().next_back()
     {
         Ok(TimeStyle::FullIso)
     } else {
@@ -101,8 +101,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
     let options = SeqOptions {
         separator: matches
             .get_one::<String>(OPT_SEPARATOR)
-            .map(|s| s.as_str())
-            .unwrap_or("\n")
+            .map_or("\n", |s| s.as_str())
             .to_string(),
         terminator: matches
             .get_one::<String>(OPT_TERMINATOR)
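
map(...).unwrap_or(...) on an Option folds into a single map_or call; note the default value comes first and the mapping closure second. A hedged sketch of the call shape (separator is a made-up helper, not the seq code):

    fn separator(arg: Option<&String>) -> String {
        // Default first, then the closure applied to the Some value.
        arg.map_or("\n", |s| s.as_str()).to_string()
    }

    fn main() {
        assert_eq!(separator(None), "\n");
        assert_eq!(separator(Some(&String::from(","))), ",");
    }
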
@@ -150,13 +149,11 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
 
     let precision = select_precision(first_precision, increment_precision, last_precision);
 
-    let format = match options.format {
-        Some(f) => {
-            let f = Format::<num_format::Float>::parse(f)?;
-            Some(f)
-        }
-        None => None,
-    };
+    let format = options
+        .format
+        .map(Format::<num_format::Float>::parse)
+        .transpose()?;
 
     let result = print_seq(
         (first.number, increment.number, last.number),
         precision,
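
Mapping a fallible parser over an Option produces Option<Result<...>>; transpose() flips that into Result<Option<...>, ...> so one ? can propagate the error, which is exactly what the removed match was doing by hand. A self-contained sketch (the Format type below is a stand-in, not the real num_format one):

    #[derive(Debug, PartialEq)]
    struct Format(String);

    impl Format {
        fn parse(spec: String) -> Result<Self, String> {
            if spec.starts_with('%') {
                Ok(Format(spec))
            } else {
                Err(format!("bad format {spec}"))
            }
        }
    }

    fn pick_format(opt: Option<String>) -> Result<Option<Format>, String> {
        // Option<Result<Format, _>> -> Result<Option<Format>, _>, then `?`.
        let format = opt.map(Format::parse).transpose()?;
        Ok(format)
    }

    fn main() {
        assert_eq!(pick_format(None), Ok(None));
        assert!(pick_format(Some("%g".into())).is_ok());
        assert!(pick_format(Some("oops".into())).is_err());
    }
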
@@ -164,12 +161,12 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
         &options.terminator,
         options.equal_width,
         padding,
-        &format,
+        format.as_ref(),
     );
     match result {
-        Ok(_) => Ok(()),
+        Ok(()) => Ok(()),
         Err(err) if err.kind() == ErrorKind::BrokenPipe => Ok(()),
-        Err(e) => Err(e.map_err_context(|| "write error".into())),
+        Err(err) => Err(err.map_err_context(|| "write error".into())),
     }
 }
 
@@ -263,7 +260,7 @@ fn print_seq(
     terminator: &str,
     pad: bool,
     padding: usize,
-    format: &Option<Format<num_format::Float>>,
+    format: Option<&Format<num_format::Float>>,
 ) -> std::io::Result<()> {
     let stdout = stdout();
     let mut stdout = stdout.lock();
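
Changing the parameter from &Option<Format<...>> to Option<&Format<...>> means the callee no longer cares how the caller stores the value; the caller converts with Option::as_ref(), which is what the format.as_ref() change above does. A sketch with hypothetical names:

    struct Format(String);

    // Accepts Option<&Format>, so callers can lend their Option without moving it.
    fn print_value(value: f64, format: Option<&Format>) -> String {
        match format {
            Some(f) => format!("{value} via {}", f.0),
            None => value.to_string(),
        }
    }

    fn main() {
        let fmt: Option<Format> = Some(Format("%g".to_string()));
        // as_ref() turns &Option<Format> into Option<&Format> at the call site.
        println!("{}", print_value(2.5, fmt.as_ref()));
        println!("{}", print_value(2.5, None));
    }
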
@@ -224,11 +224,8 @@ fn read_write_loop<I: WriteableTmpFile>(
     let mut sender_option = Some(sender);
     let mut tmp_files = vec![];
     loop {
-        let chunk = match receiver.recv() {
-            Ok(it) => it,
-            _ => {
-                return Ok(ReadResult::WroteChunksToFile { tmp_files });
-            }
+        let Ok(chunk) = receiver.recv() else {
+            return Ok(ReadResult::WroteChunksToFile { tmp_files });
         };
 
         let tmp_file = write::<I>(
@@ -1408,18 +1408,15 @@ where
         };
 
         let bytes = line.as_slice();
-        match kth_chunk {
-            Some(chunk_number) => {
-                if (i % num_chunks) == (chunk_number - 1) as usize {
-                    stdout_writer.write_all(bytes)?;
-                }
-            }
-            None => {
-                let writer = out_files.get_writer(i % num_chunks, settings)?;
-                let writer_stdin_open = custom_write_all(bytes, writer, settings)?;
-                if !writer_stdin_open {
-                    closed_writers += 1;
-                }
-            }
+        if let Some(chunk_number) = kth_chunk {
+            if (i % num_chunks) == (chunk_number - 1) as usize {
+                stdout_writer.write_all(bytes)?;
+            }
+        } else {
+            let writer = out_files.get_writer(i % num_chunks, settings)?;
+            let writer_stdin_open = custom_write_all(bytes, writer, settings)?;
+            if !writer_stdin_open {
+                closed_writers += 1;
+            }
         }
         i += 1;
@@ -34,9 +34,8 @@ pub enum ParseError {
 /// Parses obsolete syntax
 /// tail -\[NUM\]\[bcl\]\[f\] and tail +\[NUM\]\[bcl\]\[f\]
 pub fn parse_obsolete(src: &OsString) -> Option<Result<ObsoleteArgs, ParseError>> {
-    let mut rest = match src.to_str() {
-        Some(src) => src,
-        None => return Some(Err(ParseError::InvalidEncoding)),
+    let Some(mut rest) = src.to_str() else {
+        return Some(Err(ParseError::InvalidEncoding));
     };
     let sign = if let Some(r) = rest.strip_prefix('-') {
         rest = r;
@@ -86,9 +85,8 @@ pub fn parse_obsolete(src: &OsString) -> Option<Result<ObsoleteArgs, ParseError>> {
     }
 
     let multiplier = if mode == 'b' { 512 } else { 1 };
-    let num = match num.checked_mul(multiplier) {
-        Some(n) => n,
-        None => return Some(Err(ParseError::Overflow)),
+    let Some(num) = num.checked_mul(multiplier) else {
+        return Some(Err(ParseError::Overflow));
     };
 
     Some(Ok(ObsoleteArgs {
@@ -210,13 +210,8 @@ fn integers(a: &OsStr, b: &OsStr, op: &OsStr) -> ParseResult<bool> {
 fn files(a: &OsStr, b: &OsStr, op: &OsStr) -> ParseResult<bool> {
     // Don't manage the error. GNU doesn't show error when doing
     // test foo -nt bar
-    let f_a = match fs::metadata(a) {
-        Ok(f) => f,
-        Err(_) => return Ok(false),
-    };
-    let f_b = match fs::metadata(b) {
-        Ok(f) => f,
-        Err(_) => return Ok(false),
+    let (Ok(f_a), Ok(f_b)) = (fs::metadata(a), fs::metadata(b)) else {
+        return Ok(false);
     };
 
     Ok(match op.to_str() {
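
let-else also accepts a refutable pattern over a tuple, so two metadata lookups collapse into a single binding with one shared fallback. A runnable sketch of the same shape (both_exist is a made-up helper):

    use std::fs;
    use std::path::Path;

    // false when either path cannot be stat'ed, mirroring the hunk above.
    fn both_exist(a: &Path, b: &Path) -> bool {
        let (Ok(_meta_a), Ok(_meta_b)) = (fs::metadata(a), fs::metadata(b)) else {
            return false;
        };
        true
    }

    fn main() {
        println!("{}", both_exist(Path::new("/"), Path::new("/nonexistent")));
    }
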
@@ -290,11 +285,8 @@ fn path(path: &OsStr, condition: &PathCondition) -> bool {
         fs::metadata(path)
     };
 
-    let metadata = match metadata {
-        Ok(metadata) => metadata,
-        Err(_) => {
-            return false;
-        }
+    let Ok(metadata) = metadata else {
+        return false;
     };
 
     let file_type = metadata.file_type();
@@ -599,14 +599,11 @@ fn parse_timestamp(s: &str) -> UResult<FileTime> {
 
     let local = NaiveDateTime::parse_from_str(&ts, format)
         .map_err(|_| USimpleError::new(1, format!("invalid date ts format {}", ts.quote())))?;
-    let mut local = match chrono::Local.from_local_datetime(&local) {
-        LocalResult::Single(dt) => dt,
-        _ => {
-            return Err(USimpleError::new(
-                1,
-                format!("invalid date ts format {}", ts.quote()),
-            ))
-        }
+    let LocalResult::Single(mut local) = chrono::Local.from_local_datetime(&local) else {
+        return Err(USimpleError::new(
+            1,
+            format!("invalid date ts format {}", ts.quote()),
+        ));
     };
 
     // Chrono caps seconds at 59, but 60 is valid. It might be a leap second
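
let-else is not limited to Option and Result: any refutable pattern works, including chrono's LocalResult::Single, and the mut binding moves inside the pattern. A reduced sketch using a local stand-in enum instead of chrono:

    // Stand-in for chrono::LocalResult, only to show the pattern.
    enum LocalResult<T> {
        Single(T),
        Ambiguous(T, T),
        None,
    }

    fn pick(res: LocalResult<i64>) -> Result<i64, String> {
        // Any non-Single variant falls into the else arm.
        let LocalResult::Single(mut ts) = res else {
            return Err("ambiguous or invalid local time".to_string());
        };
        ts += 1; // stand-in for the leap-second adjustment done later
        Ok(ts)
    }

    fn main() {
        assert_eq!(pick(LocalResult::Single(10)), Ok(11));
        assert!(pick(LocalResult::Ambiguous(1, 2)).is_err());
        assert!(pick(LocalResult::None).is_err());
    }
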
@@ -171,12 +171,9 @@ impl Uniq {
 
         // Convert the leftover bytes to UTF-8 for character-based -w
         // If invalid UTF-8, just compare them as individual bytes (fallback).
-        let string_after_skip = match std::str::from_utf8(fields_to_check) {
-            Ok(s) => s,
-            Err(_) => {
-                // Fallback: if invalid UTF-8, treat them as single-byte “chars”
-                return closure(&mut fields_to_check.iter().map(|&b| b as char));
-            }
+        let Ok(string_after_skip) = std::str::from_utf8(fields_to_check) else {
+            // Fallback: if invalid UTF-8, treat them as single-byte “chars”
+            return closure(&mut fields_to_check.iter().map(|&b| b as char));
         };
 
         let total_chars = string_after_skip.chars().count();
@@ -652,14 +652,10 @@ pub fn are_hardlinks_to_same_file(_source: &Path, _target: &Path) -> bool {
 /// * `bool` - Returns `true` if the paths are hard links to the same file, and `false` otherwise.
 #[cfg(unix)]
 pub fn are_hardlinks_to_same_file(source: &Path, target: &Path) -> bool {
-    let source_metadata = match fs::symlink_metadata(source) {
-        Ok(metadata) => metadata,
-        Err(_) => return false,
-    };
-
-    let target_metadata = match fs::symlink_metadata(target) {
-        Ok(metadata) => metadata,
-        Err(_) => return false,
+    let (Ok(source_metadata), Ok(target_metadata)) =
+        (fs::symlink_metadata(source), fs::symlink_metadata(target))
+    else {
+        return false;
     };
 
     source_metadata.ino() == target_metadata.ino() && source_metadata.dev() == target_metadata.dev()
@@ -682,14 +678,10 @@ pub fn are_hardlinks_or_one_way_symlink_to_same_file(_source: &Path, _target: &Path) -> bool {
 /// * `bool` - Returns `true` if either of above conditions are true, and `false` otherwise.
 #[cfg(unix)]
 pub fn are_hardlinks_or_one_way_symlink_to_same_file(source: &Path, target: &Path) -> bool {
-    let source_metadata = match fs::metadata(source) {
-        Ok(metadata) => metadata,
-        Err(_) => return false,
-    };
-
-    let target_metadata = match fs::symlink_metadata(target) {
-        Ok(metadata) => metadata,
-        Err(_) => return false,
+    let (Ok(source_metadata), Ok(target_metadata)) =
+        (fs::metadata(source), fs::symlink_metadata(target))
+    else {
+        return false;
     };
 
     source_metadata.ino() == target_metadata.ino() && source_metadata.dev() == target_metadata.dev()
@@ -79,13 +79,10 @@ pub fn apply_xattrs<P: AsRef<Path>>(
 /// `true` if the file has extended attributes (indicating an ACL), `false` otherwise.
 pub fn has_acl<P: AsRef<Path>>(file: P) -> bool {
     // don't use exacl here, it is doing more getxattr call then needed
-    match xattr::list(file) {
-        Ok(acl) => {
-            // if we have extra attributes, we have an acl
-            acl.count() > 0
-        }
-        Err(_) => false,
-    }
+    xattr::list(file).is_ok_and(|acl| {
+        // if we have extra attributes, we have an acl
+        acl.count() > 0
+    })
 }
 
 /// Returns the permissions bits of a file or directory which has Access Control List (ACL) entries based on its
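
Result::is_ok_and (stable since Rust 1.70) expresses "succeeded and the predicate holds", with any Err collapsing to false, which is exactly what the removed match spelled out. A sketch:

    fn has_entries(res: Result<Vec<String>, std::io::Error>) -> bool {
        // true only when the call succeeded and the list is non-empty; Err => false.
        res.is_ok_and(|list| !list.is_empty())
    }

    fn main() {
        assert!(has_entries(Ok(vec!["user.acl".to_string()])));
        assert!(!has_entries(Ok(vec![])));
        assert!(!has_entries(Err(std::io::Error::new(std::io::ErrorKind::Other, "boom"))));
    }
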
@@ -132,7 +129,7 @@ pub fn get_acl_perm_bits_from_xattr<P: AsRef<Path>>(source: P) -> u32 {
 
             for entry in acl_entries.chunks_exact(4) {
                 // Third byte and fourth byte will be the perm bits
-                perm = (perm << 3) | entry[2] as u32 | entry[3] as u32;
+                perm = (perm << 3) | u32::from(entry[2]) | u32::from(entry[3]);
             }
             return perm;
         }
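
u32::from(entry[2]) is the lossless-widening spelling clippy's cast_lossless lint prefers over entry[2] as u32: the runtime behavior is identical for u8 -> u32, but From refuses to compile if the types ever change into a narrowing conversion. Tiny sketch:

    fn pack_perm(entry: &[u8]) -> u32 {
        let mut perm = 0u32;
        // Widening u8 -> u32; `from` can never silently truncate, unlike `as`.
        perm = (perm << 3) | u32::from(entry[2]) | u32::from(entry[3]);
        perm
    }

    fn main() {
        assert_eq!(pack_perm(&[0, 0, 0b101, 0b010]), 0b111);
    }
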
@@ -273,18 +273,15 @@ impl ChownExecutor {
     #[allow(clippy::cognitive_complexity)]
     fn traverse<P: AsRef<Path>>(&self, root: P) -> i32 {
         let path = root.as_ref();
-        let meta = match self.obtain_meta(path, self.dereference) {
-            Some(m) => m,
-            _ => {
-                if self.verbosity.level == VerbosityLevel::Verbose {
-                    println!(
-                        "failed to change ownership of {} to {}",
-                        path.quote(),
-                        self.raw_owner
-                    );
-                }
-                return 1;
-            }
+        let Some(meta) = self.obtain_meta(path, self.dereference) else {
+            if self.verbosity.level == VerbosityLevel::Verbose {
+                println!(
+                    "failed to change ownership of {} to {}",
+                    path.quote(),
+                    self.raw_owner
+                );
+            }
+            return 1;
         };
 
         if self.recursive
@@ -370,17 +367,15 @@ impl ChownExecutor {
                 Ok(entry) => entry,
             };
             let path = entry.path();
-            let meta = match self.obtain_meta(path, self.dereference) {
-                Some(m) => m,
-                _ => {
-                    ret = 1;
-                    if entry.file_type().is_dir() {
-                        // Instruct walkdir to skip this directory to avoid getting another error
-                        // when walkdir tries to query the children of this directory.
-                        iterator.skip_current_dir();
-                    }
-                    continue;
-                }
+            let Some(meta) = self.obtain_meta(path, self.dereference) else {
+                ret = 1;
+                if entry.file_type().is_dir() {
+                    // Instruct walkdir to skip this directory to avoid getting another error
+                    // when walkdir tries to query the children of this directory.
+                    iterator.skip_current_dir();
+                }
+                continue;
             };
 
             if self.preserve_root && is_root(path, self.traverse_symlinks == TraverseSymlinks::All)
@@ -425,24 +420,18 @@ impl ChownExecutor {
 
     fn obtain_meta<P: AsRef<Path>>(&self, path: P, follow: bool) -> Option<Metadata> {
         let path = path.as_ref();
-
-        let meta = get_metadata(path, follow);
-
-        match meta {
-            Err(e) => {
-                match self.verbosity.level {
-                    VerbosityLevel::Silent => (),
-                    _ => show_error!(
-                        "cannot {} {}: {}",
-                        if follow { "dereference" } else { "access" },
-                        path.quote(),
-                        strip_errno(&e)
-                    ),
-                }
-                None
-            }
-            Ok(meta) => Some(meta),
-        }
+        get_metadata(path, follow)
+            .inspect_err(|e| {
+                if self.verbosity.level != VerbosityLevel::Silent {
+                    show_error!(
+                        "cannot {} {}: {}",
+                        if follow { "dereference" } else { "access" },
+                        path.quote(),
+                        strip_errno(e)
+                    );
+                }
+            })
+            .ok()
     }
 
     #[inline]
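
inspect_err (Rust 1.76+) looks at the error for its side effect (logging) while passing the Result through untouched, and ok() then converts it to an Option, replacing the whole nested match. A reduced sketch that uses fs::symlink_metadata and eprintln! in place of the crate's get_metadata helper and show_error! macro:

    use std::fs::{self, Metadata};
    use std::path::Path;

    fn obtain_meta(path: &Path, verbose: bool) -> Option<Metadata> {
        fs::symlink_metadata(path)
            .inspect_err(|e| {
                // Side effect only; the Err value continues through unchanged.
                if verbose {
                    eprintln!("cannot access {}: {}", path.display(), e);
                }
            })
            .ok()
    }

    fn main() {
        println!("{:?}", obtain_meta(Path::new("/nonexistent"), true));
    }
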
@@ -199,16 +199,9 @@ impl<'parser> Parser<'parser> {
 
     /// Same as `parse()` but tries to return u64
     pub fn parse_u64(&self, size: &str) -> Result<u64, ParseSizeError> {
-        match self.parse(size) {
-            Ok(num_u128) => {
-                let num_u64 = match u64::try_from(num_u128) {
-                    Ok(n) => n,
-                    Err(_) => return Err(ParseSizeError::size_too_big(size)),
-                };
-                Ok(num_u64)
-            }
-            Err(e) => Err(e),
-        }
+        self.parse(size).and_then(|num_u128| {
+            u64::try_from(num_u128).map_err(|_| ParseSizeError::size_too_big(size))
+        })
     }
 
     /// Same as `parse_u64()`, except returns `u64::MAX` on overflow
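
The nested match turns into a plain Result chain: and_then runs the narrowing step only on Ok, and map_err rewrites the failure, so the Err(e) => Err(e) arm disappears entirely. A standalone sketch with String errors standing in for ParseSizeError:

    fn parse_u64(size: &str) -> Result<u64, String> {
        // Parse to u128 first, then narrow; either failure short-circuits.
        size.parse::<u128>()
            .map_err(|_| format!("invalid size {size}"))
            .and_then(|num_u128| {
                u64::try_from(num_u128).map_err(|_| format!("size {size} is too big"))
            })
    }

    fn main() {
        assert_eq!(parse_u64("42"), Ok(42));
        assert!(parse_u64("abc").is_err());
        assert!(parse_u64("99999999999999999999999999").is_err());
    }
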
@@ -49,9 +49,8 @@ pub fn from_str(string: &str) -> Result<Duration, String> {
     if len == 0 {
         return Err("empty string".to_owned());
     }
-    let slice = match string.get(..len - 1) {
-        Some(s) => s,
-        None => return Err(format!("invalid time interval {}", string.quote())),
+    let Some(slice) = string.get(..len - 1) else {
+        return Err(format!("invalid time interval {}", string.quote()));
     };
     let (numstr, times) = match string.chars().next_back().unwrap() {
         's' => (slice, 1),
@@ -950,7 +950,7 @@ mod tests_split_iterator {
                 | '*' | '?' | '[' | '#' | '˜' | '=' | '%' => {
                     special = true;
                 }
-                _ => continue,
+                _ => (),
             }
         }
 