
Fix a few clippy warnings

Vinzent Steinberg 2018-09-03 19:02:51 +02:00
parent 07b01a85f9
commit 4034a322a2
26 changed files with 201 additions and 220 deletions
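
Most of the changes below are mechanical applications of a few recurring clippy lints: dropping redundant 'static lifetimes from string constants, replacing len() comparisons with is_empty(), using struct field init shorthand, preferring byte/char literals over casts and single-character string arguments, rewriting match on a bool as if/else, and taking &[String]/&str parameters instead of &Vec<String>/&String. A condensed before/after sketch of these patterns (illustrative only; the names Options and run are made up and do not come from the repository):

static NAME: &str = "example"; // was: static NAME: &'static str = "example";

struct Options {
    verbose: bool,
    recursive: bool,
}

fn run(args: &[String]) -> i32 { // was: args: &Vec<String>
    // was: if args.len() < 1 { ... }
    if args.is_empty() {
        return 1;
    }

    // was: Options { verbose: verbose, recursive: recursive }
    let (verbose, recursive) = (true, false);
    let _opts = Options { verbose, recursive };

    // was: ... .all(|&b| b != '\n' as u8)
    let success = args[0].as_bytes().iter().all(|&b| b != b'\n');

    // was: match success { true => 0, false => 1 }
    if success { 0 } else { 1 }
}

fn main() {
    let args: Vec<String> = std::env::args().skip(1).collect();
    std::process::exit(run(&args));
}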

View file

@@ -44,7 +44,7 @@ pub fn uumain(args: Vec<String>) -> i32 {
         .parse(args);

     // too few arguments
-    if matches.free.len() < 1 {
+    if matches.free.is_empty() {
         crash!(
             1,
             "{0}: {1}\nTry '{0} --help' for more information.",

View file

@@ -174,32 +174,23 @@ pub fn uumain(args: Vec<String>) -> i32 {
     let success = if can_write_fast {
         write_fast(files).is_ok()
     } else {
-        let tab = match show_tabs {
-            true => "^I",
-            false => "\t",
-        }.to_owned();
+        let tab = if show_tabs { "^I" } else { "\t" }.to_owned();

-        let end_of_line = match show_ends {
-            true => "$\n",
-            false => "\n",
-        }.to_owned();
+        let end_of_line = if show_ends { "$\n" } else { "\n" }.to_owned();

         let options = OutputOptions {
-            end_of_line: end_of_line,
+            end_of_line,
             number: number_mode,
-            show_nonprint: show_nonprint,
-            show_tabs: show_tabs,
-            squeeze_blank: squeeze_blank,
-            tab: tab,
+            show_nonprint,
+            show_tabs,
+            squeeze_blank,
+            tab,
         };

         write_lines(files, &options).is_ok()
     };

-    match success {
-        true => 0,
-        false => 1,
-    }
+    if success { 0 } else { 1 }
 }

 /// Classifies the `InputType` of file at `path` if possible
@@ -363,7 +354,7 @@ fn write_file_lines(file: &str, options: &OutputOptions, state: &mut OutputState
     let mut pos = 0;
     while pos < n {
         // skip empty line_number enumerating them if needed
-        if in_buf[pos] == '\n' as u8 {
+        if in_buf[pos] == b'\n' {
             if !state.at_line_start || !options.squeeze_blank || !one_blank_kept {
                 one_blank_kept = true;
                 if state.at_line_start && options.number == NumberingMode::NumberAll {
@@ -415,7 +406,7 @@ fn write_file_lines(file: &str, options: &OutputOptions, state: &mut OutputState
 // Write all symbols till end of line or end of buffer is reached
 // Return the (number of written symbols + 1) or 0 if the end of buffer is reached
 fn write_to_end<W: Write>(in_buf: &[u8], writer: &mut W) -> usize {
-    match in_buf.iter().position(|c| *c == '\n' as u8) {
+    match in_buf.iter().position(|c| *c == b'\n') {
         Some(p) => {
             writer.write_all(&in_buf[..p]).unwrap();
             p + 1
@@ -431,14 +422,14 @@ fn write_tab_to_end<W: Write>(mut in_buf: &[u8], writer: &mut W) -> usize {
     loop {
         match in_buf
             .iter()
-            .position(|c| *c == '\n' as u8 || *c == '\t' as u8)
+            .position(|c| *c == b'\n' || *c == b'\t')
         {
             Some(p) => {
                 writer.write_all(&in_buf[..p]).unwrap();
-                if in_buf[p] == '\n' as u8 {
+                if in_buf[p] == b'\n' {
                     return p + 1;
                 } else {
-                    writer.write_all("^I".as_bytes()).unwrap();
+                    writer.write_all(b"^I").unwrap();
                     in_buf = &in_buf[p + 1..];
                 }
             }
@@ -454,17 +445,17 @@ fn write_nonprint_to_end<W: Write>(in_buf: &[u8], writer: &mut W, tab: &[u8]) ->
     let mut count = 0;

     for byte in in_buf.iter().map(|c| *c) {
-        if byte == '\n' as u8 {
+        if byte == b'\n' {
             break;
         }
         match byte {
             9 => writer.write_all(tab),
-            0...8 | 10...31 => writer.write_all(&['^' as u8, byte + 64]),
+            0...8 | 10...31 => writer.write_all(&[b'^', byte + 64]),
             32...126 => writer.write_all(&[byte]),
-            127 => writer.write_all(&['^' as u8, byte - 64]),
-            128...159 => writer.write_all(&['M' as u8, '-' as u8, '^' as u8, byte - 64]),
-            160...254 => writer.write_all(&['M' as u8, '-' as u8, byte - 128]),
-            _ => writer.write_all(&['M' as u8, '-' as u8, '^' as u8, 63]),
+            127 => writer.write_all(&[b'^', byte - 64]),
+            128...159 => writer.write_all(&[b'M', b'-', b'^', byte - 64]),
+            160...254 => writer.write_all(&[b'M', b'-', byte - 128]),
+            _ => writer.write_all(&[b'M', b'-', b'^', 63]),
         }.unwrap();
         count += 1;
     }
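
For reference, the nonprinting-byte display that this last hunk touches (cat -v style caret and meta notation) can be written as a standalone mapping. A minimal sketch, illustrative only, using the modern ..= range syntax instead of the deprecated ... seen in the 2018 code:

// Mirrors the arms of write_nonprint_to_end above.
fn visible(byte: u8) -> String {
    match byte {
        b'\t' => "^I".to_string(),                              // the real code writes the caller-supplied `tab` bytes here
        0..=8 | 10..=31 => format!("^{}", (byte + 64) as char), // control chars: ^@ .. ^_
        32..=126 => (byte as char).to_string(),                 // printable ASCII passes through
        127 => "^?".to_string(),                                // DEL
        128..=159 => format!("M-^{}", (byte - 64) as char),     // high control characters
        160..=254 => format!("M-{}", (byte - 128) as char),     // high printable characters
        _ => "M-^?".to_string(),                                // 255
    }
}

fn main() {
    assert_eq!(visible(0x01), "^A");
    assert_eq!(visible(0xFE), "M-~");
}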

View file

@@ -117,7 +117,7 @@ pub fn uumain(args: Vec<String>) -> i32 {
         Verbosity::Normal
     };

-    if matches.free.len() < 1 {
+    if matches.free.is_empty() {
         disp_err!("missing operand");
         return 1;
     } else if matches.free.len() < 2 && !matches.opt_present("reference") {
@@ -153,13 +153,13 @@ pub fn uumain(args: Vec<String>) -> i32 {
     }

     let executor = Chgrper {
-        bit_flag: bit_flag,
-        dest_gid: dest_gid,
-        verbosity: verbosity,
-        recursive: recursive,
+        bit_flag,
+        dest_gid,
+        verbosity,
+        recursive,
         dereference: derefer != 0,
-        preserve_root: preserve_root,
-        files: files,
+        preserve_root,
+        files,
     };
     executor.exec()
 }

View file

@@ -24,10 +24,10 @@ use walker::Walker;
 use uucore::mode;
 use uucore::fs::display_permissions_unix;

-const NAME: &'static str = "chmod";
-static SUMMARY: &'static str = "Change the mode of each FILE to MODE.
+const NAME: &str = "chmod";
+static SUMMARY: &str = "Change the mode of each FILE to MODE.
 With --reference, change the mode of each FILE to that of RFILE.";
-static LONG_HELP: &'static str = "
+static LONG_HELP: &str = "
 Each MODE is of the form '[ugoa]*([-+=]([rwxXst]*|[ugo]))+|[-+=]?[0-7]+'.
 ";
@@ -81,13 +81,13 @@ pub fn uumain(mut args: Vec<String>) -> i32 {
         None
     };
     let chmoder = Chmoder {
-        changes: changes,
-        quiet: quiet,
-        verbose: verbose,
-        preserve_root: preserve_root,
-        recursive: recursive,
-        fmode: fmode,
-        cmode: cmode,
+        changes,
+        quiet,
+        verbose,
+        preserve_root,
+        recursive,
+        fmode,
+        cmode,
     };
     match chmoder.chmod(matches.free) {
         Ok(()) => {}

View file

@@ -136,7 +136,7 @@ pub fn uumain(args: Vec<String>) -> i32 {
         IfFrom::All
     };

-    if matches.free.len() < 1 {
+    if matches.free.is_empty() {
         disp_err!("missing operand");
         return 1;
     } else if matches.free.len() < 2 && !matches.opt_present("reference") {
@@ -172,15 +172,15 @@ pub fn uumain(args: Vec<String>) -> i32 {
     let mut files = matches.free;
     files.remove(0);
     let executor = Chowner {
-        bit_flag: bit_flag,
-        dest_uid: dest_uid,
-        dest_gid: dest_gid,
-        verbosity: verbosity,
-        recursive: recursive,
+        bit_flag,
+        dest_uid,
+        dest_gid,
+        verbosity,
+        recursive,
         dereference: derefer != 0,
-        filter: filter,
-        preserve_root: preserve_root,
-        files: files,
+        filter,
+        preserve_root,
+        files,
     };
     executor.exec()
 }

View file

@@ -140,7 +140,7 @@ fn enter_chroot(root: &Path) {
     let root_str = root.display();
     std::env::set_current_dir(root).unwrap();
     let err = unsafe {
-        chroot(CString::new(".".as_bytes())
+        chroot(CString::new(".")
             .unwrap()
             .as_bytes_with_nul()
             .as_ptr() as *const libc::c_char)

View file

@@ -20,9 +20,9 @@ use std::path::Path;
 include!(concat!(env!("OUT_DIR"), "/crc_table.rs"));

-static SYNTAX: &'static str = "[OPTIONS] [FILE]...";
-static SUMMARY: &'static str = "Print CRC and size for each file";
-static LONG_HELP: &'static str = "";
+static SYNTAX: &str = "[OPTIONS] [FILE]...";
+static SUMMARY: &str = "Print CRC and size for each file";
+static LONG_HELP: &str = "";

 #[inline]
 fn crc_update(crc: u32, input: u8) -> u32 {

View file

@@ -19,9 +19,9 @@ use std::fs::File;
 use std::io::{self, stdin, BufRead, BufReader, Stdin};
 use std::path::Path;

-static SYNTAX: &'static str = "[OPTIONS] FILE1 FILE2";
-static SUMMARY: &'static str = "Compare sorted files line by line";
-static LONG_HELP: &'static str = "";
+static SYNTAX: &str = "[OPTIONS] FILE1 FILE2";
+static SUMMARY: &str = "Compare sorted files line by line";
+static LONG_HELP: &str = "";

 fn mkdelim(col: usize, opts: &getopts::Matches) -> String {
     let mut s = String::new();

View file

@@ -676,14 +676,14 @@ impl TargetType {
 fn parse_path_args(path_args: &[String], options: &Options) -> CopyResult<(Vec<Source>, Target)> {
     let mut paths = path_args.iter().map(PathBuf::from).collect::<Vec<_>>();

-    if paths.len() < 1 {
+    if paths.is_empty() {
         // No files specified
         return Err("missing file operand".into());
     }

     // Return an error if the user requested to copy more than one
     // file source to a file target
-    if options.no_target_dir && !options.target_dir.is_some() && paths.len() > 2 {
+    if options.no_target_dir && options.target_dir.is_none() && paths.len() > 2 {
         return Err(format!("extra operand {:?}", paths[2]).into());
     }

View file

@@ -43,7 +43,7 @@ impl<R: Read> ByteReader<R> {
     pub fn new(read: R, newline_char: u8) -> ByteReader<R> {
         ByteReader {
             inner: BufReader::with_capacity(4096, read),
-            newline_char: newline_char,
+            newline_char,
         }
     }
 }
@@ -75,7 +75,7 @@ impl<R: Read> ByteReader<R> {
         // need filled_buf to go out of scope
         let filled_buf = match self.fill_buf() {
             Ok(b) => {
-                if b.len() == 0 {
+                if b.is_empty() {
                     return bytes_consumed;
                 } else {
                     b
@@ -137,9 +137,8 @@ impl<R: Read> self::Bytes::Select for ByteReader<R> {
                 },
             };

-            match out {
-                Some(out) => crash_if_err!(1, out.write_all(&buffer[0..consume_val])),
-                None => (),
+            if let Some(out) = out {
+                crash_if_err!(1, out.write_all(&buffer[0..consume_val]));
             }

             (res, consume_val)
         };

View file

@@ -23,11 +23,11 @@ mod buffer;
 mod ranges;
 mod searcher;

-static SYNTAX: &'static str =
+static SYNTAX: &str =
     "[-d] [-s] [-z] [--output-delimiter] ((-f|-b|-c) {{sequence}}) {{sourcefile}}+";
-static SUMMARY: &'static str =
+static SUMMARY: &str =
     "Prints specified byte or field columns from each line of stdin or the input files";
-static LONG_HELP: &'static str = "
+static LONG_HELP: &str = "
 Each call must specify a mode (what to use for columns),
 a sequence (which columns to print), and provide a data source
@@ -169,14 +169,11 @@ fn cut_bytes<R: Read>(reader: R, ranges: &[Range], opts: &Options) -> i32 {
                 }
             }

-            match opts.out_delim {
-                Some(ref delim) => {
-                    if print_delim {
-                        crash_if_err!(1, out.write_all(delim.as_bytes()));
-                    }
-                    print_delim = true;
-                }
-                None => (),
+            if let Some(ref delim) = opts.out_delim {
+                if print_delim {
+                    crash_if_err!(1, out.write_all(delim.as_bytes()));
+                }
+                print_delim = true;
             }

             // write out from low to high
@@ -293,18 +290,15 @@ fn cut_fields_delimiter<R: Read>(
 fn cut_fields<R: Read>(reader: R, ranges: &[Range], opts: &FieldOptions) -> i32 {
     let newline_char = if opts.zero_terminated { b'\0' } else { b'\n' };

-    match opts.out_delimeter {
-        Some(ref o_delim) => {
-            return cut_fields_delimiter(
-                reader,
-                ranges,
-                &opts.delimiter,
-                opts.only_delimited,
-                newline_char,
-                o_delim,
-            )
-        }
-        None => (),
+    if let Some(ref o_delim) = opts.out_delimeter {
+        return cut_fields_delimiter(
+            reader,
+            ranges,
+            &opts.delimiter,
+            opts.only_delimited,
+            newline_char,
+            o_delim,
+        );
     }

     let mut buf_in = BufReader::new(reader);
@@ -348,10 +342,8 @@ fn cut_fields<R: Read>(reader: R, ranges: &[Range], opts: &FieldOptions) -> i32
                 };
             }

-            if print_delim {
-                if low_idx >= opts.delimiter.as_bytes().len() {
-                    low_idx -= opts.delimiter.as_bytes().len();
-                }
+            if print_delim && low_idx >= opts.delimiter.as_bytes().len() {
+                low_idx -= opts.delimiter.as_bytes().len();
             }

             match delim_search.nth(high - low) {
@@ -509,8 +501,8 @@ pub fn uumain(args: Vec<String>) -> i32 {
             FieldOptions {
                 delimiter: delim,
                 out_delimeter: out_delim,
-                only_delimited: only_delimited,
-                zero_terminated: zero_terminated,
+                only_delimited,
+                zero_terminated,
             },
         ))
     }
@@ -520,8 +512,8 @@ pub fn uumain(args: Vec<String>) -> i32 {
             FieldOptions {
                 delimiter: "\t".to_owned(),
                 out_delimeter: out_delim,
-                only_delimited: only_delimited,
-                zero_terminated: zero_terminated,
+                only_delimited,
+                zero_terminated,
             },
         )),
     }

View file

@@ -39,11 +39,11 @@ impl FromStr for Range {
                     Err(inval)
                 }
             }
-            (Some(n), Some(m)) if m.len() == 0 => {
+            (Some(n), Some(m)) if m.is_empty() => {
                 if let Ok(low) = n.parse::<usize>() {
                     if low > 0 {
                         Ok(Range {
-                            low: low,
+                            low,
                             high: MAX - 1,
                         })
                     } else {
@@ -56,7 +56,7 @@ impl FromStr for Range {
             (Some(n), Some(m)) if n.len() == 0 => {
                 if let Ok(high) = m.parse::<usize>() {
                     if high > 0 {
-                        Ok(Range { low: 1, high: high })
+                        Ok(Range { low: 1, high })
                     } else {
                         Err(field)
                     }
@@ -68,8 +68,8 @@ impl FromStr for Range {
                 (Ok(low), Ok(high)) => {
                     if low > 0 && low <= high {
                         Ok(Range {
-                            low: low,
-                            high: high,
+                            low,
+                            high,
                         })
                     } else if low == 0 {
                         Err(field)
@@ -118,7 +118,7 @@ pub fn complement(ranges: &[Range]) -> Vec<Range> {
     let mut complements = Vec::with_capacity(ranges.len() + 1);

-    if ranges.len() > 0 && ranges[0].low > 1 {
+    if !ranges.is_empty() && ranges[0].low > 1 {
         complements.push(Range {
             low: 1,
             high: ranges[0].low - 1,

View file

@@ -17,8 +17,8 @@ pub struct Searcher<'a> {
 impl<'a> Searcher<'a> {
     pub fn new(haystack: &'a [u8], needle: &'a [u8]) -> Searcher<'a> {
         Searcher {
-            haystack: haystack,
-            needle: needle,
+            haystack,
+            needle,
             position: 0,
         }
     }

View file

@@ -21,24 +21,24 @@ use std::io::{BufRead, BufReader};
 use std::path::PathBuf;

 // Options
-const DATE: &'static str = "date";
-const HOURS: &'static str = "hours";
-const MINUTES: &'static str = "minutes";
-const SECONDS: &'static str = "seconds";
-const NS: &'static str = "ns";
+const DATE: &str = "date";
+const HOURS: &str = "hours";
+const MINUTES: &str = "minutes";
+const SECONDS: &str = "seconds";
+const NS: &str = "ns";

 // Help strings
-static ISO_8601_HELP_STRING: &'static str = "output date/time in ISO 8601 format.
+static ISO_8601_HELP_STRING: &str = "output date/time in ISO 8601 format.
 FMT='date' for date only (the default),
 'hours', 'minutes', 'seconds', or 'ns'
 for date and time to the indicated precision.
 Example: 2006-08-14T02:34:56-06:00";

-static RFC_2822_HELP_STRING: &'static str = "output date and time in RFC 2822 format.
+static RFC_2822_HELP_STRING: &str = "output date and time in RFC 2822 format.
 Example: Mon, 14 Aug 2006 02:34:56 -0600";

-static RFC_3339_HELP_STRING: &'static str = "output date/time in RFC 3339 format.
+static RFC_3339_HELP_STRING: &str = "output date/time in RFC 3339 format.
 FMT='date', 'seconds', or 'ns'
 for date and time to the indicated precision.
 Example: 2006-08-14 02:34:56-06:00";
@@ -119,15 +119,12 @@ pub fn uumain(args: Vec<String>) -> i32 {
     let file: File;

     // Get the current time, either in the local time zone or UTC.
-    let now: DateTime<FixedOffset> = match settings.utc {
-        true => {
-            let now = Utc::now();
-            now.with_timezone(&now.offset().fix())
-        }
-        false => {
-            let now = Local::now();
-            now.with_timezone(now.offset())
-        }
+    let now: DateTime<FixedOffset> = if settings.utc {
+        let now = Utc::now();
+        now.with_timezone(&now.offset().fix())
+    } else {
+        let now = Local::now();
+        now.with_timezone(now.offset())
     };

     /// Parse a `String` into a `DateTime`.
@@ -198,10 +195,11 @@ fn parse_cli(args: Vec<String>) -> Settings {
             possible_value[date seconds ns]
             RFC_3339_HELP_STRING)
         (@arg custom_format: +takes_value {
-            |s| match s.starts_with("+") {
-                true => Ok(()),
-                false => Err(String::from("Date formats must start with a '+' character"))
+            |s| if s.starts_with('+') {
+                Ok(())
+            } else {
+                Err(String::from("Date formats must start with a '+' character"))
             }
         }))

         (@arg debug: --debug
@@ -245,8 +243,8 @@ fn parse_cli(args: Vec<String>) -> Settings {

     Settings {
         utc: matches.is_present("utc"),
-        format: format,
-        date_source: date_source,
+        format,
+        date_source,
         // TODO: Handle this option:
         set_to: None,
     }
@@ -255,18 +253,18 @@
 /// Return the appropriate format string for the given settings.
 fn make_format_string(settings: &Settings) -> &str {
     match settings.format {
-        Format::Iso8601(ref fmt) => match fmt {
-            &Iso8601Format::Date => "%F",
-            &Iso8601Format::Hours => "%FT%H%:z",
-            &Iso8601Format::Minutes => "%FT%H:%M%:z",
-            &Iso8601Format::Seconds => "%FT%T%:z",
-            &Iso8601Format::Ns => "%FT%T,%f%:z",
+        Format::Iso8601(ref fmt) => match *fmt {
+            Iso8601Format::Date => "%F",
+            Iso8601Format::Hours => "%FT%H%:z",
+            Iso8601Format::Minutes => "%FT%H:%M%:z",
+            Iso8601Format::Seconds => "%FT%T%:z",
+            Iso8601Format::Ns => "%FT%T,%f%:z",
         },
         Format::Rfc2822 => "%a, %d %h %Y %T %z",
-        Format::Rfc3339(ref fmt) => match fmt {
-            &Rfc3339Format::Date => "%F",
-            &Rfc3339Format::Seconds => "%F %T%:z",
-            &Rfc3339Format::Ns => "%F %T.%f%:z",
+        Format::Rfc3339(ref fmt) => match *fmt {
+            Rfc3339Format::Date => "%F",
+            Rfc3339Format::Seconds => "%F %T%:z",
+            Rfc3339Format::Ns => "%F %T.%f%:z",
         },
         Format::Custom(ref fmt) => fmt,
         Format::Default => "%c",
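
The strings returned by make_format_string are chrono strftime-style specifiers that uumain later hands to format(). A small usage sketch, assuming the chrono 0.4 API this file builds on:

extern crate chrono; // chrono 0.4

use chrono::{DateTime, FixedOffset, Offset, Utc};

fn main() {
    // Normalize to a fixed-offset timestamp, as the rewritten utc branch above does,
    // then apply two of the format strings from make_format_string.
    let now: DateTime<FixedOffset> = {
        let now = Utc::now();
        now.with_timezone(&now.offset().fix())
    };
    println!("{}", now.format("%FT%T%:z"));           // Iso8601Format::Seconds
    println!("{}", now.format("%a, %d %h %Y %T %z")); // Format::Rfc2822
}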

View file

@@ -1,4 +1,4 @@
-pub const INTERNAL_DB: &'static str =
+pub const INTERNAL_DB: &str =
     r#"# Configuration file for dircolors, a utility to help you set the
 # LS_COLORS environment variable used by GNU ls with the --color option.
 # Copyright (C) 1996-2016 Free Software Foundation, Inc.

View file

@@ -18,9 +18,9 @@ use std::io::{BufRead, BufReader};
 use std::borrow::Borrow;
 use std::env;

-static SYNTAX: &'static str = "[OPTION]... [FILE]";
-static SUMMARY: &'static str = "Output commands to set the LS_COLORS environment variable.";
-static LONG_HELP: &'static str = "
+static SYNTAX: &str = "[OPTION]... [FILE]";
+static SUMMARY: &str = "Output commands to set the LS_COLORS environment variable.";
+static LONG_HELP: &str = "
 If FILE is specified, read it to determine which colors to use for which
 file types and extensions. Otherwise, a precompiled database is used.
 For details on the format of these files, run 'dircolors --print-database'
@@ -252,7 +252,7 @@ where
     table.insert("multihardlink", "mh");
     table.insert("clrtoeol", "cl");

-    let term = env::var("TERM").unwrap_or("none".to_owned());
+    let term = env::var("TERM").unwrap_or_else(|_| "none".to_owned());
     let term = term.as_str();

     let mut state = ParseState::Global;
@@ -286,18 +286,16 @@ where
             state = ParseState::Continue;
         }
         if state != ParseState::Pass {
-            if key.starts_with(".") {
+            if key.starts_with('.') {
                 result.push_str(format!("*{}={}:", key, val).as_str());
-            } else if key.starts_with("*") {
+            } else if key.starts_with('*') {
                 result.push_str(format!("{}={}:", key, val).as_str());
             } else if lower == "options" || lower == "color" || lower == "eightbit" {
                 // Slackware only. Ignore
-            } else {
-                if let Some(s) = table.get(lower.as_str()) {
-                    result.push_str(format!("{}={}:", s, val).as_str());
-                } else {
-                    return Err(format!("{}:{}: unrecognized keyword {}", fp, num, key));
-                }
+            } else if let Some(s) = table.get(lower.as_str()) {
+                result.push_str(format!("{}={}:", s, val).as_str());
+            } else {
+                return Err(format!("{}:{}: unrecognized keyword {}", fp, num, key));
             }
         }
     }

View file

@@ -25,9 +25,9 @@ use std::os::unix::fs::MetadataExt;
 use std::path::PathBuf;
 use time::Timespec;

-const NAME: &'static str = "du";
-const SUMMARY: &'static str = "estimate file space usage";
-const LONG_HELP: &'static str = "
+const NAME: &str = "du";
+const SUMMARY: &str = "estimate file space usage";
+const LONG_HELP: &str = "
 Display values are in units of the first available SIZE from
 --block-size, and the DU_BLOCK_SIZE, BLOCK_SIZE and BLOCKSIZE environ
 ment variables. Otherwise, units default to 1024 bytes (or 512 if
@@ -65,7 +65,7 @@ impl Stat {
     fn new(path: PathBuf) -> Result<Stat> {
         let metadata = fs::symlink_metadata(&path)?;
         Ok(Stat {
-            path: path,
+            path,
             is_dir: metadata.is_dir(),
             size: metadata.len(),
             blocks: metadata.blocks() as u64,
@@ -110,9 +110,9 @@ fn unit_string_to_number(s: &str) -> Option<u64> {
 }

 fn translate_to_pure_number(s: &Option<String>) -> Option<u64> {
-    match s {
-        &Some(ref s) => unit_string_to_number(s),
-        &None => None,
+    match *s {
+        Some(ref s) => unit_string_to_number(s),
+        None => None,
     }
 }
@@ -165,7 +165,7 @@ fn du(
         }
     };

-    for f in read.into_iter() {
+    for f in read {
         match f {
             Ok(entry) => {
                 match Stat::new(entry.path()) {
@@ -319,7 +319,7 @@ pub fn uumain(args: Vec<String>) -> i32 {
     let options = Options {
         all: matches.opt_present("all"),
         program_name: NAME.to_owned(),
-        max_depth: max_depth,
+        max_depth,
         total: matches.opt_present("total"),
         separate_dirs: matches.opt_present("S"),
     };
@@ -377,13 +377,13 @@ Try '{} --help' for more information.",
     let line_separator = if matches.opt_present("0") { "\0" } else { "\n" };

     let mut grand_total = 0;
-    for path_str in strs.into_iter() {
+    for path_str in strs {
         let path = PathBuf::from(&path_str);
         match Stat::new(path) {
             Ok(stat) => {
                 let mut inodes: HashSet<u64> = HashSet::new();
-                let iter = du(stat, &options, 0, &mut inodes).into_iter();
+                let iter = du(stat, &options, 0, &mut inodes);
                 let (_, len) = iter.size_hint();
                 let len = len.unwrap();

                 for (index, stat) in iter.enumerate() {
@@ -417,11 +417,11 @@ Try '{} --help' for more information.",
                                     }
                                     None => stat.modified,
                                 };
-                                ((time / 1000) as i64, (time % 1000 * 1000000) as i32)
+                                ((time / 1000) as i64, (time % 1000 * 1_000_000) as i32)
                             };
                             time::at(Timespec::new(secs, nsecs))
                         };
-                        if !summarize || (summarize && index == len - 1) {
+                        if !summarize || index == len - 1 {
                             let time_str = tm.strftime(time_format_str).unwrap();
                             print!(
                                 "{}\t{}\t{}{}",
@@ -431,15 +431,13 @@ Try '{} --help' for more information.",
                                 line_separator
                             );
                         }
-                    } else {
-                        if !summarize || (summarize && index == len - 1) {
-                            print!(
-                                "{}\t{}{}",
-                                convert_size(size),
-                                stat.path.display(),
-                                line_separator
-                            );
-                        }
+                    } else if !summarize || index == len - 1 {
+                        print!(
+                            "{}\t{}{}",
+                            convert_size(size),
+                            stat.path.display(),
+                            line_separator
+                        );
                     }
                     if options.total && index == (len - 1) {
                         // The last element will be the total size of the the path under

src/env/env.rs
View file

@@ -147,7 +147,7 @@ pub fn uumain(args: Vec<String>) -> i32 {
             }
         } else {
             // is it a NAME=VALUE like opt ?
-            let mut sp = opt.splitn(2, "=");
+            let mut sp = opt.splitn(2, '=');
             let name = sp.next();
             let value = sp.next();
@@ -187,7 +187,7 @@ pub fn uumain(args: Vec<String>) -> i32 {
         env::set_var(name, val);
     }

-    if opts.program.len() >= 1 {
+    if !opts.program.is_empty() {
         let prog = opts.program[0].clone();
         let args = &opts.program[1..];
         match Command::new(prog).args(args).status() {

View file

@@ -92,11 +92,11 @@ impl Options {
         };

         Options {
-            files: files,
-            tabstops: tabstops,
-            tspaces: tspaces,
-            iflag: iflag,
-            uflag: uflag,
+            files,
+            tabstops,
+            tspaces,
+            iflag,
+            uflag,
         }
     }
 }

View file

@@ -16,8 +16,8 @@ extern crate uucore;
 mod tokens;
 mod syntax_tree;

-static NAME: &'static str = "expr";
-static VERSION: &'static str = env!("CARGO_PKG_VERSION");
+static NAME: &str = "expr";
+static VERSION: &str = env!("CARGO_PKG_VERSION");

 pub fn uumain(args: Vec<String>) -> i32 {
     // For expr utility we do not want getopts.
@@ -35,13 +35,13 @@ pub fn uumain(args: Vec<String>) -> i32 {
     }
 }

-fn process_expr(token_strings: &Vec<String>) -> Result<String, String> {
+fn process_expr(token_strings: &[String]) -> Result<String, String> {
     let maybe_tokens = tokens::strings_to_tokens(&token_strings);
     let maybe_ast = syntax_tree::tokens_to_ast(maybe_tokens);
     evaluate_ast(maybe_ast)
 }

-fn print_expr_ok(expr_result: &String) -> i32 {
+fn print_expr_ok(expr_result: &str) -> i32 {
     println!("{}", expr_result);
     if expr_result == "0" || expr_result == "" {
         1
@@ -50,7 +50,7 @@ fn print_expr_ok(expr_result: &String) -> i32 {
     }
 }

-fn print_expr_error(expr_error: &String) -> ! {
+fn print_expr_error(expr_error: &str) -> ! {
     crash!(2, "{}", expr_error)
 }
@@ -62,7 +62,7 @@ fn evaluate_ast(maybe_ast: Result<Box<syntax_tree::ASTNode>, String>) -> Result<
     }
 }

-fn maybe_handle_help_or_version(args: &Vec<String>) -> bool {
+fn maybe_handle_help_or_version(args: &[String]) -> bool {
     if args.len() == 2 {
         if args[1] == "--help" {
             print_help();

View file

@@ -66,17 +66,17 @@ impl ASTNode {
         }
     }

-    fn new_node(token_idx: usize, op_type: &String, operands: OperandsList) -> Box<ASTNode> {
+    fn new_node(token_idx: usize, op_type: &str, operands: OperandsList) -> Box<ASTNode> {
         Box::new(ASTNode::Node {
             token_idx: token_idx,
-            op_type: op_type.clone(),
-            operands: operands,
+            op_type: op_type.into(),
+            operands,
         })
     }
-    fn new_leaf(token_idx: usize, value: &String) -> Box<ASTNode> {
+    fn new_leaf(token_idx: usize, value: &str) -> Box<ASTNode> {
         Box::new(ASTNode::Leaf {
-            token_idx: token_idx,
-            value: value.clone(),
+            token_idx,
+            value: value.into(),
         })
     }
     pub fn evaluate(&self) -> Result<String, String> {
@@ -252,7 +252,7 @@ fn ast_from_rpn(rpn: &mut TokenStack) -> Result<Box<ASTNode>, String> {
 }
 fn maybe_ast_node(
     token_idx: usize,
-    op_type: &String,
+    op_type: &str,
     arity: usize,
     rpn: &mut TokenStack,
 ) -> Result<Box<ASTNode>, String> {
@@ -351,7 +351,10 @@ fn push_op_to_stack(
     match op_stack.last() {
         None => return Ok(op_stack.push((token_idx, token.clone()))),

-        Some(&(_, Token::ParOpen)) => return Ok(op_stack.push((token_idx, token.clone()))),
+        Some(&(_, Token::ParOpen)) => {
+            op_stack.push((token_idx, token.clone()));
+            return Ok(());
+        }

         Some(&(
             _,
@@ -362,11 +365,13 @@
         )) => if la && prev_prec >= prec || !la && prev_prec > prec {
             out_stack.push(op_stack.pop().unwrap())
         } else {
-            return Ok(op_stack.push((token_idx, token.clone())));
+            op_stack.push((token_idx, token.clone()));
+            return Ok(());
         },

         Some(&(_, Token::PrefixOp { .. })) => {
-            return Ok(op_stack.push((token_idx, token.clone())))
+            op_stack.push((token_idx, token.clone()));
+            return Ok(());
         }

         Some(_) => panic!("Non-operator on op_stack"),
@@ -397,13 +402,13 @@ fn checked_binop<F: Fn() -> Option<T>, T>(cb: F, op: &str) -> Result<T, String>
     }
 }

-fn infix_operator_two_ints<F>(f: F, values: &Vec<String>) -> Result<String, String>
+fn infix_operator_two_ints<F>(f: F, values: &[String]) -> Result<String, String>
 where
     F: Fn(i64, i64) -> Result<i64, String>,
 {
     assert!(values.len() == 2);
-    if let Some(left) = values[0].parse::<i64>().ok() {
-        if let Some(right) = values[1].parse::<i64>().ok() {
+    if let Ok(left) = values[0].parse::<i64>() {
+        if let Ok(right) = values[1].parse::<i64>() {
             return match f(left, right) {
                 Ok(result) => Ok(result.to_string()),
                 Err(reason) => Err(reason),
@@ -416,7 +421,7 @@ where
 fn infix_operator_two_ints_or_two_strings<FI, FS>(
     fi: FI,
     fs: FS,
-    values: &Vec<String>,
+    values: &[String],
 ) -> Result<String, String>
 where
     FI: Fn(i64, i64) -> Result<i64, String>,
@@ -435,7 +440,7 @@ where
     }
 }

-fn infix_operator_or(values: &Vec<String>) -> Result<String, String> {
+fn infix_operator_or(values: &[String]) -> Result<String, String> {
     assert!(values.len() == 2);
     if value_as_bool(&values[0]) {
         Ok(values[0].clone())
@@ -444,7 +449,7 @@ fn infix_operator_or(values: &Vec<String>) -> Result<String, String> {
     }
 }

-fn infix_operator_and(values: &Vec<String>) -> Result<String, String> {
+fn infix_operator_and(values: &[String]) -> Result<String, String> {
     if value_as_bool(&values[0]) && value_as_bool(&values[1]) {
         Ok(values[0].clone())
     } else {
@@ -452,7 +457,7 @@ fn infix_operator_and(values: &Vec<String>) -> Result<String, String> {
     }
 }

-fn operator_match(values: &Vec<String>) -> Result<String, String> {
+fn operator_match(values: &[String]) -> Result<String, String> {
     assert!(values.len() == 2);
     let re = match Regex::with_options(&values[1], RegexOptions::REGEX_OPTION_NONE, Syntax::grep())
     {
@@ -472,12 +477,12 @@ fn operator_match(values: &Vec<String>) -> Result<String, String> {
     }
 }

-fn prefix_operator_length(values: &Vec<String>) -> Result<String, String> {
+fn prefix_operator_length(values: &[String]) -> Result<String, String> {
     assert!(values.len() == 1);
     Ok(values[0].len().to_string())
 }

-fn prefix_operator_index(values: &Vec<String>) -> Result<String, String> {
+fn prefix_operator_index(values: &[String]) -> Result<String, String> {
     assert!(values.len() == 2);
     let haystack = &values[0];
     let needles = &values[1];
@@ -495,7 +500,7 @@ fn prefix_operator_index(values: &Vec<String>) -> Result<String, String> {
     Ok("0".to_string())
 }

-fn prefix_operator_substr(values: &Vec<String>) -> Result<String, String> {
+fn prefix_operator_substr(values: &[String]) -> Result<String, String> {
     assert!(values.len() == 3);
     let subj = &values[0];
     let mut idx = match values[1].parse::<i64>() {
@@ -541,7 +546,7 @@ fn bool_as_string(b: bool) -> String {
     }
 }

 fn value_as_bool(s: &str) -> bool {
-    if s.len() == 0 {
+    if s.is_empty() {
         return false;
     }
     match s.parse::<i64>() {

View file

@@ -39,15 +39,15 @@ pub enum Token {
     },
 }

 impl Token {
-    fn new_infix_op(v: &String, left_assoc: bool, precedence: u8) -> Self {
+    fn new_infix_op(v: &str, left_assoc: bool, precedence: u8) -> Self {
         Token::InfixOp {
-            left_assoc: left_assoc,
-            precedence: precedence,
-            value: v.clone(),
+            left_assoc,
+            precedence,
+            value: v.into(),
         }
     }
-    fn new_value(v: &String) -> Self {
-        Token::Value { value: v.clone() }
+    fn new_value(v: &str) -> Self {
+        Token::Value { value: v.into() }
     }
     fn is_infix_plus(&self) -> bool {
@@ -148,7 +148,7 @@ fn push_token_if_not_escaped(
     acc: &mut Vec<(usize, Token)>,
     tok_idx: usize,
     token: Token,
-    s: &String,
+    s: &str,
 ) {
     // Smells heuristics... :(
     let prev_is_plus = match acc.last() {

View file

@@ -18,7 +18,7 @@ impl<'a> CoreOptions<'a> {
     pub fn new(help_text: HelpText<'a>) -> Self {
         let mut ret = CoreOptions {
             options: getopts::Options::new(),
-            help_text: help_text,
+            help_text,
         };
         ret.options
             .optflag("", "help", "print usage information")

View file

@@ -66,8 +66,8 @@ impl<R: Read> Data<R> {
         Data {
             line_wrap: 76,
             ignore_garbage: false,
-            input: input,
-            format: format,
+            input,
+            format,
             alphabet: match format {
                 Base32 => b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
                 Base64 => b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789=+/",

View file

@@ -31,7 +31,7 @@ macro_rules! has {
     )
 }

-pub fn resolve_relative_path<'a>(path: &'a Path) -> Cow<'a, Path> {
+pub fn resolve_relative_path(path: &Path) -> Cow<Path> {
     if path.components().all(|e| e != Component::ParentDir) {
         return path.into();
     }

View file

@@ -21,7 +21,7 @@ use std::borrow::Cow;
 use std::io::{self, Write};

 // force a re-build whenever Cargo.toml changes
-const _CARGO_TOML: &'static str = include_str!("Cargo.toml");
+const _CARGO_TOML: &str = include_str!("Cargo.toml");

 // it's possible that using a smaller or larger buffer might provide better performance on some
 // systems, but honestly this is good enough