Replace deprecated methods.
I replaced position_elem() and slice_chars(). I also removed an unused feature attribute.
This commit is contained in:
parent ab5a5ba9e9
commit 9bc6eb4e7b
5 changed files with 21 additions and 23 deletions
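
Both deprecated calls map onto stable idioms: position_elem(&x) on a slice becomes iter().position(|b| *b == x), and slice_chars(beg, end) on a string becomes a plain index range. The sketch below is not part of this commit; the haystack and text values are made up for illustration. Note that range indexing on a &str works in byte offsets and panics off a char boundary, whereas slice_chars counted chars.

    fn main() {
        // position_elem(&b'\n')  ->  iter().position(|byte| *byte == b'\n')
        let haystack: &[u8] = b"foo\nbar";
        let newline_at = haystack.iter().position(|byte| *byte == b'\n');
        assert_eq!(newline_at, Some(3));

        // slice_chars(beg, end)  ->  &text[beg .. end]
        // (byte offsets; both endpoints must land on char boundaries)
        let text = "uutils coreutils";
        let word = &text[0 .. 6];
        assert_eq!(word, "uutils");
    }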
@@ -77,7 +77,7 @@ impl<R: Read> ByteReader<R> {
             Err(e) => crash!(1, "read error: {}", e),
         };
 
-        match filled_buf.position_elem(&b'\n') {
+        match filled_buf.iter().position(|byte| *byte == b'\n') {
             Some(idx) => {
                 consume_val = idx + 1;
                 bytes_consumed += consume_val;
@@ -121,13 +121,13 @@ impl<R: Read> self::Bytes::Select for ByteReader<R> {
                 // segments check if the byte after bytes is a newline
                 let buf_slice = &buffer[0..bytes + 1];
 
-                match buf_slice.position_elem(&b'\n') {
+                match buf_slice.iter().position(|byte| *byte == b'\n') {
                     Some(idx) => (SRes::Newl, idx+1),
                     None => (SRes::Comp, bytes),
                 }
             },
             _ => {
-                match buffer.position_elem(&b'\n') {
+                match buffer.iter().position(|byte| *byte == b'\n') {
                     Some(idx) => (SRes::Newl, idx+1),
                     None => (SRes::Part, buffer.len()),
                 }
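
In both ByteReader hunks the index returned by iter().position drives the same consume logic as before: everything up to and including the newline is consumed, and a buffer with no newline is consumed whole. A minimal sketch of that pattern follows; consumed_up_to_newline and buf are hypothetical names for illustration, not identifiers from this crate.

    // How many bytes to consume from a freshly filled buffer.
    fn consumed_up_to_newline(buf: &[u8]) -> usize {
        match buf.iter().position(|byte| *byte == b'\n') {
            // bytes before the newline plus the newline itself
            Some(idx) => idx + 1,
            // no newline yet: consume the whole buffer
            None => buf.len(),
        }
    }

    // e.g. consumed_up_to_newline(b"ab\ncd") == 3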
@@ -1,5 +1,5 @@
 #![crate_name = "cut"]
-#![feature(path_ext, slice_position_elem)]
+#![feature(path_ext)]
 
 /*
  * This file is part of the uutils coreutils package.
@@ -1,5 +1,4 @@
 #![crate_name = "fold"]
-#![feature(slice_chars)]
 
 /*
  * This file is part of the uutils coreutils package.
@@ -153,7 +152,7 @@ fn fold_file<T: Read>(mut file: BufReader<T>, bytes: bool, spaces: bool, width:
             if spaces && i + 1 < len {
                 match rfind_whitespace(slice) {
                     Some(m) => {
-                        let routput = slice.slice_chars(m + 1, slice.chars().count());
+                        let routput = &slice[m + 1 .. slice.chars().count()];
                         let ncount = routput.chars().fold(0usize, |out, ch: char| {
                             out + match ch {
                                 '\t' => 8,
@@ -162,7 +161,7 @@ fn fold_file<T: Read>(mut file: BufReader<T>, bytes: bool, spaces: bool, width:
                                 _ => 1
                             }
                         });
-                        (slice.slice_chars(0, m + 1), routput, ncount)
+                        (&slice[0 .. m + 1], routput, ncount)
                     },
                     None => (slice, "", 0)
                 }
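
The fold and ptx hunks all apply the same substitution: slice_chars(beg, end) on a &str becomes a range index, &s[beg .. end]. A minimal sketch of a fold-style split at the last space follows; split_at_last_space is a hypothetical helper for illustration, not the crate's rfind_whitespace, and since &str indexing uses byte offsets, the one-byte ' ' keeps m + 1 on a char boundary.

    fn split_at_last_space(slice: &str) -> (&str, &str) {
        match slice.rfind(' ') {
            // keep the space on the left chunk, mirroring the (0, m + 1) range in the diff
            Some(m) => (&slice[..m + 1], &slice[m + 1..]),
            None => (slice, ""),
        }
    }

    // e.g. split_at_last_space("foo bar baz") == ("foo bar ", "baz")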
@@ -1,5 +1,4 @@
 #![crate_name = "hostname"]
-#![feature(slice_extras)]
 
 /*
  * This file is part of the uutils coreutils package.
@@ -1,5 +1,5 @@
 #![crate_name = "ptx"]
-#![feature(convert, slice_chars, vec_push_all)]
+#![feature(convert, vec_push_all)]
 
 /*
  * This file is part of the uutils coreutils package.
@@ -247,7 +247,7 @@ fn create_word_set(config: &Config, filter: &WordFilter,
         if config.input_ref && ((beg, end) == (ref_beg, ref_end)) {
             continue;
         }
-        let mut word = line.slice_chars(beg, end).to_string();
+        let mut word = line[beg .. end].to_string();
         if filter.only_specified &&
             !(filter.only_set.contains(&word)) {
             continue;
@@ -284,7 +284,7 @@ fn get_reference(config: &Config, word_ref: &WordRef, line: &String) ->
             Some(x) => x,
             None => (0,0)
         };
-        format!("{}", line.slice_chars(beg, end))
+        format!("{}", &line[beg .. end])
     } else {
         String::new()
     }
@@ -359,14 +359,14 @@ fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
     bb_tmp = trim_broken_word_left(&all_before_vec, bb_tmp, all_before.len());
     let (before_beg, before_end) =
         trim_idx(&all_before_vec, bb_tmp, all_before.len());
-    before.push_str(all_before.slice_chars(before_beg, before_end));
+    before.push_str(&all_before[before_beg .. before_end]);
     assert!(max_before_size >= before.len());
 
     // get after
     let mut ae_tmp = cmp::min(max_after_size, all_after.len());
     ae_tmp = trim_broken_word_right(&all_after_vec, 0, ae_tmp);
     let (after_beg, after_end) = trim_idx(&all_after_vec, 0, ae_tmp);
-    after.push_str(all_after.slice_chars(after_beg, after_end));
+    after.push_str(&all_after[after_beg .. after_end]);
     assert!(max_after_size >= after.len());
 
     // get tail
@@ -375,7 +375,7 @@ fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
     let mut te_tmp = cmp::min(tb + max_tail_size, all_after.len());
     te_tmp = trim_broken_word_right(&all_after_vec, tb, te_tmp);
     let (tail_beg, tail_end) = trim_idx(&all_after_vec, tb, te_tmp);
-    tail.push_str(all_after.slice_chars(tail_beg, tail_end));
+    tail.push_str(&all_after[tail_beg .. tail_end]);
 
     // get head
     let max_head_size = max_after_size - after.len();
@@ -384,7 +384,7 @@ fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
         cmp::max(he as isize - max_head_size as isize, 0) as usize;
     hb_tmp = trim_broken_word_left(&all_before_vec, hb_tmp, he);
     let (head_beg, head_end) = trim_idx(&all_before_vec, hb_tmp, he);
-    head.push_str(all_before.slice_chars(head_beg, head_end));
+    head.push_str(&all_before[head_beg .. head_end]);
 
     // put right context truncation string if needed
     if after_end != all_after.len() && tail_beg == tail_end {
@@ -430,15 +430,15 @@ fn format_tex_line(config: &Config, word_ref: &WordRef, line: &String,
     let mut output = String::new();
     output.push_str(&format!("\\{} ", config.macro_name));
     let all_before = if config.input_ref {
-        let before = line.slice_chars(0, word_ref.position);
+        let before = &line[0 .. word_ref.position];
         adjust_tex_str(before.trim().trim_left_matches(reference))
     } else {
-        adjust_tex_str(line.slice_chars(0, word_ref.position))
+        adjust_tex_str(&line[0 .. word_ref.position])
     };
     let keyword = adjust_tex_str(
-        line.slice_chars(word_ref.position, word_ref.position_end));
+        &line[word_ref.position .. word_ref.position_end]);
     let all_after = adjust_tex_str(
-        line.slice_chars(word_ref.position_end, line.len()));
+        &line[word_ref.position_end .. line.len()]);
     let (tail, before, after, head) =
         get_output_chunks(&all_before, &keyword, &all_after, &config);
     output.push_str(format!("{5}{0}{6}{5}{1}{6}{5}{2}{6}{5}{3}{6}{5}{4}{6}",
@@ -460,15 +460,15 @@ fn format_roff_line(config: &Config, word_ref: &WordRef, line: &str,
     let mut output = String::new();
     output.push_str(&format!(".{}", config.macro_name));
     let all_before = if config.input_ref {
-        let before = line.slice_chars(0, word_ref.position);
+        let before = &line[0 .. word_ref.position];
        adjust_roff_str(before.trim().trim_left_matches(reference))
     } else {
-        adjust_roff_str(line.slice_chars(0, word_ref.position))
+        adjust_roff_str(&line[0 .. word_ref.position])
     };
     let keyword = adjust_roff_str(
-        line.slice_chars(word_ref.position, word_ref.position_end));
+        &line[word_ref.position .. word_ref.position_end]);
     let all_after = adjust_roff_str(
-        line.slice_chars(word_ref.position_end, line.len()));
+        &line[word_ref.position_end .. line.len()]);
     let (tail, before, after, head) =
         get_output_chunks(&all_before, &keyword, &all_after, &config);
     output.push_str(format!(" \"{}\" \"{}\" \"{}{}\" \"{}\"",