
Merge branch 'master' into numfmt/round-and-c-locale

Sylvestre Ledru 2021-06-23 12:39:55 +02:00 committed by GitHub
commit d40fc65ee6
GPG key ID: 4AEE18F83AFDEB23
56 changed files with 700 additions and 328 deletions


@ -17,6 +17,40 @@ env:
on: [push, pull_request] on: [push, pull_request]
jobs: jobs:
code_deps:
name: Style/dependencies
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v2
- name: Initialize workflow variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: "`cargo update` testing"
shell: bash
run: |
## `cargo update` testing
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::'Cargo.lock' file requires update (use \`cargo +${{ env.RUST_MIN_SRV }} update\`)" ; exit 1 ; }
code_format: code_format:
name: Style/format name: Style/format
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
@ -26,13 +60,13 @@ jobs:
job: job:
- { os: ubuntu-latest , features: feat_os_unix } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v2
- name: Initialize workflow variables - name: Initialize workflow variables
id: vars id: vars
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { for var in "$@" ; do echo steps.vars.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ; CARGO_FEATURES_OPTION='' ;
@ -48,36 +82,19 @@ jobs:
- name: "`fmt` testing" - name: "`fmt` testing"
shell: bash shell: bash
run: | run: |
# `fmt` testing ## `fmt` testing
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message> # * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" | sed -E -n -e "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::warning file=\1,line=\2::WARNING: \`cargo fmt\`: style violation/p" ; } S=$(cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n -e "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::error file=\1,line=\2::ERROR: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt \"\1\"\`)/p" ; exit 1 ; }
- name: "`fmt` testing of tests" - name: "`fmt` testing of tests"
if: success() || failure() # run regardless of prior step success/failure
shell: bash shell: bash
run: | run: |
# `fmt` testing of tests ## `fmt` testing of tests
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message> # * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(find tests -name "*.rs" -print0 | xargs -0 cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" | sed -E -n "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::warning file=\1,line=\2::WARNING: \`cargo fmt\`: style violation/p" ; } S=$(find tests -name "*.rs" -print0 | xargs -0 cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::error file=\1,line=\2::ERROR: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt \"\1\"\`)/p" ; exit 1 ; }
code_spellcheck: code_lint:
name: Style/spelling name: Style/lint
runs-on: ${{ matrix.job.os }}
strategy:
matrix:
job:
- { os: ubuntu-latest }
steps:
- uses: actions/checkout@v1
- name: Install/setup prerequisites
shell: bash
run: |
sudo apt-get -y update ; sudo apt-get -y install npm ; sudo npm install cspell -g;
- name: Run `cspell`
shell: bash
run: |
cspell --config .vscode/cSpell.json --no-summary --no-progress "**/*" | sed "s/\(.*\):\(.*\):\(.*\) - \(.*\)/::warning file=\1,line=\2,col=\3::cspell: \4/" || true
code_warnings:
name: Style/warnings
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
fail-fast: false fail-fast: false
@ -87,13 +104,13 @@ jobs:
- { os: macos-latest , features: feat_os_macos } - { os: macos-latest , features: feat_os_macos }
- { os: windows-latest , features: feat_os_windows } - { os: windows-latest , features: feat_os_windows }
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v2
- name: Initialize workflow variables - name: Initialize workflow variables
id: vars id: vars
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { for var in "$@" ; do echo steps.vars.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ; CARGO_FEATURES_OPTION='' ;
@ -106,13 +123,32 @@ jobs:
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
components: clippy components: clippy
- name: "`clippy` testing" - name: "`clippy` lint testing"
if: success() || failure() # run regardless of prior step success/failure
shell: bash shell: bash
run: | run: |
# `clippy` testing ## `clippy` lint testing
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message> # * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(cargo +nightly clippy --all-targets ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings 2>&1) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+(.*):([0-9]+):([0-9]+).*$/::warning file=\2,line=\3,col=\4::WARNING: \`cargo clippy\`: \1/p;" -e '}' ; } S=$(cargo +nightly clippy --all-targets ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings 2>&1) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+${PWD//\//\\/}\/(.*):([0-9]+):([0-9]+).*$/::error file=\2,line=\3,col=\4::ERROR: \`cargo clippy\`: \1 (file:'\2', line:\3)/p;" -e '}' ; exit 1 ; }
code_spellcheck:
name: Style/spelling
runs-on: ${{ matrix.job.os }}
strategy:
matrix:
job:
- { os: ubuntu-latest }
steps:
- uses: actions/checkout@v2
- name: Install/setup prerequisites
shell: bash
run: |
## Install/setup prerequisites
sudo apt-get -y update ; sudo apt-get -y install npm ; sudo npm install cspell -g ;
- name: Run `cspell`
shell: bash
run: |
## Run `cspell`
cspell --config .vscode/cSpell.json --no-summary --no-progress "**/*" | sed -E -n "s/${PWD//\//\\/}\/(.*):(.*):(.*) - (.*)/::error file=\1,line=\2,col=\3::ERROR: \4 (file:'\1', line:\2)/p"
min_version: min_version:
name: MinRustV # Minimum supported rust version name: MinRustV # Minimum supported rust version
@ -122,7 +158,7 @@ jobs:
job: job:
- { os: ubuntu-latest , features: feat_os_unix } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v2
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }}) - name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
@ -137,20 +173,20 @@ jobs:
use-tool-cache: true use-tool-cache: true
env: env:
RUSTUP_TOOLCHAIN: stable RUSTUP_TOOLCHAIN: stable
- name: Confirm compatible 'Cargo.lock' - name: Confirm MinSRV compatible 'Cargo.lock'
shell: bash shell: bash
run: | run: |
# Confirm compatible 'Cargo.lock' ## Confirm MinSRV compatible 'Cargo.lock'
# * 'Cargo.lock' is required to be in a format that `cargo` of MinSRV can interpret (eg, v1-format for MinSRV < v1.38) # * 'Cargo.lock' is required to be in a format that `cargo` of MinSRV can interpret (eg, v1-format for MinSRV < v1.38)
cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::Incompatible 'Cargo.lock' format; try \`cargo +${{ env.RUST_MIN_SRV }} update\`" ; exit 1 ; } cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::Incompatible (or out-of-date) 'Cargo.lock' file; update using \`cargo +${{ env.RUST_MIN_SRV }} update\`" ; exit 1 ; }
- name: Info - name: Info
shell: bash shell: bash
run: | run: |
# Info ## Info
## environment # environment
echo "## environment" echo "## environment"
echo "CI='${CI}'" echo "CI='${CI}'"
## tooling info display # tooling info display
echo "## tooling" echo "## tooling"
which gcc >/dev/null 2>&1 && (gcc --version | head -1) || true which gcc >/dev/null 2>&1 && (gcc --version | head -1) || true
rustup -V rustup -V
@ -158,12 +194,11 @@ jobs:
cargo -V cargo -V
rustc -V rustc -V
cargo-tree tree -V cargo-tree tree -V
## dependencies # dependencies
echo "## dependency list" echo "## dependency list"
cargo fetch --locked --quiet cargo fetch --locked --quiet
## * using the 'stable' toolchain is necessary to avoid "unexpected '--filter-platform'" errors ## * using the 'stable' toolchain is necessary to avoid "unexpected '--filter-platform'" errors
RUSTUP_TOOLCHAIN=stable cargo-tree tree --frozen --all --no-dev-dependencies --no-indent --features ${{ matrix.job.features }} | grep -vE "$PWD" | sort --unique RUSTUP_TOOLCHAIN=stable cargo-tree tree --frozen --all --no-dev-dependencies --no-indent --features ${{ matrix.job.features }} | grep -vE "$PWD" | sort --unique
- name: Test - name: Test
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with:
@ -172,8 +207,8 @@ jobs:
env: env:
RUSTFLAGS: '-Awarnings' RUSTFLAGS: '-Awarnings'
busybox_test: build_makefile:
name: Busybox test suite name: Build/Makefile
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
fail-fast: false fail-fast: false
@ -181,49 +216,26 @@ jobs:
job: job:
- { os: ubuntu-latest } - { os: ubuntu-latest }
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v2
- name: Install `rust` toolchain - name: Install `rust` toolchain
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
toolchain: stable toolchain: stable
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
- name: "prepare busytest" - name: Install/setup prerequisites
shell: bash shell: bash
run: | run: |
make prepare-busytest ## Install/setup prerequisites
- name: "run busybox testsuite" sudo apt-get -y update ; sudo apt-get -y install python3-sphinx ;
- name: "`make build`"
shell: bash shell: bash
run: | run: |
bindir=$(pwd)/target/debug
cd tmp/busybox-*/testsuite
## S=$(bindir=$bindir ./runtest) && printf "%s\n" "$S" || { printf "%s\n" "$S" | grep "FAIL:" | sed -e "s/FAIL: /::warning ::Test failure:/g" ; }
output=$(bindir=$bindir ./runtest 2>&1 || true)
printf "%s\n" "${output}"
n_fails=$(echo "$output" | grep "^FAIL:\s" | wc --lines)
if [ $n_fails -gt 0 ] ; then echo "::warning ::${n_fails}+ test failures" ; fi
makefile_build:
name: Test the build target of the Makefile
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest }
steps:
- uses: actions/checkout@v1
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: "Run make build"
shell: bash
run: |
sudo apt-get -y update ; sudo apt-get -y install python3-sphinx;
make build make build
- name: "`make test`"
shell: bash
run: |
make test
build: build:
name: Build name: Build
@ -235,8 +247,6 @@ jobs:
# { os, target, cargo-options, features, use-cross, toolchain } # { os, target, cargo-options, features, use-cross, toolchain }
- { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf , features: feat_os_unix_gnueabihf , use-cross: use-cross } - { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf , features: feat_os_unix_gnueabihf , use-cross: use-cross }
- { os: ubuntu-latest , target: aarch64-unknown-linux-gnu , features: feat_os_unix_gnueabihf , use-cross: use-cross } - { os: ubuntu-latest , target: aarch64-unknown-linux-gnu , features: feat_os_unix_gnueabihf , use-cross: use-cross }
- { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
- { os: ubuntu-16.04 , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
# - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only # - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only
# - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only # - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only
- { os: ubuntu-18.04 , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } - { os: ubuntu-18.04 , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
@ -249,11 +259,11 @@ jobs:
- { os: windows-latest , target: x86_64-pc-windows-gnu , features: feat_os_windows } ## note: requires rust >= 1.43.0 to link correctly - { os: windows-latest , target: x86_64-pc-windows-gnu , features: feat_os_windows } ## note: requires rust >= 1.43.0 to link correctly
- { os: windows-latest , target: x86_64-pc-windows-msvc , features: feat_os_windows } - { os: windows-latest , target: x86_64-pc-windows-msvc , features: feat_os_windows }
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v2
- name: Install/setup prerequisites - name: Install/setup prerequisites
shell: bash shell: bash
run: | run: |
## install/setup prerequisites ## Install/setup prerequisites
case '${{ matrix.job.target }}' in case '${{ matrix.job.target }}' in
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;; aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
@ -266,7 +276,7 @@ jobs:
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { for var in "$@" ; do echo steps.vars.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# toolchain # toolchain
TOOLCHAIN="stable" ## default to "stable" toolchain TOOLCHAIN="stable" ## default to "stable" toolchain
# * specify alternate/non-default TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: GH:rust-lang/rust#47048, GH:rust-lang/rust#53454, GH:rust-lang/cargo#6754) # * specify alternate/non-default TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: GH:rust-lang/rust#47048, GH:rust-lang/rust#53454, GH:rust-lang/cargo#6754)
@ -352,7 +362,7 @@ jobs:
- name: Create all needed build/work directories - name: Create all needed build/work directories
shell: bash shell: bash
run: | run: |
## create build/work space ## Create build/work space
mkdir -p '${{ steps.vars.outputs.STAGING }}' mkdir -p '${{ steps.vars.outputs.STAGING }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}' mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/dpkg' mkdir -p '${{ steps.vars.outputs.STAGING }}/dpkg'
@ -372,7 +382,7 @@ jobs:
shell: bash shell: bash
run: | run: |
## Dependent VARs setup ## Dependent VARs setup
outputs() { for var in "$@" ; do echo steps.vars.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="dep_vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# * determine sub-crate utility list # * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})" UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})"
echo UTILITY_LIST=${UTILITY_LIST} echo UTILITY_LIST=${UTILITY_LIST}
@ -389,15 +399,15 @@ jobs:
- name: Info - name: Info
shell: bash shell: bash
run: | run: |
# Info ## Info
## commit info # commit info
echo "## commit" echo "## commit"
echo GITHUB_REF=${GITHUB_REF} echo GITHUB_REF=${GITHUB_REF}
echo GITHUB_SHA=${GITHUB_SHA} echo GITHUB_SHA=${GITHUB_SHA}
## environment # environment
echo "## environment" echo "## environment"
echo "CI='${CI}'" echo "CI='${CI}'"
## tooling info display # tooling info display
echo "## tooling" echo "## tooling"
which gcc >/dev/null 2>&1 && (gcc --version | head -1) || true which gcc >/dev/null 2>&1 && (gcc --version | head -1) || true
rustup -V rustup -V
@ -405,7 +415,7 @@ jobs:
cargo -V cargo -V
rustc -V rustc -V
cargo-tree tree -V cargo-tree tree -V
## dependencies # dependencies
echo "## dependency list" echo "## dependency list"
cargo fetch --locked --quiet cargo fetch --locked --quiet
cargo-tree tree --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --all --no-dev-dependencies --no-indent | grep -vE "$PWD" | sort --unique cargo-tree tree --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --all --no-dev-dependencies --no-indent | grep -vE "$PWD" | sort --unique
@ -435,7 +445,7 @@ jobs:
- name: Package - name: Package
shell: bash shell: bash
run: | run: |
## package artifact(s) ## Package artifact(s)
# binary # binary
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/' cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# `strip` binary (if needed) # `strip` binary (if needed)
@ -476,6 +486,37 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
test_busybox:
name: Tests/BusyBox test suite
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest }
steps:
- uses: actions/checkout@v2
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Install/setup prerequisites
shell: bash
run: |
make prepare-busytest
- name: "Run BusyBox test suite"
shell: bash
run: |
## Run BusyBox test suite
bindir=$(pwd)/target/debug
cd tmp/busybox-*/testsuite
output=$(bindir=$bindir ./runtest 2>&1 || true)
printf "%s\n" "${output}"
n_fails=$(echo "$output" | grep "^FAIL:\s" | wc --lines)
if [ $n_fails -gt 0 ] ; then echo "::warning ::${n_fails}+ test failures" ; fi
coverage: coverage:
name: Code Coverage name: Code Coverage
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
@ -488,11 +529,11 @@ jobs:
- { os: macos-latest , features: macos } - { os: macos-latest , features: macos }
- { os: windows-latest , features: windows } - { os: windows-latest , features: windows }
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v2
- name: Install/setup prerequisites - name: Install/setup prerequisites
shell: bash shell: bash
run: | run: |
## install/setup prerequisites ## Install/setup prerequisites
case '${{ matrix.job.os }}' in case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for testing macos-latest) brew install coreutils ;; # needed for testing
esac esac
@ -503,7 +544,7 @@ jobs:
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { for var in "$@" ; do echo steps.vars.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# toolchain # toolchain
TOOLCHAIN="nightly-${{ env.RUST_COV_SRV }}" ## default to "nightly" toolchain (required for certain required unstable compiler flags) ## !maint: refactor when stable channel has needed support TOOLCHAIN="nightly-${{ env.RUST_COV_SRV }}" ## default to "nightly" toolchain (required for certain required unstable compiler flags) ## !maint: refactor when stable channel has needed support
# * specify gnu-type TOOLCHAIN for windows; `grcov` requires gnu-style code coverage data files # * specify gnu-type TOOLCHAIN for windows; `grcov` requires gnu-style code coverage data files
@ -538,7 +579,7 @@ jobs:
shell: bash shell: bash
run: | run: |
## Dependent VARs setup ## Dependent VARs setup
outputs() { for var in "$@" ; do echo steps.vars.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="dep_vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# * determine sub-crate utility list # * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})" UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})"
CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)" CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)"
@ -586,7 +627,7 @@ jobs:
id: coverage id: coverage
shell: bash shell: bash
run: | run: |
# generate coverage data ## Generate coverage data
COVERAGE_REPORT_DIR="target/debug" COVERAGE_REPORT_DIR="target/debug"
COVERAGE_REPORT_FILE="${COVERAGE_REPORT_DIR}/lcov.info" COVERAGE_REPORT_FILE="${COVERAGE_REPORT_DIR}/lcov.info"
# GRCOV_IGNORE_OPTION='--ignore build.rs --ignore "/*" --ignore "[a-zA-Z]:/*"' ## `grcov` ignores these params when passed as an environment variable (why?) # GRCOV_IGNORE_OPTION='--ignore build.rs --ignore "/*" --ignore "[a-zA-Z]:/*"' ## `grcov` ignores these params when passed as an environment variable (why?)

.github/workflows/FixPR.yml (new file, +133 lines)

@ -0,0 +1,133 @@
name: FixPR
# Trigger automated fixes for PRs being merged (with associated commits)
env:
BRANCH_TARGET: master
on:
# * only trigger on pull request closed to specific branches
# ref: https://github.community/t/trigger-workflow-only-on-pull-request-merge/17359/9
pull_request:
branches:
- master # == env.BRANCH_TARGET ## unfortunately, env context variables are only available in jobs/steps (see <https://github.community/t/how-to-use-env-context/16975/2>)
types: [ closed ]
jobs:
code_deps:
# Refresh dependencies (ie, 'Cargo.lock') and show updated dependency tree
if: github.event.pull_request.merged == true ## only for PR merges
name: Update/dependencies
runs-on: ${{ matrix.job.os }}
strategy:
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v2
- name: Initialize job variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# surface MSRV from CICD workflow
RUST_MIN_SRV=$(grep -P "^\s+RUST_MIN_SRV:" .github/workflows/CICD.yml | grep -Po "(?<=\x22)\d+[.]\d+(?:[.]\d+)?(?=\x22)" )
outputs RUST_MIN_SRV
- name: Install `rust` toolchain (v${{ steps.vars.outputs.RUST_MIN_SRV }})
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ steps.vars.outputs.RUST_MIN_SRV }}
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Install `cargo-tree` # for dependency information
uses: actions-rs/install@v0.1
with:
crate: cargo-tree
version: latest
use-tool-cache: true
env:
RUSTUP_TOOLCHAIN: stable
- name: Ensure updated 'Cargo.lock'
shell: bash
run: |
# Ensure updated 'Cargo.lock'
# * 'Cargo.lock' is required to be in a format that `cargo` of MinSRV can interpret (eg, v1-format for MinSRV < v1.38)
cargo fetch --locked --quiet || cargo +${{ steps.vars.outputs.RUST_MIN_SRV }} update
- name: Info
shell: bash
run: |
# Info
## environment
echo "## environment"
echo "CI='${CI}'"
## tooling info display
echo "## tooling"
which gcc >/dev/null 2>&1 && (gcc --version | head -1) || true
rustup -V
rustup show active-toolchain
cargo -V
rustc -V
cargo-tree tree -V
## dependencies
echo "## dependency list"
cargo fetch --locked --quiet
## * using the 'stable' toolchain is necessary to avoid "unexpected '--filter-platform'" errors
RUSTUP_TOOLCHAIN=stable cargo-tree tree --frozen --all --no-dev-dependencies --no-indent --features ${{ matrix.job.features }} | grep -vE "$PWD" | sort --unique
- name: Commit any changes (to '${{ env.BRANCH_TARGET }}')
uses: EndBug/add-and-commit@v7
with:
branch: ${{ env.BRANCH_TARGET }}
default_author: github_actions
message: "maint ~ refresh 'Cargo.lock'"
add: Cargo.lock
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
code_format:
# Recheck/refresh code formatting
if: github.event.pull_request.merged == true ## only for PR merges
name: Update/format
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v2
- name: Initialize job variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt
- name: "`cargo fmt`"
shell: bash
run: |
cargo fmt
- name: "`cargo fmt` tests"
shell: bash
run: |
# `cargo fmt` of tests
find tests -name "*.rs" -print0 | xargs -0 cargo fmt --
- name: Commit any changes (to '${{ env.BRANCH_TARGET }}')
uses: EndBug/add-and-commit@v7
with:
branch: ${{ env.BRANCH_TARGET }}
default_author: github_actions
message: "maint ~ rustfmt (`cargo fmt`)"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@ -1,4 +1,6 @@
name: GNU name: GnuTests
# spell-checker:ignore (names) gnulib ; (utils) autopoint gperf pyinotify texinfo ; (vars) XPASS
on: [push, pull_request] on: [push, pull_request]
@ -7,7 +9,6 @@ jobs:
name: Run GNU tests name: Run GNU tests
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
# Checks out a copy of your repository on the ubuntu-latest machine
- name: Checkout code uutil - name: Checkout code uutil
uses: actions/checkout@v2 uses: actions/checkout@v2
with: with:
@ -18,7 +19,7 @@ jobs:
repository: 'coreutils/coreutils' repository: 'coreutils/coreutils'
path: 'gnu' path: 'gnu'
ref: v8.32 ref: v8.32
- name: Checkout GNU corelib - name: Checkout GNU coreutils library (gnulib)
uses: actions/checkout@v2 uses: actions/checkout@v2
with: with:
repository: 'coreutils/gnulib' repository: 'coreutils/gnulib'
@ -32,23 +33,26 @@ jobs:
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt components: rustfmt
- name: Install deps - name: Install dependencies
shell: bash shell: bash
run: | run: |
## Install dependencies
sudo apt-get update sudo apt-get update
sudo apt-get install autoconf autopoint bison texinfo gperf gcc g++ gdb python-pyinotify python3-sphinx jq sudo apt-get install autoconf autopoint bison texinfo gperf gcc g++ gdb python-pyinotify python3-sphinx jq
- name: Build binaries - name: Build binaries
shell: bash shell: bash
run: | run: |
cd uutils ## Build binaries
bash util/build-gnu.sh cd uutils
bash util/build-gnu.sh
- name: Run GNU tests - name: Run GNU tests
shell: bash shell: bash
run: | run: |
bash uutils/util/run-gnu-test.sh bash uutils/util/run-gnu-test.sh
- name: Extract tests info - name: Extract testing info
shell: bash shell: bash
run: | run: |
## Extract testing info
LOG_FILE=gnu/tests/test-suite.log LOG_FILE=gnu/tests/test-suite.log
if test -f "$LOG_FILE" if test -f "$LOG_FILE"
then then
@ -58,7 +62,9 @@ jobs:
FAIL=$(sed -n "s/.*# FAIL: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) FAIL=$(sed -n "s/.*# FAIL: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1)
XPASS=$(sed -n "s/.*# XPASS: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) XPASS=$(sed -n "s/.*# XPASS: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1)
ERROR=$(sed -n "s/.*# ERROR: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) ERROR=$(sed -n "s/.*# ERROR: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1)
echo "::warning ::GNU testsuite = TOTAL: $TOTAL / PASS: $PASS / FAIL: $FAIL / ERROR: $ERROR" output="GNU tests summary = TOTAL: $TOTAL / PASS: $PASS / FAIL: $FAIL / ERROR: $ERROR"
echo "${output}"
if [[ "$FAIL" -gt 0 || "$ERROR" -gt 0 ]]; then echo "::warning ::${output}" ; fi
jq -n \ jq -n \
--arg date "$(date --rfc-email)" \ --arg date "$(date --rfc-email)" \
--arg sha "$GITHUB_SHA" \ --arg sha "$GITHUB_SHA" \
@ -72,12 +78,10 @@ jobs:
else else
echo "::error ::Failed to get summary of test results" echo "::error ::Failed to get summary of test results"
fi fi
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: test-report name: test-report
path: gnu/tests/**/*.log path: gnu/tests/**/*.log
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: gnu-result name: gnu-result


@ -12,6 +12,7 @@ FIFOs
FQDN # fully qualified domain name FQDN # fully qualified domain name
GID # group ID GID # group ID
GIDs GIDs
GNU
GNUEABI GNUEABI
GNUEABIhf GNUEABIhf
JFS JFS
@ -45,6 +46,7 @@ Deno
EditorConfig EditorConfig
FreeBSD FreeBSD
Gmail Gmail
GNU
Irix Irix
MS-DOS MS-DOS
MSDOS MSDOS


@ -78,6 +78,7 @@ symlinks
syscall syscall
syscalls syscalls
tokenize tokenize
toolchain
truthy truthy
unbuffered unbuffered
unescape unescape


@ -48,17 +48,19 @@ xattr
# * rust/rustc # * rust/rustc
RUSTDOCFLAGS RUSTDOCFLAGS
RUSTFLAGS RUSTFLAGS
clippy
rustc
rustfmt
rustup
#
bitor # BitOr trait function bitor # BitOr trait function
bitxor # BitXor trait function bitxor # BitXor trait function
clippy
concat concat
fract fract
powi powi
println println
repr repr
rfind rfind
rustc
rustfmt
struct struct
structs structs
substr substr


@ -54,6 +54,29 @@ pub fn main() {
for krate in crates { for krate in crates {
match krate.as_ref() { match krate.as_ref() {
// 'test' is named uu_test to avoid collision with rust core crate 'test'.
// It can also be invoked by name '[' for the '[ expr ] syntax'.
"uu_test" => {
mf.write_all(
format!(
"\
\tmap.insert(\"test\", {krate}::uumain);\n\
\t\tmap.insert(\"[\", {krate}::uumain);\n\
",
krate = krate
)
.as_bytes(),
)
.unwrap();
tf.write_all(
format!(
"#[path=\"{dir}/test_test.rs\"]\nmod test_test;\n",
dir = util_tests_dir,
)
.as_bytes(),
)
.unwrap()
}
k if k.starts_with(override_prefix) => { k if k.starts_with(override_prefix) => {
mf.write_all( mf.write_all(
format!( format!(
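
This hunk teaches the generated multicall dispatch about `uu_test`: because the crate is named `uu_test` (to avoid colliding with Rust's built-in `test` crate), it gets registered under both `test` and the historical `[` alias. A minimal sketch of the resulting idea, with a hypothetical `test_main` standing in for the generated `uumain` entry point:

```rust
use std::collections::HashMap;
use std::process::exit;

// Hypothetical stand-in for `uu_test::uumain`; the real entry point takes the
// remaining command-line arguments and returns an exit code.
fn test_main(args: &[&str]) -> i32 {
    println!("test invoked with {:?}", args);
    0
}

fn main() {
    // Both "test" and the historical "[" alias dispatch to the same entry
    // point, which is what the generated `map.insert(...)` lines arrange.
    let mut map: HashMap<&str, fn(&[&str]) -> i32> = HashMap::new();
    map.insert("test", test_main);
    map.insert("[", test_main);

    let args = ["1", "-lt", "2", "]"];
    let code = map.get("[").map_or(1, |f| f(&args[..]));
    exit(code);
}
```

Looking up either name yields the same function pointer, so `test expr` and `[ expr ]` stay in sync.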


@ -39,7 +39,7 @@ impl Config {
Some(mut values) => { Some(mut values) => {
let name = values.next().unwrap(); let name = values.next().unwrap();
if values.len() != 0 { if values.len() != 0 {
return Err(format!("extra operand {}", name)); return Err(format!("extra operand '{}'", name));
} }
if name == "-" { if name == "-" {
@ -58,7 +58,7 @@ impl Config {
.value_of(options::WRAP) .value_of(options::WRAP)
.map(|num| { .map(|num| {
num.parse::<usize>() num.parse::<usize>()
.map_err(|e| format!("Invalid wrap size: {}: {}", num, e)) .map_err(|e| format!("Invalid wrap size: '{}': {}", num, e))
}) })
.transpose()?; .transpose()?;
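
The wrap-size change above keeps the `Option<Result<..>>` to `Result<Option<..>>` plumbing: the value is parsed only when `--wrap` was given, and a descriptive error (now quoting the offending value) is surfaced otherwise. A small self-contained sketch of that pattern; `parse_wrap` is a hypothetical helper, not the real API:

```rust
// Parse the value only if the flag was given, turning a bad value into a
// descriptive error instead of panicking.
fn parse_wrap(arg: Option<&str>) -> Result<Option<usize>, String> {
    arg.map(|num| {
        num.parse::<usize>()
            .map_err(|e| format!("Invalid wrap size: '{}': {}", num, e))
    })
    .transpose() // Option<Result<usize, String>> -> Result<Option<usize>, String>
}

fn main() {
    assert_eq!(parse_wrap(None), Ok(None));
    assert_eq!(parse_wrap(Some("76")), Ok(Some(76)));
    assert!(parse_wrap(Some("abc")).is_err());
    println!("ok");
}
```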


@ -281,7 +281,7 @@ fn parse_spec(spec: &str) -> Result<(Option<u32>, Option<u32>), String> {
let uid = if usr_only || usr_grp { let uid = if usr_only || usr_grp {
Some( Some(
Passwd::locate(args[0]) Passwd::locate(args[0])
.map_err(|_| format!("invalid user: {}", spec))? .map_err(|_| format!("invalid user: '{}'", spec))?
.uid(), .uid(),
) )
} else { } else {
@ -290,7 +290,7 @@ fn parse_spec(spec: &str) -> Result<(Option<u32>, Option<u32>), String> {
let gid = if grp_only || usr_grp { let gid = if grp_only || usr_grp {
Some( Some(
Group::locate(args[1]) Group::locate(args[1])
.map_err(|_| format!("invalid group: {}", spec))? .map_err(|_| format!("invalid group: '{}'", spec))?
.gid(), .gid(),
) )
} else { } else {


@ -667,7 +667,14 @@ impl Options {
} }
} }
} else { } else {
ReflinkMode::Never #[cfg(any(target_os = "linux", target_os = "macos"))]
{
ReflinkMode::Auto
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
{
ReflinkMode::Never
}
} }
}, },
backup: backup_mode, backup: backup_mode,
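
The first `cp` hunk changes the default `--reflink` behaviour: when the option is absent, Linux and macOS now default to `ReflinkMode::Auto` (clone when the filesystem supports it) while other platforms keep `ReflinkMode::Never`. A minimal sketch of the same cfg-blocks-as-expression pattern, using a hypothetical `default_reflink_mode` helper and the variant names from the hunk:

```rust
// One of these variants is reported as never constructed depending on the
// platform; that is expected for a sketch this small.
#[allow(dead_code)]
#[derive(Debug)]
enum ReflinkMode {
    Always,
    Auto,
    Never,
}

// Default --reflink behaviour when the flag is absent: clone-on-write where
// the platform supports it, plain copies elsewhere. The cfg'd blocks are
// expressions, so exactly one of them supplies the return value.
fn default_reflink_mode() -> ReflinkMode {
    #[cfg(any(target_os = "linux", target_os = "macos"))]
    {
        ReflinkMode::Auto
    }
    #[cfg(not(any(target_os = "linux", target_os = "macos")))]
    {
        ReflinkMode::Never
    }
}

fn main() {
    println!("default reflink mode: {:?}", default_reflink_mode());
}
```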
@ -1218,28 +1225,39 @@ fn copy_file(source: &Path, dest: &Path, options: &Options) -> CopyResult<()> {
/// Copy the file from `source` to `dest` either using the normal `fs::copy` or a /// Copy the file from `source` to `dest` either using the normal `fs::copy` or a
/// copy-on-write scheme if --reflink is specified and the filesystem supports it. /// copy-on-write scheme if --reflink is specified and the filesystem supports it.
fn copy_helper(source: &Path, dest: &Path, options: &Options) -> CopyResult<()> { fn copy_helper(source: &Path, dest: &Path, options: &Options) -> CopyResult<()> {
if options.reflink_mode != ReflinkMode::Never { if options.parents {
#[cfg(not(any(target_os = "linux", target_os = "macos")))] let parent = dest.parent().unwrap_or(dest);
return Err("--reflink is only supported on linux and macOS" fs::create_dir_all(parent)?;
.to_string() }
.into()); let is_symlink = fs::symlink_metadata(&source)?.file_type().is_symlink();
if source.to_string_lossy() == "/dev/null" {
#[cfg(target_os = "macos")]
copy_on_write_macos(source, dest, options.reflink_mode)?;
#[cfg(target_os = "linux")]
copy_on_write_linux(source, dest, options.reflink_mode)?;
} else if !options.dereference && fs::symlink_metadata(&source)?.file_type().is_symlink() {
copy_link(source, dest)?;
} else if source.to_string_lossy() == "/dev/null" {
/* workaround a limitation of fs::copy /* workaround a limitation of fs::copy
* https://github.com/rust-lang/rust/issues/79390 * https://github.com/rust-lang/rust/issues/79390
*/ */
File::create(dest)?; File::create(dest)?;
} else { } else if !options.dereference && is_symlink {
if options.parents { copy_link(source, dest)?;
let parent = dest.parent().unwrap_or(dest); } else if options.reflink_mode != ReflinkMode::Never {
fs::create_dir_all(parent)?; #[cfg(not(any(target_os = "linux", target_os = "macos")))]
return Err("--reflink is only supported on linux and macOS"
.to_string()
.into());
#[cfg(any(target_os = "linux", target_os = "macos"))]
if is_symlink {
assert!(options.dereference);
let real_path = std::fs::read_link(source)?;
#[cfg(target_os = "macos")]
copy_on_write_macos(&real_path, dest, options.reflink_mode)?;
#[cfg(target_os = "linux")]
copy_on_write_linux(&real_path, dest, options.reflink_mode)?;
} else {
#[cfg(target_os = "macos")]
copy_on_write_macos(source, dest, options.reflink_mode)?;
#[cfg(target_os = "linux")]
copy_on_write_linux(source, dest, options.reflink_mode)?;
} }
} else {
fs::copy(source, dest).context(&*context_for(source, dest))?; fs::copy(source, dest).context(&*context_for(source, dest))?;
} }
@ -1254,11 +1272,16 @@ fn copy_link(source: &Path, dest: &Path) -> CopyResult<()> {
Some(name) => dest.join(name).into(), Some(name) => dest.join(name).into(),
None => crash!( None => crash!(
EXIT_ERR, EXIT_ERR,
"cannot stat {}: No such file or directory", "cannot stat '{}': No such file or directory",
source.display() source.display()
), ),
} }
} else { } else {
// we always need to remove the file to be able to create a symlink,
// even if it is writeable.
if dest.exists() {
fs::remove_file(dest)?;
}
dest.into() dest.into()
}; };
symlink_file(&link, &dest, &*context_for(&link, &dest)) symlink_file(&link, &dest, &*context_for(&link, &dest))
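
The `copy_link` change removes an existing destination before creating the symlink, since a symlink cannot be created over a path that already exists even when that file is writable. A minimal sketch of that fix, assuming a Unix target and a hypothetical `replace_with_link` helper:

```rust
use std::fs;
use std::os::unix::fs::symlink;
use std::path::Path;

// Hypothetical helper mirroring the fix: drop any existing regular file at
// `dest`, then create the symlink in its place.
fn replace_with_link(link_target: &Path, dest: &Path) -> std::io::Result<()> {
    if dest.exists() {
        fs::remove_file(dest)?;
    }
    symlink(link_target, dest)
}

fn main() -> std::io::Result<()> {
    fs::write("target.txt", "hello")?;
    fs::write("dest.txt", "old contents")?;
    replace_with_link(Path::new("target.txt"), Path::new("dest.txt"))?;
    // dest.txt is now a symlink to target.txt, so this prints "hello".
    println!("{}", fs::read_to_string("dest.txt")?);
    Ok(())
}
```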


@ -210,7 +210,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
let format = if let Some(form) = matches.value_of(OPT_FORMAT) { let format = if let Some(form) = matches.value_of(OPT_FORMAT) {
if !form.starts_with('+') { if !form.starts_with('+') {
eprintln!("date: invalid date {}", form); eprintln!("date: invalid date '{}'", form);
return 1; return 1;
} }
let form = form[1..].to_string(); let form = form[1..].to_string();
@ -239,7 +239,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
let set_to = match matches.value_of(OPT_SET).map(parse_date) { let set_to = match matches.value_of(OPT_SET).map(parse_date) {
None => None, None => None,
Some(Err((input, _err))) => { Some(Err((input, _err))) => {
eprintln!("date: invalid date {}", input); eprintln!("date: invalid date '{}'", input);
return 1; return 1;
} }
Some(Ok(date)) => Some(date), Some(Ok(date)) => Some(date),
@ -305,7 +305,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
println!("{}", formatted); println!("{}", formatted);
} }
Err((input, _err)) => { Err((input, _err)) => {
println!("date: invalid date {}", input); println!("date: invalid date '{}'", input);
} }
} }
} }


@ -123,7 +123,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
if matches.is_present(options::PRINT_DATABASE) { if matches.is_present(options::PRINT_DATABASE) {
if !files.is_empty() { if !files.is_empty() {
show_usage_error!( show_usage_error!(
"extra operand {}\nfile operands cannot be combined with \ "extra operand '{}'\nfile operands cannot be combined with \
--print-database (-p)", --print-database (-p)",
files[0] files[0]
); );
@ -155,7 +155,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
result = parse(INTERNAL_DB.lines(), out_format, "") result = parse(INTERNAL_DB.lines(), out_format, "")
} else { } else {
if files.len() > 1 { if files.len() > 1 {
show_usage_error!("extra operand {}", files[1]); show_usage_error!("extra operand '{}'", files[1]);
return 1; return 1;
} }
match File::open(files[0]) { match File::open(files[0]) {
@ -192,21 +192,25 @@ pub trait StrUtils {
impl StrUtils for str { impl StrUtils for str {
fn purify(&self) -> &Self { fn purify(&self) -> &Self {
let mut line = self; let mut line = self;
for (n, c) in self.chars().enumerate() { for (n, _) in self
if c != '#' { .as_bytes()
continue; .iter()
} .enumerate()
.filter(|(_, c)| **c == b'#')
// Ignore if '#' is at the beginning of line {
if n == 0 {
line = &self[..0];
break;
}
// Ignore the content after '#' // Ignore the content after '#'
// only if it is preceded by at least one whitespace // only if it is preceded by at least one whitespace
if self.chars().nth(n - 1).unwrap().is_whitespace() { match self[..n].chars().last() {
line = &self[..n]; Some(c) if c.is_whitespace() => {
line = &self[..n - c.len_utf8()];
break;
}
None => {
// n == 0
line = &self[..0];
break;
}
_ => (),
} }
} }
line.trim() line.trim()
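
The rewritten `purify` scans the raw bytes for `#` and treats it as a comment marker only when it starts the line or follows whitespace, replacing the earlier per-character `chars().nth()` rescans. A standalone, slightly simplified sketch of the same rule:

```rust
// A '#' starts a comment only at the beginning of the line or when the
// previous character is whitespace; otherwise it is literal data.
fn purify(line: &str) -> &str {
    let mut kept = line;
    for (n, _) in line
        .as_bytes()
        .iter()
        .enumerate()
        .filter(|(_, b)| **b == b'#')
    {
        match line[..n].chars().last() {
            Some(c) if c.is_whitespace() => {
                kept = &line[..n - c.len_utf8()];
                break;
            }
            None => {
                // '#' is the first character of the line.
                kept = &line[..0];
                break;
            }
            _ => (),
        }
    }
    kept.trim()
}

fn main() {
    assert_eq!(purify("TERM xterm # comment"), "TERM xterm");
    assert_eq!(purify("# whole-line comment"), "");
    assert_eq!(purify(".tar#gz 01;31"), ".tar#gz 01;31");
    println!("ok");
}
```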


@ -274,7 +274,7 @@ fn du(
Err(e) => { Err(e) => {
safe_writeln!( safe_writeln!(
stderr(), stderr(),
"{}: cannot read directory {}: {}", "{}: cannot read directory '{}': {}",
options.program_name, options.program_name,
my_stat.path.display(), my_stat.path.display(),
e e
@ -318,9 +318,7 @@ fn du(
let error_message = "Permission denied"; let error_message = "Permission denied";
show_error_custom_description!(description, "{}", error_message) show_error_custom_description!(description, "{}", error_message)
} }
_ => { _ => show_error!("cannot access '{}': {}", entry.path().display(), error),
show_error!("cannot access '{}': {}", entry.path().display(), error)
}
}, },
}, },
Err(error) => show_error!("{}", error), Err(error) => show_error!("{}", error),
@ -594,9 +592,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
let files = match matches.value_of(options::FILE) { let files = match matches.value_of(options::FILE) {
Some(_) => matches.values_of(options::FILE).unwrap().collect(), Some(_) => matches.values_of(options::FILE).unwrap().collect(),
None => { None => vec!["."],
vec!["."]
}
}; };
let block_size = u64::try_from(read_block_size(matches.value_of(options::BLOCK_SIZE))).unwrap(); let block_size = u64::try_from(read_block_size(matches.value_of(options::BLOCK_SIZE))).unwrap();
@ -693,8 +689,8 @@ Try '{} --help' for more information.",
time time
} else { } else {
show_error!( show_error!(
"Invalid argument {} for --time. "Invalid argument '{}' for --time.
birth and creation arguments are not supported on this platform.", 'birth' and 'creation' arguments are not supported on this platform.",
s s
); );
return 1; return 1;


@ -269,7 +269,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
match Passwd::locate(users[i].as_str()) { match Passwd::locate(users[i].as_str()) {
Ok(p) => Some(p), Ok(p) => Some(p),
Err(_) => { Err(_) => {
show_error!("{}: no such user", users[i]); show_error!("'{}': no such user", users[i]);
exit_code = 1; exit_code = 1;
if i + 1 >= users.len() { if i + 1 >= users.len() {
break; break;


@ -373,7 +373,7 @@ impl Config {
.value_of(options::WIDTH) .value_of(options::WIDTH)
.map(|x| { .map(|x| {
x.parse::<u16>().unwrap_or_else(|_e| { x.parse::<u16>().unwrap_or_else(|_e| {
show_error!("invalid line width: {}", x); show_error!("invalid line width: '{}'", x);
exit(2); exit(2);
}) })
}) })
@ -756,7 +756,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
Arg::with_name(options::time::CHANGE) Arg::with_name(options::time::CHANGE)
.short(options::time::CHANGE) .short(options::time::CHANGE)
.help("If the long listing format (e.g., -l, -o) is being used, print the status \ .help("If the long listing format (e.g., -l, -o) is being used, print the status \
change time (the ctime in the inode) instead of the modification time. When \ change time (the 'ctime' in the inode) instead of the modification time. When \
explicitly sorting by time (--sort=time or -t) or when not using a long listing \ explicitly sorting by time (--sort=time or -t) or when not using a long listing \
format, sort according to the status change time.") format, sort according to the status change time.")
.overrides_with_all(&[ .overrides_with_all(&[
@ -1196,7 +1196,9 @@ fn list(locs: Vec<String>, config: Config) -> i32 {
for loc in &locs { for loc in &locs {
let p = PathBuf::from(&loc); let p = PathBuf::from(&loc);
if !p.exists() { let path_data = PathData::new(p, None, None, &config, true);
if path_data.md().is_none() {
show_error!("'{}': {}", &loc, "No such file or directory"); show_error!("'{}': {}", &loc, "No such file or directory");
/* /*
We found an error, the return code of ls should not be 0 We found an error, the return code of ls should not be 0
@ -1206,8 +1208,6 @@ fn list(locs: Vec<String>, config: Config) -> i32 {
continue; continue;
} }
let path_data = PathData::new(p, None, None, &config, true);
let show_dir_contents = match path_data.file_type() { let show_dir_contents = match path_data.file_type() {
Some(ft) => !config.directory && ft.is_dir(), Some(ft) => !config.directory && ft.is_dir(),
None => { None => {
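
The `list` change builds the `PathData` first and tests its metadata instead of calling `Path::exists()`. One practical difference this picks up (assuming the metadata is taken without following links) is dangling symlinks, which `exists()` reports as missing; a small Unix-only illustration:

```rust
use std::fs;
use std::os::unix::fs::symlink;
use std::path::Path;

fn main() -> std::io::Result<()> {
    symlink("does-not-exist", "dangling")?;
    let p = Path::new("dangling");
    // exists() follows the link, so a dangling symlink looks absent...
    println!("exists()           -> {}", p.exists());
    // ...while symlink_metadata() still describes the link itself.
    println!("symlink_metadata() -> {}", p.symlink_metadata().is_ok());
    fs::remove_file(p)?;
    Ok(())
}
```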
@ -1270,7 +1270,8 @@ fn sort_entries(entries: &mut Vec<PathData>, config: &Config) {
#[cfg(windows)] #[cfg(windows)]
fn is_hidden(file_path: &DirEntry) -> bool { fn is_hidden(file_path: &DirEntry) -> bool {
let metadata = fs::metadata(file_path.path()).unwrap(); let path = file_path.path();
let metadata = fs::metadata(&path).unwrap_or_else(|_| fs::symlink_metadata(&path).unwrap());
let attr = metadata.file_attributes(); let attr = metadata.file_attributes();
(attr & 0x2) > 0 (attr & 0x2) > 0
} }
@ -1331,7 +1332,7 @@ fn enter_directory(dir: &PathData, config: &Config, out: &mut BufWriter<Stdout>)
fn get_metadata(entry: &Path, dereference: bool) -> std::io::Result<Metadata> { fn get_metadata(entry: &Path, dereference: bool) -> std::io::Result<Metadata> {
if dereference { if dereference {
entry.metadata().or_else(|_| entry.symlink_metadata()) entry.metadata()
} else { } else {
entry.symlink_metadata() entry.symlink_metadata()
} }
@ -1733,7 +1734,11 @@ fn display_file_name(path: &PathData, config: &Config) -> Option<Cell> {
#[cfg(unix)] #[cfg(unix)]
{ {
if config.format != Format::Long && config.inode { if config.format != Format::Long && config.inode {
name = get_inode(path.md()?) + " " + &name; name = path
.md()
.map_or_else(|| "?".to_string(), |md| get_inode(md))
+ " "
+ &name;
} }
} }


@ -40,7 +40,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
Arg::with_name(OPT_MODE) Arg::with_name(OPT_MODE)
.short("m") .short("m")
.long(OPT_MODE) .long(OPT_MODE)
.help("set file mode") .help("set file mode (not implemented on windows)")
.default_value("755"), .default_value("755"),
) )
.arg( .arg(


@ -210,7 +210,7 @@ fn valid_type(tpe: String) -> Result<(), String> {
if vec!['b', 'c', 'u', 'p'].contains(&first_char) { if vec!['b', 'c', 'u', 'p'].contains(&first_char) {
Ok(()) Ok(())
} else { } else {
Err(format!("invalid device type {}", tpe)) Err(format!("invalid device type '{}'", tpe))
} }
}) })
} }


@ -77,14 +77,14 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
.long(OPT_TMPDIR) .long(OPT_TMPDIR)
.help( .help(
"interpret TEMPLATE relative to DIR; if DIR is not specified, use \ "interpret TEMPLATE relative to DIR; if DIR is not specified, use \
$TMPDIR if set, else /tmp. With this option, TEMPLATE must not \ $TMPDIR ($TMP on windows) if set, else /tmp. With this option, TEMPLATE must not \
be an absolute name; unlike with -t, TEMPLATE may contain \ be an absolute name; unlike with -t, TEMPLATE may contain \
slashes, but mktemp creates only the final component", slashes, but mktemp creates only the final component",
) )
.value_name("DIR"), .value_name("DIR"),
) )
.arg(Arg::with_name(OPT_T).short(OPT_T).help( .arg(Arg::with_name(OPT_T).short(OPT_T).help(
"Generate a template (using the supplied prefix and TMPDIR if set) \ "Generate a template (using the supplied prefix and TMPDIR (TMP on windows) if set) \
to create a filename template [deprecated]", to create a filename template [deprecated]",
)) ))
.arg( .arg(
@ -154,7 +154,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
if matches.is_present(OPT_TMPDIR) && PathBuf::from(prefix).is_absolute() { if matches.is_present(OPT_TMPDIR) && PathBuf::from(prefix).is_absolute() {
show_error!( show_error!(
"invalid template, {}; with --tmpdir, it may not be absolute", "invalid template, '{}'; with --tmpdir, it may not be absolute",
template template
); );
return 1; return 1;


@ -210,14 +210,14 @@ fn more(buff: &str, mut stdout: &mut Stdout, next_file: Option<&str>, silent: bo
let (cols, rows) = terminal::size().unwrap(); let (cols, rows) = terminal::size().unwrap();
let lines = break_buff(buff, usize::from(cols)); let lines = break_buff(buff, usize::from(cols));
let mut pager = Pager::new(rows as usize, lines, next_file, silent); let mut pager = Pager::new(rows, lines, next_file, silent);
pager.draw(stdout, false); pager.draw(stdout, None);
if pager.should_close() { if pager.should_close() {
return; return;
} }
loop { loop {
let mut wrong_key = false; let mut wrong_key = None;
if event::poll(Duration::from_millis(10)).unwrap() { if event::poll(Duration::from_millis(10)).unwrap() {
match event::read().unwrap() { match event::read().unwrap() {
Event::Key(KeyEvent { Event::Key(KeyEvent {
@ -239,7 +239,11 @@ fn more(buff: &str, mut stdout: &mut Stdout, next_file: Option<&str>, silent: bo
code: KeyCode::Char(' '), code: KeyCode::Char(' '),
modifiers: KeyModifiers::NONE, modifiers: KeyModifiers::NONE,
}) => { }) => {
pager.page_down(); if pager.should_close() {
return;
} else {
pager.page_down();
}
} }
Event::Key(KeyEvent { Event::Key(KeyEvent {
code: KeyCode::Up, code: KeyCode::Up,
@ -247,15 +251,17 @@ fn more(buff: &str, mut stdout: &mut Stdout, next_file: Option<&str>, silent: bo
}) => { }) => {
pager.page_up(); pager.page_up();
} }
_ => { Event::Resize(col, row) => {
wrong_key = true; pager.page_resize(col, row);
} }
Event::Key(KeyEvent {
code: KeyCode::Char(k),
..
}) => wrong_key = Some(k),
_ => continue,
} }
pager.draw(stdout, wrong_key); pager.draw(stdout, wrong_key);
if pager.should_close() {
return;
}
} }
} }
} }
@ -264,54 +270,49 @@ struct Pager<'a> {
// The current line at the top of the screen // The current line at the top of the screen
upper_mark: usize, upper_mark: usize,
// The number of rows that fit on the screen // The number of rows that fit on the screen
content_rows: usize, content_rows: u16,
lines: Vec<String>, lines: Vec<String>,
next_file: Option<&'a str>, next_file: Option<&'a str>,
line_count: usize, line_count: usize,
close_on_down: bool,
silent: bool, silent: bool,
} }
impl<'a> Pager<'a> { impl<'a> Pager<'a> {
fn new(rows: usize, lines: Vec<String>, next_file: Option<&'a str>, silent: bool) -> Self { fn new(rows: u16, lines: Vec<String>, next_file: Option<&'a str>, silent: bool) -> Self {
let line_count = lines.len(); let line_count = lines.len();
Self { Self {
upper_mark: 0, upper_mark: 0,
content_rows: rows - 1, content_rows: rows.saturating_sub(1),
lines, lines,
next_file, next_file,
line_count, line_count,
close_on_down: false,
silent, silent,
} }
} }
fn should_close(&mut self) -> bool { fn should_close(&mut self) -> bool {
if self.upper_mark + self.content_rows >= self.line_count { self.upper_mark
if self.close_on_down { .saturating_add(self.content_rows.into())
return true; .ge(&self.line_count)
}
if self.next_file.is_none() {
return true;
} else {
self.close_on_down = true;
}
} else {
self.close_on_down = false;
}
false
} }
fn page_down(&mut self) { fn page_down(&mut self) {
self.upper_mark += self.content_rows; self.upper_mark = self.upper_mark.saturating_add(self.content_rows.into());
} }
fn page_up(&mut self) { fn page_up(&mut self) {
self.upper_mark = self.upper_mark.saturating_sub(self.content_rows); self.upper_mark = self.upper_mark.saturating_sub(self.content_rows.into());
} }
fn draw(&self, stdout: &mut std::io::Stdout, wrong_key: bool) { // TODO: Deal with column size changes.
let lower_mark = self.line_count.min(self.upper_mark + self.content_rows); fn page_resize(&mut self, _: u16, row: u16) {
self.content_rows = row.saturating_sub(1);
}
fn draw(&self, stdout: &mut std::io::Stdout, wrong_key: Option<char>) {
let lower_mark = self
.line_count
.min(self.upper_mark.saturating_add(self.content_rows.into()));
self.draw_lines(stdout); self.draw_lines(stdout);
self.draw_prompt(stdout, lower_mark, wrong_key); self.draw_prompt(stdout, lower_mark, wrong_key);
stdout.flush().unwrap(); stdout.flush().unwrap();
@ -323,7 +324,7 @@ impl<'a> Pager<'a> {
.lines .lines
.iter() .iter()
.skip(self.upper_mark) .skip(self.upper_mark)
.take(self.content_rows); .take(self.content_rows.into());
for line in displayed_lines { for line in displayed_lines {
stdout stdout
@ -332,7 +333,7 @@ impl<'a> Pager<'a> {
} }
} }
fn draw_prompt(&self, stdout: &mut Stdout, lower_mark: usize, wrong_key: bool) { fn draw_prompt(&self, stdout: &mut Stdout, lower_mark: usize, wrong_key: Option<char>) {
let status_inner = if lower_mark == self.line_count { let status_inner = if lower_mark == self.line_count {
format!("Next file: {}", self.next_file.unwrap_or_default()) format!("Next file: {}", self.next_file.unwrap_or_default())
} else { } else {
@ -345,10 +346,15 @@ impl<'a> Pager<'a> {
let status = format!("--More--({})", status_inner); let status = format!("--More--({})", status_inner);
let banner = match (self.silent, wrong_key) { let banner = match (self.silent, wrong_key) {
(true, true) => "[Press 'h' for instructions. (unimplemented)]".to_string(), (true, Some(key)) => {
(true, false) => format!("{}[Press space to continue, 'q' to quit.]", status), format!(
(false, true) => format!("{}{}", status, BELL), "{} [Unknown key: '{}'. Press 'h' for instructions. (unimplemented)]",
(false, false) => status, status, key
)
}
(true, None) => format!("{}[Press space to continue, 'q' to quit.]", status),
(false, Some(_)) => format!("{}{}", status, BELL),
(false, None) => status,
}; };
write!( write!(
@ -364,7 +370,7 @@ impl<'a> Pager<'a> {
// Break the lines on the cols of the terminal // Break the lines on the cols of the terminal
fn break_buff(buff: &str, cols: usize) -> Vec<String> { fn break_buff(buff: &str, cols: usize) -> Vec<String> {
let mut lines = Vec::new(); let mut lines = Vec::with_capacity(buff.lines().count());
for l in buff.lines() { for l in buff.lines() {
lines.append(&mut break_line(l, cols)); lines.append(&mut break_line(l, cols));
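
The pager rework stores the window height as the `u16` returned by `terminal::size()`, drops the `close_on_down` flag, and does all movement with saturating arithmetic so paging and terminal resizes cannot underflow. A minimal sketch of that bookkeeping, omitting drawing and key handling:

```rust
// The visible window is a line offset plus a row count; all movement uses
// saturating arithmetic so end-of-file and resizes never wrap around.
struct Pager {
    upper_mark: usize,   // first line currently shown
    content_rows: u16,   // rows available for content (terminal rows minus prompt)
    line_count: usize,   // total number of wrapped lines
}

impl Pager {
    fn new(rows: u16, line_count: usize) -> Self {
        Self {
            upper_mark: 0,
            content_rows: rows.saturating_sub(1),
            line_count,
        }
    }
    fn should_close(&self) -> bool {
        self.upper_mark.saturating_add(self.content_rows.into()) >= self.line_count
    }
    fn page_down(&mut self) {
        self.upper_mark = self.upper_mark.saturating_add(self.content_rows.into());
    }
    fn page_up(&mut self) {
        self.upper_mark = self.upper_mark.saturating_sub(self.content_rows.into());
    }
    fn page_resize(&mut self, _cols: u16, rows: u16) {
        self.content_rows = rows.saturating_sub(1);
    }
}

fn main() {
    let mut p = Pager::new(24, 100);
    p.page_down();
    p.page_resize(80, 10); // shrink the terminal mid-session
    p.page_up();
    println!("upper_mark={} closes={}", p.upper_mark, p.should_close());
}
```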


@ -230,7 +230,7 @@ fn exec(files: &[PathBuf], b: Behavior) -> i32 {
// lacks permission to access metadata. // lacks permission to access metadata.
if source.symlink_metadata().is_err() { if source.symlink_metadata().is_err() {
show_error!( show_error!(
"cannot stat {}: No such file or directory", "cannot stat '{}': No such file or directory",
source.display() source.display()
); );
return 1; return 1;
@ -240,7 +240,7 @@ fn exec(files: &[PathBuf], b: Behavior) -> i32 {
if b.no_target_dir { if b.no_target_dir {
if !source.is_dir() { if !source.is_dir() {
show_error!( show_error!(
"cannot overwrite directory {} with non-directory", "cannot overwrite directory '{}' with non-directory",
target.display() target.display()
); );
return 1; return 1;
@ -249,7 +249,7 @@ fn exec(files: &[PathBuf], b: Behavior) -> i32 {
return match rename(source, target, &b) { return match rename(source, target, &b) {
Err(e) => { Err(e) => {
show_error!( show_error!(
"cannot move {} to {}: {}", "cannot move '{}' to '{}': {}",
source.display(), source.display(),
target.display(), target.display(),
e.to_string() e.to_string()
@ -263,7 +263,7 @@ fn exec(files: &[PathBuf], b: Behavior) -> i32 {
return move_files_into_dir(&[source.clone()], target, &b); return move_files_into_dir(&[source.clone()], target, &b);
} else if target.exists() && source.is_dir() { } else if target.exists() && source.is_dir() {
show_error!( show_error!(
"cannot overwrite non-directory {} with directory {}", "cannot overwrite non-directory '{}' with directory '{}'",
target.display(), target.display(),
source.display() source.display()
); );
@ -278,7 +278,7 @@ fn exec(files: &[PathBuf], b: Behavior) -> i32 {
_ => { _ => {
if b.no_target_dir { if b.no_target_dir {
show_error!( show_error!(
"mv: extra operand {}\n\ "mv: extra operand '{}'\n\
Try '{} --help' for more information.", Try '{} --help' for more information.",
files[2].display(), files[2].display(),
executable!() executable!()
@ -294,7 +294,7 @@ fn exec(files: &[PathBuf], b: Behavior) -> i32 {
fn move_files_into_dir(files: &[PathBuf], target_dir: &Path, b: &Behavior) -> i32 { fn move_files_into_dir(files: &[PathBuf], target_dir: &Path, b: &Behavior) -> i32 {
if !target_dir.is_dir() { if !target_dir.is_dir() {
show_error!("target {} is not a directory", target_dir.display()); show_error!("target '{}' is not a directory", target_dir.display());
return 1; return 1;
} }
@ -304,7 +304,7 @@ fn move_files_into_dir(files: &[PathBuf], target_dir: &Path, b: &Behavior) -> i3
Some(name) => target_dir.join(name), Some(name) => target_dir.join(name),
None => { None => {
show_error!( show_error!(
"cannot stat {}: No such file or directory", "cannot stat '{}': No such file or directory",
sourcepath.display() sourcepath.display()
); );
@ -315,7 +315,7 @@ fn move_files_into_dir(files: &[PathBuf], target_dir: &Path, b: &Behavior) -> i3
if let Err(e) = rename(sourcepath, &targetpath, b) { if let Err(e) = rename(sourcepath, &targetpath, b) {
show_error!( show_error!(
"cannot move {} to {}: {}", "cannot move '{}' to '{}': {}",
sourcepath.display(), sourcepath.display(),
targetpath.display(), targetpath.display(),
e.to_string() e.to_string()
@ -338,7 +338,7 @@ fn rename(from: &Path, to: &Path, b: &Behavior) -> io::Result<()> {
match b.overwrite { match b.overwrite {
OverwriteMode::NoClobber => return Ok(()), OverwriteMode::NoClobber => return Ok(()),
OverwriteMode::Interactive => { OverwriteMode::Interactive => {
println!("{}: overwrite {}? ", executable!(), to.display()); println!("{}: overwrite '{}'? ", executable!(), to.display());
if !read_yes() { if !read_yes() {
return Ok(()); return Ok(());
} }
@ -371,9 +371,9 @@ fn rename(from: &Path, to: &Path, b: &Behavior) -> io::Result<()> {
rename_with_fallback(from, to)?; rename_with_fallback(from, to)?;
if b.verbose { if b.verbose {
print!("{} -> {}", from.display(), to.display()); print!("'{}' -> '{}'", from.display(), to.display());
match backup_path { match backup_path {
Some(path) => println!(" (backup: {})", path.display()), Some(path) => println!(" (backup: '{}')", path.display()),
None => println!(), None => println!(),
} }
} }
View file
@ -79,7 +79,7 @@ fn parse_suffix(s: &str) -> Result<(f64, Option<Suffix>)> {
Some('Y') => Some((RawSuffix::Y, with_i)), Some('Y') => Some((RawSuffix::Y, with_i)),
Some('0'..='9') => None, Some('0'..='9') => None,
_ => return Err(format!("invalid suffix in input: '{}'", s)), _ => return Err(format!("invalid suffix in input: '{}'", s)),
}; };
let suffix_len = match suffix { let suffix_len = match suffix {
None => 0, None => 0,
View file
@ -234,7 +234,7 @@ fn idle_string(when: i64) -> String {
} }
fn time_string(ut: &Utmpx) -> String { fn time_string(ut: &Utmpx) -> String {
time::strftime("%Y-%m-%d %H:%M", &ut.login_time()).unwrap() time::strftime("%b %e %H:%M", &ut.login_time()).unwrap() // LC_ALL=C
} }
impl Pinky { impl Pinky {
View file
@ -234,7 +234,7 @@ impl LineSplitter {
fn new(settings: &Settings) -> LineSplitter { fn new(settings: &Settings) -> LineSplitter {
LineSplitter { LineSplitter {
lines_per_split: settings.strategy_param.parse().unwrap_or_else(|_| { lines_per_split: settings.strategy_param.parse().unwrap_or_else(|_| {
crash!(1, "invalid number of lines: {}", settings.strategy_param) crash!(1, "invalid number of lines: '{}'", settings.strategy_param)
}), }),
} }
} }
View file
@ -24,7 +24,7 @@ use std::{cmp, fs, iter};
macro_rules! check_bound { macro_rules! check_bound {
($str: ident, $bound:expr, $beg: expr, $end: expr) => { ($str: ident, $bound:expr, $beg: expr, $end: expr) => {
if $end >= $bound { if $end >= $bound {
return Err(format!("{}: invalid directive", &$str[$beg..$end])); return Err(format!("'{}': invalid directive", &$str[$beg..$end]));
} }
}; };
View file

@ -167,7 +167,7 @@ impl Parser {
self.expr(); self.expr();
match self.next_token() { match self.next_token() {
Symbol::Literal(s) if s == ")" => (), Symbol::Literal(s) if s == ")" => (),
_ => panic!("expected )"), _ => panic!("expected ')'"),
} }
} }
} }
@ -314,7 +314,7 @@ impl Parser {
self.expr(); self.expr();
match self.tokens.next() { match self.tokens.next() {
Some(token) => Err(format!("extra argument {}", token.to_string_lossy())), Some(token) => Err(format!("extra argument '{}'", token.to_string_lossy())),
None => Ok(()), None => Ok(()),
} }
} }
View file
@@ -12,10 +12,24 @@ mod parser;
 use parser::{parse, Symbol};
 use std::ffi::{OsStr, OsString};
+use std::path::Path;
-pub fn uumain(args: impl uucore::Args) -> i32 {
-    // TODO: handle being called as `[`
-    let args: Vec<_> = args.skip(1).collect();
+pub fn uumain(mut args: impl uucore::Args) -> i32 {
+    let program = args.next().unwrap_or_else(|| OsString::from("test"));
+    let binary_name = Path::new(&program)
+        .file_name()
+        .unwrap_or_else(|| OsStr::new("test"))
+        .to_string_lossy();
+    let mut args: Vec<_> = args.collect();
+    // If invoked via name '[', matching ']' must be in the last arg
+    if binary_name == "[" {
+        let last = args.pop();
+        if last != Some(OsString::from("]")) {
+            eprintln!("[: missing ']'");
+            return 2;
+        }
+    }
     let result = parse(args).and_then(|mut stack| eval(&mut stack));
@ -74,7 +88,7 @@ fn eval(stack: &mut Vec<Symbol>) -> Result<bool, String> {
return Ok(true); return Ok(true);
} }
_ => { _ => {
return Err(format!("missing argument after {:?}", op)); return Err(format!("missing argument after '{:?}'", op));
} }
}; };
@ -126,7 +140,7 @@ fn eval(stack: &mut Vec<Symbol>) -> Result<bool, String> {
} }
fn integers(a: &OsStr, b: &OsStr, op: &OsStr) -> Result<bool, String> { fn integers(a: &OsStr, b: &OsStr, op: &OsStr) -> Result<bool, String> {
let format_err = |value| format!("invalid integer {}", value); let format_err = |value| format!("invalid integer '{}'", value);
let a = a.to_string_lossy(); let a = a.to_string_lossy();
let a: i64 = a.parse().map_err(|_| format_err(a))?; let a: i64 = a.parse().map_err(|_| format_err(a))?;
@ -142,7 +156,7 @@ fn integers(a: &OsStr, b: &OsStr, op: &OsStr) -> Result<bool, String> {
"-ge" => a >= b, "-ge" => a >= b,
"-lt" => a < b, "-lt" => a < b,
"-le" => a <= b, "-le" => a <= b,
_ => return Err(format!("unknown operator {}", operator)), _ => return Err(format!("unknown operator '{}'", operator)),
}) })
} }
@ -150,7 +164,7 @@ fn isatty(fd: &OsStr) -> Result<bool, String> {
let fd = fd.to_string_lossy(); let fd = fd.to_string_lossy();
fd.parse() fd.parse()
.map_err(|_| format!("invalid integer {}", fd)) .map_err(|_| format!("invalid integer '{}'", fd))
.map(|i| { .map(|i| {
#[cfg(not(target_os = "redox"))] #[cfg(not(target_os = "redox"))]
unsafe { unsafe {
View file
@@ -89,8 +89,8 @@ impl Config {
             signal,
             duration,
             preserve_status,
-            command,
             verbose,
+            command,
         }
     }
} }
View file
@ -311,7 +311,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
if !(delete_flag || squeeze_flag) && sets.len() < 2 { if !(delete_flag || squeeze_flag) && sets.len() < 2 {
show_error!( show_error!(
"missing operand after {}\nTry `{} --help` for more information.", "missing operand after '{}'\nTry `{} --help` for more information.",
sets[0], sets[0],
executable!() executable!()
); );
View file
@ -210,7 +210,7 @@ fn truncate_reference_and_size(
let mode = match parse_mode_and_size(size_string) { let mode = match parse_mode_and_size(size_string) {
Ok(m) => match m { Ok(m) => match m {
TruncateMode::Absolute(_) => { TruncateMode::Absolute(_) => {
crash!(1, "you must specify a relative --size with --reference") crash!(1, "you must specify a relative '--size' with '--reference'")
} }
_ => m, _ => m,
}, },
View file
@ -300,7 +300,7 @@ fn idle_string<'a>(when: i64, boottime: i64) -> Cow<'a, str> {
} }
fn time_string(ut: &Utmpx) -> String { fn time_string(ut: &Utmpx) -> String {
time::strftime("%Y-%m-%d %H:%M", &ut.login_time()).unwrap() time::strftime("%b %e %H:%M", &ut.login_time()).unwrap() // LC_ALL=C
} }
#[inline] #[inline]
@ -523,8 +523,8 @@ impl Who {
buf.push_str(&msg); buf.push_str(&msg);
} }
buf.push_str(&format!(" {:<12}", line)); buf.push_str(&format!(" {:<12}", line));
// "%Y-%m-%d %H:%M" // "%b %e %H:%M" (LC_ALL=C)
let time_size = 4 + 1 + 2 + 1 + 2 + 1 + 2 + 1 + 2; let time_size = 3 + 2 + 2 + 1 + 2;
buf.push_str(&format!(" {:<1$}", time, time_size)); buf.push_str(&format!(" {:<1$}", time, time_size));
if !self.short_output { if !self.short_output {
View file
@ -186,7 +186,7 @@ mod tests {
fn make_os_vec(os_str: &OsStr) -> Vec<OsString> { fn make_os_vec(os_str: &OsStr) -> Vec<OsString> {
vec![ vec![
OsString::from("test"), OsString::from("test"),
OsString::from("สวัสดี"), OsString::from("สวัสดี"), // spell-checker:disable-line
os_str.to_os_string(), os_str.to_os_string(),
] ]
} }
View file
@ -18,7 +18,7 @@ use std::fmt;
/// ///
/// # Errors /// # Errors
/// ///
/// Will return `ParseSizeError` if its not possible to parse this /// Will return `ParseSizeError` if it's not possible to parse this
/// string into a number, e.g. if the string does not begin with a /// string into a number, e.g. if the string does not begin with a
/// numeral, or if the unit is not one of the supported units described /// numeral, or if the unit is not one of the supported units described
/// in the preceding section. /// in the preceding section.
@ -109,19 +109,19 @@ impl fmt::Display for ParseSizeError {
impl ParseSizeError { impl ParseSizeError {
fn parse_failure(s: &str) -> ParseSizeError { fn parse_failure(s: &str) -> ParseSizeError {
// stderr on linux (GNU coreutils 8.32) // stderr on linux (GNU coreutils 8.32) (LC_ALL=C)
// has to be handled in the respective uutils because strings differ, e.g.: // has to be handled in the respective uutils because strings differ, e.g.:
// //
// `NUM` // `NUM`
// head: invalid number of bytes: 1fb // head: invalid number of bytes: '1fb'
// tail: invalid number of bytes: 1fb // tail: invalid number of bytes: '1fb'
// //
// `SIZE` // `SIZE`
// split: invalid number of bytes: 1fb // split: invalid number of bytes: '1fb'
// truncate: Invalid number: 1fb // truncate: Invalid number: '1fb'
// //
// `MODE` // `MODE`
// stdbuf: invalid mode 1fb // stdbuf: invalid mode '1fb'
// //
// `SIZE` // `SIZE`
// sort: invalid suffix in --buffer-size argument '1fb' // sort: invalid suffix in --buffer-size argument '1fb'
@ -140,27 +140,27 @@ impl ParseSizeError {
// --width // --width
// --strings // --strings
// etc. // etc.
ParseSizeError::ParseFailure(format!("{}", s)) ParseSizeError::ParseFailure(format!("'{}'", s))
} }
fn size_too_big(s: &str) -> ParseSizeError { fn size_too_big(s: &str) -> ParseSizeError {
// stderr on linux (GNU coreutils 8.32) // stderr on linux (GNU coreutils 8.32) (LC_ALL=C)
// has to be handled in the respective uutils because strings differ, e.g.: // has to be handled in the respective uutils because strings differ, e.g.:
// //
// head: invalid number of bytes: 1Y: Value too large for defined data type // head: invalid number of bytes: '1Y': Value too large for defined data type
// tail: invalid number of bytes: 1Y: Value too large for defined data type // tail: invalid number of bytes: '1Y': Value too large for defined data type
// split: invalid number of bytes: 1Y: Value too large for defined data type // split: invalid number of bytes: '1Y': Value too large for defined data type
// truncate: Invalid number: 1Y: Value too large for defined data type // truncate: Invalid number: '1Y': Value too large for defined data type
// stdbuf: invalid mode 1Y: Value too large for defined data type // stdbuf: invalid mode '1Y': Value too large for defined data type
// sort: -S argument '1Y' too large // sort: -S argument '1Y' too large
// du: -B argument '1Y' too large // du: -B argument '1Y' too large
// od: -N argument '1Y' too large // od: -N argument '1Y' too large
// etc. // etc.
// //
// stderr on macos (brew - GNU coreutils 8.32) also differs for the same version, e.g.: // stderr on macos (brew - GNU coreutils 8.32) also differs for the same version, e.g.:
// ghead: invalid number of bytes: 1Y: Value too large to be stored in data type // ghead: invalid number of bytes: '1Y': Value too large to be stored in data type
// gtail: invalid number of bytes: 1Y: Value too large to be stored in data type // gtail: invalid number of bytes: '1Y': Value too large to be stored in data type
ParseSizeError::SizeTooBig(format!("{}: Value too large for defined data type", s)) ParseSizeError::SizeTooBig(format!("'{}': Value too large for defined data type", s))
} }
} }
@ -227,7 +227,7 @@ mod tests {
)); ));
assert_eq!( assert_eq!(
ParseSizeError::SizeTooBig("1Y: Value too large for defined data type".to_string()), ParseSizeError::SizeTooBig("'1Y': Value too large for defined data type".to_string()),
parse_size("1Y").unwrap_err() parse_size("1Y").unwrap_err()
); );
} }
@ -262,7 +262,7 @@ mod tests {
for &test_string in &test_strings { for &test_string in &test_strings {
assert_eq!( assert_eq!(
parse_size(test_string).unwrap_err(), parse_size(test_string).unwrap_err(),
ParseSizeError::ParseFailure(format!("{}", test_string)) ParseSizeError::ParseFailure(format!("'{}'", test_string))
); );
} }
} }
View file
@ -103,7 +103,7 @@ fn test_wrap_bad_arg() {
.arg(wrap_param) .arg(wrap_param)
.arg("b") .arg("b")
.fails() .fails()
.stderr_only("base32: Invalid wrap size: b: invalid digit found in string\n"); .stderr_only("base32: Invalid wrap size: 'b': invalid digit found in string\n");
} }
} }
@ -114,7 +114,7 @@ fn test_base32_extra_operand() {
.arg("a.txt") .arg("a.txt")
.arg("a.txt") .arg("a.txt")
.fails() .fails()
.stderr_only("base32: extra operand a.txt"); .stderr_only("base32: extra operand 'a.txt'");
} }
#[test] #[test]
View file
@ -89,7 +89,7 @@ fn test_wrap_bad_arg() {
.arg(wrap_param) .arg(wrap_param)
.arg("b") .arg("b")
.fails() .fails()
.stderr_only("base64: Invalid wrap size: b: invalid digit found in string\n"); .stderr_only("base64: Invalid wrap size: 'b': invalid digit found in string\n");
} }
} }
@ -100,7 +100,7 @@ fn test_base64_extra_operand() {
.arg("a.txt") .arg("a.txt")
.arg("a.txt") .arg("a.txt")
.fails() .fails()
.stderr_only("base64: extra operand a.txt"); .stderr_only("base64: extra operand 'a.txt'");
} }
#[test] #[test]
View file
@ -172,14 +172,14 @@ fn test_chown_only_colon() {
// expected: // expected:
// $ chown -v :: file.txt 2>out_err ; echo $? ; cat out_err // $ chown -v :: file.txt 2>out_err ; echo $? ; cat out_err
// 1 // 1
// chown: invalid group: :: // chown: invalid group: '::'
scene scene
.ucmd() .ucmd()
.arg("::") .arg("::")
.arg("--verbose") .arg("--verbose")
.arg(file1) .arg(file1)
.fails() .fails()
.stderr_contains(&"invalid group: ::"); .stderr_contains(&"invalid group: '::'");
} }
#[test] #[test]
View file
@ -1325,3 +1325,16 @@ fn test_copy_dir_with_symlinks() {
ucmd.args(&["-r", "dir", "copy"]).succeeds(); ucmd.args(&["-r", "dir", "copy"]).succeeds();
assert_eq!(at.resolve_link("copy/file-link"), "file"); assert_eq!(at.resolve_link("copy/file-link"), "file");
} }
#[test]
#[cfg(not(windows))]
fn test_copy_symlink_force() {
let (at, mut ucmd) = at_and_ucmd!();
at.touch("file");
at.symlink_file("file", "file-link");
at.touch("copy");
ucmd.args(&["file-link", "copy", "-f", "--no-dereference"])
.succeeds();
assert_eq!(at.resolve_link("copy"), "file");
}
View file
@ -162,7 +162,7 @@ fn test_directory_and_no_such_file() {
fn test_equal_as_delimiter() { fn test_equal_as_delimiter() {
new_ucmd!() new_ucmd!()
.args(&["-f", "2", "-d="]) .args(&["-f", "2", "-d="])
.pipe_in("--libdir=./out/lib") .pipe_in("--dir=./out/lib")
.succeeds() .succeeds()
.stdout_only("./out/lib\n"); .stdout_only("./out/lib\n");
} }
View file
@ -117,7 +117,7 @@ fn test_date_format_without_plus() {
new_ucmd!() new_ucmd!()
.arg("%s") .arg("%s")
.fails() .fails()
.stderr_contains("date: invalid date %s") .stderr_contains("date: invalid date '%s'")
.code_is(1); .code_is(1);
} }
View file
@ -355,7 +355,7 @@ fn test_du_no_permission() {
let result = scene.ucmd().arg(SUB_DIR_LINKS).run(); // TODO: replace with ".fails()" once `du` is fixed let result = scene.ucmd().arg(SUB_DIR_LINKS).run(); // TODO: replace with ".fails()" once `du` is fixed
result.stderr_contains( result.stderr_contains(
"du: cannot read directory subdir/links: Permission denied (os error 13)", "du: cannot read directory 'subdir/links': Permission denied (os error 13)",
); );
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
View file
@ -255,21 +255,21 @@ fn test_head_invalid_num() {
new_ucmd!() new_ucmd!()
.args(&["-c", "1024R", "emptyfile.txt"]) .args(&["-c", "1024R", "emptyfile.txt"])
.fails() .fails()
.stderr_is("head: invalid number of bytes: 1024R"); .stderr_is("head: invalid number of bytes: '1024R'");
new_ucmd!() new_ucmd!()
.args(&["-n", "1024R", "emptyfile.txt"]) .args(&["-n", "1024R", "emptyfile.txt"])
.fails() .fails()
.stderr_is("head: invalid number of lines: 1024R"); .stderr_is("head: invalid number of lines: '1024R'");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&["-c", "1Y", "emptyfile.txt"]) .args(&["-c", "1Y", "emptyfile.txt"])
.fails() .fails()
.stderr_is("head: invalid number of bytes: 1Y: Value too large for defined data type"); .stderr_is("head: invalid number of bytes: '1Y': Value too large for defined data type");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&["-n", "1Y", "emptyfile.txt"]) .args(&["-n", "1Y", "emptyfile.txt"])
.fails() .fails()
.stderr_is("head: invalid number of lines: 1Y: Value too large for defined data type"); .stderr_is("head: invalid number of lines: '1Y': Value too large for defined data type");
#[cfg(target_pointer_width = "32")] #[cfg(target_pointer_width = "32")]
{ {
let sizes = ["1000G", "10T"]; let sizes = ["1000G", "10T"];
@ -279,7 +279,7 @@ fn test_head_invalid_num() {
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only(format!( .stderr_only(format!(
"head: invalid number of bytes: {}: Value too large for defined data type", "head: invalid number of bytes: '{}': Value too large for defined data type",
size size
)); ));
} }
View file
@ -432,7 +432,7 @@ fn check_coreutil_version(util_name: &str, version_expected: &str) -> String {
let scene = TestScenario::new(util_name); let scene = TestScenario::new(util_name);
let version_check = scene let version_check = scene
.cmd_keepenv(&util_name) .cmd_keepenv(&util_name)
.env("LANGUAGE", "C") .env("LC_ALL", "C")
.arg("--version") .arg("--version")
.run(); .run();
version_check version_check
@ -476,7 +476,7 @@ fn expected_result(args: &[&str]) -> Result<CmdResult, String> {
let scene = TestScenario::new(util_name); let scene = TestScenario::new(util_name);
let result = scene let result = scene
.cmd_keepenv(util_name) .cmd_keepenv(util_name)
.env("LANGUAGE", "C") .env("LC_ALL", "C")
.args(args) .args(args)
.run(); .run();
View file
@ -168,7 +168,7 @@ fn test_ls_width() {
.ucmd() .ucmd()
.args(&option.split(' ').collect::<Vec<_>>()) .args(&option.split(' ').collect::<Vec<_>>())
.fails() .fails()
.stderr_only("ls: invalid line width: 1a"); .stderr_only("ls: invalid line width: '1a'");
} }
} }
@ -2021,3 +2021,28 @@ fn test_ls_path() {
.run() .run()
.stdout_is(expected_stdout); .stdout_is(expected_stdout);
} }
#[test]
fn test_ls_dangling_symlinks() {
let scene = TestScenario::new(util_name!());
let at = &scene.fixtures;
at.mkdir("temp_dir");
at.symlink_file("does_not_exist", "temp_dir/dangle");
scene.ucmd().arg("-L").arg("temp_dir/dangle").fails();
scene.ucmd().arg("-H").arg("temp_dir/dangle").fails();
scene
.ucmd()
.arg("temp_dir/dangle")
.succeeds()
.stdout_contains("dangle");
scene
.ucmd()
.arg("-Li")
.arg("temp_dir")
.succeeds() // this should fail, though at the moment, ls lacks a way to propagate errors encountered during display
.stdout_contains(if cfg!(windows) { "dangle" } else { "? dangle" });
}
View file
@ -17,7 +17,10 @@ static TEST_TEMPLATE8: &str = "tempXXXl/ate";
#[cfg(windows)] #[cfg(windows)]
static TEST_TEMPLATE8: &str = "tempXXXl\\ate"; static TEST_TEMPLATE8: &str = "tempXXXl\\ate";
#[cfg(not(windows))]
const TMPDIR: &str = "TMPDIR"; const TMPDIR: &str = "TMPDIR";
#[cfg(windows)]
const TMPDIR: &str = "TMP";
#[test] #[test]
fn test_mktemp_mktemp() { fn test_mktemp_mktemp() {
@ -386,7 +389,7 @@ fn test_mktemp_tmpdir_one_arg() {
let scene = TestScenario::new(util_name!()); let scene = TestScenario::new(util_name!());
let result = scene let result = scene
.ucmd() .ucmd_keepenv()
.arg("--tmpdir") .arg("--tmpdir")
.arg("apt-key-gpghome.XXXXXXXXXX") .arg("apt-key-gpghome.XXXXXXXXXX")
.succeeds(); .succeeds();
@ -399,7 +402,7 @@ fn test_mktemp_directory_tmpdir() {
let scene = TestScenario::new(util_name!()); let scene = TestScenario::new(util_name!());
let result = scene let result = scene
.ucmd() .ucmd_keepenv()
.arg("--directory") .arg("--directory")
.arg("--tmpdir") .arg("--tmpdir")
.arg("apt-key-gpghome.XXXXXXXXXX") .arg("apt-key-gpghome.XXXXXXXXXX")
View file
@ -614,7 +614,7 @@ fn test_mv_overwrite_nonempty_dir() {
// Not same error as GNU; the error message is a rust builtin // Not same error as GNU; the error message is a rust builtin
// TODO: test (and implement) correct error message (or at least decide whether to do so) // TODO: test (and implement) correct error message (or at least decide whether to do so)
// Current: "mv: couldn't rename path (Directory not empty; from=a; to=b)" // Current: "mv: couldn't rename path (Directory not empty; from=a; to=b)"
// GNU: "mv: cannot move a to b: Directory not empty" // GNU: "mv: cannot move 'a' to 'b': Directory not empty"
// Verbose output for the move should not be shown on failure // Verbose output for the move should not be shown on failure
let result = ucmd.arg("-vT").arg(dir_a).arg(dir_b).fails(); let result = ucmd.arg("-vT").arg(dir_a).arg(dir_b).fails();
@ -638,7 +638,7 @@ fn test_mv_backup_dir() {
.arg(dir_b) .arg(dir_b)
.succeeds() .succeeds()
.stdout_only(format!( .stdout_only(format!(
"{} -> {} (backup: {}~)\n", "'{}' -> '{}' (backup: '{}~')\n",
dir_a, dir_b, dir_b dir_a, dir_b, dir_b
)); ));
@ -672,7 +672,7 @@ fn test_mv_errors() {
// $ at.touch file && at.mkdir dir // $ at.touch file && at.mkdir dir
// $ mv -T file dir // $ mv -T file dir
// err == mv: cannot overwrite directory dir with non-directory // err == mv: cannot overwrite directory 'dir' with non-directory
scene scene
.ucmd() .ucmd()
.arg("-T") .arg("-T")
@ -680,13 +680,13 @@ fn test_mv_errors() {
.arg(dir) .arg(dir)
.fails() .fails()
.stderr_is(format!( .stderr_is(format!(
"mv: cannot overwrite directory {} with non-directory\n", "mv: cannot overwrite directory '{}' with non-directory\n",
dir dir
)); ));
// $ at.mkdir dir && at.touch file // $ at.mkdir dir && at.touch file
// $ mv dir file // $ mv dir file
// err == mv: cannot overwrite non-directory file with directory dir // err == mv: cannot overwrite non-directory 'file' with directory 'dir'
assert!(!scene assert!(!scene
.ucmd() .ucmd()
.arg(dir) .arg(dir)
@ -713,7 +713,7 @@ fn test_mv_verbose() {
.arg(file_a) .arg(file_a)
.arg(file_b) .arg(file_b)
.succeeds() .succeeds()
.stdout_only(format!("{} -> {}\n", file_a, file_b)); .stdout_only(format!("'{}' -> '{}'\n", file_a, file_b));
at.touch(file_a); at.touch(file_a);
scene scene
@ -723,12 +723,13 @@ fn test_mv_verbose() {
.arg(file_b) .arg(file_b)
.succeeds() .succeeds()
.stdout_only(format!( .stdout_only(format!(
"{} -> {} (backup: {}~)\n", "'{}' -> '{}' (backup: '{}~')\n",
file_a, file_b, file_b file_a, file_b, file_b
)); ));
} }
#[test] #[test]
#[cfg(target_os = "linux")] // mkdir does not support -m on windows. Freebsd doesn't return a permission error either.
fn test_mv_permission_error() { fn test_mv_permission_error() {
let scene = TestScenario::new("mkdir"); let scene = TestScenario::new("mkdir");
let folder1 = "bar"; let folder1 = "bar";
@ -738,12 +739,11 @@ fn test_mv_permission_error() {
scene.ucmd().arg("-m777").arg(folder2).succeeds(); scene.ucmd().arg("-m777").arg(folder2).succeeds();
scene scene
.cmd_keepenv(util_name!()) .ccmd("mv")
.arg(folder2) .arg(folder2)
.arg(folder_to_move) .arg(folder_to_move)
.run() .fails()
.stderr_str() .stderr_contains("Permission denied");
.ends_with("Permission denied");
} }
// Todo: // Todo:
@ -756,5 +756,5 @@ fn test_mv_permission_error() {
// -r--r--r-- 1 user user 0 okt 25 11:21 b // -r--r--r-- 1 user user 0 okt 25 11:21 b
// $ // $
// $ mv -v a b // $ mv -v a b
// mv: try to overwrite b, overriding mode 0444 (r--r--r--)? y // mv: try to overwrite 'b', overriding mode 0444 (r--r--r--)? y
// a -> b // 'a' -> 'b'
View file
@ -106,7 +106,7 @@ fn expected_result(args: &[&str]) -> String {
#[allow(clippy::needless_borrow)] #[allow(clippy::needless_borrow)]
TestScenario::new(&util_name) TestScenario::new(&util_name)
.cmd_keepenv(util_name) .cmd_keepenv(util_name)
.env("LANGUAGE", "C") .env("LC_ALL", "C")
.args(args) .args(args)
.succeeds() .succeeds()
.stdout_move_str() .stdout_move_str()
View file
@ -22,6 +22,7 @@ fn file_last_modified_time(ucmd: &UCommand, path: &str) -> String {
} }
fn all_minutes(from: DateTime<Local>, to: DateTime<Local>) -> Vec<String> { fn all_minutes(from: DateTime<Local>, to: DateTime<Local>) -> Vec<String> {
let to = to + Duration::minutes(1);
const FORMAT: &str = "%b %d %H:%M %Y"; const FORMAT: &str = "%b %d %H:%M %Y";
let mut vec = vec![]; let mut vec = vec![];
let mut current = from; let mut current = from;
View file
@ -28,7 +28,8 @@ fn test_helper(file_name: &str, possible_args: &[&str]) {
fn test_buffer_sizes() { fn test_buffer_sizes() {
let buffer_sizes = ["0", "50K", "50k", "1M", "100M"]; let buffer_sizes = ["0", "50K", "50k", "1M", "100M"];
for buffer_size in &buffer_sizes { for buffer_size in &buffer_sizes {
new_ucmd!() TestScenario::new(util_name!())
.ucmd_keepenv()
.arg("-n") .arg("-n")
.arg("-S") .arg("-S")
.arg(buffer_size) .arg(buffer_size)
@ -40,7 +41,8 @@ fn test_buffer_sizes() {
{ {
let buffer_sizes = ["1000G", "10T"]; let buffer_sizes = ["1000G", "10T"];
for buffer_size in &buffer_sizes { for buffer_size in &buffer_sizes {
new_ucmd!() TestScenario::new(util_name!())
.ucmd_keepenv()
.arg("-n") .arg("-n")
.arg("-S") .arg("-S")
.arg(buffer_size) .arg(buffer_size)
@ -877,7 +879,8 @@ fn test_compress() {
#[test] #[test]
fn test_compress_fail() { fn test_compress_fail() {
new_ucmd!() TestScenario::new(util_name!())
.ucmd_keepenv()
.args(&[ .args(&[
"ext_sort.txt", "ext_sort.txt",
"-n", "-n",
@ -892,7 +895,8 @@ fn test_compress_fail() {
#[test] #[test]
fn test_merge_batches() { fn test_merge_batches() {
new_ucmd!() TestScenario::new(util_name!())
.ucmd_keepenv()
.args(&["ext_sort.txt", "-n", "-S", "150b"]) .args(&["ext_sort.txt", "-n", "-S", "150b"])
.succeeds() .succeeds()
.stdout_only_fixture("ext_sort.expected"); .stdout_only_fixture("ext_sort.expected");
@ -900,7 +904,8 @@ fn test_merge_batches() {
#[test] #[test]
fn test_merge_batch_size() { fn test_merge_batch_size() {
new_ucmd!() TestScenario::new(util_name!())
.ucmd_keepenv()
.arg("--batch-size=2") .arg("--batch-size=2")
.arg("-m") .arg("-m")
.arg("--unique") .arg("--unique")
View file
@ -309,7 +309,7 @@ fn test_split_lines_number() {
.args(&["--lines", "2fb", "file"]) .args(&["--lines", "2fb", "file"])
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only("split: invalid number of lines: 2fb"); .stderr_only("split: invalid number of lines: '2fb'");
} }
#[test] #[test]
@ -318,13 +318,13 @@ fn test_split_invalid_bytes_size() {
.args(&["-b", "1024R"]) .args(&["-b", "1024R"])
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only("split: invalid number of bytes: 1024R"); .stderr_only("split: invalid number of bytes: '1024R'");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&["-b", "1Y"]) .args(&["-b", "1Y"])
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only("split: invalid number of bytes: 1Y: Value too large for defined data type"); .stderr_only("split: invalid number of bytes: '1Y': Value too large for defined data type");
#[cfg(target_pointer_width = "32")] #[cfg(target_pointer_width = "32")]
{ {
let sizes = ["1000G", "10T"]; let sizes = ["1000G", "10T"];
@ -334,7 +334,7 @@ fn test_split_invalid_bytes_size() {
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only(format!( .stderr_only(format!(
"split: invalid number of bytes: {}: Value too large for defined data type", "split: invalid number of bytes: '{}': Value too large for defined data type",
size size
)); ));
} }
View file
@ -317,7 +317,7 @@ fn expected_result(args: &[&str]) -> String {
#[allow(clippy::needless_borrow)] #[allow(clippy::needless_borrow)]
TestScenario::new(&util_name) TestScenario::new(&util_name)
.cmd_keepenv(util_name) .cmd_keepenv(util_name)
.env("LANGUAGE", "C") .env("LC_ALL", "C")
.args(args) .args(args)
.succeeds() .succeeds()
.stdout_move_str() .stdout_move_str()
View file
@ -63,12 +63,12 @@ fn test_stdbuf_invalid_mode_fails() {
.args(&[*option, "1024R", "head"]) .args(&[*option, "1024R", "head"])
.fails() .fails()
.code_is(125) .code_is(125)
.stderr_only("stdbuf: invalid mode 1024R"); .stderr_only("stdbuf: invalid mode '1024R'");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&[*option, "1Y", "head"]) .args(&[*option, "1Y", "head"])
.fails() .fails()
.code_is(125) .code_is(125)
.stderr_contains("stdbuf: invalid mode 1Y: Value too large for defined data type"); .stderr_contains("stdbuf: invalid mode '1Y': Value too large for defined data type");
} }
} }
View file
@ -364,21 +364,21 @@ fn test_tail_invalid_num() {
new_ucmd!() new_ucmd!()
.args(&["-c", "1024R", "emptyfile.txt"]) .args(&["-c", "1024R", "emptyfile.txt"])
.fails() .fails()
.stderr_is("tail: invalid number of bytes: 1024R"); .stderr_is("tail: invalid number of bytes: '1024R'");
new_ucmd!() new_ucmd!()
.args(&["-n", "1024R", "emptyfile.txt"]) .args(&["-n", "1024R", "emptyfile.txt"])
.fails() .fails()
.stderr_is("tail: invalid number of lines: 1024R"); .stderr_is("tail: invalid number of lines: '1024R'");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&["-c", "1Y", "emptyfile.txt"]) .args(&["-c", "1Y", "emptyfile.txt"])
.fails() .fails()
.stderr_is("tail: invalid number of bytes: 1Y: Value too large for defined data type"); .stderr_is("tail: invalid number of bytes: '1Y': Value too large for defined data type");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&["-n", "1Y", "emptyfile.txt"]) .args(&["-n", "1Y", "emptyfile.txt"])
.fails() .fails()
.stderr_is("tail: invalid number of lines: 1Y: Value too large for defined data type"); .stderr_is("tail: invalid number of lines: '1Y': Value too large for defined data type");
#[cfg(target_pointer_width = "32")] #[cfg(target_pointer_width = "32")]
{ {
let sizes = ["1000G", "10T"]; let sizes = ["1000G", "10T"];
@ -388,7 +388,7 @@ fn test_tail_invalid_num() {
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only(format!( .stderr_only(format!(
"tail: invalid number of bytes: {}: Value too large for defined data type", "tail: invalid number of bytes: '{}': Value too large for defined data type",
size size
)); ));
} }
View file
@ -165,7 +165,7 @@ fn test_dangling_string_comparison_is_error() {
.args(&["missing_something", "="]) .args(&["missing_something", "="])
.run() .run()
.status_code(2) .status_code(2)
.stderr_is("test: missing argument after ="); .stderr_is("test: missing argument after '='");
} }
#[test] #[test]
@ -265,7 +265,7 @@ fn test_float_inequality_is_error() {
.args(&["123.45", "-ge", "6"]) .args(&["123.45", "-ge", "6"])
.run() .run()
.status_code(2) .status_code(2)
.stderr_is("test: invalid integer 123.45"); .stderr_is("test: invalid integer '123.45'");
} }
#[test] #[test]
@ -283,7 +283,7 @@ fn test_invalid_utf8_integer_compare() {
cmd.run() cmd.run()
.status_code(2) .status_code(2)
.stderr_is("test: invalid integer fo<EFBFBD>o"); .stderr_is("test: invalid integer 'fo<66>o'");
let mut cmd = new_ucmd!(); let mut cmd = new_ucmd!();
cmd.raw.arg(arg); cmd.raw.arg(arg);
@ -291,7 +291,7 @@ fn test_invalid_utf8_integer_compare() {
cmd.run() cmd.run()
.status_code(2) .status_code(2)
.stderr_is("test: invalid integer fo<EFBFBD>o"); .stderr_is("test: invalid integer 'fo<66>o'");
} }
#[test] #[test]
@ -674,7 +674,7 @@ fn test_erroneous_parenthesized_expression() {
.args(&["a", "!=", "(", "b", "-a", "b", ")", "!=", "c"]) .args(&["a", "!=", "(", "b", "-a", "b", ")", "!=", "c"])
.run() .run()
.status_code(2) .status_code(2)
.stderr_is("test: extra argument b"); .stderr_is("test: extra argument 'b'");
} }
#[test] #[test]
@ -690,3 +690,31 @@ fn test_or_as_filename() {
fn test_string_length_and_nothing() { fn test_string_length_and_nothing() {
new_ucmd!().args(&["-n", "a", "-a"]).run().status_code(2); new_ucmd!().args(&["-n", "a", "-a"]).run().status_code(2);
} }
#[test]
fn test_bracket_syntax_success() {
let scenario = TestScenario::new("[");
let mut ucmd = scenario.ucmd();
ucmd.args(&["1", "-eq", "1", "]"]).succeeds();
}
#[test]
fn test_bracket_syntax_failure() {
let scenario = TestScenario::new("[");
let mut ucmd = scenario.ucmd();
ucmd.args(&["1", "-eq", "2", "]"]).run().status_code(1);
}
#[test]
fn test_bracket_syntax_missing_right_bracket() {
let scenario = TestScenario::new("[");
let mut ucmd = scenario.ucmd();
// Missing closing bracket takes precedence over other possible errors.
ucmd.args(&["1", "-eq"])
.run()
.status_code(2)
.stderr_is("[: missing ']'");
}
View file
@ -249,7 +249,7 @@ fn test_size_and_reference() {
#[test] #[test]
fn test_error_filename_only() { fn test_error_filename_only() {
// truncate: you must specify either --size or --reference // truncate: you must specify either '--size' or '--reference'
new_ucmd!().args(&["file"]).fails().stderr_contains( new_ucmd!().args(&["file"]).fails().stderr_contains(
"error: The following required arguments were not provided: "error: The following required arguments were not provided:
--reference <RFILE> --reference <RFILE>
@ -262,15 +262,15 @@ fn test_invalid_numbers() {
new_ucmd!() new_ucmd!()
.args(&["-s", "0X", "file"]) .args(&["-s", "0X", "file"])
.fails() .fails()
.stderr_contains("Invalid number: 0X"); .stderr_contains("Invalid number: '0X'");
new_ucmd!() new_ucmd!()
.args(&["-s", "0XB", "file"]) .args(&["-s", "0XB", "file"])
.fails() .fails()
.stderr_contains("Invalid number: 0XB"); .stderr_contains("Invalid number: '0XB'");
new_ucmd!() new_ucmd!()
.args(&["-s", "0B", "file"]) .args(&["-s", "0B", "file"])
.fails() .fails()
.stderr_contains("Invalid number: 0B"); .stderr_contains("Invalid number: '0B'");
} }
#[test] #[test]
@ -299,13 +299,13 @@ fn test_truncate_bytes_size() {
.args(&["--size", "1024R", "file"]) .args(&["--size", "1024R", "file"])
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only("truncate: Invalid number: 1024R"); .stderr_only("truncate: Invalid number: '1024R'");
#[cfg(not(target_pointer_width = "128"))] #[cfg(not(target_pointer_width = "128"))]
new_ucmd!() new_ucmd!()
.args(&["--size", "1Y", "file"]) .args(&["--size", "1Y", "file"])
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only("truncate: Invalid number: 1Y: Value too large for defined data type"); .stderr_only("truncate: Invalid number: '1Y': Value too large for defined data type");
#[cfg(target_pointer_width = "32")] #[cfg(target_pointer_width = "32")]
{ {
let sizes = ["1000G", "10T"]; let sizes = ["1000G", "10T"];
@ -315,7 +315,7 @@ fn test_truncate_bytes_size() {
.fails() .fails()
.code_is(1) .code_is(1)
.stderr_only(format!( .stderr_only(format!(
"truncate: Invalid number: {}: Value too large for defined data type", "truncate: Invalid number: '{}': Value too large for defined data type",
size size
)); ));
} }
View file
@ -17,7 +17,7 @@ fn test_users_check_name() {
#[allow(clippy::needless_borrow)] #[allow(clippy::needless_borrow)]
let expected = TestScenario::new(&util_name) let expected = TestScenario::new(&util_name)
.cmd_keepenv(util_name) .cmd_keepenv(util_name)
.env("LANGUAGE", "C") .env("LC_ALL", "C")
.succeeds() .succeeds()
.stdout_move_str(); .stdout_move_str();
View file
@ -158,13 +158,12 @@ fn test_users() {
let mut v_actual: Vec<&str> = actual.split_whitespace().collect(); let mut v_actual: Vec<&str> = actual.split_whitespace().collect();
let mut v_expect: Vec<&str> = expect.split_whitespace().collect(); let mut v_expect: Vec<&str> = expect.split_whitespace().collect();
// TODO: `--users` differs from GNU's output on macOS // TODO: `--users` sometimes differs from GNU's output on macOS (race condition?)
// Diff < left / right > : // actual: "runner console Jun 23 06:37 00:34 196\n"
// <"runner console 2021-05-20 22:03 00:08 196\n" // expect: "runner console Jun 23 06:37 old 196\n"
// >"runner console 2021-05-20 22:03 old 196\n"
if cfg!(target_os = "macos") { if cfg!(target_os = "macos") {
v_actual.remove(4); v_actual.remove(5);
v_expect.remove(4); v_expect.remove(5);
} }
assert_eq!(v_actual, v_expect); assert_eq!(v_actual, v_expect);
@ -242,7 +241,7 @@ fn expected_result(args: &[&str]) -> String {
#[allow(clippy::needless_borrow)] #[allow(clippy::needless_borrow)]
TestScenario::new(&util_name) TestScenario::new(&util_name)
.cmd_keepenv(util_name) .cmd_keepenv(util_name)
.env("LANGUAGE", "C") .env("LC_ALL", "C")
.args(args) .args(args)
.succeeds() .succeeds()
.stdout_move_str() .stdout_move_str()
View file
@ -0,0 +1,44 @@
#!/bin/sh
# spell-checker:ignore (utils) gitsome jq ; (gh) repos
ME="${0}"
ME_dir="$(dirname -- "${ME}")"
ME_parent_dir="$(dirname -- "${ME_dir}")"
ME_parent_dir_abs="$(realpath -mP -- "${ME_parent_dir}")"
# ref: <https://stackoverflow.com/questions/57927115/anyone-know-a-way-to-delete-a-workflow-from-github-actions>
# note: requires `gh` and `jq`
## tools available?
# * `gh` available?
unset GH
gh --version 1>/dev/null 2>&1
if [ $? -eq 0 ]; then export GH="gh"; fi
# * `jq` available?
unset JQ
jq --version 1>/dev/null 2>&1
if [ $? -eq 0 ]; then export JQ="jq"; fi
if [ -z "${GH}" ] || [ -z "${JQ}" ]; then
if [ -z "${GH}" ]; then
echo 'ERR!: missing `gh` (see install instructions at <https://github.com/cli/cli>)' 1>&2
fi
if [ -z "${JQ}" ]; then
echo 'ERR!: missing `jq` (install with `sudo apt install jq`)' 1>&2
fi
exit 1
fi
dry_run=true
USER_NAME=uutils
REPO_NAME=coreutils
WORK_NAME=GNU
# * `--paginate` retrieves all pages
# gh api --paginate "repos/${USER_NAME}/${REPO_NAME}/actions/runs" | jq -r ".workflow_runs[] | select(.name == \"${WORK_NAME}\") | (.id)" | xargs -n1 sh -c "for arg do { echo gh api repos/${USER_NAME}/${REPO_NAME}/actions/runs/\${arg} -X DELETE ; if [ -z "$dry_run" ]; then gh api repos/${USER_NAME}/${REPO_NAME}/actions/runs/\${arg} -X DELETE ; fi ; } ; done ;" _
gh api "repos/${USER_NAME}/${REPO_NAME}/actions/runs" | jq -r ".workflow_runs[] | select(.name == \"${WORK_NAME}\") | (.id)" | xargs -n1 sh -c "for arg do { echo gh api repos/${USER_NAME}/${REPO_NAME}/actions/runs/\${arg} -X DELETE ; if [ -z "$dry_run" ]; then gh api repos/${USER_NAME}/${REPO_NAME}/actions/runs/\${arg} -X DELETE ; fi ; } ; done ;" _