mirror of https://github.com/RGBCube/uutils-coreutils synced 2025-07-28 11:37:44 +00:00

Merge branch 'main' into tail_notify

Jan Scheer 2022-04-19 22:14:55 +02:00
commit eb21330ade
No known key found for this signature in database
GPG key ID: C62AD4C29E2B9828
745 changed files with 27822 additions and 14671 deletions


@@ -1,2 +1,11 @@
[target.x86_64-unknown-redox] [target.x86_64-unknown-redox]
linker = "x86_64-unknown-redox-gcc" linker = "x86_64-unknown-redox-gcc"
[target.'cfg(feature = "cargo-clippy")']
rustflags = [
"-Wclippy::use_self",
"-Wclippy::needless_pass_by_value",
"-Wclippy::semicolon_if_nothing_returned",
"-Wclippy::single_char_pattern",
"-Wclippy::explicit_iter_loop",
]
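The new [target.'cfg(feature = "cargo-clippy")'] table makes cargo pass these flags whenever the project is built under clippy, so the listed lints surface as warnings across the whole workspace. A rough local one-off equivalent, assuming a rustup toolchain with the clippy component installed (not part of the diff):

cargo clippy --all-targets -- \
    -W clippy::use_self \
    -W clippy::needless_pass_by_value \
    -W clippy::semicolon_if_nothing_returned \
    -W clippy::single_char_pattern \
    -W clippy::explicit_iter_loop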

.clippy.toml Normal file

@@ -0,0 +1 @@
msrv = "1.56.0"


@@ -1,4 +1,4 @@
# EditorConfig (is awesome): http://EditorConfig.org # EditorConfig (is awesome!; ref: http://EditorConfig.org; v2022.02.11 [rivy])
# * top-most EditorConfig file # * top-most EditorConfig file
root = true root = true
@@ -13,27 +13,49 @@ insert_final_newline = true
max_line_length = 100 max_line_length = 100
trim_trailing_whitespace = true trim_trailing_whitespace = true
[[Mm]akefile{,.*}, *.{mk,[Mm][Kk]}] [{[Mm]akefile{,.*},*.{mak,mk,[Mm][Aa][Kk],[Mm][Kk]},[Gg][Nn][Uu]makefile}]
# makefiles ~ TAB-style indentation # makefiles ~ TAB-style indentation
indent_style = tab indent_style = tab
[*.bash]
# `bash` shell scripts
indent_size = 4
indent_style = space
# * ref: <https://github.com/foxundermoon/vs-shell-format/blob/bc56a8e367b04bbf7d9947b767dc82516a6155b7/src/shFormat.ts>
# shell_variant = bash ## allow `shellcheck` to decide via script hash-bang/sha-bang line
switch_case_indent = true
[*.{bat,cmd,[Bb][Aa][Tt],[Cc][Mm][Dd]}] [*.{bat,cmd,[Bb][Aa][Tt],[Cc][Mm][Dd]}]
# BAT/CMD ~ DOS/Win requires BAT/CMD files to have CRLF EOLNs # BAT/CMD ~ DOS/Win requires BAT/CMD files to have CRLF EOLNs
end_of_line = crlf end_of_line = crlf
[*.{cjs,cjx,cts,ctx,js,jsx,mjs,mts,mtx,ts,tsx,json,jsonc}]
# js/ts/json ~ Prettier/XO-style == TAB indention + SPACE alignment
indent_size = 2
indent_style = tab
[*.go] [*.go]
# go ~ TAB-style indentation (SPACE-style alignment); ref: <https://blog.golang.org/gofmt>@@<https://archive.is/wip/9B6FC> # go ~ TAB-style indentation (SPACE-style alignment); ref: <https://blog.golang.org/gofmt>@@<https://archive.is/wip/9B6FC>
indent_style = tab indent_style = tab
[*.{cjs,js,json,mjs,ts}]
# js/ts
indent_size = 2
[*.{markdown,md,mkd,[Mm][Dd],[Mm][Kk][Dd],[Mm][Dd][Oo][Ww][Nn],[Mm][Kk][Dd][Oo][Ww][Nn],[Mm][Aa][Rr][Kk][Dd][Oo][Ww][Nn]}] [*.{markdown,md,mkd,[Mm][Dd],[Mm][Kk][Dd],[Mm][Dd][Oo][Ww][Nn],[Mm][Kk][Dd][Oo][Ww][Nn],[Mm][Aa][Rr][Kk][Dd][Oo][Ww][Nn]}]
# markdown # markdown
indent_size = 2 indent_size = 2
indent_style = space indent_style = space
[*.sh]
# POSIX shell scripts
indent_size = 4
indent_style = space
# * ref: <https://github.com/foxundermoon/vs-shell-format/blob/bc56a8e367b04bbf7d9947b767dc82516a6155b7/src/shFormat.ts>
# shell_variant = posix ## allow `shellcheck` to decide via script hash-bang/sha-bang line
switch_case_indent = true
[*.{sln,vc{,x}proj{,.*},[Ss][Ln][Nn],[Vv][Cc]{,[Xx]}[Pp][Rr][Oo][Jj]{,.*}}]
# MSVC sln/vcproj/vcxproj files, when used, will persistently revert to CRLF EOLNs and eat final EOLs
end_of_line = crlf
insert_final_newline = false
[*.{yaml,yml,[Yy][Mm][Ll],[Yy][Aa][Mm][Ll]}] [*.{yaml,yml,[Yy][Mm][Ll],[Yy][Aa][Mm][Ll]}]
# YAML # YAML
indent_size = 2 indent_size = 2

.github/dependabot.yml vendored Normal file

@@ -0,0 +1,7 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 5


@@ -1,11 +1,11 @@
name: CICD name: CICD
# spell-checker:ignore (acronyms) CICD MSVC musl # spell-checker:ignore (acronyms) CICD MSVC musl
# spell-checker:ignore (env/flags) Awarnings Ccodegen Coverflow Cpanic RUSTDOCFLAGS RUSTFLAGS Zpanic # spell-checker:ignore (env/flags) Awarnings Ccodegen Coverflow Cpanic Dwarnings RUSTDOCFLAGS RUSTFLAGS Zpanic
# spell-checker:ignore (jargon) SHAs deps softprops toolchain # spell-checker:ignore (jargon) SHAs deps dequote softprops subshell toolchain
# spell-checker:ignore (names) CodeCOV MacOS MinGW Peltoche rivy # spell-checker:ignore (names) CodeCOV MacOS MinGW Peltoche rivy
# spell-checker:ignore (shell/tools) choco clippy dmake dpkg esac fakeroot gmake grcov halium lcov libssl mkdir popd printf pushd rustc rustfmt rustup shopt xargs # spell-checker:ignore (shell/tools) choco clippy dmake dpkg esac fakeroot gmake grcov halium lcov libssl mkdir popd printf pushd rsync rustc rustfmt rustup shopt xargs
# spell-checker:ignore (misc) aarch alnum armhf bindir busytest coreutils gnueabihf issuecomment maint nullglob onexitbegin onexitend runtest tempfile testsuite uutils # spell-checker:ignore (misc) aarch alnum armhf bindir busytest coreutils gnueabihf issuecomment maint nullglob onexitbegin onexitend pell runtest tempfile testsuite uutils DESTDIR multisize Swatinem
# ToDO: [2021-06; rivy] change from `cargo-tree` to `cargo tree` once MSRV is >= 1.45 # ToDO: [2021-06; rivy] change from `cargo-tree` to `cargo tree` once MSRV is >= 1.45
@@ -13,48 +13,88 @@ env:
PROJECT_NAME: coreutils PROJECT_NAME: coreutils
PROJECT_DESC: "Core universal (cross-platform) utilities" PROJECT_DESC: "Core universal (cross-platform) utilities"
PROJECT_AUTH: "uutils" PROJECT_AUTH: "uutils"
RUST_MIN_SRV: "1.47.0" ## MSRV v1.47.0 RUST_MIN_SRV: "1.56.0" ## MSRV v1.56.0
# * style job configuration
STYLE_FAIL_ON_FAULT: true ## (bool) fail the build if a style job contains a fault (error or warning); may be overridden on a per-job basis
on: [push, pull_request] on: [push, pull_request]
jobs: jobs:
code_deps: cargo-deny:
name: Style/dependencies name: Style/cargo-deny
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: EmbarkStudios/cargo-deny-action@v1
style_deps:
## ToDO: [2021-11-10; rivy] 'Style/deps' needs more informative output and better integration of results into the GHA dashboard
name: Style/deps
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
job: job:
- { os: ubuntu-latest , features: feat_os_unix } # note: `cargo-udeps` panics when processing stdbuf/libstdbuf ("uu_stdbuf_libstdbuf"); either b/c of the 'cpp' crate or 'libstdbuf' itself
# ... b/c of the panic, a more limited feature set is tested (though only excluding `stdbuf`)
- { os: ubuntu-latest , features: "feat_Tier1,feat_require_unix,feat_require_unix_utmpx" }
- { os: macos-latest , features: "feat_Tier1,feat_require_unix,feat_require_unix_utmpx" }
- { os: windows-latest , features: feat_os_windows }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize workflow variables - name: Initialize workflow variables
id: vars id: vars
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# failure mode
unset FAIL_ON_FAULT ; case '${{ env.STYLE_FAIL_ON_FAULT }}' in
''|0|f|false|n|no|off) FAULT_TYPE=warning ;;
*) FAIL_ON_FAULT=true ; FAULT_TYPE=error ;;
esac;
outputs FAIL_ON_FAULT FAULT_TYPE
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ; CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION outputs CARGO_FEATURES_OPTION
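As an aside, a standalone sketch of what the outputs() one-liner used throughout these jobs emits for a single variable; it relies on bash indirect expansion (${!var}) and the GitHub Actions set-output workflow command:

outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
CARGO_FEATURES_OPTION='--features "feat_os_unix"'
outputs CARGO_FEATURES_OPTION
# prints:
#   steps.vars.outputs.CARGO_FEATURES_OPTION=--features "feat_os_unix"   <- plain log line for humans
#   ::set-output name=CARGO_FEATURES_OPTION::--features "feat_os_unix"   <- workflow command; later steps read it as ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}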
## note: requires 'nightly' toolchain b/c `cargo-udeps` uses the `rustc` '-Z save-analysis' option
## * ... ref: <https://github.com/est31/cargo-udeps/issues/73>
- name: Install `rust` toolchain - name: Install `rust` toolchain
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
toolchain: stable toolchain: nightly-2022-03-21
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal
- name: "`cargo update` testing" - name: Install `cargo-udeps`
uses: actions-rs/install@v0.1
with:
crate: cargo-udeps
version: latest
use-tool-cache: false
env:
RUSTUP_TOOLCHAIN: stable
- name: Detect unused dependencies
shell: bash shell: bash
run: | run: |
## `cargo update` testing ## Detect unused dependencies
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message> unset fault
cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::'Cargo.lock' file requires update (use \`cargo +${{ env.RUST_MIN_SRV }} update\`)" ; exit 1 ; } fault_type="${{ steps.vars.outputs.FAULT_TYPE }}"
fault_prefix=$(echo "$fault_type" | tr '[:lower:]' '[:upper:]')
#
cargo +nightly-2022-03-21 udeps ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --all-targets &> udeps.log || cat udeps.log
grep --ignore-case "all deps seem to have been used" udeps.log || { printf "%s\n" "::${fault_type} ::${fault_prefix}: \`cargo udeps\`: style violation (unused dependency found)" ; fault=true ; }
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
code_format: style_format:
name: Style/format name: Style/format
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -62,12 +102,19 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize workflow variables - name: Initialize workflow variables
id: vars id: vars
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# failure mode
unset FAIL_ON_FAULT ; case '${{ env.STYLE_FAIL_ON_FAULT }}' in
''|0|f|false|n|no|off) FAULT_TYPE=warning ;;
*) FAIL_ON_FAULT=true ; FAULT_TYPE=error ;;
esac;
outputs FAIL_ON_FAULT FAULT_TYPE
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ; CARGO_FEATURES_OPTION='' ;
@@ -80,48 +127,172 @@ jobs:
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt components: rustfmt
- name: "`fmt` testing" - name: "`cargo fmt` testing"
shell: bash shell: bash
run: | run: |
## `fmt` testing ## `cargo fmt` testing
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message> unset fault
S=$(cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n -e "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::error file=\1,line=\2::ERROR: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt \"\1\"\`)/p" ; exit 1 ; } fault_type="${{ steps.vars.outputs.FAULT_TYPE }}"
- name: "`fmt` testing of tests" fault_prefix=$(echo "$fault_type" | tr '[:lower:]' '[:upper:]')
# * convert any errors/warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n -e "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::${fault_type} file=\1,line=\2::${fault_prefix}: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt -- \"\1\"\`)/p" ; fault=true ; }
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
- name: "`cargo fmt` testing of integration tests"
if: success() || failure() # run regardless of prior step success/failure if: success() || failure() # run regardless of prior step success/failure
shell: bash shell: bash
run: | run: |
## `fmt` testing of tests ## `cargo fmt` testing of integration tests
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message> unset fault
S=$(find tests -name "*.rs" -print0 | xargs -0 cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::error file=\1,line=\2::ERROR: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt \"\1\"\`)/p" ; exit 1 ; } fault_type="${{ steps.vars.outputs.FAULT_TYPE }}"
fault_prefix=$(echo "$fault_type" | tr '[:lower:]' '[:upper:]')
# 'tests' is the standard/usual integration test directory
if [ -d tests ]; then
# * convert any errors/warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(find tests -name "*.rs" -print0 | xargs -0 cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::${fault_type} file=\1,line=\2::${fault_prefix}: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt \"\1\"\`)/p" ; fault=true ; }
fi
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
code_lint: style_lint:
name: Style/lint name: Style/lint
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
job: job:
- { os: ubuntu-latest } - { os: ubuntu-latest , features: feat_os_unix }
- { os: macos-latest , features: feat_os_macos } - { os: macos-latest , features: feat_os_macos }
- { os: windows-latest , features: feat_os_windows } - { os: windows-latest , features: feat_os_windows }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Install/setup prerequisites - uses: Swatinem/rust-cache@v1
shell: bash
run: |
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for show-utils.sh
esac
- name: Initialize workflow variables - name: Initialize workflow variables
id: vars id: vars
shell: bash shell: bash
run: | run: |
## VARs setup ## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# failure mode
unset FAIL_ON_FAULT ; case '${{ env.STYLE_FAIL_ON_FAULT }}' in
''|0|f|false|n|no|off) FAULT_TYPE=warning ;;
*) FAIL_ON_FAULT=true ; FAULT_TYPE=error ;;
esac;
outputs FAIL_ON_FAULT FAULT_TYPE
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='--all-features' ; CARGO_FEATURES_OPTION='--all-features' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features ${{ matrix.job.features }}' ; fi if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
# * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})"
echo UTILITY_LIST=${UTILITY_LIST}
CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)"
outputs CARGO_UTILITY_LIST_OPTIONS
- name: Install/setup prerequisites
shell: bash
run: |
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for show-utils.sh
esac
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
components: clippy
- name: "`cargo clippy` lint testing"
shell: bash
run: |
## `cargo clippy` lint testing
unset fault
fault_type="${{ steps.vars.outputs.FAULT_TYPE }}"
fault_prefix=$(echo "$fault_type" | tr '[:lower:]' '[:upper:]')
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(cargo clippy --all-targets ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} ${{ steps.vars.outputs.CARGO_UTILITY_LIST_OPTIONS }} -- -D warnings 2>&1) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+(.*):([0-9]+):([0-9]+).*$/::${fault_type} file=\2,line=\3,col=\4::${fault_prefix}: \`cargo clippy\`: \1 (file:'\2', line:\3)/p;" -e '}' ; fault=true ; }
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
style_spellcheck:
name: Style/spelling
runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy:
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize workflow variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# failure mode
unset FAIL_ON_FAULT ; case '${{ env.STYLE_FAIL_ON_FAULT }}' in
''|0|f|false|n|no|off) FAULT_TYPE=warning ;;
*) FAIL_ON_FAULT=true ; FAULT_TYPE=error ;;
esac;
outputs FAIL_ON_FAULT FAULT_TYPE
- name: Install/setup prerequisites
shell: bash
run: |
## Install/setup prerequisites
# * pin installed cspell to v4.2.8 (cspell v5+ is broken for NodeJS < v12)
## maint: [2021-11-10; rivy] `cspell` version may be advanced to v5 when used with NodeJS >= v12
sudo apt-get -y update ; sudo apt-get -y install npm ; sudo npm install cspell@4.2.8 -g ;
- name: Run `cspell`
shell: bash
run: |
## Run `cspell`
unset fault
fault_type="${{ steps.vars.outputs.FAULT_TYPE }}"
fault_prefix=$(echo "$fault_type" | tr '[:lower:]' '[:upper:]')
# * find cspell configuration ; note: avoid quotes around ${cfg_file} b/c `cspell` (v4) doesn't correctly dequote the config argument (or perhaps a subshell expansion issue?)
cfg_files=($(shopt -s nullglob ; echo {.vscode,.}/{,.}c[sS]pell{.json,.config{.js,.cjs,.json,.yaml,.yml},.yaml,.yml} ;))
cfg_file=${cfg_files[0]}
unset CSPELL_CFG_OPTION ; if [ -n "$cfg_file" ]; then CSPELL_CFG_OPTION="--config $cfg_file" ; fi
# * `cspell`
## maint: [2021-11-10; rivy] the `--no-progress` option for `cspell` is a `cspell` v5+ option
# S=$(cspell ${CSPELL_CFG_OPTION} --no-summary --no-progress "**/*") && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n "s/${PWD//\//\\/}\/(.*):(.*):(.*) - (.*)/::${fault_type} file=\1,line=\2,col=\3::${fault_type^^}: \4 (file:'\1', line:\2)/p" ; fault=true ; true ; }
S=$(cspell ${CSPELL_CFG_OPTION} --no-summary "**/*") && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n "s/${PWD//\//\\/}\/(.*):(.*):(.*) - (.*)/::${fault_type} file=\1,line=\2,col=\3::${fault_type^^}: \4 (file:'\1', line:\2)/p" ; fault=true ; true ; }
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
doc_warnings:
name: Documentation/warnings
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
# for now, don't build it on mac & windows because the doc is only published from linux
# + it needs a bunch of duplication for build
# and I don't want to add a doc step in the regular build to avoid long builds
# - { os: macos-latest , features: feat_os_macos }
# - { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize workflow variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# failure mode
unset FAIL_ON_FAULT ; case '${{ env.STYLE_FAIL_ON_FAULT }}' in
''|0|f|false|n|no|off) FAULT_TYPE=warning ;;
*) FAIL_ON_FAULT=true ; FAULT_TYPE=error ;;
esac;
outputs FAIL_ON_FAULT FAULT_TYPE
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='--all-features' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION outputs CARGO_FEATURES_OPTION
# * determine sub-crate utility list # * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})" UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})"
@@ -131,39 +302,18 @@ jobs:
- name: Install `rust` toolchain - name: Install `rust` toolchain
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
toolchain: nightly toolchain: stable
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
components: clippy components: clippy
- name: "`clippy` lint testing" - name: "`cargo doc` with warnings"
shell: bash shell: bash
run: | run: |
## `clippy` lint testing RUSTDOCFLAGS="-Dwarnings" cargo doc ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-deps --workspace --document-private-items
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(cargo +nightly clippy --all-targets ${{ steps.vars.outputs.CARGO_UTILITY_LIST_OPTIONS }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings 2>&1) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+${PWD//\//\\/}\/(.*):([0-9]+):([0-9]+).*$/::error file=\2,line=\3,col=\4::ERROR: \`cargo clippy\`: \1 (file:'\2', line:\3)/p;" -e '}' ; exit 1 ; }
code_spellcheck:
name: Style/spelling
runs-on: ${{ matrix.job.os }}
strategy:
matrix:
job:
- { os: ubuntu-latest }
steps:
- uses: actions/checkout@v2
- name: Install/setup prerequisites
shell: bash
run: |
## Install/setup prerequisites
sudo apt-get -y update ; sudo apt-get -y install npm ; sudo npm install cspell -g ;
- name: Run `cspell`
shell: bash
run: |
## Run `cspell`
cspell --config .vscode/cSpell.json --no-summary --no-progress "**/*" | sed -E -n "s/${PWD//\//\\/}\/(.*):(.*):(.*) - (.*)/::error file=\1,line=\2,col=\3::ERROR: \4 (file:'\1', line:\2)/p"
min_version: min_version:
name: MinRustV # Minimum supported rust version name: MinRustV # Minimum supported rust version (aka, MinSRV or MSRV)
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
matrix: matrix:
@@ -171,6 +321,18 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize workflow variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# target-specific options
# * CARGO_FEATURES_OPTION
unset CARGO_FEATURES_OPTION
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }}) - name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
@@ -191,6 +353,13 @@ jobs:
## Confirm MinSRV compatible 'Cargo.lock' ## Confirm MinSRV compatible 'Cargo.lock'
# * 'Cargo.lock' is required to be in a format that `cargo` of MinSRV can interpret (eg, v1-format for MinSRV < v1.38) # * 'Cargo.lock' is required to be in a format that `cargo` of MinSRV can interpret (eg, v1-format for MinSRV < v1.38)
cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::Incompatible (or out-of-date) 'Cargo.lock' file; update using \`cargo +${{ env.RUST_MIN_SRV }} update\`" ; exit 1 ; } cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::Incompatible (or out-of-date) 'Cargo.lock' file; update using \`cargo +${{ env.RUST_MIN_SRV }} update\`" ; exit 1 ; }
- name: Confirm MinSRV equivalence for '.clippy.toml'
shell: bash
run: |
## Confirm MinSRV equivalence for '.clippy.toml'
# * ensure '.clippy.toml' MSRV configuration setting is equal to ${{ env.RUST_MIN_SRV }}
CLIPPY_MSRV=$(grep -P "(?i)^\s*msrv\s*=\s*" .clippy.toml | grep -oP "\d+([.]\d+)+")
if [ "${CLIPPY_MSRV}" != "${{ env.RUST_MIN_SRV }}" ]; then { echo "::error file=.clippy.toml::Incorrect MSRV configuration for clippy (found '${CLIPPY_MSRV}'; should be '${{ env.RUST_MIN_SRV }}'); update '.clippy.toml' with 'msrv = \"${{ env.RUST_MIN_SRV }}\"'" ; exit 1 ; } ; fi
- name: Info - name: Info
shell: bash shell: bash
run: | run: |
@@ -208,38 +377,59 @@ jobs:
cargo-tree tree -V cargo-tree tree -V
# dependencies # dependencies
echo "## dependency list" echo "## dependency list"
cargo fetch --locked --quiet
## * using the 'stable' toolchain is necessary to avoid "unexpected '--filter-platform'" errors ## * using the 'stable' toolchain is necessary to avoid "unexpected '--filter-platform'" errors
RUSTUP_TOOLCHAIN=stable cargo-tree tree --locked --all --no-dev-dependencies --no-indent --features ${{ matrix.job.features }} | grep -vE "$PWD" | sort --unique RUSTUP_TOOLCHAIN=stable cargo fetch --locked --quiet
RUSTUP_TOOLCHAIN=stable cargo-tree tree --all --locked --no-dev-dependencies --no-indent ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} | grep -vE "$PWD" | sort --unique
- name: Test - name: Test
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with:
command: test command: test
args: --features "feat_os_unix" -p uucore -p coreutils args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -p uucore -p coreutils
env: env:
RUSTFLAGS: '-Awarnings' RUSTFLAGS: "-Awarnings"
build_makefile: deps:
name: Build/Makefile name: Dependencies
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
job: job:
- { os: ubuntu-latest } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Install `rust` toolchain - name: Install `rust` toolchain
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
toolchain: stable toolchain: stable
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
- name: Install/setup prerequisites - name: "`cargo update` testing"
shell: bash shell: bash
run: | run: |
## Install/setup prerequisites ## `cargo update` testing
sudo apt-get -y update ; sudo apt-get -y install python3-sphinx ; # * convert any errors/warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
cargo fetch --locked --quiet || { echo "::error file=Cargo.lock::'Cargo.lock' file requires update (use \`cargo +${{ env.RUST_MIN_SRV }} update\`)" ; exit 1 ; }
build_makefile:
name: Build/Makefile
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: "`make build`" - name: "`make build`"
shell: bash shell: bash
run: | run: |
@@ -249,42 +439,137 @@ jobs:
run: | run: |
make test make test
build:
name: Build build_rust_stable:
name: Build/stable
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
job: job:
# { os, target, cargo-options, features, use-cross, toolchain } - { os: ubuntu-latest , features: feat_os_unix }
- { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf , features: feat_os_unix_gnueabihf , use-cross: use-cross } - { os: macos-latest , features: feat_os_macos }
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Test
uses: actions-rs/cargo@v1
with:
command: test
args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
build_rust_nightly:
name: Build/nightly
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
- { os: macos-latest , features: feat_os_macos }
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2022-03-21
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Test
uses: actions-rs/cargo@v1
with:
command: test
args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
compute_size:
name: Binary sizes
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Install dependencies
shell: bash
run: |
## Install dependencies
sudo apt-get update
sudo apt-get install jq
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
profile: minimal # minimal component installation (ie, no documentation)
- name: "`make install`"
shell: bash
run: |
make install DESTDIR=target/size-release/
make install MULTICALL=y DESTDIR=target/size-multi-release/
# strip the results
strip target/size*/usr/local/bin/*
- name: "Compute sizes"
shell: bash
run: |
SIZE=$(du -s target/size-release/usr/local/bin/|awk '{print $1}')
SIZEMULTI=$(du -s target/size-multi-release/usr/local/bin/|awk '{print $1}')
jq -n \
--arg date "$(date --rfc-email)" \
--arg sha "$GITHUB_SHA" \
--arg size "$SIZE" \
--arg multisize "$SIZEMULTI" \
'{($date): { sha: $sha, size: $size, multisize: $multisize, }}' > size-result.json
- uses: actions/upload-artifact@v2
with:
name: size-result
path: size-result.json
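The size job installs both the per-utility and the multicall layouts, strips them, and records their du totals as JSON. A sketch of the jq call with made-up numbers, to show the shape of size-result.json (GNU du -s reports 1 KiB blocks by default):

jq -n \
    --arg date "$(date --rfc-email)" \
    --arg sha "deadbeef" \
    --arg size "12345" \
    --arg multisize "6789" \
    '{($date): { sha: $sha, size: $size, multisize: $multisize }}'
# => { "<rfc-email date>": { "sha": "deadbeef", "size": "12345", "multisize": "6789" } }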
build:
name: Build
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
# { os , target , cargo-options , features , use-cross , toolchain }
- { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf, features: feat_os_unix_gnueabihf, use-cross: use-cross, }
- { os: ubuntu-latest , target: aarch64-unknown-linux-gnu , features: feat_os_unix_gnueabihf , use-cross: use-cross } - { os: ubuntu-latest , target: aarch64-unknown-linux-gnu , features: feat_os_unix_gnueabihf , use-cross: use-cross }
- { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } - { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
# - { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_selinux , use-cross: use-cross } # - { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_selinux , use-cross: use-cross }
# - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only # - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only
# - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only # - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only
- { os: ubuntu-18.04 , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } - { os: ubuntu-latest , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
- { os: ubuntu-18.04 , target: i686-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross } - { os: ubuntu-latest , target: i686-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
- { os: ubuntu-18.04 , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } - { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
- { os: ubuntu-18.04 , target: x86_64-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross } - { os: ubuntu-latest , target: x86_64-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
# Commented until https://github.com/uutils/coreutils/issues/3210 is fixed
#- { os: ubuntu-18.04 , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
#- { os: ubuntu-18.04 , target: i686-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
#- { os: ubuntu-18.04 , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
#- { os: ubuntu-18.04 , target: x86_64-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
- { os: macos-latest , target: x86_64-apple-darwin , features: feat_os_macos } - { os: macos-latest , target: x86_64-apple-darwin , features: feat_os_macos }
- { os: windows-latest , target: i686-pc-windows-gnu , features: feat_os_windows }
- { os: windows-latest , target: i686-pc-windows-msvc , features: feat_os_windows } - { os: windows-latest , target: i686-pc-windows-msvc , features: feat_os_windows }
- { os: windows-latest , target: x86_64-pc-windows-gnu , features: feat_os_windows } ## note: requires rust >= 1.43.0 to link correctly - { os: windows-latest , target: x86_64-pc-windows-gnu , features: feat_os_windows } ## note: requires rust >= 1.43.0 to link correctly
- { os: windows-latest , target: x86_64-pc-windows-msvc , features: feat_os_windows } - { os: windows-latest , target: x86_64-pc-windows-msvc , features: feat_os_windows }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Install/setup prerequisites - uses: Swatinem/rust-cache@v1
shell: bash
run: |
## Install/setup prerequisites
case '${{ matrix.job.target }}' in
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for testing
esac
- name: Initialize workflow variables - name: Initialize workflow variables
id: vars id: vars
shell: bash shell: bash
@@ -353,7 +638,7 @@ jobs:
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ; CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features=${{ matrix.job.features }}' ; fi
outputs CARGO_FEATURES_OPTION outputs CARGO_FEATURES_OPTION
# * CARGO_USE_CROSS (truthy) # * CARGO_USE_CROSS (truthy)
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac; CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
@@ -380,14 +665,36 @@ jobs:
mkdir -p '${{ steps.vars.outputs.STAGING }}' mkdir -p '${{ steps.vars.outputs.STAGING }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}' mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/dpkg' mkdir -p '${{ steps.vars.outputs.STAGING }}/dpkg'
- name: Install/setup prerequisites
shell: bash
run: |
## Install/setup prerequisites
case '${{ matrix.job.target }}' in
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for testing
esac
case '${{ matrix.job.os }}' in
ubuntu-*)
# pinky is a tool to show logged-in users from utmp, and gecos fields from /etc/passwd.
# In GitHub Action *nix VMs, no accounts log in, even the "runner" account that runs the commands. The account also has empty gecos fields.
# To work around this for pinky tests, we create a fake login entry for the GH runner account...
FAKE_UTMP='[7] [999999] [tty2] [runner] [tty2] [] [0.0.0.0] [2022-02-22T22:22:22,222222+00:00]'
# ... by dumping the login records, adding our fake line, then reverse dumping ...
(utmpdump /var/run/utmp ; echo $FAKE_UTMP) | sudo utmpdump -r -o /var/run/utmp
# ... and add a full name to each account with a gecos field but no full name.
sudo sed -i 's/:,/:runner name,/' /etc/passwd
# We also create a couple optional files pinky looks for
touch /home/runner/.project
echo "foo" > /home/runner/.plan
;;
esac
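A quick way to confirm this workaround took effect on the runner (assuming the GHA ubuntu VM, where pinky ships with the preinstalled GNU coreutils):

utmpdump /var/run/utmp | grep runner            # the injected tty2 login record should be listed
pinky runner                                    # should now report the 'runner name' full name from the patched gecos field
ls -l /home/runner/.project /home/runner/.plan  # the optional files pinky looks for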
- name: rust toolchain ~ install - name: rust toolchain ~ install
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
env:
# Override auto-detection of RAM for Rustc install.
# https://github.com/rust-lang/rustup/issues/2229#issuecomment-585855925
RUSTUP_UNPACK_RAM: "21474836480"
with: with:
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }} toolchain: ${{ env.RUST_MIN_SRV }}
target: ${{ matrix.job.target }} target: ${{ matrix.job.target }}
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
@@ -398,7 +705,7 @@ jobs:
## Dependent VARs setup ## Dependent VARs setup
outputs() { step_id="dep_vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="dep_vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# * determine sub-crate utility list # * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})" UTILITY_LIST="$(./util/show-utils.sh ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }})"
echo UTILITY_LIST=${UTILITY_LIST} echo UTILITY_LIST=${UTILITY_LIST}
CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)" CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)"
outputs CARGO_UTILITY_LIST_OPTIONS outputs CARGO_UTILITY_LIST_OPTIONS
@@ -439,18 +746,21 @@ jobs:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }} use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: build command: build
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
toolchain: ${{ env.RUST_MIN_SRV }}
- name: Test - name: Test
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }} use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: test command: test
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
toolchain: ${{ env.RUST_MIN_SRV }}
- name: Test individual utilities - name: Test individual utilities
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }} use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: test command: test
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} ${{ steps.dep_vars.outputs.CARGO_UTILITY_LIST_OPTIONS }} args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} ${{ steps.dep_vars.outputs.CARGO_UTILITY_LIST_OPTIONS }}
toolchain: ${{ env.RUST_MIN_SRV }}
- name: Archive executable artifacts - name: Archive executable artifacts
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v2
with: with:
@@ -502,6 +812,7 @@ jobs:
test_busybox: test_busybox:
name: Tests/BusyBox test suite name: Tests/BusyBox test suite
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
fail-fast: false fail-fast: false
@@ -510,16 +821,18 @@ jobs:
- { os: ubuntu-latest } - { os: ubuntu-latest }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Install/setup prerequisites
shell: bash
run: |
## Install/setup prerequisites
make prepare-busytest
- name: Install `rust` toolchain - name: Install `rust` toolchain
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
toolchain: stable toolchain: stable
default: true default: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
- name: Install/setup prerequisites
shell: bash
run: |
make prepare-busytest
- name: "Run BusyBox test suite" - name: "Run BusyBox test suite"
shell: bash shell: bash
run: | run: |
@@ -532,71 +845,87 @@ jobs:
if [ $n_fails -gt 0 ] ; then echo "::warning ::${n_fails}+ test failures" ; fi if [ $n_fails -gt 0 ] ; then echo "::warning ::${n_fails}+ test failures" ; fi
test_freebsd: test_freebsd:
runs-on: macos-latest
name: Tests/FreeBSD test suite name: Tests/FreeBSD test suite
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: macos-10.15 , features: unix } ## GHA MacOS-11.0 VM won't have VirtualBox; refs: <https://github.com/actions/virtual-environments/issues/4060> , <https://github.com/actions/virtual-environments/pull/4010>
env: env:
mem: 2048 mem: 2048
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Prepare, build and test - name: Prepare, build and test
id: test ## spell-checker:ignore (ToDO) sshfs usesh vmactions
uses: vmactions/freebsd-vm@v0.1.5 uses: vmactions/freebsd-vm@v0.1.5
with: with:
usesh: true usesh: true
# sync: sshfs
prepare: pkg install -y curl gmake sudo prepare: pkg install -y curl gmake sudo
run: | run: |
# Need to be run in the same block. Otherwise, we are back on the mac host. ## Prepare, build, and test
# implementation modelled after ref: <https://github.com/rust-lang/rustup/pull/2783>
# * NOTE: All steps need to be run in this block, otherwise, we are operating back on the mac host
set -e set -e
pw adduser -n cuuser -d /root/ -g wheel -c "Coreutils user to build" -w random #
chown -R cuuser:wheel /root/ /Users/runner/work/coreutils/ TEST_USER=tester
REPO_NAME=${GITHUB_WORKSPACE##*/}
WORKSPACE_PARENT="/Users/runner/work/${REPO_NAME}"
WORKSPACE="${WORKSPACE_PARENT}/${REPO_NAME}"
#
pw adduser -n ${TEST_USER} -d /root/ -g wheel -c "Coreutils user to build" -w random
# chown -R ${TEST_USER}:wheel /root/ "${WORKSPACE_PARENT}"/
chown -R ${TEST_USER}:wheel /root/ "/Users/runner/work/${REPO_NAME}"/
whoami whoami
#
# Needs to be done in a sudo as we are changing users # Further work needs to be done in a sudo as we are changing users
sudo -i -u cuuser sh << EOF sudo -i -u ${TEST_USER} sh << EOF
set -e set -e
whoami whoami
curl https://sh.rustup.rs -sSf --output rustup.sh curl https://sh.rustup.rs -sSf --output rustup.sh
sh rustup.sh -y --profile=minimal sh rustup.sh -y --profile=minimal
. $HOME/.cargo/env
## Info ## Info
# environment # environment
echo "## environment" echo "## environment"
echo "CI='${CI}'" echo "CI='${CI}'"
# tooling info display echo "REPO_NAME='${REPO_NAME}'"
echo "## tooling" echo "TEST_USER='${TEST_USER}'"
. $HOME/.cargo/env echo "WORKSPACE_PARENT='${WORKSPACE_PARENT}'"
echo "WORKSPACE='${WORKSPACE}'"
env | sort
# tooling info
echo "## tooling info"
cargo -V cargo -V
rustc -V rustc -V
env #
cd "${WORKSPACE}"
# where the files are resynced unset FAULT
cd /Users/runner/work/coreutils/coreutils/ cargo build || FAULT=1
cargo build cargo test --features "${{ matrix.job.features }}" || FAULT=1
cargo test --features feat_os_unix -p uucore -p coreutils cargo test --features "${{ matrix.job.features }}" -p uucore || FAULT=1
# Clean to avoid to rsync back the files # Clean to avoid to rsync back the files
cargo clean cargo clean
if (test -n "$FAULT"); then exit 1 ; fi
EOF EOF
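One subtlety in the block above: the here-document delimiter (EOF) is unquoted, so ${TEST_USER} and ${WORKSPACE} are expanded by the outer (root) shell before the inner sh running as the test user ever sees the script (the ${{ }} expressions were already substituted by the workflow templating). A minimal sketch of that run-as-another-user pattern with a hypothetical tester account; anything that must expand inside the inner shell needs a \$ escape:

TEST_USER=tester
WORKSPACE="$PWD"
sudo -i -u ${TEST_USER} sh << EOF
set -e
whoami                    # prints: tester
cd "${WORKSPACE}"         # already expanded by the outer shell
unset FAULT
cargo build || FAULT=1    # assumes cargo is on the test user's PATH (the workflow installs rustup first)
cargo test --features feat_os_unix || FAULT=1
if (test -n "\$FAULT"); then exit 1 ; fi   # escaped so the *inner* shell expands it
EOF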
coverage: coverage:
name: Code Coverage name: Code Coverage
needs: build
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
# job: [ { os: ubuntu-latest }, { os: macos-latest }, { os: windows-latest } ]
job: job:
- { os: ubuntu-latest , features: unix } - { os: ubuntu-latest , features: unix }
- { os: macos-latest , features: macos } - { os: macos-latest , features: macos }
- { os: windows-latest , features: windows } - { os: windows-latest , features: windows }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Install/setup prerequisites - uses: Swatinem/rust-cache@v1
shell: bash
run: |
## Install/setup prerequisites
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for testing
esac
# - name: Reattach HEAD ## may be needed for accurate code coverage info # - name: Reattach HEAD ## may be needed for accurate code coverage info
# run: git checkout ${{ github.head_ref }} # run: git checkout ${{ github.head_ref }}
- name: Initialize workflow variables - name: Initialize workflow variables
@@ -606,7 +935,7 @@ jobs:
## VARs setup ## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# toolchain # toolchain
TOOLCHAIN="nightly" ## default to "nightly" toolchain (required for certain required unstable compiler flags) ## !maint: refactor when stable channel has needed support TOOLCHAIN="nightly-2022-03-21" ## default to "nightly" toolchain (required for certain required unstable compiler flags) ## !maint: refactor when stable channel has needed support
# * specify gnu-type TOOLCHAIN for windows; `grcov` requires gnu-style code coverage data files # * specify gnu-type TOOLCHAIN for windows; `grcov` requires gnu-style code coverage data files
case ${{ matrix.job.os }} in windows-*) TOOLCHAIN="$TOOLCHAIN-x86_64-pc-windows-gnu" ;; esac; case ${{ matrix.job.os }} in windows-*) TOOLCHAIN="$TOOLCHAIN-x86_64-pc-windows-gnu" ;; esac;
# * use requested TOOLCHAIN if specified # * use requested TOOLCHAIN if specified
@@ -615,19 +944,36 @@ jobs:
# staging directory # staging directory
STAGING='_staging' STAGING='_staging'
outputs STAGING outputs STAGING
## # check for CODECOV_TOKEN availability (work-around for inaccessible 'secrets' object for 'if'; see <https://github.community/t5/GitHub-Actions/jobs-lt-job-id-gt-if-does-not-work-with-env-secrets/m-p/38549>)
## # note: CODECOV_TOKEN / HAS_CODECOV_TOKEN is not needed for public repositories when using AppVeyor, Azure Pipelines, CircleCI, GitHub Actions, Travis (see <https://docs.codecov.io/docs/about-the-codecov-bash-uploader#section-upload-token>)
## unset HAS_CODECOV_TOKEN
## if [ -n $CODECOV_TOKEN ]; then HAS_CODECOV_TOKEN='true' ; fi
## outputs HAS_CODECOV_TOKEN
# target-specific options # target-specific options
# * CARGO_FEATURES_OPTION # * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='--all-features' ; ## default to '--all-features' for code coverage CARGO_FEATURES_OPTION='--all-features' ; ## default to '--all-features' for code coverage
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features=${{ matrix.job.features }}' ; fi
outputs CARGO_FEATURES_OPTION outputs CARGO_FEATURES_OPTION
# * CODECOV_FLAGS # * CODECOV_FLAGS
CODECOV_FLAGS=$( echo "${{ matrix.job.os }}" | sed 's/[^[:alnum:]]/_/g' ) CODECOV_FLAGS=$( echo "${{ matrix.job.os }}" | sed 's/[^[:alnum:]]/_/g' )
outputs CODECOV_FLAGS outputs CODECOV_FLAGS
- name: Install/setup prerequisites
shell: bash
run: |
## Install/setup prerequisites
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for testing
esac
case '${{ matrix.job.os }}' in
ubuntu-latest)
# pinky is a tool to show logged-in users from utmp, and gecos fields from /etc/passwd.
# In GitHub Action *nix VMs, no accounts log in, even the "runner" account that runs the commands. The account also has empty gecos fields.
# To work around this for pinky tests, we create a fake login entry for the GH runner account...
FAKE_UTMP='[7] [999999] [tty2] [runner] [tty2] [] [0.0.0.0] [2022-02-22T22:22:22,222222+00:00]'
# ... by dumping the login records, adding our fake line, then reverse dumping ...
(utmpdump /var/run/utmp ; echo $FAKE_UTMP) | sudo utmpdump -r -o /var/run/utmp
# ... and add a full name to each account with a gecos field but no full name.
sudo sed -i 's/:,/:runner name,/' /etc/passwd
# We also create a couple optional files pinky looks for
touch /home/runner/.project
echo "foo" > /home/runner/.plan
;;
esac
- name: rust toolchain ~ install - name: rust toolchain ~ install
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
@@ -641,7 +987,7 @@ jobs:
## Dependent VARs setup ## Dependent VARs setup
outputs() { step_id="dep_vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; } outputs() { step_id="dep_vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# * determine sub-crate utility list # * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})" UTILITY_LIST="$(./util/show-utils.sh ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }})"
CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)" CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo "-puu_${u}"; done;)"
outputs CARGO_UTILITY_LIST_OPTIONS outputs CARGO_UTILITY_LIST_OPTIONS
- name: Test uucore - name: Test uucore
@@ -650,10 +996,10 @@ jobs:
command: test command: test
args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast -p uucore args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast -p uucore
env: env:
CARGO_INCREMENTAL: '0' CARGO_INCREMENTAL: "0"
RUSTC_WRAPPER: '' RUSTC_WRAPPER: ""
RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort' RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: '-Cpanic=abort' RUSTDOCFLAGS: "-Cpanic=abort"
# RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }} # RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }}
- name: Test - name: Test
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
@@ -661,10 +1007,10 @@ jobs:
command: test command: test
args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast
env: env:
CARGO_INCREMENTAL: '0' CARGO_INCREMENTAL: "0"
RUSTC_WRAPPER: '' RUSTC_WRAPPER: ""
RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort' RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: '-Cpanic=abort' RUSTDOCFLAGS: "-Cpanic=abort"
# RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }} # RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }}
- name: Test individual utilities - name: Test individual utilities
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
@@ -672,10 +1018,10 @@ jobs:
command: test command: test
args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast ${{ steps.dep_vars.outputs.CARGO_UTILITY_LIST_OPTIONS }} args: ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast ${{ steps.dep_vars.outputs.CARGO_UTILITY_LIST_OPTIONS }}
env: env:
CARGO_INCREMENTAL: '0' CARGO_INCREMENTAL: "0"
RUSTC_WRAPPER: '' RUSTC_WRAPPER: ""
RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort' RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: '-Cpanic=abort' RUSTDOCFLAGS: "-Cpanic=abort"
# RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }} # RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }}
- name: "`grcov` ~ install" - name: "`grcov` ~ install"
uses: actions-rs/install@v0.1 uses: actions-rs/install@v0.1
@@ -690,13 +1036,13 @@ jobs:
## Generate coverage data ## Generate coverage data
COVERAGE_REPORT_DIR="target/debug" COVERAGE_REPORT_DIR="target/debug"
COVERAGE_REPORT_FILE="${COVERAGE_REPORT_DIR}/lcov.info" COVERAGE_REPORT_FILE="${COVERAGE_REPORT_DIR}/lcov.info"
# GRCOV_IGNORE_OPTION='--ignore build.rs --ignore "/*" --ignore "[a-zA-Z]:/*"' ## `grcov` ignores these params when passed as an environment variable (why?) # GRCOV_IGNORE_OPTION='--ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*"' ## `grcov` ignores these params when passed as an environment variable (why?)
# GRCOV_EXCLUDE_OPTION='--excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()"' ## `grcov` ignores these params when passed as an environment variable (why?) # GRCOV_EXCLUDE_OPTION='--excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()"' ## `grcov` ignores these params when passed as an environment variable (why?)
mkdir -p "${COVERAGE_REPORT_DIR}" mkdir -p "${COVERAGE_REPORT_DIR}"
# display coverage files # display coverage files
grcov . --output-type files --ignore build.rs --ignore "/*" --ignore "[a-zA-Z]:/*" --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()" | sort --unique grcov . --output-type files --ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*" --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()" | sort --unique
# generate coverage report # generate coverage report
grcov . --output-type lcov --output-path "${COVERAGE_REPORT_FILE}" --branch --ignore build.rs --ignore "/*" --ignore "[a-zA-Z]:/*" --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()" grcov . --output-type lcov --output-path "${COVERAGE_REPORT_FILE}" --branch --ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*" --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()"
echo ::set-output name=report::${COVERAGE_REPORT_FILE} echo ::set-output name=report::${COVERAGE_REPORT_FILE}
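For local reproduction, the coverage flow above is roughly: build and test with the nightly -Zprofile instrumentation flags, then let grcov translate the resulting profile data into an lcov report. A hedged sketch, assuming a nightly toolchain and grcov are installed:

export CARGO_INCREMENTAL=0
export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
export RUSTDOCFLAGS="-Cpanic=abort"
cargo +nightly test --no-fail-fast -p uucore || true   # keep going; the profile data is still written
mkdir -p target/debug
grcov . --output-type lcov --output-path target/debug/lcov.info --branch \
    --ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*" \
    --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()"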
- name: Upload coverage results (to Codecov.io) - name: Upload coverage results (to Codecov.io)
uses: codecov/codecov-action@v1 uses: codecov/codecov-action@v1
@ -708,35 +1054,3 @@ jobs:
flags: ${{ steps.vars.outputs.CODECOV_FLAGS }} flags: ${{ steps.vars.outputs.CODECOV_FLAGS }}
name: codecov-umbrella name: codecov-umbrella
fail_ci_if_error: false fail_ci_if_error: false
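For anyone reproducing this coverage job outside CI, the following is a minimal local sketch assembled from the flags and `grcov` invocation shown above; the nightly toolchain, an installed `grcov`, and the bare `cargo test` call are assumptions rather than part of the workflow itself.

```bash
# Minimal sketch of a local coverage run (assumes nightly Rust and grcov on PATH).
export CARGO_INCREMENTAL=0
export RUSTC_WRAPPER=""
export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
export RUSTDOCFLAGS="-Cpanic=abort"
cargo test --no-fail-fast            # emits .gcno/.gcda profiling data alongside the test run
mkdir -p target/debug
grcov . --output-type lcov --output-path target/debug/lcov.info --branch \
    --ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*" \
    --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()"
```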
unused_deps:
name: Unused deps
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
- { os: macos-latest , features: feat_os_macos }
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v2
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
default: true
profile: minimal
- name: Install `cargo-udeps`
uses: actions-rs/install@v0.1
with:
crate: cargo-udeps
version: latest
use-tool-cache: true
env:
RUSTUP_TOOLCHAIN: stable
- name: Confirms there isn't any unused deps
shell: bash
run: |
cargo +nightly udeps --all-targets &> udeps.log || cat udeps.log
grep "seem to have been used" udeps.log
View file
@ -1,18 +1,20 @@
name: FixPR name: FixPR
# spell-checker:ignore Swatinem
# Trigger automated fixes for PRs being merged (with associated commits) # Trigger automated fixes for PRs being merged (with associated commits)
# ToDO: [2021-06; rivy] change from `cargo-tree` to `cargo tree` once MSRV is >= 1.45 # ToDO: [2021-06; rivy] change from `cargo-tree` to `cargo tree` once MSRV is >= 1.45
env: env:
BRANCH_TARGET: master BRANCH_TARGET: main
on: on:
# * only trigger on pull request closed to specific branches # * only trigger on pull request closed to specific branches
# ref: https://github.community/t/trigger-workflow-only-on-pull-request-merge/17359/9 # ref: https://github.community/t/trigger-workflow-only-on-pull-request-merge/17359/9
pull_request: pull_request:
branches: branches:
- master # == env.BRANCH_TARGET ## unfortunately, env context variables are only available in jobs/steps (see <https://github.community/t/how-to-use-env-context/16975/2>) - main # == env.BRANCH_TARGET ## unfortunately, env context variables are only available in jobs/steps (see <https://github.community/t/how-to-use-env-context/16975/2>)
types: [ closed ] types: [ closed ]
jobs: jobs:
@ -27,6 +29,7 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize job variables - name: Initialize job variables
id: vars id: vars
shell: bash shell: bash
@ -98,6 +101,7 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix } - { os: ubuntu-latest , features: feat_os_unix }
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- name: Initialize job variables - name: Initialize job variables
id: vars id: vars
shell: bash shell: bash
View file
@ -1,6 +1,8 @@
name: GnuTests name: GnuTests
# spell-checker:ignore (names) gnulib ; (utils) autopoint gperf pyinotify texinfo ; (vars) XPASS # spell-checker:ignore (names) gnulib ; (jargon) submodules ; (people) Dawid Dziurla * dawidd ; (utils) autopoint chksum gperf pyinotify shopt texinfo ; (vars) FILESET SUBDIRS XPASS
# * note: to run a single test => `REPO/util/run-gnu-test.sh PATH/TO/TEST/SCRIPT`
on: [push, pull_request] on: [push, pull_request]
@ -9,23 +11,52 @@ jobs:
name: Run GNU tests name: Run GNU tests
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout code uutil - name: Initialize workflow variables
id: vars
shell: bash
run: |
## VARs setup
outputs() { step_id="vars"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
# * config
path_GNU="gnu"
path_GNU_tests="${path_GNU}/tests"
path_UUTILS="uutils"
path_reference="reference"
outputs path_GNU path_GNU_tests path_reference path_UUTILS
#
repo_default_branch="${{ github.event.repository.default_branch }}"
repo_GNU_ref="v9.0"
repo_reference_branch="${{ github.event.repository.default_branch }}"
outputs repo_default_branch repo_GNU_ref repo_reference_branch
#
SUITE_LOG_FILE="${path_GNU_tests}/test-suite.log"
TEST_LOGS_GLOB="${path_GNU_tests}/**/*.log" ## note: not usable at bash CLI; [why] double globstar not enabled by default b/c MacOS includes only bash v3 which doesn't have double globstar support
TEST_FILESET_PREFIX='test-fileset-IDs.sha1#'
TEST_FILESET_SUFFIX='.txt'
TEST_SUMMARY_FILE='gnu-result.json'
TEST_FULL_SUMMARY_FILE='gnu-full-result.json'
outputs SUITE_LOG_FILE TEST_FILESET_PREFIX TEST_FILESET_SUFFIX TEST_LOGS_GLOB TEST_SUMMARY_FILE TEST_FULL_SUMMARY_FILE
- name: Checkout code (uutil)
uses: actions/checkout@v2 uses: actions/checkout@v2
with: with:
path: 'uutils' path: '${{ steps.vars.outputs.path_UUTILS }}'
- name: Checkout GNU coreutils - name: Checkout code (GNU coreutils)
uses: actions/checkout@v2 uses: actions/checkout@v2
with: with:
repository: 'coreutils/coreutils' repository: 'coreutils/coreutils'
path: 'gnu' path: '${{ steps.vars.outputs.path_GNU }}'
ref: v8.32 ref: ${{ steps.vars.outputs.repo_GNU_ref }}
- name: Checkout GNU coreutils library (gnulib) submodules: recursive
uses: actions/checkout@v2 - name: Retrieve reference artifacts
uses: dawidd6/action-download-artifact@v2
# ref: <https://github.com/dawidd6/action-download-artifact>
continue-on-error: true ## don't break the build for missing reference artifacts (may be expired or just not generated yet)
with: with:
repository: 'coreutils/gnulib' workflow: GnuTests.yml
path: 'gnulib' branch: "${{ steps.vars.outputs.repo_reference_branch }}"
ref: 8e99f24c0931a38880c6ee9b8287c7da80b0036b # workflow_conclusion: success ## (default); * but, if commit with failed GnuTests is merged into the default branch, future commits will all show regression errors in GnuTests CI until o/w fixed
fetch-depth: 0 # gnu gets upset if gnulib is a shallow checkout workflow_conclusion: completed ## continually recalibrates to last commit of default branch with a successful GnuTests (ie, "self-heals" from GnuTest regressions, but needs more supervision for/of regressions)
path: "${{ steps.vars.outputs.path_reference }}"
- name: Install `rust` toolchain - name: Install `rust` toolchain
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
@ -38,35 +69,58 @@ jobs:
run: | run: |
## Install dependencies ## Install dependencies
sudo apt-get update sudo apt-get update
sudo apt-get install autoconf autopoint bison texinfo gperf gcc g++ gdb python-pyinotify python3-sphinx jq sudo apt-get install autoconf autopoint bison texinfo gperf gcc g++ gdb python-pyinotify jq valgrind libexpect-perl
- name: Add various locales
shell: bash
run: |
echo "Before:"
locale -a
## Some tests fail with 'cannot change locale (en_US.ISO-8859-1): No such file or directory'
## Some others need a French locale
sudo locale-gen
sudo locale-gen fr_FR
sudo locale-gen fr_FR.UTF-8
sudo update-locale
echo "After:"
locale -a
- name: Build binaries - name: Build binaries
shell: bash shell: bash
run: | run: |
## Build binaries ## Build binaries
cd uutils cd '${{ steps.vars.outputs.path_UUTILS }}'
bash util/build-gnu.sh bash util/build-gnu.sh
- name: Run GNU tests - name: Run GNU tests
shell: bash shell: bash
run: | run: |
bash uutils/util/run-gnu-test.sh path_GNU='${{ steps.vars.outputs.path_GNU }}'
- name: Extract testing info path_UUTILS='${{ steps.vars.outputs.path_UUTILS }}'
bash "${path_UUTILS}/util/run-gnu-test.sh"
- name: Extract testing info into JSON
shell: bash
run : |
path_UUTILS='${{ steps.vars.outputs.path_UUTILS }}'
python ${path_UUTILS}/util/gnu-json-result.py ${{ steps.vars.outputs.path_GNU_tests }} > ${{ steps.vars.outputs.TEST_FULL_SUMMARY_FILE }}
- name: Extract/summarize testing info
id: summary
shell: bash shell: bash
run: | run: |
## Extract testing info ## Extract/summarize testing info
LOG_FILE=gnu/tests/test-suite.log outputs() { step_id="summary"; for var in "$@" ; do echo steps.${step_id}.outputs.${var}="${!var}"; echo ::set-output name=${var}::${!var}; done; }
if test -f "$LOG_FILE" #
SUITE_LOG_FILE='${{ steps.vars.outputs.SUITE_LOG_FILE }}'
if test -f "${SUITE_LOG_FILE}"
then then
TOTAL=$(sed -n "s/.*# TOTAL: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) TOTAL=$(sed -n "s/.*# TOTAL: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
PASS=$(sed -n "s/.*# PASS: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) PASS=$(sed -n "s/.*# PASS: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
SKIP=$(sed -n "s/.*# SKIP: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) SKIP=$(sed -n "s/.*# SKIP: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
FAIL=$(sed -n "s/.*# FAIL: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) FAIL=$(sed -n "s/.*# FAIL: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
XPASS=$(sed -n "s/.*# XPASS: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) XPASS=$(sed -n "s/.*# XPASS: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
ERROR=$(sed -n "s/.*# ERROR: \(.*\)/\1/p" "$LOG_FILE"|tr -d '\r'|head -n1) ERROR=$(sed -n "s/.*# ERROR: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
if [[ "$TOTAL" -eq 0 || "$TOTAL" -eq 1 ]]; then if [[ "$TOTAL" -eq 0 || "$TOTAL" -eq 1 ]]; then
echo "Error in the execution, failing early" echo "::error ::Failed to parse test results from '${SUITE_LOG_FILE}'; failing early"
exit 1 exit 1
fi fi
output="GNU tests summary = TOTAL: $TOTAL / PASS: $PASS / FAIL: $FAIL / ERROR: $ERROR" output="GNU tests summary = TOTAL: $TOTAL / PASS: $PASS / FAIL: $FAIL / ERROR: $ERROR / SKIP: $SKIP"
echo "${output}" echo "${output}"
if [[ "$FAIL" -gt 0 || "$ERROR" -gt 0 ]]; then echo "::warning ::${output}" ; fi if [[ "$FAIL" -gt 0 || "$ERROR" -gt 0 ]]; then echo "::warning ::${output}" ; fi
jq -n \ jq -n \
@ -78,54 +132,149 @@ jobs:
--arg fail "$FAIL" \ --arg fail "$FAIL" \
--arg xpass "$XPASS" \ --arg xpass "$XPASS" \
--arg error "$ERROR" \ --arg error "$ERROR" \
'{($date): { sha: $sha, total: $total, pass: $pass, skip: $skip, fail: $fail, xpass: $xpass, error: $error, }}' > gnu-result.json '{($date): { sha: $sha, total: $total, pass: $pass, skip: $skip, fail: $fail, xpass: $xpass, error: $error, }}' > '${{ steps.vars.outputs.TEST_SUMMARY_FILE }}'
HASH=$(sha1sum '${{ steps.vars.outputs.TEST_SUMMARY_FILE }}' | cut --delim=" " -f 1)
outputs HASH
else else
echo "::error ::Failed to get summary of test results" echo "::error ::Failed to find summary of test results (missing '${SUITE_LOG_FILE}'); failing early"
exit 1
fi fi
- uses: actions/upload-artifact@v2 # Compress logs before upload (fails otherwise)
gzip ${{ steps.vars.outputs.TEST_LOGS_GLOB }}
- name: Reserve SHA1/ID of 'test-summary'
uses: actions/upload-artifact@v2
with: with:
name: test-report name: "${{ steps.summary.outputs.HASH }}"
path: gnu/tests/**/*.log path: "${{ steps.vars.outputs.TEST_SUMMARY_FILE }}"
- uses: actions/upload-artifact@v2 - name: Reserve test results summary
uses: actions/upload-artifact@v2
with: with:
name: gnu-result name: test-summary
path: gnu-result.json path: "${{ steps.vars.outputs.TEST_SUMMARY_FILE }}"
- name: Download the result - name: Reserve test logs
uses: dawidd6/action-download-artifact@v2 uses: actions/upload-artifact@v2
with: with:
workflow: GnuTests.yml name: test-logs
name: gnu-result path: "${{ steps.vars.outputs.TEST_LOGS_GLOB }}"
repo: uutils/coreutils - name: Upload full json results
branch: master uses: actions/upload-artifact@v2
path: dl
- name: Download the log
uses: dawidd6/action-download-artifact@v2
with: with:
workflow: GnuTests.yml name: gnu-full-result.json
name: test-report path: ${{ steps.vars.outputs.TEST_FULL_SUMMARY_FILE }}
repo: uutils/coreutils - name: Compare test failures VS reference
branch: master
path: dl
- name: Compare failing tests against master
shell: bash shell: bash
run: | run: |
OLD_FAILING=$(sed -n "s/^FAIL: \([[:print:]]\+\).*/\1/p" dl/test-suite.log | sort) have_new_failures=""
NEW_FAILING=$(sed -n "s/^FAIL: \([[:print:]]\+\).*/\1/p" gnu/tests/test-suite.log | sort) REF_LOG_FILE='${{ steps.vars.outputs.path_reference }}/test-logs/test-suite.log'
for LINE in $OLD_FAILING REF_SUMMARY_FILE='${{ steps.vars.outputs.path_reference }}/test-summary/gnu-result.json'
do REPO_DEFAULT_BRANCH='${{ steps.vars.outputs.repo_default_branch }}'
if ! grep -Fxq $LINE<<<"$NEW_FAILING"; then if test -f "${REF_LOG_FILE}"; then
echo "::warning ::Congrats! The gnu test $LINE is now passing!" echo "Reference SHA1/ID: $(sha1sum -- "${REF_SUMMARY_FILE}")"
fi REF_FAILING=$(sed -n "s/^FAIL: \([[:print:]]\+\).*/\1/p" "${REF_LOG_FILE}" | sort)
done NEW_FAILING=$(sed -n "s/^FAIL: \([[:print:]]\+\).*/\1/p" '${{ steps.vars.outputs.path_GNU_tests }}/test-suite.log' | sort)
for LINE in $NEW_FAILING for LINE in ${REF_FAILING}
do do
if ! grep -Fxq $LINE<<<"$OLD_FAILING" if ! grep -Fxq ${LINE}<<<"${NEW_FAILING}"; then
then echo "::warning ::Congrats! The gnu test ${LINE} is no longer failing!"
echo "::error ::GNU test failed: $LINE. $LINE is passing on 'master'. Maybe you have to rebase?" fi
fi done
done for LINE in ${NEW_FAILING}
- name: Compare against master results do
if ! grep -Fxq ${LINE}<<<"${REF_FAILING}"
then
echo "::error ::GNU test failed: ${LINE}. ${LINE} is passing on '${{ steps.vars.outputs.repo_default_branch }}'. Maybe you have to rebase?"
have_new_failures="true"
fi
done
else
echo "::warning ::Skipping test failure comparison; no prior reference test logs are available."
fi
if test -n "${have_new_failures}" ; then exit -1 ; fi
- name: Compare test summary VS reference
if: success() || failure() # run regardless of prior step success/failure
shell: bash shell: bash
run: | run: |
mv dl/gnu-result.json master-gnu-result.json REF_SUMMARY_FILE='${{ steps.vars.outputs.path_reference }}/test-summary/gnu-result.json'
python uutils/util/compare_gnu_result.py if test -f "${REF_SUMMARY_FILE}"; then
echo "Reference SHA1/ID: $(sha1sum -- "${REF_SUMMARY_FILE}")"
mv "${REF_SUMMARY_FILE}" main-gnu-result.json
python uutils/util/compare_gnu_result.py
else
echo "::warning ::Skipping test summary comparison; no prior reference summary is available."
fi
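The regression check above reduces to comparing the `FAIL:` lines of the reference `test-suite.log` with the freshly generated one. A simplified local equivalent, using `comm` in place of the grep loop (the file paths here are assumptions based on the workflow layout), might look like:

```bash
# List failing test names from the reference and the current run, then diff the two sets.
sed -n "s/^FAIL: \([[:print:]]\+\).*/\1/p" reference/test-logs/test-suite.log | sort > ref-failing.txt
sed -n "s/^FAIL: \([[:print:]]\+\).*/\1/p" gnu/tests/test-suite.log | sort > new-failing.txt
comm -13 ref-failing.txt new-failing.txt   # new failures (regressions)
comm -23 ref-failing.txt new-failing.txt   # tests that were failing before but pass now
```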
gnu_coverage:
name: Run GNU tests with coverage
runs-on: ubuntu-latest
steps:
- name: Checkout code uutil
uses: actions/checkout@v2
with:
path: 'uutils'
- name: Checkout GNU coreutils
uses: actions/checkout@v2
with:
repository: 'coreutils/coreutils'
path: 'gnu'
ref: 'v9.0'
submodules: recursive
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2022-03-21
default: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt
- name: Install dependencies
run: |
sudo apt update
sudo apt install autoconf autopoint bison texinfo gperf gcc g++ gdb python-pyinotify jq valgrind libexpect-perl -y
- name: Add various locales
run: |
echo "Before:"
locale -a
## Some tests fail with 'cannot change locale (en_US.ISO-8859-1): No such file or directory'
## Some others need a French locale
sudo locale-gen
sudo locale-gen fr_FR
sudo locale-gen fr_FR.UTF-8
sudo update-locale
echo "After:"
locale -a
- name: Build binaries
env:
CARGO_INCREMENTAL: "0"
RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: "-Cpanic=abort"
run: |
cd uutils
UU_MAKE_PROFILE=debug bash util/build-gnu.sh
- name: Run GNU tests
run: bash uutils/util/run-gnu-test.sh
- name: "`grcov` ~ install"
uses: actions-rs/install@v0.1
with:
crate: grcov
version: latest
use-tool-cache: false
- name: Generate coverage data (via `grcov`)
id: coverage
run: |
## Generate coverage data
cd uutils
COVERAGE_REPORT_DIR="target/debug"
COVERAGE_REPORT_FILE="${COVERAGE_REPORT_DIR}/lcov.info"
mkdir -p "${COVERAGE_REPORT_DIR}"
sudo chown -R "$(whoami)" "${COVERAGE_REPORT_DIR}"
# display coverage files
grcov . --output-type files --ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*" --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()" | sort --unique
# generate coverage report
grcov . --output-type lcov --output-path "${COVERAGE_REPORT_FILE}" --branch --ignore build.rs --ignore "vendor/*" --ignore "/*" --ignore "[a-zA-Z]:/*" --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?!|#\[derive\()"
echo ::set-output name=report::${COVERAGE_REPORT_FILE}
- name: Upload coverage results (to Codecov.io)
uses: codecov/codecov-action@v2
with:
file: ${{ steps.coverage.outputs.report }}
flags: gnutests
name: gnutests
working-directory: uutils
1
.rustfmt.toml Normal file
View file
@ -0,0 +1 @@
# * using all default `cargo fmt`/`rustfmt` options
View file
@ -1,4 +1,4 @@
#!/bin/bash #!/bin/sh
rustup target add x86_64-unknown-redox rustup target add x86_64-unknown-redox
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys AA12E97F0881517F sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys AA12E97F0881517F
2
.vscode/.gitattributes vendored Normal file
View file
@ -0,0 +1,2 @@
# Configure GitHub to not mark comments in configuration files as errors
*.json linguist-language=jsonc
26
.vscode/cSpell.json vendored
View file
@ -1,7 +1,12 @@
// `cspell` settings // `cspell` settings
{ {
"version": "0.1", // Version of the setting file. Always 0.1 // version of the setting file
"language": "en", // language - current active spelling language "version": "0.2",
// spelling language
"language": "en",
// custom dictionaries
"dictionaries": ["acronyms+names", "jargon", "people", "shell", "workspace"], "dictionaries": ["acronyms+names", "jargon", "people", "shell", "workspace"],
"dictionaryDefinitions": [ "dictionaryDefinitions": [
{ "name": "acronyms+names", "path": "./cspell.dictionaries/acronyms+names.wordlist.txt" }, { "name": "acronyms+names", "path": "./cspell.dictionaries/acronyms+names.wordlist.txt" },
@ -10,10 +15,19 @@
{ "name": "shell", "path": "./cspell.dictionaries/shell.wordlist.txt" }, { "name": "shell", "path": "./cspell.dictionaries/shell.wordlist.txt" },
{ "name": "workspace", "path": "./cspell.dictionaries/workspace.wordlist.txt" } { "name": "workspace", "path": "./cspell.dictionaries/workspace.wordlist.txt" }
], ],
// ignorePaths - a list of globs to specify which files are to be ignored
"ignorePaths": ["Cargo.lock", "target/**", "tests/**/fixtures/**", "src/uu/dd/test-resources/**"], // files to ignore (globs supported)
// ignoreWords - a list of words to be ignored (even if they are in the flagWords) "ignorePaths": [
"Cargo.lock",
"target/**",
"tests/**/fixtures/**",
"src/uu/dd/test-resources/**",
"vendor/**"
],
// words to ignore (even if they are in the flagWords)
"ignoreWords": [], "ignoreWords": [],
// words - list of words to be always considered correct
// words to always consider correct
"words": [] "words": []
} }
View file
@ -35,6 +35,7 @@ WASM
XFS XFS
aarch aarch
flac flac
impls
lzma lzma
# * names # * names
@ -47,6 +48,7 @@ EditorConfig
FreeBSD FreeBSD
Gmail Gmail
GNU GNU
Illumos
Irix Irix
MS-DOS MS-DOS
MSDOS MSDOS
View file
@ -11,6 +11,7 @@ canonicalize
canonicalizing canonicalizing
codepoint codepoint
codepoints codepoints
codegen
colorizable colorizable
colorize colorize
coprime coprime
@ -28,6 +29,7 @@ devs
discoverability discoverability
duplicative duplicative
dsync dsync
endianness
enqueue enqueue
errored errored
executable executable
@ -36,6 +38,8 @@ exponentiate
eval eval
falsey falsey
fileio fileio
filesystem
filesystems
flamegraph flamegraph
fullblock fullblock
getfacl getfacl
@ -59,6 +63,7 @@ kibibytes
libacl libacl
lcase lcase
lossily lossily
lstat
mebi mebi
mebibytes mebibytes
mergeable mergeable
View file
@ -25,6 +25,7 @@ getrandom
globset globset
itertools itertools
lscolors lscolors
mdbook
memchr memchr
multifilereader multifilereader
onig onig
@ -43,6 +44,7 @@ termsize
termwidth termwidth
textwrap textwrap
thiserror thiserror
ureq
walkdir walkdir
winapi winapi
xattr xattr
@ -182,6 +184,7 @@ getgrgid
getgrnam getgrnam
getgrouplist getgrouplist
getgroups getgroups
getpwent
getpwnam getpwnam
getpwuid getpwuid
getuid getuid
@ -321,6 +324,7 @@ ucommand
utmpx utmpx
uucore uucore
uucore_procs uucore_procs
uudoc
uumain uumain
uutil uutil
uutils uutils
View file
@ -1,12 +1,13 @@
// spell-checker:ignore (misc) matklad
// see <http://go.microsoft.com/fwlink/?LinkId=827846> for the documentation about the extensions.json format
// *
// "foxundermoon.shell-format" ~ shell script formatting ; note: ENABLE "Use EditorConfig"
// "matklad.rust-analyzer" ~ `rust` language support
// "streetsidesoftware.code-spell-checker" ~ `cspell` spell-checker support
{ {
// spell-checker:ignore (misc) matklad "recommendations": [
// see <http://go.microsoft.com/fwlink/?LinkId=827846> for the documentation about the extensions.json format "matklad.rust-analyzer",
"recommendations": [ "streetsidesoftware.code-spell-checker",
// Rust language support. "foxundermoon.shell-format"
"rust-lang.rust", ]
// Provides support for rust-analyzer: novel LSP server for the Rust programming language.
"matklad.rust-analyzer",
// `cspell` spell-checker support
"streetsidesoftware.code-spell-checker"
]
} }
1
.vscode/settings.json vendored Normal file
View file
@ -0,0 +1 @@
{ "cSpell.import": [".vscode/cspell.json"] }
View file
@ -1,6 +1,6 @@
# Contributing to coreutils # Contributing to coreutils
Contributions are very welcome, and should target Rust's master branch until the Contributions are very welcome, and should target Rust's main branch until the
standard libraries are stabilized. You may *claim* an item on the to-do list by standard libraries are stabilized. You may *claim* an item on the to-do list by
following these steps: following these steps:
@ -94,6 +94,16 @@ uutils: add new utility
gitignore: add temporary files gitignore: add temporary files
``` ```
## cargo-deny
This project uses [cargo-deny](https://github.com/EmbarkStudios/cargo-deny/) to
detect duplicate dependencies, checks licenses, etc. To run it locally, first
install it and then run with:
```
cargo deny --all-features check all
```
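As a usage note, `cargo-deny` can also run a single check category at a time, which is often quicker when chasing one specific problem; the commands below are an illustration, and `cargo deny check --help` lists the exact categories available in your version.

```bash
cargo deny --all-features check licenses    # license checks only
cargo deny --all-features check bans        # duplicate / banned dependency checks only
```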
## Licensing ## Licensing
uutils is distributed under the terms of the MIT License; see the `LICENSE` file uutils is distributed under the terms of the MIT License; see the `LICENSE` file
1928
Cargo.lock generated
File diff suppressed because it is too large
View file
@ -5,10 +5,11 @@
[package] [package]
name = "coreutils" name = "coreutils"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "coreutils ~ GNU coreutils (updated); implemented as universal (cross-platform) utils, written in Rust" description = "coreutils ~ GNU coreutils (updated); implemented as universal (cross-platform) utils, written in Rust"
default-run = "coreutils"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils" repository = "https://github.com/uutils/coreutils"
@ -244,113 +245,117 @@ test = [ "uu_test" ]
[workspace] [workspace]
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
clap_complete = "3.1"
phf = "0.10.1"
lazy_static = { version="1.3" } lazy_static = { version="1.3" }
textwrap = { version="0.14", features=["terminal_size"] } textwrap = { version="0.15", features=["terminal_size"] }
uucore = { version=">=0.0.10", package="uucore", path="src/uucore" } uucore = { version=">=0.0.11", package="uucore", path="src/uucore" }
selinux = { version="0.2.3", optional = true } selinux = { version="0.2", optional = true }
ureq = "2.4.0"
zip = { version = "0.5.13", default_features=false, features=["deflate"] }
# * uutils # * uutils
uu_test = { optional=true, version="0.0.8", package="uu_test", path="src/uu/test" } uu_test = { optional=true, version="0.0.13", package="uu_test", path="src/uu/test" }
# #
arch = { optional=true, version="0.0.8", package="uu_arch", path="src/uu/arch" } arch = { optional=true, version="0.0.13", package="uu_arch", path="src/uu/arch" }
base32 = { optional=true, version="0.0.8", package="uu_base32", path="src/uu/base32" } base32 = { optional=true, version="0.0.13", package="uu_base32", path="src/uu/base32" }
base64 = { optional=true, version="0.0.8", package="uu_base64", path="src/uu/base64" } base64 = { optional=true, version="0.0.13", package="uu_base64", path="src/uu/base64" }
basename = { optional=true, version="0.0.8", package="uu_basename", path="src/uu/basename" } basename = { optional=true, version="0.0.13", package="uu_basename", path="src/uu/basename" }
basenc = { optional=true, version="0.0.8", package="uu_basenc", path="src/uu/basenc" } basenc = { optional=true, version="0.0.13", package="uu_basenc", path="src/uu/basenc" }
cat = { optional=true, version="0.0.8", package="uu_cat", path="src/uu/cat" } cat = { optional=true, version="0.0.13", package="uu_cat", path="src/uu/cat" }
chcon = { optional=true, version="0.0.8", package="uu_chcon", path="src/uu/chcon" } chcon = { optional=true, version="0.0.13", package="uu_chcon", path="src/uu/chcon" }
chgrp = { optional=true, version="0.0.8", package="uu_chgrp", path="src/uu/chgrp" } chgrp = { optional=true, version="0.0.13", package="uu_chgrp", path="src/uu/chgrp" }
chmod = { optional=true, version="0.0.8", package="uu_chmod", path="src/uu/chmod" } chmod = { optional=true, version="0.0.13", package="uu_chmod", path="src/uu/chmod" }
chown = { optional=true, version="0.0.8", package="uu_chown", path="src/uu/chown" } chown = { optional=true, version="0.0.13", package="uu_chown", path="src/uu/chown" }
chroot = { optional=true, version="0.0.8", package="uu_chroot", path="src/uu/chroot" } chroot = { optional=true, version="0.0.13", package="uu_chroot", path="src/uu/chroot" }
cksum = { optional=true, version="0.0.8", package="uu_cksum", path="src/uu/cksum" } cksum = { optional=true, version="0.0.13", package="uu_cksum", path="src/uu/cksum" }
comm = { optional=true, version="0.0.8", package="uu_comm", path="src/uu/comm" } comm = { optional=true, version="0.0.13", package="uu_comm", path="src/uu/comm" }
cp = { optional=true, version="0.0.8", package="uu_cp", path="src/uu/cp" } cp = { optional=true, version="0.0.13", package="uu_cp", path="src/uu/cp" }
csplit = { optional=true, version="0.0.8", package="uu_csplit", path="src/uu/csplit" } csplit = { optional=true, version="0.0.13", package="uu_csplit", path="src/uu/csplit" }
cut = { optional=true, version="0.0.8", package="uu_cut", path="src/uu/cut" } cut = { optional=true, version="0.0.13", package="uu_cut", path="src/uu/cut" }
date = { optional=true, version="0.0.8", package="uu_date", path="src/uu/date" } date = { optional=true, version="0.0.13", package="uu_date", path="src/uu/date" }
dd = { optional=true, version="0.0.8", package="uu_dd", path="src/uu/dd" } dd = { optional=true, version="0.0.13", package="uu_dd", path="src/uu/dd" }
df = { optional=true, version="0.0.8", package="uu_df", path="src/uu/df" } df = { optional=true, version="0.0.13", package="uu_df", path="src/uu/df" }
dircolors= { optional=true, version="0.0.8", package="uu_dircolors", path="src/uu/dircolors" } dircolors= { optional=true, version="0.0.13", package="uu_dircolors", path="src/uu/dircolors" }
dirname = { optional=true, version="0.0.8", package="uu_dirname", path="src/uu/dirname" } dirname = { optional=true, version="0.0.13", package="uu_dirname", path="src/uu/dirname" }
du = { optional=true, version="0.0.8", package="uu_du", path="src/uu/du" } du = { optional=true, version="0.0.13", package="uu_du", path="src/uu/du" }
echo = { optional=true, version="0.0.8", package="uu_echo", path="src/uu/echo" } echo = { optional=true, version="0.0.13", package="uu_echo", path="src/uu/echo" }
env = { optional=true, version="0.0.8", package="uu_env", path="src/uu/env" } env = { optional=true, version="0.0.13", package="uu_env", path="src/uu/env" }
expand = { optional=true, version="0.0.8", package="uu_expand", path="src/uu/expand" } expand = { optional=true, version="0.0.13", package="uu_expand", path="src/uu/expand" }
expr = { optional=true, version="0.0.8", package="uu_expr", path="src/uu/expr" } expr = { optional=true, version="0.0.13", package="uu_expr", path="src/uu/expr" }
factor = { optional=true, version="0.0.8", package="uu_factor", path="src/uu/factor" } factor = { optional=true, version="0.0.13", package="uu_factor", path="src/uu/factor" }
false = { optional=true, version="0.0.8", package="uu_false", path="src/uu/false" } false = { optional=true, version="0.0.13", package="uu_false", path="src/uu/false" }
fmt = { optional=true, version="0.0.8", package="uu_fmt", path="src/uu/fmt" } fmt = { optional=true, version="0.0.13", package="uu_fmt", path="src/uu/fmt" }
fold = { optional=true, version="0.0.8", package="uu_fold", path="src/uu/fold" } fold = { optional=true, version="0.0.13", package="uu_fold", path="src/uu/fold" }
groups = { optional=true, version="0.0.8", package="uu_groups", path="src/uu/groups" } groups = { optional=true, version="0.0.13", package="uu_groups", path="src/uu/groups" }
hashsum = { optional=true, version="0.0.8", package="uu_hashsum", path="src/uu/hashsum" } hashsum = { optional=true, version="0.0.13", package="uu_hashsum", path="src/uu/hashsum" }
head = { optional=true, version="0.0.8", package="uu_head", path="src/uu/head" } head = { optional=true, version="0.0.13", package="uu_head", path="src/uu/head" }
hostid = { optional=true, version="0.0.8", package="uu_hostid", path="src/uu/hostid" } hostid = { optional=true, version="0.0.13", package="uu_hostid", path="src/uu/hostid" }
hostname = { optional=true, version="0.0.8", package="uu_hostname", path="src/uu/hostname" } hostname = { optional=true, version="0.0.13", package="uu_hostname", path="src/uu/hostname" }
id = { optional=true, version="0.0.8", package="uu_id", path="src/uu/id" } id = { optional=true, version="0.0.13", package="uu_id", path="src/uu/id" }
install = { optional=true, version="0.0.8", package="uu_install", path="src/uu/install" } install = { optional=true, version="0.0.13", package="uu_install", path="src/uu/install" }
join = { optional=true, version="0.0.8", package="uu_join", path="src/uu/join" } join = { optional=true, version="0.0.13", package="uu_join", path="src/uu/join" }
kill = { optional=true, version="0.0.8", package="uu_kill", path="src/uu/kill" } kill = { optional=true, version="0.0.13", package="uu_kill", path="src/uu/kill" }
link = { optional=true, version="0.0.8", package="uu_link", path="src/uu/link" } link = { optional=true, version="0.0.13", package="uu_link", path="src/uu/link" }
ln = { optional=true, version="0.0.8", package="uu_ln", path="src/uu/ln" } ln = { optional=true, version="0.0.13", package="uu_ln", path="src/uu/ln" }
ls = { optional=true, version="0.0.8", package="uu_ls", path="src/uu/ls" } ls = { optional=true, version="0.0.13", package="uu_ls", path="src/uu/ls" }
logname = { optional=true, version="0.0.8", package="uu_logname", path="src/uu/logname" } logname = { optional=true, version="0.0.13", package="uu_logname", path="src/uu/logname" }
mkdir = { optional=true, version="0.0.8", package="uu_mkdir", path="src/uu/mkdir" } mkdir = { optional=true, version="0.0.13", package="uu_mkdir", path="src/uu/mkdir" }
mkfifo = { optional=true, version="0.0.8", package="uu_mkfifo", path="src/uu/mkfifo" } mkfifo = { optional=true, version="0.0.13", package="uu_mkfifo", path="src/uu/mkfifo" }
mknod = { optional=true, version="0.0.8", package="uu_mknod", path="src/uu/mknod" } mknod = { optional=true, version="0.0.13", package="uu_mknod", path="src/uu/mknod" }
mktemp = { optional=true, version="0.0.8", package="uu_mktemp", path="src/uu/mktemp" } mktemp = { optional=true, version="0.0.13", package="uu_mktemp", path="src/uu/mktemp" }
more = { optional=true, version="0.0.8", package="uu_more", path="src/uu/more" } more = { optional=true, version="0.0.13", package="uu_more", path="src/uu/more" }
mv = { optional=true, version="0.0.8", package="uu_mv", path="src/uu/mv" } mv = { optional=true, version="0.0.13", package="uu_mv", path="src/uu/mv" }
nice = { optional=true, version="0.0.8", package="uu_nice", path="src/uu/nice" } nice = { optional=true, version="0.0.13", package="uu_nice", path="src/uu/nice" }
nl = { optional=true, version="0.0.8", package="uu_nl", path="src/uu/nl" } nl = { optional=true, version="0.0.13", package="uu_nl", path="src/uu/nl" }
nohup = { optional=true, version="0.0.8", package="uu_nohup", path="src/uu/nohup" } nohup = { optional=true, version="0.0.13", package="uu_nohup", path="src/uu/nohup" }
nproc = { optional=true, version="0.0.8", package="uu_nproc", path="src/uu/nproc" } nproc = { optional=true, version="0.0.13", package="uu_nproc", path="src/uu/nproc" }
numfmt = { optional=true, version="0.0.8", package="uu_numfmt", path="src/uu/numfmt" } numfmt = { optional=true, version="0.0.13", package="uu_numfmt", path="src/uu/numfmt" }
od = { optional=true, version="0.0.8", package="uu_od", path="src/uu/od" } od = { optional=true, version="0.0.13", package="uu_od", path="src/uu/od" }
paste = { optional=true, version="0.0.8", package="uu_paste", path="src/uu/paste" } paste = { optional=true, version="0.0.13", package="uu_paste", path="src/uu/paste" }
pathchk = { optional=true, version="0.0.8", package="uu_pathchk", path="src/uu/pathchk" } pathchk = { optional=true, version="0.0.13", package="uu_pathchk", path="src/uu/pathchk" }
pinky = { optional=true, version="0.0.8", package="uu_pinky", path="src/uu/pinky" } pinky = { optional=true, version="0.0.13", package="uu_pinky", path="src/uu/pinky" }
pr = { optional=true, version="0.0.8", package="uu_pr", path="src/uu/pr" } pr = { optional=true, version="0.0.13", package="uu_pr", path="src/uu/pr" }
printenv = { optional=true, version="0.0.8", package="uu_printenv", path="src/uu/printenv" } printenv = { optional=true, version="0.0.13", package="uu_printenv", path="src/uu/printenv" }
printf = { optional=true, version="0.0.8", package="uu_printf", path="src/uu/printf" } printf = { optional=true, version="0.0.13", package="uu_printf", path="src/uu/printf" }
ptx = { optional=true, version="0.0.8", package="uu_ptx", path="src/uu/ptx" } ptx = { optional=true, version="0.0.13", package="uu_ptx", path="src/uu/ptx" }
pwd = { optional=true, version="0.0.8", package="uu_pwd", path="src/uu/pwd" } pwd = { optional=true, version="0.0.13", package="uu_pwd", path="src/uu/pwd" }
readlink = { optional=true, version="0.0.8", package="uu_readlink", path="src/uu/readlink" } readlink = { optional=true, version="0.0.13", package="uu_readlink", path="src/uu/readlink" }
realpath = { optional=true, version="0.0.8", package="uu_realpath", path="src/uu/realpath" } realpath = { optional=true, version="0.0.13", package="uu_realpath", path="src/uu/realpath" }
relpath = { optional=true, version="0.0.8", package="uu_relpath", path="src/uu/relpath" } relpath = { optional=true, version="0.0.13", package="uu_relpath", path="src/uu/relpath" }
rm = { optional=true, version="0.0.8", package="uu_rm", path="src/uu/rm" } rm = { optional=true, version="0.0.13", package="uu_rm", path="src/uu/rm" }
rmdir = { optional=true, version="0.0.8", package="uu_rmdir", path="src/uu/rmdir" } rmdir = { optional=true, version="0.0.13", package="uu_rmdir", path="src/uu/rmdir" }
runcon = { optional=true, version="0.0.8", package="uu_runcon", path="src/uu/runcon" } runcon = { optional=true, version="0.0.13", package="uu_runcon", path="src/uu/runcon" }
seq = { optional=true, version="0.0.8", package="uu_seq", path="src/uu/seq" } seq = { optional=true, version="0.0.13", package="uu_seq", path="src/uu/seq" }
shred = { optional=true, version="0.0.8", package="uu_shred", path="src/uu/shred" } shred = { optional=true, version="0.0.13", package="uu_shred", path="src/uu/shred" }
shuf = { optional=true, version="0.0.8", package="uu_shuf", path="src/uu/shuf" } shuf = { optional=true, version="0.0.13", package="uu_shuf", path="src/uu/shuf" }
sleep = { optional=true, version="0.0.8", package="uu_sleep", path="src/uu/sleep" } sleep = { optional=true, version="0.0.13", package="uu_sleep", path="src/uu/sleep" }
sort = { optional=true, version="0.0.8", package="uu_sort", path="src/uu/sort" } sort = { optional=true, version="0.0.13", package="uu_sort", path="src/uu/sort" }
split = { optional=true, version="0.0.8", package="uu_split", path="src/uu/split" } split = { optional=true, version="0.0.13", package="uu_split", path="src/uu/split" }
stat = { optional=true, version="0.0.8", package="uu_stat", path="src/uu/stat" } stat = { optional=true, version="0.0.13", package="uu_stat", path="src/uu/stat" }
stdbuf = { optional=true, version="0.0.8", package="uu_stdbuf", path="src/uu/stdbuf" } stdbuf = { optional=true, version="0.0.13", package="uu_stdbuf", path="src/uu/stdbuf" }
sum = { optional=true, version="0.0.8", package="uu_sum", path="src/uu/sum" } sum = { optional=true, version="0.0.13", package="uu_sum", path="src/uu/sum" }
sync = { optional=true, version="0.0.8", package="uu_sync", path="src/uu/sync" } sync = { optional=true, version="0.0.13", package="uu_sync", path="src/uu/sync" }
tac = { optional=true, version="0.0.8", package="uu_tac", path="src/uu/tac" } tac = { optional=true, version="0.0.13", package="uu_tac", path="src/uu/tac" }
tail = { optional=true, version="0.0.8", package="uu_tail", path="src/uu/tail" } tail = { optional=true, version="0.0.13", package="uu_tail", path="src/uu/tail" }
tee = { optional=true, version="0.0.8", package="uu_tee", path="src/uu/tee" } tee = { optional=true, version="0.0.13", package="uu_tee", path="src/uu/tee" }
timeout = { optional=true, version="0.0.8", package="uu_timeout", path="src/uu/timeout" } timeout = { optional=true, version="0.0.13", package="uu_timeout", path="src/uu/timeout" }
touch = { optional=true, version="0.0.8", package="uu_touch", path="src/uu/touch" } touch = { optional=true, version="0.0.13", package="uu_touch", path="src/uu/touch" }
tr = { optional=true, version="0.0.8", package="uu_tr", path="src/uu/tr" } tr = { optional=true, version="0.0.13", package="uu_tr", path="src/uu/tr" }
true = { optional=true, version="0.0.8", package="uu_true", path="src/uu/true" } true = { optional=true, version="0.0.13", package="uu_true", path="src/uu/true" }
truncate = { optional=true, version="0.0.8", package="uu_truncate", path="src/uu/truncate" } truncate = { optional=true, version="0.0.13", package="uu_truncate", path="src/uu/truncate" }
tsort = { optional=true, version="0.0.8", package="uu_tsort", path="src/uu/tsort" } tsort = { optional=true, version="0.0.13", package="uu_tsort", path="src/uu/tsort" }
tty = { optional=true, version="0.0.8", package="uu_tty", path="src/uu/tty" } tty = { optional=true, version="0.0.13", package="uu_tty", path="src/uu/tty" }
uname = { optional=true, version="0.0.8", package="uu_uname", path="src/uu/uname" } uname = { optional=true, version="0.0.13", package="uu_uname", path="src/uu/uname" }
unexpand = { optional=true, version="0.0.8", package="uu_unexpand", path="src/uu/unexpand" } unexpand = { optional=true, version="0.0.13", package="uu_unexpand", path="src/uu/unexpand" }
uniq = { optional=true, version="0.0.8", package="uu_uniq", path="src/uu/uniq" } uniq = { optional=true, version="0.0.13", package="uu_uniq", path="src/uu/uniq" }
unlink = { optional=true, version="0.0.8", package="uu_unlink", path="src/uu/unlink" } unlink = { optional=true, version="0.0.13", package="uu_unlink", path="src/uu/unlink" }
uptime = { optional=true, version="0.0.8", package="uu_uptime", path="src/uu/uptime" } uptime = { optional=true, version="0.0.13", package="uu_uptime", path="src/uu/uptime" }
users = { optional=true, version="0.0.8", package="uu_users", path="src/uu/users" } users = { optional=true, version="0.0.13", package="uu_users", path="src/uu/users" }
wc = { optional=true, version="0.0.8", package="uu_wc", path="src/uu/wc" } wc = { optional=true, version="0.0.13", package="uu_wc", path="src/uu/wc" }
who = { optional=true, version="0.0.8", package="uu_who", path="src/uu/who" } who = { optional=true, version="0.0.13", package="uu_who", path="src/uu/who" }
whoami = { optional=true, version="0.0.8", package="uu_whoami", path="src/uu/whoami" } whoami = { optional=true, version="0.0.13", package="uu_whoami", path="src/uu/whoami" }
yes = { optional=true, version="0.0.8", package="uu_yes", path="src/uu/yes" } yes = { optional=true, version="0.0.13", package="uu_yes", path="src/uu/yes" }
# this breaks clippy linting with: "tests/by-util/test_factor_benches.rs: No such file or directory (os error 2)" # this breaks clippy linting with: "tests/by-util/test_factor_benches.rs: No such file or directory (os error 2)"
# factor_benches = { optional = true, version = "0.0.0", package = "uu_factor_benches", path = "tests/benches/factor" } # factor_benches = { optional = true, version = "0.0.0", package = "uu_factor_benches", path = "tests/benches/factor" }
@ -361,32 +366,38 @@ yes = { optional=true, version="0.0.8", package="uu_yes", path="src/uu/yes"
#pin_cc = { version="1.0.61, < 1.0.62", package="cc" } ## cc v1.0.62 has compiler errors for MinRustV v1.32.0, requires 1.34 (for `std::str::split_ascii_whitespace()`) #pin_cc = { version="1.0.61, < 1.0.62", package="cc" } ## cc v1.0.62 has compiler errors for MinRustV v1.32.0, requires 1.34 (for `std::str::split_ascii_whitespace()`)
[dev-dependencies] [dev-dependencies]
chrono = "0.4.11" chrono = "^0.4.11"
conv = "0.3" conv = "0.3"
filetime = "0.2" filetime = "0.2"
glob = "0.3.0" glob = "0.3.0"
libc = "0.2" libc = "0.2"
pretty_assertions = "0.7.2" pretty_assertions = "1"
rand = "0.7" rand = "0.8"
regex = "1.0" regex = "1.0"
sha1 = { version="0.6", features=["std"] } sha1 = { version="0.10", features=["std"] }
tempfile = "3.2.0" tempfile = "3"
time = "0.1" time = "0.1"
unindent = "0.1" unindent = "0.1"
uucore = { version=">=0.0.10", package="uucore", path="src/uucore", features=["entries", "process"] } uucore = { version=">=0.0.11", package="uucore", path="src/uucore", features=["entries", "process"] }
walkdir = "2.2" walkdir = "2.2"
atty = "0.2" atty = "0.2"
hex-literal = "0.3.1"
[target.'cfg(target_os = "linux")'.dev-dependencies] [target.'cfg(target_os = "linux")'.dev-dependencies]
rlimit = "0.4.0" rlimit = "0.4.0"
[target.'cfg(unix)'.dev-dependencies] [target.'cfg(unix)'.dev-dependencies]
nix = "0.20.0" nix = "0.23.1"
rust-users = { version="0.10", package="users" } rust-users = { version="0.10", package="users" }
unix_socket = "0.5.0" unix_socket = "0.5.0"
[build-dependencies]
phf_codegen = "0.10.0"
[[bin]] [[bin]]
name = "coreutils" name = "coreutils"
path = "src/bin/coreutils.rs" path = "src/bin/coreutils.rs"
[[bin]]
name = "uudoc"
path = "src/bin/uudoc.rs"
View file
@ -21,7 +21,7 @@ Running GNU tests
At the end you should have uutils, gnu and gnulib checked out next to each other. At the end you should have uutils, gnu and gnulib checked out next to each other.
- Run `cd uutils && ./util/build-gnu.sh && cd ..` to get everything ready (this may take a while) - Run `cd uutils && ./util/build-gnu.sh && cd ..` to get everything ready (this may take a while)
- Finally, you can run `tests with bash uutils/util/run-gnu-test.sh <test>`. Instead of `<test>` insert the test you want to run, e.g. `tests/misc/wc-proc`. - Finally, you can run tests with `bash uutils/util/run-gnu-test.sh <test>`. Instead of `<test>` insert the test you want to run, e.g. `tests/misc/wc-proc.sh`.
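Put together, a typical local session following the steps above might look like this sketch; the side-by-side checkout layout is assumed, and the test name is only an example.

```bash
# Assumes uutils, gnu and gnulib are checked out next to each other.
cd uutils
bash util/build-gnu.sh                                   # build uutils plus the patched GNU suite (slow the first time)
cd ..
bash uutils/util/run-gnu-test.sh tests/misc/wc-proc.sh   # run a single GNU test
bash uutils/util/run-gnu-test.sh                         # or run the whole suite
```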
Code Coverage Report Generation Code Coverage Report Generation
@ -33,7 +33,7 @@ Code coverage report can be generated using [grcov](https://github.com/mozilla/g
### Using Nightly Rust ### Using Nightly Rust
To generate [gcov-based](https://github.com/mozilla/grcov#example-how-to-generate-gcda-files-for-cc) coverage report To generate [gcov-based](https://github.com/mozilla/grcov#example-how-to-generate-gcda-files-for-a-rust-project) coverage report
```bash ```bash
$ export CARGO_INCREMENTAL=0 $ export CARGO_INCREMENTAL=0
View file
@ -26,11 +26,6 @@ BINDIR ?= /bin
MANDIR ?= /man/man1 MANDIR ?= /man/man1
INSTALLDIR_BIN=$(DESTDIR)$(PREFIX)$(BINDIR) INSTALLDIR_BIN=$(DESTDIR)$(PREFIX)$(BINDIR)
INSTALLDIR_MAN=$(DESTDIR)$(PREFIX)/share/$(MANDIR)
$(shell test -d $(INSTALLDIR_MAN))
ifneq ($(.SHELLSTATUS),0)
override INSTALLDIR_MAN=$(DESTDIR)$(PREFIX)$(MANDIR)
endif
#prefix to apply to coreutils binary and all tool binaries #prefix to apply to coreutils binary and all tool binaries
PROG_PREFIX ?= PROG_PREFIX ?=
@ -47,18 +42,19 @@ BUSYBOX_VER := 1.32.1
BUSYBOX_SRC := $(BUSYBOX_ROOT)/busybox-$(BUSYBOX_VER) BUSYBOX_SRC := $(BUSYBOX_ROOT)/busybox-$(BUSYBOX_VER)
ifeq ($(SELINUX_ENABLED),) ifeq ($(SELINUX_ENABLED),)
SELINUX_ENABLED := 0 SELINUX_ENABLED := 0
ifneq ($(OS),Windows_NT) ifneq ($(OS),Windows_NT)
ifeq ($(shell /sbin/selinuxenabled 2>/dev/null ; echo $$?),0) ifeq ($(shell /sbin/selinuxenabled 2>/dev/null ; echo $$?),0)
SELINUX_ENABLED := 1 SELINUX_ENABLED := 1
endif endif
endif endif
endif endif
# Possible programs # Possible programs
PROGS := \ PROGS := \
base32 \ base32 \
base64 \ base64 \
basenc \
basename \ basename \
cat \ cat \
cksum \ cksum \
@ -67,6 +63,7 @@ PROGS := \
csplit \ csplit \
cut \ cut \
date \ date \
dd \
df \ df \
dircolors \ dircolors \
dirname \ dirname \
@ -161,11 +158,11 @@ SELINUX_PROGS := \
runcon runcon
ifneq ($(OS),Windows_NT) ifneq ($(OS),Windows_NT)
PROGS := $(PROGS) $(UNIX_PROGS) PROGS := $(PROGS) $(UNIX_PROGS)
endif endif
ifeq ($(SELINUX_ENABLED),1) ifeq ($(SELINUX_ENABLED),1)
PROGS := $(PROGS) $(SELINUX_PROGS) PROGS := $(PROGS) $(SELINUX_PROGS)
endif endif
UTILS ?= $(PROGS) UTILS ?= $(PROGS)
@ -279,10 +276,7 @@ endif
build-coreutils: build-coreutils:
${CARGO} build ${CARGOFLAGS} --features "${EXES}" ${PROFILE_CMD} --no-default-features ${CARGO} build ${CARGOFLAGS} --features "${EXES}" ${PROFILE_CMD} --no-default-features
build-manpages: build: build-coreutils build-pkgs
cd $(DOCSDIR) && $(MAKE) man
build: build-coreutils build-pkgs build-manpages
$(foreach test,$(filter-out $(SKIP_UTILS),$(PROGS)),$(eval $(call TEST_BUSYBOX,$(test)))) $(foreach test,$(filter-out $(SKIP_UTILS),$(PROGS)),$(eval $(call TEST_BUSYBOX,$(test))))
@ -316,7 +310,7 @@ busytest: $(BUILDDIR)/busybox $(addprefix test_busybox_,$(filter-out $(SKIP_UTIL
endif endif
clean: clean:
$(RM) $(BUILDDIR) cargo clean
cd $(DOCSDIR) && $(MAKE) clean cd $(DOCSDIR) && $(MAKE) clean
distclean: clean distclean: clean
@ -324,20 +318,16 @@ distclean: clean
install: build install: build
mkdir -p $(INSTALLDIR_BIN) mkdir -p $(INSTALLDIR_BIN)
mkdir -p $(INSTALLDIR_MAN)
ifeq (${MULTICALL}, y) ifeq (${MULTICALL}, y)
$(INSTALL) $(BUILDDIR)/coreutils $(INSTALLDIR_BIN)/$(PROG_PREFIX)coreutils $(INSTALL) $(BUILDDIR)/coreutils $(INSTALLDIR_BIN)/$(PROG_PREFIX)coreutils
cd $(INSTALLDIR_BIN) && $(foreach prog, $(filter-out coreutils, $(INSTALLEES)), \ cd $(INSTALLDIR_BIN) && $(foreach prog, $(filter-out coreutils, $(INSTALLEES)), \
ln -fs $(PROG_PREFIX)coreutils $(PROG_PREFIX)$(prog) &&) : ln -fs $(PROG_PREFIX)coreutils $(PROG_PREFIX)$(prog) &&) :
$(if $(findstring test,$(INSTALLEES)), cd $(INSTALLDIR_BIN) && ln -fs $(PROG_PREFIX)coreutils $(PROG_PREFIX)[) $(if $(findstring test,$(INSTALLEES)), cd $(INSTALLDIR_BIN) && ln -fs $(PROG_PREFIX)coreutils $(PROG_PREFIX)[)
cat $(DOCSDIR)/_build/man/coreutils.1 | gzip > $(INSTALLDIR_MAN)/$(PROG_PREFIX)coreutils.1.gz
else else
$(foreach prog, $(INSTALLEES), \ $(foreach prog, $(INSTALLEES), \
$(INSTALL) $(BUILDDIR)/$(prog) $(INSTALLDIR_BIN)/$(PROG_PREFIX)$(prog);) $(INSTALL) $(BUILDDIR)/$(prog) $(INSTALLDIR_BIN)/$(PROG_PREFIX)$(prog);)
$(if $(findstring test,$(INSTALLEES)), $(INSTALL) $(BUILDDIR)/test $(INSTALLDIR_BIN)/$(PROG_PREFIX)[) $(if $(findstring test,$(INSTALLEES)), $(INSTALL) $(BUILDDIR)/test $(INSTALLDIR_BIN)/$(PROG_PREFIX)[)
endif endif
$(foreach man, $(filter $(INSTALLEES), $(basename $(notdir $(wildcard $(DOCSDIR)/_build/man/*)))), \
cat $(DOCSDIR)/_build/man/$(man).1 | gzip > $(INSTALLDIR_MAN)/$(PROG_PREFIX)$(man).1.gz &&) :
mkdir -p $(DESTDIR)$(PREFIX)/share/zsh/site-functions mkdir -p $(DESTDIR)$(PREFIX)/share/zsh/site-functions
mkdir -p $(DESTDIR)$(PREFIX)/share/bash-completion/completions mkdir -p $(DESTDIR)$(PREFIX)/share/bash-completion/completions
mkdir -p $(DESTDIR)$(PREFIX)/share/fish/vendor_completions.d mkdir -p $(DESTDIR)$(PREFIX)/share/fish/vendor_completions.d
@ -351,12 +341,10 @@ uninstall:
ifeq (${MULTICALL}, y) ifeq (${MULTICALL}, y)
rm -f $(addprefix $(INSTALLDIR_BIN)/,$(PROG_PREFIX)coreutils) rm -f $(addprefix $(INSTALLDIR_BIN)/,$(PROG_PREFIX)coreutils)
endif endif
rm -f $(addprefix $(INSTALLDIR_MAN)/,$(PROG_PREFIX)coreutils.1.gz)
rm -f $(addprefix $(INSTALLDIR_BIN)/$(PROG_PREFIX),$(PROGS)) rm -f $(addprefix $(INSTALLDIR_BIN)/$(PROG_PREFIX),$(PROGS))
rm -f $(INSTALLDIR_BIN)/$(PROG_PREFIX)[ rm -f $(INSTALLDIR_BIN)/$(PROG_PREFIX)[
rm -f $(addprefix $(DESTDIR)$(PREFIX)/share/zsh/site-functions/_$(PROG_PREFIX),$(PROGS)) rm -f $(addprefix $(DESTDIR)$(PREFIX)/share/zsh/site-functions/_$(PROG_PREFIX),$(PROGS))
rm -f $(addprefix $(DESTDIR)$(PREFIX)/share/bash-completion/completions/$(PROG_PREFIX),$(PROGS)) rm -f $(addprefix $(DESTDIR)$(PREFIX)/share/bash-completion/completions/$(PROG_PREFIX),$(PROGS))
rm -f $(addprefix $(DESTDIR)$(PREFIX)/share/fish/vendor_completions.d/$(PROG_PREFIX),$(addsuffix .fish,$(PROGS))) rm -f $(addprefix $(DESTDIR)$(PREFIX)/share/fish/vendor_completions.d/$(PROG_PREFIX),$(addsuffix .fish,$(PROGS)))
rm -f $(addprefix $(INSTALLDIR_MAN)/$(PROG_PREFIX),$(addsuffix .1.gz,$(PROGS)))
.PHONY: all build build-coreutils build-pkgs build-docs test distclean clean busytest install uninstall .PHONY: all build build-coreutils build-pkgs test distclean clean busytest install uninstall
View file
@ -1,4 +1,4 @@
Copyright (c) Jordi Boggiano Copyright (c) Jordi Boggiano and many others
Permission is hereby granted, free of charge, to any person obtaining a copy of Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in this software and associated documentation files (the "Software"), to deal in
114
README.md
View file
@ -2,7 +2,7 @@
[![Crates.io](https://img.shields.io/crates/v/coreutils.svg)](https://crates.io/crates/coreutils) [![Crates.io](https://img.shields.io/crates/v/coreutils.svg)](https://crates.io/crates/coreutils)
[![Discord](https://img.shields.io/badge/discord-join-7289DA.svg?logo=discord&longCache=true&style=flat)](https://discord.gg/wQVJbvJ) [![Discord](https://img.shields.io/badge/discord-join-7289DA.svg?logo=discord&longCache=true&style=flat)](https://discord.gg/wQVJbvJ)
[![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/uutils/coreutils/blob/master/LICENSE) [![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/uutils/coreutils/blob/main/LICENSE)
[![LOC](https://tokei.rs/b1/github/uutils/coreutils?category=code)](https://github.com/Aaronepower/tokei) [![LOC](https://tokei.rs/b1/github/uutils/coreutils?category=code)](https://github.com/Aaronepower/tokei)
[![dependency status](https://deps.rs/repo/github/uutils/coreutils/status.svg)](https://deps.rs/repo/github/uutils/coreutils) [![dependency status](https://deps.rs/repo/github/uutils/coreutils/status.svg)](https://deps.rs/repo/github/uutils/coreutils)
@ -15,35 +15,46 @@
<!-- spell-checker:ignore markdownlint ; (options) DESTDIR RUNTEST UTILNAME --> <!-- spell-checker:ignore markdownlint ; (options) DESTDIR RUNTEST UTILNAME -->
uutils is an attempt at writing universal (as in cross-platform) CLI uutils is an attempt at writing universal (as in cross-platform) CLI
utilities in [Rust](http://www.rust-lang.org). This repository is intended to utilities in [Rust](http://www.rust-lang.org).
aggregate GNU coreutils rewrites.
To install it:
```
$ cargo install coreutils
$ ~/.cargo/bin/coreutils
```
## Why? ## Why?
Many GNU, Linux and other utilities are useful, and obviously uutils aims to work on as many platforms as possible, to be able to use the
[some](http://gnuwin32.sourceforge.net) [effort](http://unxutils.sourceforge.net) same utils on Linux, Mac, Windows and other platforms. This ensures, for
has been spent in the past to port them to Windows. However, those projects example, that scripts can be easily transferred between platforms. Rust was
are written in platform-specific C, a language considered unsafe compared to Rust, and chosen not only because it is fast and safe, but is also excellent for
have other issues. writing cross-platform code.
Rust provides a good, platform-agnostic way of writing systems utilities that are easy ## Documentation
to compile anywhere, and this is as good a way as any to try and learn it. uutils has both user and developer documentation available:
- [User Manual](https://uutils.github.io/coreutils-docs/user/)
- [Developer Documentation](https://uutils.github.io/coreutils-docs/dev/coreutils/)
Both can also be generated locally, the instructions for that can be found in the
[coreutils docs](https://github.com/uutils/coreutils-docs) repository.
<!-- ANCHOR: installation (this mark is needed for mdbook) -->
## Requirements ## Requirements
* Rust (`cargo`, `rustc`) * Rust (`cargo`, `rustc`)
* GNU Make (required to build documentation) * GNU Make (optional)
* [Sphinx](http://www.sphinx-doc.org/) (for documentation)
* gzip (for installing documentation)
### Rust Version ### Rust Version
uutils follows Rust's release channels and is tested against stable, beta and nightly. uutils follows Rust's release channels and is tested against stable, beta and nightly.
The current oldest supported version of the Rust compiler is `1.47`. The current oldest supported version of the Rust compiler is `1.56`.
On both Windows and Redox, only the nightly version is tested currently. On both Windows and Redox, only the nightly version is tested currently.
## Build Instructions ## Building
There are currently two methods to build the uutils binaries: either Cargo There are currently two methods to build the uutils binaries: either Cargo
or GNU Make. or GNU Make.
@ -122,7 +133,7 @@ To build only a few of the available utilities:
$ make UTILS='UTILITY_1 UTILITY_2' $ make UTILS='UTILITY_1 UTILITY_2'
``` ```
## Installation Instructions ## Installation
### Cargo ### Cargo
@ -212,7 +223,7 @@ run:
cargo run completion ls bash > /usr/local/share/bash-completion/completions/ls cargo run completion ls bash > /usr/local/share/bash-completion/completions/ls
``` ```
## Un-installation Instructions ## Un-installation
Un-installation differs depending on how you have installed uutils. If you used Un-installation differs depending on how you have installed uutils. If you used
Cargo to install, use Cargo to uninstall. If you used GNU Make to install, use Cargo to install, use Cargo to uninstall. If you used GNU Make to install, use
@ -252,8 +263,9 @@ To uninstall from a custom parent directory:
# DESTDIR is also supported # DESTDIR is also supported
$ make PREFIX=/my/path uninstall $ make PREFIX=/my/path uninstall
``` ```
<!-- ANCHOR_END: installation (this mark is needed for mdbook) -->
## Test Instructions ## Testing
Testing can be done using either Cargo or `make`. Testing can be done using either Cargo or `make`.
@ -319,7 +331,7 @@ To include tests for unimplemented behavior:
$ make UTILS='UTILITY_1 UTILITY_2' SPEC=y test $ make UTILS='UTILITY_1 UTILITY_2' SPEC=y test
``` ```
## Run Busybox Tests ### Run Busybox Tests
This testing functionality is only available on *nix operating systems and This testing functionality is only available on *nix operating systems and
requires `make`. requires `make`.
@ -342,7 +354,11 @@ To pass an argument like "-v" to the busybox test runtime
$ make UTILS='UTILITY_1 UTILITY_2' RUNTEST_ARGS='-v' busytest $ make UTILS='UTILITY_1 UTILITY_2' RUNTEST_ARGS='-v' busytest
``` ```
## Comparing with GNU ### Comparing with GNU
Below is the evolution of how many GNU tests uutils passes. A more detailed
breakdown of the GNU test results of the main branch can be found
[in the user manual](https://uutils.github.io/coreutils-docs/user/test_coverage.html).
![Evolution over time](https://github.com/uutils/coreutils-tracking/blob/main/gnu-results.png?raw=true) ![Evolution over time](https://github.com/uutils/coreutils-tracking/blob/main/gnu-results.png?raw=true)
@ -357,7 +373,26 @@ $ bash util/run-gnu-test.sh tests/touch/not-owner.sh # for example
Note that it relies on individual utilities (not the multicall binary). Note that it relies on individual utilities (not the multicall binary).
-## Contribute
+### Improving the GNU compatibility
+
+The Python script `./util/remaining-gnu-error.py` shows the list of failing tests in the CI.
+To improve the GNU compatibility, the following process is recommended:
+
+1. Identify a test (the smaller, the better) on a program that you understand or is easy to understand. You can use the `./util/remaining-gnu-error.py` script to help with this decision.
+1. Build both the GNU and Rust coreutils using: `bash util/build-gnu.sh`
+1. Run the test with `bash util/run-gnu-test.sh <your test>`
+1. Start to modify `<your test>` to understand what is wrong. Examples:
+   1. Add `set -v` to have the bash verbose mode
+   1. Add `echo $?` where needed
+   1. Dump the content of the output (ex: `cat err`)
+   1. ...
+1. Or, if the test is simple, extract the relevant information to create a new test case running both the GNU & Rust implementations
+1. Start to modify the Rust implementation to match the expected behavior
+1. Add a test to make sure that we don't regress (our test suite is super quick)
+
+## Contributing

To contribute to uutils, please see [CONTRIBUTING](CONTRIBUTING.md).
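A concrete sketch may help with the last step of the process above (adding a regression test). The snippet below is written in the style of the files under `tests/by-util/`; the helpers (`new_ucmd!`, `succeeds`, `fails`, `stdout_is`, `stderr_contains`) are the shared test utilities in this repository, while the specific test names, arguments, and expected strings are illustrative assumptions, not part of this commit.

```rust
// Hypothetical regression test, modelled on the existing tests/by-util files.
use crate::common::util::*;

#[test]
fn test_trailing_suffix_is_stripped() {
    // The ".txt" suffix should be removed from the final path component.
    new_ucmd!()
        .args(&["/tmp/notes.txt", ".txt"])
        .succeeds()
        .stdout_is("notes\n");
}

#[test]
fn test_missing_operand_fails() {
    // With no NAME argument the utility must exit non-zero and explain why.
    new_ucmd!().fails().stderr_contains("missing operand");
}
```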
@@ -371,18 +406,18 @@ To contribute to uutils, please see [CONTRIBUTING](CONTRIBUTING.md).
| basename | df | |
| basenc | expr | |
| cat | install | |
-| chcon | join | |
-| chgrp | ls | |
-| chmod | more | |
-| chown | numfmt | |
-| chroot | od (`--strings` and 128-bit data types missing) | |
-| cksum | pr | |
-| comm | printf | |
-| csplit | sort | |
-| cut | split | |
-| dircolors | tac | |
-| dirname | tail | |
-| du | test | |
+| chcon | ls | |
+| chgrp | more | |
+| chmod | numfmt | |
+| chown | od (`--strings` and 128-bit data types missing) | |
+| chroot | pr | |
+| cksum | printf | |
+| comm | sort | |
+| csplit | split | |
+| cut | tac | |
+| dircolors | tail | |
+| dirname | test | |
+| du | | |
| echo | | |
| env | | |
| expand | | |
@@ -396,16 +431,17 @@ To contribute to uutils, please see [CONTRIBUTING](CONTRIBUTING.md).
| hostid | | |
| hostname | | |
| id | | |
+| join | | |
| kill | | |
| link | | |
| ln | | |
| logname | | |
-| ~~md5sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/master/src/uu/hashsum/src/hashsum.rs)) | | |
-| ~~sha1sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/master/src/uu/hashsum/src/hashsum.rs)) | | |
-| ~~sha224sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/master/src/uu/hashsum/src/hashsum.rs)) | | |
-| ~~sha256sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/master/src/uu/hashsum/src/hashsum.rs)) | | |
-| ~~sha384sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/master/src/uu/hashsum/src/hashsum.rs)) | | |
-| ~~sha512sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/master/src/uu/hashsum/src/hashsum.rs)) | | |
+| ~~md5sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | | |
+| ~~sha1sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | | |
+| ~~sha224sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | | |
+| ~~sha256sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | | |
+| ~~sha384sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | | |
+| ~~sha512sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | | |
| mkdir | | |
| mkfifo | | |
| mknod | | |

build.rs

@ -12,29 +12,29 @@ pub fn main() {
println!("cargo:rustc-cfg=build={:?}", profile); println!("cargo:rustc-cfg=build={:?}", profile);
} }
let env_feature_prefix: &str = "CARGO_FEATURE_"; const ENV_FEATURE_PREFIX: &str = "CARGO_FEATURE_";
let feature_prefix: &str = "feat_"; const FEATURE_PREFIX: &str = "feat_";
let override_prefix: &str = "uu_"; const OVERRIDE_PREFIX: &str = "uu_";
let out_dir = env::var("OUT_DIR").unwrap(); let out_dir = env::var("OUT_DIR").unwrap();
// println!("cargo:warning=out_dir={}", out_dir); // println!("cargo:warning=out_dir={}", out_dir);
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap().replace("\\", "/"); let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap().replace('\\', "/");
// println!("cargo:warning=manifest_dir={}", manifest_dir); // println!("cargo:warning=manifest_dir={}", manifest_dir);
let util_tests_dir = format!("{}/tests/by-util", manifest_dir); let util_tests_dir = format!("{}/tests/by-util", manifest_dir);
// println!("cargo:warning=util_tests_dir={}", util_tests_dir); // println!("cargo:warning=util_tests_dir={}", util_tests_dir);
let mut crates = Vec::new(); let mut crates = Vec::new();
for (key, val) in env::vars() { for (key, val) in env::vars() {
if val == "1" && key.starts_with(env_feature_prefix) { if val == "1" && key.starts_with(ENV_FEATURE_PREFIX) {
let krate = key[env_feature_prefix.len()..].to_lowercase(); let krate = key[ENV_FEATURE_PREFIX.len()..].to_lowercase();
match krate.as_ref() { match krate.as_ref() {
"default" | "macos" | "unix" | "windows" | "selinux" => continue, // common/standard feature names "default" | "macos" | "unix" | "windows" | "selinux" => continue, // common/standard feature names
"nightly" | "test_unimplemented" => continue, // crate-local custom features "nightly" | "test_unimplemented" => continue, // crate-local custom features
"test" => continue, // over-ridden with 'uu_test' to avoid collision with rust core crate 'test' "test" => continue, // over-ridden with 'uu_test' to avoid collision with rust core crate 'test'
s if s.starts_with(feature_prefix) => continue, // crate feature sets s if s.starts_with(FEATURE_PREFIX) => continue, // crate feature sets
_ => {} // util feature name _ => {} // util feature name
} }
crates.push(krate.to_string()); crates.push(krate);
} }
} }
crates.sort(); crates.sort();
@ -43,33 +43,23 @@ pub fn main() {
let mut tf = File::create(Path::new(&out_dir).join("test_modules.rs")).unwrap(); let mut tf = File::create(Path::new(&out_dir).join("test_modules.rs")).unwrap();
mf.write_all( mf.write_all(
"type UtilityMap<T> = HashMap<&'static str, (fn(T) -> i32, fn() -> App<'static, 'static>)>;\n\ "type UtilityMap<T> = phf::Map<&'static str, (fn(T) -> i32, fn() -> Command<'static>)>;\n\
\n\ \n\
fn util_map<T: uucore::Args>() -> UtilityMap<T> {\n\ fn util_map<T: uucore::Args>() -> UtilityMap<T> {\n"
\t#[allow(unused_mut)]\n\ .as_bytes(),
\t#[allow(clippy::let_and_return)]\n\
\tlet mut map = UtilityMap::new();\n\
"
.as_bytes(),
) )
.unwrap(); .unwrap();
for krate in crates { let mut phf_map = phf_codegen::Map::<&str>::new();
for krate in &crates {
let map_value = format!("({krate}::uumain, {krate}::uu_app)", krate = krate);
match krate.as_ref() { match krate.as_ref() {
// 'test' is named uu_test to avoid collision with rust core crate 'test'. // 'test' is named uu_test to avoid collision with rust core crate 'test'.
// It can also be invoked by name '[' for the '[ expr ] syntax'. // It can also be invoked by name '[' for the '[ expr ] syntax'.
"uu_test" => { "uu_test" => {
mf.write_all( phf_map.entry("test", &map_value);
format!( phf_map.entry("[", &map_value);
"\
\tmap.insert(\"test\", ({krate}::uumain, {krate}::uu_app));\n\
\t\tmap.insert(\"[\", ({krate}::uumain, {krate}::uu_app));\n\
",
krate = krate
)
.as_bytes(),
)
.unwrap();
tf.write_all( tf.write_all(
format!( format!(
"#[path=\"{dir}/test_test.rs\"]\nmod test_test;\n", "#[path=\"{dir}/test_test.rs\"]\nmod test_test;\n",
@ -77,37 +67,25 @@ pub fn main() {
) )
.as_bytes(), .as_bytes(),
) )
.unwrap()
}
k if k.starts_with(override_prefix) => {
mf.write_all(
format!(
"\tmap.insert(\"{k}\", ({krate}::uumain, {krate}::uu_app));\n",
k = krate[override_prefix.len()..].to_string(),
krate = krate
)
.as_bytes(),
)
.unwrap(); .unwrap();
}
k if k.starts_with(OVERRIDE_PREFIX) => {
phf_map.entry(&k[OVERRIDE_PREFIX.len()..], &map_value);
tf.write_all( tf.write_all(
format!( format!(
"#[path=\"{dir}/test_{k}.rs\"]\nmod test_{k};\n", "#[path=\"{dir}/test_{k}.rs\"]\nmod test_{k};\n",
k = krate[override_prefix.len()..].to_string(), k = &krate[OVERRIDE_PREFIX.len()..],
dir = util_tests_dir, dir = util_tests_dir,
) )
.as_bytes(), .as_bytes(),
) )
.unwrap() .unwrap();
} }
"false" | "true" => { "false" | "true" => {
mf.write_all( phf_map.entry(
format!( krate,
"\tmap.insert(\"{krate}\", (r#{krate}::uumain, r#{krate}::uu_app));\n", &format!("(r#{krate}::uumain, r#{krate}::uu_app)", krate = krate),
krate = krate );
)
.as_bytes(),
)
.unwrap();
tf.write_all( tf.write_all(
format!( format!(
"#[path=\"{dir}/test_{krate}.rs\"]\nmod test_{krate};\n", "#[path=\"{dir}/test_{krate}.rs\"]\nmod test_{krate};\n",
@ -116,32 +94,30 @@ pub fn main() {
) )
.as_bytes(), .as_bytes(),
) )
.unwrap() .unwrap();
} }
"hashsum" => { "hashsum" => {
mf.write_all( phf_map.entry(
format!( krate,
"\ &format!("({krate}::uumain, {krate}::uu_app_custom)", krate = krate),
\tmap.insert(\"{krate}\", ({krate}::uumain, {krate}::uu_app_custom));\n\ );
\t\tmap.insert(\"md5sum\", ({krate}::uumain, {krate}::uu_app_common));\n\
\t\tmap.insert(\"sha1sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ let map_value = format!("({krate}::uumain, {krate}::uu_app_common)", krate = krate);
\t\tmap.insert(\"sha224sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("md5sum", &map_value);
\t\tmap.insert(\"sha256sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha1sum", &map_value);
\t\tmap.insert(\"sha384sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha224sum", &map_value);
\t\tmap.insert(\"sha512sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha256sum", &map_value);
\t\tmap.insert(\"sha3sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha384sum", &map_value);
\t\tmap.insert(\"sha3-224sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha512sum", &map_value);
\t\tmap.insert(\"sha3-256sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha3sum", &map_value);
\t\tmap.insert(\"sha3-384sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha3-224sum", &map_value);
\t\tmap.insert(\"sha3-512sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha3-256sum", &map_value);
\t\tmap.insert(\"shake128sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha3-384sum", &map_value);
\t\tmap.insert(\"shake256sum\", ({krate}::uumain, {krate}::uu_app_common));\n\ phf_map.entry("sha3-512sum", &map_value);
", phf_map.entry("shake128sum", &map_value);
krate = krate phf_map.entry("shake256sum", &map_value);
) phf_map.entry("b2sum", &map_value);
.as_bytes(), phf_map.entry("b3sum", &map_value);
)
.unwrap();
tf.write_all( tf.write_all(
format!( format!(
"#[path=\"{dir}/test_{krate}.rs\"]\nmod test_{krate};\n", "#[path=\"{dir}/test_{krate}.rs\"]\nmod test_{krate};\n",
@ -150,17 +126,10 @@ pub fn main() {
) )
.as_bytes(), .as_bytes(),
) )
.unwrap() .unwrap();
} }
_ => { _ => {
mf.write_all( phf_map.entry(krate, &map_value);
format!(
"\tmap.insert(\"{krate}\", ({krate}::uumain, {krate}::uu_app));\n",
krate = krate
)
.as_bytes(),
)
.unwrap();
tf.write_all( tf.write_all(
format!( format!(
"#[path=\"{dir}/test_{krate}.rs\"]\nmod test_{krate};\n", "#[path=\"{dir}/test_{krate}.rs\"]\nmod test_{krate};\n",
@ -169,12 +138,12 @@ pub fn main() {
) )
.as_bytes(), .as_bytes(),
) )
.unwrap() .unwrap();
} }
} }
} }
write!(mf, "{}", phf_map.build()).unwrap();
mf.write_all(b"map\n}\n").unwrap(); mf.write_all(b"\n}\n").unwrap();
mf.flush().unwrap(); mf.flush().unwrap();
tf.flush().unwrap(); tf.flush().unwrap();
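The build.rs rewrite above replaces the runtime `HashMap` of utilities with a `phf` map generated at compile time by `phf_codegen`. Below is a minimal stand-alone sketch of that pattern; the value type is simplified to `&str` here, whereas the real script stores `(uumain, uu_app)` function pairs and writes the map into `uutils_map.rs`, which is then pulled in with `include!(concat!(env!("OUT_DIR"), ...))` as seen in `src/bin/uudoc.rs` further down.

```rust
// build.rs sketch: generate a static perfect-hash map with phf_codegen.
// Assumes phf / phf_codegen 0.10-style APIs; the keys and values here are
// placeholders, not the ones produced by the real build script.
use std::env;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;

fn main() {
    let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join("util_map.rs");
    let mut out = BufWriter::new(File::create(&out_path).unwrap());

    let mut map = phf_codegen::Map::<&str>::new();
    // `entry` takes the key and the *source text* of the value expression.
    map.entry("true", "\"uu_true\"");
    map.entry("false", "\"uu_false\"");

    writeln!(
        &mut out,
        "static UTIL_MAP: phf::Map<&'static str, &'static str> = {};",
        map.build()
    )
    .unwrap();
}
```

Because the map is built while the crate compiles, looking up a utility in the multicall binary needs no allocation or hashing setup at startup.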

View file

@@ -1 +0,0 @@
-msrv = "1.47.0"

deny.toml

@ -0,0 +1,95 @@
# spell-checker:ignore SSLeay RUSTSEC
# This section is considered when running `cargo deny check advisories`
# More documentation for the advisories section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
[advisories]
db-path = "~/.cargo/advisory-db"
db-urls = ["https://github.com/rustsec/advisory-db"]
vulnerability = "warn"
unmaintained = "warn"
yanked = "warn"
notice = "warn"
ignore = [
#"RUSTSEC-0000-0000",
]
# This section is considered when running `cargo deny check licenses`
# More documentation for the licenses section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
[licenses]
unlicensed = "deny"
allow = [
"MIT",
"Apache-2.0",
"ISC",
"BSD-2-Clause",
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause",
"CC0-1.0",
"MPL-2.0", # XXX considered copyleft?
]
copyleft = "deny"
allow-osi-fsf-free = "neither"
default = "deny"
confidence-threshold = 0.8
exceptions = [
{ allow = ["OpenSSL"], name = "ring" },
]
[[licenses.clarify]]
name = "ring"
# SPDX considers OpenSSL to encompass both the OpenSSL and SSLeay licenses
# https://spdx.org/licenses/OpenSSL.html
# ISC - Both BoringSSL and ring use this for their new files
# MIT - "Files in third_party/ have their own licenses, as described therein. The MIT
# license, for third_party/fiat, which, unlike other third_party directories, is
# compiled into non-test libraries, is included below."
# OpenSSL - Obviously
expression = "ISC AND MIT AND OpenSSL"
license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }]
# This section is considered when running `cargo deny check bans`.
# More documentation about the 'bans' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
[bans]
multiple-versions = "deny"
wildcards = "allow"
highlight = "all"
# For each duplicate dependency, indicate the name of the dependency which
# introduces it.
# spell-checker: disable
skip = [
# blake2d_simd
{ name = "arrayvec", version = "=0.7.2" },
# flimit/unix_socket
{ name = "cfg-if", version = "=0.1.10" },
# ordered-multimap
{ name = "hashbrown", version = "=0.9.1" },
# kernel32-sys
{ name = "winapi", version = "=0.2.8" },
# bindgen 0.59.2
{ name = "clap", version = "=2.34.0" },
{ name = "strsim", version = "=0.8.0" },
{ name = "textwrap", version = "=0.11.0" },
{ name = "cpp_common", version = "=0.4.0" },
# quickcheck
{ name = "env_logger", version = "=0.8.4" },
# cpp_*
{ name = "memchr", version = "=1.0.2" },
{ name = "quote", version = "=0.3.15" },
{ name = "unicode-xid", version = "=0.0.4" },
# exacl
{ name = "nix", version = "=0.21.0" },
]
# spell-checker: enable
# This section is considered when running `cargo deny check sources`.
# More documentation about the 'sources' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
[sources]
unknown-registry = "warn"
unknown-git = "warn"
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
allow-git = []

3
docs/.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
book
src/utils
src/SUMMARY.md

View file

@ -1,21 +0,0 @@
# spell-checker:ignore (vars/env) SPHINXOPTS SPHINXBUILD SPHINXPROJ SOURCEDIR BUILDDIR
# Minimal makefile for Sphinx documentation
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = uutils
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help GNUmakefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: GNUmakefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

View file

@@ -1,5 +1,4 @@
-UseGNU=gmake $*
-all:
-	@$(UseGNU)
-.DEFAULT:
-	@$(UseGNU)
+clean:
+	rm -rf book
+	rm -f src/SUMMARY.md
+	rm -f src/utils/*

View file

@ -1,28 +0,0 @@
.. print machine hardware name
====
arch
====
.. FIXME: this needs to be autogenerated somehow
--------
Synopsis
--------
``arch`` [OPTION]...
-----------
Description
-----------
``arch`` is an alias for ``uname -m``. They both print the machine hardware
name.
An exit code of zero indicates success, whereas anything else means failure.
For this program, a non-zero exit code generally means the user provided
invalid options.
-h, --help print a help menu for this program displaying accepted
options and arguments
-v, --version print the version number of this program

docs/book.toml

@ -0,0 +1,9 @@
[book]
authors = ["uutils contributors"]
language = "en"
multilingual = false
src = "src"
title = "uutils Documentation"
[output.html]
git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/src"

View file

@@ -40,6 +40,8 @@ TARGETS = [
"x86_64-linux-android",
# Solaris
"x86_64-sun-solaris",
+# Illumos
+"x86_64-unknown-illumos",
# WASM
"wasm32-wasi",
# Redox

View file

@ -1,187 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# uutils documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 5 23:20:18 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# spell-checker:ignore (words) howto htbp imgmath toctree todos uutilsdoc
import glob
import os
import re
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.imgmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'uutils'
copyright = '2017, uutils developers'
author = 'uutils developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# * take version from project "Cargo.toml"
version_file = open(os.path.join("..","Cargo.toml"), "r")
version_file_content = version_file.read()
v = re.search("^\s*version\s*=\s*\"([0-9.]+)\"", version_file_content, re.IGNORECASE | re.MULTILINE)
# The short X.Y version.
version = v.groups()[0]
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'uutilsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'uutils.tex', 'uutils Documentation',
'uutils developers', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = []
for name in glob.glob('*.rst'):
if name != 'index.rst':
desc = ''
with open(name) as f:
desc = f.readline().strip()
if desc.startswith('..'):
desc = desc[2:].strip()
else:
desc = ''
man_pages.append((
name[:-4], # source file without extension
name[:-4].replace('/', '-'), # output file
desc,
[author],
1
))
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'uutils', 'uutils Documentation',
author, 'uutils', 'A cross-platform implementation of GNU coreutils, written in Rust.',
'Miscellaneous'),
]

View file

@ -1,24 +0,0 @@
.. uutils documentation master file, created by
sphinx-quickstart on Tue Dec 5 23:20:18 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
..
spell-checker:ignore (directives) genindex maxdepth modindex toctree ; (misc) quickstart
Welcome to uutils' documentation!
=================================
.. toctree::
:maxdepth: 2
:caption: Contents:
arch
uutils
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

View file

@ -1,39 +0,0 @@
@setLocal
@ECHO OFF
rem spell-checker:ignore (vars/env) BUILDDIR SOURCEDIR SPHINXBUILD SPHINXOPTS SPHINXPROJ
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=uutils
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if ErrorLevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd

docs/src/contributing.md

@ -0,0 +1 @@
{{ #include ../../CONTRIBUTING.md }}

docs/src/index.md

@ -0,0 +1,20 @@
# uutils Coreutils Documentation
uutils is an attempt at writing universal (as in cross-platform) CLI
utilities in [Rust](https://www.rust-lang.org). It is available for
Linux, Windows, Mac and other platforms.
The API reference for `uucore`, the library of functions shared between
various utils, is hosted at
[docs.rs](https://docs.rs/uucore/latest/uucore/).
uutils is licensed under the [MIT License](https://github.com/uutils/coreutils/blob/main/LICENSE).
## Useful links
* [Releases](https://github.com/uutils/coreutils/releases)
* [Source Code](https://github.com/uutils/coreutils)
* [Issues](https://github.com/uutils/coreutils/issues)
* [Discord](https://discord.gg/wQVJbvJ)
> Note: This manual is automatically generated from the source code and is
> a work in progress.

docs/src/installation.md

@ -0,0 +1,3 @@
# Installation
{{#include ../../README.md:installation }}

docs/src/multicall.md

@ -0,0 +1,17 @@
# Multi-call binary
uutils includes a multi-call binary from which the utils can be invoked. This
reduces the overall binary size and can be useful for portability.
The first argument of the multi-call binary is the util to run, after which
the regular arguments to the util can be passed.
```shell
coreutils [util] [util options]
```
The `--help` flag will print a list of available utils.
## Example
```
coreutils ls -l
```
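For readers unfamiliar with the multi-call pattern this new page describes, a rough sketch of the dispatch idea follows. It is only an illustration with made-up `run_ls`/`run_cat` stubs; the real logic lives in `src/bin/coreutils.rs` and additionally handles prefixed binary names and the `completion` subcommand.

```rust
// Minimal multi-call dispatch sketch: pick the applet from argv[0] or argv[1].
use std::env;
use std::path::Path;

fn run_ls(args: &[String]) -> i32 { println!("ls called with {:?}", args); 0 }
fn run_cat(args: &[String]) -> i32 { println!("cat called with {:?}", args); 0 }

fn main() {
    let mut args: Vec<String> = env::args().collect();
    // Name the binary was invoked as, e.g. "ls" for a symlink to the multicall binary.
    let invoked = Path::new(&args[0])
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("")
        .to_string();

    // If invoked under the generic name, the first argument names the util instead.
    let util = if invoked == "coreutils" && args.len() > 1 {
        args.remove(1)
    } else {
        invoked
    };

    let code = match util.as_str() {
        "ls" => run_ls(&args[1..]),
        "cat" => run_cat(&args[1..]),
        other => {
            eprintln!("{}: util not found", other);
            1
        }
    };
    std::process::exit(code);
}
```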

View file

@ -0,0 +1,46 @@
:root {
--PASS: #44AF69;
--ERROR: #F8333C;
--FAIL: #F8333C;
--SKIP: #d3c994;
}
.PASS {
color: var(--PASS);
}
.ERROR {
color: var(--ERROR);
}
.FAIL {
color: var(--FAIL);
}
.SKIP {
color: var(--SKIP);
}
.testSummary {
display: inline-flex;
align-items: center;
justify-content: space-between;
width: 90%;
}
.progress {
width: 80%;
display: flex;
justify-content: right;
align-items: center;
}
.progress-bar {
height: 10px;
width: calc(100% - 15ch);
border-radius: 5px;
}
.result {
font-weight: bold;
width: 7ch;
display: inline-block;
}
.result-line {
margin: 8px;
}
.counts {
margin-right: 10px;
}

docs/src/test_coverage.js

@ -0,0 +1,82 @@
// spell-checker:ignore hljs
function progressBar(totals) {
const bar = document.createElement("div");
bar.className = "progress-bar";
let totalTests = 0;
for (const [key, value] of Object.entries(totals)) {
totalTests += value;
}
const passPercentage = Math.round(100 * totals["PASS"] / totalTests);
const skipPercentage = passPercentage + Math.round(100 * totals["SKIP"] / totalTests);
// The ternary expressions are used for some edge-cases where there are no failing test,
// but still a red (or beige) line shows up because of how CSS draws gradients.
bar.style = `background: linear-gradient(
to right,
var(--PASS) ${passPercentage}%`
+ ( passPercentage === 100 ? ", var(--PASS)" :
`, var(--SKIP) ${passPercentage}%,
var(--SKIP) ${skipPercentage}%`
)
+ (skipPercentage === 100 ? ")" : ", var(--FAIL) 0)");
const progress = document.createElement("div");
progress.className = "progress"
progress.innerHTML = `
<span class="counts">
<span class="PASS">${totals["PASS"]}</span>
/
<span class="SKIP">${totals["SKIP"]}</span>
/
<span class="FAIL">${totals["FAIL"] + totals["ERROR"]}</span>
</span>
`;
progress.appendChild(bar);
return progress
}
function parse_result(parent, obj) {
const totals = {
PASS: 0,
SKIP: 0,
FAIL: 0,
ERROR: 0,
};
for (const [category, content] of Object.entries(obj)) {
if (typeof content === "string") {
const p = document.createElement("p");
p.className = "result-line";
totals[content]++;
p.innerHTML = `<span class="result" style="color: var(--${content})">${content}</span> ${category}`;
parent.appendChild(p);
} else {
const categoryName = document.createElement("code");
categoryName.innerHTML = category;
categoryName.className = "hljs";
const details = document.createElement("details");
const subtotals = parse_result(details, content);
for (const [subtotal, count] of Object.entries(subtotals)) {
totals[subtotal] += count;
}
const summaryDiv = document.createElement("div");
summaryDiv.className = "testSummary";
summaryDiv.appendChild(categoryName);
summaryDiv.appendChild(progressBar(subtotals));
const summary = document.createElement("summary");
summary.appendChild(summaryDiv);
details.appendChild(summary);
parent.appendChild(details);
}
}
return totals;
}
fetch("https://raw.githubusercontent.com/uutils/coreutils-tracking/main/gnu-full-result.json")
.then((r) => r.json())
.then((obj) => {
let parent = document.getElementById("test-cov");
parse_result(parent, obj);
});

docs/src/test_coverage.md

@ -0,0 +1,19 @@
# GNU Test Coverage
uutils is actively tested against the GNU coreutils test suite. The results
below are automatically updated every day.
## Coverage per category
Click on the categories to see the names of the tests. Green indicates a passing
test, yellow indicates a skipped test and red means that the test either failed
or resulted in an error.
<link rel="stylesheet" href="test_coverage.css">
<script src="test_coverage.js"></script>
<div id="test-cov"></div>
## Progress over time
<img src="https://github.com/uutils/coreutils-tracking/blob/main/gnu-results.png?raw=true">

docs/theme/favicon.png (new binary file, 13 KiB)

docs/theme/head.hbs

@ -0,0 +1,16 @@
<style>
dd {
margin-bottom: 1em;
}
main {
position: relative;
}
.version {
position: absolute;
top: 1em;
right: 0;
}
dd > p {
margin-top: 0.2em;
}
</style>

View file

@ -1,24 +0,0 @@
.. run core utilities
======
uutils
======
.. FIXME: this needs to be autogenerated somehow
--------
Synopsis
--------
``uutils`` [OPTION]... [PROGRAM] [OPTION]... [ARGUMENTS]...
-----------
Description
-----------
``uutils`` is a program that contains other coreutils commands, somewhat
similar to Busybox.
--help, -h print a help menu for PROGRAM displaying accepted options and
arguments; if PROGRAM was not given, do the same but for this
program

5
renovate.json Normal file
View file

@ -0,0 +1,5 @@
{
"extends": [
"config:base"
]
}

View file

@ -5,11 +5,9 @@
// For the full copyright and license information, please view the LICENSE // For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code. // file that was distributed with this source code.
use clap::App; use clap::{Arg, Command};
use clap::Arg; use clap_complete::Shell;
use clap::Shell;
use std::cmp; use std::cmp;
use std::collections::hash_map::HashMap;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::ffi::OsString; use std::ffi::OsString;
use std::io::{self, Write}; use std::io::{self, Write};
@ -65,7 +63,7 @@ fn main() {
// * prefix/stem may be any string ending in a non-alphanumeric character // * prefix/stem may be any string ending in a non-alphanumeric character
let util_name = if let Some(util) = utils.keys().find(|util| { let util_name = if let Some(util) = utils.keys().find(|util| {
binary_as_util.ends_with(*util) binary_as_util.ends_with(*util)
&& !(&binary_as_util[..binary_as_util.len() - (*util).len()]) && !binary_as_util[..binary_as_util.len() - (*util).len()]
.ends_with(char::is_alphanumeric) .ends_with(char::is_alphanumeric)
}) { }) {
// prefixed util => replace 0th (aka, executable name) argument // prefixed util => replace 0th (aka, executable name) argument
@ -89,7 +87,7 @@ fn main() {
}; };
if util == "completion" { if util == "completion" {
gen_completions(args, utils); gen_completions(args, &utils);
} }
match utils.get(util) { match utils.get(util) {
@ -134,22 +132,22 @@ fn main() {
/// Prints completions for the utility in the first parameter for the shell in the second parameter to stdout /// Prints completions for the utility in the first parameter for the shell in the second parameter to stdout
fn gen_completions<T: uucore::Args>( fn gen_completions<T: uucore::Args>(
args: impl Iterator<Item = OsString>, args: impl Iterator<Item = OsString>,
util_map: UtilityMap<T>, util_map: &UtilityMap<T>,
) -> ! { ) -> ! {
let all_utilities: Vec<_> = std::iter::once("coreutils") let all_utilities: Vec<_> = std::iter::once("coreutils")
.chain(util_map.keys().copied()) .chain(util_map.keys().copied())
.collect(); .collect();
let matches = App::new("completion") let matches = Command::new("completion")
.about("Prints completions to stdout") .about("Prints completions to stdout")
.arg( .arg(
Arg::with_name("utility") Arg::new("utility")
.possible_values(&all_utilities) .possible_values(all_utilities)
.required(true), .required(true),
) )
.arg( .arg(
Arg::with_name("shell") Arg::new("shell")
.possible_values(&Shell::variants()) .possible_values(Shell::possible_values())
.required(true), .required(true),
) )
.get_matches_from(std::iter::once(OsString::from("completion")).chain(args)); .get_matches_from(std::iter::once(OsString::from("completion")).chain(args));
@ -157,7 +155,7 @@ fn gen_completions<T: uucore::Args>(
let utility = matches.value_of("utility").unwrap(); let utility = matches.value_of("utility").unwrap();
let shell = matches.value_of("shell").unwrap(); let shell = matches.value_of("shell").unwrap();
let mut app = if utility == "coreutils" { let mut command = if utility == "coreutils" {
gen_coreutils_app(util_map) gen_coreutils_app(util_map)
} else { } else {
util_map.get(utility).unwrap().1() util_map.get(utility).unwrap().1()
@ -165,15 +163,15 @@ fn gen_completions<T: uucore::Args>(
let shell: Shell = shell.parse().unwrap(); let shell: Shell = shell.parse().unwrap();
let bin_name = std::env::var("PROG_PREFIX").unwrap_or_default() + utility; let bin_name = std::env::var("PROG_PREFIX").unwrap_or_default() + utility;
app.gen_completions_to(bin_name, shell, &mut io::stdout()); clap_complete::generate(shell, &mut command, bin_name, &mut io::stdout());
io::stdout().flush().unwrap(); io::stdout().flush().unwrap();
process::exit(0); process::exit(0);
} }
fn gen_coreutils_app<T: uucore::Args>(util_map: UtilityMap<T>) -> App<'static, 'static> { fn gen_coreutils_app<T: uucore::Args>(util_map: &UtilityMap<T>) -> Command<'static> {
let mut app = App::new("coreutils"); let mut command = Command::new("coreutils");
for (_, (_, sub_app)) in util_map { for (_, (_, sub_app)) in util_map {
app = app.subcommand(sub_app()); command = command.subcommand(sub_app());
} }
app command
} }
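The diff above tracks the clap 2 → clap 3 migration: `App` becomes `Command`, `Arg::with_name` becomes `Arg::new`, and shell completions move from the built-in `gen_completions_to` to the separate `clap_complete` crate. A stand-alone sketch of the new completion flow is shown below; the `demo` command and its single flag are placeholders for this sketch, not the real per-utility `uu_app()` builders.

```rust
// Sketch of generating shell completions with clap 3 + clap_complete.
// Assumes clap = "3.1" and clap_complete = "3.1"-style APIs.
use clap::{Arg, Command};
use clap_complete::{generate, Shell};
use std::io;

fn build_cli() -> Command<'static> {
    Command::new("demo")
        .about("example command used only for this sketch")
        .arg(Arg::new("decode").short('d').long("decode").help("decode data"))
}

fn main() {
    let mut cmd = build_cli();
    // Write bash completions for `demo` to stdout; the other Shell variants
    // (Zsh, Fish, PowerShell, Elvish) work the same way.
    generate(Shell::Bash, &mut cmd, "demo", &mut io::stdout());
}
```

Redirecting that output into a file under the shell's completions directory (as the README snippet earlier shows for `ls`) makes the completions available to the shell.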

src/bin/uudoc.rs

@ -0,0 +1,211 @@
// This file is part of the uutils coreutils package.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore tldr
use clap::Command;
use std::ffi::OsString;
use std::fs::File;
use std::io::Cursor;
use std::io::{self, Read, Seek, Write};
use zip::ZipArchive;
include!(concat!(env!("OUT_DIR"), "/uutils_map.rs"));
fn main() -> io::Result<()> {
println!("Downloading tldr archive");
let mut zip_reader = ureq::get("https://tldr.sh/assets/tldr.zip")
.call()
.unwrap()
.into_reader();
let mut buffer = Vec::new();
zip_reader.read_to_end(&mut buffer).unwrap();
let mut tldr_zip = ZipArchive::new(Cursor::new(buffer)).unwrap();
let utils = util_map::<Box<dyn Iterator<Item = OsString>>>();
match std::fs::create_dir("docs/src/utils/") {
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()),
x => x,
}?;
let mut summary = File::create("docs/src/SUMMARY.md")?;
let _ = write!(
summary,
"# Summary\n\
\n\
[Introduction](index.md)\n\
* [Installation](installation.md)\n\
* [Contributing](contributing.md)\n\
* [GNU test coverage](test_coverage.md)\n\
\n\
# Reference\n\
* [Multi-call binary](multicall.md)\n",
);
let mut utils = utils.entries().collect::<Vec<_>>();
utils.sort();
for (&name, (_, command)) in utils {
if name == "[" {
continue;
}
let p = format!("docs/src/utils/{}.md", name);
if let Ok(f) = File::create(&p) {
write_markdown(f, &mut command(), name, &mut tldr_zip)?;
println!("Wrote to '{}'", p);
} else {
println!("Error writing to {}", p);
}
writeln!(summary, "* [{0}](utils/{0}.md)", name)?;
}
Ok(())
}
fn write_markdown(
mut w: impl Write,
command: &mut Command,
name: &str,
tldr_zip: &mut zip::ZipArchive<impl Read + Seek>,
) -> io::Result<()> {
write!(w, "# {}\n\n", name)?;
write_version(&mut w, command)?;
write_usage(&mut w, command, name)?;
write_description(&mut w, command)?;
write_options(&mut w, command)?;
write_examples(&mut w, name, tldr_zip)
}
fn write_version(w: &mut impl Write, command: &Command) -> io::Result<()> {
writeln!(
w,
"<div class=\"version\">version: {}</div>",
command.render_version().split_once(' ').unwrap().1
)
}
fn write_usage(w: &mut impl Write, command: &mut Command, name: &str) -> io::Result<()> {
writeln!(w, "\n```")?;
let mut usage: String = command
.render_usage()
.lines()
.skip(1)
.map(|l| l.trim())
.filter(|l| !l.is_empty())
.collect::<Vec<_>>()
.join("\n");
usage = usage.replace(uucore::execution_phrase(), name);
writeln!(w, "{}", usage)?;
writeln!(w, "```")
}
fn write_description(w: &mut impl Write, command: &Command) -> io::Result<()> {
if let Some(about) = command.get_long_about().or_else(|| command.get_about()) {
writeln!(w, "{}", about)
} else {
Ok(())
}
}
fn write_examples(
w: &mut impl Write,
name: &str,
tldr_zip: &mut zip::ZipArchive<impl Read + Seek>,
) -> io::Result<()> {
let content = if let Some(f) = get_zip_content(tldr_zip, &format!("pages/common/{}.md", name)) {
f
} else if let Some(f) = get_zip_content(tldr_zip, &format!("pages/linux/{}.md", name)) {
f
} else {
return Ok(());
};
writeln!(w, "## Examples")?;
writeln!(w)?;
for line in content.lines().skip_while(|l| !l.starts_with('-')) {
if let Some(l) = line.strip_prefix("- ") {
writeln!(w, "{}", l)?;
} else if line.starts_with('`') {
writeln!(w, "```shell\n{}\n```", line.trim_matches('`'))?;
} else if line.is_empty() {
writeln!(w)?;
} else {
println!("Not sure what to do with this line:");
println!("{}", line);
}
}
writeln!(w)?;
writeln!(
w,
"> The examples are provided by the [tldr-pages project](https://tldr.sh) under the [CC BY 4.0 License](https://github.com/tldr-pages/tldr/blob/main/LICENSE.md)."
)?;
writeln!(w, ">")?;
writeln!(
w,
"> Please note that, as uutils is a work in progress, some examples might fail."
)
}
fn get_zip_content(archive: &mut ZipArchive<impl Read + Seek>, name: &str) -> Option<String> {
let mut s = String::new();
archive.by_name(name).ok()?.read_to_string(&mut s).unwrap();
Some(s)
}
fn write_options(w: &mut impl Write, command: &Command) -> io::Result<()> {
writeln!(w, "<h2>Options</h2>")?;
write!(w, "<dl>")?;
for arg in command.get_arguments() {
write!(w, "<dt>")?;
let mut first = true;
for l in arg.get_long_and_visible_aliases().unwrap_or_default() {
if !first {
write!(w, ", ")?;
} else {
first = false;
}
write!(w, "<code>")?;
write!(w, "--{}", l)?;
if let Some(names) = arg.get_value_names() {
write!(
w,
"={}",
names
.iter()
.map(|x| format!("&lt;{}&gt;", x))
.collect::<Vec<_>>()
.join(" ")
)?;
}
write!(w, "</code>")?;
}
for s in arg.get_short_and_visible_aliases().unwrap_or_default() {
if !first {
write!(w, ", ")?;
} else {
first = false;
}
write!(w, "<code>")?;
write!(w, "-{}", s)?;
if let Some(names) = arg.get_value_names() {
write!(
w,
" {}",
names
.iter()
.map(|x| format!("&lt;{}&gt;", x))
.collect::<Vec<_>>()
.join(" ")
)?;
}
write!(w, "</code>")?;
}
writeln!(w, "</dt>")?;
writeln!(
w,
"<dd>\n\n{}\n\n</dd>",
arg.get_help().unwrap_or_default().replace('\n', "<br />")
)?;
}
writeln!(w, "</dl>\n")
}

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_arch" name = "uu_arch"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "arch ~ (uutils) display machine architecture" description = "arch ~ (uutils) display machine architecture"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/arch" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/arch"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,10 +15,9 @@ edition = "2018"
path = "src/arch.rs" path = "src/arch.rs"
[dependencies] [dependencies]
platform-info = "0.1" platform-info = "0.2"
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
uucore = { version=">=0.0.10", package="uucore", path="../../uucore" } uucore = { version=">=0.0.11", package="uucore", path="../../uucore" }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
[[bin]] [[bin]]
name = "arch" name = "arch"

src/uu/arch/LICENSE (symbolic link)

@ -0,0 +1 @@
../../../LICENSE

View file

@ -8,13 +8,13 @@
use platform_info::*; use platform_info::*;
use clap::{crate_version, App}; use clap::{crate_version, Command};
use uucore::error::{FromIo, UResult}; use uucore::error::{FromIo, UResult};
static ABOUT: &str = "Display machine architecture"; static ABOUT: &str = "Display machine architecture";
static SUMMARY: &str = "Determine architecture name for current machine."; static SUMMARY: &str = "Determine architecture name for current machine.";
#[uucore_procs::gen_uumain] #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> { pub fn uumain(args: impl uucore::Args) -> UResult<()> {
uu_app().get_matches_from(args); uu_app().get_matches_from(args);
@ -23,9 +23,10 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
Ok(()) Ok(())
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.version(crate_version!()) .version(crate_version!())
.about(ABOUT) .about(ABOUT)
.after_help(SUMMARY) .after_help(SUMMARY)
.infer_long_args(true)
} }

View file

@@ -1 +1 @@
-uucore_procs::main!(uu_arch);
+uucore::bin!(uu_arch);

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_base32" name = "uu_base32"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "base32 ~ (uutils) decode/encode input (base32-encoding)" description = "base32 ~ (uutils) decode/encode input (base32-encoding)"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/base32" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/base32"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,14 +15,9 @@ edition = "2018"
path = "src/base32.rs" path = "src/base32.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features = ["encoding"] } uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features = ["encoding"] }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
[[bin]] [[bin]]
name = "base32" name = "base32"
path = "src/main.rs" path = "src/main.rs"
[package.metadata.cargo-udeps.ignore]
# Necessary for "make all"
normal = ["uucore_procs"]

src/uu/base32/LICENSE (symbolic link)

@ -0,0 +1 @@
../../../LICENSE

View file

@ -7,31 +7,28 @@
use std::io::{stdin, Read}; use std::io::{stdin, Read};
use clap::App; use clap::Command;
use uucore::{encoding::Format, error::UResult}; use uucore::{encoding::Format, error::UResult};
pub mod base_common; pub mod base_common;
static ABOUT: &str = " static ABOUT: &str = "\
With no FILE, or when FILE is -, read standard input. With no FILE, or when FILE is -, read standard input.
The data are encoded as described for the base32 alphabet in RFC The data are encoded as described for the base32 alphabet in RFC
4648. When decoding, the input may contain newlines in addition 4648. When decoding, the input may contain newlines in addition
to the bytes of the formal base32 alphabet. Use --ignore-garbage to the bytes of the formal base32 alphabet. Use --ignore-garbage
to attempt to recover from any other non-alphabet bytes in the to attempt to recover from any other non-alphabet bytes in the
encoded stream. encoded stream.
"; ";
fn usage() -> String { const USAGE: &str = "{} [OPTION]... [FILE]";
format!("{0} [OPTION]... [FILE]", uucore::execution_phrase())
}
#[uucore_procs::gen_uumain] #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> { pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let format = Format::Base32; let format = Format::Base32;
let usage = usage();
let config: base_common::Config = base_common::parse_base_cmd_args(args, ABOUT, &usage)?; let config: base_common::Config = base_common::parse_base_cmd_args(args, ABOUT, USAGE)?;
// Create a reference to stdin so we can return a locked stdin from // Create a reference to stdin so we can return a locked stdin from
// parse_base_cmd_args // parse_base_cmd_args
@ -47,6 +44,6 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
) )
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
base_common::base_app(ABOUT) base_common::base_app(ABOUT, USAGE)
} }
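One detail of the base32 change above (and the matching base64 change further down): the usage string is now a `USAGE` constant with `{}` placeholders, rendered through `uucore::format_usage` instead of a runtime `format!` call. The helper below is only a rough stand-in for that substitution so the idea is visible in isolation; it is not the real `uucore` implementation, which may differ in details such as indexed placeholders.

```rust
// Illustrative only: substitute every "{}" placeholder in a usage template
// with the name the program was invoked as.
const USAGE: &str = "{} [OPTION]... [FILE]";

fn fill_usage(template: &str, util_name: &str) -> String {
    template.replace("{}", util_name)
}

fn main() {
    // Prints "base32 [OPTION]... [FILE]"
    println!("{}", fill_usage(USAGE, "base32"));
}
```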

View file

@ -12,13 +12,13 @@ use std::io::{stdout, Read, Write};
use uucore::display::Quotable; use uucore::display::Quotable;
use uucore::encoding::{wrap_print, Data, Format}; use uucore::encoding::{wrap_print, Data, Format};
use uucore::error::{FromIo, UResult, USimpleError, UUsageError}; use uucore::error::{FromIo, UResult, USimpleError, UUsageError};
use uucore::InvalidEncodingHandling; use uucore::{format_usage, InvalidEncodingHandling};
use std::fs::File; use std::fs::File;
use std::io::{BufReader, Stdin}; use std::io::{BufReader, Stdin};
use std::path::Path; use std::path::Path;
use clap::{crate_version, App, Arg}; use clap::{crate_version, Arg, Command};
pub static BASE_CMD_PARSE_ERROR: i32 = 1; pub static BASE_CMD_PARSE_ERROR: i32 = 1;
@ -38,7 +38,7 @@ pub mod options {
} }
impl Config { impl Config {
pub fn from(options: &clap::ArgMatches) -> UResult<Config> { pub fn from(options: &clap::ArgMatches) -> UResult<Self> {
let file: Option<String> = match options.values_of(options::FILE) { let file: Option<String> = match options.values_of(options::FILE) {
Some(mut values) => { Some(mut values) => {
let name = values.next().unwrap(); let name = values.next().unwrap();
@ -76,7 +76,7 @@ impl Config {
}) })
.transpose()?; .transpose()?;
Ok(Config { Ok(Self {
decode: options.is_present(options::DECODE), decode: options.is_present(options::DECODE),
ignore_garbage: options.is_present(options::IGNORE_GARBAGE), ignore_garbage: options.is_present(options::IGNORE_GARBAGE),
wrap_cols: cols, wrap_cols: cols,
@ -86,33 +86,35 @@ impl Config {
} }
pub fn parse_base_cmd_args(args: impl uucore::Args, about: &str, usage: &str) -> UResult<Config> { pub fn parse_base_cmd_args(args: impl uucore::Args, about: &str, usage: &str) -> UResult<Config> {
let app = base_app(about).usage(usage); let command = base_app(about, usage);
let arg_list = args let arg_list = args
.collect_str(InvalidEncodingHandling::ConvertLossy) .collect_str(InvalidEncodingHandling::ConvertLossy)
.accept_any(); .accept_any();
Config::from(&app.get_matches_from(arg_list)) Config::from(&command.get_matches_from(arg_list))
} }
pub fn base_app<'a>(about: &'a str) -> App<'static, 'a> { pub fn base_app<'a>(about: &'a str, usage: &'a str) -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.version(crate_version!()) .version(crate_version!())
.about(about) .about(about)
.override_usage(format_usage(usage))
.infer_long_args(true)
// Format arguments. // Format arguments.
.arg( .arg(
Arg::with_name(options::DECODE) Arg::new(options::DECODE)
.short("d") .short('d')
.long(options::DECODE) .long(options::DECODE)
.help("decode data"), .help("decode data"),
) )
.arg( .arg(
Arg::with_name(options::IGNORE_GARBAGE) Arg::new(options::IGNORE_GARBAGE)
.short("i") .short('i')
.long(options::IGNORE_GARBAGE) .long(options::IGNORE_GARBAGE)
.help("when decoding, ignore non-alphabetic characters"), .help("when decoding, ignore non-alphabetic characters"),
) )
.arg( .arg(
Arg::with_name(options::WRAP) Arg::new(options::WRAP)
.short("w") .short('w')
.long(options::WRAP) .long(options::WRAP)
.takes_value(true) .takes_value(true)
.help( .help(
@ -121,7 +123,7 @@ pub fn base_app<'a>(about: &'a str) -> App<'static, 'a> {
) )
// "multiple" arguments are used to check whether there is more than one // "multiple" arguments are used to check whether there is more than one
// file passed in. // file passed in.
.arg(Arg::with_name(options::FILE).index(1).multiple(true)) .arg(Arg::new(options::FILE).index(1).multiple_occurrences(true))
} }
pub fn get_input<'a>(config: &Config, stdin_ref: &'a Stdin) -> UResult<Box<dyn Read + 'a>> { pub fn get_input<'a>(config: &Config, stdin_ref: &'a Stdin) -> UResult<Box<dyn Read + 'a>> {
@ -152,7 +154,7 @@ pub fn handle_input<R: Read>(
if !decode { if !decode {
match data.encode() { match data.encode() {
Ok(s) => { Ok(s) => {
wrap_print(&data, s); wrap_print(&data, &s);
Ok(()) Ok(())
} }
Err(_) => Err(USimpleError::new( Err(_) => Err(USimpleError::new(

View file

@@ -1 +1 @@
-uucore_procs::main!(uu_base32);
+uucore::bin!(uu_base32);

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_base64" name = "uu_base64"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "base64 ~ (uutils) decode/encode input (base64-encoding)" description = "base64 ~ (uutils) decode/encode input (base64-encoding)"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/base64" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/base64"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,15 +15,9 @@ edition = "2018"
path = "src/base64.rs" path = "src/base64.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features = ["encoding"] }
uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features = ["encoding"] }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
uu_base32 = { version=">=0.0.8", package="uu_base32", path="../base32"} uu_base32 = { version=">=0.0.8", package="uu_base32", path="../base32"}
[[bin]] [[bin]]
name = "base64" name = "base64"
path = "src/main.rs" path = "src/main.rs"
[package.metadata.cargo-udeps.ignore]
# Necessary for "make all"
normal = ["uucore_procs"]

src/uu/base64/LICENSE (symbolic link)

@ -0,0 +1 @@
../../../LICENSE

View file

@ -13,26 +13,23 @@ use uucore::{encoding::Format, error::UResult};
use std::io::{stdin, Read}; use std::io::{stdin, Read};
static ABOUT: &str = " static ABOUT: &str = "\
With no FILE, or when FILE is -, read standard input. With no FILE, or when FILE is -, read standard input.
The data are encoded as described for the base64 alphabet in RFC The data are encoded as described for the base64 alphabet in RFC
3548. When decoding, the input may contain newlines in addition 3548. When decoding, the input may contain newlines in addition
to the bytes of the formal base64 alphabet. Use --ignore-garbage to the bytes of the formal base64 alphabet. Use --ignore-garbage
to attempt to recover from any other non-alphabet bytes in the to attempt to recover from any other non-alphabet bytes in the
encoded stream. encoded stream.
"; ";
fn usage() -> String { const USAGE: &str = "{0} [OPTION]... [FILE]";
format!("{0} [OPTION]... [FILE]", uucore::execution_phrase())
}
#[uucore_procs::gen_uumain] #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> { pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let format = Format::Base64; let format = Format::Base64;
let usage = usage();
let config: base_common::Config = base_common::parse_base_cmd_args(args, ABOUT, &usage)?; let config: base_common::Config = base_common::parse_base_cmd_args(args, ABOUT, USAGE)?;
// Create a reference to stdin so we can return a locked stdin from // Create a reference to stdin so we can return a locked stdin from
// parse_base_cmd_args // parse_base_cmd_args

View file

@@ -1 +1 @@
-uucore_procs::main!(uu_base64);
+uucore::bin!(uu_base64);

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_basename" name = "uu_basename"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "basename ~ (uutils) display PATHNAME with leading directory components removed" description = "basename ~ (uutils) display PATHNAME with leading directory components removed"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/basename" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/basename"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,14 +15,9 @@ edition = "2018"
path = "src/basename.rs" path = "src/basename.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
uucore = { version=">=0.0.10", package="uucore", path="../../uucore" } uucore = { version=">=0.0.11", package="uucore", path="../../uucore" }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
[[bin]] [[bin]]
name = "basename" name = "basename"
path = "src/main.rs" path = "src/main.rs"
[package.metadata.cargo-udeps.ignore]
# Necessary for "make all"
normal = ["uucore_procs"]

src/uu/basename/LICENSE (symbolic link)

@ -0,0 +1 @@
../../../LICENSE

View file

@ -7,23 +7,17 @@
// spell-checker:ignore (ToDO) fullname // spell-checker:ignore (ToDO) fullname
#[macro_use] use clap::{crate_version, Arg, Command};
extern crate uucore;
use clap::{crate_version, App, Arg};
use std::path::{is_separator, PathBuf}; use std::path::{is_separator, PathBuf};
use uucore::InvalidEncodingHandling; use uucore::display::Quotable;
use uucore::error::{UResult, UUsageError};
use uucore::{format_usage, InvalidEncodingHandling};
static SUMMARY: &str = "Print NAME with any leading directory components removed static SUMMARY: &str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX"; If specified, also remove a trailing SUFFIX";
fn usage() -> String { const USAGE: &str = "{} NAME [SUFFIX]
format!( {} OPTION... NAME...";
"{0} NAME [SUFFIX]
{0} OPTION... NAME...",
uucore::execution_phrase()
)
}
pub mod options { pub mod options {
pub static MULTIPLE: &str = "multiple"; pub static MULTIPLE: &str = "multiple";
@ -32,24 +26,19 @@ pub mod options {
pub static ZERO: &str = "zero"; pub static ZERO: &str = "zero";
} }
pub fn uumain(args: impl uucore::Args) -> i32 { #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let args = args let args = args
.collect_str(InvalidEncodingHandling::ConvertLossy) .collect_str(InvalidEncodingHandling::ConvertLossy)
.accept_any(); .accept_any();
let usage = usage();
// //
// Argument parsing // Argument parsing
// //
let matches = uu_app().usage(&usage[..]).get_matches_from(args); let matches = uu_app().get_matches_from(args);
// too few arguments // too few arguments
if !matches.is_present(options::NAME) { if !matches.is_present(options::NAME) {
crash!( return Err(UUsageError::new(1, "missing operand".to_string()));
1,
"{1}\nTry '{0} --help' for more information.",
uucore::execution_phrase(),
"missing operand"
);
} }
let opt_suffix = matches.is_present(options::SUFFIX); let opt_suffix = matches.is_present(options::SUFFIX);
@ -58,12 +47,18 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
let multiple_paths = opt_suffix || opt_multiple; let multiple_paths = opt_suffix || opt_multiple;
// too many arguments // too many arguments
if !multiple_paths && matches.occurrences_of(options::NAME) > 2 { if !multiple_paths && matches.occurrences_of(options::NAME) > 2 {
crash!( return Err(UUsageError::new(
1, 1,
"extra operand '{1}'\nTry '{0} --help' for more information.", format!(
uucore::execution_phrase(), "extra operand {}",
matches.values_of(options::NAME).unwrap().nth(2).unwrap() matches
); .values_of(options::NAME)
.unwrap()
.nth(2)
.unwrap()
.quote()
),
));
} }
let suffix = if opt_suffix { let suffix = if opt_suffix {
@ -89,30 +84,36 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
print!("{}{}", basename(path, suffix), line_ending); print!("{}{}", basename(path, suffix), line_ending);
} }
0 Ok(())
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.version(crate_version!()) .version(crate_version!())
.about(SUMMARY) .about(SUMMARY)
.override_usage(format_usage(USAGE))
.infer_long_args(true)
.arg( .arg(
Arg::with_name(options::MULTIPLE) Arg::new(options::MULTIPLE)
.short("a") .short('a')
.long(options::MULTIPLE) .long(options::MULTIPLE)
.help("support multiple arguments and treat each as a NAME"), .help("support multiple arguments and treat each as a NAME"),
) )
.arg(Arg::with_name(options::NAME).multiple(true).hidden(true))
.arg( .arg(
Arg::with_name(options::SUFFIX) Arg::new(options::NAME)
.short("s") .multiple_occurrences(true)
.hide(true),
)
.arg(
Arg::new(options::SUFFIX)
.short('s')
.long(options::SUFFIX) .long(options::SUFFIX)
.value_name("SUFFIX") .value_name("SUFFIX")
.help("remove a trailing SUFFIX; implies -a"), .help("remove a trailing SUFFIX; implies -a"),
) )
.arg( .arg(
Arg::with_name(options::ZERO) Arg::new(options::ZERO)
.short("z") .short('z')
.long(options::ZERO) .long(options::ZERO)
.help("end each output line with NUL, not newline"), .help("end each output line with NUL, not newline"),
) )
@ -131,21 +132,15 @@ fn basename(fullname: &str, suffix: &str) -> String {
// Convert to path buffer and get last path component // Convert to path buffer and get last path component
let pb = PathBuf::from(path); let pb = PathBuf::from(path);
match pb.components().last() { match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix), Some(c) => {
let name = c.as_os_str().to_str().unwrap();
if name == suffix {
name.to_string()
} else {
name.strip_suffix(suffix).unwrap_or(name).to_string()
}
}
None => "".to_owned(), None => "".to_owned(),
} }
} }
// can be replaced with strip_suffix once MSRV is 1.45
#[allow(clippy::manual_strip)]
fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
}
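The basename change above drops the manual MSRV-1.45 workaround in favor of std's `str::strip_suffix`, while still keeping a name that is exactly the suffix intact. A minimal standalone sketch of that behavior, using only the standard library (the file names below are illustrative):

fn strip_basename_suffix(name: &str, suffix: &str) -> String {
    if name == suffix {
        // a name identical to the suffix is kept whole, matching the check in the diff
        name.to_string()
    } else {
        name.strip_suffix(suffix).unwrap_or(name).to_string()
    }
}

fn main() {
    assert_eq!(strip_basename_suffix("archive.tar", ".tar"), "archive");
    assert_eq!(strip_basename_suffix(".tar", ".tar"), ".tar");
    assert_eq!(strip_basename_suffix("readme", ".tar"), "readme");
}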

View file

@ -1 +1 @@
uucore_procs::main!(uu_basename); uucore::bin!(uu_basename);
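Taken together, the basename hunks show the error-handling shape this commit converges on: `uumain` returns `UResult<()>` under the `#[uucore::main]` attribute, and argument problems become `UUsageError`/`USimpleError` values instead of `crash!` plus a manual exit code. A rough sketch of that shape, mirroring only calls visible in the diff; the option name `NAME` and the operand limit are placeholders, and in a real utility the actual `fn main` still comes from the `uucore::bin!` line in `main.rs` shown above:

use clap::{Arg, Command};
use uucore::error::{UResult, USimpleError, UUsageError};

#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
    let matches = Command::new(uucore::util_name())
        .arg(Arg::new("NAME").multiple_occurrences(true))
        .get_matches_from(args);

    if !matches.is_present("NAME") {
        // usage errors exit with status 1 and carry the "Try --help" hint the old crash! text spelled out
        return Err(UUsageError::new(1, "missing operand".to_string()));
    }
    if matches.occurrences_of("NAME") > 2 {
        // plain error without the usage hint
        return Err(USimpleError::new(
            1,
            format!(
                "extra operand '{}'",
                matches.values_of("NAME").unwrap().nth(2).unwrap()
            ),
        ));
    }
    Ok(())
}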

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_basenc" name = "uu_basenc"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "basenc ~ (uutils) decode/encode input" description = "basenc ~ (uutils) decode/encode input"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/basenc" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/basenc"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,15 +15,10 @@ edition = "2018"
path = "src/basenc.rs" path = "src/basenc.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features = ["encoding"] } uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features = ["encoding"] }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
uu_base32 = { version=">=0.0.8", package="uu_base32", path="../base32"} uu_base32 = { version=">=0.0.8", package="uu_base32", path="../base32"}
[[bin]] [[bin]]
name = "basenc" name = "basenc"
path = "src/main.rs" path = "src/main.rs"
[package.metadata.cargo-udeps.ignore]
# Necessary for "make all"
normal = ["uucore_procs"]

1
src/uu/basenc/LICENSE Symbolic link
View file

@ -0,0 +1 @@
../../../LICENSE

View file

@ -8,7 +8,7 @@
//spell-checker:ignore (args) lsbf msbf //spell-checker:ignore (args) lsbf msbf
use clap::{App, Arg}; use clap::{Arg, Command};
use uu_base32::base_common::{self, Config, BASE_CMD_PARSE_ERROR}; use uu_base32::base_common::{self, Config, BASE_CMD_PARSE_ERROR};
use uucore::{ use uucore::{
@ -19,12 +19,12 @@ use uucore::{
use std::io::{stdin, Read}; use std::io::{stdin, Read};
static ABOUT: &str = " static ABOUT: &str = "\
With no FILE, or when FILE is -, read standard input. With no FILE, or when FILE is -, read standard input.
When decoding, the input may contain newlines in addition to the bytes of When decoding, the input may contain newlines in addition to the bytes of
the formal alphabet. Use --ignore-garbage to attempt to recover the formal alphabet. Use --ignore-garbage to attempt to recover
from any other non-alphabet bytes in the encoded stream. from any other non-alphabet bytes in the encoded stream.
"; ";
const ENCODINGS: &[(&str, Format)] = &[ const ENCODINGS: &[(&str, Format)] = &[
@ -36,26 +36,20 @@ const ENCODINGS: &[(&str, Format)] = &[
("base2lsbf", Format::Base2Lsbf), ("base2lsbf", Format::Base2Lsbf),
("base2msbf", Format::Base2Msbf), ("base2msbf", Format::Base2Msbf),
("z85", Format::Z85), ("z85", Format::Z85),
// common abbreviations. TODO: once we have clap 3.0 we can use `AppSettings::InferLongArgs` to get all abbreviations automatically
("base2l", Format::Base2Lsbf),
("base2m", Format::Base2Msbf),
]; ];
fn usage() -> String { const USAGE: &str = "{} [OPTION]... [FILE]";
format!("{0} [OPTION]... [FILE]", uucore::execution_phrase())
}
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
let mut app = base_common::base_app(ABOUT); let mut command = base_common::base_app(ABOUT, USAGE);
for encoding in ENCODINGS { for encoding in ENCODINGS {
app = app.arg(Arg::with_name(encoding.0).long(encoding.0)); command = command.arg(Arg::new(encoding.0).long(encoding.0));
} }
app command
} }
fn parse_cmd_args(args: impl uucore::Args) -> UResult<(Config, Format)> { fn parse_cmd_args(args: impl uucore::Args) -> UResult<(Config, Format)> {
let usage = usage(); let matches = uu_app().get_matches_from(
let matches = uu_app().usage(&usage[..]).get_matches_from(
args.collect_str(InvalidEncodingHandling::ConvertLossy) args.collect_str(InvalidEncodingHandling::ConvertLossy)
.accept_any(), .accept_any(),
); );
@ -68,7 +62,7 @@ fn parse_cmd_args(args: impl uucore::Args) -> UResult<(Config, Format)> {
Ok((config, format)) Ok((config, format))
} }
#[uucore_procs::gen_uumain] #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> { pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let (config, format) = parse_cmd_args(args)?; let (config, format) = parse_cmd_args(args)?;
// Create a reference to stdin so we can return a locked stdin from // Create a reference to stdin so we can return a locked stdin from

View file

@ -1 +1 @@
uucore_procs::main!(uu_basenc); uucore::bin!(uu_basenc);
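The basenc change above is typical of the clap 2 → 3 migration across this commit: `App` becomes `Command<'a>`, `Arg::with_name`/`short("z")` become `Arg::new`/`short('z')`, and `.infer_long_args(true)` replaces hand-written abbreviation aliases such as "base2l"/"base2m". A small self-contained sketch of the new builder style; the program and option names are placeholders:

use clap::{crate_version, Arg, Command};

fn example_app<'a>() -> Command<'a> {
    Command::new("example")
        .version(crate_version!())
        .about("clap 3 builder style used after this commit")
        // unambiguous prefixes of long options are accepted automatically
        .infer_long_args(true)
        .arg(
            Arg::new("zero")
                .short('z') // shorts are chars in clap 3, not &str
                .long("zero")
                .help("end each output line with NUL, not newline"),
        )
        .arg(Arg::new("FILE").multiple_occurrences(true).hide(true))
}

fn main() {
    // "--ze" is accepted as an unambiguous prefix of "--zero"
    let matches = example_app().get_matches_from(["example", "--ze", "a.txt"]);
    assert!(matches.is_present("zero"));
    assert_eq!(matches.values_of("FILE").unwrap().count(), 1);
}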

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_cat" name = "uu_cat"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "cat ~ (uutils) concatenate and display input" description = "cat ~ (uutils) concatenate and display input"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/cat" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/cat"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,18 +15,14 @@ edition = "2018"
path = "src/cat.rs" path = "src/cat.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
thiserror = "1.0" thiserror = "1.0"
atty = "0.2" atty = "0.2"
uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features=["fs", "pipes"] } uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features=["fs", "pipes"] }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
unix_socket = "0.5.0" unix_socket = "0.5.0"
nix = "0.20.0" nix = "0.23.1"
[target.'cfg(windows)'.dependencies]
winapi-util = "0.1.5"
[[bin]] [[bin]]
name = "cat" name = "cat"

1
src/uu/cat/LICENSE Symbolic link
View file

@ -0,0 +1 @@
../../../LICENSE

View file

@ -14,12 +14,13 @@
extern crate unix_socket; extern crate unix_socket;
// last synced with: cat (GNU coreutils) 8.13 // last synced with: cat (GNU coreutils) 8.13
use clap::{crate_version, App, Arg}; use clap::{crate_version, Arg, Command};
use std::fs::{metadata, File}; use std::fs::{metadata, File};
use std::io::{self, Read, Write}; use std::io::{self, Read, Write};
use thiserror::Error; use thiserror::Error;
use uucore::display::Quotable; use uucore::display::Quotable;
use uucore::error::UResult; use uucore::error::UResult;
use uucore::fs::FileInformation;
#[cfg(unix)] #[cfg(unix)]
use std::os::unix::io::AsRawFd; use std::os::unix::io::AsRawFd;
@ -35,10 +36,10 @@ use std::net::Shutdown;
use std::os::unix::fs::FileTypeExt; use std::os::unix::fs::FileTypeExt;
#[cfg(unix)] #[cfg(unix)]
use unix_socket::UnixStream; use unix_socket::UnixStream;
use uucore::InvalidEncodingHandling; use uucore::{format_usage, InvalidEncodingHandling};
static NAME: &str = "cat"; static NAME: &str = "cat";
static SYNTAX: &str = "[OPTION]... [FILE]..."; static USAGE: &str = "{} [OPTION]... [FILE]...";
static SUMMARY: &str = "Concatenate FILE(s), or standard input, to standard output static SUMMARY: &str = "Concatenate FILE(s), or standard input, to standard output
With no FILE, or when FILE is -, read standard input."; With no FILE, or when FILE is -, read standard input.";
@ -181,7 +182,7 @@ mod options {
pub static SHOW_NONPRINTING: &str = "show-nonprinting"; pub static SHOW_NONPRINTING: &str = "show-nonprinting";
} }
#[uucore_procs::gen_uumain] #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> { pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let args = args let args = args
.collect_str(InvalidEncodingHandling::Ignore) .collect_str(InvalidEncodingHandling::Ignore)
@ -235,67 +236,72 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
show_tabs, show_tabs,
squeeze_blank, squeeze_blank,
}; };
cat_files(files, &options) cat_files(&files, &options)
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.name(NAME) .name(NAME)
.version(crate_version!()) .version(crate_version!())
.usage(SYNTAX) .override_usage(format_usage(USAGE))
.about(SUMMARY) .about(SUMMARY)
.arg(Arg::with_name(options::FILE).hidden(true).multiple(true)) .infer_long_args(true)
.arg( .arg(
Arg::with_name(options::SHOW_ALL) Arg::new(options::FILE)
.short("A") .hide(true)
.multiple_occurrences(true),
)
.arg(
Arg::new(options::SHOW_ALL)
.short('A')
.long(options::SHOW_ALL) .long(options::SHOW_ALL)
.help("equivalent to -vET"), .help("equivalent to -vET"),
) )
.arg( .arg(
Arg::with_name(options::NUMBER_NONBLANK) Arg::new(options::NUMBER_NONBLANK)
.short("b") .short('b')
.long(options::NUMBER_NONBLANK) .long(options::NUMBER_NONBLANK)
.help("number nonempty output lines, overrides -n") .help("number nonempty output lines, overrides -n")
.overrides_with(options::NUMBER), .overrides_with(options::NUMBER),
) )
.arg( .arg(
Arg::with_name(options::SHOW_NONPRINTING_ENDS) Arg::new(options::SHOW_NONPRINTING_ENDS)
.short("e") .short('e')
.help("equivalent to -vE"), .help("equivalent to -vE"),
) )
.arg( .arg(
Arg::with_name(options::SHOW_ENDS) Arg::new(options::SHOW_ENDS)
.short("E") .short('E')
.long(options::SHOW_ENDS) .long(options::SHOW_ENDS)
.help("display $ at end of each line"), .help("display $ at end of each line"),
) )
.arg( .arg(
Arg::with_name(options::NUMBER) Arg::new(options::NUMBER)
.short("n") .short('n')
.long(options::NUMBER) .long(options::NUMBER)
.help("number all output lines"), .help("number all output lines"),
) )
.arg( .arg(
Arg::with_name(options::SQUEEZE_BLANK) Arg::new(options::SQUEEZE_BLANK)
.short("s") .short('s')
.long(options::SQUEEZE_BLANK) .long(options::SQUEEZE_BLANK)
.help("suppress repeated empty output lines"), .help("suppress repeated empty output lines"),
) )
.arg( .arg(
Arg::with_name(options::SHOW_NONPRINTING_TABS) Arg::new(options::SHOW_NONPRINTING_TABS)
.short("t") .short('t')
.long(options::SHOW_NONPRINTING_TABS) .long(options::SHOW_NONPRINTING_TABS)
.help("equivalent to -vT"), .help("equivalent to -vT"),
) )
.arg( .arg(
Arg::with_name(options::SHOW_TABS) Arg::new(options::SHOW_TABS)
.short("T") .short('T')
.long(options::SHOW_TABS) .long(options::SHOW_TABS)
.help("display TAB characters at ^I"), .help("display TAB characters at ^I"),
) )
.arg( .arg(
Arg::with_name(options::SHOW_NONPRINTING) Arg::new(options::SHOW_NONPRINTING)
.short("v") .short('v')
.long(options::SHOW_NONPRINTING) .long(options::SHOW_NONPRINTING)
.help("use ^ and M- notation, except for LF (\\n) and TAB (\\t)"), .help("use ^ and M- notation, except for LF (\\n) and TAB (\\t)"),
) )
@ -317,18 +323,17 @@ fn cat_path(
path: &str, path: &str,
options: &OutputOptions, options: &OutputOptions,
state: &mut OutputState, state: &mut OutputState,
#[cfg(unix)] out_info: &nix::sys::stat::FileStat, out_info: Option<&FileInformation>,
#[cfg(windows)] out_info: &winapi_util::file::Information,
) -> CatResult<()> { ) -> CatResult<()> {
if path == "-" {
let stdin = io::stdin();
let mut handle = InputHandle {
reader: stdin,
is_interactive: atty::is(atty::Stream::Stdin),
};
return cat_handle(&mut handle, options, state);
}
match get_input_type(path)? { match get_input_type(path)? {
InputType::StdIn => {
let stdin = io::stdin();
let mut handle = InputHandle {
reader: stdin,
is_interactive: atty::is(atty::Stream::Stdin),
};
cat_handle(&mut handle, options, state)
}
InputType::Directory => Err(CatError::IsDirectory), InputType::Directory => Err(CatError::IsDirectory),
#[cfg(unix)] #[cfg(unix)]
InputType::Socket => { InputType::Socket => {
@ -342,10 +347,15 @@ fn cat_path(
} }
_ => { _ => {
let file = File::open(path)?; let file = File::open(path)?;
#[cfg(any(windows, unix))]
if same_file(out_info, &file) { if let Some(out_info) = out_info {
return Err(CatError::OutputIsInput); if out_info.file_size() != 0
&& FileInformation::from_file(&file).as_ref() == Some(out_info)
{
return Err(CatError::OutputIsInput);
}
} }
let mut handle = InputHandle { let mut handle = InputHandle {
reader: file, reader: file,
is_interactive: false, is_interactive: false,
@ -355,25 +365,8 @@ fn cat_path(
} }
} }
#[cfg(unix)] fn cat_files(files: &[String], options: &OutputOptions) -> UResult<()> {
fn same_file(a_info: &nix::sys::stat::FileStat, b: &File) -> bool { let out_info = FileInformation::from_file(&std::io::stdout());
let b_info = nix::sys::stat::fstat(b.as_raw_fd()).unwrap();
b_info.st_size != 0 && b_info.st_dev == a_info.st_dev && b_info.st_ino == a_info.st_ino
}
#[cfg(windows)]
fn same_file(a_info: &winapi_util::file::Information, b: &File) -> bool {
let b_info = winapi_util::file::information(b).unwrap();
b_info.file_size() != 0
&& b_info.volume_serial_number() == a_info.volume_serial_number()
&& b_info.file_index() == a_info.file_index()
}
fn cat_files(files: Vec<String>, options: &OutputOptions) -> UResult<()> {
#[cfg(windows)]
let out_info = winapi_util::file::information(&std::io::stdout()).unwrap();
#[cfg(unix)]
let out_info = nix::sys::stat::fstat(std::io::stdout().as_raw_fd()).unwrap();
let mut state = OutputState { let mut state = OutputState {
line_number: 1, line_number: 1,
@ -383,8 +376,8 @@ fn cat_files(files: Vec<String>, options: &OutputOptions) -> UResult<()> {
}; };
let mut error_messages: Vec<String> = Vec::new(); let mut error_messages: Vec<String> = Vec::new();
for path in &files { for path in files {
if let Err(err) = cat_path(path, options, &mut state, &out_info) { if let Err(err) = cat_path(path, options, &mut state, out_info.as_ref()) {
error_messages.push(format!("{}: {}", path.maybe_quote(), err)); error_messages.push(format!("{}: {}", path.maybe_quote(), err));
} }
} }
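The hunks above replace the platform-specific `same_file` helpers (nix on Unix, winapi-util on Windows) with `uucore::fs::FileInformation`. A condensed sketch of the resulting check, using only the calls visible in the diff (`from_file`, `file_size`); the input path is a placeholder, and uucore needs its "fs" feature as in the Cargo.toml change above:

use std::fs::File;
use uucore::fs::FileInformation;

// True when writing to stdout would feed back into `input`; an empty output
// file is exempt, matching the `file_size() != 0` guard in the diff.
fn output_is_input(out_info: Option<&FileInformation>, input: &File) -> bool {
    match out_info {
        Some(info) => {
            info.file_size() != 0 && FileInformation::from_file(input).as_ref() == Some(info)
        }
        None => false,
    }
}

fn main() -> std::io::Result<()> {
    let out_info = FileInformation::from_file(&std::io::stdout());
    let input = File::open("input.txt")?; // placeholder path
    if output_is_input(out_info.as_ref(), &input) {
        eprintln!("input file is output file");
    }
    Ok(())
}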
@ -486,7 +479,7 @@ fn write_lines<R: FdReadable>(
if !state.at_line_start || !options.squeeze_blank || !state.one_blank_kept { if !state.at_line_start || !options.squeeze_blank || !state.one_blank_kept {
state.one_blank_kept = true; state.one_blank_kept = true;
if state.at_line_start && options.number == NumberingMode::All { if state.at_line_start && options.number == NumberingMode::All {
write!(&mut writer, "{0:6}\t", state.line_number)?; write!(writer, "{0:6}\t", state.line_number)?;
state.line_number += 1; state.line_number += 1;
} }
writer.write_all(options.end_of_line().as_bytes())?; writer.write_all(options.end_of_line().as_bytes())?;
@ -505,7 +498,7 @@ fn write_lines<R: FdReadable>(
} }
state.one_blank_kept = false; state.one_blank_kept = false;
if state.at_line_start && options.number != NumberingMode::None { if state.at_line_start && options.number != NumberingMode::None {
write!(&mut writer, "{0:6}\t", state.line_number)?; write!(writer, "{0:6}\t", state.line_number)?;
state.line_number += 1; state.line_number += 1;
} }
@ -567,13 +560,12 @@ fn write_tab_to_end<W: Write>(mut in_buf: &[u8], writer: &mut W) -> usize {
{ {
Some(p) => { Some(p) => {
writer.write_all(&in_buf[..p]).unwrap(); writer.write_all(&in_buf[..p]).unwrap();
if in_buf[p] == b'\n' { if in_buf[p] == b'\t' {
return count + p;
} else if in_buf[p] == b'\t' {
writer.write_all(b"^I").unwrap(); writer.write_all(b"^I").unwrap();
in_buf = &in_buf[p + 1..]; in_buf = &in_buf[p + 1..];
count += p + 1; count += p + 1;
} else { } else {
// b'\n' or b'\r'
return count + p; return count + p;
} }
} }
@ -596,10 +588,10 @@ fn write_nonprint_to_end<W: Write>(in_buf: &[u8], writer: &mut W, tab: &[u8]) ->
9 => writer.write_all(tab), 9 => writer.write_all(tab),
0..=8 | 10..=31 => writer.write_all(&[b'^', byte + 64]), 0..=8 | 10..=31 => writer.write_all(&[b'^', byte + 64]),
32..=126 => writer.write_all(&[byte]), 32..=126 => writer.write_all(&[byte]),
127 => writer.write_all(&[b'^', byte - 64]), 127 => writer.write_all(&[b'^', b'?']),
128..=159 => writer.write_all(&[b'M', b'-', b'^', byte - 64]), 128..=159 => writer.write_all(&[b'M', b'-', b'^', byte - 64]),
160..=254 => writer.write_all(&[b'M', b'-', byte - 128]), 160..=254 => writer.write_all(&[b'M', b'-', byte - 128]),
_ => writer.write_all(&[b'M', b'-', b'^', 63]), _ => writer.write_all(&[b'M', b'-', b'^', b'?']),
} }
.unwrap(); .unwrap();
count += 1; count += 1;

View file

@ -1 +1 @@
uucore_procs::main!(uu_cat); uucore::bin!(uu_cat);
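The last cat.rs hunk above rewrites the `-v` (show-nonprinting) table so the DEL (127) and 255 cases spell out `b'?'` explicitly instead of computing it; the output bytes are unchanged. A standalone sketch of the full mapping: unlike the real code it always emits a raw TAB for byte 9 (the real path substitutes `^I` when `-T` is active) and returns a Vec instead of writing to the output stream:

fn show_nonprinting(byte: u8) -> Vec<u8> {
    match byte {
        9 => vec![b'\t'],                         // real code honors -T here
        0..=8 | 10..=31 => vec![b'^', byte + 64], // control chars -> ^@ .. ^_
        32..=126 => vec![byte],                   // printable ASCII unchanged
        127 => vec![b'^', b'?'],                  // DEL -> ^?
        128..=159 => vec![b'M', b'-', b'^', byte - 64],
        160..=254 => vec![b'M', b'-', byte - 128],
        255 => vec![b'M', b'-', b'^', b'?'],      // 255 -> M-^?
    }
}

fn main() {
    assert_eq!(show_nonprinting(0x7f), b"^?".to_vec());
    assert_eq!(show_nonprinting(0xff), b"M-^?".to_vec());
    assert_eq!(show_nonprinting(b'A'), b"A".to_vec());
}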

View file

@ -1,11 +1,11 @@
[package] [package]
name = "uu_chcon" name = "uu_chcon"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "chcon ~ (uutils) change file security context" description = "chcon ~ (uutils) change file security context"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/chcon" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/chcon"
keywords = ["coreutils", "uutils", "cli", "utility"] keywords = ["coreutils", "uutils", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -14,9 +14,8 @@ edition = "2018"
path = "src/chcon.rs" path = "src/chcon.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
uucore = { version = ">=0.0.9", package="uucore", path="../../uucore", features=["entries", "fs", "perms"] } uucore = { version = ">=0.0.9", package="uucore", path="../../uucore", features=["entries", "fs", "perms"] }
uucore_procs = { version = ">=0.0.6", package="uucore_procs", path="../../uucore_procs" }
selinux = { version = "0.2" } selinux = { version = "0.2" }
fts-sys = { version = "0.2" } fts-sys = { version = "0.2" }
thiserror = { version = "1.0" } thiserror = { version = "1.0" }

1
src/uu/chcon/LICENSE Symbolic link
View file

@ -0,0 +1 @@
../../../LICENSE

View file

@ -2,9 +2,11 @@
#![allow(clippy::upper_case_acronyms)] #![allow(clippy::upper_case_acronyms)]
use uucore::{display::Quotable, show_error, show_usage_error, show_warning}; use uucore::error::{UResult, USimpleError, UUsageError};
use uucore::format_usage;
use uucore::{display::Quotable, show_error, show_warning};
use clap::{App, Arg}; use clap::{Arg, Command};
use selinux::{OpaqueSecurityContext, SecurityContext}; use selinux::{OpaqueSecurityContext, SecurityContext};
use std::borrow::Cow; use std::borrow::Cow;
@ -21,8 +23,13 @@ use errors::*;
static VERSION: &str = env!("CARGO_PKG_VERSION"); static VERSION: &str = env!("CARGO_PKG_VERSION");
static ABOUT: &str = "Change the SELinux security context of each FILE to CONTEXT. \n\ static ABOUT: &str = "Change the SELinux security context of each FILE to CONTEXT. \n\
With --reference, change the security context of each FILE to that of RFILE."; With --reference, change the security context of each FILE to that of RFILE.";
const USAGE: &str = "\
{} [OPTION]... CONTEXT FILE... \n \
{} [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE... \n \
{} [OPTION]... --reference=RFILE FILE...";
pub mod options { pub mod options {
pub static HELP: &str = "help";
pub static VERBOSE: &str = "verbose"; pub static VERBOSE: &str = "verbose";
pub static REFERENCE: &str = "reference"; pub static REFERENCE: &str = "reference";
@ -51,35 +58,24 @@ pub mod options {
} }
} }
fn get_usage() -> String { #[uucore::main]
format!( pub fn uumain(args: impl uucore::Args) -> UResult<()> {
"{0} [OPTION]... CONTEXT FILE... \n \ let config = uu_app();
{0} [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE... \n \
{0} [OPTION]... --reference=RFILE FILE...",
uucore::execution_phrase()
)
}
pub fn uumain(args: impl uucore::Args) -> i32 {
let usage = get_usage();
let config = uu_app().usage(usage.as_ref());
let options = match parse_command_line(config, args) { let options = match parse_command_line(config, args) {
Ok(r) => r, Ok(r) => r,
Err(r) => { Err(r) => {
if let Error::CommandLine(r) = &r { if let Error::CommandLine(r) = &r {
match r.kind { match r.kind() {
clap::ErrorKind::HelpDisplayed | clap::ErrorKind::VersionDisplayed => { clap::ErrorKind::DisplayHelp | clap::ErrorKind::DisplayVersion => {
println!("{}", r); println!("{}", r);
return libc::EXIT_SUCCESS; return Ok(());
} }
_ => {} _ => {}
} }
} }
show_usage_error!("{}.\n", r); return Err(UUsageError::new(libc::EXIT_FAILURE, format!("{}.\n", r)));
return libc::EXIT_FAILURE;
} }
}; };
@ -98,8 +94,10 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
match result { match result {
Err(r) => { Err(r) => {
show_error!("{}.", report_full_error(&r)); return Err(USimpleError::new(
return libc::EXIT_FAILURE; libc::EXIT_FAILURE,
format!("{}.", report_full_error(&r)),
));
} }
Ok(file_context) => SELinuxSecurityContext::File(file_context), Ok(file_context) => SELinuxSecurityContext::File(file_context),
@ -111,14 +109,18 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
Ok(context) => context, Ok(context) => context,
Err(_r) => { Err(_r) => {
show_error!("Invalid security context {}.", context.quote()); return Err(USimpleError::new(
return libc::EXIT_FAILURE; libc::EXIT_FAILURE,
format!("Invalid security context {}.", context.quote()),
));
} }
}; };
if SecurityContext::from_c_str(&c_context, false).check() == Some(false) { if SecurityContext::from_c_str(&c_context, false).check() == Some(false) {
show_error!("Invalid security context {}.", context.quote()); return Err(USimpleError::new(
return libc::EXIT_FAILURE; libc::EXIT_FAILURE,
format!("Invalid security context {}.", context.quote()),
));
} }
SELinuxSecurityContext::String(Some(c_context)) SELinuxSecurityContext::String(Some(c_context))
@ -132,8 +134,10 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
Ok(r) => Some(r), Ok(r) => Some(r),
Err(r) => { Err(r) => {
show_error!("{}.", report_full_error(&r)); return Err(USimpleError::new(
return libc::EXIT_FAILURE; libc::EXIT_FAILURE,
format!("{}.", report_full_error(&r)),
));
} }
} }
} else { } else {
@ -142,21 +146,28 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
let results = process_files(&options, &context, root_dev_ino); let results = process_files(&options, &context, root_dev_ino);
if results.is_empty() { if results.is_empty() {
return libc::EXIT_SUCCESS; return Ok(());
} }
for result in &results { for result in &results {
show_error!("{}.", report_full_error(result)); show_error!("{}.", report_full_error(result));
} }
libc::EXIT_FAILURE Err(libc::EXIT_FAILURE.into())
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.version(VERSION) .version(VERSION)
.about(ABOUT) .about(ABOUT)
.override_usage(format_usage(USAGE))
.infer_long_args(true)
.arg( .arg(
Arg::with_name(options::dereference::DEREFERENCE) Arg::new(options::HELP)
.long(options::HELP)
.help("Print help information."),
)
.arg(
Arg::new(options::dereference::DEREFERENCE)
.long(options::dereference::DEREFERENCE) .long(options::dereference::DEREFERENCE)
.conflicts_with(options::dereference::NO_DEREFERENCE) .conflicts_with(options::dereference::NO_DEREFERENCE)
.help( .help(
@ -165,24 +176,24 @@ pub fn uu_app() -> App<'static, 'static> {
), ),
) )
.arg( .arg(
Arg::with_name(options::dereference::NO_DEREFERENCE) Arg::new(options::dereference::NO_DEREFERENCE)
.short("h") .short('h')
.long(options::dereference::NO_DEREFERENCE) .long(options::dereference::NO_DEREFERENCE)
.help("Affect symbolic links instead of any referenced file."), .help("Affect symbolic links instead of any referenced file."),
) )
.arg( .arg(
Arg::with_name(options::preserve_root::PRESERVE_ROOT) Arg::new(options::preserve_root::PRESERVE_ROOT)
.long(options::preserve_root::PRESERVE_ROOT) .long(options::preserve_root::PRESERVE_ROOT)
.conflicts_with(options::preserve_root::NO_PRESERVE_ROOT) .conflicts_with(options::preserve_root::NO_PRESERVE_ROOT)
.help("Fail to operate recursively on '/'."), .help("Fail to operate recursively on '/'."),
) )
.arg( .arg(
Arg::with_name(options::preserve_root::NO_PRESERVE_ROOT) Arg::new(options::preserve_root::NO_PRESERVE_ROOT)
.long(options::preserve_root::NO_PRESERVE_ROOT) .long(options::preserve_root::NO_PRESERVE_ROOT)
.help("Do not treat '/' specially (the default)."), .help("Do not treat '/' specially (the default)."),
) )
.arg( .arg(
Arg::with_name(options::REFERENCE) Arg::new(options::REFERENCE)
.long(options::REFERENCE) .long(options::REFERENCE)
.takes_value(true) .takes_value(true)
.value_name("RFILE") .value_name("RFILE")
@ -190,49 +201,54 @@ pub fn uu_app() -> App<'static, 'static> {
.help( .help(
"Use security context of RFILE, rather than specifying \ "Use security context of RFILE, rather than specifying \
a CONTEXT value.", a CONTEXT value.",
), )
.allow_invalid_utf8(true),
) )
.arg( .arg(
Arg::with_name(options::USER) Arg::new(options::USER)
.short("u") .short('u')
.long(options::USER) .long(options::USER)
.takes_value(true) .takes_value(true)
.value_name("USER") .value_name("USER")
.help("Set user USER in the target security context."), .help("Set user USER in the target security context.")
.allow_invalid_utf8(true),
) )
.arg( .arg(
Arg::with_name(options::ROLE) Arg::new(options::ROLE)
.short("r") .short('r')
.long(options::ROLE) .long(options::ROLE)
.takes_value(true) .takes_value(true)
.value_name("ROLE") .value_name("ROLE")
.help("Set role ROLE in the target security context."), .help("Set role ROLE in the target security context.")
.allow_invalid_utf8(true),
) )
.arg( .arg(
Arg::with_name(options::TYPE) Arg::new(options::TYPE)
.short("t") .short('t')
.long(options::TYPE) .long(options::TYPE)
.takes_value(true) .takes_value(true)
.value_name("TYPE") .value_name("TYPE")
.help("Set type TYPE in the target security context."), .help("Set type TYPE in the target security context.")
.allow_invalid_utf8(true),
) )
.arg( .arg(
Arg::with_name(options::RANGE) Arg::new(options::RANGE)
.short("l") .short('l')
.long(options::RANGE) .long(options::RANGE)
.takes_value(true) .takes_value(true)
.value_name("RANGE") .value_name("RANGE")
.help("Set range RANGE in the target security context."), .help("Set range RANGE in the target security context.")
.allow_invalid_utf8(true),
) )
.arg( .arg(
Arg::with_name(options::RECURSIVE) Arg::new(options::RECURSIVE)
.short("R") .short('R')
.long(options::RECURSIVE) .long(options::RECURSIVE)
.help("Operate on files and directories recursively."), .help("Operate on files and directories recursively."),
) )
.arg( .arg(
Arg::with_name(options::sym_links::FOLLOW_ARG_DIR_SYM_LINK) Arg::new(options::sym_links::FOLLOW_ARG_DIR_SYM_LINK)
.short("H") .short('H')
.requires(options::RECURSIVE) .requires(options::RECURSIVE)
.overrides_with_all(&[ .overrides_with_all(&[
options::sym_links::FOLLOW_DIR_SYM_LINKS, options::sym_links::FOLLOW_DIR_SYM_LINKS,
@ -244,8 +260,8 @@ pub fn uu_app() -> App<'static, 'static> {
), ),
) )
.arg( .arg(
Arg::with_name(options::sym_links::FOLLOW_DIR_SYM_LINKS) Arg::new(options::sym_links::FOLLOW_DIR_SYM_LINKS)
.short("L") .short('L')
.requires(options::RECURSIVE) .requires(options::RECURSIVE)
.overrides_with_all(&[ .overrides_with_all(&[
options::sym_links::FOLLOW_ARG_DIR_SYM_LINK, options::sym_links::FOLLOW_ARG_DIR_SYM_LINK,
@ -257,8 +273,8 @@ pub fn uu_app() -> App<'static, 'static> {
), ),
) )
.arg( .arg(
Arg::with_name(options::sym_links::NO_FOLLOW_SYM_LINKS) Arg::new(options::sym_links::NO_FOLLOW_SYM_LINKS)
.short("P") .short('P')
.requires(options::RECURSIVE) .requires(options::RECURSIVE)
.overrides_with_all(&[ .overrides_with_all(&[
options::sym_links::FOLLOW_ARG_DIR_SYM_LINK, options::sym_links::FOLLOW_ARG_DIR_SYM_LINK,
@ -270,12 +286,17 @@ pub fn uu_app() -> App<'static, 'static> {
), ),
) )
.arg( .arg(
Arg::with_name(options::VERBOSE) Arg::new(options::VERBOSE)
.short("v") .short('v')
.long(options::VERBOSE) .long(options::VERBOSE)
.help("Output a diagnostic for every file processed."), .help("Output a diagnostic for every file processed."),
) )
.arg(Arg::with_name("FILE").multiple(true).min_values(1)) .arg(
Arg::new("FILE")
.multiple_occurrences(true)
.min_values(1)
.allow_invalid_utf8(true),
)
} }
#[derive(Debug)] #[derive(Debug)]
@ -288,8 +309,8 @@ struct Options {
files: Vec<PathBuf>, files: Vec<PathBuf>,
} }
fn parse_command_line(config: clap::App, args: impl uucore::Args) -> Result<Options> { fn parse_command_line(config: clap::Command, args: impl uucore::Args) -> Result<Options> {
let matches = config.get_matches_from_safe(args)?; let matches = config.try_get_matches_from(args)?;
let verbose = matches.is_present(options::VERBOSE); let verbose = matches.is_present(options::VERBOSE);
@ -387,23 +408,21 @@ enum RecursiveMode {
impl RecursiveMode { impl RecursiveMode {
fn is_recursive(self) -> bool { fn is_recursive(self) -> bool {
match self { match self {
RecursiveMode::NotRecursive => false, Self::NotRecursive => false,
RecursiveMode::RecursiveButDoNotFollowSymLinks Self::RecursiveButDoNotFollowSymLinks
| RecursiveMode::RecursiveAndFollowAllDirSymLinks | Self::RecursiveAndFollowAllDirSymLinks
| RecursiveMode::RecursiveAndFollowArgDirSymLinks => true, | Self::RecursiveAndFollowArgDirSymLinks => true,
} }
} }
fn fts_open_options(self) -> c_int { fn fts_open_options(self) -> c_int {
match self { match self {
RecursiveMode::NotRecursive | RecursiveMode::RecursiveButDoNotFollowSymLinks => { Self::NotRecursive | Self::RecursiveButDoNotFollowSymLinks => fts_sys::FTS_PHYSICAL,
fts_sys::FTS_PHYSICAL
}
RecursiveMode::RecursiveAndFollowAllDirSymLinks => fts_sys::FTS_LOGICAL, Self::RecursiveAndFollowAllDirSymLinks => fts_sys::FTS_LOGICAL,
RecursiveMode::RecursiveAndFollowArgDirSymLinks => { Self::RecursiveAndFollowArgDirSymLinks => {
fts_sys::FTS_PHYSICAL | fts_sys::FTS_COMFOLLOW fts_sys::FTS_PHYSICAL | fts_sys::FTS_COMFOLLOW
} }
} }
@ -707,7 +726,7 @@ fn root_dev_ino_warn(dir_name: &Path) {
// When a program like chgrp performs a recursive traversal that requires traversing symbolic links, // When a program like chgrp performs a recursive traversal that requires traversing symbolic links,
// it is *not* a problem. // it is *not* a problem.
// However, when invoked with "-P -R", it deserves a warning. // However, when invoked with "-P -R", it deserves a warning.
// The fts_options parameter records the options that control this aspect of fts's behavior, // The fts_options parameter records the options that control this aspect of fts behavior,
// so test that. // so test that.
fn cycle_warning_required(fts_options: c_int, entry: &fts::EntryRef) -> bool { fn cycle_warning_required(fts_options: c_int, entry: &fts::EntryRef) -> bool {
// When dereferencing no symlinks, or when dereferencing only those listed on the command line // When dereferencing no symlinks, or when dereferencing only those listed on the command line
@ -723,7 +742,7 @@ This almost certainly means that you have a corrupted file system.\n\
NOTIFY YOUR SYSTEM MANAGER.\n\ NOTIFY YOUR SYSTEM MANAGER.\n\
The following directory is part of the cycle {}.", The following directory is part of the cycle {}.",
file_name.quote() file_name.quote()
) );
} }
#[derive(Debug)] #[derive(Debug)]

View file

@ -64,10 +64,10 @@ impl Error {
pub(crate) fn report_full_error(mut err: &dyn std::error::Error) -> String { pub(crate) fn report_full_error(mut err: &dyn std::error::Error) -> String {
let mut desc = String::with_capacity(256); let mut desc = String::with_capacity(256);
write!(&mut desc, "{}", err).unwrap(); write!(desc, "{}", err).unwrap();
while let Some(source) = err.source() { while let Some(source) = err.source() {
err = source; err = source;
write!(&mut desc, ". {}", err).unwrap(); write!(desc, ". {}", err).unwrap();
} }
desc desc
} }

View file

@ -1 +1 @@
uucore_procs::main!(uu_chcon); uucore::bin!(uu_chcon);
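In chcon the clap 2 names `HelpDisplayed`/`VersionDisplayed` become `DisplayHelp`/`DisplayVersion`, `get_matches_from_safe` becomes `try_get_matches_from`, and the kind is read through `kind()`. A minimal sketch of that pattern outside chcon; the command definition and version string are placeholders:

use clap::{Command, ErrorKind};

fn main() {
    let app = Command::new("example").version("0.0.13");
    match app.try_get_matches_from(["example", "--version"]) {
        Ok(_matches) => println!("parsed"),
        Err(e) => match e.kind() {
            // --help / --version surface as "errors" here but are not failures
            ErrorKind::DisplayHelp | ErrorKind::DisplayVersion => print!("{}", e),
            _ => {
                eprintln!("{}", e);
                std::process::exit(1);
            }
        },
    }
}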

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_chgrp" name = "uu_chgrp"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "chgrp ~ (uutils) change the group ownership of FILE" description = "chgrp ~ (uutils) change the group ownership of FILE"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/chgrp" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/chgrp"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,9 +15,8 @@ edition = "2018"
path = "src/chgrp.rs" path = "src/chgrp.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features=["entries", "fs", "perms"] } uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features=["entries", "fs", "perms"] }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
[[bin]] [[bin]]
name = "chgrp" name = "chgrp"

1
src/uu/chgrp/LICENSE Symbolic link
View file

@ -0,0 +1 @@
../../../LICENSE

View file

@ -10,9 +10,10 @@
use uucore::display::Quotable; use uucore::display::Quotable;
pub use uucore::entries; pub use uucore::entries;
use uucore::error::{FromIo, UResult, USimpleError}; use uucore::error::{FromIo, UResult, USimpleError};
use uucore::format_usage;
use uucore::perms::{chown_base, options, IfFrom}; use uucore::perms::{chown_base, options, IfFrom};
use clap::{App, Arg, ArgMatches}; use clap::{Arg, ArgMatches, Command};
use std::fs; use std::fs;
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
@ -20,12 +21,9 @@ use std::os::unix::fs::MetadataExt;
static ABOUT: &str = "Change the group of each FILE to GROUP."; static ABOUT: &str = "Change the group of each FILE to GROUP.";
static VERSION: &str = env!("CARGO_PKG_VERSION"); static VERSION: &str = env!("CARGO_PKG_VERSION");
fn get_usage() -> String { const USAGE: &str = "\
format!( {} [OPTION]... GROUP FILE...\n \
"{0} [OPTION]... GROUP FILE...\n {0} [OPTION]... --reference=RFILE FILE...", {} [OPTION]... --reference=RFILE FILE...";
uucore::execution_phrase()
)
}
fn parse_gid_and_uid(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>, IfFrom)> { fn parse_gid_and_uid(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>, IfFrom)> {
let dest_gid = if let Some(file) = matches.value_of(options::REFERENCE) { let dest_gid = if let Some(file) = matches.value_of(options::REFERENCE) {
@ -51,95 +49,94 @@ fn parse_gid_and_uid(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>,
Ok((dest_gid, None, IfFrom::All)) Ok((dest_gid, None, IfFrom::All))
} }
#[uucore_procs::gen_uumain] #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> { pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let usage = get_usage(); chown_base(uu_app(), args, options::ARG_GROUP, parse_gid_and_uid, true)
chown_base(
uu_app().usage(&usage[..]),
args,
options::ARG_GROUP,
parse_gid_and_uid,
true,
)
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.version(VERSION) .version(VERSION)
.about(ABOUT) .about(ABOUT)
.override_usage(format_usage(USAGE))
.infer_long_args(true)
.arg( .arg(
Arg::with_name(options::verbosity::CHANGES) Arg::new(options::HELP)
.short("c") .long(options::HELP)
.help("Print help information.")
)
.arg(
Arg::new(options::verbosity::CHANGES)
.short('c')
.long(options::verbosity::CHANGES) .long(options::verbosity::CHANGES)
.help("like verbose but report only when a change is made"), .help("like verbose but report only when a change is made"),
) )
.arg( .arg(
Arg::with_name(options::verbosity::SILENT) Arg::new(options::verbosity::SILENT)
.short("f") .short('f')
.long(options::verbosity::SILENT), .long(options::verbosity::SILENT),
) )
.arg( .arg(
Arg::with_name(options::verbosity::QUIET) Arg::new(options::verbosity::QUIET)
.long(options::verbosity::QUIET) .long(options::verbosity::QUIET)
.help("suppress most error messages"), .help("suppress most error messages"),
) )
.arg( .arg(
Arg::with_name(options::verbosity::VERBOSE) Arg::new(options::verbosity::VERBOSE)
.short("v") .short('v')
.long(options::verbosity::VERBOSE) .long(options::verbosity::VERBOSE)
.help("output a diagnostic for every file processed"), .help("output a diagnostic for every file processed"),
) )
.arg( .arg(
Arg::with_name(options::dereference::DEREFERENCE) Arg::new(options::dereference::DEREFERENCE)
.long(options::dereference::DEREFERENCE), .long(options::dereference::DEREFERENCE),
) )
.arg( .arg(
Arg::with_name(options::dereference::NO_DEREFERENCE) Arg::new(options::dereference::NO_DEREFERENCE)
.short("h") .short('h')
.long(options::dereference::NO_DEREFERENCE) .long(options::dereference::NO_DEREFERENCE)
.help( .help(
"affect symbolic links instead of any referenced file (useful only on systems that can change the ownership of a symlink)", "affect symbolic links instead of any referenced file (useful only on systems that can change the ownership of a symlink)",
), ),
) )
.arg( .arg(
Arg::with_name(options::preserve_root::PRESERVE) Arg::new(options::preserve_root::PRESERVE)
.long(options::preserve_root::PRESERVE) .long(options::preserve_root::PRESERVE)
.help("fail to operate recursively on '/'"), .help("fail to operate recursively on '/'"),
) )
.arg( .arg(
Arg::with_name(options::preserve_root::NO_PRESERVE) Arg::new(options::preserve_root::NO_PRESERVE)
.long(options::preserve_root::NO_PRESERVE) .long(options::preserve_root::NO_PRESERVE)
.help("do not treat '/' specially (the default)"), .help("do not treat '/' specially (the default)"),
) )
.arg( .arg(
Arg::with_name(options::REFERENCE) Arg::new(options::REFERENCE)
.long(options::REFERENCE) .long(options::REFERENCE)
.value_name("RFILE") .value_name("RFILE")
.help("use RFILE's group rather than specifying GROUP values") .help("use RFILE's group rather than specifying GROUP values")
.takes_value(true) .takes_value(true)
.multiple(false), .multiple_occurrences(false),
) )
.arg( .arg(
Arg::with_name(options::RECURSIVE) Arg::new(options::RECURSIVE)
.short("R") .short('R')
.long(options::RECURSIVE) .long(options::RECURSIVE)
.help("operate on files and directories recursively"), .help("operate on files and directories recursively"),
) )
.arg( .arg(
Arg::with_name(options::traverse::TRAVERSE) Arg::new(options::traverse::TRAVERSE)
.short(options::traverse::TRAVERSE) .short(options::traverse::TRAVERSE.chars().next().unwrap())
.help("if a command line argument is a symbolic link to a directory, traverse it"), .help("if a command line argument is a symbolic link to a directory, traverse it"),
) )
.arg( .arg(
Arg::with_name(options::traverse::NO_TRAVERSE) Arg::new(options::traverse::NO_TRAVERSE)
.short(options::traverse::NO_TRAVERSE) .short(options::traverse::NO_TRAVERSE.chars().next().unwrap())
.help("do not traverse any symbolic links (default)") .help("do not traverse any symbolic links (default)")
.overrides_with_all(&[options::traverse::TRAVERSE, options::traverse::EVERY]), .overrides_with_all(&[options::traverse::TRAVERSE, options::traverse::EVERY]),
) )
.arg( .arg(
Arg::with_name(options::traverse::EVERY) Arg::new(options::traverse::EVERY)
.short(options::traverse::EVERY) .short(options::traverse::EVERY.chars().next().unwrap())
.help("traverse every symbolic link to a directory encountered"), .help("traverse every symbolic link to a directory encountered"),
) )
} }

View file

@ -1 +1 @@
uucore_procs::main!(uu_chgrp); uucore::bin!(uu_chgrp);
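chgrp, like the other tools in this commit, replaces its `get_usage()`/`format!` helper with a `const USAGE` template handed to `uucore::format_usage` and `Command::override_usage`. A compact sketch of the same wiring, keeping to the calls shown in the diff; the about text is shortened, and going by the diff `format_usage` fills the `{}` placeholders with the invocation name that `execution_phrase()` used to supply:

use clap::Command;
use uucore::format_usage;

const USAGE: &str = "{} [OPTION]... GROUP FILE...\n{} [OPTION]... --reference=RFILE FILE...";

fn uu_app<'a>() -> Command<'a> {
    Command::new(uucore::util_name())
        .about("Change the group of each FILE to GROUP.")
        .override_usage(format_usage(USAGE))
        .infer_long_args(true)
}

fn main() {
    // Printing the help shows the substituted usage lines.
    let mut cmd = uu_app();
    cmd.print_help().unwrap();
}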

View file

@ -1,12 +1,12 @@
[package] [package]
name = "uu_chmod" name = "uu_chmod"
version = "0.0.8" version = "0.0.13"
authors = ["uutils developers"] authors = ["uutils developers"]
license = "MIT" license = "MIT"
description = "chmod ~ (uutils) change mode of FILE" description = "chmod ~ (uutils) change mode of FILE"
homepage = "https://github.com/uutils/coreutils" homepage = "https://github.com/uutils/coreutils"
repository = "https://github.com/uutils/coreutils/tree/master/src/uu/chmod" repository = "https://github.com/uutils/coreutils/tree/main/src/uu/chmod"
keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"] keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
edition = "2018" edition = "2018"
@ -15,11 +15,9 @@ edition = "2018"
path = "src/chmod.rs" path = "src/chmod.rs"
[dependencies] [dependencies]
clap = { version = "2.33", features = ["wrap_help"] } clap = { version = "3.1", features = ["wrap_help", "cargo"] }
libc = "0.2.42" libc = "0.2.121"
uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features=["fs", "mode"] } uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features=["fs", "mode"] }
uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
walkdir = "2.2"
[[bin]] [[bin]]
name = "chmod" name = "chmod"

1
src/uu/chmod/LICENSE Symbolic link
View file

@ -0,0 +1 @@
../../../LICENSE

View file

@ -7,20 +7,17 @@
// spell-checker:ignore (ToDO) Chmoder cmode fmode fperm fref ugoa RFILE RFILE's // spell-checker:ignore (ToDO) Chmoder cmode fmode fperm fref ugoa RFILE RFILE's
#[macro_use] use clap::{crate_version, Arg, Command};
extern crate uucore;
use clap::{crate_version, App, Arg};
use std::fs; use std::fs;
use std::os::unix::fs::{MetadataExt, PermissionsExt}; use std::os::unix::fs::{MetadataExt, PermissionsExt};
use std::path::Path; use std::path::Path;
use uucore::display::Quotable; use uucore::display::Quotable;
use uucore::error::{ExitCode, UResult, USimpleError, UUsageError};
use uucore::fs::display_permissions_unix; use uucore::fs::display_permissions_unix;
use uucore::libc::mode_t; use uucore::libc::mode_t;
#[cfg(not(windows))] #[cfg(not(windows))]
use uucore::mode; use uucore::mode;
use uucore::InvalidEncodingHandling; use uucore::{format_usage, show_error, InvalidEncodingHandling};
use walkdir::WalkDir;
static ABOUT: &str = "Change the mode of each FILE to MODE. static ABOUT: &str = "Change the mode of each FILE to MODE.
With --reference, change the mode of each FILE to that of RFILE."; With --reference, change the mode of each FILE to that of RFILE.";
@ -37,20 +34,17 @@ mod options {
pub const FILE: &str = "FILE"; pub const FILE: &str = "FILE";
} }
fn usage() -> String { const USAGE: &str = "\
format!( {} [OPTION]... MODE[,MODE]... FILE...
"{0} [OPTION]... MODE[,MODE]... FILE... {} [OPTION]... OCTAL-MODE FILE...
or: {0} [OPTION]... OCTAL-MODE FILE... {} [OPTION]... --reference=RFILE FILE...";
or: {0} [OPTION]... --reference=RFILE FILE...",
uucore::execution_phrase()
)
}
fn get_long_usage() -> String { fn get_long_usage() -> String {
String::from("Each MODE is of the form '[ugoa]*([-+=]([rwxXst]*|[ugo]))+|[-+=]?[0-7]+'.") String::from("Each MODE is of the form '[ugoa]*([-+=]([rwxXst]*|[ugo]))+|[-+=]?[0-7]+'.")
} }
pub fn uumain(args: impl uucore::Args) -> i32 { #[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let mut args = args let mut args = args
.collect_str(InvalidEncodingHandling::ConvertLossy) .collect_str(InvalidEncodingHandling::ConvertLossy)
.accept_any(); .accept_any();
@ -59,25 +53,27 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
// a possible MODE prefix '-' needs to be removed (e.g. "chmod -x FILE"). // a possible MODE prefix '-' needs to be removed (e.g. "chmod -x FILE").
let mode_had_minus_prefix = mode::strip_minus_from_mode(&mut args); let mode_had_minus_prefix = mode::strip_minus_from_mode(&mut args);
let usage = usage();
let after_help = get_long_usage(); let after_help = get_long_usage();
let matches = uu_app() let matches = uu_app().after_help(&after_help[..]).get_matches_from(args);
.usage(&usage[..])
.after_help(&after_help[..])
.get_matches_from(args);
let changes = matches.is_present(options::CHANGES); let changes = matches.is_present(options::CHANGES);
let quiet = matches.is_present(options::QUIET); let quiet = matches.is_present(options::QUIET);
let verbose = matches.is_present(options::VERBOSE); let verbose = matches.is_present(options::VERBOSE);
let preserve_root = matches.is_present(options::PRESERVE_ROOT); let preserve_root = matches.is_present(options::PRESERVE_ROOT);
let recursive = matches.is_present(options::RECURSIVE); let recursive = matches.is_present(options::RECURSIVE);
let fmode = matches let fmode = match matches.value_of(options::REFERENCE) {
.value_of(options::REFERENCE) Some(fref) => match fs::metadata(fref) {
.and_then(|fref| match fs::metadata(fref) {
Ok(meta) => Some(meta.mode()), Ok(meta) => Some(meta.mode()),
Err(err) => crash!(1, "cannot stat attributes of {}: {}", fref.quote(), err), Err(err) => {
}); return Err(USimpleError::new(
1,
format!("cannot stat attributes of {}: {}", fref.quote(), err),
))
}
},
None => None,
};
let modes = matches.value_of(options::MODE).unwrap(); // should always be Some because required let modes = matches.value_of(options::MODE).unwrap(); // should always be Some because required
let cmode = if mode_had_minus_prefix { let cmode = if mode_had_minus_prefix {
// clap parsing is finished, now put prefix back // clap parsing is finished, now put prefix back
@ -100,7 +96,7 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
}; };
if files.is_empty() { if files.is_empty() {
crash!(1, "missing operand"); return Err(UUsageError::new(1, "missing operand".to_string()));
} }
let chmoder = Chmoder { let chmoder = Chmoder {
@ -112,71 +108,69 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
fmode, fmode,
cmode, cmode,
}; };
match chmoder.chmod(files) {
Ok(()) => {}
Err(e) => return e,
}
0 chmoder.chmod(&files)
} }
pub fn uu_app() -> App<'static, 'static> { pub fn uu_app<'a>() -> Command<'a> {
App::new(uucore::util_name()) Command::new(uucore::util_name())
.version(crate_version!()) .version(crate_version!())
.about(ABOUT) .about(ABOUT)
.override_usage(format_usage(USAGE))
.infer_long_args(true)
.arg( .arg(
Arg::with_name(options::CHANGES) Arg::new(options::CHANGES)
.long(options::CHANGES) .long(options::CHANGES)
.short("c") .short('c')
.help("like verbose but report only when a change is made"), .help("like verbose but report only when a change is made"),
) )
.arg( .arg(
Arg::with_name(options::QUIET) Arg::new(options::QUIET)
.long(options::QUIET) .long(options::QUIET)
.visible_alias("silent") .visible_alias("silent")
.short("f") .short('f')
.help("suppress most error messages"), .help("suppress most error messages"),
) )
.arg( .arg(
Arg::with_name(options::VERBOSE) Arg::new(options::VERBOSE)
.long(options::VERBOSE) .long(options::VERBOSE)
.short("v") .short('v')
.help("output a diagnostic for every file processed"), .help("output a diagnostic for every file processed"),
) )
.arg( .arg(
Arg::with_name(options::NO_PRESERVE_ROOT) Arg::new(options::NO_PRESERVE_ROOT)
.long(options::NO_PRESERVE_ROOT) .long(options::NO_PRESERVE_ROOT)
.help("do not treat '/' specially (the default)"), .help("do not treat '/' specially (the default)"),
) )
.arg( .arg(
Arg::with_name(options::PRESERVE_ROOT) Arg::new(options::PRESERVE_ROOT)
.long(options::PRESERVE_ROOT) .long(options::PRESERVE_ROOT)
.help("fail to operate recursively on '/'"), .help("fail to operate recursively on '/'"),
) )
.arg( .arg(
Arg::with_name(options::RECURSIVE) Arg::new(options::RECURSIVE)
.long(options::RECURSIVE) .long(options::RECURSIVE)
.short("R") .short('R')
.help("change files and directories recursively"), .help("change files and directories recursively"),
) )
.arg( .arg(
Arg::with_name(options::REFERENCE) Arg::new(options::REFERENCE)
.long("reference") .long("reference")
.takes_value(true) .takes_value(true)
.help("use RFILE's mode instead of MODE values"), .help("use RFILE's mode instead of MODE values"),
) )
.arg( .arg(
Arg::with_name(options::MODE) Arg::new(options::MODE)
.required_unless(options::REFERENCE) .required_unless_present(options::REFERENCE)
.takes_value(true), .takes_value(true),
// It would be nice if clap could parse with delimiter, e.g. "g-x,u+x", // It would be nice if clap could parse with delimiter, e.g. "g-x,u+x",
// however .multiple(true) cannot be used here because FILE already needs that. // however .multiple_occurrences(true) cannot be used here because FILE already needs that.
// Only one positional argument with .multiple(true) set is allowed per command // Only one positional argument with .multiple_occurrences(true) set is allowed per command
) )
.arg( .arg(
Arg::with_name(options::FILE) Arg::new(options::FILE)
.required_unless(options::MODE) .required_unless_present(options::MODE)
.multiple(true), .multiple_occurrences(true),
) )
} }
@ -191,10 +185,10 @@ struct Chmoder {
} }
impl Chmoder { impl Chmoder {
fn chmod(&self, files: Vec<String>) -> Result<(), i32> { fn chmod(&self, files: &[String]) -> UResult<()> {
let mut r = Ok(()); let mut r = Ok(());
for filename in &files { for filename in files {
let filename = &filename[..]; let filename = &filename[..];
let file = Path::new(filename); let file = Path::new(filename);
if !file.exists() { if !file.exists() {
@ -204,29 +198,47 @@ impl Chmoder {
filename.quote() filename.quote()
); );
if !self.quiet { if !self.quiet {
show_error!("cannot operate on dangling symlink {}", filename.quote()); return Err(USimpleError::new(
1,
format!("cannot operate on dangling symlink {}", filename.quote()),
));
} }
} else if !self.quiet { } else if !self.quiet {
show_error!( return Err(USimpleError::new(
"cannot access {}: No such file or directory", 1,
filename.quote() format!(
); "cannot access {}: No such file or directory",
filename.quote()
),
));
} }
return Err(1); return Err(ExitCode::new(1));
} }
if self.recursive && self.preserve_root && filename == "/" { if self.recursive && self.preserve_root && filename == "/" {
show_error!( return Err(USimpleError::new(
"it is dangerous to operate recursively on {}\nuse --no-preserve-root to override this failsafe", 1,
filename.quote() format!(
); "it is dangerous to operate recursively on {}\nuse --no-preserve-root to override this failsafe",
return Err(1); filename.quote()
)
));
} }
if !self.recursive { if !self.recursive {
r = self.chmod_file(file).and(r); r = self.chmod_file(file).and(r);
} else { } else {
for entry in WalkDir::new(&filename).into_iter().filter_map(|e| e.ok()) { r = self.walk_dir(file);
let file = entry.path(); }
r = self.chmod_file(file).and(r); }
r
}
fn walk_dir(&self, file_path: &Path) -> UResult<()> {
let mut r = self.chmod_file(file_path);
if !is_symlink(file_path) && file_path.is_dir() {
for dir_entry in file_path.read_dir()? {
let path = dir_entry?.path();
if !is_symlink(&path) {
r = self.walk_dir(path.as_path());
} }
} }
} }
@ -234,14 +246,14 @@ impl Chmoder {
} }
#[cfg(windows)] #[cfg(windows)]
fn chmod_file(&self, file: &Path) -> Result<(), i32> { fn chmod_file(&self, file: &Path) -> UResult<()> {
// chmod is useless on Windows // chmod is useless on Windows
// it doesn't set any permissions at all // it doesn't set any permissions at all
// instead it just sets the readonly attribute on the file // instead it just sets the readonly attribute on the file
Err(0) Ok(())
} }
#[cfg(unix)] #[cfg(unix)]
fn chmod_file(&self, file: &Path) -> Result<(), i32> { fn chmod_file(&self, file: &Path) -> UResult<()> {
use uucore::mode::get_umask; use uucore::mode::get_umask;
let fperm = match fs::metadata(file) { let fperm = match fs::metadata(file) {
@ -258,11 +270,13 @@ impl Chmoder {
} else if err.kind() == std::io::ErrorKind::PermissionDenied { } else if err.kind() == std::io::ErrorKind::PermissionDenied {
// These two filenames would normally be conditionally // These two filenames would normally be conditionally
// quoted, but GNU's tests expect them to always be quoted // quoted, but GNU's tests expect them to always be quoted
show_error!("{}: Permission denied", file.quote()); return Err(USimpleError::new(
1,
format!("{}: Permission denied", file.quote()),
));
} else { } else {
show_error!("{}: {}", file.quote(), err); return Err(USimpleError::new(1, format!("{}: {}", file.quote(), err)));
} }
return Err(1);
} }
}; };
match self.fmode { match self.fmode {
@ -296,22 +310,25 @@ impl Chmoder {
} }
Err(f) => { Err(f) => {
if !self.quiet { if !self.quiet {
show_error!("{}", f); return Err(USimpleError::new(1, f));
} else {
return Err(ExitCode::new(1));
} }
return Err(1);
} }
} }
} }
self.change_file(fperm, new_mode, file)?; self.change_file(fperm, new_mode, file)?;
// if a permission would have been removed if umask was 0, but it wasn't because umask was not 0, print an error and fail // if a permission would have been removed if umask was 0, but it wasn't because umask was not 0, print an error and fail
if (new_mode & !naively_expected_new_mode) != 0 { if (new_mode & !naively_expected_new_mode) != 0 {
show_error!( return Err(USimpleError::new(
"{}: new permissions are {}, not {}", 1,
file.maybe_quote(), format!(
display_permissions_unix(new_mode as mode_t, false), "{}: new permissions are {}, not {}",
display_permissions_unix(naively_expected_new_mode as mode_t, false) file.maybe_quote(),
); display_permissions_unix(new_mode as mode_t, false),
return Err(1); display_permissions_unix(naively_expected_new_mode as mode_t, false)
),
));
} }
} }
} }

View file

@ -1 +1 @@
uucore_procs::main!(uu_chmod); uucore::bin!(uu_chmod);
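chmod drops the `walkdir` dependency in favor of the hand-rolled `walk_dir` shown above, which recurses with `read_dir` and never follows symlinks. A standalone std-only sketch of the same shape: the `apply` closure stands in for `chmod_file`, `is_symlink` is written out with `symlink_metadata` rather than the helper the real code uses, and error handling is simplified to an early return:

use std::fs;
use std::io;
use std::path::Path;

fn is_symlink(path: &Path) -> bool {
    fs::symlink_metadata(path)
        .map(|m| m.file_type().is_symlink())
        .unwrap_or(false)
}

fn walk_dir<F: Fn(&Path)>(path: &Path, apply: &F) -> io::Result<()> {
    // The starting path itself is processed first, as in the diff.
    apply(path);
    if !is_symlink(path) && path.is_dir() {
        for entry in fs::read_dir(path)? {
            let child = entry?.path();
            if !is_symlink(&child) {
                walk_dir(&child, apply)?;
            }
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    walk_dir(Path::new("."), &|p: &Path| println!("{}", p.display()))
}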

src/uu/chown/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "uu_chown"
-version = "0.0.8"
+version = "0.0.13"
 authors = ["uutils developers"]
 license = "MIT"
 description = "chown ~ (uutils) change the ownership of FILE"
 homepage = "https://github.com/uutils/coreutils"
-repository = "https://github.com/uutils/coreutils/tree/master/src/uu/chown"
+repository = "https://github.com/uutils/coreutils/tree/main/src/uu/chown"
 keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
 categories = ["command-line-utilities"]
 edition = "2018"
@@ -15,9 +15,8 @@ edition = "2018"
 path = "src/chown.rs"

 [dependencies]
-clap = { version = "2.33", features = ["wrap_help"] }
-uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features=["entries", "fs", "perms"] }
-uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
+clap = { version = "3.1", features = ["wrap_help", "cargo"] }
+uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features=["entries", "fs", "perms"] }

 [[bin]]
 name = "chown"

src/uu/chown/LICENSE (new symbolic link)
@@ -0,0 +1 @@
../../../LICENSE

src/uu/chown/src/chown.rs
@@ -9,23 +9,21 @@
 use uucore::display::Quotable;
 pub use uucore::entries::{self, Group, Locate, Passwd};
+use uucore::format_usage;
 use uucore::perms::{chown_base, options, IfFrom};

 use uucore::error::{FromIo, UResult, USimpleError};

-use clap::{crate_version, App, Arg, ArgMatches};
+use clap::{crate_version, Arg, ArgMatches, Command};

 use std::fs;
 use std::os::unix::fs::MetadataExt;

 static ABOUT: &str = "change file owner and group";

-fn get_usage() -> String {
-    format!(
-        "{0} [OPTION]... [OWNER][:[GROUP]] FILE...\n{0} [OPTION]... --reference=RFILE FILE...",
-        uucore::execution_phrase()
-    )
-}
+const USAGE: &str = "\
+    {} [OPTION]... [OWNER][:[GROUP]] FILE...
+    {} [OPTION]... --reference=RFILE FILE...";

 fn parse_gid_uid_and_filter(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>, IfFrom)> {
     let filter = if let Some(spec) = matches.value_of(options::FROM) {
@@ -54,12 +52,10 @@ fn parse_gid_uid_and_filter(matches: &ArgMatches) -> UResult<(Option<u32>, Optio
     Ok((dest_gid, dest_uid, filter))
 }

-#[uucore_procs::gen_uumain]
+#[uucore::main]
 pub fn uumain(args: impl uucore::Args) -> UResult<()> {
-    let usage = get_usage();
-
     chown_base(
-        uu_app().usage(&usage[..]),
+        uu_app(),
         args,
         options::ARG_OWNER,
         parse_gid_uid_and_filter,
@@ -67,18 +63,25 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
     )
 }

-pub fn uu_app() -> App<'static, 'static> {
-    App::new(uucore::util_name())
+pub fn uu_app<'a>() -> Command<'a> {
+    Command::new(uucore::util_name())
         .version(crate_version!())
         .about(ABOUT)
+        .override_usage(format_usage(USAGE))
+        .infer_long_args(true)
         .arg(
-            Arg::with_name(options::verbosity::CHANGES)
-                .short("c")
+            Arg::new(options::HELP)
+                .long(options::HELP)
+                .help("Print help information."),
+        )
+        .arg(
+            Arg::new(options::verbosity::CHANGES)
+                .short('c')
                 .long(options::verbosity::CHANGES)
                 .help("like verbose but report only when a change is made"),
         )
         .arg(
-            Arg::with_name(options::dereference::DEREFERENCE)
+            Arg::new(options::dereference::DEREFERENCE)
                 .long(options::dereference::DEREFERENCE)
                 .help(
                     "affect the referent of each symbolic link (this is the default), \
@@ -86,8 +89,8 @@ pub fn uu_app() -> App<'static, 'static> {
                 ),
         )
         .arg(
-            Arg::with_name(options::dereference::NO_DEREFERENCE)
-                .short("h")
+            Arg::new(options::dereference::NO_DEREFERENCE)
+                .short('h')
                 .long(options::dereference::NO_DEREFERENCE)
                 .help(
                     "affect symbolic links instead of any referenced file \
@@ -95,7 +98,7 @@ pub fn uu_app() -> App<'static, 'static> {
                 ),
         )
         .arg(
-            Arg::with_name(options::FROM)
+            Arg::new(options::FROM)
                 .long(options::FROM)
                 .help(
                     "change the owner and/or group of each file only if its \
@@ -106,60 +109,60 @@ pub fn uu_app() -> App<'static, 'static> {
                 .value_name("CURRENT_OWNER:CURRENT_GROUP"),
         )
         .arg(
-            Arg::with_name(options::preserve_root::PRESERVE)
+            Arg::new(options::preserve_root::PRESERVE)
                 .long(options::preserve_root::PRESERVE)
                 .help("fail to operate recursively on '/'"),
         )
         .arg(
-            Arg::with_name(options::preserve_root::NO_PRESERVE)
+            Arg::new(options::preserve_root::NO_PRESERVE)
                 .long(options::preserve_root::NO_PRESERVE)
                 .help("do not treat '/' specially (the default)"),
         )
         .arg(
-            Arg::with_name(options::verbosity::QUIET)
+            Arg::new(options::verbosity::QUIET)
                 .long(options::verbosity::QUIET)
                 .help("suppress most error messages"),
         )
         .arg(
-            Arg::with_name(options::RECURSIVE)
-                .short("R")
+            Arg::new(options::RECURSIVE)
+                .short('R')
                 .long(options::RECURSIVE)
                 .help("operate on files and directories recursively"),
         )
         .arg(
-            Arg::with_name(options::REFERENCE)
+            Arg::new(options::REFERENCE)
                 .long(options::REFERENCE)
                 .help("use RFILE's owner and group rather than specifying OWNER:GROUP values")
                 .value_name("RFILE")
                 .min_values(1),
         )
         .arg(
-            Arg::with_name(options::verbosity::SILENT)
-                .short("f")
+            Arg::new(options::verbosity::SILENT)
+                .short('f')
                 .long(options::verbosity::SILENT),
         )
         .arg(
-            Arg::with_name(options::traverse::TRAVERSE)
-                .short(options::traverse::TRAVERSE)
+            Arg::new(options::traverse::TRAVERSE)
+                .short(options::traverse::TRAVERSE.chars().next().unwrap())
                 .help("if a command line argument is a symbolic link to a directory, traverse it")
                 .overrides_with_all(&[options::traverse::EVERY, options::traverse::NO_TRAVERSE]),
         )
         .arg(
-            Arg::with_name(options::traverse::EVERY)
-                .short(options::traverse::EVERY)
+            Arg::new(options::traverse::EVERY)
+                .short(options::traverse::EVERY.chars().next().unwrap())
                 .help("traverse every symbolic link to a directory encountered")
                 .overrides_with_all(&[options::traverse::TRAVERSE, options::traverse::NO_TRAVERSE]),
         )
         .arg(
-            Arg::with_name(options::traverse::NO_TRAVERSE)
-                .short(options::traverse::NO_TRAVERSE)
+            Arg::new(options::traverse::NO_TRAVERSE)
+                .short(options::traverse::NO_TRAVERSE.chars().next().unwrap())
                 .help("do not traverse any symbolic links (default)")
                 .overrides_with_all(&[options::traverse::TRAVERSE, options::traverse::EVERY]),
         )
         .arg(
-            Arg::with_name(options::verbosity::VERBOSE)
+            Arg::new(options::verbosity::VERBOSE)
                 .long(options::verbosity::VERBOSE)
-                .short("v")
+                .short('v')
                 .help("output a diagnostic for every file processed"),
         )
 }
@@ -183,7 +186,7 @@ fn parse_spec(spec: &str, sep: char) -> UResult<(Option<u32>, Option<u32>)> {

     let uid = if !user.is_empty() {
         Some(match Passwd::locate(user) {
-            Ok(u) => u.uid(), // We have been able to get the uid
+            Ok(u) => u.uid, // We have been able to get the uid
             Err(_) =>
             // we have NOT been able to find the uid
             // but we could be in the case where we have user.group
@@ -208,7 +211,7 @@ fn parse_spec(spec: &str, sep: char) -> UResult<(Option<u32>, Option<u32>)> {
         Some(
             Group::locate(group)
                 .map_err(|_| USimpleError::new(1, format!("invalid group: {}", spec.quote())))?
-                .gid(),
+                .gid,
         )
     } else {
         None

src/uu/chown/src/main.rs
@@ -1 +1 @@
-uucore_procs::main!(uu_chown);
+uucore::bin!(uu_chown);

src/uu/chroot/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "uu_chroot"
-version = "0.0.8"
+version = "0.0.13"
 authors = ["uutils developers"]
 license = "MIT"
 description = "chroot ~ (uutils) run COMMAND under a new root directory"
 homepage = "https://github.com/uutils/coreutils"
-repository = "https://github.com/uutils/coreutils/tree/master/src/uu/chroot"
+repository = "https://github.com/uutils/coreutils/tree/main/src/uu/chroot"
 keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
 categories = ["command-line-utilities"]
 edition = "2018"
@@ -15,9 +15,8 @@ edition = "2018"
 path = "src/chroot.rs"

 [dependencies]
-clap= "2.33"
-uucore = { version=">=0.0.10", package="uucore", path="../../uucore", features=["entries"] }
-uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
+clap = { version = "3.1", features = ["wrap_help", "cargo"] }
+uucore = { version=">=0.0.11", package="uucore", path="../../uucore", features=["entries"] }

 [[bin]]
 name = "chroot"

src/uu/chroot/LICENSE (new symbolic link)
@@ -0,0 +1 @@
../../../LICENSE

src/uu/chroot/src/chroot.rs
@@ -7,20 +7,20 @@
 // file that was distributed with this source code.

 // spell-checker:ignore (ToDO) NEWROOT Userspec pstatus

-#[macro_use]
-extern crate uucore;
-use clap::{crate_version, App, Arg};
+mod error;
+
+use crate::error::ChrootError;
+use clap::{crate_version, Arg, Command};
 use std::ffi::CString;
 use std::io::Error;
 use std::path::Path;
-use std::process::Command;
-use uucore::display::Quotable;
+use std::process;
+use uucore::error::{set_exit_code, UResult};
 use uucore::libc::{self, chroot, setgid, setgroups, setuid};
-use uucore::{entries, InvalidEncodingHandling};
+use uucore::{entries, format_usage, InvalidEncodingHandling};

 static ABOUT: &str = "Run COMMAND with root directory set to NEWROOT.";
-static SYNTAX: &str = "[OPTION]... NEWROOT [COMMAND [ARG]...]";
+static USAGE: &str = "{} [OPTION]... NEWROOT [COMMAND [ARG]...]";

 mod options {
     pub const NEWROOT: &str = "newroot";
@@ -31,7 +31,8 @@ mod options {
     pub const COMMAND: &str = "command";
 }

-pub fn uumain(args: impl uucore::Args) -> i32 {
+#[uucore::main]
+pub fn uumain(args: impl uucore::Args) -> UResult<()> {
     let args = args
         .collect_str(InvalidEncodingHandling::ConvertLossy)
         .accept_any();
@@ -44,19 +45,11 @@ pub fn uumain(args: impl uucore::Args) -> i32 {

     let newroot: &Path = match matches.value_of(options::NEWROOT) {
         Some(v) => Path::new(v),
-        None => crash!(
-            1,
-            "Missing operand: NEWROOT\nTry '{} --help' for more information.",
-            uucore::execution_phrase()
-        ),
+        None => return Err(ChrootError::MissingNewRoot.into()),
     };

     if !newroot.is_dir() {
-        crash!(
-            1,
-            "cannot change root directory to {}: no such directory",
-            newroot.quote()
-        );
+        return Err(ChrootError::NoSuchDirectory(format!("{}", newroot.display())).into());
     }

     let commands = match matches.values_of(options::COMMAND) {
@@ -82,65 +75,60 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
     let chroot_args = &command[1..];

     // NOTE: Tests can only trigger code beyond this point if they're invoked with root permissions
-    set_context(newroot, &matches);
+    set_context(newroot, &matches)?;

-    let pstatus = Command::new(chroot_command)
+    let pstatus = match process::Command::new(chroot_command)
         .args(chroot_args)
         .status()
-        .unwrap_or_else(|e| {
-            // TODO: Exit status:
-            // 125 if chroot itself fails
-            // 126 if command is found but cannot be invoked
-            // 127 if command cannot be found
-            crash!(
-                1,
-                "failed to run command {}: {}",
-                command[0].to_string().quote(),
-                e
-            )
-        });
+    {
+        Ok(status) => status,
+        Err(e) => return Err(ChrootError::CommandFailed(command[0].to_string(), e).into()),
+    };

-    if pstatus.success() {
+    let code = if pstatus.success() {
         0
     } else {
         pstatus.code().unwrap_or(-1)
-    }
+    };
+    set_exit_code(code);
+    Ok(())
 }

-pub fn uu_app() -> App<'static, 'static> {
-    App::new(uucore::util_name())
+pub fn uu_app<'a>() -> Command<'a> {
+    Command::new(uucore::util_name())
         .version(crate_version!())
         .about(ABOUT)
-        .usage(SYNTAX)
+        .override_usage(format_usage(USAGE))
+        .infer_long_args(true)
         .arg(
-            Arg::with_name(options::NEWROOT)
-                .hidden(true)
+            Arg::new(options::NEWROOT)
+                .hide(true)
                 .required(true)
                 .index(1),
         )
         .arg(
-            Arg::with_name(options::USER)
-                .short("u")
+            Arg::new(options::USER)
+                .short('u')
                 .long(options::USER)
                 .help("User (ID or name) to switch before running the program")
                 .value_name("USER"),
         )
         .arg(
-            Arg::with_name(options::GROUP)
-                .short("g")
+            Arg::new(options::GROUP)
+                .short('g')
                 .long(options::GROUP)
                 .help("Group (ID or name) to switch to")
                 .value_name("GROUP"),
         )
         .arg(
-            Arg::with_name(options::GROUPS)
-                .short("G")
+            Arg::new(options::GROUPS)
+                .short('G')
                 .long(options::GROUPS)
                 .help("Comma-separated list of groups to switch to")
                 .value_name("GROUP1,GROUP2..."),
         )
         .arg(
-            Arg::with_name(options::USERSPEC)
+            Arg::new(options::USERSPEC)
                 .long(options::USERSPEC)
                 .help(
                     "Colon-separated user and group to switch to. \
@@ -150,14 +138,14 @@ pub fn uu_app() -> App<'static, 'static> {
                 .value_name("USER:GROUP"),
         )
         .arg(
-            Arg::with_name(options::COMMAND)
-                .hidden(true)
-                .multiple(true)
+            Arg::new(options::COMMAND)
+                .hide(true)
+                .multiple_occurrences(true)
                 .index(2),
         )
 }

-fn set_context(root: &Path, options: &clap::ArgMatches) {
+fn set_context(root: &Path, options: &clap::ArgMatches) -> UResult<()> {
     let userspec_str = options.value_of(options::USERSPEC);
     let user_str = options.value_of(options::USER).unwrap_or_default();
     let group_str = options.value_of(options::GROUP).unwrap_or_default();
@@ -166,7 +154,7 @@ fn set_context(root: &Path, options: &clap::ArgMatches) {
         Some(u) => {
             let s: Vec<&str> = u.split(':').collect();
             if s.len() != 2 || s.iter().any(|&spec| spec.is_empty()) {
-                crash!(1, "invalid userspec: {}", u.quote())
+                return Err(ChrootError::InvalidUserspec(u.to_string()).into());
             };
             s
         }
@@ -179,83 +167,79 @@ fn set_context(root: &Path, options: &clap::ArgMatches) {
         (userspec[0], userspec[1])
     };

-    enter_chroot(root);
-    set_groups_from_str(groups_str);
-    set_main_group(group);
-    set_user(user);
+    enter_chroot(root)?;
+    set_groups_from_str(groups_str)?;
+    set_main_group(group)?;
+    set_user(user)?;
+    Ok(())
 }

-fn enter_chroot(root: &Path) {
+fn enter_chroot(root: &Path) -> UResult<()> {
     std::env::set_current_dir(root).unwrap();
     let err = unsafe {
         chroot(CString::new(".").unwrap().as_bytes_with_nul().as_ptr() as *const libc::c_char)
     };
-    if err != 0 {
-        crash!(
-            1,
-            "cannot chroot to {}: {}",
-            root.quote(),
-            Error::last_os_error()
-        )
-    };
-}
-
-fn set_main_group(group: &str) {
-    if !group.is_empty() {
-        let group_id = match entries::grp2gid(group) {
-            Ok(g) => g,
-            _ => crash!(1, "no such group: {}", group.maybe_quote()),
-        };
-        let err = unsafe { setgid(group_id) };
-        if err != 0 {
-            crash!(
-                1,
-                "cannot set gid to {}: {}",
-                group_id,
-                Error::last_os_error()
-            )
-        }
+    if err == 0 {
+        Ok(())
+    } else {
+        Err(ChrootError::CannotEnter(format!("{}", root.display()), Error::last_os_error()).into())
     }
 }

+fn set_main_group(group: &str) -> UResult<()> {
+    if !group.is_empty() {
+        let group_id = match entries::grp2gid(group) {
+            Ok(g) => g,
+            _ => return Err(ChrootError::NoSuchGroup(group.to_string()).into()),
+        };
+        let err = unsafe { setgid(group_id) };
+        if err != 0 {
+            return Err(
+                ChrootError::SetGidFailed(group_id.to_string(), Error::last_os_error()).into(),
+            );
+        }
+    }
+    Ok(())
+}
+
 #[cfg(any(target_vendor = "apple", target_os = "freebsd"))]
-fn set_groups(groups: Vec<libc::gid_t>) -> libc::c_int {
+fn set_groups(groups: &[libc::gid_t]) -> libc::c_int {
     unsafe { setgroups(groups.len() as libc::c_int, groups.as_ptr()) }
 }

 #[cfg(target_os = "linux")]
-fn set_groups(groups: Vec<libc::gid_t>) -> libc::c_int {
+fn set_groups(groups: &[libc::gid_t]) -> libc::c_int {
     unsafe { setgroups(groups.len() as libc::size_t, groups.as_ptr()) }
 }

-fn set_groups_from_str(groups: &str) {
+fn set_groups_from_str(groups: &str) -> UResult<()> {
     if !groups.is_empty() {
-        let groups_vec: Vec<libc::gid_t> = groups
-            .split(',')
-            .map(|x| match entries::grp2gid(x) {
-                Ok(g) => g,
-                _ => crash!(1, "no such group: {}", x),
-            })
-            .collect();
-        let err = set_groups(groups_vec);
+        let mut groups_vec = vec![];
+        for group in groups.split(',') {
+            let gid = match entries::grp2gid(group) {
+                Ok(g) => g,
+                Err(_) => return Err(ChrootError::NoSuchGroup(group.to_string()).into()),
+            };
+            groups_vec.push(gid);
+        }
+        let err = set_groups(&groups_vec);
         if err != 0 {
-            crash!(1, "cannot set groups: {}", Error::last_os_error())
+            return Err(ChrootError::SetGroupsFailed(Error::last_os_error()).into());
         }
     }
+    Ok(())
 }

-fn set_user(user: &str) {
+fn set_user(user: &str) -> UResult<()> {
     if !user.is_empty() {
         let user_id = entries::usr2uid(user).unwrap();
         let err = unsafe { setuid(user_id as libc::uid_t) };
         if err != 0 {
-            crash!(
-                1,
-                "cannot set user to {}: {}",
-                user.maybe_quote(),
-                Error::last_os_error()
-            )
+            return Err(
+                ChrootError::SetUserFailed(user.to_string(), Error::last_os_error()).into(),
+            );
         }
     }
+    Ok(())
 }

src/uu/chroot/src/error.rs (new file)
@@ -0,0 +1,81 @@
// * This file is part of the uutils coreutils package.
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.
// spell-checker:ignore NEWROOT Userspec userspec
//! Errors returned by chroot.
use std::fmt::Display;
use std::io::Error;
use uucore::display::Quotable;
use uucore::error::UError;
/// Errors that can happen while executing chroot.
#[derive(Debug)]
pub enum ChrootError {
/// Failed to enter the specified directory.
CannotEnter(String, Error),
/// Failed to execute the specified command.
CommandFailed(String, Error),
/// The given user and group specification was invalid.
InvalidUserspec(String),
/// The new root directory was not given.
MissingNewRoot,
/// Failed to find the specified group.
NoSuchGroup(String),
/// The given directory does not exist.
NoSuchDirectory(String),
/// The call to `setgid()` failed.
SetGidFailed(String, Error),
/// The call to `setgroups()` failed.
SetGroupsFailed(Error),
/// The call to `setuid()` failed.
SetUserFailed(String, Error),
}
impl std::error::Error for ChrootError {}
impl UError for ChrootError {
// TODO: Exit status:
// 125 if chroot itself fails
// 126 if command is found but cannot be invoked
// 127 if command cannot be found
fn code(&self) -> i32 {
1
}
}
impl Display for ChrootError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Self::CannotEnter(s, e) => write!(f, "cannot chroot to {}: {}", s.quote(), e,),
Self::CommandFailed(s, e) => {
write!(f, "failed to run command {}: {}", s.to_string().quote(), e,)
}
Self::InvalidUserspec(s) => write!(f, "invalid userspec: {}", s.quote(),),
Self::MissingNewRoot => write!(
f,
"Missing operand: NEWROOT\nTry '{} --help' for more information.",
uucore::execution_phrase(),
),
Self::NoSuchGroup(s) => write!(f, "no such group: {}", s.maybe_quote(),),
Self::NoSuchDirectory(s) => write!(
f,
"cannot change root directory to {}: no such directory",
s.quote(),
),
Self::SetGidFailed(s, e) => write!(f, "cannot set gid to {}: {}", s, e),
Self::SetGroupsFailed(e) => write!(f, "cannot set groups: {}", e),
Self::SetUserFailed(s, e) => {
write!(f, "cannot set user to {}: {}", s.maybe_quote(), e)
}
}
}
}
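The new module is the error pattern uucore steers the utilities towards: a plain enum that implements Display, std::error::Error, and UError, so call sites can write return Err(Variant.into()) and the generated main wrapper maps code() to the process exit status. A minimal sketch of the same shape, using a hypothetical DemoError; the exact uucore trait surface depends on the version pinned in this tree:

```rust
use std::fmt::Display;
use uucore::error::{UError, UResult};

/// Hypothetical error type mirroring the shape of ChrootError above.
#[derive(Debug)]
enum DemoError {
    NoSuchGroup(String),
}

impl std::error::Error for DemoError {}

impl UError for DemoError {
    // Reported as the process exit status by the uumain wrapper.
    fn code(&self) -> i32 {
        1
    }
}

impl Display for DemoError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::NoSuchGroup(g) => write!(f, "no such group: {}", g),
        }
    }
}

fn find_group(name: &str) -> UResult<u32> {
    // `.into()` boxes the concrete error, exactly as chroot does above.
    Err(DemoError::NoSuchGroup(name.to_string()).into())
}

fn main() {
    if let Err(e) = find_group("nonexistent") {
        eprintln!("{}", e);
        std::process::exit(e.code());
    }
}
```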

src/uu/chroot/src/main.rs
@@ -1 +1 @@
-uucore_procs::main!(uu_chroot);
+uucore::bin!(uu_chroot);

src/uu/cksum/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "uu_cksum"
-version = "0.0.8"
+version = "0.0.13"
 authors = ["uutils developers"]
 license = "MIT"
 description = "cksum ~ (uutils) display CRC and size of input"
 homepage = "https://github.com/uutils/coreutils"
-repository = "https://github.com/uutils/coreutils/tree/master/src/uu/cksum"
+repository = "https://github.com/uutils/coreutils/tree/main/src/uu/cksum"
 keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
 categories = ["command-line-utilities"]
 edition = "2018"
@@ -15,15 +15,9 @@ edition = "2018"
 path = "src/cksum.rs"

 [dependencies]
-clap = { version = "2.33", features = ["wrap_help"] }
-libc = "0.2.42"
-uucore = { version=">=0.0.10", package="uucore", path="../../uucore" }
-uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
+clap = { version = "3.1", features = ["wrap_help", "cargo"] }
+uucore = { version=">=0.0.11", package="uucore", path="../../uucore" }

 [[bin]]
 name = "cksum"
 path = "src/main.rs"
-
-[package.metadata.cargo-udeps.ignore]
-# Necessary for "make all"
-normal = ["uucore_procs"]

src/uu/cksum/LICENSE (new symbolic link)
@@ -0,0 +1 @@
../../../LICENSE

src/uu/cksum/src/cksum.rs
@@ -6,23 +6,21 @@
 // file that was distributed with this source code.

 // spell-checker:ignore (ToDO) fname

-#[macro_use]
-extern crate uucore;
-
-use clap::{crate_version, App, Arg};
+use clap::{crate_version, Arg, Command};
 use std::fs::File;
 use std::io::{self, stdin, BufReader, Read};
 use std::path::Path;
 use uucore::display::Quotable;
+use uucore::error::{FromIo, UResult};
 use uucore::InvalidEncodingHandling;
+use uucore::{format_usage, show};

 // NOTE: CRC_TABLE_LEN *must* be <= 256 as we cast 0..CRC_TABLE_LEN to u8
 const CRC_TABLE_LEN: usize = 256;
 const CRC_TABLE: [u32; CRC_TABLE_LEN] = generate_crc_table();
 const NAME: &str = "cksum";
-const SYNTAX: &str = "[OPTIONS] [FILE]...";
+const USAGE: &str = "{} [OPTIONS] [FILE]...";
 const SUMMARY: &str = "Print CRC and size for each file";

 const fn generate_crc_table() -> [u32; CRC_TABLE_LEN] {
@@ -82,27 +80,18 @@ fn cksum(fname: &str) -> io::Result<(u32, usize)> {
     let mut crc = 0u32;
     let mut size = 0usize;

-    let file;
     let mut rd: Box<dyn Read> = match fname {
         "-" => Box::new(stdin()),
         _ => {
-            let path = &Path::new(fname);
-            if path.is_dir() {
-                return Err(std::io::Error::new(
-                    io::ErrorKind::InvalidInput,
-                    "Is a directory",
-                ));
-            };
-            // Silent the warning as we want to the error message
-            #[allow(clippy::question_mark)]
-            if path.metadata().is_err() {
-                return Err(std::io::Error::new(
-                    io::ErrorKind::NotFound,
-                    "No such file or directory",
-                ));
-            };
-            file = File::open(&path)?;
-            Box::new(BufReader::new(file))
+            let p = Path::new(fname);
+
+            // Directories should not give an error, but should be interpreted
+            // as empty files to match GNU semantics.
+            if p.is_dir() {
+                Box::new(BufReader::new(io::empty())) as Box<dyn Read>
+            } else {
+                Box::new(BufReader::new(File::open(p)?)) as Box<dyn Read>
+            }
         }
     };
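The reader selection above stands on its own: "-" means stdin, a directory becomes an empty reader so it checksums like an empty file (the GNU behaviour the new comment mentions), and any other operand is opened as a regular file. A standalone sketch of the same pattern, not the crate's code:

```rust
use std::fs::File;
use std::io::{self, BufReader, Read};
use std::path::Path;

/// Pick an input source the way cksum does above.
fn open_input(fname: &str) -> io::Result<Box<dyn Read>> {
    let rd: Box<dyn Read> = match fname {
        "-" => Box::new(io::stdin()),
        _ => {
            let p = Path::new(fname);
            if p.is_dir() {
                // A directory is treated as an empty input rather than an error.
                Box::new(BufReader::new(io::empty())) as Box<dyn Read>
            } else {
                Box::new(BufReader::new(File::open(p)?)) as Box<dyn Read>
            }
        }
    };
    Ok(rd)
}

fn main() -> io::Result<()> {
    let mut rd = open_input("-")?;
    let mut buf = Vec::new();
    rd.read_to_end(&mut buf)?;
    println!("{} bytes read", buf.len());
    Ok(())
}
```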
@@ -123,7 +112,8 @@ mod options {
     pub static FILE: &str = "file";
 }

-pub fn uumain(args: impl uucore::Args) -> i32 {
+#[uucore::main]
+pub fn uumain(args: impl uucore::Args) -> UResult<()> {
     let args = args
         .collect_str(InvalidEncodingHandling::Ignore)
         .accept_any();
@@ -136,35 +126,30 @@ pub fn uumain(args: impl uucore::Args) -> i32 {
     };

     if files.is_empty() {
-        match cksum("-") {
-            Ok((crc, size)) => println!("{} {}", crc, size),
-            Err(err) => {
-                show_error!("-: {}", err);
-                return 2;
-            }
-        }
-        return 0;
+        let (crc, size) = cksum("-")?;
+        println!("{} {}", crc, size);
+        return Ok(());
     }

-    let mut exit_code = 0;
     for fname in &files {
-        match cksum(fname.as_ref()) {
+        match cksum(fname.as_ref()).map_err_context(|| format!("{}", fname.maybe_quote())) {
             Ok((crc, size)) => println!("{} {} {}", crc, size, fname),
-            Err(err) => {
-                show_error!("{}: {}", fname.maybe_quote(), err);
-                exit_code = 2;
-            }
-        }
+            Err(err) => show!(err),
+        };
     }
-
-    exit_code
+    Ok(())
 }

-pub fn uu_app() -> App<'static, 'static> {
-    App::new(uucore::util_name())
+pub fn uu_app<'a>() -> Command<'a> {
+    Command::new(uucore::util_name())
         .name(NAME)
         .version(crate_version!())
         .about(SUMMARY)
-        .usage(SYNTAX)
-        .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))
+        .override_usage(format_usage(USAGE))
+        .infer_long_args(true)
+        .arg(
+            Arg::new(options::FILE)
+                .hide(true)
+                .multiple_occurrences(true),
+        )
 }

src/uu/cksum/src/main.rs
@@ -1 +1 @@
-uucore_procs::main!(uu_cksum);
+uucore::bin!(uu_cksum);

src/uu/comm/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "uu_comm"
-version = "0.0.8"
+version = "0.0.13"
 authors = ["uutils developers"]
 license = "MIT"
 description = "comm ~ (uutils) compare sorted inputs"
 homepage = "https://github.com/uutils/coreutils"
-repository = "https://github.com/uutils/coreutils/tree/master/src/uu/comm"
+repository = "https://github.com/uutils/coreutils/tree/main/src/uu/comm"
 keywords = ["coreutils", "uutils", "cross-platform", "cli", "utility"]
 categories = ["command-line-utilities"]
 edition = "2018"
@@ -15,15 +15,9 @@ edition = "2018"
 path = "src/comm.rs"

 [dependencies]
-clap = { version = "2.33", features = ["wrap_help"] }
-libc = "0.2.42"
-uucore = { version=">=0.0.10", package="uucore", path="../../uucore" }
-uucore_procs = { version=">=0.0.7", package="uucore_procs", path="../../uucore_procs" }
+clap = { version = "3.1", features = ["wrap_help", "cargo"] }
+uucore = { version=">=0.0.11", package="uucore", path="../../uucore" }

 [[bin]]
 name = "comm"
 path = "src/main.rs"
-
-[package.metadata.cargo-udeps.ignore]
-# Necessary for "make all"
-normal = ["uucore_procs"]

Some files were not shown because too many files have changed in this diff.