mirror of
https://github.com/RGBCube/uutils-coreutils
synced 2025-07-27 11:07:44 +00:00
Rewrite analyze-gnu-results in python
This time just analyzing the json result file
This commit is contained in:
parent
334e29054e
commit
cbca62866d
2 changed files with 69 additions and 79 deletions
69
util/analyze-gnu-results.py
Normal file
69
util/analyze-gnu-results.py
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def analyze_test_results(json_data):
    """Aggregate GNU testsuite results from a parsed JSON mapping.

    Args:
        json_data: Mapping of utility name -> {test name -> result string},
            where a result is one of "PASS", "FAIL", "SKIP" or "ERROR".

    Returns:
        dict with keys "TOTAL", "PASS", "FAIL", "SKIP" and "ERROR" holding
        the corresponding counts summed across all utilities.
    """
    total_tests = 0
    pass_count = 0
    fail_count = 0
    skip_count = 0
    error_count = 0

    # Tally each utility's tests; keys (utility/test names) are not needed.
    for tests in json_data.values():
        for result in tests.values():
            total_tests += 1
            if result == "PASS":
                pass_count += 1
            elif result == "FAIL":
                fail_count += 1
            elif result == "SKIP":
                skip_count += 1
            elif result == "ERROR":
                # Fix: "ERROR" results were previously counted only in the
                # total and silently dropped from every category.
                error_count += 1

    return {
        "TOTAL": total_tests,
        "PASS": pass_count,
        "FAIL": fail_count,
        "SKIP": skip_count,
        "ERROR": error_count,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: read a results JSON file and print `export` lines."""
    # Exactly one argument — the JSON results file — is required.
    if len(sys.argv) != 2:
        print("Usage: python script.py <json_file>")
        sys.exit(1)

    json_file = sys.argv[1]

    try:
        # Load and parse the results file.
        with open(json_file, "r") as fp:
            parsed = json.load(fp)

        stats = analyze_test_results(parsed)

        # Emit shell `export` statements so a caller can `eval` our output.
        for key in ("TOTAL", "PASS", "SKIP", "FAIL", "ERROR"):
            print(f"export {key}={stats[key]}")
    except FileNotFoundError:
        print(f"Error: File '{json_file}' not found.", file=sys.stderr)
        sys.exit(1)
    except json.JSONDecodeError:
        print(f"Error: '{json_file}' is not a valid JSON", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
|
|
@ -1,79 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
# spell-checker:ignore xpass XPASS testsuite
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# As we do two builds (with and without root), we need to do some trivial maths
|
|
||||||
# to present the merge results
|
|
||||||
# this script will export the values in the term
|
|
||||||
|
|
||||||
# Require exactly the two testsuite log files; bail out otherwise.
if test $# -ne 2; then
    echo "syntax:"
    echo "$0 testsuite.log root-testsuite.log"
    # Fix: previously execution fell through here and continued with
    # unset/empty log paths instead of stopping.
    exit 1
fi
|
|
||||||
|
|
||||||
SUITE_LOG_FILE=$1
ROOT_SUITE_LOG_FILE=$2

# Both testsuite logs must exist before we start parsing them.
for log in "${SUITE_LOG_FILE}" "${ROOT_SUITE_LOG_FILE}"; do
    if test ! -f "${log}"; then
        echo "${log} has not been found"
        exit 1
    fi
done
|
|
||||||
|
|
||||||
function get_total {
    # Total number of tests executed. The non-root run already reports them
    # all, because root-only tests are merely skipped there.
    local count
    count=$(sed -n "s/.*# TOTAL: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    echo $count
}
|
|
||||||
|
|
||||||
function get_pass {
    # Tests needing root are SKIP in the normal run, so the two PASS
    # counters are disjoint and can simply be added together.
    local user_passes root_passes
    user_passes=$(sed -n "s/.*# PASS: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    root_passes=$(sed -n "s/.*# PASS: \(.*\)/\1/p" "${ROOT_SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    echo $((user_passes + root_passes))
}
|
|
||||||
|
|
||||||
function get_skip {
    # Some tests stay SKIP even when run as root (ex: selinux), so:
    #   skips as user - total tests run as root + skips as root
    local root_total user_skips root_skips
    root_total=$(sed -n "s/.*# TOTAL: \(.*\)/\1/p" "${ROOT_SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    user_skips=$(sed -n "s/.*# SKIP: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    root_skips=$(sed -n "s/.*# SKIP: \(.*\)/\1/p" "${ROOT_SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    echo $((user_skips - root_total + root_skips))
}
|
|
||||||
|
|
||||||
function get_fail {
    # Failures of both runs added up (tests that used to be SKIP as user
    # may now FAIL as root — progress, since they actually execute).
    local user_fails root_fails
    user_fails=$(sed -n "s/.*# FAIL: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    root_fails=$(sed -n "s/.*# FAIL: \(.*\)/\1/p" "${ROOT_SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    echo $((user_fails + root_fails))
}
|
|
||||||
|
|
||||||
function get_xpass {
    # XPASS is only taken from the non-root run.
    local count
    count=$(sed -n "s/.*# XPASS: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    echo $count
}
|
|
||||||
|
|
||||||
function get_error {
    # Errors of both runs added up (tests that used to be SKIP as user may
    # now ERROR as root, since they actually execute).
    NON_ROOT=$(sed -n "s/.*# ERROR: \(.*\)/\1/p" "${SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    # Fix: the root-log pattern used "# ERROR::" (double colon), so it
    # never matched and root errors were silently counted as zero.
    AS_ROOT=$(sed -n "s/.*# ERROR: \(.*\)/\1/p" "${ROOT_SUITE_LOG_FILE}" | tr -d '\r' | head -n1)
    echo $((NON_ROOT + AS_ROOT))
}
|
|
||||||
|
|
||||||
# Publish the merged counters for the calling environment. The helper
# functions' return codes are deliberately ignored (export masks them).
# shellcheck disable=SC2155
{
    export TOTAL="$(get_total)"
    export PASS="$(get_pass)"
    export SKIP="$(get_skip)"
    export FAIL="$(get_fail)"
    export XPASS="$(get_xpass)"
    export ERROR="$(get_error)"
}
|
|
Loading…
Add table
Add a link
Reference in a new issue