From af60c740e3e8012b624fa94851c73da8d1911357 Mon Sep 17 00:00:00 2001 From: Shannon Booth Date: Sat, 22 Jul 2023 19:39:31 +1200 Subject: [PATCH] LibTest: Add support for xfail JavaScriptTestRunner tests This is meant to be used in a similar manner to skipping tests, with the extra advantage that if the test begins passing unexpectedly, the test will fail. Being notified of unexpected passes allows for the test to be updated to the correct expectation. --- Userland/Libraries/LibTest/JavaScriptTestRunner.h | 11 +++++++++++ Userland/Libraries/LibTest/Results.h | 3 +++ Userland/Libraries/LibTest/TestRunner.h | 14 ++++++++++++-- Userland/Utilities/run-tests.cpp | 8 ++++++-- 4 files changed, 32 insertions(+), 4 deletions(-) diff --git a/Userland/Libraries/LibTest/JavaScriptTestRunner.h b/Userland/Libraries/LibTest/JavaScriptTestRunner.h index 7895957ed1..e8d8d01f3e 100644 --- a/Userland/Libraries/LibTest/JavaScriptTestRunner.h +++ b/Userland/Libraries/LibTest/JavaScriptTestRunner.h @@ -3,6 +3,7 @@ * Copyright (c) 2020-2022, Linus Groh * Copyright (c) 2021, Ali Mohammad Pur * Copyright (c) 2021, Andreas Kling + * Copyright (c) 2023, Shannon Booth * * SPDX-License-Identifier: BSD-2-Clause */ @@ -425,6 +426,11 @@ inline JSFileResult TestRunner::run_file_test(DeprecatedString const& test_path) auto details = test_value.as_object().get_deprecated_string("details"sv); VERIFY(result.has_value()); test.details = details.value(); + } else if (result_string == "xfail") { + test.result = Test::Result::ExpectedFail; + m_counts.tests_expected_failed++; + if (suite.most_severe_test_result != Test::Result::Fail) + suite.most_severe_test_result = Test::Result::ExpectedFail; } else { test.result = Test::Result::Skip; if (suite.most_severe_test_result == Test::Result::Pass) @@ -443,6 +449,8 @@ inline JSFileResult TestRunner::run_file_test(DeprecatedString const& test_path) } else { if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == 
Test::Result::Pass) file_result.most_severe_test_result = Test::Result::Skip; + else if (suite.most_severe_test_result == Test::Result::ExpectedFail && (file_result.most_severe_test_result == Test::Result::Pass || file_result.most_severe_test_result == Test::Result::Skip)) + file_result.most_severe_test_result = Test::Result::ExpectedFail; m_counts.suites_passed++; } @@ -605,6 +613,9 @@ inline void TestRunner::print_file_result(JSFileResult const& file_result) const print_modifiers({ CLEAR, FG_RED }); outln("{} (failed):", test.name); outln(" {}", test.details); + } else if (test.result == Test::Result::ExpectedFail) { + print_modifiers({ CLEAR, FG_ORANGE }); + outln("{} (expected fail)", test.name); } else { print_modifiers({ CLEAR, FG_ORANGE }); outln("{} (skipped)", test.name); diff --git a/Userland/Libraries/LibTest/Results.h b/Userland/Libraries/LibTest/Results.h index 3449881df0..04ff756953 100644 --- a/Userland/Libraries/LibTest/Results.h +++ b/Userland/Libraries/LibTest/Results.h @@ -2,6 +2,7 @@ * Copyright (c) 2020, Matthew Olsson * Copyright (c) 2020-2021, Linus Groh * Copyright (c) 2021, Brian Gianforcaro + * Copyright (c) 2023, Shannon Booth * * SPDX-License-Identifier: BSD-2-Clause */ @@ -17,6 +18,7 @@ enum class Result { Pass, Fail, Skip, + ExpectedFail, Crashed, }; @@ -42,6 +44,7 @@ struct Counts { unsigned tests_failed { 0 }; unsigned tests_passed { 0 }; unsigned tests_skipped { 0 }; + unsigned tests_expected_failed { 0 }; unsigned suites_failed { 0 }; unsigned suites_passed { 0 }; unsigned files_total { 0 }; diff --git a/Userland/Libraries/LibTest/TestRunner.h b/Userland/Libraries/LibTest/TestRunner.h index 6dab664ab7..a4c0b60634 100644 --- a/Userland/Libraries/LibTest/TestRunner.h +++ b/Userland/Libraries/LibTest/TestRunner.h @@ -3,6 +3,7 @@ * Copyright (c) 2020-2021, Linus Groh * Copyright (c) 2021, Ali Mohammad Pur * Copyright (c) 2021, Andreas Kling + * Copyright (c) 2023, Shannon Booth * * SPDX-License-Identifier: BSD-2-Clause */ @@ -184,12 
+185,17 @@ inline void TestRunner::print_test_results() const out("{} skipped, ", m_counts.tests_skipped); print_modifiers({ CLEAR }); } + if (m_counts.tests_expected_failed) { + print_modifiers({ FG_ORANGE }); + out("{} expected failed, ", m_counts.tests_expected_failed); + print_modifiers({ CLEAR }); + } if (m_counts.tests_passed) { print_modifiers({ FG_GREEN }); out("{} passed, ", m_counts.tests_passed); print_modifiers({ CLEAR }); } - outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed); + outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed + m_counts.tests_expected_failed); outln("Files: {} total", m_counts.files_total); @@ -227,6 +233,9 @@ inline void TestRunner::print_test_results_as_json() const case Result::Skip: result_name = "SKIPPED"sv; break; + case Result::ExpectedFail: + result_name = "XFAIL"sv; + break; case Result::Crashed: result_name = "PROCESS_ERROR"sv; break; @@ -254,7 +263,8 @@ inline void TestRunner::print_test_results_as_json() const tests.set("failed", m_counts.tests_failed); tests.set("passed", m_counts.tests_passed); tests.set("skipped", m_counts.tests_skipped); - tests.set("total", m_counts.tests_failed + m_counts.tests_passed + m_counts.tests_skipped); + tests.set("xfail", m_counts.tests_expected_failed); + tests.set("total", m_counts.tests_failed + m_counts.tests_passed + m_counts.tests_skipped + m_counts.tests_expected_failed); JsonObject results; results.set("suites", suites); diff --git a/Userland/Utilities/run-tests.cpp b/Userland/Utilities/run-tests.cpp index d753fec3e3..f208ed846b 100644 --- a/Userland/Utilities/run-tests.cpp +++ b/Userland/Utilities/run-tests.cpp @@ -120,9 +120,12 @@ void TestRunner::do_run_single_test(DeprecatedString const& test_path, size_t cu case Test::Result::Pass: ++m_counts.tests_passed; break; + case Test::Result::ExpectedFail: + ++m_counts.tests_expected_failed; + break; case Test::Result::Skip: 
++m_counts.tests_skipped; break; case Test::Result::Fail: ++m_counts.tests_failed; break; @@ -196,7 +199,8 @@ void TestRunner::do_run_single_test(DeprecatedString const& test_path, size_t cu outln("{} ({})", test_result.file_path.basename(), test_result.result == Test::Result::Fail ? "failed" : "crashed"); } else { print_modifiers({ Test::CLEAR, Test::FG_ORANGE }); - outln("{} (skipped)", test_result.file_path.basename()); + auto const status = test_result.result == Test::Result::Skip ? "skipped"sv : "expected fail"sv; + outln("{} ({})", test_result.file_path.basename(), status); } print_modifiers({ Test::CLEAR }); }