1
Fork 0
mirror of https://github.com/RGBCube/serenity synced 2025-07-26 03:37:43 +00:00

LibTest: Add support for xfail JavaScriptTestRunner tests

This is meant to be used in a similar manner to skipping tests, with the
extra advantage that if the test begins passing unexpectedly, the test
will fail.

Being notified of unexpected passes allows for the test to be updated to
the correct expectation.
This commit is contained in:
Shannon Booth 2023-07-22 19:39:31 +12:00 committed by Andreas Kling
parent 2c06ad3a05
commit af60c740e3
4 changed files with 32 additions and 4 deletions

View file

@@ -3,6 +3,7 @@
* Copyright (c) 2020-2022, Linus Groh <linusg@serenityos.org> * Copyright (c) 2020-2022, Linus Groh <linusg@serenityos.org>
* Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org> * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org> * Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
* *
* SPDX-License-Identifier: BSD-2-Clause * SPDX-License-Identifier: BSD-2-Clause
*/ */
@@ -425,6 +426,11 @@ inline JSFileResult TestRunner::run_file_test(DeprecatedString const& test_path)
auto details = test_value.as_object().get_deprecated_string("details"sv); auto details = test_value.as_object().get_deprecated_string("details"sv);
VERIFY(result.has_value()); VERIFY(result.has_value());
test.details = details.value(); test.details = details.value();
} else if (result_string == "xfail") {
test.result = Test::Result::ExpectedFail;
m_counts.tests_expected_failed++;
if (suite.most_severe_test_result != Test::Result::Fail)
suite.most_severe_test_result = Test::Result::ExpectedFail;
} else { } else {
test.result = Test::Result::Skip; test.result = Test::Result::Skip;
if (suite.most_severe_test_result == Test::Result::Pass) if (suite.most_severe_test_result == Test::Result::Pass)
@@ -443,6 +449,8 @@ inline JSFileResult TestRunner::run_file_test(DeprecatedString const& test_path)
} else { } else {
if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass) if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass)
file_result.most_severe_test_result = Test::Result::Skip; file_result.most_severe_test_result = Test::Result::Skip;
else if (suite.most_severe_test_result == Test::Result::ExpectedFail && (file_result.most_severe_test_result == Test::Result::Pass || file_result.most_severe_test_result == Test::Result::Skip))
file_result.most_severe_test_result = Test::Result::ExpectedFail;
m_counts.suites_passed++; m_counts.suites_passed++;
} }
@@ -605,6 +613,9 @@ inline void TestRunner::print_file_result(JSFileResult const& file_result) const
print_modifiers({ CLEAR, FG_RED }); print_modifiers({ CLEAR, FG_RED });
outln("{} (failed):", test.name); outln("{} (failed):", test.name);
outln(" {}", test.details); outln(" {}", test.details);
} else if (test.result == Test::Result::ExpectedFail) {
print_modifiers({ CLEAR, FG_ORANGE });
outln("{} (expected fail)", test.name);
} else { } else {
print_modifiers({ CLEAR, FG_ORANGE }); print_modifiers({ CLEAR, FG_ORANGE });
outln("{} (skipped)", test.name); outln("{} (skipped)", test.name);

View file

@@ -2,6 +2,7 @@
* Copyright (c) 2020, Matthew Olsson <mattco@serenityos.org> * Copyright (c) 2020, Matthew Olsson <mattco@serenityos.org>
* Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org> * Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org>
* Copyright (c) 2021, Brian Gianforcaro <bgianf@serenityos.org> * Copyright (c) 2021, Brian Gianforcaro <bgianf@serenityos.org>
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
* *
* SPDX-License-Identifier: BSD-2-Clause * SPDX-License-Identifier: BSD-2-Clause
*/ */
@@ -17,6 +18,7 @@ enum class Result {
Pass, Pass,
Fail, Fail,
Skip, Skip,
ExpectedFail,
Crashed, Crashed,
}; };
@@ -42,6 +44,7 @@ struct Counts {
unsigned tests_failed { 0 }; unsigned tests_failed { 0 };
unsigned tests_passed { 0 }; unsigned tests_passed { 0 };
unsigned tests_skipped { 0 }; unsigned tests_skipped { 0 };
unsigned tests_expected_failed { 0 };
unsigned suites_failed { 0 }; unsigned suites_failed { 0 };
unsigned suites_passed { 0 }; unsigned suites_passed { 0 };
unsigned files_total { 0 }; unsigned files_total { 0 };

View file

@@ -3,6 +3,7 @@
* Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org> * Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org>
* Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org> * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org> * Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
* *
* SPDX-License-Identifier: BSD-2-Clause * SPDX-License-Identifier: BSD-2-Clause
*/ */
@@ -184,12 +185,17 @@ inline void TestRunner::print_test_results() const
out("{} skipped, ", m_counts.tests_skipped); out("{} skipped, ", m_counts.tests_skipped);
print_modifiers({ CLEAR }); print_modifiers({ CLEAR });
} }
if (m_counts.tests_expected_failed) {
print_modifiers({ FG_ORANGE });
out("{} expected failed, ", m_counts.tests_expected_failed);
print_modifiers({ CLEAR });
}
if (m_counts.tests_passed) { if (m_counts.tests_passed) {
print_modifiers({ FG_GREEN }); print_modifiers({ FG_GREEN });
out("{} passed, ", m_counts.tests_passed); out("{} passed, ", m_counts.tests_passed);
print_modifiers({ CLEAR }); print_modifiers({ CLEAR });
} }
outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed); outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed + m_counts.tests_expected_failed);
outln("Files: {} total", m_counts.files_total); outln("Files: {} total", m_counts.files_total);
@@ -227,6 +233,9 @@ inline void TestRunner::print_test_results_as_json() const
case Result::Skip: case Result::Skip:
result_name = "SKIPPED"sv; result_name = "SKIPPED"sv;
break; break;
case Result::ExpectedFail:
result_name = "XFAIL"sv;
break;
case Result::Crashed: case Result::Crashed:
result_name = "PROCESS_ERROR"sv; result_name = "PROCESS_ERROR"sv;
break; break;
@@ -254,7 +263,8 @@ inline void TestRunner::print_test_results_as_json() const
tests.set("failed", m_counts.tests_failed); tests.set("failed", m_counts.tests_failed);
tests.set("passed", m_counts.tests_passed); tests.set("passed", m_counts.tests_passed);
tests.set("skipped", m_counts.tests_skipped); tests.set("skipped", m_counts.tests_skipped);
tests.set("total", m_counts.tests_failed + m_counts.tests_passed + m_counts.tests_skipped); tests.set("xfail", m_counts.tests_expected_failed);
tests.set("total", m_counts.tests_failed + m_counts.tests_passed + m_counts.tests_skipped + m_counts.tests_expected_failed);
JsonObject results; JsonObject results;
results.set("suites", suites); results.set("suites", suites);

View file

@@ -120,8 +120,11 @@ void TestRunner::do_run_single_test(DeprecatedString const& test_path, size_t cu
case Test::Result::Pass: case Test::Result::Pass:
++m_counts.tests_passed; ++m_counts.tests_passed;
break; break;
case Test::Result::ExpectedFail:
++m_counts.tests_expected_failed;
break;
case Test::Result::Skip: case Test::Result::Skip:
++m_counts.tests_skipped; ++m_counts.tests_skipped;
break; break;
case Test::Result::Fail: case Test::Result::Fail:
++m_counts.tests_failed; ++m_counts.tests_failed;
@@ -196,7 +199,8 @@ void TestRunner::do_run_single_test(DeprecatedString const& test_path, size_t cu
outln("{} ({})", test_result.file_path.basename(), test_result.result == Test::Result::Fail ? "failed" : "crashed"); outln("{} ({})", test_result.file_path.basename(), test_result.result == Test::Result::Fail ? "failed" : "crashed");
} else { } else {
print_modifiers({ Test::CLEAR, Test::FG_ORANGE }); print_modifiers({ Test::CLEAR, Test::FG_ORANGE });
outln("{} (skipped)", test_result.file_path.basename()); auto const status = test_result.result == Test::Result::Skip ? "skipped"sv : "expected fail"sv;
outln("{} ({})", test_result.file_path.basename(), status);
} }
print_modifiers({ Test::CLEAR }); print_modifiers({ Test::CLEAR });
} }