mirror of
https://github.com/RGBCube/serenity
synced 2025-05-25 18:55:08 +00:00
LibTest + test-js: Add initial skeleton of LibTest and migrate code there.
The test-js reporter is arguably the nicest test runner / reporter that exists in the serenity code base. Toward the goal of leveling up all the other unit test environments, start a new LibTest library so that we can share code and reporting utilities, making all the test systems look and behave similarly.
This commit is contained in:
parent
0b5ca28475
commit
0a49877fdc
4 changed files with 90 additions and 50 deletions
|
@ -40,6 +40,7 @@
|
|||
#include <LibJS/Runtime/Array.h>
|
||||
#include <LibJS/Runtime/GlobalObject.h>
|
||||
#include <LibJS/Runtime/JSONObject.h>
|
||||
#include <LibTest/Results.h>
|
||||
#include <signal.h>
|
||||
#include <stdlib.h>
|
||||
#include <sys/time.h>
|
||||
|
@ -51,26 +52,6 @@ RefPtr<JS::VM> vm;
|
|||
static bool collect_on_every_allocation = false;
|
||||
static String currently_running_test;
|
||||
|
||||
// Outcome of a single test case. Severity ordering (used when rolling
// results up into a suite/file result): Fail > Skip > Pass.
enum class TestResult {
|
||||
Pass, // test ran and succeeded
|
||||
Fail, // test ran and failed
|
||||
Skip, // test was deliberately not run
|
||||
};
|
||||
|
||||
// Result record for one individual test case reported by the JS test harness.
struct JSTest {
|
||||
String name; // test name as reported in the harness JSON output
|
||||
TestResult result; // pass/fail/skip outcome for this test
|
||||
String details; // failure message; set when result is Fail
|
||||
};
|
||||
|
||||
// Aggregated results for one test suite (a named group of JSTests).
struct JSSuite {
|
||||
String name; // suite name from the harness JSON output
|
||||
// A failed test takes precedence over a skipped test, which both have
|
||||
// precedence over a passed test
|
||||
TestResult most_severe_test_result { TestResult::Pass };
|
||||
Vector<JSTest> tests {}; // individual test results belonging to this suite
|
||||
};
|
||||
|
||||
struct ParserError {
|
||||
JS::Parser::Error error;
|
||||
String hint;
|
||||
|
@ -82,8 +63,8 @@ struct JSFileResult {
|
|||
double time_taken { 0 };
|
||||
// A failed test takes precedence over a skipped test, which both have
|
||||
// precedence over a passed test
|
||||
TestResult most_severe_test_result { TestResult::Pass };
|
||||
Vector<JSSuite> suites {};
|
||||
Test::Result most_severe_test_result { Test::Result::Pass };
|
||||
Vector<Test::Suite> suites {};
|
||||
Vector<String> logged_messages {};
|
||||
};
|
||||
|
||||
|
@ -332,12 +313,12 @@ JSFileResult TestRunner::run_file_test(const String& test_path)
|
|||
}
|
||||
|
||||
test_json.value().as_object().for_each_member([&](const String& suite_name, const JsonValue& suite_value) {
|
||||
JSSuite suite { suite_name };
|
||||
Test::Suite suite { suite_name };
|
||||
|
||||
VERIFY(suite_value.is_object());
|
||||
|
||||
suite_value.as_object().for_each_member([&](const String& test_name, const JsonValue& test_value) {
|
||||
JSTest test { test_name, TestResult::Fail, "" };
|
||||
Test::Case test { test_name, Test::Result::Fail, "" };
|
||||
|
||||
VERIFY(test_value.is_object());
|
||||
VERIFY(test_value.as_object().has("result"));
|
||||
|
@ -346,32 +327,32 @@ JSFileResult TestRunner::run_file_test(const String& test_path)
|
|||
VERIFY(result.is_string());
|
||||
auto result_string = result.as_string();
|
||||
if (result_string == "pass") {
|
||||
test.result = TestResult::Pass;
|
||||
test.result = Test::Result::Pass;
|
||||
m_counts.tests_passed++;
|
||||
} else if (result_string == "fail") {
|
||||
test.result = TestResult::Fail;
|
||||
test.result = Test::Result::Fail;
|
||||
m_counts.tests_failed++;
|
||||
suite.most_severe_test_result = TestResult::Fail;
|
||||
suite.most_severe_test_result = Test::Result::Fail;
|
||||
VERIFY(test_value.as_object().has("details"));
|
||||
auto details = test_value.as_object().get("details");
|
||||
VERIFY(result.is_string());
|
||||
test.details = details.as_string();
|
||||
} else {
|
||||
test.result = TestResult::Skip;
|
||||
if (suite.most_severe_test_result == TestResult::Pass)
|
||||
suite.most_severe_test_result = TestResult::Skip;
|
||||
test.result = Test::Result::Skip;
|
||||
if (suite.most_severe_test_result == Test::Result::Pass)
|
||||
suite.most_severe_test_result = Test::Result::Skip;
|
||||
m_counts.tests_skipped++;
|
||||
}
|
||||
|
||||
suite.tests.append(test);
|
||||
});
|
||||
|
||||
if (suite.most_severe_test_result == TestResult::Fail) {
|
||||
if (suite.most_severe_test_result == Test::Result::Fail) {
|
||||
m_counts.suites_failed++;
|
||||
file_result.most_severe_test_result = TestResult::Fail;
|
||||
file_result.most_severe_test_result = Test::Result::Fail;
|
||||
} else {
|
||||
if (suite.most_severe_test_result == TestResult::Skip && file_result.most_severe_test_result == TestResult::Pass)
|
||||
file_result.most_severe_test_result = TestResult::Skip;
|
||||
if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass)
|
||||
file_result.most_severe_test_result = Test::Result::Skip;
|
||||
m_counts.suites_passed++;
|
||||
}
|
||||
|
||||
|
@ -433,12 +414,12 @@ static void print_modifiers(Vector<Modifier> modifiers)
|
|||
|
||||
void TestRunner::print_file_result(const JSFileResult& file_result) const
|
||||
{
|
||||
if (file_result.most_severe_test_result == TestResult::Fail || file_result.error.has_value()) {
|
||||
if (file_result.most_severe_test_result == Test::Result::Fail || file_result.error.has_value()) {
|
||||
print_modifiers({ BG_RED, FG_BLACK, FG_BOLD });
|
||||
out(" FAIL ");
|
||||
print_modifiers({ CLEAR });
|
||||
} else {
|
||||
if (m_print_times || file_result.most_severe_test_result != TestResult::Pass) {
|
||||
if (m_print_times || file_result.most_severe_test_result != Test::Result::Pass) {
|
||||
print_modifiers({ BG_GREEN, FG_BLACK, FG_BOLD });
|
||||
out(" PASS ");
|
||||
print_modifiers({ CLEAR });
|
||||
|
@ -495,12 +476,12 @@ void TestRunner::print_file_result(const JSFileResult& file_result) const
|
|||
return;
|
||||
}
|
||||
|
||||
if (file_result.most_severe_test_result != TestResult::Pass) {
|
||||
if (file_result.most_severe_test_result != Test::Result::Pass) {
|
||||
for (auto& suite : file_result.suites) {
|
||||
if (suite.most_severe_test_result == TestResult::Pass)
|
||||
if (suite.most_severe_test_result == Test::Result::Pass)
|
||||
continue;
|
||||
|
||||
bool failed = suite.most_severe_test_result == TestResult::Fail;
|
||||
bool failed = suite.most_severe_test_result == Test::Result::Fail;
|
||||
|
||||
print_modifiers({ FG_GRAY, FG_BOLD });
|
||||
|
||||
|
@ -530,12 +511,12 @@ void TestRunner::print_file_result(const JSFileResult& file_result) const
|
|||
print_modifiers({ CLEAR });
|
||||
|
||||
for (auto& test : suite.tests) {
|
||||
if (test.result == TestResult::Pass)
|
||||
if (test.result == Test::Result::Pass)
|
||||
continue;
|
||||
|
||||
print_modifiers({ FG_GRAY, FG_BOLD });
|
||||
out(" Test: ");
|
||||
if (test.result == TestResult::Fail) {
|
||||
if (test.result == Test::Result::Fail) {
|
||||
print_modifiers({ CLEAR, FG_RED });
|
||||
outln("{} (failed):", test.name);
|
||||
outln(" {}", test.details);
|
||||
|
@ -630,25 +611,25 @@ JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
|
|||
|
||||
auto start_time = get_time_in_ms();
|
||||
String details = "";
|
||||
TestResult test_result;
|
||||
Test::Result test_result;
|
||||
if (test_path.ends_with(".module.js")) {
|
||||
test_result = TestResult::Skip;
|
||||
test_result = Test::Result::Skip;
|
||||
m_counts.tests_skipped++;
|
||||
m_counts.suites_passed++;
|
||||
} else {
|
||||
auto parse_result = parse_file(test_path);
|
||||
if (expecting_file_to_parse) {
|
||||
if (!parse_result.is_error()) {
|
||||
test_result = TestResult::Pass;
|
||||
test_result = Test::Result::Pass;
|
||||
} else {
|
||||
test_result = TestResult::Fail;
|
||||
test_result = Test::Result::Fail;
|
||||
details = parse_result.error().error.to_string();
|
||||
}
|
||||
} else {
|
||||
if (parse_result.is_error()) {
|
||||
test_result = TestResult::Pass;
|
||||
test_result = Test::Result::Pass;
|
||||
} else {
|
||||
test_result = TestResult::Fail;
|
||||
test_result = Test::Result::Fail;
|
||||
details = "File was expected to produce a parser error but didn't";
|
||||
}
|
||||
}
|
||||
|
@ -661,8 +642,8 @@ JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
|
|||
// - That way we can show additional information such as "file parsed but shouldn't have" or
|
||||
// parser errors for files that should parse respectively
|
||||
|
||||
JSTest test { expecting_file_to_parse ? "file should parse" : "file should not parse", test_result, details };
|
||||
JSSuite suite { "Parse file", test_result, { test } };
|
||||
Test::Case test { expecting_file_to_parse ? "file should parse" : "file should not parse", test_result, details };
|
||||
Test::Suite suite { "Parse file", test_result, { test } };
|
||||
JSFileResult file_result {
|
||||
test_path.substring(m_test_root.length() + 1, test_path.length() - m_test_root.length() - 1),
|
||||
{},
|
||||
|
@ -671,7 +652,7 @@ JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
|
|||
{ suite }
|
||||
};
|
||||
|
||||
if (test_result == TestResult::Fail) {
|
||||
if (test_result == Test::Result::Fail) {
|
||||
m_counts.tests_failed++;
|
||||
m_counts.suites_failed++;
|
||||
} else {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue