ztest: add test summary after all suites finish running

Add test summary after all test suites finish running.
The summary can be one-line or verbose, which is configured
with CONFIG_ZTEST_VERBOSE_SUMMARY. The one-line summary covers
overall suite stats. The verbose summary covers each test
function within the suite besides the one-line summary.

The new ztest output ultimately goes through printk. If printk
is routed through the logging subsystem, log messages may be
dropped. And if log_panic is invoked, log messages can be
flushed out of order. So several explicit log flushes are used
when printing the summary to ensure no content is lost and the
output stays well-formed.

Some macros, such as TC_START_PRINT and TC_END_PRINT, are
shared between the old and new ztest. They are defined accordingly.

Signed-off-by: Ming Shao <ming.shao@intel.com>
This commit is contained in:
Ming Shao 2022-07-23 16:04:43 +08:00 committed by Anas Nashif
commit 77e1e39cff
6 changed files with 216 additions and 4 deletions

View file

@ -99,6 +99,9 @@ struct ztest_unit_test {
const char *name;
void (*test)(void *data);
uint32_t thread_options;
/** Stats */
struct ztest_unit_test_stats *const stats;
};
extern struct ztest_unit_test _ztest_unit_test_list_start[];
@ -117,6 +120,17 @@ struct ztest_suite_stats {
uint32_t fail_count;
};
/** Per-unit-test execution statistics, reset before each suite run. */
struct ztest_unit_test_stats {
/** The number of times that the test ran */
uint32_t run_count;
/** The number of times that the test was skipped */
uint32_t skip_count;
/** The number of times that the test failed */
uint32_t fail_count;
/** The number of times that the test passed */
uint32_t pass_count;
};
/**
* Setup function to run before running this suite
*
@ -200,7 +214,7 @@ extern struct ztest_suite_node _ztest_suite_node_list_end[];
* @param teardown_fn The function to call after running all the tests in this suite
*/
#define ZTEST_SUITE(SUITE_NAME, PREDICATE, setup_fn, before_fn, after_fn, teardown_fn) \
struct ztest_suite_stats UTIL_CAT(z_ztest_test_node_stats_, SUITE_NAME); \
struct ztest_suite_stats UTIL_CAT(z_ztest_suite_node_stats_, SUITE_NAME); \
static const STRUCT_SECTION_ITERABLE(ztest_suite_node, \
UTIL_CAT(z_ztest_test_node_, SUITE_NAME)) = { \
.name = STRINGIFY(SUITE_NAME), \
@ -209,7 +223,7 @@ extern struct ztest_suite_node _ztest_suite_node_list_end[];
.after = (after_fn), \
.teardown = (teardown_fn), \
.predicate = PREDICATE, \
.stats = &UTIL_CAT(z_ztest_test_node_stats_, SUITE_NAME), \
.stats = &UTIL_CAT(z_ztest_suite_node_stats_, SUITE_NAME), \
}
/**
* Default entry point for running or listing registered unit tests.
@ -316,6 +330,7 @@ void ztest_test_pass(void);
void ztest_test_skip(void);
#define Z_TEST(suite, fn, t_options, use_fixture) \
struct ztest_unit_test_stats z_ztest_unit_test_stats_##suite##_##fn; \
static void _##suite##_##fn##_wrapper(void *data); \
static void suite##_##fn( \
COND_CODE_1(use_fixture, (struct suite##_fixture *fixture), (void))); \
@ -324,6 +339,7 @@ void ztest_test_skip(void);
.name = STRINGIFY(fn), \
.test = (_##suite##_##fn##_wrapper), \
.thread_options = t_options, \
.stats = &z_ztest_unit_test_stats_##suite##_##fn \
}; \
static void _##suite##_##fn##_wrapper(void *data) \
{ \

View file

@ -561,6 +561,7 @@ static int z_ztest_run_test_suite_ptr(struct ztest_suite_node *suite)
struct ztest_unit_test *test = NULL;
void *data = NULL;
int fail = 0;
int tc_result = TC_PASS;
if (test_status < 0) {
return test_status;
@ -601,7 +602,16 @@ static int z_ztest_run_test_suite_ptr(struct ztest_suite_node *suite)
continue;
}
if (ztest_api.should_test_run(suite->name, test->name)) {
if (run_test(suite, test, data) == TC_FAIL) {
test->stats->run_count++;
tc_result = run_test(suite, test, data);
if (tc_result == TC_PASS) {
test->stats->pass_count++;
} else if (tc_result == TC_SKIP) {
test->stats->skip_count++;
} else if (tc_result == TC_FAIL) {
test->stats->fail_count++;
}
if (tc_result == TC_FAIL) {
fail++;
}
}
@ -613,7 +623,17 @@ static int z_ztest_run_test_suite_ptr(struct ztest_suite_node *suite)
#else
while (((test = z_ztest_get_next_test(suite->name, test)) != NULL)) {
if (ztest_api.should_test_run(suite->name, test->name)) {
if (run_test(suite, test, data) == TC_FAIL) {
test->stats->run_count++;
tc_result = run_test(suite, test, data);
if (tc_result == TC_PASS) {
test->stats->pass_count++;
} else if (tc_result == TC_SKIP) {
test->stats->skip_count++;
} else if (tc_result == TC_FAIL) {
test->stats->fail_count++;
}
if (tc_result == TC_FAIL) {
fail++;
}
}
@ -654,11 +674,140 @@ void end_report(void)
K_APPMEM_PARTITION_DEFINE(ztest_mem_partition);
#endif
static void __ztest_init_unit_test_result_for_suite(struct ztest_suite_node *suite)
{
struct ztest_unit_test *test = NULL;
while (((test = z_ztest_get_next_test(suite->name, test)) != NULL)) {
test->stats->run_count = 0;
test->stats->skip_count = 0;
test->stats->fail_count = 0;
test->stats->pass_count = 0;
}
}
/* Block until the logging backend has emitted everything that is queued. */
static void flush_log(void)
{
	if (!IS_ENABLED(CONFIG_LOG_PROCESS_THREAD)) {
		/* No log thread: drain the buffer in-place ourselves. */
		while (LOG_PROCESS()) {
		}
		return;
	}

	/* A dedicated log thread does the draining; poll until it is done,
	 * then give it one extra tick to push the last message out.
	 */
	while (log_data_pending()) {
		k_sleep(K_MSEC(10));
	}
	k_sleep(K_MSEC(10));
}
/* Show a one-line summary for a test suite.
 *
 * The pass rate is computed over the tests that actually ran (fully
 * skipped tests are excluded) and printed with two decimal places,
 * rounded to nearest, using integer arithmetic only.
 */
static void __ztest_show_suite_summary_oneline(struct ztest_suite_node *suite)
{
	int distinct_pass = 0, distinct_fail = 0, distinct_skip = 0, distinct_total = 0;
	int effective_total = 0;
	int expanded_pass = 0, expanded_passrate = 0;
	int passrate_major = 0, passrate_minor = 0, passrate_tail = 0;
	int suite_result = TC_PASS;
	struct ztest_unit_test *test = NULL;

	/** summary of distinct run */
	while (((test = z_ztest_get_next_test(suite->name, test)) != NULL)) {
		distinct_total++;
		if (test->stats->skip_count == test->stats->run_count) {
			distinct_skip++;
		} else if (test->stats->pass_count == test->stats->run_count) {
			distinct_pass++;
		} else {
			distinct_fail++;
		}
	}

	if (distinct_skip == distinct_total) {
		suite_result = TC_SKIP;
		passrate_major = passrate_minor = 0;
	} else {
		suite_result = (distinct_fail > 0) ? TC_FAIL : TC_PASS;
		effective_total = distinct_total - distinct_skip;
		/* Scale by 100000 so the quotient carries xx.xx plus one
		 * extra digit used only for rounding.
		 */
		expanded_pass = distinct_pass * 100000;
		expanded_passrate = expanded_pass / effective_total;
		passrate_major = expanded_passrate / 1000;
		passrate_minor = (expanded_passrate - passrate_major * 1000) / 10;
		passrate_tail = expanded_passrate - passrate_major * 1000 - passrate_minor * 10;
		if (passrate_tail >= 5) { /* rounding */
			passrate_minor++;
			if (passrate_minor == 100) {
				/* Carry into the integer part so we never
				 * print a fractional part of "100".
				 */
				passrate_minor = 0;
				passrate_major++;
			}
		}
	}

	TC_SUMMARY_PRINT("SUITE %s - %3d.%02d%% [%s]: pass = %d, fail = %d, "
			 "skip = %d, total = %d\n",
			 TC_RESULT_TO_STR(suite_result),
			 passrate_major, passrate_minor,
			 suite->name, distinct_pass, distinct_fail,
			 distinct_skip, distinct_total);
	flush_log();
}
#ifdef CONFIG_ZTEST_VERBOSE_SUMMARY
/* Print the one-line suite summary followed by one result line per test. */
static void __ztest_show_suite_summary_verbose(struct ztest_suite_node *suite)
{
	struct ztest_unit_test *test = NULL;
	int result = TC_PASS;
	int line_count = 0;

	__ztest_show_suite_summary_oneline(suite);

	while ((test = z_ztest_get_next_test(suite->name, test)) != NULL) {
		if (test->stats->skip_count == test->stats->run_count) {
			result = TC_SKIP;
		} else if (test->stats->pass_count == test->stats->run_count) {
			result = TC_PASS;
		} else {
			result = TC_FAIL;
		}

		TC_SUMMARY_PRINT(" - %s - [%s.%s]\n", TC_RESULT_TO_STR(result),
				 test->test_suite_name, test->name);

		/** Only flush every third line to keep the output fast */
		if ((line_count % 3) == 0) {
			flush_log();
		}
		line_count++;
	}

	TC_SUMMARY_PRINT("\n");
	flush_log();
}
#endif
/* Emit the overall test summary for every registered suite. */
static void __ztest_show_suite_summary(void)
{
	struct ztest_suite_node *suite;

	/* Flush aggressively so no summary content is dropped when it
	 * travels through the logging subsystem.
	 */
	flush_log();
	TC_SUMMARY_PRINT("\n------ TESTSUITE SUMMARY START ------\n");
	flush_log();

	for (suite = _ztest_suite_node_list_start;
	     suite < _ztest_suite_node_list_end; ++suite) {
#ifdef CONFIG_ZTEST_VERBOSE_SUMMARY
		__ztest_show_suite_summary_verbose(suite);
#else
		__ztest_show_suite_summary_oneline(suite);
#endif
	}

	TC_SUMMARY_PRINT("------ TESTSUITE SUMMARY END ------\n\n");
	flush_log();
}
static int __ztest_run_test_suite(struct ztest_suite_node *ptr, const void *state)
{
struct ztest_suite_stats *stats = ptr->stats;
int count = 0;
__ztest_init_unit_test_result_for_suite(ptr);
for (int i = 0; i < NUM_ITER_PER_SUITE; i++) {
if (ztest_api.should_suite_run(state, ptr)) {
int fail = z_ztest_run_test_suite_ptr(ptr);
@ -694,6 +843,8 @@ int z_impl_ztest_run_test_suites(const void *state)
}
#endif
__ztest_show_suite_summary();
return count;
}
@ -726,6 +877,14 @@ void ztest_verify_all_test_suites_ran(void)
test_status = 1;
}
}
for (test = _ztest_unit_test_list_start; test < _ztest_unit_test_list_end; ++test) {
if (test->stats->fail_count + test->stats->pass_count + test->stats->skip_count !=
test->stats->run_count) {
PRINT("Bad stats for %s.%s\n", test->test_suite_name, test->name);
test_status = 1;
}
}
}
void ztest_run_all(const void *state) { ztest_api.run_all(state); }
@ -765,6 +924,7 @@ void main(void)
z_init_mock();
test_main();
end_report();
flush_log();
LOG_PANIC();
if (IS_ENABLED(CONFIG_ZTEST_RETEST_IF_PASSED)) {
static __noinit struct {