twister: verify ztest suite name in output
Tests written in the ztest framework should print the name of the test suite defined in the test's source code (the first argument of the ztest_test_suite() macro) as part of their output. This change makes it possible to find these suite names in the C files and use them during test execution to verify that the test actually ran properly (so verification no longer relies only on the presence of the "PROJECT EXECUTION SUCCESSFUL" line).

Signed-off-by: Piotr Golyzniak <piotr.golyzniak@nordicsemi.no>
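In practice, the new Test harness (first diff hunks below) watches the console log for the ztest banner line and records every suite name it announces. A minimal standalone sketch of that detection, using the exact pattern added in this commit (the helper function and the sample log are illustrative only, not part of the patch):

    import re

    # Pattern added to the Test harness in this commit; ztest prints this
    # banner before running a registered test suite.
    TEST_SUITE_START_PATTERN = r"Running test suite (?P<suite_name>.*)"

    def detect_suite_names(log_lines):
        """Collect every suite name announced in the console log."""
        detected = []
        for line in log_lines:
            match = re.search(TEST_SUITE_START_PATTERN, line)
            if match:
                detected.append(match.group("suite_name"))
        return detected

    # Illustrative log fragment, similar to what a ztest binary prints.
    log = [
        "Running test suite test_api",
        "PROJECT EXECUTION SUCCESSFUL",
    ]
    assert detect_suite_names(log) == ["test_api"]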
parent ef4583a32c
commit 340da2ecf3
3 changed files with 113 additions and 34 deletions
@@ -32,6 +32,7 @@ class Harness:
        self.fieldnames = []
        self.ztest = False
        self.is_pytest = False
        self.detected_suite_names = []

    def configure(self, instance):
        config = instance.testcase.harness_config
@@ -219,8 +220,15 @@ class Pytest(Harness):
class Test(Harness):
    RUN_PASSED = "PROJECT EXECUTION SUCCESSFUL"
    RUN_FAILED = "PROJECT EXECUTION FAILED"
    test_suite_start_pattern = r"Running test suite (?P<suite_name>.*)"

    def handle(self, line):
        test_suite_match = re.search(self.test_suite_start_pattern, line)
        if test_suite_match:
            suite_name = test_suite_match.group("suite_name")
            self.detected_suite_names.append(suite_name)
            self.ztest = True

        match = result_re.match(line)
        if match and match.group(2):
            name = "{}.{}".format(self.id, match.group(3))
@@ -492,6 +492,45 @@ class Handler:
                harness_testcase_result_skip in self.instance.results.values():
            self.instance.reason = "ztest skip"

    def _verify_ztest_suite_name(self, harness_state, detected_suite_names, handler_time):
        """
        If test suite names were found in the test's C source code, verify
        that the suite names detected in the output are among the expected
        suite names (and not the other way round).
        """
        expected_suite_names = self.instance.testcase.ztest_suite_names
        if not expected_suite_names or \
                not harness_state == "passed":
            return
        if not detected_suite_names:
            self._missing_suite_name(expected_suite_names, handler_time)
        for detected_suite_name in detected_suite_names:
            if detected_suite_name not in expected_suite_names:
                self._missing_suite_name(expected_suite_names, handler_time)
                break

    def _missing_suite_name(self, expected_suite_names, handler_time):
        """
        Mark the performed test as failed if a suite name is missing from the
        output or does not match the expected names.
        """
        self.set_state("failed", handler_time)
        for k in self.instance.testcase.cases:
            self.instance.results[k] = "FAIL"
        self.instance.reason = "Testsuite mismatch"
        logger.debug("Test suite names were not printed or some of them in " \
                     "output do not correspond with expected: %s",
                     str(expected_suite_names))

    def _final_handle_actions(self, harness, handler_time):
        self._set_skip_reason(harness.state)

        harness_class_name = type(harness).__name__
        if harness_class_name == "Test":  # only for ZTest tests
            self._verify_ztest_suite_name(harness.state, harness.detected_suite_names, handler_time)

        self.record(harness)


class BinaryHandler(Handler):
    def __init__(self, instance, type_str):
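The acceptance rule implemented by the two helpers above reduces to a subset check: the run only stays "passed" when every suite name seen in the output is one of the names scanned from the C sources. A standalone sketch of the same rule (function name and inputs are illustrative, not part of the patch):

    def suite_names_ok(detected, expected):
        """Mirror the core check of _verify_ztest_suite_name()."""
        if not expected:   # nothing scanned from the sources: nothing to verify
            return True
        if not detected:   # output never announced a suite: treated as a failure
            return False
        return all(name in expected for name in detected)

    assert suite_names_ok(["test_api"], ["test_api", "test_extra"])
    assert not suite_names_ok(["typo_suite"], ["test_api"])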
@@ -650,9 +689,7 @@ class BinaryHandler(Handler):
            self.instance.reason = "Timeout"
            self.add_missing_testscases(harness)

        self._set_skip_reason(harness.state)

        self.record(harness)
        self._final_handle_actions(harness, handler_time)


class DeviceHandler(Handler):
@@ -950,13 +987,12 @@ class DeviceHandler(Handler):
        else:
            self.set_state(out_state, handler_time)

        self._set_skip_reason(harness.state)
        self._final_handle_actions(harness, handler_time)

        if post_script:
            self.run_custom_script(post_script, 30)

        self.make_device_available(serial_device)
        self.record(harness)


class QEMUHandler(Handler):
@@ -1108,8 +1144,6 @@ class QEMUHandler(Handler):
            harness.pytest_run(logfile)
        out_state = harness.state

        handler.record(harness)

        handler_time = time.time() - start_time
        logger.debug(f"QEMU ({pid}) complete ({out_state}) after {handler_time} seconds")
@@ -1220,7 +1254,7 @@ class QEMUHandler(Handler):
            self.instance.reason = "Exited with {}".format(self.returncode)
            self.add_missing_testscases(harness)

        self._set_skip_reason(harness.state)
        self._final_handle_actions(harness, 0)

    def get_fifo(self):
        return self.fifo_fn
@@ -1650,18 +1684,21 @@ class ScanPathResult:
                                        ztest_run_registered_test_suites.
        has_test_main                   Whether or not the path contains a
                                        definition of test_main(void)
        ztest_suite_names               Names of found ztest suites
    """
    def __init__(self,
                 matches: List[str] = None,
                 warnings: str = None,
                 has_registered_test_suites: bool = False,
                 has_run_registered_test_suites: bool = False,
                 has_test_main: bool = False):
                 has_test_main: bool = False,
                 ztest_suite_names: List[str] = []):
        self.matches = matches
        self.warnings = warnings
        self.has_registered_test_suites = has_registered_test_suites
        self.has_run_registered_test_suites = has_run_registered_test_suites
        self.has_test_main = has_test_main
        self.ztest_suite_names = ztest_suite_names

    def __eq__(self, other):
        if not isinstance(other, ScanPathResult):
@@ -1672,7 +1709,9 @@ class ScanPathResult:
                other.has_registered_test_suites) and
               (self.has_run_registered_test_suites ==
                other.has_run_registered_test_suites) and
               self.has_test_main == other.has_test_main)
               self.has_test_main == other.has_test_main and
               (sorted(self.ztest_suite_names) ==
                sorted(other.ztest_suite_names)))


class TestCase(DisablePyTestCollectionMixin):
@@ -1730,6 +1769,7 @@ class TestCase(DisablePyTestCollectionMixin):
        self.min_flash = -1
        self.extra_sections = None
        self.integration_platforms = []
        self.ztest_suite_names = []

    @staticmethod
    def get_unique(testcase_root, workdir, name):
@@ -1816,18 +1856,19 @@ Tests should reference the category and subsystem with a dot as a separator.
                     'offset': 0}

        with contextlib.closing(mmap.mmap(**mmap_args)) as main_c:
            suite_regex_match = suite_regex.search(main_c)
            registered_suite_regex_match = registered_suite_regex.search(
                main_c)
            suite_regex_matches = \
                [m for m in suite_regex.finditer(main_c)]
            registered_suite_regex_matches = \
                [m for m in registered_suite_regex.finditer(main_c)]

            if registered_suite_regex_match:
            if registered_suite_regex_matches:
                has_registered_test_suites = True
            if registered_suite_run_regex.search(main_c):
                has_run_registered_test_suites = True
            if test_main_regex.search(main_c):
                has_test_main = True

            if not suite_regex_match and not has_registered_test_suites:
            if not suite_regex_matches and not has_registered_test_suites:
                # can't find ztest_test_suite, maybe a client, because
                # it includes ztest.h
                return ScanPathResult(
@@ -1835,16 +1876,23 @@ Tests should reference the category and subsystem with a dot as a separator.
                    warnings=None,
                    has_registered_test_suites=has_registered_test_suites,
                    has_run_registered_test_suites=has_run_registered_test_suites,
                    has_test_main=has_test_main)
                    has_test_main=has_test_main,
                    ztest_suite_names=[])

            suite_run_match = suite_run_regex.search(main_c)
            if suite_regex_match and not suite_run_match:
            if suite_regex_matches and not suite_run_match:
                raise ValueError("can't find ztest_run_test_suite")

            if suite_regex_match:
                search_start = suite_regex_match.end()
            if suite_regex_matches:
                search_start = suite_regex_matches[0].end()
                ztest_suite_names = \
                    [m.group("suite_name") for m in suite_regex_matches]
            else:
                search_start = registered_suite_regex_match.end()
                search_start = registered_suite_regex_matches[0].end()
                ztest_suite_names = \
                    [m.group("suite_name") for m in registered_suite_regex_matches]
            ztest_suite_names = \
                [name.decode("UTF-8") for name in ztest_suite_names]

            if suite_run_match:
                search_end = suite_run_match.start()
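The hunk above relies on suite_regex, which is defined elsewhere in twisterlib and not shown in this diff. Assuming a pattern of roughly this shape, the extraction of ztest_suite_names from an mmap'ed C source works as sketched below (the regex and the sample source are assumptions for illustration only):

    import re

    # Assumed shape of suite_regex; the real pattern lives elsewhere in
    # twisterlib and may differ in detail. It operates on bytes because
    # main_c is an mmap object.
    suite_regex = re.compile(
        rb"ztest_test_suite\(\s*(?P<suite_name>[a-zA-Z0-9_]+)\s*,",
        re.MULTILINE)

    source = b"""
    ztest_test_suite(feature1,
            ztest_unit_test(test_1a),
            ztest_unit_test(test_1c));
    """

    suite_regex_matches = list(suite_regex.finditer(source))
    ztest_suite_names = [m.group("suite_name") for m in suite_regex_matches]
    ztest_suite_names = [name.decode("UTF-8") for name in ztest_suite_names]
    assert ztest_suite_names == ["feature1"]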
@@ -1870,13 +1918,16 @@ Tests should reference the category and subsystem with a dot as a separator.
            warnings=warnings,
            has_registered_test_suites=has_registered_test_suites,
            has_run_registered_test_suites=has_run_registered_test_suites,
            has_test_main=has_test_main)
            has_test_main=has_test_main,
            ztest_suite_names=ztest_suite_names)

    def scan_path(self, path):
        subcases = []
        has_registered_test_suites = False
        has_run_registered_test_suites = False
        has_test_main = False
        ztest_suite_names = []

        for filename in glob.glob(os.path.join(path, "src", "*.c*")):
            try:
                result: ScanPathResult = self.scan_file(filename)
@@ -1892,6 +1943,9 @@ Tests should reference the category and subsystem with a dot as a separator.
                    has_run_registered_test_suites = True
                if result.has_test_main:
                    has_test_main = True
                if result.ztest_suite_names:
                    ztest_suite_names += result.ztest_suite_names

            except ValueError as e:
                logger.error("%s: can't find: %s" % (filename, e))
@@ -1902,6 +1956,8 @@ Tests should reference the category and subsystem with a dot as a separator.
                    logger.error("%s: %s" % (filename, result.warnings))
                if result.matches:
                    subcases += result.matches
                if result.ztest_suite_names:
                    ztest_suite_names += result.ztest_suite_names
            except ValueError as e:
                logger.error("%s: can't find: %s" % (filename, e))
@@ -1913,17 +1969,19 @@ Tests should reference the category and subsystem with a dot as a separator.
            logger.error(warning)
            raise TwisterRuntimeError(warning)

        return subcases
        return subcases, ztest_suite_names

    def parse_subcases(self, test_path):
        results = self.scan_path(test_path)
        for sub in results:
        subcases, ztest_suite_names = self.scan_path(test_path)
        for sub in subcases:
            name = "{}.{}".format(self.id, sub)
            self.cases.append(name)

        if not results:
        if not subcases:
            self.cases.append(self.id)

        self.ztest_suite_names = ztest_suite_names

    def __str__(self):
        return self.name
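Because scan_path() now returns a tuple, any caller has to unpack both values; parse_subcases() does that itself and keeps the names on the TestCase for the later Handler check. A brief illustration (the constructor arguments and paths are borrowed from the test file below and are only examples):

    testcase = TestCase("/scripts/tests/twister/test_data/testcases/tests", ".", "test_a.check_1")
    testcase.id = "test_id"

    # scan_path() returns subcases and suite names together ...
    subcases, ztest_suite_names = testcase.scan_path("testcases/tests")

    # ... and parse_subcases() stores the names for the later
    # Handler._verify_ztest_suite_name() check.
    testcase.parse_subcases("testcases/tests")
    assert testcase.ztest_suite_names == ztest_suite_names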
@@ -124,27 +124,31 @@ TESTDATA_5 = [
              'user', 'last'],
         has_registered_test_suites=False,
         has_run_registered_test_suites=False,
         has_test_main=False)),
         has_test_main=False,
         ztest_suite_names = ["test_api"])),
    ("testcases/tests/test_a/test_ztest_error.c",
     ScanPathResult(
         warnings="Found a test that does not start with test_",
         matches=['1a', '1c', '2a', '2b'],
         has_registered_test_suites=False,
         has_run_registered_test_suites=False,
         has_test_main=True)),
         has_test_main=True,
         ztest_suite_names = ["feature1", "feature2"])),
    ("testcases/tests/test_a/test_ztest_error_1.c",
     ScanPathResult(
         warnings="found invalid #ifdef, #endif in ztest_test_suite()",
         matches=['unit_1a', 'unit_1b', 'Unit_1c'],
         has_registered_test_suites=False,
         has_run_registered_test_suites=False,
         has_test_main=False)),
         has_test_main=False,
         ztest_suite_names = ["feature3"])),
    ("testcases/tests/test_d/test_ztest_error_register_test_suite.c",
     ScanPathResult(
         warnings=None, matches=['unit_1a', 'unit_1b'],
         has_registered_test_suites=True,
         has_run_registered_test_suites=False,
         has_test_main=False)),
         has_test_main=False,
         ztest_suite_names = ["feature4"])),
]

@pytest.mark.parametrize("test_file, expected", TESTDATA_5)
@@ -159,17 +163,26 @@ def test_scan_file(test_data, test_file, expected: ScanPathResult):


TESTDATA_6 = [
    ("testcases/tests", ['a', 'c', 'unit_a', 'newline', 'test_test_aa', 'user', 'last']),
    ("testcases/tests/test_a", ['unit_1a', 'unit_1b', 'Unit_1c', '1a', '1c', '2a', '2b']),
    (
        "testcases/tests",
        ['a', 'c', 'unit_a', 'newline', 'test_test_aa', 'user', 'last'],
        ["test_api"]
    ),
    (
        "testcases/tests/test_a",
        ['unit_1a', 'unit_1b', 'Unit_1c', '1a', '1c', '2a', '2b'],
        ["feature3", "feature1", "feature2"]
    ),
]

@pytest.mark.parametrize("test_path, expected_subcases", TESTDATA_6)
def test_subcases(test_data, test_path, expected_subcases):
@pytest.mark.parametrize("test_path, expected_subcases, expected_ztest_suite_names", TESTDATA_6)
def test_subcases(test_data, test_path, expected_subcases, expected_ztest_suite_names):
    '''Testing scan path and parse subcases methods for expected subcases'''
    testcase = TestCase("/scripts/tests/twister/test_data/testcases/tests", ".", "test_a.check_1")

    subcases = testcase.scan_path(os.path.join(test_data, test_path))
    subcases, ztest_suite_names = testcase.scan_path(os.path.join(test_data, test_path))
    assert sorted(subcases) == sorted(expected_subcases)
    assert sorted(ztest_suite_names) == sorted(expected_ztest_suite_names)

    testcase.id = "test_id"
    testcase.parse_subcases(test_data + test_path)