# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import json
import logging
import os
import platform
import re
import shlex
import shutil
import subprocess
import sys
import threading
import time
import xml.etree.ElementTree as ET
from collections import OrderedDict
from enum import Enum

from pytest import ExitCode

from twisterlib.reports import ReportStatus
from twisterlib.error import ConfigurationError, StatusAttributeError
from twisterlib.environment import ZEPHYR_BASE, PYTEST_PLUGIN_INSTALLED
from twisterlib.handlers import Handler, terminate_process, SUPPORTED_SIMS_IN_PYTEST
from twisterlib.statuses import TwisterStatus
from twisterlib.testinstance import TestInstance

logger = logging.getLogger('twister')
logger.setLevel(logging.DEBUG)

_WINDOWS = platform.system() == 'Windows'

result_re = re.compile(r".*(PASS|FAIL|SKIP) - (test_)?(\S*) in (\d*[.,]?\d*) seconds")


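# Each harness class below consumes console/log output line by line and maps it to
# TwisterStatus values for the test instance and its test cases. Which class is used
# is selected by the `harness` key of a test suite; its options come from the
# `harness_config` section of the suite's testcase/sample YAML.
#
# Illustrative example (not taken from this file) of a console harness configuration:
#
#   harness: console
#   harness_config:
#     type: multi_line
#     ordered: true
#     regex:
#       - "Hello World!"
#       - "My IP address is: (\\d+\\.\\d+\\.\\d+\\.\\d+)"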
class Harness:
    GCOV_START = "GCOV_COVERAGE_DUMP_START"
    GCOV_END = "GCOV_COVERAGE_DUMP_END"
    FAULT = "ZEPHYR FATAL ERROR"
    RUN_PASSED = "PROJECT EXECUTION SUCCESSFUL"
    RUN_FAILED = "PROJECT EXECUTION FAILED"
    run_id_pattern = r"RunID: (?P<run_id>.*)"

    def __init__(self):
        self._status = TwisterStatus.NONE
        self.reason = None
        self.type = None
        self.regex = []
        self.matches = OrderedDict()
        self.ordered = True
        self.id = None
        self.fail_on_fault = True
        self.fault = False
        self.capture_coverage = False
        self.next_pattern = 0
        self.record = None
        self.record_pattern = None
        self.record_as_json = None
        self.recording = []
        self.ztest = False
        self.detected_suite_names = []
        self.run_id = None
        self.matched_run_id = False
        self.run_id_exists = False
        self.instance: TestInstance | None = None
        self.testcase_output = ""
        self._match = False

    @property
    def status(self) -> TwisterStatus:
        return self._status

    @status.setter
    def status(self, value: TwisterStatus) -> None:
        # Check for illegal assignments by value
        try:
            key = value.name if isinstance(value, Enum) else value
            self._status = TwisterStatus[key]
        except KeyError:
            raise StatusAttributeError(self.__class__, value)

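    # configure() binds the harness to a TestInstance and pulls the parsing options
    # (type, regex, ordered, record) from the suite's harness_config section.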
    def configure(self, instance):
        self.instance = instance
        config = instance.testsuite.harness_config
        self.id = instance.testsuite.id
        self.run_id = instance.run_id
        if instance.testsuite.ignore_faults:
            self.fail_on_fault = False

        if config:
            self.type = config.get('type', None)
            self.regex = config.get('regex', [])
            self.ordered = config.get('ordered', True)
            self.record = config.get('record', {})
            if self.record:
                self.record_pattern = re.compile(self.record.get("regex", ""))
                self.record_as_json = self.record.get("as_json")

    def build(self):
        pass

    def get_testcase_name(self):
        """
        Get current TestCase name.
        """
        return self.id

    def translate_record(self, record: dict) -> dict:
        if self.record_as_json:
            for k in self.record_as_json:
                if k not in record:
                    continue
                try:
                    record[k] = json.loads(record[k]) if record[k] else {}
                except json.JSONDecodeError as parse_error:
                    logger.warning(f"HARNESS:{self.__class__.__name__}: recording JSON failed:"
                                   f" {parse_error} for '{k}':'{record[k]}'")
                    # Don't set the Harness state to failed for recordings.
                    record[k] = {'ERROR': {'msg': str(parse_error), 'doc': record[k]}}
        return record

    def parse_record(self, line) -> re.Match:
        match = None
        if self.record_pattern:
            match = self.record_pattern.search(line)
            if match:
                rec = self.translate_record({k: v.strip() for k, v in match.groupdict(default="").items()})
                self.recording.append(rec)
        return match

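    # process_test() implements the line handling shared by all harnesses: it records
    # the RunID reported by the image, watches for the ztest PROJECT EXECUTION
    # SUCCESSFUL/FAILED banners, fatal-error lines, and the GCOV coverage dump markers.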
    def process_test(self, line):

        self.parse_record(line)

        runid_match = re.search(self.run_id_pattern, line)
        if runid_match:
            run_id = runid_match.group("run_id")
            self.run_id_exists = True
            if run_id == str(self.run_id):
                self.matched_run_id = True

        if self.RUN_PASSED in line:
            if self.fault:
                self.status = TwisterStatus.FAIL
                self.reason = "Fault detected while running test"
            else:
                self.status = TwisterStatus.PASS

        if self.RUN_FAILED in line:
            self.status = TwisterStatus.FAIL
            self.reason = "Testsuite failed"

        if self.fail_on_fault:
            if self.FAULT == line:
                self.fault = True

        if self.GCOV_START in line:
            self.capture_coverage = True
        elif self.GCOV_END in line:
            self.capture_coverage = False


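# The Robot harness delegates the real work to Robot Framework, invoked through
# run_robot_test(); handle() only marks the instance as passed so Twister does not
# time out while waiting for console patterns.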
class Robot(Harness):

    is_robot_test = True

    def configure(self, instance):
        super(Robot, self).configure(instance)
        self.instance = instance

        config = instance.testsuite.harness_config
        if config:
            self.path = config.get('robot_testsuite', None)
            self.option = config.get('robot_option', None)

    def handle(self, line):
        '''Test cases that use this harness rely on the results reported by
        Robot Framework, which is invoked in run_robot_test(). This handle()
        only assigns a status to avoid a timeout; nothing is written into
        handler.log.
        '''
        self.instance.status = TwisterStatus.PASS
        tc = self.instance.get_case_or_create(self.id)
        tc.status = TwisterStatus.PASS

    def run_robot_test(self, command, handler):
        start_time = time.time()
        env = os.environ.copy()

        if self.option:
            if isinstance(self.option, list):
                for option in self.option:
                    for v in str(option).split():
                        command.append(f'{v}')
            else:
                for v in str(self.option).split():
                    command.append(f'{v}')

        if self.path is None:
            raise PytestHarnessException('The parameter robot_testsuite is mandatory')

        if isinstance(self.path, list):
            for suite in self.path:
                command.append(os.path.join(handler.sourcedir, suite))
        else:
            command.append(os.path.join(handler.sourcedir, self.path))

        with subprocess.Popen(command, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT, cwd=self.instance.build_dir, env=env) as renode_test_proc:
            out, _ = renode_test_proc.communicate()

            self.instance.execution_time = time.time() - start_time

            if renode_test_proc.returncode == 0:
                self.instance.status = TwisterStatus.PASS
                # All tests in one Robot file are treated as a single test case,
                # so its status should be set according to the instance status.
                # Please note that there should be only one testcase in the testcases list.
                self.instance.testcases[0].status = TwisterStatus.PASS
            else:
                logger.error("Robot test failure: %s for %s" %
                             (handler.sourcedir, self.instance.platform.name))
                self.instance.status = TwisterStatus.FAIL
                self.instance.testcases[0].status = TwisterStatus.FAIL

            if out:
                with open(os.path.join(self.instance.build_dir, handler.log), "wt") as log:
                    log_msg = out.decode(sys.getdefaultencoding())
                    log.write(log_msg)


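# The Console harness passes or fails a test based on regular expressions matched
# against the console output: 'one_line' expects a single pattern, 'multi_line'
# expects every pattern, either in order (ordered: true) or in any order.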
class Console(Harness):

    def get_testcase_name(self):
        '''
        Get current TestCase name.

        A Console harness id contains only the TestSuite id, without a TestCase
        name suffix. When a Ztest suite with a single test case is configured to
        use this harness type (for simplified output parsing instead of the
        Ztest harness), the name of that single TestCase is used if available.
        '''
        if self.instance and len(self.instance.testcases) == 1:
            return self.instance.testcases[0].name
        return super(Console, self).get_testcase_name()

    def configure(self, instance):
        super(Console, self).configure(instance)
        if self.regex is None or len(self.regex) == 0:
            self.status = TwisterStatus.FAIL
            tc = self.instance.set_case_status_by_name(
                self.get_testcase_name(),
                TwisterStatus.FAIL,
                f"HARNESS:{self.__class__.__name__}:no regex patterns configured."
            )
            raise ConfigurationError(self.instance.name, tc.reason)
        if self.type == "one_line":
            self.pattern = re.compile(self.regex[0])
            self.patterns_expected = 1
        elif self.type == "multi_line":
            self.patterns = []
            for r in self.regex:
                self.patterns.append(re.compile(r))
            self.patterns_expected = len(self.patterns)
        else:
            self.status = TwisterStatus.FAIL
            tc = self.instance.set_case_status_by_name(
                self.get_testcase_name(),
                TwisterStatus.FAIL,
                f"HARNESS:{self.__class__.__name__}:incorrect type={self.type}"
            )
            raise ConfigurationError(self.instance.name, tc.reason)

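    # handle() is called once per console line; it updates the match counters and,
    # once the run has finished, downgrades a premature PASS when not all of the
    # configured patterns were actually seen.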
    def handle(self, line):
        if self.type == "one_line":
            if self.pattern.search(line):
                logger.debug(f"HARNESS:{self.__class__.__name__}:EXPECTED:"
                             f"'{self.pattern.pattern}'")
                self.next_pattern += 1
                self.status = TwisterStatus.PASS
        elif self.type == "multi_line" and self.ordered:
            if (self.next_pattern < len(self.patterns) and
                    self.patterns[self.next_pattern].search(line)):
                logger.debug(f"HARNESS:{self.__class__.__name__}:EXPECTED("
                             f"{self.next_pattern + 1}/{self.patterns_expected}):"
                             f"'{self.patterns[self.next_pattern].pattern}'")
                self.next_pattern += 1
                if self.next_pattern >= len(self.patterns):
                    self.status = TwisterStatus.PASS
        elif self.type == "multi_line" and not self.ordered:
            for i, pattern in enumerate(self.patterns):
                r = self.regex[i]
                if pattern.search(line) and r not in self.matches:
                    self.matches[r] = line
                    logger.debug(f"HARNESS:{self.__class__.__name__}:EXPECTED("
                                 f"{len(self.matches)}/{self.patterns_expected}):"
                                 f"'{pattern.pattern}'")
                    if len(self.matches) == len(self.regex):
                        self.status = TwisterStatus.PASS
        else:
            logger.error("Unknown harness_config type")

        if self.fail_on_fault:
            if self.FAULT in line:
                self.fault = True

        if self.GCOV_START in line:
            self.capture_coverage = True
        elif self.GCOV_END in line:
            self.capture_coverage = False

        self.process_test(line)
        # Reset the test status to FAIL when ztest's 'PROJECT EXECUTION SUCCESSFUL'
        # was seen but not all of the expected patterns were found in the output.
        # This can happen when the pattern sequence has diverged from the test code,
        # the test platform has console issues, or a different test image was executed.
        # TODO: Introduce an explicit match policy type to reject
        # unexpected console output, allow missing patterns, deny duplicates.
        if self.status == TwisterStatus.PASS and \
           self.ordered and \
           self.next_pattern < self.patterns_expected:
            logger.error(f"HARNESS:{self.__class__.__name__}: failed with"
                         f" {self.next_pattern} of {self.patterns_expected}"
                         f" expected ordered patterns.")
            self.status = TwisterStatus.FAIL
            self.reason = "patterns did not match (ordered)"
        if self.status == TwisterStatus.PASS and \
           not self.ordered and \
           len(self.matches) < self.patterns_expected:
            logger.error(f"HARNESS:{self.__class__.__name__}: failed with"
                         f" {len(self.matches)} of {self.patterns_expected}"
                         f" expected unordered patterns.")
            self.status = TwisterStatus.FAIL
            self.reason = "patterns did not match (unordered)"

        tc = self.instance.get_case_or_create(self.get_testcase_name())
        if self.status == TwisterStatus.PASS:
            tc.status = TwisterStatus.PASS
        else:
            tc.status = TwisterStatus.FAIL


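# The Pytest harness does not parse console lines itself; instead it launches pytest
# with the twister_harness plugin, waits for it to finish, and then reads the results
# back from the generated junit XML report. PytestHarnessException signals problems
# while building or running that pytest command.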
class PytestHarnessException(Exception):
    """General exception for pytest."""


class Pytest(Harness):

    def configure(self, instance: TestInstance):
        super(Pytest, self).configure(instance)
        self.running_dir = instance.build_dir
        self.source_dir = instance.testsuite.source_dir
        self.report_file = os.path.join(self.running_dir, 'report.xml')
        self.pytest_log_file_path = os.path.join(self.running_dir, 'twister_harness.log')
        self.reserved_dut = None
        self._output = []

    def pytest_run(self, timeout):
        try:
            cmd = self.generate_command()
            self.run_command(cmd, timeout)
        except PytestHarnessException as pytest_exception:
            logger.error(str(pytest_exception))
            self.status = TwisterStatus.FAIL
            self.instance.reason = str(pytest_exception)
        finally:
            self.instance.record(self.recording)
            self._update_test_status()
            if self.reserved_dut:
                self.instance.handler.make_dut_available(self.reserved_dut)

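    # generate_command() assembles the pytest invocation: the twister_harness plugin
    # options, logging destinations, the pytest root(s) from harness_config, the
    # device/simulator parameters for the current handler, and any extra pytest_args
    # from the YAML or the command line.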
    def generate_command(self):
        config = self.instance.testsuite.harness_config
        handler: Handler = self.instance.handler
        pytest_root = config.get('pytest_root', ['pytest']) if config else ['pytest']
        pytest_args_yaml = config.get('pytest_args', []) if config else []
        pytest_dut_scope = config.get('pytest_dut_scope', None) if config else None
        command = [
            'pytest',
            '--twister-harness',
            '-s', '-v',
            f'--build-dir={self.running_dir}',
            f'--junit-xml={self.report_file}',
            '--log-file-level=DEBUG',
            '--log-file-format=%(asctime)s.%(msecs)d:%(levelname)s:%(name)s: %(message)s',
            f'--log-file={self.pytest_log_file_path}',
            f'--platform={self.instance.platform.name}'
        ]
        command.extend([os.path.normpath(os.path.join(
            self.source_dir, os.path.expanduser(os.path.expandvars(src)))) for src in pytest_root])

        if pytest_dut_scope:
            command.append(f'--dut-scope={pytest_dut_scope}')

        # Always pass output from the pytest test and the test image up to the Twister log.
        command.extend([
            '--log-cli-level=DEBUG',
            '--log-cli-format=%(levelname)s: %(message)s'
        ])

        # Use the test timeout as the base timeout for pytest
        base_timeout = handler.get_test_timeout()
        command.append(f'--base-timeout={base_timeout}')

        if handler.type_str == 'device':
            command.extend(
                self._generate_parameters_for_hardware(handler)
            )
        elif handler.type_str in SUPPORTED_SIMS_IN_PYTEST:
            command.append(f'--device-type={handler.type_str}')
        elif handler.type_str == 'build':
            command.append('--device-type=custom')
        else:
            raise PytestHarnessException(f'Support for handler {handler.type_str} not implemented yet')

        if handler.type_str != 'device':
            for fixture in handler.options.fixture:
                command.append(f'--twister-fixture={fixture}')

        command.extend(pytest_args_yaml)

        if handler.options.pytest_args:
            command.extend(handler.options.pytest_args)

        return command

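    # For tests run on real hardware, the DUT entry from the hardware map provides the
    # serial port (or pty), flashing scripts, runner and fixtures that are forwarded to
    # the pytest plugin as command-line options.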
    def _generate_parameters_for_hardware(self, handler: Handler):
        command = ['--device-type=hardware']
        hardware = handler.get_hardware()
        if not hardware:
            raise PytestHarnessException('Hardware is not available')
        # Update the instance with the device id to have it in the summary report.
        self.instance.dut = hardware.id

        self.reserved_dut = hardware
        if hardware.serial_pty:
            command.append(f'--device-serial-pty={hardware.serial_pty}')
        else:
            command.extend([
                f'--device-serial={hardware.serial}',
                f'--device-serial-baud={hardware.baud}'
            ])

        if hardware.flash_timeout:
            command.append(f'--flash-timeout={hardware.flash_timeout}')

        options = handler.options
        if runner := hardware.runner or options.west_runner:
            command.append(f'--runner={runner}')

        if hardware.runner_params:
            for param in hardware.runner_params:
                command.append(f'--runner-params={param}')

        if options.west_flash and options.west_flash != []:
            command.append(f'--west-flash-extra-args={options.west_flash}')

        if board_id := hardware.probe_id or hardware.id:
            command.append(f'--device-id={board_id}')

        if hardware.product:
            command.append(f'--device-product={hardware.product}')

        if hardware.pre_script:
            command.append(f'--pre-script={hardware.pre_script}')

        if hardware.post_flash_script:
            command.append(f'--post-flash-script={hardware.post_flash_script}')

        if hardware.post_script:
            command.append(f'--post-script={hardware.post_script}')

        if hardware.flash_before:
            command.append(f'--flash-before={hardware.flash_before}')

        for fixture in hardware.fixtures:
            command.append(f'--twister-fixture={fixture}')

        return command

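    # run_command() streams pytest output through a reader thread so the whole run can
    # be bounded by the Twister timeout; on pytest-level errors (interrupted run, usage
    # or internal error) the captured output is written to twister_harness.log.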
    def run_command(self, cmd, timeout):
        cmd, env = self._update_command_with_env_dependencies(cmd)
        with subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            env=env
        ) as proc:
            try:
                reader_t = threading.Thread(target=self._output_reader, args=(proc,), daemon=True)
                reader_t.start()
                reader_t.join(timeout)
                if reader_t.is_alive():
                    terminate_process(proc)
                    logger.warning('Timeout has occurred. It can be extended in the testspec file. '
                                   f'Currently set to {timeout} seconds.')
                    self.instance.reason = 'Pytest timeout'
                    self.status = TwisterStatus.FAIL
                proc.wait(timeout)
            except subprocess.TimeoutExpired:
                self.status = TwisterStatus.FAIL
                proc.kill()

        if proc.returncode in (ExitCode.INTERRUPTED, ExitCode.USAGE_ERROR, ExitCode.INTERNAL_ERROR):
            self.status = TwisterStatus.ERROR
            self.instance.reason = f'Pytest error - return code {proc.returncode}'
            with open(self.pytest_log_file_path, 'w') as log_file:
                log_file.write(shlex.join(cmd) + '\n\n')
                log_file.write('\n'.join(self._output))

    @staticmethod
    def _update_command_with_env_dependencies(cmd):
        '''
        If the twister harness plugin was not installed by pip, point pytest at it
        by updating PYTHONPATH and appending the -p argument to the pytest command.
        '''
        env = os.environ.copy()
        if not PYTEST_PLUGIN_INSTALLED:
            cmd.extend(['-p', 'twister_harness.plugin'])
            pytest_plugin_path = os.path.join(ZEPHYR_BASE, 'scripts', 'pylib', 'pytest-twister-harness', 'src')
            env['PYTHONPATH'] = pytest_plugin_path + os.pathsep + env.get('PYTHONPATH', '')
            if _WINDOWS:
                cmd_append_python_path = f'set PYTHONPATH={pytest_plugin_path};%PYTHONPATH% && '
            else:
                cmd_append_python_path = f'export PYTHONPATH={pytest_plugin_path}:${{PYTHONPATH}} && '
        else:
            cmd_append_python_path = ''
        cmd_to_print = cmd_append_python_path + shlex.join(cmd)
        logger.debug('Running pytest command: %s', cmd_to_print)

        return cmd, env

    def _output_reader(self, proc):
        self._output = []
        while proc.stdout.readable() and proc.poll() is None:
            line = proc.stdout.readline().decode().strip()
            if not line:
                continue
            self._output.append(line)
            logger.debug('PYTEST: %s', line)
            self.parse_record(line)
        proc.communicate()

    def _update_test_status(self):
        if self.status == TwisterStatus.NONE:
            self.instance.testcases = []
            try:
                self._parse_report_file(self.report_file)
            except Exception as e:
                logger.error(f'Error when parsing file {self.report_file}: {e}')
                self.status = TwisterStatus.FAIL
            finally:
                if not self.instance.testcases:
                    self.instance.init_cases()

        self.instance.status = self.status if self.status != TwisterStatus.NONE else \
            TwisterStatus.FAIL
        if self.instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
            self.instance.reason = self.instance.reason or 'Pytest failed'
            self.instance.add_missing_case_status(TwisterStatus.BLOCK, self.instance.reason)

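    # The junit XML written by pytest is the source of the results: suite-level
    # failure/error/skipped counters set the harness status, and each <testcase>
    # element becomes a Twister test case with its duration, reason and output.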
    def _parse_report_file(self, report):
        tree = ET.parse(report)
        root = tree.getroot()
        if elem_ts := root.find('testsuite'):
            if elem_ts.get('failures') != '0':
                self.status = TwisterStatus.FAIL
                self.instance.reason = f"{elem_ts.get('failures')}/{elem_ts.get('tests')} pytest scenario(s) failed"
            elif elem_ts.get('errors') != '0':
                self.status = TwisterStatus.ERROR
                self.instance.reason = 'Error during pytest execution'
            elif elem_ts.get('skipped') == elem_ts.get('tests'):
                self.status = TwisterStatus.SKIP
            else:
                self.status = TwisterStatus.PASS
            self.instance.execution_time = float(elem_ts.get('time'))

            for elem_tc in elem_ts.findall('testcase'):
                tc = self.instance.add_testcase(f"{self.id}.{elem_tc.get('name')}")
                tc.duration = float(elem_tc.get('time'))
                elem = elem_tc.find('*')
                if elem is None:
                    tc.status = TwisterStatus.PASS
                else:
                    if elem.tag == ReportStatus.SKIP:
                        tc.status = TwisterStatus.SKIP
                    elif elem.tag == ReportStatus.FAIL:
                        tc.status = TwisterStatus.FAIL
                    else:
                        tc.status = TwisterStatus.ERROR
                    tc.reason = elem.get('message')
                    tc.output = elem.text
        else:
            self.status = TwisterStatus.SKIP
            self.instance.reason = 'No tests collected'


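# The Gtest harness tracks googletest banner lines in the console output.
# Illustrative examples of the lines the patterns below are meant to match:
#   [ RUN      ] suite_name.test_name
#   [       OK ] suite_name.test_name
#   [  FAILED  ] suite_name.test_name
#   [==========] Done running all tests.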
class Gtest(Harness):
    ANSI_ESCAPE = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
    _NAME_PATTERN = "[a-zA-Z_][a-zA-Z0-9_]*"
    _SUITE_TEST_NAME_PATTERN = f"(?P<suite_name>{_NAME_PATTERN})\\.(?P<test_name>{_NAME_PATTERN})"
    TEST_START_PATTERN = f".*\\[ RUN      \\] {_SUITE_TEST_NAME_PATTERN}"
    TEST_PASS_PATTERN = f".*\\[       OK \\] {_SUITE_TEST_NAME_PATTERN}"
    TEST_SKIP_PATTERN = f".*\\[ DISABLED \\] {_SUITE_TEST_NAME_PATTERN}"
    TEST_FAIL_PATTERN = f".*\\[  FAILED  \\] {_SUITE_TEST_NAME_PATTERN}"
    FINISHED_PATTERN = (
        ".*(?:\\[==========\\] Done running all tests\\.|"
        + "\\[----------\\] Global test environment tear-down)"
    )

    def __init__(self):
        super().__init__()
        self.tc = None
        self.has_failures = False

    def handle(self, line):
        # Strip the ANSI characters, they mess up the patterns
        non_ansi_line = self.ANSI_ESCAPE.sub('', line)

        if self.status != TwisterStatus.NONE:
            return

        # Check if we started running a new test
        test_start_match = re.search(self.TEST_START_PATTERN, non_ansi_line)
        if test_start_match:
            # Add the suite name
            suite_name = test_start_match.group("suite_name")
            if suite_name not in self.detected_suite_names:
                self.detected_suite_names.append(suite_name)

            # Generate the internal name of the test
            name = "{}.{}.{}".format(self.id, suite_name, test_start_match.group("test_name"))

            # Assert that we don't already have a running test
            assert (
                self.tc is None
            ), "gTest error, {} didn't finish".format(self.tc)

            # Check that the instance doesn't exist yet (prevents re-running)
            tc = self.instance.get_case_by_name(name)
            assert tc is None, "gTest error, {} running twice".format(tc)

            # Create the test instance and set the context
            tc = self.instance.get_case_or_create(name)
            self.tc = tc
            self.tc.status = TwisterStatus.STARTED
            self.testcase_output += line + "\n"
            self._match = True

        # Check if the test run finished
        finished_match = re.search(self.FINISHED_PATTERN, non_ansi_line)
        if finished_match:
            tc = self.instance.get_case_or_create(self.id)
            if self.has_failures or self.tc is not None:
                self.status = TwisterStatus.FAIL
                tc.status = TwisterStatus.FAIL
            else:
                self.status = TwisterStatus.PASS
                tc.status = TwisterStatus.PASS
            return

        # Check if the individual test finished
        state, name = self._check_result(non_ansi_line)
        if state == TwisterStatus.NONE or name is None:
            # Nothing finished, keep processing lines
            return

        # Get the matching test and make sure it's the same as the current context
        tc = self.instance.get_case_by_name(name)
        assert (
            tc is not None and tc == self.tc
        ), "gTest error, mismatched tests. Expected {} but got {}".format(self.tc, tc)

        # Test finished, clear the context
        self.tc = None

        # Update the status of the test
        tc.status = state
        if tc.status == TwisterStatus.FAIL:
            self.has_failures = True
            tc.output = self.testcase_output
        self.testcase_output = ""
        self._match = False

    def _check_result(self, line):
        test_pass_match = re.search(self.TEST_PASS_PATTERN, line)
        if test_pass_match:
            return TwisterStatus.PASS, \
                "{}.{}.{}".format(
                    self.id, test_pass_match.group("suite_name"),
                    test_pass_match.group("test_name")
                )
        test_skip_match = re.search(self.TEST_SKIP_PATTERN, line)
        if test_skip_match:
            return TwisterStatus.SKIP, \
                "{}.{}.{}".format(
                    self.id, test_skip_match.group("suite_name"),
                    test_skip_match.group("test_name")
                )
        test_fail_match = re.search(self.TEST_FAIL_PATTERN, line)
        if test_fail_match:
            return TwisterStatus.FAIL, \
                "{}.{}.{}".format(
                    self.id, test_fail_match.group("suite_name"),
                    test_fail_match.group("test_name")
                )
        return None, None


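# The Test harness parses ztest console output. Illustrative example of a result
# line that the module-level result_re matches:
#   PASS - test_foo in 0.025 seconds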
class Test(Harness):
    __test__ = False  # for pytest to skip this class when collecting tests
    RUN_PASSED = "PROJECT EXECUTION SUCCESSFUL"
    RUN_FAILED = "PROJECT EXECUTION FAILED"
    test_suite_start_pattern = r"Running TESTSUITE (?P<suite_name>.*)"
    ZTEST_START_PATTERN = r"START - (test_)?([a-zA-Z0-9_-]+)"

    def handle(self, line):
        test_suite_match = re.search(self.test_suite_start_pattern, line)
        if test_suite_match:
            suite_name = test_suite_match.group("suite_name")
            self.detected_suite_names.append(suite_name)

        testcase_match = re.search(self.ZTEST_START_PATTERN, line)
        if testcase_match:
            name = "{}.{}".format(self.id, testcase_match.group(2))
            tc = self.instance.get_case_or_create(name)
            # Mark the test as started. If something goes wrong from here on
            # (for example a timeout), it is most likely caused by this test,
            # so it should be marked as failed and not blocked (not run).
            tc.status = TwisterStatus.STARTED

        if testcase_match or self._match:
            self.testcase_output += line + "\n"
            self._match = True

        result_match = result_re.match(line)
        # Some testcases are skipped based on predicates and do not show up
        # during test execution, however they are listed in the summary. Parse
        # the summary for status and use that status instead.

        summary_re = re.compile(r"- (PASS|FAIL|SKIP) - \[([^\.]*).(test_)?(\S*)\] duration = (\d*[.,]?\d*) seconds")
        summary_match = summary_re.match(line)

        if result_match:
            matched_status = result_match.group(1)
            name = "{}.{}".format(self.id, result_match.group(3))
            tc = self.instance.get_case_or_create(name)
            tc.status = TwisterStatus[matched_status]
            if tc.status == TwisterStatus.SKIP:
                tc.reason = "ztest skip"
            tc.duration = float(result_match.group(4))
            if tc.status == TwisterStatus.FAIL:
                tc.output = self.testcase_output
            self.testcase_output = ""
            self._match = False
            self.ztest = True
        elif summary_match:
            matched_status = summary_match.group(1)
            self.detected_suite_names.append(summary_match.group(2))
            name = "{}.{}".format(self.id, summary_match.group(4))
            tc = self.instance.get_case_or_create(name)
            tc.status = TwisterStatus[matched_status]
            if tc.status == TwisterStatus.SKIP:
                tc.reason = "ztest skip"
            tc.duration = float(summary_match.group(5))
            if tc.status == TwisterStatus.FAIL:
                tc.output = self.testcase_output
            self.testcase_output = ""
            self._match = False
            self.ztest = True

        self.process_test(line)

        if not self.ztest and self.status != TwisterStatus.NONE:
            logger.debug(f"not a ztest and no state for {self.id}")
            tc = self.instance.get_case_or_create(self.id)
            if self.status == TwisterStatus.PASS:
                tc.status = TwisterStatus.PASS
            else:
                tc.status = TwisterStatus.FAIL
                tc.reason = "Test failure"


class Ztest(Test):
    pass


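# The Bsim harness only overrides build(): it does not parse console output itself,
# it just makes the freshly built image available to the BabbleSim test scripts.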
class Bsim(Harness):

    def build(self):
        """
        Copying the application executable to BabbleSim's bin directory enables
        running multidevice bsim tests after twister has built them.
        """

        if self.instance is None:
            return

        original_exe_path: str = os.path.join(self.instance.build_dir, 'zephyr', 'zephyr.exe')
        if not os.path.exists(original_exe_path):
            logger.warning('Cannot copy bsim exe - cannot find original executable.')
            return

        bsim_out_path: str = os.getenv('BSIM_OUT_PATH', '')
        if not bsim_out_path:
            logger.warning('Cannot copy bsim exe - BSIM_OUT_PATH not provided.')
            return

        new_exe_name: str = self.instance.testsuite.harness_config.get('bsim_exe_name', '')
        if new_exe_name:
            new_exe_name = f'bs_{self.instance.platform.name}_{new_exe_name}'
        else:
            new_exe_name = self.instance.name
            new_exe_name = f'bs_{new_exe_name}'

        new_exe_name = new_exe_name.replace(os.path.sep, '_').replace('.', '_').replace('@', '_')

        new_exe_path: str = os.path.join(bsim_out_path, 'bin', new_exe_name)
        logger.debug(f'Copying executable from {original_exe_path} to {new_exe_path}')
        shutil.copy(original_exe_path, new_exe_path)


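# HarnessImporter maps the harness name requested by the test suite onto one of the
# classes defined in this module; when no harness name is given, the ztest-oriented
# Test harness is used.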
class HarnessImporter:

    @staticmethod
    def get_harness(harness_name):
        thismodule = sys.modules[__name__]
        try:
            if harness_name:
                harness_class = getattr(thismodule, harness_name)
            else:
                harness_class = getattr(thismodule, 'Test')
            return harness_class()
        except AttributeError as e:
            logger.debug(f"harness {harness_name} not implemented: {e}")
            return None