2022-06-09 10:18:41 -04:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# vim: set syntax=python ts=4 :
|
|
|
|
#
|
|
|
|
# Copyright (c) 2018 Intel Corporation
|
|
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
|
|
|
|
import json
|
|
|
|
import logging
|
2024-11-27 10:37:51 +00:00
|
|
|
import os
|
2022-06-09 10:18:41 -04:00
|
|
|
import string
|
2024-11-27 10:37:51 +00:00
|
|
|
import xml.etree.ElementTree as ET
|
2022-06-09 10:18:41 -04:00
|
|
|
from datetime import datetime
|
2024-11-27 10:37:51 +00:00
|
|
|
from enum import Enum
|
2025-01-21 16:53:11 +01:00
|
|
|
from pathlib import Path
|
2022-06-09 10:18:41 -04:00
|
|
|
|
2024-11-27 10:37:51 +00:00
|
|
|
from colorama import Fore
|
2024-05-28 12:31:53 +00:00
|
|
|
from twisterlib.statuses import TwisterStatus
|
2024-04-11 08:53:40 +00:00
|
|
|
|
2022-06-09 10:18:41 -04:00
|
|
|
logger = logging.getLogger('twister')
|
|
|
|
|
2024-05-28 12:31:53 +00:00
|
|
|
|
|
|
|
class ReportStatus(str, Enum):
    """XML element tags used for non-passing results in xunit reports."""

    ERROR = 'error'
    FAIL = 'failure'
    SKIP = 'skipped'

    def __str__(self):
        """Render the member as its bare tag string (e.g. 'failure')."""
        return str(self.value)
|
|
|
|
|
|
|
|
|
2025-01-21 16:53:11 +01:00
|
|
|
class ReportingJSONEncoder(json.JSONEncoder):
    """JSON encoder that additionally serializes pathlib.Path objects."""

    def default(self, obj):
        # Paths become plain strings; anything else is delegated to the
        # base class, which raises TypeError for unsupported types.
        if not isinstance(obj, Path):
            return super().default(obj)
        return str(obj)
|
|
|
|
|
|
|
|
|
2022-06-09 10:18:41 -04:00
|
|
|
class Reporting:
|
|
|
|
|
2024-04-30 10:33:32 +02:00
|
|
|
    # Per-output-file filter configuration consumed by json_report():
    # each key is a target report file name; each value maps a filter kind
    # ('deny_suite' drops suite properties, 'deny_status' drops whole suites
    # by status name) to the entries to exclude.
    json_filters = {
        'twister.json': {
            'deny_suite': ['footprint']
        },
        'footprint.json': {
            'deny_status': ['FILTER'],
            'deny_suite': ['testcases', 'execution_time', 'recording', 'retries', 'runnable']
        }
    }
|
|
|
|
|
2022-06-09 10:18:41 -04:00
|
|
|
    def __init__(self, plan, env) -> None:
        """Initialize the reporter from a test plan and environment.

        Args:
            plan: test plan object providing instances, platforms,
                selected_platforms and instance_fail_count.
            env: environment object whose parsed options (e.g. outdir)
                drive report generation.
        """
        self.plan = plan #FIXME
        self.instances = plan.instances
        self.platforms = plan.platforms
        self.selected_platforms = plan.selected_platforms
        self.env = env
        # One timestamp shared by every xunit <testsuite> element of this run.
        self.timestamp = datetime.now().isoformat()
        self.outdir = os.path.abspath(env.options.outdir)
        self.instance_fail_count = plan.instance_fail_count
        # Populated elsewhere; initialized empty here.
        self.footprint = None
        self.coverage_status = None
|
2022-06-09 10:18:41 -04:00
|
|
|
|
2024-11-13 13:58:45 -05:00
|
|
|
|
2022-06-09 10:18:41 -04:00
|
|
|
@staticmethod
|
|
|
|
def process_log(log_file):
|
|
|
|
filtered_string = ""
|
|
|
|
if os.path.exists(log_file):
|
|
|
|
with open(log_file, "rb") as f:
|
|
|
|
log = f.read().decode("utf-8")
|
|
|
|
filtered_string = ''.join(filter(lambda x: x in string.printable, log))
|
|
|
|
|
|
|
|
return filtered_string
|
|
|
|
|
|
|
|
|
|
|
|
    @staticmethod
    def xunit_testcase(
        eleTestsuite,
        name,
        classname,
        status: TwisterStatus,
        ts_status: TwisterStatus,
        reason,
        duration,
        runnable,
        stats,
        log,
        build_only_as_skip
    ):
        """Append one <testcase> element to eleTestsuite and update counters.

        Args:
            eleTestsuite: parent <testsuite> ElementTree element.
            name: test case identifier used as the testcase name.
            classname: xunit classname attribute.
            status: status of this test case.
            ts_status: status of the enclosing test suite.
            reason: human-readable reason attached to non-pass results.
            duration: execution time; zeroed for skipped/filtered cases.
            runnable: whether the suite was actually runnable.
            stats: running (fails, passes, errors, skips) tuple.
            log: captured output attached to failure/error elements.
            build_only_as_skip: report NOTRUN cases as skips when True.

        Returns:
            The updated (fails, passes, errors, skips) tuple.
        """
        fails, passes, errors, skips = stats

        # Skipped/filtered cases carry no meaningful runtime.
        if status in [TwisterStatus.SKIP, TwisterStatus.FILTER]:
            duration = 0

        eleTestcase = ET.SubElement(
            eleTestsuite, "testcase",
            classname=classname,
            name=f"{name}",
            time=f"{duration}")

        if status in [TwisterStatus.SKIP, TwisterStatus.FILTER]:
            skips += 1
            # temporarily add build_only_as_skip to restore existing CI report behaviour
            if ts_status == TwisterStatus.PASS and not runnable:
                tc_type = "build"
            else:
                tc_type = status
            ET.SubElement(eleTestcase, ReportStatus.SKIP, type=f"{tc_type}", message=f"{reason}")
        elif status in [TwisterStatus.FAIL, TwisterStatus.BLOCK]:
            fails += 1
            el = ET.SubElement(eleTestcase, ReportStatus.FAIL, type="failure", message=f"{reason}")
            if log:
                el.text = log
        elif status == TwisterStatus.ERROR:
            errors += 1
            el = ET.SubElement(eleTestcase, ReportStatus.ERROR, type="failure", message=f"{reason}")
            if log:
                el.text = log
        elif status == TwisterStatus.PASS:
            passes += 1
        elif status == TwisterStatus.NOTRUN:
            # Built but never executed: either count as a skip (for CI
            # compatibility) or as a pass.
            if build_only_as_skip:
                ET.SubElement(eleTestcase, ReportStatus.SKIP, type="build", message="built only")
                skips += 1
            else:
                passes += 1
        else:
            if status == TwisterStatus.NONE:
                # No result was captured at all; flag it as an untested skip.
                logger.debug(f"{name}: No status")
                ET.SubElement(
                    eleTestcase,
                    ReportStatus.SKIP,
                    type="untested",
                    message="No results captured, testsuite misconfiguration?"
                )
            else:
                logger.error(f"{name}: Unknown status '{status}'")

        return (fails, passes, errors, skips)
|
|
|
|
|
|
|
|
    # Generate a report with all testsuites instead of doing this per platform
    def xunit_report_suites(self, json_file, filename):
        """Write one xunit XML file with a <testsuite> per test suite.

        Args:
            json_file: path to a previously written twister JSON report.
            filename: destination path of the XML report.
        """
        json_data = {}
        with open(json_file) as json_results:
            json_data = json.load(json_results)

        env = json_data.get('environment', {})
        version = env.get('zephyr_version', None)

        eleTestsuites = ET.Element('testsuites')
        all_suites = json_data.get("testsuites", [])

        suites_to_report = all_suites
        # do not create entry if everything is filtered out
        if not self.env.options.detailed_skipped_report:
            suites_to_report = list(
                filter(lambda d: TwisterStatus(d.get('status')) != TwisterStatus.FILTER, all_suites)
            )

        for suite in suites_to_report:
            duration = 0
            # Counter attributes start at zero and are patched up after the
            # per-testcase loop below.
            eleTestsuite = ET.SubElement(eleTestsuites, 'testsuite',
                                         name=suite.get("name"), time="0",
                                         timestamp = self.timestamp,
                                         tests="0",
                                         failures="0",
                                         errors="0", skipped="0")
            eleTSPropetries = ET.SubElement(eleTestsuite, 'properties')
            # Multiple 'property' can be added to 'properties'
            # differing by name and value
            ET.SubElement(eleTSPropetries, 'property', name="version", value=version)
            ET.SubElement(eleTSPropetries, 'property', name="platform", value=suite.get("platform"))
            ET.SubElement(eleTSPropetries, 'property', name="architecture", value=suite.get("arch"))

            total = 0
            fails = passes = errors = skips = 0
            handler_time = suite.get('execution_time', 0)
            runnable = suite.get('runnable', 0)
            duration += float(handler_time)
            ts_status = TwisterStatus(suite.get('status'))
            classname = Path(suite.get("name","")).name
            for tc in suite.get("testcases", []):
                status = TwisterStatus(tc.get('status'))
                # Fall back to the suite-level reason/log when the case has none.
                reason = tc.get('reason', suite.get('reason', 'Unknown'))
                log = tc.get("log", suite.get("log"))

                tc_duration = tc.get('execution_time', handler_time)
                name = tc.get("identifier")
                fails, passes, errors, skips = self.xunit_testcase(eleTestsuite,
                    name, classname, status, ts_status, reason, tc_duration, runnable,
                    (fails, passes, errors, skips), log, True)

            total = errors + passes + fails + skips

            # Patch the counters accumulated by xunit_testcase into the
            # testsuite element's attributes.
            eleTestsuite.attrib['time'] = f"{duration}"
            eleTestsuite.attrib['failures'] = f"{fails}"
            eleTestsuite.attrib['errors'] = f"{errors}"
            eleTestsuite.attrib['skipped'] = f"{skips}"
            eleTestsuite.attrib['tests'] = f"{total}"

        ET.indent(eleTestsuites, space="\t", level=0)
        result = ET.tostring(eleTestsuites)
        with open(filename, 'wb') as report:
            report.write(result)
|
|
|
|
|
|
|
|
    def xunit_report(self, json_file, filename, selected_platform=None, full_report=False):
        """Write an xunit XML report with one <testsuite> per platform.

        Args:
            json_file: path to a previously written twister JSON report.
            filename: destination path of the XML report.
            selected_platform: restrict the report to a single platform;
                otherwise all selected platforms are reported.
            full_report: emit one testcase per test case when True,
                or one testcase per test suite when False.
        """
        if selected_platform:
            selected = [selected_platform]
            logger.info(f"Writing target report for {selected_platform}...")
        else:
            logger.info(f"Writing xunit report (unknown)...")
            selected = self.selected_platforms

        json_data = {}
        with open(json_file) as json_results:
            json_data = json.load(json_results)

        env = json_data.get('environment', {})
        version = env.get('zephyr_version', None)

        eleTestsuites = ET.Element('testsuites')
        all_suites = json_data.get("testsuites", [])

        for platform in selected:
            suites = list(filter(lambda d: d['platform'] == platform, all_suites))
            # do not create entry if everything is filtered out
            if not self.env.options.detailed_skipped_report:
                non_filtered = list(
                    filter(lambda d: TwisterStatus(d.get('status')) != TwisterStatus.FILTER, suites)
                )
                if not non_filtered:
                    continue

            duration = 0
            # Counter attributes start at zero and are patched up after the
            # per-suite loop below.
            eleTestsuite = ET.SubElement(eleTestsuites, 'testsuite',
                                         name=platform,
                                         timestamp = self.timestamp,
                                         time="0",
                                         tests="0",
                                         failures="0",
                                         errors="0", skipped="0")
            eleTSPropetries = ET.SubElement(eleTestsuite, 'properties')
            # Multiple 'property' can be added to 'properties'
            # differing by name and value
            ET.SubElement(eleTSPropetries, 'property', name="version", value=version)

            total = 0
            fails = passes = errors = skips = 0
            for ts in suites:
                handler_time = ts.get('execution_time', 0)
                runnable = ts.get('runnable', 0)
                duration += float(handler_time)

                ts_status = TwisterStatus(ts.get('status'))
                # Do not report filtered testcases
                if (
                    ts_status == TwisterStatus.FILTER
                    and not self.env.options.detailed_skipped_report
                ):
                    continue
                if full_report:
                    classname = Path(ts.get("name","")).name
                    for tc in ts.get("testcases", []):
                        status = TwisterStatus(tc.get('status'))
                        # Fall back to the suite-level reason/log when the
                        # case has none.
                        reason = tc.get('reason', ts.get('reason', 'Unknown'))
                        log = tc.get("log", ts.get("log"))

                        tc_duration = tc.get('execution_time', handler_time)
                        name = tc.get("identifier")
                        fails, passes, errors, skips = self.xunit_testcase(eleTestsuite,
                            name, classname, status, ts_status, reason, tc_duration, runnable,
                            (fails, passes, errors, skips), log, True)
                else:
                    # Condensed report: one testcase entry per test suite.
                    reason = ts.get('reason', 'Unknown')
                    name = ts.get("name")
                    classname = f"{platform}:{name}"
                    log = ts.get("log")
                    fails, passes, errors, skips = self.xunit_testcase(eleTestsuite,
                        name, classname, ts_status, ts_status, reason, handler_time, runnable,
                        (fails, passes, errors, skips), log, False)

            total = errors + passes + fails + skips

            eleTestsuite.attrib['time'] = f"{duration}"
            eleTestsuite.attrib['failures'] = f"{fails}"
            eleTestsuite.attrib['errors'] = f"{errors}"
            eleTestsuite.attrib['skipped'] = f"{skips}"
            eleTestsuite.attrib['tests'] = f"{total}"

        ET.indent(eleTestsuites, space="\t", level=0)
        result = ET.tostring(eleTestsuites)
        with open(filename, 'wb') as report:
            report.write(result)
|
|
|
|
|
2024-05-07 23:58:22 +02:00
|
|
|
    def json_report(self, filename, version="NA", platform=None, filters=None):
        """Serialize all (non-excluded) test instances into a JSON report.

        Args:
            filename: destination JSON file.
            version: zephyr version string recorded in the environment block.
            platform: when set, only include instances for this platform.
            filters: optional dict with 'allow_status'/'deny_status' (filter
                whole suites by status) and 'allow_suite'/'deny_suite'
                (filter suite properties) — see self.json_filters.
        """
        logger.info(f"Writing JSON report (unknown)")

        if self.env.options.report_all_options:
            report_options = vars(self.env.options)
        else:
            report_options = self.env.non_default_options()

        report = {}
        report["environment"] = {"os": os.name,
                                 "zephyr_version": version,
                                 "toolchain": self.env.toolchain,
                                 "commit_date": self.env.commit_date,
                                 "run_date": self.env.run_date,
                                 "options": report_options
                                 }
        suites = []

        for instance in self.instances.values():
            if platform and platform != instance.platform.name:
                continue
            if instance.status == TwisterStatus.FILTER and not self.env.options.report_filtered:
                continue
            # Status-level filtering: drop whole suites whose status is not
            # allowed / is denied by the active filter set.
            if (filters and 'allow_status' in filters and \
                instance.status not in [TwisterStatus[s] for s in filters['allow_status']]):
                logger.debug(
                    f"Skip test suite '{instance.testsuite.name}'"
                    f" status '{instance.status}' not allowed for (unknown)"
                )
                continue
            if (filters and 'deny_status' in filters and \
                instance.status in [TwisterStatus[s] for s in filters['deny_status']]):
                logger.debug(
                    f"Skip test suite '{instance.testsuite.name}'"
                    f" status '{instance.status}' denied for (unknown)"
                )
                continue
            suite = {}
            # Candidate log files produced in the instance build directory.
            handler_log = os.path.join(instance.build_dir, "handler.log")
            pytest_log = os.path.join(instance.build_dir, "twister_harness.log")
            build_log = os.path.join(instance.build_dir, "build.log")
            device_log = os.path.join(instance.build_dir, "device.log")

            handler_time = instance.metrics.get('handler_time', 0)
            used_ram = instance.metrics.get ("used_ram", 0)
            used_rom = instance.metrics.get("used_rom",0)
            available_ram = instance.metrics.get("available_ram", 0)
            available_rom = instance.metrics.get("available_rom", 0)
            suite = {
                "name": instance.testsuite.name,
                "arch": instance.platform.arch,
                "platform": instance.platform.name,
                "path": instance.testsuite.source_dir_rel
            }
            if instance.run_id:
                suite['run_id'] = instance.run_id

            suite["runnable"] = False
            if instance.status != TwisterStatus.FILTER:
                suite["runnable"] = instance.run

            # Only record footprint numbers that are non-zero.
            if used_ram:
                suite["used_ram"] = used_ram
            if used_rom:
                suite["used_rom"] = used_rom

            suite['retries'] = instance.retries
            if instance.toolchain:
                suite['toolchain'] = instance.toolchain

            if instance.dut:
                suite["dut"] = instance.dut
            if available_ram:
                suite["available_ram"] = available_ram
            if available_rom:
                suite["available_rom"] = available_rom
            if instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
                suite['status'] = instance.status
                # FIXME
                # Attach the most specific log available, in priority order.
                if os.path.exists(pytest_log):
                    suite["log"] = self.process_log(pytest_log)
                elif os.path.exists(handler_log):
                    suite["log"] = self.process_log(handler_log)
                elif os.path.exists(device_log):
                    suite["log"] = self.process_log(device_log)
                else:
                    suite["log"] = self.process_log(build_log)

                suite["reason"] = self.get_detailed_reason(instance.reason, suite["log"])
                # update the reason to get more details also in other reports (e.g. junit)
                # where build log is not available
                instance.reason = suite["reason"]
            elif instance.status == TwisterStatus.FILTER:
                suite["status"] = TwisterStatus.FILTER
                suite["reason"] = instance.reason
            elif instance.status == TwisterStatus.PASS:
                suite["status"] = TwisterStatus.PASS
            elif instance.status == TwisterStatus.SKIP:
                suite["status"] = TwisterStatus.SKIP
                suite["reason"] = instance.reason
            elif instance.status == TwisterStatus.NOTRUN:
                suite["status"] = TwisterStatus.NOTRUN
                suite["reason"] = instance.reason
            else:
                suite["status"] = TwisterStatus.NONE
                suite["reason"] = 'Unknown Instance status.'

            if instance.status != TwisterStatus.NONE:
                suite["execution_time"] = f"{float(handler_time):.2f}"
                suite["build_time"] = f"{float(instance.build_time):.2f}"

            testcases = []

            # A suite with a single case inherits the whole handler time.
            if len(instance.testcases) == 1:
                single_case_duration = f"{float(handler_time):.2f}"
            else:
                single_case_duration = 0

            for case in instance.testcases:
                # freeform was set when no sub testcases were parsed, however,
                # if we discover those at runtime, the fallback testcase wont be
                # needed anymore and can be removed from the output, it does
                # not have a status and would otherwise be reported as skipped.
                if (
                    case.freeform
                    and case.status == TwisterStatus.NONE
                    and len(instance.testcases) > 1
                ):
                    continue
                testcase = {}
                testcase['identifier'] = case.name
                if instance.status != TwisterStatus.NONE:
                    if single_case_duration:
                        testcase['execution_time'] = single_case_duration
                    else:
                        testcase['execution_time'] = f"{float(case.duration):.2f}"

                if case.output != "":
                    testcase['log'] = case.output

                if case.status == TwisterStatus.SKIP:
                    # A skipped case inside a filtered instance is itself
                    # reported as filtered.
                    if instance.status == TwisterStatus.FILTER:
                        testcase["status"] = TwisterStatus.FILTER
                    else:
                        testcase["status"] = TwisterStatus.SKIP
                        testcase["reason"] = case.reason or instance.reason
                else:
                    testcase["status"] = case.status
                    if case.reason:
                        testcase["reason"] = case.reason

                testcases.append(testcase)

            suite['testcases'] = testcases

            if instance.recording is not None:
                suite['recording'] = instance.recording

            if (
                instance.status not in [
                    TwisterStatus.NONE,
                    TwisterStatus.ERROR,
                    TwisterStatus.FILTER
                ]
                and self.env.options.create_rom_ram_report
                and self.env.options.footprint_report is not None
            ):
                # Init as empty data preparing for filtering properties.
                suite['footprint'] = {}

            # Pass suite properties through the context filters.
            if filters and 'allow_suite' in filters:
                suite = {k:v for k,v in suite.items() if k in filters['allow_suite']}

            if filters and 'deny_suite' in filters:
                suite = {k:v for k,v in suite.items() if k not in filters['deny_suite']}

            # Compose external data only to these properties which pass filtering.
            if 'footprint' in suite:
                do_all = 'all' in self.env.options.footprint_report
                footprint_files = { 'ROM': 'rom.json', 'RAM': 'ram.json' }
                for k,v in footprint_files.items():
                    if do_all or k in self.env.options.footprint_report:
                        footprint_fname = os.path.join(instance.build_dir, v)
                        try:
                            with open(footprint_fname) as footprint_json:
                                logger.debug(f"Collect footprint.{k} for '{instance.name}'")
                                suite['footprint'][k] = json.load(footprint_json)
                        except FileNotFoundError:
                            logger.error(f"Missing footprint.{k} for '{instance.name}'")
            #
            #
            suites.append(suite)

        report["testsuites"] = suites
        with open(filename, 'w') as json_file:
            json.dump(report, json_file, indent=4, separators=(',',':'), cls=ReportingJSONEncoder)
|
2022-06-09 10:18:41 -04:00
|
|
|
|
|
|
|
|
|
|
|
    def compare_metrics(self, filename):
        """Compare current instance metrics against a saved JSON baseline.

        Args:
            filename: path to a previous twister JSON report.

        Returns:
            A list of (instance, metric, value, delta, lower_better) tuples,
            one per metric that changed; empty when the baseline is missing.
        """
        # name, datatype, lower results better
        interesting_metrics = [("used_ram", int, True),
                               ("used_rom", int, True)]

        if not os.path.exists(filename):
            logger.error(f"Cannot compare metrics, (unknown) not found")
            return []

        results = []
        saved_metrics = {}
        with open(filename) as fp:
            jt = json.load(fp)
            # Index the baseline by (suite name, platform) for O(1) lookup.
            for ts in jt.get("testsuites", []):
                d = {}
                for m, _, _ in interesting_metrics:
                    d[m] = ts.get(m, 0)
                ts_name = ts.get('name')
                ts_platform = ts.get('platform')
                saved_metrics[(ts_name, ts_platform)] = d

        for instance in self.instances.values():
            mkey = (instance.testsuite.name, instance.platform.name)
            if mkey not in saved_metrics:
                continue
            sm = saved_metrics[mkey]
            for metric, mtype, lower_better in interesting_metrics:
                if metric not in instance.metrics:
                    continue
                # Empty-string baseline values cannot be converted; skip them.
                if sm[metric] == "":
                    continue
                delta = instance.metrics.get(metric, 0) - mtype(sm[metric])
                if delta == 0:
                    continue
                results.append((instance, metric, instance.metrics.get(metric, 0), delta,
                                lower_better))
        return results
|
|
|
|
|
|
|
|
    def footprint_reports(self, report, show_footprint, all_deltas,
                          footprint_threshold, last_metrics):
        """Log footprint (RAM/ROM) deltas against a baseline report.

        Args:
            report: baseline JSON report path; no-op when falsy.
            show_footprint: actually log each delta when True.
            all_deltas: report improvements too, not only regressions,
                and ignore the threshold.
            footprint_threshold: minimum relative change (percent) to report.
            last_metrics: baseline was the last twister run (affects the
                summary message only).
        """
        if not report:
            return

        logger.debug("running footprint_reports")
        deltas = self.compare_metrics(report)
        warnings = 0
        if deltas:
            for i, metric, value, delta, lower_better in deltas:
                # Unless all deltas were requested, skip improvements
                # (smaller when lower is better, larger otherwise).
                if not all_deltas and ((delta < 0 and lower_better) or
                                       (delta > 0 and not lower_better)):
                    continue

                percentage = 0
                if value > delta:
                    percentage = (float(delta) / float(value - delta))

                # Apply the reporting threshold (percent of the old value).
                if not all_deltas and (percentage < (footprint_threshold / 100.0)):
                    continue

                if show_footprint:
                    logger.log(
                        logging.INFO if all_deltas else logging.WARNING,
                        f"{i.platform.name:<25} {i.testsuite.name:<60} {metric} {delta:<+4},"
                        f" is now {value:6} {percentage:+.2%}"
                    )

                warnings += 1

        if warnings:
            logger.warning("Found {} footprint deltas to {} as a baseline.".format(
                warnings,
                (report if not last_metrics else "the last twister run.")))
|
2022-06-09 10:18:41 -04:00
|
|
|
|
2023-01-27 11:44:23 +00:00
|
|
|
    def synopsis(self):
        """Log a short synopsis of failing/erroring instances.

        The number of items shown is controlled by --report-summary:
        0 means all failures, a positive value caps the list, and the
        default (option unset) shows the top 10.  Also prints an example
        command line to rerun one of the listed tests.
        """
        if self.env.options.report_summary == 0:
            count = self.instance_fail_count
            log_txt = f"The following issues were found (showing the all {count} items):"
        elif self.env.options.report_summary:
            count = self.env.options.report_summary
            log_txt = "The following issues were found "
            if count > self.instance_fail_count:
                log_txt += (
                    f"(presenting {self.instance_fail_count} out of the {count} items requested):"
                )
            else:
                log_txt += f"(showing the {count} of {self.instance_fail_count} items):"
        else:
            count = 10
            log_txt = f"The following issues were found (showing the top {count} items):"
        cnt = 0
        example_instance = None
        detailed_test_id = self.env.options.detailed_test_id
        for instance in self.instances.values():
            # Anything that is not a pass/filter/skip/notrun is an issue.
            if instance.status not in [
                TwisterStatus.PASS,
                TwisterStatus.FILTER,
                TwisterStatus.SKIP,
                TwisterStatus.NOTRUN
            ]:
                cnt += 1
                if cnt == 1:
                    logger.info("-+" * 40)
                    logger.info(log_txt)

                status = instance.status
                # Highlight errors/failures in red when a summary was requested.
                if self.env.options.report_summary is not None and \
                   status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
                    status = Fore.RED + status.upper() + Fore.RESET
                logger.info(
                    f"{cnt}) {instance.testsuite.name} on {instance.platform.name}"
                    f" {status} ({instance.reason})"
                )
                example_instance = instance
                if cnt == count:
                    break
        if cnt == 0 and self.env.options.report_summary is not None:
            logger.info("-+" * 40)
            logger.info("No errors/fails found")

        if cnt and example_instance:
            cwd_rel_path = os.path.relpath(example_instance.testsuite.source_dir, start=os.getcwd())

            logger.info("")
            logger.info("To rerun the tests, call twister using the following commandline:")
            extra_parameters = '' if detailed_test_id else ' --no-detailed-test-id'
            logger.info(f"west twister -p <PLATFORM> -s <TEST ID>{extra_parameters}, for example:")
            logger.info("")
            logger.info(
                f"west twister -p {example_instance.platform.name}"
                f" -s {example_instance.testsuite.name}"
                f"{extra_parameters}"
            )
            logger.info("or with west:")
            logger.info(
                f"west build -p -b {example_instance.platform.name} {cwd_rel_path}"
                f" -T {example_instance.testsuite.id}"
            )
            logger.info("-+" * 40)
|
|
|
|
|
2025-02-24 18:00:52 +00:00
|
|
|
    def summary(self, results, duration):
        """Log the end-of-run summary (configurations, cases, platforms).

        Args:
            results: aggregated counters (total, passed, failed, error,
                notrun, filtered_configs, *_cases, warnings, ...).
            duration: total run time in seconds.
        """
        failed = 0
        run = 0
        for instance in self.instances.values():
            if instance.status == TwisterStatus.FAIL:
                failed += 1

            # FIXME: need a better way to identify executed tests
            handler_time = instance.metrics.get('handler_time', 0)
            if float(handler_time) > 0:
                run += 1

        if results.total and results.total != results.filtered_configs:
            pass_rate = (float(results.passed) / float(results.total - results.filtered_configs))
        else:
            pass_rate = 0

        # Color the headline pass count red if anything failed.
        passed_color = (
            TwisterStatus.get_color(TwisterStatus.FAIL)
            if failed
            else TwisterStatus.get_color(TwisterStatus.PASS)
        )
        unfiltered_configs = results.total - results.filtered_configs
        # Each *_number_section is colorized only when its count is non-zero.
        notrun_number_section = (
            f'{TwisterStatus.get_color(TwisterStatus.NOTRUN)}{results.notrun}{Fore.RESET}'
            if results.notrun
            else f'{results.notrun}'
        )
        failed_number_section = (
            f'{TwisterStatus.get_color(TwisterStatus.FAIL)}{results.failed}{Fore.RESET}'
            if results.failed
            else f'{results.failed}'
        )
        error_number_section = (
            f'{TwisterStatus.get_color(TwisterStatus.ERROR)}{results.error}{Fore.RESET}'
            if results.error
            else f'{results.error}'
        )
        warnings_number_section = (
            f'{Fore.YELLOW}{self.plan.warnings + results.warnings}{Fore.RESET}'
            if (self.plan.warnings + results.warnings)
            else 'no'
        )
        logger.info(
            f"{passed_color}{results.passed} of {unfiltered_configs}{Fore.RESET}"
            f" executed test configurations passed ({pass_rate:.2%}),"
            f" {notrun_number_section} built (not run),"
            f" {failed_number_section} failed,"
            f" {error_number_section} errored,"
            f" with {warnings_number_section} warnings"
            f" in {duration:.2f} seconds."
        )

        total_platforms = len(self.platforms)
        # Platforms on which something was actually executed (not merely
        # filtered, skipped or built).
        filtered_platforms = set(
            instance.platform.name for instance in self.instances.values()
            if instance.status not in [
                TwisterStatus.FILTER,
                TwisterStatus.NOTRUN,
                TwisterStatus.SKIP
            ]
        )
        # if we are only building, do not report about tests being executed.
        if self.platforms and not self.env.options.build_only:
            executed_cases = (
                results.cases
                - results.filtered_cases
                - results.skipped_cases
                - results.notrun_cases
            )
            pass_rate = 100 * (float(results.passed_cases) / float(executed_cases)) \
                if executed_cases != 0 else 0
            platform_rate = (100 * len(filtered_platforms) / len(self.platforms))
            # Optional message fragments, only included when non-zero.
            blocked_after_comma = ", " + str(results.blocked_cases) + " blocked"
            failed_after_comma = ", " + str(results.failed_cases) + " failed"
            error_after_comma = ", " + str(results.error_cases) + " errored"
            none_after_comma = ", " + str(results.none_cases) + " without a status"
            logger.info(
                f'{results.passed_cases} of {executed_cases} executed test cases passed'
                f' ({pass_rate:02.2f}%)'
                f'{blocked_after_comma if results.blocked_cases else ""}'
                f'{failed_after_comma if results.failed_cases else ""}'
                f'{error_after_comma if results.error_cases else ""}'
                f'{none_after_comma if results.none_cases else ""}'
                f' on {len(filtered_platforms)} out of total {total_platforms} platforms'
                f' ({platform_rate:02.2f}%).'
            )
            if results.skipped_cases or results.notrun_cases:
                not_executed = results.skipped_cases + results.notrun_cases
                skipped_after_colon = " " + str(results.skipped_cases) + " skipped"
                notrun_after_comma = (
                    (", " if results.skipped_cases else " ")
                    + str(results.notrun_cases)
                    + " not run (built only)"
                )
                logger.info(
                    f'{not_executed} selected test cases not executed:' \
                    f'{skipped_after_colon if results.skipped_cases else ""}' \
                    f'{notrun_after_comma if results.notrun_cases else ""}' \
                    f'.'
                )

        built_only = results.total - run - results.filtered_configs - results.skipped
        logger.info(
            f"{Fore.GREEN}{run}{Fore.RESET} test configurations executed on platforms,"
            f" {TwisterStatus.get_color(TwisterStatus.NOTRUN)}{built_only}{Fore.RESET}"
            " test configurations were only built."
        )
|
2022-06-09 10:18:41 -04:00
|
|
|
|
|
|
|
def save_reports(self, name, suffix, report_dir, no_update, platform_reports):
    """Write all report artifacts for the completed run.

    Produces the main JSON report, the optional footprint JSON report,
    three xUnit XML variants, and (optionally) per-platform reports.

    Args:
        name: base name for the report files; defaults to "twister".
        suffix: optional string appended to every report file name.
        report_dir: directory to write reports into; created if missing.
            Falls back to ``self.outdir`` when not given.
        no_update: when True, skip (re)generating all report files.
        platform_reports: when True, also emit one report set per platform.
    """
    # Nothing ran, nothing to report.
    if not self.instances:
        return

    logger.info("Saving reports...")
    report_name = name if name else "twister"

    if report_dir:
        os.makedirs(report_dir, exist_ok=True)
        filename = os.path.join(report_dir, report_name)
        outdir = report_dir
    else:
        outdir = self.outdir
        filename = os.path.join(outdir, report_name)

    if suffix:
        # Append the suffix to the full report path. (Previously the
        # computed directory-joined path was discarded here and replaced
        # with a bare "(unknown)_<suffix>" literal.)
        filename = f"{filename}_{suffix}"

    if not no_update:
        json_file = filename + ".json"
        self.json_report(json_file, version=self.env.version,
                         filters=self.json_filters['twister.json'])
        if self.env.options.footprint_report is not None:
            self.json_report(filename + "_footprint.json", version=self.env.version,
                             filters=self.json_filters['footprint.json'])
        self.xunit_report(json_file, filename + ".xml", full_report=False)
        self.xunit_report(json_file, filename + "_report.xml", full_report=True)
        self.xunit_report_suites(json_file, filename + "_suite_report.xml")

        if platform_reports:
            self.target_report(json_file, outdir, suffix)
|
|
|
|
|
|
|
|
|
|
|
|
def target_report(self, json_file, outdir, suffix):
    """Emit one xUnit XML and one JSON report per platform used in the run.

    Args:
        json_file: path of the main JSON report to source results from.
        outdir: directory the per-platform files are written into.
        suffix: optional string appended to each per-platform file name.
    """
    # Deduplicate platform objects across all selected instances.
    unique_platforms = {repr(inst.platform): inst.platform
                        for inst in self.instances.values()}
    for plat in unique_platforms.values():
        # Base file name: "<platform>" or "<platform>_<suffix>".
        stem = f"{plat.normalized_name}_{suffix}" if suffix else plat.normalized_name
        xml_path = os.path.join(outdir, stem + ".xml")
        json_stem = os.path.join(outdir, stem)

        self.xunit_report(json_file, xml_path, plat.name, full_report=True)
        self.json_report(json_stem + ".json",
                         version=self.env.version, platform=plat.name,
                         filters=self.json_filters['twister.json'])
        if self.env.options.footprint_report is not None:
            self.json_report(json_stem + "_footprint.json",
                             version=self.env.version, platform=plat.name,
                             filters=self.json_filters['footprint.json'])
|
2025-02-07 16:05:21 +01:00
|
|
|
|
|
|
|
def get_detailed_reason(self, reason: str, log: str) -> str:
    """Refine a generic failure *reason* using details mined from *log*.

    Returns "<reason> - <detail>" when a known log parser can pinpoint
    the cause; otherwise returns the original reason unchanged.
    """
    # Map the generic reason to the parser that knows its log format.
    parser_by_reason = {
        'CMake build failure': '_parse_cmake_build_failure',
        'Build failure': '_parse_build_failure',
    }
    parser_name = parser_by_reason.get(reason)
    if parser_name and (error_key := getattr(self, parser_name)(log)):
        return f"{reason} - {error_key}"
    return reason
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _parse_cmake_build_failure(log: str) -> str | None:
|
|
|
|
last_warning = 'no warning found'
|
|
|
|
lines = log.splitlines()
|
|
|
|
for i, line in enumerate(lines):
|
|
|
|
if "warning: " in line:
|
|
|
|
last_warning = line
|
|
|
|
elif "devicetree error: " in line:
|
|
|
|
return "devicetree error"
|
|
|
|
elif "fatal error: " in line:
|
|
|
|
return line[line.index('fatal error: ') :].strip()
|
|
|
|
elif "error: " in line: # error: Aborting due to Kconfig warnings
|
|
|
|
if "undefined symbol" in last_warning:
|
|
|
|
return last_warning[last_warning.index('undefined symbol') :].strip()
|
|
|
|
return last_warning
|
|
|
|
elif "CMake Error at" in line:
|
|
|
|
for next_line in lines[i + 1 :]:
|
|
|
|
if next_line.strip():
|
|
|
|
return line + ' ' + next_line
|
|
|
|
return line
|
|
|
|
return None
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _parse_build_failure(log: str) -> str | None:
|
|
|
|
last_warning = ''
|
|
|
|
lines = log.splitlines()
|
|
|
|
for i, line in enumerate(lines):
|
|
|
|
if "undefined reference" in line:
|
|
|
|
return line[line.index('undefined reference') :].strip()
|
|
|
|
elif "error: ld returned" in line:
|
|
|
|
if last_warning:
|
|
|
|
return last_warning
|
|
|
|
elif "overflowed by" in lines[i - 1]:
|
|
|
|
return "ld.bfd: region overflowed"
|
|
|
|
elif "ld.bfd: warning: " in lines[i - 1]:
|
|
|
|
return "ld.bfd:" + lines[i - 1].split("ld.bfd:", 1)[-1]
|
|
|
|
return line
|
|
|
|
elif "error: " in line:
|
|
|
|
return line[line.index('error: ') :].strip()
|
|
|
|
elif ": in function " in line:
|
|
|
|
last_warning = line[line.index('in function') :].strip()
|
2025-06-03 05:29:55 -04:00
|
|
|
elif "CMake Error at" in line:
|
|
|
|
for next_line in lines[i + 1 :]:
|
|
|
|
if next_line.strip():
|
|
|
|
return line + ' ' + next_line
|
|
|
|
return line
|
2025-02-07 16:05:21 +01:00
|
|
|
return None
|