# vim: set syntax=python ts=4 :
#
# Copyright (c) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
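"""
Coverage report generation for Twister.

Reconstructs the gcov data captured from test runs (handler.log dumps) into
.gcda files and produces coverage reports with either the gcovr or the
lcov/genhtml tool chains.
"""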
import sys
import os
import logging
import pathlib
import shutil
import subprocess
import glob
import re
import tempfile

logger = logging.getLogger('twister')
logger.setLevel(logging.DEBUG)
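# Report formats each supported coverage tool can produce, keyed by tool name.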
supported_coverage_formats = {
    "gcovr": ["html", "xml", "csv", "txt", "coveralls", "sonarqube"],
    "lcov": ["html", "lcov"]
}


class CoverageTool:
    """ Base class for every supported coverage tool
    """

    def __init__(self):
        self.gcov_tool = None
        self.base_dir = None
        self.output_formats = None

    @staticmethod
    def factory(tool, jobs=None):
        if tool == 'lcov':
            t = Lcov(jobs)
        elif tool == 'gcovr':
            t = Gcovr()
        else:
            logger.error("Unsupported coverage tool specified: {}".format(tool))
            return None

        logger.debug(f"Selected {tool} as the coverage tool...")
        return t

    @staticmethod
    def retrieve_gcov_data(input_file):
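        """Extract the hex-encoded gcov dumps embedded in a handler.log file.

        The log is expected to contain one or more sections delimited by
        GCOV_COVERAGE_DUMP_START / GCOV_COVERAGE_DUMP_END, with one line per
        coverage file in the form ``*`` + file path + ``<`` + hex dump, for
        example (illustrative only)::

            GCOV_COVERAGE_DUMP_START
            */path/to/build/zephyr/main.gcda<adde...
            GCOV_COVERAGE_DUMP_END

        Returns a dict with 'complete' (whether the dump was properly
        terminated) and 'data' (a mapping of file name to the list of
        captured hex dumps).
        """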
        logger.debug("Working on %s" % input_file)
        extracted_coverage_info = {}
        capture_data = False
        capture_complete = False
        with open(input_file, 'r') as fp:
            for line in fp.readlines():
                if re.search("GCOV_COVERAGE_DUMP_START", line):
                    capture_data = True
                    capture_complete = False
                    continue
                if re.search("GCOV_COVERAGE_DUMP_END", line):
                    capture_complete = True
                    # Keep searching for additional dumps
                # Loop until the coverage data is found.
                if not capture_data:
                    continue
                if line.startswith("*"):
                    sp = line.split("<")
                    if len(sp) > 1:
                        # Remove the leading delimiter "*"
                        file_name = sp[0][1:]
                        # Remove the trailing new line char
                        hex_dump = sp[1][:-1]
                    else:
                        continue
                else:
                    continue
                if file_name in extracted_coverage_info:
                    extracted_coverage_info[file_name].append(hex_dump)
                else:
                    extracted_coverage_info[file_name] = [hex_dump]
        if not capture_data:
            capture_complete = True
        return {'complete': capture_complete, 'data': extracted_coverage_info}

    def merge_hexdumps(self, hexdumps):
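        """Merge several hex dumps of the same .gcda file into one.

        Each dump is written to its own temporary directory and the merge is
        performed pairwise with the ``gcov-tool`` companion of the configured
        gcov executable (e.g. ``gcov-tool merge dir1 dir2 --output dir2`` when
        the gcov tool is plain ``gcov``), accumulating into the last directory.
        """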
        # Only one hexdump
        if len(hexdumps) == 1:
            return hexdumps[0]

        with tempfile.TemporaryDirectory() as dir:
            # Write each hexdump to a dedicated temporary folder
            dirs = []
            for idx, dump in enumerate(hexdumps):
                subdir = dir + f'/{idx}'
                os.mkdir(subdir)
                dirs.append(subdir)
                with open(f'{subdir}/tmp.gcda', 'wb') as fp:
                    fp.write(bytes.fromhex(dump))

            # Iteratively call gcov-tool (not gcov) to merge the files
            merge_tool = self.gcov_tool + '-tool'
            for d1, d2 in zip(dirs[:-1], dirs[1:]):
                cmd = [merge_tool, 'merge', d1, d2, '--output', d2]
                subprocess.call(cmd)

            # Read back the final output file
            with open(f'{dirs[-1]}/tmp.gcda', 'rb') as fp:
                return fp.read(-1).hex()

    def create_gcda_files(self, extracted_coverage_info):
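        """Write the extracted hex dumps back out as .gcda files.

        Returns True if every file was written successfully, False otherwise.
        """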
        gcda_created = True
        logger.debug("Generating gcda files")
        for filename, hexdumps in extracted_coverage_info.items():
            # gcovr fails if kobject_hash is included in the coverage data,
            # so skip it (this is only a problem with gcovr v4.1).
            if "kobject_hash" in filename:
                filename = (filename[:-4]) + "gcno"
                try:
                    os.remove(filename)
                except Exception:
                    pass
                continue

            try:
                hexdump_val = self.merge_hexdumps(hexdumps)
                with open(filename, 'wb') as fp:
                    fp.write(bytes.fromhex(hexdump_val))
            except ValueError:
                logger.exception("Unable to convert hex data for file: {}".format(filename))
                gcda_created = False
            except FileNotFoundError:
                logger.exception("Unable to create gcda file: {}".format(filename))
                gcda_created = False
        return gcda_created

    def generate(self, outdir):
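        """Collect the gcov dumps from every handler.log under ``outdir``,
        recreate the .gcda files and run the tool-specific report generation
        (``self._generate``), logging where each requested report was written.

        Returns True if all coverage data was processed successfully.
        """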
        coverage_completed = True
        for filename in glob.glob("%s/**/handler.log" % outdir, recursive=True):
            gcov_data = self.__class__.retrieve_gcov_data(filename)
            capture_complete = gcov_data['complete']
            extracted_coverage_info = gcov_data['data']
            if capture_complete:
                gcda_created = self.create_gcda_files(extracted_coverage_info)
                if gcda_created:
                    logger.debug("Gcov data captured: {}".format(filename))
                else:
                    logger.error("Gcov data invalid for: {}".format(filename))
                    coverage_completed = False
            else:
                logger.error("Gcov data capture incomplete: {}".format(filename))
                coverage_completed = False

        with open(os.path.join(outdir, "coverage.log"), "a") as coveragelog:
            ret = self._generate(outdir, coveragelog)
            if ret == 0:
                report_log = {
                    "html": "HTML report generated: {}".format(os.path.join(outdir, "coverage", "index.html")),
                    "lcov": "LCOV report generated: {}".format(os.path.join(outdir, "coverage.info")),
                    "xml": "XML report generated: {}".format(os.path.join(outdir, "coverage", "coverage.xml")),
                    "csv": "CSV report generated: {}".format(os.path.join(outdir, "coverage", "coverage.csv")),
                    "txt": "TXT report generated: {}".format(os.path.join(outdir, "coverage", "coverage.txt")),
                    "coveralls": "Coveralls report generated: {}".format(os.path.join(outdir, "coverage", "coverage.coveralls.json")),
                    "sonarqube": "Sonarqube report generated: {}".format(os.path.join(outdir, "coverage", "coverage.sonarqube.xml"))
                }
                for r in self.output_formats.split(','):
                    logger.info(report_log[r])
            else:
                coverage_completed = False
        logger.debug("All coverage data processed: {}".format(coverage_completed))
        return coverage_completed


class Lcov(CoverageTool):
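    """Coverage report generation backed by lcov/genhtml."""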

    def __init__(self, jobs=None):
        super().__init__()
        self.ignores = []
        self.ignore_branch_patterns = []
        self.output_formats = "lcov,html"
        self.version = self.get_version()
        self.jobs = jobs

    def get_version(self):
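        """Return the installed lcov version string; exit if lcov is unavailable."""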
        try:
            result = subprocess.run(['lcov', '--version'],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    text=True, check=True)
            version_output = result.stdout.strip().replace('lcov: LCOV version ', '')
            return version_output
        except subprocess.CalledProcessError as e:
            logger.error(f"Unable to determine lcov version: {e}")
            sys.exit(1)
        except FileNotFoundError as e:
            logger.error(f"Unable to find lcov tool: {e}")
            sys.exit(1)

    def add_ignore_file(self, pattern):
        self.ignores.append('*' + pattern + '*')

    def add_ignore_directory(self, pattern):
        self.ignores.append('*/' + pattern + '/*')

    def add_ignore_branch_pattern(self, pattern):
        self.ignore_branch_patterns.append(pattern)

    @property
    def is_lcov_v2(self):
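        """True when the detected lcov major version is 2."""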
        return self.version.startswith("2")

    def run_command(self, cmd, coveragelog):
        if self.is_lcov_v2:
            # The --ignore-errors options are passed to both lcov and genhtml
            # to keep them from exiting on issues triggered by e.g.
            # samples/application_development/external_lib/
            cmd += [
                "--ignore-errors", "inconsistent,inconsistent",
                "--ignore-errors", "negative,negative",
                "--ignore-errors", "unused,unused",
                "--ignore-errors", "empty,empty",
                "--ignore-errors", "mismatch,mismatch",
            ]

        cmd_str = " ".join(cmd)
        logger.debug(f"Running {cmd_str}...")
        return subprocess.call(cmd, stdout=coveragelog)

    def run_lcov(self, args, coveragelog):
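        """Invoke lcov with the common options (gcov tool, branch coverage rc
        setting and, for lcov v2, parallelism) prepended to ``args``.

        With lcov v2 and the default job count this runs something like
        (illustrative)::

            lcov --gcov-tool <tool> --rc branch_coverage=1 --parallel 0 <args...>

        plus the ``--ignore-errors`` options added by ``run_command``.
        """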
        if self.is_lcov_v2:
            branch_coverage = "branch_coverage=1"
            if self.jobs is None:
                # Default: --parallel=0 will autodetect appropriate parallelism
                parallel = ["--parallel", "0"]
            elif self.jobs == 1:
                # Serial execution requested, don't parallelize at all
                parallel = []
            else:
                parallel = ["--parallel", str(self.jobs)]
        else:
            branch_coverage = "lcov_branch_coverage=1"
            parallel = []

        cmd = [
            "lcov", "--gcov-tool", self.gcov_tool,
            "--rc", branch_coverage,
        ] + parallel + args
        return self.run_command(cmd, coveragelog)

    def _generate(self, outdir, coveragelog):
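        """Capture coverage into coverage.info, extract the ztest data into
        ztest.info, drop the configured ignore patterns and, if requested,
        render the HTML report with genhtml.
        """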
        coveragefile = os.path.join(outdir, "coverage.info")
        ztestfile = os.path.join(outdir, "ztest.info")

        cmd = ["--capture", "--directory", outdir, "--output-file", coveragefile]
        self.run_lcov(cmd, coveragelog)

        # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
        cmd = ["--extract", coveragefile,
               os.path.join(self.base_dir, "tests", "ztest", "*"),
               "--output-file", ztestfile]
        self.run_lcov(cmd, coveragelog)

        if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
            cmd = ["--remove", ztestfile,
                   os.path.join(self.base_dir, "tests/ztest/test/*"),
                   "--output-file", ztestfile]
            self.run_lcov(cmd, coveragelog)

            files = [coveragefile, ztestfile]
        else:
            files = [coveragefile]

        for i in self.ignores:
            cmd = ["--remove", coveragefile, i, "--output-file", coveragefile]
            self.run_lcov(cmd, coveragelog)

        if 'html' not in self.output_formats.split(','):
            return 0

        cmd = ["genhtml", "--legend", "--branch-coverage",
               "--prefix", self.base_dir,
               "-output-directory", os.path.join(outdir, "coverage")] + files
        return self.run_command(cmd, coveragelog)


class Gcovr(CoverageTool):
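    """Coverage report generation backed by gcovr."""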

    def __init__(self):
        super().__init__()
        self.ignores = []
        self.ignore_branch_patterns = []
        self.output_formats = "html"
        self.version = self.get_version()

    def get_version(self):
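        """Return the installed gcovr version string; exit if gcovr is unavailable."""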
        try:
            result = subprocess.run(['gcovr', '--version'],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    text=True, check=True)
            version_lines = result.stdout.strip().split('\n')
            if version_lines:
                version_output = version_lines[0].replace('gcovr ', '')
                return version_output
        except subprocess.CalledProcessError as e:
            logger.error(f"Unable to determine gcovr version: {e}")
            sys.exit(1)
        except FileNotFoundError as e:
            logger.error(f"Unable to find gcovr tool: {e}")
            sys.exit(1)

    def add_ignore_file(self, pattern):
        self.ignores.append('.*' + pattern + '.*')

    def add_ignore_directory(self, pattern):
        self.ignores.append(".*/" + pattern + '/.*')

    def add_ignore_branch_pattern(self, pattern):
        self.ignore_branch_patterns.append(pattern)

    @staticmethod
    def _interleave_list(prefix, list):
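        """Prefix every item of ``list`` with ``prefix``, e.g.
        ``_interleave_list("-e", ["a", "b"])`` returns ``["-e", "a", "-e", "b"]``.
        """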
        tuple_list = [(prefix, item) for item in list]
        return [item for sublist in tuple_list for item in sublist]

    @staticmethod
    def _flatten_list(list):
        return [a for b in list for a in b]

    def _generate(self, outdir, coveragelog):
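        """Run gcovr to produce coverage.json (plus ztest.json for the ztest
        sources), then render the requested report formats from the merged
        tracefiles into ``<outdir>/coverage/``.
        """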
        coveragefile = os.path.join(outdir, "coverage.json")
        ztestfile = os.path.join(outdir, "ztest.json")

        excludes = Gcovr._interleave_list("-e", self.ignores)
        excludes += Gcovr._interleave_list("--exclude-branches-by-pattern", self.ignore_branch_patterns)

        # Different ifdef-ed implementations of the same function should not
        # conflict, so have GCOVR treat them as separate objects for coverage
        # statistics.
        mode_options = ["--merge-mode-functions=separate"]

        # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
        cmd = ["gcovr", "-r", self.base_dir,
               "--gcov-ignore-parse-errors=negative_hits.warn_once_per_file",
               "--gcov-executable", self.gcov_tool,
               "-e", "tests/*"]
        cmd += excludes + mode_options + ["--json", "-o", coveragefile, outdir]
        cmd_str = " ".join(cmd)
        logger.debug(f"Running {cmd_str}...")
        subprocess.call(cmd, stdout=coveragelog)

        subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
                         self.gcov_tool, "-f", "tests/ztest", "-e",
                         "tests/ztest/test/*", "--json", "-o", ztestfile,
                         outdir] + mode_options, stdout=coveragelog)

        if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
            files = [coveragefile, ztestfile]
        else:
            files = [coveragefile]

        subdir = os.path.join(outdir, "coverage")
        os.makedirs(subdir, exist_ok=True)

        tracefiles = self._interleave_list("--add-tracefile", files)

        # Convert command line argument (comma-separated list) to gcovr flags
        report_options = {
            "html": ["--html", os.path.join(subdir, "index.html"), "--html-details"],
            "xml": ["--xml", os.path.join(subdir, "coverage.xml"), "--xml-pretty"],
            "csv": ["--csv", os.path.join(subdir, "coverage.csv")],
            "txt": ["--txt", os.path.join(subdir, "coverage.txt")],
            "coveralls": ["--coveralls", os.path.join(subdir, "coverage.coveralls.json"), "--coveralls-pretty"],
            "sonarqube": ["--sonarqube", os.path.join(subdir, "coverage.sonarqube.xml")]
        }
        gcovr_options = self._flatten_list([report_options[r] for r in self.output_formats.split(',')])

        return subprocess.call(["gcovr", "-r", self.base_dir] + mode_options + gcovr_options + tracefiles,
                               stdout=coveragelog)


def run_coverage(testplan, options):
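    """Select a suitable gcov tool (an llvm-cov shim, the system gcov or the
    Zephyr SDK gcov, unless overridden with --gcov-tool), configure the chosen
    coverage tool and generate coverage reports under ``options.outdir``.

    Returns True when all coverage data was processed successfully.
    """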
    use_system_gcov = False
    gcov_tool = None

    for plat in options.coverage_platform:
        _plat = testplan.get_platform(plat)
        if _plat and (_plat.type in {"native", "unit"}):
            use_system_gcov = True
    if not options.gcov_tool:
        zephyr_sdk_gcov_tool = os.path.join(
            os.environ.get("ZEPHYR_SDK_INSTALL_DIR", default=""),
            "x86_64-zephyr-elf/bin/x86_64-zephyr-elf-gcov")
        if os.environ.get("ZEPHYR_TOOLCHAIN_VARIANT") == "llvm":
            llvm_path = os.environ.get("LLVM_TOOLCHAIN_PATH")
            if llvm_path is not None:
                llvm_path = os.path.join(llvm_path, "bin")
            llvm_cov = shutil.which("llvm-cov", path=llvm_path)
            llvm_cov_ext = pathlib.Path(llvm_cov).suffix
            gcov_lnk = os.path.join(options.outdir, f"gcov{llvm_cov_ext}")
            try:
                os.symlink(llvm_cov, gcov_lnk)
            except OSError:
                shutil.copy(llvm_cov, gcov_lnk)
            gcov_tool = gcov_lnk
        elif use_system_gcov:
            gcov_tool = "gcov"
        elif os.path.exists(zephyr_sdk_gcov_tool):
            gcov_tool = zephyr_sdk_gcov_tool
        else:
            logger.error("Can't find a suitable gcov tool. Use --gcov-tool or set ZEPHYR_SDK_INSTALL_DIR.")
            sys.exit(1)
    else:
        gcov_tool = str(options.gcov_tool)

    logger.info("Generating coverage files...")
    logger.info(f"Using gcov tool: {gcov_tool}")
    coverage_tool = CoverageTool.factory(options.coverage_tool, jobs=options.jobs)
    coverage_tool.gcov_tool = gcov_tool
    coverage_tool.base_dir = os.path.abspath(options.coverage_basedir)
    # Override the default output formats if requested on the command line
    if options.coverage_formats is not None:
        coverage_tool.output_formats = options.coverage_formats
    coverage_tool.add_ignore_file('generated')
    coverage_tool.add_ignore_directory('tests')
    coverage_tool.add_ignore_directory('samples')
    # Ignore branch coverage on LOG_* and LOG_HEXDUMP_* macros
    # Branch misses are due to the implementation of Z_LOG2 and cannot be avoided
    coverage_tool.add_ignore_branch_pattern(r"^\s*LOG_(?:HEXDUMP_)?(?:DBG|INF|WRN|ERR)\(.*")
    coverage_completed = coverage_tool.generate(options.outdir)
    return coverage_completed