twister: rework board handling

We now use hwmv2 to list boards instead of relying on twister-specific
config files.
A single YAML file (twister.yaml for now) holds all the data needed for
all possible targets and variations of a board, reusing most of the
data where possible; variations can override the top-level data.
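
As a purely illustrative sketch (the board name, target and values
below are made up and are not part of this change), such a twister.yaml
describes a board once and lets individual targets override selected
fields:

    # hypothetical twister.yaml for a board "myboard"
    type: mcu
    arch: arm
    ram: 128
    flash: 512
    toolchain:
      - zephyr
    testing:
      default: true
    variants:
      # keyed by target name; entries override the top-level data
      myboard/mysoc/ns:
        ram: 64
        testing:
          default: false

Anything a variant does not override falls back to the top-level values
for that board.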

Twister keeps track of board 'aliases' and recognizes that, for
example, native_sim is the same as native_sim/native, so either name
can be used in test yaml files or on the command line. Reporting,
however, always uses the full name, so there is no confusion about
what is being tested/built.
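
For example (the scenario names below are hypothetical), both spellings
select the same platform, and reports would show it as
native_sim/native:

    tests:
      sample.alias.short:
        platform_allow: native_sim
      sample.alias.full:
        platform_allow: native_sim/native

The same applies on the command line, e.g. 'twister -p native_sim' and
'twister -p native_sim/native' refer to the same platform.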

Signed-off-by: Anas Nashif <anas.nashif@intel.com>
Anas Nashif 2024-08-09 17:17:40 -04:00
commit dfc7860ab1
8 changed files with 326 additions and 237 deletions

View file

@@ -1,7 +1,7 @@
 # Copyright (c) 2022 Arm Limited (or its affiliates). All rights reserved.
 # SPDX-License-Identifier: Apache-2.0
-identifier: fvp_base_revc_2xaemv8a//smp/ns
+identifier: fvp_base_revc_2xaemv8a/fvp_base_revc_2xaemv8a/smp/ns
 name: FVP Emulation FVP_Base_RevC-2xAEMvA (SMP)
 arch: arm64
 type: sim

View file

@@ -93,12 +93,14 @@ class TwisterConfigParser:
         self.common = {}

     def load(self):
-        self.data = scl.yaml_load_verify(self.filename, self.schema)
+        data = scl.yaml_load_verify(self.filename, self.schema)
+        self.data = data
         if 'tests' in self.data:
             self.scenarios = self.data['tests']
         if 'common' in self.data:
             self.common = self.data['common']
+        return data

     def _cast_value(self, value, typestr):
         if isinstance(value, str):

View file

@@ -6,8 +6,11 @@
 import os
 import scl
-from twisterlib.config_parser import TwisterConfigParser
 from twisterlib.environment import ZEPHYR_BASE
+import logging
+
+logger = logging.getLogger('twister')
+logger.setLevel(logging.DEBUG)

 class Platform:
     """Class representing metadata for a particular platform
@@ -23,6 +26,7 @@ class Platform:
         """
         self.name = ""
+        self.aliases = []
         self.normalized_name = ""
         # if sysbuild to be used by default on a given platform
         self.sysbuild = False
@@ -38,7 +42,7 @@
         self.flash = 512
         self.supported = set()

-        self.arch = ""
+        self.arch = None
         self.vendor = ""
         self.tier = -1
         self.type = "na"
@@ -50,41 +54,58 @@ class Platform:
         self.filter_data = dict()
         self.uart = ""
         self.resc = ""
+        self.qualifier = None

-    def load(self, platform_file):
-        scp = TwisterConfigParser(platform_file, self.platform_schema)
-        scp.load()
-        data = scp.data
-        self.name = data['identifier']
+    def load(self, board, target, aliases, data):
+        """Load the platform data from the board data and target data
+
+        board: the board object as per the zephyr build system
+        target: the target name of the board as per the zephyr build system
+        aliases: list of aliases for the target
+        data: the data from the twister.yaml file for the target
+        """
+        self.name = target
+        self.aliases = aliases
+
+        # Get data for various targets and use the main board data as a
+        # default. Individual variant information will replace the default data
+        # provided in the main twister configuration for this board.
+        variants = data.get("variants", {})
+        variant_data = {}
+        for alias in aliases:
+            variant_data = variants.get(alias, {})
+            if variant_data:
+                break
+
         self.normalized_name = self.name.replace("/", "_")
-        self.sysbuild = data.get("sysbuild", False)
-        self.twister = data.get("twister", True)
+        self.sysbuild = variant_data.get("sysbuild", data.get("sysbuild", self.sysbuild))
+        self.twister = variant_data.get("twister", data.get("twister", self.twister))
+
         # if no RAM size is specified by the board, take a default of 128K
-        self.ram = data.get("ram", 128)
-        testing = data.get("testing", {})
-        self.timeout_multiplier = testing.get("timeout_multiplier", 1.0)
-        self.ignore_tags = testing.get("ignore_tags", [])
-        self.only_tags = testing.get("only_tags", [])
-        self.default = testing.get("default", False)
+        self.ram = variant_data.get("ram", data.get("ram", self.ram))
+        # if no flash size is specified by the board, take a default of 512K
+        self.flash = variant_data.get("flash", data.get("flash", self.flash))
+
+        testing = variant_data.get("testing", data.get("testing", {}))
+        self.timeout_multiplier = testing.get("timeout_multiplier", self.timeout_multiplier)
+        self.ignore_tags = testing.get("ignore_tags", self.ignore_tags)
+        self.only_tags = testing.get("only_tags", self.only_tags)
+        self.default = testing.get("default", self.default)
         self.binaries = testing.get("binaries", [])
         renode = testing.get("renode", {})
         self.uart = renode.get("uart", "")
         self.resc = renode.get("resc", "")
-        # if no flash size is specified by the board, take a default of 512K
-        self.flash = data.get("flash", 512)
+
         self.supported = set()
-        for supp_feature in data.get("supported", []):
+        for supp_feature in variant_data.get("supported", data.get("supported", [])):
             for item in supp_feature.split(":"):
                 self.supported.add(item)
-        self.arch = data['arch']
-        self.vendor = data.get('vendor', '')
-        self.tier = data.get("tier", -1)
-        self.type = data.get('type', "na")
-        self.simulation = data.get('simulation', "na")
-        self.simulation_exec = data.get('simulation_exec')
-        self.supported_toolchains = data.get("toolchain", [])
+
+        self.arch = variant_data.get('arch', data.get('arch', self.arch))
+        self.vendor = board.vendor
+        self.tier = variant_data.get("tier", data.get("tier", self.tier))
+        self.type = variant_data.get('type', data.get('type', self.type))
+        self.simulation = variant_data.get('simulation', data.get('simulation', self.simulation))
+        self.simulation_exec = variant_data.get('simulation_exec', data.get('simulation_exec', self.simulation_exec))
+        self.supported_toolchains = variant_data.get("toolchain", data.get("toolchain", []))
         if self.supported_toolchains is None:
             self.supported_toolchains = []
@@ -111,7 +132,7 @@ class Platform:
             if toolchain not in self.supported_toolchains:
                 self.supported_toolchains.append(toolchain)

-        self.env = data.get("env", [])
+        self.env = variant_data.get("env", data.get("env", []))
         self.env_satisfied = True
         for env in self.env:
             if not os.environ.get(env, None):

View file

@@ -241,7 +241,14 @@ class TestInstance:
         self.handler = handler

     # Global testsuite parameters
-    def check_runnable(self, enable_slow=False, filter='buildable', fixtures=[], hardware_map=None):
+    def check_runnable(self,
+                       options,
+                       hardware_map=None):
+
+        enable_slow = options.enable_slow
+        filter = options.filter
+        fixtures = options.fixture
+        device_testing = options.device_testing
+
         if os.name == 'nt':
             # running on simulators is currently supported only for QEMU on Windows
@@ -264,8 +271,7 @@
         target_ready = bool(self.testsuite.type == "unit" or \
                         self.platform.type == "native" or \
                         (self.platform.simulation in SUPPORTED_SIMS and \
-                        self.platform.simulation not in self.testsuite.simulation_exclude) or \
-                        filter == 'runnable')
+                        self.platform.simulation not in self.testsuite.simulation_exclude) or device_testing)

         # check if test is runnable in pytest
         if self.testsuite.harness == 'pytest':
@@ -317,9 +323,10 @@
         content = "\n".join(new_config_list)

         if enable_coverage:
-            if platform.name in coverage_platform:
-                content = content + "\nCONFIG_COVERAGE=y"
-                content = content + "\nCONFIG_COVERAGE_DUMP=y"
+            for cp in coverage_platform:
+                if cp in platform.aliases:
+                    content = content + "\nCONFIG_COVERAGE=y"
+                    content = content + "\nCONFIG_COVERAGE_DUMP=y"

         if enable_asan:
             if platform.type == "native":

View file

@@ -182,6 +182,7 @@ class TestPlan:
         if self.options.test:
             self.run_individual_testsuite = self.options.test

+        self.add_configurations()
         num = self.add_testsuites(testsuite_filter=self.run_individual_testsuite)
         if num == 0:
             raise TwisterRuntimeError("No test cases found at the specified location...")
@@ -192,9 +193,7 @@
            self.scenarios.append(ts.id)

         self.report_duplicates()

         self.parse_configuration(config_file=self.env.test_config)
-        self.add_configurations()
-
         if self.load_errors:
             raise TwisterRuntimeError("Errors while loading configurations")
@@ -398,8 +397,13 @@
         sys.stdout.write(what + "\n")
         sys.stdout.flush()

+    def find_twister_data(self, board_data_list, board_aliases):
+        """Find the twister data for a board in the list of board data based on the aliases"""
+        for board_data in board_data_list:
+            if board_data.get('identifier') in board_aliases:
+                return board_data
+
     def add_configurations(self):
-        board_dirs = set()
         # Create a list of board roots as defined by the build system in general
         # Note, internally in twister a board root includes the `boards` folder
         # but in Zephyr build system, the board root is without the `boards` in folder path.
@@ -407,82 +411,91 @@
         lb_args = Namespace(arch_roots=self.env.arch_roots, soc_roots=self.env.soc_roots,
                             board_roots=board_roots, board=None, board_dir=None)
-        v1_boards = list_boards.find_boards(lb_args)
-        v2_dirs = list_boards.find_v2_board_dirs(lb_args)
-        for b in v1_boards:
-            board_dirs.add(b.dir)
-        board_dirs.update(v2_dirs)
-        logger.debug("Reading platform configuration files under %s..." % self.env.board_roots)
+        known_boards = list_boards.find_v2_boards(lb_args)
+        bdirs = {}
         platform_config = self.test_config.get('platforms', {})
-        for folder in board_dirs:
-            for file in glob.glob(os.path.join(folder, "*.yaml")):
-                # If the user set a platform filter, we can, if no other option would increase
-                # the allowed platform pool, save on time by not loading YAMLs of any boards
-                # that do not start with the required names.
-                if self.options.platform and \
-                    not self.options.all and \
-                    not self.options.integration and \
-                    not any([
-                        os.path.basename(file).startswith(
-                            re.split('[/@]', p)[0]
-                        ) for p in self.options.platform
-                    ]):
-                    continue
-                try:
-                    platform = Platform()
-                    platform.load(file)
-                    if platform.name in [p.name for p in self.platforms]:
-                        logger.error(f"Duplicate platform {platform.name} in {file}")
-                        raise Exception(f"Duplicate platform identifier {platform.name} found")
-                    if not platform.twister:
-                        continue
-
-                    self.platforms.append(platform)
-                    if not platform_config.get('override_default_platforms', False):
-                        if platform.default:
-                            self.default_platforms.append(platform.name)
-                    else:
-                        if platform.name in platform_config.get('default_platforms', []):
-                            logger.debug(f"adding {platform.name} to default platforms")
-                            self.default_platforms.append(platform.name)
-
-                    # support board@revision
-                    # if there is already an existed <board>_<revision>.yaml, then use it to
-                    # load platform directly, otherwise, iterate the directory to
-                    # get all valid board revision based on each <board>_<revision>.conf.
-                    if '@' not in platform.name:
-                        tmp_dir = os.listdir(os.path.dirname(file))
-                        for item in tmp_dir:
-                            # Need to make sure the revision matches
-                            # the permitted patterns as described in
-                            # cmake/modules/extensions.cmake.
-                            revision_patterns = ["[A-Z]",
-                                                 "[0-9]+",
-                                                 "(0|[1-9][0-9]*)(_[0-9]+){0,2}"]
-                            for pattern in revision_patterns:
-                                result = re.match(f"{platform.name}_(?P<revision>{pattern})\\.conf", item)
-                                if result:
-                                    revision = result.group("revision")
-                                    yaml_file = f"{platform.name}_{revision}.yaml"
-                                    if yaml_file not in tmp_dir:
-                                        platform_revision = copy.deepcopy(platform)
-                                        revision = revision.replace("_", ".")
-                                        platform_revision.name = f"{platform.name}@{revision}"
-                                        platform_revision.normalized_name = platform_revision.name.replace("/", "_")
-                                        platform_revision.default = False
-                                        self.platforms.append(platform_revision)
-                                    break
-                except RuntimeError as e:
-                    logger.error("E: %s: can't load: %s" % (file, e))
-                    self.load_errors += 1
-
-        self.platform_names = [p.name for p in self.platforms]
+
+        # helper function to initialize and add platforms
+        def init_and_add_platforms(data, board, target, qualifier, aliases):
+            platform = Platform()
+            if not new_config_found:
+                data = self.find_twister_data(bdirs[board.dir], aliases)
+                if not data:
+                    return
+            platform.load(board, target, aliases, data)
+            platform.qualifier = qualifier
+            if platform.name in [p.name for p in self.platforms]:
+                logger.error(f"Duplicate platform {platform.name} in {board.dir}")
+                raise Exception(f"Duplicate platform identifier {platform.name} found")
+            if not platform.twister:
+                return
+            logger.debug(f"Adding platform {platform.name} with aliases {platform.aliases}")
+            self.platforms.append(platform)
+
+        for board in known_boards:
+            new_config_found = False
+            # don't load the same board data twice
+            if not bdirs.get(board.dir):
+                datas = []
+                for file in glob.glob(os.path.join(board.dir, "*.yaml")):
+                    if os.path.basename(file) == "twister.yaml":
+                        continue
+                    try:
+                        scp = TwisterConfigParser(file, Platform.platform_schema)
+                        sdata = scp.load()
+                        datas.append(sdata)
+                    except Exception as e:
+                        logger.error(f"Error loading {file}: {e!r}")
+                        self.load_errors += 1
+                        continue
+                bdirs[board.dir] = datas
+
+            data = {}
+            if os.path.exists(board.dir / 'twister.yaml'):
+                try:
+                    scp = TwisterConfigParser(board.dir / 'twister.yaml', Platform.platform_schema)
+                    data = scp.load()
+                except Exception as e:
+                    logger.error(f"Error loading {board.dir / 'twister.yaml'}: {e!r}")
+                    self.load_errors += 1
+                    continue
+            new_config_found = True
+
+            for qual in list_boards.board_v2_qualifiers(board):
+                if board.revisions:
+                    for rev in board.revisions:
+                        target = f"{board.name}@{rev.name}/{qual}"
+                        aliases = [target]
+                        target_no_rev = f"{board.name}/{qual}"
+                        if rev.name == board.revision_default:
+                            aliases.append(target_no_rev)
+                        if '/' not in qual and len(board.socs) == 1:
+                            if rev.name == board.revision_default:
+                                aliases.append(f"{board.name}")
+                            aliases.append(f"{board.name}@{rev.name}")
+                        init_and_add_platforms(data, board, target, qual, aliases)
+                else:
+                    target = f"{board.name}/{qual}"
+                    aliases = [target]
+                    if '/' not in qual and len(board.socs) == 1:
+                        aliases.append(board.name)
+                    init_and_add_platforms(data, board, target, qual, aliases)
+
+        for platform in self.platforms:
+            if not platform_config.get('override_default_platforms', False):
+                if platform.default:
+                    self.default_platforms.append(platform.name)
+                    #logger.debug(f"adding {platform.name} to default platforms")
+                continue
+            for pp in platform_config.get('default_platforms', []):
+                if pp in platform.aliases:
+                    logger.debug(f"adding {platform.name} to default platforms (override mode)")
+                    self.default_platforms.append(platform.name)
+
+        self.platform_names = [a for p in self.platforms for a in p.aliases]

     def get_all_tests(self):
         testcases = []
@@ -550,6 +563,30 @@ class TestPlan:
                     for name in parsed_data.scenarios.keys():
                         suite_dict = parsed_data.get_scenario(name)
                         suite = TestSuite(root, suite_path, name, data=suite_dict, detailed_test_id=self.options.detailed_test_id)
+
+                        # convert to fully qualified names
+                        _integration = []
+                        _platform_allow = []
+                        _platform_exclude = []
+                        for _ip in suite.integration_platforms:
+                            if _ip in self.platform_names:
+                                _integration.append(self.get_platform(_ip).name)
+                            else:
+                                logger.error(f"Platform {_ip} not found in the list of platforms")
+                        suite.integration_platforms = _integration
+                        for _pe in suite.platform_exclude:
+                            if _pe in self.platform_names:
+                                _platform_exclude.append(self.get_platform(_pe).name)
+                            else:
+                                logger.error(f"Platform {_pe} not found in the list of platforms")
+                        suite.platform_exclude = _platform_exclude
+                        for _pa in suite.platform_allow:
+                            if _pa in self.platform_names:
+                                _platform_allow.append(self.get_platform(_pa).name)
+                            else:
+                                logger.error(f"Platform {_pa} not found in the list of platforms")
+                        suite.platform_allow = _platform_allow
+
                         if suite.harness in ['ztest', 'test']:
                             if subcases is None:
                                 # scan it only once per testsuite
@@ -575,7 +612,7 @@
     def get_platform(self, name):
         selected_platform = None
         for platform in self.platforms:
-            if platform.name == name:
+            if name in platform.aliases:
                 selected_platform = platform
                 break
         return selected_platform
@@ -608,13 +645,10 @@
                 instance.run_id = ts.get("run_id")

                 if self.options.device_testing:
-                    tfilter = 'runnable'
-                else:
-                    tfilter = 'buildable'
+                    self.options.filter = 'runnable'
+
                 instance.run = instance.check_runnable(
-                    self.options.enable_slow,
-                    tfilter,
-                    self.options.fixture,
+                    self.options,
                     self.hwm
                 )
@@ -660,14 +694,24 @@
                     if tc.get('log'):
                         case.output = tc.get('log')

-                instance.create_overlay(platform, self.options.enable_asan, self.options.enable_ubsan, self.options.enable_coverage, self.options.coverage_platform)
+                instance.create_overlay(platform,
+                                        self.options.enable_asan,
+                                        self.options.enable_ubsan,
+                                        self.options.enable_coverage,
+                                        self.options.coverage_platform
+                                        )
                 instance_list.append(instance)
             self.add_instances(instance_list)

         except FileNotFoundError as e:
             logger.error(f"{e}")
             return 1

+    def check_platform(self, platform, platform_list):
+        for p in platform_list:
+            if p in platform.aliases:
+                return True
+        return False
+
     def apply_filters(self, **kwargs):

         toolchain = self.env.toolchain
@@ -709,8 +753,16 @@
         elif vendor_filter:
             vendor_platforms = True

+        _platforms = []
         if platform_filter:
+            logger.debug(f"Checking platform filter: {platform_filter}")
+            # find in aliases and rename
             self.verify_platforms_existence(platform_filter, f"platform_filter")
+            for pf in platform_filter:
+                logger.debug(f"Checking platform in filter: {pf}")
+                if pf in self.platform_names:
+                    _platforms.append(self.get_platform(pf).name)
+            platform_filter = _platforms
             platforms = list(filter(lambda p: p.name in platform_filter, self.platforms))
         elif emu_filter:
             platforms = list(filter(lambda p: p.simulation != 'na', self.platforms))
@@ -776,19 +828,12 @@
         instance_list = []
         for plat in platform_scope:
             instance = TestInstance(ts, plat, self.env.outdir)
-            if runnable:
-                tfilter = 'runnable'
-            else:
-                tfilter = 'buildable'
             instance.run = instance.check_runnable(
-                self.options.enable_slow,
-                tfilter,
-                self.options.fixture,
+                self.options,
                 self.hwm
             )

-            if not force_platform and plat.name in exclude_platform:
+            if not force_platform and self.check_platform(plat,exclude_platform):
                 instance.add_filter("Platform is excluded on command line.", Filters.CMD_LINE)

             if (plat.arch == "unit") != (ts.type == "unit"):
@@ -961,13 +1006,13 @@
                     keyed_test = keyed_tests.get(test_keys)
                     if keyed_test is not None:
                         plat_key = {key_field: getattr(keyed_test['plat'], key_field) for key_field in key_fields}
-                        instance.add_filter(f"Already covered for key {tuple(key)} by platform {keyed_test['plat'].name} having key {plat_key}", Filters.PLATFORM_KEY)
+                        instance.add_filter(f"Already covered for key {key} by platform {keyed_test['plat'].name} having key {plat_key}", Filters.PLATFORM_KEY)
                     else:
-                        # do not add a platform to keyed tests if previously filtered
+                        # do not add a platform to keyed tests if previously
+                        # filtered
                         if not instance.filters:
                             keyed_tests[test_keys] = {'plat': plat, 'ts': ts}
-                        else:
-                            instance.add_filter(f"Excluded platform missing key fields demanded by test {key_fields}", Filters.PLATFORM)

             # if nothing stopped us until now, it means this configuration
             # needs to be added.
@@ -981,11 +1026,11 @@
             # take all default platforms
             if default_platforms and not ts.build_on_all and not integration:
                 if ts.platform_allow:
-                    a = set(self.default_platforms)
-                    b = set(ts.platform_allow)
-                    c = a.intersection(b)
-                    if c:
-                        aa = list(filter(lambda ts: ts.platform.name in c, instance_list))
+                    _default_p = set(self.default_platforms)
+                    _platform_allow = set(ts.platform_allow)
+                    _intersection = _default_p.intersection(_platform_allow)
+                    if _intersection:
+                        aa = list(filter(lambda _scenario: _scenario.platform.name in _intersection, instance_list))
                         self.add_instances(aa)
                     else:
                         self.add_instances(instance_list)
@@ -1011,7 +1056,11 @@
             self.add_instances(instance_list)

         for _, case in self.instances.items():
-            case.create_overlay(case.platform, self.options.enable_asan, self.options.enable_ubsan, self.options.enable_coverage, self.options.coverage_platform)
+            case.create_overlay(case.platform,
+                                self.options.enable_asan,
+                                self.options.enable_ubsan,
+                                self.options.enable_coverage,
+                                self.options.coverage_platform)

         self.selected_platforms = set(p.platform.name for p in self.instances.values())
@@ -1105,3 +1154,4 @@ def change_skip_to_error_if_integration(options, instance):
            return
    instance.status = TwisterStatus.ERROR
    instance.reason += " but is one of the integration platforms"
+   logger.debug(f"Changing status of {instance.name} to ERROR because it is an integration platform")

View file

@@ -139,7 +139,7 @@ def main(options: argparse.Namespace, default_options: argparse.Namespace):
    for i in tplan.instances.values():
        if i.status == TwisterStatus.FILTER:
-           if options.platform and i.platform.name not in options.platform:
+           if options.platform and not tplan.check_platform(i.platform, options.platform):
                continue
            logger.debug(
                "{:<25} {:<50} {}SKIPPED{}: {}".format(

View file

@@ -9,103 +9,112 @@
 # The original spec comes from Zephyr's twister script
 #
-type: map
-mapping:
-  "identifier":
-    type: str
-  "maintainers":
-    type: seq
-    seq:
-      - type: str
-  "name":
-    type: str
-  "type":
-    type: str
-    enum: ["mcu", "qemu", "sim", "unit", "native"]
-  "simulation":
-    type: str
-    enum:
-      [
-        "qemu",
-        "simics",
-        "xt-sim",
-        "renode",
-        "nsim",
-        "mdb-nsim",
-        "tsim",
-        "armfvp",
-        "native",
-        "custom",
-      ]
-  "simulation_exec":
-    type: str
-  "arch":
-    type: str
-    enum:
-      [
-        # architectures
-        "arc",
-        "arm",
-        "arm64",
-        "mips",
-        "nios2",
-        "posix",
-        "riscv",
-        "sparc",
-        "x86",
-        "xtensa",
+schema;platform-schema:
+  type: map
+  mapping:
+    "variants":
+      type: map
+      matching-rule: "any"
+      mapping:
+        regex;(([a-zA-Z0-9_]+)):
+          include: platform-schema
+    "identifier":
+      type: str
+    "maintainers":
+      type: seq
+      seq:
+        - type: str
+    "name":
+      type: str
+    "type":
+      type: str
+      enum: ["mcu", "qemu", "sim", "unit", "native"]
+    "simulation":
+      type: str
+      enum:
+        [
+          "qemu",
+          "simics",
+          "xt-sim",
+          "renode",
+          "nsim",
+          "mdb-nsim",
+          "tsim",
+          "armfvp",
+          "native",
+          "custom",
+        ]
+    "simulation_exec":
+      type: str
+    "arch":
+      type: str
+      enum:
+        [
+          # architectures
+          "arc",
+          "arm",
+          "arm64",
+          "mips",
+          "nios2",
+          "posix",
+          "riscv",
+          "sparc",
+          "x86",
+          "xtensa",
           # unit testing
           "unit",
        ]
    "vendor":
      type: str
    "tier":
      type: int
    "toolchain":
      type: seq
      seq:
        - type: str
    "sysbuild":
      type: bool
    "env":
      type: seq
      seq:
        - type: str
    "ram":
      type: int
    "flash":
      type: int
    "twister":
      type: bool
    "supported":
      type: seq
      seq:
        - type: str
    "testing":
      type: map
      mapping:
        "timeout_multiplier":
          type: number
          required: false
        "default":
          type: bool
        "binaries":
          type: seq
          seq:
            - type: str
        "only_tags":
          type: seq
          seq:
            - type: str
        "ignore_tags":
          type: seq
          seq:
            - type: str
        "renode":
          type: map
          mapping:
            "uart":
              type: str
            "resc":
              type: str
+
+include: platform-schema

View file

@@ -1,6 +1,6 @@
 tests:
   arch.arm64.smc_call.smc:
-    platform_allow: fvp_base_revc_2xaemv8a//smp/ns
+    platform_allow: fvp_base_revc_2xaemv8a/fvp_base_revc_2xaemv8a/smp/ns
     tags:
       - arm
       - smc