tests: sanitycheck: Testcases for load_from_file and apply_filters

test_testsuite_class.py: Added testcases for the load_from_file and
apply_filters functions of the TestSuite class.
conftest.py: Modified the class_testsuite fixture to create outdir
as a pytest-managed temporary directory that is deleted automatically
after the testcases finish executing (a short tmpdir_factory sketch
follows the change summary below).
test_data/sanitycheck.csv: customized input file used by the
load_from_file function to load the failed or last-run testcases.
test_data/sanitycheck_keyerror.csv: file with a deliberately broken
header, used by test_load_from_file to verify that the appropriate
error is reported when a KeyError occurs.
scripts/requirements-build-test.txt: added the mock Python library
(csv is part of the standard library and needs no entry).

Signed-off-by: Aastha Grover <aastha.grover@intel.com>
Aastha Grover <aastha.grover@intel.com> 2020-06-01 16:08:20 -07:00
committed by Anas Nashif
commit cf72fe8fe9
6 changed files with 221 additions and 13 deletions
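
Note: the reworked class_testsuite fixture (see conftest.py below) relies on
pytest's built-in tmpdir_factory instead of a hand-made output directory.
A minimal sketch of the pattern, independent of the sanitycheck sources
(the fixture name out_dir and the file name report.txt are illustrative):

    import pytest

    @pytest.fixture
    def out_dir(tmpdir_factory):
        # mktemp() creates a fresh, uniquely named directory under pytest's
        # base temp path; pytest manages and prunes these directories itself,
        # so tests no longer need a manual shutil.rmtree() cleanup step.
        return tmpdir_factory.mktemp("demo_out")

    def test_writes_report(out_dir):
        report = out_dir.join("report.txt")  # py.path.local API
        report.write("ok")
        assert report.read() == "ok"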

scripts/requirements-build-test.txt

@@ -18,3 +18,6 @@ coverage
# used for west-command testing
pytest
+
+# used for mocking functions in pytest
+mock

scripts/tests/sanitycheck/conftest.py

@@ -2,8 +2,7 @@
# Copyright (c) 2020 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
-# pylint: disable=redefined-outer-name
-# pylint: disable=line-too-long

'''Common fixtures for use in testing the sanitycheck tool.'''

import os

@@ -20,17 +19,17 @@ def _test_data():
    data = ZEPHYR_BASE + "/scripts/tests/sanitycheck/test_data/"
    return data

-@pytest.fixture
-def testcases_dir():
+@pytest.fixture(name='testcases_dir')
+def testcases_directory():
    """ Pytest fixture to load the test data directory"""
    return ZEPHYR_BASE + "/scripts/tests/sanitycheck/test_data/testcases"

-@pytest.fixture
-def class_testsuite(test_data, testcases_dir):
+@pytest.fixture(name='class_testsuite')
+def testsuite_obj(test_data, testcases_dir, tmpdir_factory):
    """ Pytest fixture to initialize and return the class TestSuite object"""
    board_root = test_data + "board_config/1_level/2_level/"
    testcase_root = [testcases_dir + '/tests', testcases_dir + '/samples']
-    outdir = test_data + 'sanity_out_demo'
+    outdir = tmpdir_factory.mktemp("sanity_out_demo")
    suite = TestSuite(board_root, testcase_root, outdir)
    return suite
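
Note: passing name= to pytest.fixture is what makes the rename above work.
Tests keep requesting 'testcases_dir' and 'class_testsuite' while the
function names no longer shadow the arguments, so the redefined-outer-name
pylint disable can be dropped. A minimal sketch with illustrative names
(make_config, config):

    import pytest

    @pytest.fixture(name='config')
    def make_config():
        # The function name no longer shadows the 'config' argument below,
        # so '# pylint: disable=redefined-outer-name' becomes unnecessary.
        return {"board": "demo_board_1"}

    def test_board(config):  # requested via the name= alias
        assert config["board"] == "demo_board_1"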

scripts/tests/sanitycheck/test_data/sanitycheck.csv

@@ -0,0 +1,9 @@
test,arch,platform,passed,status,extra_args,handler,handler_time,ram_size,rom_size
scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample_test.app,nios2,demo_board_1,TRUE,Passed,,na,,2,3
scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample_test.app,nios2,demo_board_2,TRUE,Passed,,qemu,0,2,3
scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1,nios2,demo_board_2,FALSE,failed,,qemu,0,2,3
scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_2,nios2,demo_board_2,FALSE,failed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
scripts/tests/sanitycheck/test_data/testcases/tests/test_b/test_b.check_1,nios2,demo_board_1,TRUE,Passed,,qemu,0,2,3
scripts/tests/sanitycheck/test_data/testcases/tests/test_b/test_b.check_2,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
scripts/tests/sanitycheck/test_data/testcases/tests/test_c/test_c.check_1,nios2,demo_board_1,TRUE,Passed,,qemu,0,2,3
scripts/tests/sanitycheck/test_data/testcases/tests/test_c/test_c.check_2,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3

scripts/tests/sanitycheck/test_data/sanitycheck_keyerror.csv

@@ -0,0 +1,9 @@
handler_time,ram_size,rom_size
scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample.app_dev.external_lib,nios2,demo_board_1,TRUE,Passed,,na,,0,0
scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample.app_dev.external_lib,nios2,demo_board_2,TRUE,Passed,,qemu,0,0,0
scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_api/kernel.fifo,nios2,demo_board_2,FALSE,failed,,qemu,0,0,0
scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_api/kernel.fifo.poll,nios2,demo_board_2,FALSE,failed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_timeout/kernel.fifo.timeout,nios2,demo_board_1,TRUE,Passed,,qemu,0,0,0
scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_timeout/kernel.fifo.timeout.poll,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_usage/kernel.fifo.usage,nios2,demo_board_1,TRUE,Passed,,qemu,0,0,0
scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_usage/kernel.fifo.usage.poll,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
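
Note: the truncated header in this file is what scenario 5 of
test_load_from_file exercises. csv.DictReader keys each row by the header
line, so a header without the 'status' column makes row["status"] raise
KeyError, which load_from_file logs as "Key error while parsing tests
file.('status')" before exiting. A standalone sketch (the in-memory CSV
below is illustrative):

    import csv
    import io

    # Header deliberately missing the 'status' column, like the file above.
    data = io.StringIO("test,arch,platform\nfoo.test,nios2,demo_board_1\n")
    for row in csv.DictReader(data):
        try:
            row["status"]
        except KeyError as err:
            print("missing column:", err)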

scripts/tests/sanitycheck/test_testinstance.py

@@ -9,7 +9,6 @@ Tests for testinstance class
import os
import sys
-import shutil

import pytest

ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")

@@ -74,7 +73,6 @@ def test_create_overlay(class_testsuite, all_testcases_dict, platforms_list, tes
    testinstance = TestInstance(testcase, platform, class_testsuite.outdir)
    platform.type = platform_type
    assert testinstance.create_overlay(platform, enable_asan, enable_coverage, coverage_platform) == expected_content
-    shutil.rmtree(test_data + "sanity_out_demo")

def test_calculate_sizes(class_testsuite, all_testcases_dict, platforms_list):
    """ Test Calculate sizes method for zephyr elf"""

scripts/tests/sanitycheck/test_testsuite_class.py

@@ -2,14 +2,15 @@
# Copyright (c) 2020 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
+# pylint: disable=line-too-long
+# pylint: disable=C0321
'''
This test file contains testcases for Testsuite class of sanitycheck
'''
import sys
import os
+import csv
import pytest
+from mock import call, patch, MagicMock

ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))

@@ -35,7 +36,7 @@ def test_testsuite_add_testcases(class_testsuite):
        testcase_list.append(os.path.basename(os.path.normpath(key)))
    assert sorted(testcase_list) == sorted(expected_testcases)

-    # Test 2 : Assert Testcase name is expected & all the testcases values are testcase class objects
+    # Test 2 : Assert Testcase name is expected & all testcases values are testcase class objects
    testcase = class_testsuite.testcases.get(tests_rel_dir + 'test_a/test_a.check_1')
    assert testcase.name == tests_rel_dir + 'test_a/test_a.check_1'
    assert all(isinstance(n, TestCase) for n in class_testsuite.testcases.values())

@@ -57,7 +58,10 @@ def test_add_configurations(test_data, class_testsuite, board_root_dir):
def test_get_all_testcases(class_testsuite, all_testcases_dict):
    """ Testing get_all_testcases function of TestSuite class in Sanitycheck """
    class_testsuite.testcases = all_testcases_dict
-    expected_tests = ['test_b.check_1', 'test_b.check_2', 'test_c.check_1', 'test_c.check_2', 'test_a.check_1', 'test_a.check_2', 'sample_test.app']
+    expected_tests = ['test_b.check_1', 'test_b.check_2',
+                      'test_c.check_1', 'test_c.check_2',
+                      'test_a.check_1', 'test_a.check_2',
+                      'sample_test.app']
    assert len(class_testsuite.get_all_tests()) == 7
    assert sorted(class_testsuite.get_all_tests()) == sorted(expected_tests)

@@ -82,3 +86,189 @@ def test_get_platforms(class_testsuite, platforms_list):
    platform = class_testsuite.get_platform("demo_board_1")
    assert isinstance(platform, Platform)
    assert platform.name == "demo_board_1"

def test_load_from_file(test_data, class_testsuite,
                        platforms_list, all_testcases_dict, caplog, tmpdir_factory):
    """ Testing load_from_file function of TestSuite class in Sanitycheck """
    # Scenario 1 : Validating the error raised if the file to load from doesn't exist
    with pytest.raises(SystemExit):
        class_testsuite.load_from_file(test_data + "sanitycheck_test.csv")
    assert "Couldn't find input file with list of tests." in caplog.text

    # Scenario 2 : Testing if the 'instances' dictionary in Testsuite class contains
    # the expected values after execution of the load_from_file function.
    # Note: tmp_dir is a temporary directory, so its contents
    # get deleted after invocation of this testcase.
    tmp_dir = tmpdir_factory.mktemp("tmp")
    class_testsuite.outdir = tmp_dir
    class_testsuite.platforms = platforms_list
    class_testsuite.testcases = all_testcases_dict
    instance_name_list = []
    failed_platform_list = []
    with open(os.path.join(test_data, "sanitycheck.csv"), "r") as filepath:
        for row in csv.DictReader(filepath):
            testcase_root = os.path.join(ZEPHYR_BASE,
                                         "scripts/tests/sanitycheck/test_data/testcases")
            workdir = row['test'].split('/')[-3] + "/" + row['test'].split('/')[-2]
            test_name = os.path.basename(os.path.normpath(row['test']))
            testcase = TestCase(testcase_root, workdir, test_name)
            testcase.build_only = False
            instance_name = row["platform"] + "/" + row["test"]
            instance_name_list.append(instance_name)
        class_testsuite.load_from_file(test_data + "sanitycheck.csv")
        assert list(class_testsuite.instances.keys()) == instance_name_list

        # Scenario 3 : Assert that the number of calls to the mocked method
        # (get_platform) equals the number of failed testcases
        failed_platform_list = [row["platform"]
                                for row in csv.DictReader(filepath)
                                if row["status"] == "failed"]
        for row in failed_platform_list:
            with patch.object(TestSuite, 'get_platform') as mock_method:
                class_testsuite.load_from_file(class_testsuite.outdir + "sanitycheck.csv",
                                               filter_status=["Skipped", "Passed"])
                calls = [call(row)]
                mock_method.assert_has_calls(calls, any_order=True)
                assert mock_method.call_count == len(failed_platform_list)

    # Scenario 4 : Assert the add_instances function is called from load_from_file
    class_testsuite.add_instances = MagicMock(side_effect=class_testsuite.add_instances)
    class_testsuite.load_from_file(test_data + "sanitycheck.csv")
    class_testsuite.add_instances.assert_called()

    # Scenario 5 : Validate that a KeyError is raised in case an expected header is missing
    with pytest.raises(SystemExit):
        class_testsuite.load_from_file(test_data + "sanitycheck_keyerror.csv")
    assert "Key error while parsing tests file.('status')" in caplog.text


TESTDATA_PART1 = [
    ("toolchain_whitelist", ['gcc'], None, None, "Not in testcase toolchain whitelist"),
    ("platform_whitelist", ['demo_board_1'], None, None, "Not in testcase platform whitelist"),
    ("toolchain_exclude", ['zephyr'], None, None, "In test case toolchain exclude"),
    ("platform_exclude", ['demo_board_2'], None, None, "In test case platform exclude"),
    ("arch_exclude", ['x86_demo'], None, None, "In test case arch exclude"),
    ("arch_whitelist", ['arm'], None, None, "Not in test case arch whitelist"),
    ("skip", True, None, None, "Skip filter"),
    ("tags", set(['sensor', 'bluetooth']), "ignore_tags", ['bluetooth'], "Excluded tags per platform"),
    ("min_flash", "2024", "flash", "1024", "Not enough FLASH"),
    ("min_ram", "500", "ram", "256", "Not enough RAM"),
    ("None", "None", "env", ['BSIM_OUT_PATH', 'demo_env'], "Environment (BSIM_OUT_PATH, demo_env) not satisfied"),
    ("build_on_all", True, None, None, "Platform is excluded on command line."),
    (None, None, "supported_toolchains", ['gcc'], "Not supported by the toolchain"),
    (None, None, None, None, "Not a default test platform")
]


@pytest.mark.parametrize("tc_attribute, tc_value, plat_attribute, plat_value, expected_discards",
                         TESTDATA_PART1)
def test_apply_filters_part1(class_testsuite, all_testcases_dict, platforms_list,
                             tc_attribute, tc_value, plat_attribute, plat_value, expected_discards):
    """ Testing apply_filters function of TestSuite class in Sanitycheck
    Part 1: Response of apply_filters function (discard dictionary) has
    appropriate values according to the filters
    """
    if tc_attribute is None and plat_attribute is None:
        discards = class_testsuite.apply_filters()
        assert not discards

    class_testsuite.platforms = platforms_list
    class_testsuite.testcases = all_testcases_dict
    for plat in class_testsuite.platforms:
        if plat_attribute == "ignore_tags":
            plat.ignore_tags = plat_value
        if plat_attribute == "flash":
            plat.flash = plat_value
        if plat_attribute == "ram":
            plat.ram = plat_value
        if plat_attribute == "env":
            plat.env = plat_value
            plat.env_satisfied = False
        if plat_attribute == "supported_toolchains":
            plat.supported_toolchains = plat_value
    for _, testcase in class_testsuite.testcases.items():
        if tc_attribute == "toolchain_whitelist":
            testcase.toolchain_whitelist = tc_value
        if tc_attribute == "platform_whitelist":
            testcase.platform_whitelist = tc_value
        if tc_attribute == "toolchain_exclude":
            testcase.toolchain_exclude = tc_value
        if tc_attribute == "platform_exclude":
            testcase.platform_exclude = tc_value
        if tc_attribute == "arch_exclude":
            testcase.arch_exclude = tc_value
        if tc_attribute == "arch_whitelist":
            testcase.arch_whitelist = tc_value
        if tc_attribute == "skip":
            testcase.skip = tc_value
        if tc_attribute == "tags":
            testcase.tags = tc_value
        if tc_attribute == "min_flash":
            testcase.min_flash = tc_value
        if tc_attribute == "min_ram":
            testcase.min_ram = tc_value

    if tc_attribute == "build_on_all":
        for _, testcase in class_testsuite.testcases.items():
            testcase.build_on_all = tc_value
        discards = class_testsuite.apply_filters(exclude_platform=['demo_board_1'])
    elif plat_attribute == "supported_toolchains":
        discards = class_testsuite.apply_filters(force_toolchain=False,
                                                 exclude_platform=['demo_board_1'],
                                                 platform=['demo_board_2'])
    elif tc_attribute is None and plat_attribute is None:
        discards = class_testsuite.apply_filters()
    else:
        discards = class_testsuite.apply_filters(exclude_platform=['demo_board_1'],
                                                 platform=['demo_board_2'])
    assert all(x in list(discards.values()) for x in [expected_discards])


TESTDATA_PART2 = [
    ("device_testing", "True", "Not runnable on device"),
    ("exclude_tag", ['test_a'], "Command line testcase exclude filter"),
    ("run_individual_tests", ['scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1'], "Testcase name filter"),
    ("arch", ['arm_test'], "Command line testcase arch filter"),
    ("tag", ['test_d'], "Command line testcase tag filter")
]


@pytest.mark.parametrize("extra_filter, extra_filter_value, expected_discards", TESTDATA_PART2)
def test_apply_filters_part2(class_testsuite, all_testcases_dict,
                             platforms_list, extra_filter, extra_filter_value, expected_discards):
    """ Testing apply_filters function of TestSuite class in Sanitycheck
    Part 2 : Response of apply_filters function (discard dictionary) has
    appropriate values according to the filters
    """
    class_testsuite.platforms = platforms_list
    class_testsuite.testcases = all_testcases_dict
    kwargs = {extra_filter : extra_filter_value,
              "exclude_platform" : ['demo_board_1'], "platform" : ['demo_board_2']}
    discards = class_testsuite.apply_filters(**kwargs)
    assert type(list(discards.keys())[0]).__name__ == "TestInstance"
    assert list(dict.fromkeys(discards.values())) == [expected_discards]


TESTDATA_PART3 = [
    (20, 20, -1, 0),
    (-2, -1, 10, 20),
    (0, 0, 0, 0)
]


@pytest.mark.parametrize("tc_min_flash, plat_flash, tc_min_ram, plat_ram",
                         TESTDATA_PART3)
def test_apply_filters_part3(class_testsuite, all_testcases_dict, platforms_list,
                             tc_min_flash, plat_flash, tc_min_ram, plat_ram):
    """ Testing apply_filters function of TestSuite class in Sanitycheck
    Part 3 : Testing edge cases for ram and flash values of platforms & testcases
    """
    class_testsuite.platforms = platforms_list
    class_testsuite.testcases = all_testcases_dict
    for plat in class_testsuite.platforms:
        plat.flash = plat_flash
        plat.ram = plat_ram
    for _, testcase in class_testsuite.testcases.items():
        testcase.min_ram = tc_min_ram
        testcase.min_flash = tc_min_flash
    discards = class_testsuite.apply_filters(exclude_platform=['demo_board_1'],
                                             platform=['demo_board_2'])
    assert not discards
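
Note: the new tests lean on two mock idioms. Scenario 4 wraps a real method
in MagicMock(side_effect=...) so the original behaviour is preserved while
calls are recorded; scenario 3 uses patch.object as a context manager to
count calls to a temporarily replaced method. A self-contained sketch with
illustrative names (Suite, run), using the mock package added to
scripts/requirements-build-test.txt:

    from mock import MagicMock, call, patch

    class Suite:
        def get_platform(self, name):
            return name

        def run(self, names):
            return [self.get_platform(n) for n in names]

    suite = Suite()

    # Spy: side_effect delegates to the original bound method, so run()
    # still works while the mock records the call (scenario 4 style).
    suite.run = MagicMock(side_effect=suite.run)
    suite.run(["demo_board_1"])
    suite.run.assert_called()

    # Replace: patch.object swaps get_platform for the duration of the
    # 'with' block and exposes call bookkeeping (scenario 3 style).
    with patch.object(Suite, 'get_platform') as mock_method:
        suite.run(["demo_board_1", "demo_board_2"])
        mock_method.assert_has_calls([call("demo_board_1")], any_order=True)
        assert mock_method.call_count == 2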