twister: tests: rename sanitycheck -> twister

Change all tests to use twister instead of sanitycheck.

Signed-off-by: Anas Nashif <anas.nashif@intel.com>
commit 9318e6c8f7 by Anas Nashif, 2020-12-07 12:29:36 -05:00
10 changed files with 77 additions and 77 deletions

View file

@@ -17,7 +17,7 @@ pip install -r $ZEPHYR_BASE/scripts/requirements-build-test.txt
The testcases can be executed from the root directory using
```
-pytest $ZEPHYR_BASE/scripts/tests/sanitycheck
+pytest $ZEPHYR_BASE/scripts/tests/twister
```
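If only one module is of interest, pytest's standard selection flags work here too; an illustrative invocation (not quoted from this README):
```
pytest $ZEPHYR_BASE/scripts/tests/twister/test_twister.py -k test_yamlload
```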
## Sanitycheck Coverage
@@ -25,33 +25,33 @@ pytest $ZEPHYR_BASE/scripts/tests/sanitycheck
Coverage for all the tests can be collected using the command below. This will run all the available tests.
```bash
-coverage run -m pytest $ZEPHYR_BASE/scripts/tests/sanitycheck/
+coverage run -m pytest $ZEPHYR_BASE/scripts/tests/twister/
```
-Then we can generate the coverage report for just the sanitylib script using
+Then we can generate the coverage report for just the twisterlib script using
```bash
-coverage report -m $ZEPHYR_BASE/scripts/sanity_chk/sanitylib.py
+coverage report -m $ZEPHYR_BASE/scripts/pylib/twister/twisterlib.py
```
-To generate the coverage report for the sanitycheck script, use the command below
+To generate the coverage report for the twister script, use the command below
```bash
-coverage report -m $ZEPHYR_BASE/scripts/sanitycheck
+coverage report -m $ZEPHYR_BASE/scripts/twister
```
-The HTML coverage report for sanitycheck can be generated using
+The HTML coverage report for twister can be generated using
```bash
-coverage html sanitycheck
+coverage html twister
```
If needed, the full coverage HTML report can be generated on every run of "pytest" in the tests directory using the configuration file (setup.cfg).
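A minimal sketch of such a setup.cfg, assuming the pytest-cov plugin is installed (this block is illustrative, not quoted from the repository):
```ini
[tool:pytest]
# Collect coverage for the twister library and emit an HTML report on every run
addopts = --cov=twisterlib --cov-report=html
```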
## Organization of tests
-- conftest.py: Contains common fixtures for use in testing the sanitycheck tool.
-- test_sanitycheck.py: Contains basic testcases for environment variables and for verifying the testcase & platform schemas.
-- test_testsuite_class.py: Contains testcases for the Testsuite class (except reporting functionality) in sanitylib.py.
+- conftest.py: Contains common fixtures for use in testing the twister tool.
+- test_twister.py: Contains basic testcases for environment variables and for verifying the testcase & platform schemas.
+- test_testsuite_class.py: Contains testcases for the Testsuite class (except reporting functionality) in twisterlib.py.
- test_testinstance.py: Contains testcases for the Testinstance and Testcase classes.
-- test_reporting_testsuite.py: Contains testcases for the reporting functionality of the Testsuite class of sanitycheck.
+- test_reporting_testsuite.py: Contains testcases for the reporting functionality of the Testsuite class of twister.

View file

@@ -3,26 +3,26 @@
#
# SPDX-License-Identifier: Apache-2.0
-'''Common fixtures for use in testing the sanitycheck tool.'''
+'''Common fixtures for use in testing the twister tool.'''
import os
import sys
import pytest
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
-sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
-from sanitylib import TestSuite, TestInstance
+sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
+from twisterlib import TestSuite, TestInstance
@pytest.fixture(name='test_data')
def _test_data():
""" Pytest fixture to load the test data directory"""
-data = ZEPHYR_BASE + "/scripts/tests/sanitycheck/test_data/"
+data = ZEPHYR_BASE + "/scripts/tests/twister/test_data/"
return data
@pytest.fixture(name='testcases_dir')
def testcases_directory():
""" Pytest fixture to load the testcases directory"""
-return ZEPHYR_BASE + "/scripts/tests/sanitycheck/test_data/testcases"
+return ZEPHYR_BASE + "/scripts/tests/twister/test_data/testcases"
@pytest.fixture(name='class_testsuite')
def testsuite_obj(test_data, testcases_dir, tmpdir_factory):
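The body of the class_testsuite fixture is truncated in this view. For reference, a test consumes these fixtures simply by naming them as parameters; a minimal sketch (the test body is illustrative):
```python
def test_paths(test_data, testcases_dir):
    # pytest injects the fixture return values by parameter name
    assert test_data.endswith("/test_data/")
    assert testcases_dir.endswith("/test_data/testcases")
```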

View file

@@ -1,9 +0,0 @@
-test,arch,platform,passed,status,extra_args,handler,handler_time,ram_size,rom_size
-scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample_test.app,nios2,demo_board_1,TRUE,Passed,,na,,2,3
-scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample_test.app,nios2,demo_board_2,TRUE,Passed,,qemu,0,2,3
-scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1,nios2,demo_board_2,FALSE,failed,,qemu,0,2,3
-scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_2,nios2,demo_board_2,FALSE,failed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
-scripts/tests/sanitycheck/test_data/testcases/tests/test_b/test_b.check_1,nios2,demo_board_1,TRUE,Passed,,qemu,0,2,3
-scripts/tests/sanitycheck/test_data/testcases/tests/test_b/test_b.check_2,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
-scripts/tests/sanitycheck/test_data/testcases/tests/test_c/test_c.check_1,nios2,demo_board_1,TRUE,Passed,,qemu,0,2,3
-scripts/tests/sanitycheck/test_data/testcases/tests/test_c/test_c.check_2,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3

View file

@@ -1,9 +0,0 @@
-handler_time,ram_size,rom_size
-scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample.app_dev.external_lib,nios2,demo_board_1,TRUE,Passed,,na,,0,0
-scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample.app_dev.external_lib,nios2,demo_board_2,TRUE,Passed,,qemu,0,0,0
-scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_api/kernel.fifo,nios2,demo_board_2,FALSE,failed,,qemu,0,0,0
-scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_api/kernel.fifo.poll,nios2,demo_board_2,FALSE,failed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
-scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_timeout/kernel.fifo.timeout,nios2,demo_board_1,TRUE,Passed,,qemu,0,0,0
-scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_timeout/kernel.fifo.timeout.poll,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
-scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_usage/kernel.fifo.usage,nios2,demo_board_1,TRUE,Passed,,qemu,0,0,0
-scripts/tests/sanitycheck/test_data/testcases/tests/test_kernel/test_fifo_usage/kernel.fifo.usage.poll,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0

View file

@@ -0,0 +1,9 @@
+test,arch,platform,passed,status,extra_args,handler,handler_time,ram_size,rom_size
+scripts/tests/twister/test_data/testcases/samples/test_app/sample_test.app,nios2,demo_board_1,TRUE,Passed,,na,,2,3
+scripts/tests/twister/test_data/testcases/samples/test_app/sample_test.app,nios2,demo_board_2,TRUE,Passed,,qemu,0,2,3
+scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1,nios2,demo_board_2,FALSE,failed,,qemu,0,2,3
+scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_2,nios2,demo_board_2,FALSE,failed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
+scripts/tests/twister/test_data/testcases/tests/test_b/test_b.check_1,nios2,demo_board_1,TRUE,Passed,,qemu,0,2,3
+scripts/tests/twister/test_data/testcases/tests/test_b/test_b.check_2,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
+scripts/tests/twister/test_data/testcases/tests/test_c/test_c.check_1,nios2,demo_board_1,TRUE,Passed,,qemu,0,2,3
+scripts/tests/twister/test_data/testcases/tests/test_c/test_c.check_2,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,2,3
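These rows are what the load_from_file tests below feed back into the Testsuite class; a minimal sketch of reading them with the standard library (the file path is illustrative):
```python
import csv

with open("scripts/tests/twister/test_data/twister.csv") as f:
    for row in csv.DictReader(f):
        print(row["platform"], row["test"], row["status"])
```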

View file

@@ -0,0 +1,9 @@
+handler_time,ram_size,rom_size
+scripts/tests/twister/test_data/testcases/samples/test_app/sample.app_dev.external_lib,nios2,demo_board_1,TRUE,Passed,,na,,0,0
+scripts/tests/twister/test_data/testcases/samples/test_app/sample.app_dev.external_lib,nios2,demo_board_2,TRUE,Passed,,qemu,0,0,0
+scripts/tests/twister/test_data/testcases/tests/test_kernel/test_fifo_api/kernel.fifo,nios2,demo_board_2,FALSE,failed,,qemu,0,0,0
+scripts/tests/twister/test_data/testcases/tests/test_kernel/test_fifo_api/kernel.fifo.poll,nios2,demo_board_2,FALSE,failed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
+scripts/tests/twister/test_data/testcases/tests/test_kernel/test_fifo_timeout/kernel.fifo.timeout,nios2,demo_board_1,TRUE,Passed,,qemu,0,0,0
+scripts/tests/twister/test_data/testcases/tests/test_kernel/test_fifo_timeout/kernel.fifo.timeout.poll,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0
+scripts/tests/twister/test_data/testcases/tests/test_kernel/test_fifo_usage/kernel.fifo.usage,nios2,demo_board_1,TRUE,Passed,,qemu,0,0,0
+scripts/tests/twister/test_data/testcases/tests/test_kernel/test_fifo_usage/kernel.fifo.usage.poll,nios2,demo_board_2,TRUE,Passed,"CONF_FILE=""prj_poll.conf""",qemu,0,0,0

View file

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: Apache-2.0
'''
-This test file contains testcases for the reporting functionality of the Testsuite class of sanitycheck
+This test file contains testcases for the reporting functionality of the Testsuite class of twister
'''
import sys
import os
@@ -15,11 +15,11 @@ from mock import MagicMock
import pytest
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
-sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
+sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/schemas/twister/"))
def test_discard_report(class_testsuite, platforms_list, all_testcases_dict, caplog, tmpdir):
-""" Testing discard_report function of Testsuite class in sanitycheck
+""" Testing discard_report function of Testsuite class in twister
Test 1: Check if apply_filters function has been run before running
discard_report
Test 2: Test if the generated report is not empty
@@ -41,11 +41,11 @@ def test_discard_report(class_testsuite, platforms_list, all_testcases_dict, cap
assert set(['test', 'arch', 'platform', 'reason']) == set(list(csv_reader)[0])
def test_csv_report(class_testsuite, instances_fixture, tmpdir):
-""" Testing csv_report function of Testsuite class in sanitycheck
+""" Testing csv_report function of Testsuite class in twister
Test 1: Assert the csv_report isn't empty after execution of the csv_report function
Test 2: Assert on the columns and values of the generated csv_report"""
class_testsuite.instances = instances_fixture
-filename = tmpdir.mkdir("test_csv").join("sanitycheck_csv_report.csv")
+filename = tmpdir.mkdir("test_csv").join("twister_csv_report.csv")
class_testsuite.csv_report(filename)
assert os.path.exists(filename)
assert os.stat(filename).st_size != 0
@@ -83,22 +83,22 @@ def test_csv_report(class_testsuite, instances_fixture, tmpdir):
def test_xunit_report(class_testsuite, test_data,
instances_fixture, platforms_list, all_testcases_dict):
-""" Testing xunit_report function of Testsuite class in sanitycheck
-Test 1: Assert the sanitycheck.xml file exists after execution of the xunit_report function
+""" Testing xunit_report function of Testsuite class in twister
+Test 1: Assert the twister.xml file exists after execution of the xunit_report function
Test 2: Assert on fails, passes, skips, errors values
-Test 3: Assert on the tree structure of the sanitycheck.xml file"""
+Test 3: Assert on the tree structure of the twister.xml file"""
class_testsuite.platforms = platforms_list
class_testsuite.testcases = all_testcases_dict
kwargs = {"exclude_tag" : ['test_a'], "exclude_platform" : ['demo_board_1'],
"platform" : ['demo_board_2']}
class_testsuite.apply_filters(**kwargs)
class_testsuite.instances = instances_fixture
-inst1 = class_testsuite.instances.get("demo_board_2/scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1")
-inst2 = class_testsuite.instances.get("demo_board_2/scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_2")
+inst1 = class_testsuite.instances.get("demo_board_2/scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1")
+inst2 = class_testsuite.instances.get("demo_board_2/scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_2")
inst1.status = "failed"
inst2.status = "skipped"
-filename = test_data + "sanitycheck.xml"
+filename = test_data + "twister.xml"
fails, passes, errors, skips = class_testsuite.xunit_report(filename)
assert os.path.exists(filename)
@@ -123,18 +123,18 @@ def test_xunit_report(class_testsuite, test_data,
os.remove(filename)
def test_compare_metrics(class_testsuite, test_data, instances_fixture, caplog):
-""" Testing compare_metrics function of Testsuite class in sanitycheck
-Test 1: Error message is raised if the sanitycheck.csv file doesn't exist
+""" Testing compare_metrics function of Testsuite class in twister
+Test 1: Error message is raised if the twister.csv file doesn't exist
Test 2: Assert on compare_metrics results for expected values"""
class_testsuite.instances = instances_fixture
for instance in class_testsuite.instances.values():
instance.metrics["ram_size"] = 5
instance.metrics["rom_size"] = 9
-filename_not_exist = test_data + "sanitycheck_file_not_exist.csv"
+filename_not_exist = test_data + "twister_file_not_exist.csv"
class_testsuite.compare_metrics(filename_not_exist)
assert "Cannot compare metrics, " + filename_not_exist + " not found" in caplog.text
-filename = test_data + "sanitycheck.csv"
+filename = test_data + "twister.csv"
results = class_testsuite.compare_metrics(filename)
for instance in class_testsuite.instances.values():
for res in results:
@@ -147,7 +147,7 @@ def test_compare_metrics(class_testsuite, test_data, instances_fixture, caplog):
assert res[2] == instance.metrics["rom_size"]
def test_target_report(class_testsuite, instances_fixture, tmpdir_factory):
-""" Testing target_report function of Testsuite class in sanitycheck
+""" Testing target_report function of Testsuite class in twister
Test: Assert xunit_report function is called from target_report function"""
class_testsuite.instances = instances_fixture
outdir = tmpdir_factory.mktemp("tmp")
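Several of these tests assert on the tree structure of twister.xml; a hedged sketch of inspecting that structure with the standard library (the element and attribute names assume the usual JUnit-style xunit layout and are not taken from twisterlib):
```python
import xml.etree.ElementTree as ET

tree = ET.parse("twister.xml")  # illustrative path
for suite in tree.getroot().iter("testsuite"):
    print(suite.get("name"), suite.get("failures"), suite.get("skipped"))
```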

View file

@@ -12,8 +12,8 @@ import sys
import pytest
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
-sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
-from sanitylib import TestInstance, BuildError, TestCase, SanityCheckException
+sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
+from twisterlib import TestInstance, BuildError, TestCase, SanityCheckException
TESTDATA_1 = [
@@ -32,7 +32,7 @@ def test_check_build_or_run(class_testsuite, monkeypatch, all_testcases_dict, pl
Scenario 2: Test if build_only is enabled when the OS is Windows"""
class_testsuite.testcases = all_testcases_dict
-testcase = class_testsuite.testcases.get('scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1')
+testcase = class_testsuite.testcases.get('scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1')
class_testsuite.platforms = platforms_list
platform = class_testsuite.get_platform("demo_board_2")
@@ -67,7 +67,7 @@ def test_create_overlay(class_testsuite, all_testcases_dict, platforms_list, ena
"""Test that the correct content is written to testcase_extra.conf based on the if conditions
TODO: Add extra_configs to the input list"""
class_testsuite.testcases = all_testcases_dict
-testcase = class_testsuite.testcases.get('scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample_test.app')
+testcase = class_testsuite.testcases.get('scripts/tests/twister/test_data/testcases/samples/test_app/sample_test.app')
class_testsuite.platforms = platforms_list
platform = class_testsuite.get_platform("demo_board_2")
@@ -78,7 +78,7 @@ def test_create_overlay(class_testsuite, all_testcases_dict, platforms_list, ena
def test_calculate_sizes(class_testsuite, all_testcases_dict, platforms_list):
""" Test Calculate sizes method for zephyr elf"""
class_testsuite.testcases = all_testcases_dict
-testcase = class_testsuite.testcases.get('scripts/tests/sanitycheck/test_data/testcases/samples/test_app/sample_test.app')
+testcase = class_testsuite.testcases.get('scripts/tests/twister/test_data/testcases/samples/test_app/sample_test.app')
class_testsuite.platforms = platforms_list
platform = class_testsuite.get_platform("demo_board_2")
testinstance = TestInstance(testcase, platform, class_testsuite.outdir)
@@ -87,9 +87,9 @@ def test_calculate_sizes(class_testsuite, all_testcases_dict, platforms_list):
assert testinstance.calculate_sizes() == "Missing/multiple output ELF binary"
TESTDATA_3 = [
-(ZEPHYR_BASE + '/scripts/tests/sanitycheck/test_data/testcases', ZEPHYR_BASE, '/scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1', '/scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1'),
+(ZEPHYR_BASE + '/scripts/tests/twister/test_data/testcases', ZEPHYR_BASE, '/scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1', '/scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1'),
(ZEPHYR_BASE, '.', 'test_a.check_1', 'test_a.check_1'),
-(ZEPHYR_BASE, '/scripts/tests/sanitycheck/test_data/testcases/test_b', 'test_b.check_1', '/scripts/tests/sanitycheck/test_data/testcases/test_b/test_b.check_1'),
+(ZEPHYR_BASE, '/scripts/tests/twister/test_data/testcases/test_b', 'test_b.check_1', '/scripts/tests/twister/test_data/testcases/test_b/test_b.check_1'),
(os.path.join(ZEPHYR_BASE, '/scripts/tests'), '.', 'test_b.check_1', 'test_b.check_1'),
(os.path.join(ZEPHYR_BASE, '/scripts/tests'), '.', '.', '.'),
(ZEPHYR_BASE, '.', 'test_a.check_1.check_2', 'test_a.check_1.check_2'),
@@ -122,7 +122,7 @@ TESTDATA_5 = [
def test_scan_file(test_data, test_file, expected_warnings, expected_subcases):
'''Testing scan_file method with different ztest files for warnings and results'''
-testcase = TestCase("/scripts/tests/sanitycheck/test_data/testcases/tests", ".", "test_a.check_1")
+testcase = TestCase("/scripts/tests/twister/test_data/testcases/tests", ".", "test_a.check_1")
results, warnings = testcase.scan_file(os.path.join(test_data, test_file))
assert sorted(results) == sorted(expected_subcases)
@@ -137,7 +137,7 @@ TESTDATA_6 = [
@pytest.mark.parametrize("test_path, expected_subcases", TESTDATA_6)
def test_subcases(test_data, test_path, expected_subcases):
'''Testing scan path and parse subcases methods for expected subcases'''
-testcase = TestCase("/scripts/tests/sanitycheck/test_data/testcases/tests", ".", "test_a.check_1")
+testcase = TestCase("/scripts/tests/twister/test_data/testcases/tests", ".", "test_a.check_1")
subcases = testcase.scan_path(os.path.join(test_data, test_path))
assert sorted(subcases) == sorted(expected_subcases)
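The TESTDATA_* lists above drive @pytest.mark.parametrize; a minimal self-contained illustration of the pattern (the data values here are made up):
```python
import pytest

TESTDATA = [
    ("test_a.check_1", "check_1"),
    ("test_b.check_2", "check_2"),
]

@pytest.mark.parametrize("name, expected_subcase", TESTDATA)
def test_name_split(name, expected_subcase):
    # Each tuple in TESTDATA becomes one test invocation
    assert name.split(".")[-1] == expected_subcase
```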

View file

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: Apache-2.0
'''
-This test file contains testcases for the Testsuite class of sanitycheck
+This test file contains testcases for the Testsuite class of twister
'''
import sys
import os
@@ -13,17 +13,17 @@ import pytest
from mock import call, patch, MagicMock
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
-sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
+sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
-from sanitylib import TestCase, TestSuite, TestInstance, Platform
+from twisterlib import TestCase, TestSuite, TestInstance, Platform
def test_testsuite_add_testcases(class_testsuite):
-""" Testing add_testcase function of Testsuite class in sanitycheck """
+""" Testing add_testcase function of Testsuite class in twister """
# Test 1: Check the list of testcases after calling add testcases function is as expected
class_testsuite.SAMPLE_FILENAME = 'test_sample_app.yaml'
class_testsuite.TESTCASE_FILENAME = 'test_data.yaml'
class_testsuite.add_testcases()
-tests_rel_dir = 'scripts/tests/sanitycheck/test_data/testcases/tests/'
+tests_rel_dir = 'scripts/tests/twister/test_data/testcases/tests/'
expected_testcases = ['test_b.check_1',
'test_b.check_2',
'test_c.check_1',
@@ -90,7 +90,7 @@ def test_load_from_file(test_data, class_testsuite,
""" Testing load_from_file function of TestSuite class in Sanitycheck """
# Scenario 1 : Validating the error raised if file to load from doesn't exist
with pytest.raises(SystemExit):
-class_testsuite.load_from_file(test_data + "sanitycheck_test.csv")
+class_testsuite.load_from_file(test_data + "twister_test.csv")
assert "Couldn't find input file with list of tests." in caplog.text
# Scenario 2: Testing if the 'instances' dictionary in Testsuite class contains
@@ -103,17 +103,17 @@ def test_load_from_file(test_data, class_testsuite,
class_testsuite.testcases = all_testcases_dict
instance_name_list = []
failed_platform_list = []
-with open(os.path.join(test_data, "sanitycheck.csv"), "r") as filepath:
+with open(os.path.join(test_data, "twister.csv"), "r") as filepath:
for row in csv.DictReader(filepath):
testcase_root = os.path.join(ZEPHYR_BASE,
-"scripts/tests/sanitycheck/test_data/testcases")
+"scripts/tests/twister/test_data/testcases")
workdir = row['test'].split('/')[-3] + "/" + row['test'].split('/')[-2]
test_name = os.path.basename(os.path.normpath(row['test']))
testcase = TestCase(testcase_root, workdir, test_name)
testcase.build_only = False
instance_name = row["platform"] + "/" + row["test"]
instance_name_list.append(instance_name)
-class_testsuite.load_from_file(test_data + "sanitycheck.csv")
+class_testsuite.load_from_file(test_data + "twister.csv")
assert list(class_testsuite.instances.keys()) == instance_name_list
# Scenario 3: Assert the number of times the mock method (get_platform) is called,
@@ -123,7 +123,7 @@ def test_load_from_file(test_data, class_testsuite,
if row["status"] == "failed"]
for row in failed_platform_list:
with patch.object(TestSuite, 'get_platform') as mock_method:
-class_testsuite.load_from_file(class_testsuite.outdir + "sanitycheck.csv",
+class_testsuite.load_from_file(class_testsuite.outdir + "twister.csv",
filter_status=["Skipped", "Passed"])
calls = [call(row)]
mock_method.assert_has_calls(calls, any_order=True)
@@ -131,12 +131,12 @@ def test_load_from_file(test_data, class_testsuite,
# Scenario 4: Assert the add_instances function is called from the load_from_file function
class_testsuite.add_instances = MagicMock(side_effect=class_testsuite.add_instances)
-class_testsuite.load_from_file(test_data + "sanitycheck.csv")
+class_testsuite.load_from_file(test_data + "twister.csv")
class_testsuite.add_instances.assert_called()
# Scenario 5: Validate that a KeyError is raised if an expected header is missing
with pytest.raises(SystemExit):
-class_testsuite.load_from_file(test_data + "sanitycheck_keyerror.csv")
+class_testsuite.load_from_file(test_data + "twister_keyerror.csv")
assert "Key error while parsing tests file.('status')" in caplog.text
TESTDATA_PART1 = [
@@ -224,7 +224,7 @@ def test_apply_filters_part1(class_testsuite, all_testcases_dict, platforms_list
TESTDATA_PART2 = [
("runnable", "True", "Not runnable on device"),
("exclude_tag", ['test_a'], "Command line testcase exclude filter"),
-("run_individual_tests", ['scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1'], "Testcase name filter"),
+("run_individual_tests", ['scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1'], "Testcase name filter"),
("arch", ['arm_test'], "Command line testcase arch filter"),
("tag", ['test_d'], "Command line testcase tag filter")
]

View file

@@ -11,10 +11,10 @@ import sys
import pytest
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
-sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
+sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
import scl
-from sanitylib import SanityConfigParser
+from twisterlib import SanityConfigParser
def test_yamlload():
""" Test that loading non-existent files raises errors """
@@ -29,7 +29,7 @@ def test_yamlload():
def test_correct_schema(filename, schema, test_data):
""" Test to validate the testcase schema"""
filename = test_data + filename
-schema = scl.yaml_load(ZEPHYR_BASE + '/scripts/sanity_chk/' + schema)
+schema = scl.yaml_load(ZEPHYR_BASE + '/scripts/schemas/twister/' + schema)
data = SanityConfigParser(filename, schema)
data.load()
assert data
@@ -41,7 +41,7 @@ def test_correct_schema(filename, schema, test_data):
def test_incorrect_schema(filename, schema, test_data):
""" Test to validate the exception is raised for incorrect testcase schema"""
filename = test_data + filename
-schema = scl.yaml_load(ZEPHYR_BASE + '/scripts/sanity_chk/' + schema)
+schema = scl.yaml_load(ZEPHYR_BASE + '/scripts/schemas/twister/' + schema)
with pytest.raises(Exception) as exception:
scl.yaml_load_verify(filename, schema)
assert str(exception.value) == "Schema validation failed"
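A hedged sketch of the load-and-verify pattern these schema tests exercise, using only the calls shown above (the schema and testcase file names are illustrative):
```python
import os
import sys

sys.path.insert(0, os.path.join(os.getenv("ZEPHYR_BASE"), "scripts/pylib/twister"))
import scl

# Load a schema, then validate a testcase YAML against it;
# yaml_load_verify raises if validation fails.
schema = scl.yaml_load(os.getenv("ZEPHYR_BASE") + "/scripts/schemas/twister/testcase-schema.yaml")
data = scl.yaml_load_verify("sample_test.yaml", schema)
```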