sanitycheck: Testcases for report functions of the TestSuite class

test_reporting_testsuite.py: Add testcases for the reporting functions of the TestSuite class.
test_testsuite_class.py: Add a testcase for the add_instances function.
conftest.py: Add an instances_fixture fixture.

Signed-off-by: Aastha Grover <aastha.grover@intel.com>
parent dcbd915759
commit 8213a1509f
3 changed files with 214 additions and 10 deletions
scripts/tests/sanitycheck/conftest.py

@@ -11,7 +11,7 @@ import pytest
 ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
 sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
-from sanitylib import TestSuite
+from sanitylib import TestSuite, TestInstance
 
 @pytest.fixture(name='test_data')
 def _test_data():
@@ -33,18 +33,34 @@ def testsuite_obj(test_data, testcases_dir, tmpdir_factory):
     suite = TestSuite(board_root, testcase_root, outdir)
     return suite
 
-@pytest.fixture
-def all_testcases_dict(class_testsuite):
-    """ Pytest fixture to call add_testcase function of Testsuite class and return the testcases in kernel"""
+@pytest.fixture(name='all_testcases_dict')
+def testcases_dict(class_testsuite):
+    """ Pytest fixture to call add_testcase function of
+    Testsuite class and return the dictionary of testcases"""
     class_testsuite.SAMPLE_FILENAME = 'test_sample_app.yaml'
     class_testsuite.TESTCASE_FILENAME = 'test_data.yaml'
     class_testsuite.add_testcases()
     return class_testsuite.testcases
 
-@pytest.fixture
-def platforms_list(test_data, class_testsuite):
-    """ Pytest fixture to call add_configurations function of Testsuite class and return the Platforms list"""
+@pytest.fixture(name='platforms_list')
+def all_platforms_list(test_data, class_testsuite):
+    """ Pytest fixture to call add_configurations function of
+    Testsuite class and return the Platforms list"""
     class_testsuite.board_roots = os.path.abspath(test_data + "board_config")
     suite = TestSuite(class_testsuite.board_roots, class_testsuite.roots, class_testsuite.outdir)
     suite.add_configurations()
     return suite.platforms
+
+@pytest.fixture
+def instances_fixture(class_testsuite, platforms_list, all_testcases_dict, tmpdir_factory):
+    """ Pytest fixture to call add_instances function of Testsuite class
+    and return the instances dictionary"""
+    class_testsuite.outdir = tmpdir_factory.mktemp("sanity_out_demo")
+    class_testsuite.platforms = platforms_list
+    platform = class_testsuite.get_platform("demo_board_2")
+    instance_list = []
+    for _, testcase in all_testcases_dict.items():
+        instance = TestInstance(testcase, platform, class_testsuite.outdir)
+        instance_list.append(instance)
+    class_testsuite.add_instances(instance_list)
+    return class_testsuite.instances
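For context, a consumer of the new fixture looks roughly like this (a minimal sketch with a hypothetical test name; the real consumers are in test_reporting_testsuite.py below):

    def test_instances_are_registered(class_testsuite, instances_fixture):
        # instances_fixture has already run add_instances(), so the suite's
        # instances dict is populated, keyed "<platform name>/<testcase name>".
        class_testsuite.instances = instances_fixture
        assert all(k.startswith("demo_board_2/") for k in class_testsuite.instances)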

scripts/tests/sanitycheck/test_reporting_testsuite.py (new file, 168 lines)
@@ -0,0 +1,168 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
+
+'''
+This test file contains testcases for the reporting functionality of the TestSuite class of sanitycheck
+'''
+import sys
+import os
+import xml.etree.ElementTree as ET
+import csv
+from collections import defaultdict
+from mock import MagicMock
+import pytest
+
+ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
+sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
+
+
+def test_discard_report(class_testsuite, platforms_list, all_testcases_dict, caplog, tmpdir):
+    """ Testing discard_report function of Testsuite class in sanitycheck
+    Test 1: Check if apply_filters function has been run before running
+    discard_report
+    Test 2: Test if the generated report is not empty
+    Test 3: Test if the generated report contains the expected columns"""
+    class_testsuite.platforms = platforms_list
+    class_testsuite.testcases = all_testcases_dict
+    filename = tmpdir.mkdir("test_discard").join("discard_report.csv")
+    with pytest.raises(SystemExit):
+        class_testsuite.discard_report(filename)
+    assert "apply_filters() hasn't been run!" in caplog.text
+
+    kwargs = {"exclude_tag" : ['test_a'], "exclude_platform" : ['demo_board_1'],
+              "platform" : ['demo_board_2']}
+    class_testsuite.apply_filters(**kwargs)
+    class_testsuite.discard_report(filename)
+    assert os.stat(filename).st_size != 0
+    with open(filename, "r") as file:
+        csv_reader = csv.reader(file)
+        assert set(['test', 'arch', 'platform', 'reason']) == set(list(csv_reader)[0])
+
+def test_csv_report(class_testsuite, instances_fixture, tmpdir):
+    """ Testing csv_report function of Testsuite class in sanitycheck
+    Test 1: Assert the csv_report isn't empty after execution of csv_report function
+    Test 2: Assert on the columns and values of the generated csv_report"""
+    class_testsuite.instances = instances_fixture
+    filename = tmpdir.mkdir("test_csv").join("sanitycheck_csv_report.csv")
+    class_testsuite.csv_report(filename)
+    assert os.path.exists(filename)
+    assert os.stat(filename).st_size != 0
+
+    mydict = {'test': [], 'arch' : [], 'platform' : [], 'status': [],
+              'extra_args': [], 'handler': [], 'handler_time': [],
+              'ram_size': [], 'rom_size': []}
+
+    with open(filename, "r") as file:
+        csv_reader = csv.reader(file)
+        assert set(mydict.keys()) == set(list(csv_reader)[0])
+
+    for instance in class_testsuite.instances.values():
+        mydict["test"].append(instance.testcase.name)
+        mydict["arch"].append(instance.platform.arch)
+        mydict["platform"].append(instance.platform.name)
+        instance_status = instance.status if instance.status is not None else ""
+        mydict["status"].append(instance_status)
+        args = " ".join(instance.testcase.extra_args)
+        mydict["extra_args"].append(args)
+        mydict["handler"].append(instance.platform.simulation)
+        mydict["handler_time"].append(instance.metrics.get("handler_time", ""))
+        mydict["ram_size"].append(instance.metrics.get("ram_size", '0'))
+        mydict["rom_size"].append(instance.metrics.get("rom_size", '0'))
+
+    dict_file = open(filename, "r")
+    dict_reader = csv.DictReader(dict_file)
+    columns = defaultdict(list)
+    for row in dict_reader:
+        for (key, value) in row.items():
+            columns[key].append(value)
+    for _, value in enumerate(mydict):
+        assert columns[value] == mydict[value]
+    dict_file.close()
+
+def test_xunit_report(class_testsuite, test_data,
+                      instances_fixture, platforms_list, all_testcases_dict):
+    """ Testing xunit_report function of Testsuite class in sanitycheck
+    Test 1: Assert sanitycheck.xml file exists after execution of xunit_report function
+    Test 2: Assert on fails, passes, skips, errors values
+    Test 3: Assert on the tree structure of sanitycheck.xml file"""
+    class_testsuite.platforms = platforms_list
+    class_testsuite.testcases = all_testcases_dict
+    kwargs = {"exclude_tag" : ['test_a'], "exclude_platform" : ['demo_board_1'],
+              "platform" : ['demo_board_2']}
+    class_testsuite.apply_filters(**kwargs)
+    class_testsuite.instances = instances_fixture
+    inst1 = class_testsuite.instances.get("demo_board_2/scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_1")
+    inst2 = class_testsuite.instances.get("demo_board_2/scripts/tests/sanitycheck/test_data/testcases/tests/test_a/test_a.check_2")
+    inst1.status = "failed"
+    inst2.status = "skipped"
+
+    filename = test_data + "sanitycheck.xml"
+    class_testsuite.xunit_report(filename)
+    assert os.path.exists(filename)
+
+    filesize = os.path.getsize(filename)
+    assert filesize != 0
+
+    fails, passes, errors, skips = 0, 0, 0, 0
+    for instance in class_testsuite.instances.values():
+        if instance.status in ["failed", "timeout"]:
+            if instance.reason in ['build_error', 'handler_crash']:
+                errors += 1
+            else:
+                fails += 1
+        elif instance.status == 'skipped':
+            skips += 1
+        else:
+            passes += 1
+
+    tree = ET.parse(filename)
+    assert int(tree.findall('testsuite')[0].attrib["skip"]) == int(skips)
+    assert int(tree.findall('testsuite')[0].attrib["failures"]) == int(fails)
+    assert int(tree.findall('testsuite')[0].attrib["errors"]) == int(errors)
+    assert int(tree.findall('testsuite')[0].attrib["tests"]) == int(passes+fails+skips+errors)
+
+    for index in range(0, len(class_testsuite.instances)):
+        if len(list(tree.findall('testsuite')[0][index])) != 0:
+            if tree.findall('testsuite')[0][index][0].attrib["type"] == "failure":
+                assert tree.findall('testsuite')[0][index].attrib["name"] == \
+                    (inst1.testcase.name)
+            elif tree.findall('testsuite')[0][index][0].attrib["type"] == "skipped":
+                assert tree.findall('testsuite')[0][index].attrib["name"] == \
+                    (inst2.testcase.name)
+    os.remove(filename)
+
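For reference, the tree assertions above imply a JUnit-style layout: a root element wrapping a 'testsuite' node that carries the skip/failures/errors/tests counters, with failed and skipped testcases carrying a child element whose "type" attribute is inspected (passed testcases have no children). A minimal ElementTree sketch of that assumed shape; names and counts are illustrative, not from a real sanitycheck run:

    import xml.etree.ElementTree as ET

    root = ET.Element('testsuites')
    suite = ET.SubElement(root, 'testsuite', tests='3', failures='1',
                          errors='0', skip='1')
    failed = ET.SubElement(suite, 'testcase', name='test_a.check_1')
    ET.SubElement(failed, 'failure', type='failure')
    skipped = ET.SubElement(suite, 'testcase', name='test_a.check_2')
    ET.SubElement(skipped, 'skipped', type='skipped')
    ET.SubElement(suite, 'testcase', name='test_b.check_1')  # passed: no child
    print(ET.tostring(root).decode())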
+def test_compare_metrics(class_testsuite, test_data, instances_fixture, caplog):
+    """ Testing compare_metrics function of Testsuite class in sanitycheck
+    Test 1: Error message is raised if the sanitycheck.csv file doesn't exist
+    Test 2: Assert on compare_metrics results for expected values"""
+    class_testsuite.instances = instances_fixture
+    for instance in class_testsuite.instances.values():
+        instance.metrics["ram_size"] = 5
+        instance.metrics["rom_size"] = 9
+    filename_not_exist = test_data + "sanitycheck_file_not_exist.csv"
+    class_testsuite.compare_metrics(filename_not_exist)
+    assert "Cannot compare metrics, " + filename_not_exist + " not found" in caplog.text
+
+    filename = test_data + "sanitycheck.csv"
+    results = class_testsuite.compare_metrics(filename)
+    for instance in class_testsuite.instances.values():
+        for res in results:
+            assert res[0].platform.name == instance.platform.name
+            if (res[0].platform.name == instance.platform.name) and \
+                    (res[0].testcase.name == instance.testcase.name):
+                if res[1] == "ram_size":
+                    assert res[2] == instance.metrics["ram_size"]
+                elif res[1] == "rom_size":
+                    assert res[2] == instance.metrics["rom_size"]
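A small helper sketch for consuming compare_metrics() results; the test above relies only on positions 0-2 of each entry (instance, metric name, current value), so nothing further about the tuple shape is assumed here:

    def summarize_metric_results(results):
        # res[0] is the TestInstance, res[1] the metric name
        # ("ram_size" or "rom_size"), res[2] its value.
        for res in results:
            print("%s/%s: %s = %s" % (res[0].platform.name,
                                      res[0].testcase.name, res[1], res[2]))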
+
+def test_target_report(class_testsuite, instances_fixture, tmpdir_factory):
+    """ Testing target_report function of Testsuite class in sanitycheck
+    Test: Assert xunit_report function is called from target_report function"""
+    class_testsuite.instances = instances_fixture
+    outdir = tmpdir_factory.mktemp("tmp")
+    class_testsuite.xunit_report = MagicMock(side_effect=class_testsuite.xunit_report)
+    class_testsuite.target_report(outdir, "abc", append=False)
+    assert class_testsuite.instances
+    class_testsuite.xunit_report.assert_called()
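The MagicMock(side_effect=...) wrapping used above is a handy pattern: the real method still runs, but every call is recorded for assertions. A self-contained toy sketch of the pattern (illustrative class, not from the commit):

    from mock import MagicMock

    class Greeter:
        def greet(self, name):
            return "hello " + name

    g = Greeter()
    g.greet = MagicMock(side_effect=g.greet)    # wrap the bound method
    assert g.greet("zephyr") == "hello zephyr"  # real behavior preserved
    g.greet.assert_called_once_with("zephyr")   # calls are recorded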

scripts/tests/sanitycheck/test_testsuite_class.py

@@ -15,7 +15,7 @@ from mock import call, patch, MagicMock
 ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
 sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
 
-from sanitylib import TestCase, TestSuite, Platform
+from sanitylib import TestCase, TestSuite, TestInstance, Platform
 
 def test_testsuite_add_testcases(class_testsuite):
     """ Testing add_testcase function of Testsuite class in sanitycheck """
@@ -270,3 +270,23 @@ def test_apply_filters_part3(class_testsuite, all_testcases_dict, platforms_list
     discards = class_testsuite.apply_filters(exclude_platform=['demo_board_1'],
                                              platform=['demo_board_2'])
     assert not discards
+
+def test_add_instances(test_data, class_testsuite, all_testcases_dict, platforms_list):
+    """ Testing add_instances() function of TestSuite class in Sanitycheck
+    Test 1: instances dictionary keys have expected values (Platform Name + Testcase Name)
+    Test 2: Values of 'instances' dictionary in Testsuite class are an
+    instance of 'TestInstance' class
+    Test 3: Values of 'instances' dictionary have expected values.
+    """
+    class_testsuite.outdir = test_data
+    class_testsuite.platforms = platforms_list
+    platform = class_testsuite.get_platform("demo_board_2")
+    instance_list = []
+    for _, testcase in all_testcases_dict.items():
+        instance = TestInstance(testcase, platform, class_testsuite.outdir)
+        instance_list.append(instance)
+    class_testsuite.add_instances(instance_list)
+    assert list(class_testsuite.instances.keys()) == \
+        [platform.name + '/' + s for s in list(all_testcases_dict.keys())]
+    assert all(isinstance(n, TestInstance) for n in list(class_testsuite.instances.values()))
+    assert list(class_testsuite.instances.values()) == instance_list