scripts: Fix twisterlib for ruff - UP015

This fixes ruff linting error UP015, which is reported
when open() is called with a redundant (default) mode argument.

Signed-off-by: Lukasz Mrugala <lukaszx.mrugala@intel.com>
This commit is contained in:
Lukasz Mrugala 2024-11-27 14:57:06 +00:00 committed by Carles Cufí
commit 0c6fd60407
18 changed files with 30 additions and 45 deletions

View file

@@ -759,7 +759,6 @@
]
"./scripts/pylib/twister/twisterlib/cmakecache.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
"./scripts/pylib/twister/twisterlib/config_parser.py" = [
@@ -769,7 +768,6 @@
"./scripts/pylib/twister/twisterlib/coverage.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"F541", # https://docs.astral.sh/ruff/rules/f-string-missing-placeholders
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
@@ -782,40 +780,32 @@
"F541", # https://docs.astral.sh/ruff/rules/f-string-missing-placeholders
"SIM115", # https://docs.astral.sh/ruff/rules/open-file-with-context-handler
"UP007", # https://docs.astral.sh/ruff/rules/non-pep604-annotation
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP030", # https://docs.astral.sh/ruff/rules/format-literals
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
"./scripts/pylib/twister/twisterlib/hardwaremap.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
]
"./scripts/pylib/twister/twisterlib/harness.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"F541", # https://docs.astral.sh/ruff/rules/f-string-missing-placeholders
"F811", # https://docs.astral.sh/ruff/rules/redefined-while-unused
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
"./scripts/pylib/twister/twisterlib/package.py" = [
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
]
"./scripts/pylib/twister/twisterlib/platform.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
]
"./scripts/pylib/twister/twisterlib/quarantine.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
]
"./scripts/pylib/twister/twisterlib/reports.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"F541", # https://docs.astral.sh/ruff/rules/f-string-missing-placeholders
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
@@ -823,18 +813,15 @@
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"F541", # https://docs.astral.sh/ruff/rules/f-string-missing-placeholders
"SIM115", # https://docs.astral.sh/ruff/rules/open-file-with-context-handler
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
"./scripts/pylib/twister/twisterlib/size_calc.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
]
"./scripts/pylib/twister/twisterlib/testinstance.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
]
"./scripts/pylib/twister/twisterlib/testplan.py" = [
@@ -842,7 +829,6 @@
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"F401", # https://docs.astral.sh/ruff/rules/unused-import
"F541", # https://docs.astral.sh/ruff/rules/f-string-missing-placeholders
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP031", # https://docs.astral.sh/ruff/rules/printf-string-formatting
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
@@ -853,7 +839,6 @@
]
"./scripts/pylib/twister/twisterlib/twister_main.py" = [
"E501", # https://docs.astral.sh/ruff/rules/line-too-long
"UP015", # https://docs.astral.sh/ruff/rules/redundant-open-modes
"UP032", # https://docs.astral.sh/ruff/rules/f-string
]
"./scripts/pylint/checkers/argparse-checker.py" = [

View file

@@ -112,7 +112,7 @@ class CMakeCache:
def load(self, cache_file):
entries = []
with open(cache_file, 'r') as cache:
with open(cache_file) as cache:
for line_no, line in enumerate(cache):
entry = CMakeCacheEntry.from_line(line, line_no)
if entry:

View file

@@ -51,7 +51,7 @@ class CoverageTool:
extracted_coverage_info = {}
capture_data = False
capture_complete = False
with open(input_file, 'r') as fp:
with open(input_file) as fp:
for line in fp.readlines():
if re.search("GCOV_COVERAGE_DUMP_START", line):
capture_data = True

View file

@@ -201,7 +201,7 @@ class BinaryHandler(Handler):
def _output_handler(self, proc, harness):
suffix = '\\r\\n'
with open(self.log, "wt") as log_out_fp:
with open(self.log, "w") as log_out_fp:
timeout_extended = False
timeout_time = time.time() + self.get_test_timeout()
while True:
@@ -894,7 +894,7 @@ class QEMUHandler(Handler):
# Disable internal buffering, we don't
# want read() or poll() to ever block if there is data in there
in_fp = open(fifo_out, "rb", buffering=0)
log_out_fp = open(logfile, "wt")
log_out_fp = open(logfile, "w")
return out_fp, in_fp, log_out_fp
@@ -1087,7 +1087,7 @@ class QEMUHandler(Handler):
is_timeout = False
qemu_pid = None
with subprocess.Popen(command, stdout=open(self.stdout_fn, "wt"), stderr=open(self.stderr_fn, "wt"), cwd=self.build_dir) as proc:
with subprocess.Popen(command, stdout=open(self.stdout_fn, "w"), stderr=open(self.stderr_fn, "w"), cwd=self.build_dir) as proc:
logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
try:
@@ -1174,7 +1174,7 @@ class QEMUWinHandler(Handler):
@staticmethod
def _open_log_file(logfile):
return open(logfile, "wt")
return open(logfile, "w")
@staticmethod
def _close_log_file(log_file):

View file

@@ -367,7 +367,7 @@ class HardwareMap:
# use existing map
self.detected = natsorted(self.detected, key=lambda x: x.serial or '')
if os.path.exists(hwm_file):
with open(hwm_file, 'r') as yaml_file:
with open(hwm_file) as yaml_file:
hwm = yaml.load(yaml_file, Loader=SafeLoader)
if hwm:
hwm.sort(key=lambda x: x.get('id', ''))

View file

@@ -229,7 +229,7 @@ class Robot(Harness):
self.instance.testcases[0].status = TwisterStatus.FAIL
if out:
with open(os.path.join(self.instance.build_dir, handler.log), "wt") as log:
with open(os.path.join(self.instance.build_dir, handler.log), 'w') as log:
log_msg = out.decode(sys.getdefaultencoding())
log.write(log_msg)

View file

@@ -25,7 +25,7 @@ class Artifacts:
def package(self):
dirs = []
with open(os.path.join(self.options.outdir, "twister.json"), "r") as json_test_plan:
with open(os.path.join(self.options.outdir, "twister.json")) as json_test_plan:
jtp = json.load(json_test_plan)
for t in jtp['testsuites']:
if t['status'] != TwisterStatus.FILTER:

View file

@@ -96,7 +96,7 @@ class QuarantineData:
@classmethod
def load_data_from_yaml(cls, filename: str | Path) -> QuarantineData:
"""Load quarantine from yaml file."""
with open(filename, 'r', encoding='UTF-8') as yaml_fd:
with open(filename, encoding='UTF-8') as yaml_fd:
qlist_raw_data: list[dict] = yaml.load(yaml_fd, Loader=SafeLoader)
try:
if not qlist_raw_data:

View file

@@ -116,7 +116,7 @@ class Reporting:
def xunit_report_suites(self, json_file, filename):
json_data = {}
with open(json_file, "r") as json_results:
with open(json_file) as json_results:
json_data = json.load(json_results)
@@ -185,7 +185,7 @@ class Reporting:
selected = self.selected_platforms
json_data = {}
with open(json_file, "r") as json_results:
with open(json_file) as json_results:
json_data = json.load(json_results)
@@ -429,7 +429,7 @@ class Reporting:
if do_all or k in self.env.options.footprint_report:
footprint_fname = os.path.join(instance.build_dir, v)
try:
with open(footprint_fname, "rt") as footprint_json:
with open(footprint_fname) as footprint_json:
logger.debug(f"Collect footprint.{k} for '{instance.name}'")
suite['footprint'][k] = json.load(footprint_json)
except FileNotFoundError:
@@ -440,7 +440,7 @@ class Reporting:
suites.append(suite)
report["testsuites"] = suites
with open(filename, "wt") as json_file:
with open(filename, 'w') as json_file:
json.dump(report, json_file, indent=4, separators=(',',':'))

View file

@@ -743,7 +743,7 @@ class FilterBuilder(CMake):
if not filter_stages or "kconfig" in filter_stages:
with open(defconfig_path, "r") as fp:
with open(defconfig_path) as fp:
defconfig = {}
for line in fp.readlines():
m = self.config_re.match(line)
@@ -1281,7 +1281,7 @@ class ProjectBuilder(FilterBuilder):
if not os.path.exists(runners_file_path):
return []
with open(runners_file_path, 'r') as file:
with open(runners_file_path) as file:
runners_content: dict = yaml.load(file, Loader=SafeLoader)
if 'config' not in runners_content:
@@ -1320,7 +1320,7 @@ class ProjectBuilder(FilterBuilder):
if not os.path.exists(runners_file_path):
return
with open(runners_file_path, 'rt') as file:
with open(runners_file_path) as file:
runners_content_text = file.read()
runners_content_yaml: dict = yaml.load(runners_content_text, Loader=SafeLoader)
@@ -1338,7 +1338,7 @@ class ProjectBuilder(FilterBuilder):
binary_path_relative = os.path.relpath(binary_path, start=runners_dir_path)
runners_content_text = runners_content_text.replace(binary_path, binary_path_relative)
with open(runners_file_path, 'wt') as file:
with open(runners_file_path, 'w') as file:
file.write(runners_content_text)
def _sanitize_zephyr_base_from_files(self):
@@ -1354,14 +1354,14 @@ class ProjectBuilder(FilterBuilder):
if not os.path.exists(file_path):
continue
with open(file_path, "rt") as file:
with open(file_path) as file:
data = file.read()
# add trailing slash at the end of canonical_zephyr_base if it does not exist:
path_to_remove = os.path.join(canonical_zephyr_base, "")
data = data.replace(path_to_remove, "")
with open(file_path, "wt") as file:
with open(file_path, 'w') as file:
file.write(data)
@staticmethod

View file

@@ -297,7 +297,7 @@ class SizeCalculator:
@return Content of the build.log file (list[str])
"""
if os.path.exists(path=self.buildlog_filename):
with open(file=self.buildlog_filename, mode='r') as file:
with open(file=self.buildlog_filename) as file:
file_content = file.readlines()
else:
if self.generate_warning:

View file

@@ -92,7 +92,7 @@ class TestInstance:
self.recording.extend(recording)
filename = os.path.join(self.build_dir, fname_csv)
with open(filename, "wt") as csvfile:
with open(filename, 'w') as csvfile:
cw = csv.DictWriter(csvfile,
fieldnames = self.recording[0].keys(),
lineterminator = os.linesep,
@@ -131,7 +131,7 @@ class TestInstance:
run_id = ""
run_id_file = os.path.join(self.build_dir, "run_id.txt")
if os.path.exists(run_id_file):
with open(run_id_file, "r") as fp:
with open(run_id_file) as fp:
run_id = fp.read()
else:
hash_object = hashlib.md5(self.name.encode())

View file

@@ -662,7 +662,7 @@ class TestPlan:
if filter_platform is None:
filter_platform = []
try:
with open(file, "r") as json_test_plan:
with open(file) as json_test_plan:
jtp = json.load(json_test_plan)
instance_list = []
for ts in jtp.get("testsuites", []):

View file

@@ -76,7 +76,7 @@ def main(options: argparse.Namespace, default_options: argparse.Namespace):
elif options.last_metrics:
ls = os.path.join(options.outdir, "twister.json")
if os.path.exists(ls):
with open(ls, "r") as fp:
with open(ls) as fp:
previous_results = fp.read()
else:
sys.exit(f"Can't compare metrics with non existing file {ls}")

View file

@@ -400,7 +400,7 @@ def test_binaryhandler_output_handler(
mock.patch('time.time', side_effect=faux_timer.time):
handler._output_handler(proc, harness)
mock_file.assert_called_with(handler.log, 'wt')
mock_file.assert_called_with(handler.log, 'w')
if expected_handler_calls:
mock_file.return_value.write.assert_has_calls(expected_handler_calls)
@@ -1778,7 +1778,7 @@ def test_qemuhandler_thread_open_files(fifo_in_exists, fifo_out_exists):
open_mock.assert_has_calls([
mock.call('fifo.in', 'wb'),
mock.call('fifo.out', 'rb', buffering=0),
mock.call('log.file', 'wt'),
mock.call('log.file', 'w'),
])
if fifo_in_exists:

View file

@@ -657,7 +657,7 @@ def test_hardwaremap_save(mocked_hm, hwm, expected_dump):
read_mock = mock.mock_open(read_data=hwm)
write_mock = mock.mock_open()
def mock_open(filename, mode):
def mock_open(filename, mode='r'):
if mode == 'r':
return read_mock()
elif mode == 'w':

View file

@@ -688,7 +688,7 @@ def test_filterbuilder_parse_generated(
cache = [cache_elem]
return cache
def mock_open(filepath, type, *args, **kwargs):
def mock_open(filepath, *args, **kwargs):
if filepath == expected_defconfig_path:
rd = 'I am not a proper line\n' \
'CONFIG_FOO="no"'

View file

@@ -260,7 +260,7 @@ def test_testinstance_record(testinstance):
mock_file.assert_called_with(
os.path.join(testinstance.build_dir, 'recording.csv'),
'wt'
'w'
)
mock_writeheader.assert_has_calls([mock.call({ k:k for k in recording[0]})])