scripts: logging: dictionary: database_gen: Fix linter issues

Fix issues reported by ruff.

Signed-off-by: Pieter De Gendt <pieter.degendt@basalte.be>
parent 9e5aa827bb
commit 6af247e227
2 changed files with 43 additions and 67 deletions
@@ -568,15 +568,6 @@
     "UP036", # https://docs.astral.sh/ruff/rules/outdated-version-block
     "UP038", # https://docs.astral.sh/ruff/rules/non-pep604-isinstance
 ]
-"./scripts/logging/dictionary/database_gen.py" = [
-    "E713", # https://docs.astral.sh/ruff/rules/not-in-test
-    "E741", # https://docs.astral.sh/ruff/rules/ambiguous-variable-name
-    "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "SIM102", # https://docs.astral.sh/ruff/rules/collapsible-if
-    "SIM103", # https://docs.astral.sh/ruff/rules/needless-bool
-    "SIM113", # https://docs.astral.sh/ruff/rules/enumerate-for-loop
-    "SIM115", # https://docs.astral.sh/ruff/rules/open-file-with-context-handler
-]
 "./scripts/logging/dictionary/dictionary_parser/data_types.py" = [
     "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports
     "UP039", # https://docs.astral.sh/ruff/rules/unnecessary-class-parentheses
scripts/logging/dictionary/database_gen.py
@@ -21,22 +21,15 @@ import struct
 import sys

 import dictionary_parser.log_database
-from dictionary_parser.log_database import LogDatabase
-from dictionary_parser.utils import extract_one_string_in_section
-from dictionary_parser.utils import find_string_in_mappings
-
 import elftools
+from dictionary_parser.log_database import LogDatabase
+from dictionary_parser.utils import extract_one_string_in_section, find_string_in_mappings
+from elftools.dwarf.descriptions import describe_DWARF_expr
+from elftools.dwarf.locationlists import LocationExpr, LocationParser
 from elftools.elf.constants import SH_FLAGS
-from elftools.elf.elffile import ELFFile
 from elftools.elf.descriptions import describe_ei_data
+from elftools.elf.elffile import ELFFile
 from elftools.elf.sections import SymbolTableSection
-from elftools.dwarf.descriptions import (
-    describe_DWARF_expr
-)
-from elftools.dwarf.locationlists import (
-    LocationExpr, LocationParser
-)
-

 LOGGER_FORMAT = "%(name)s: %(levelname)s: %(message)s"
 logger = logging.getLogger(os.path.basename(sys.argv[0]))
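For reference, the I001 change above is ruff's import sorting: the two dictionary_parser.utils imports are merged onto one line, the parenthesized multi-line elftools imports are collapsed, and the block is re-ordered. A minimal, self-contained sketch of the same pattern using only standard-library modules (the module names below are illustrative, not from this commit):

import sys
from os.path import basename, join

# Before the I001 fix this block might have read:
#     from os.path import join
#     import sys
#     from os.path import basename
# ruff sorts the statements and merges the two os.path imports into one line.
print(join("logs", basename(sys.argv[0])))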
@@ -309,7 +302,7 @@ def find_die_var_base_type(compile_unit, die, is_const):
         return die.attributes['DW_AT_name'].value.decode('ascii'), is_const

     # Not a type, cannot continue
-    if not 'DW_AT_type' in die.attributes:
+    if 'DW_AT_type' not in die.attributes:
         return None, None

     if die.tag == 'DW_TAG_const_type':
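The E713 fix above rewrites a negated membership test. A small runnable sketch of the rule, using a plain dict in place of the DWARF DIE attributes the script inspects (the dict contents are illustrative):

attributes = {'DW_AT_name': b'msg'}

# Flagged form:   if not 'DW_AT_type' in attributes:
# Preferred form, identical behaviour but reads as a single operator:
if 'DW_AT_type' not in attributes:
    print('no type attribute, cannot continue')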
@@ -334,10 +327,7 @@ def is_die_var_const_char(compile_unit, die):
     """
     var_type, is_const = find_die_var_base_type(compile_unit, die, False)

-    if var_type is not None and var_type.endswith('char') and is_const:
-        return True
-
-    return False
+    return bool(var_type is not None and var_type.endswith('char') and is_const)


 def extract_string_variables(elf):
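The SIM103 fix above replaces an if/else that returns True/False with a direct return of the condition; wrapping it in bool() keeps the return value a strict boolean even when var_type is None. A standalone sketch (the function and argument names here are illustrative):

def is_const_char(var_type, is_const):
    # Was:  if var_type is not None and ...: return True / return False
    return bool(var_type is not None and var_type.endswith('char') and is_const)

print(is_const_char('const char', True))   # True
print(is_const_char(None, True))           # False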
@@ -357,32 +347,31 @@ def extract_string_variables(elf):
         for die in compile_unit.iter_DIEs():
             # Only care about variables with location information
             # and of type "char"
-            if die.tag == 'DW_TAG_variable':
-                if ('DW_AT_type' in die.attributes
-                    and 'DW_AT_location' in die.attributes
-                    and is_die_var_const_char(compile_unit, die)
-                ):
+            if die.tag == 'DW_TAG_variable' and ('DW_AT_type' in die.attributes
+                    and 'DW_AT_location' in die.attributes
+                    and is_die_var_const_char(compile_unit, die)
+            ):
                 # Extract location information, which is
                 # its address in memory.
                 loc_attr = die.attributes['DW_AT_location']
                 if loc_parser.attribute_has_location(loc_attr, die.cu['version']):
                     loc = loc_parser.parse_from_attribute(loc_attr, die.cu['version'], die)
                     if isinstance(loc, LocationExpr):
                         try:
                             addr = describe_DWARF_expr(loc.loc_expr,
                                                        dwarf_info.structs)

                             matcher = DT_LOCATION_REGEX.match(addr)
                             if matcher:
                                 addr = int(matcher.group(1), 16)
                                 if addr > 0:
                                     strings.append({
                                         'name': die.attributes['DW_AT_name'].value,
                                         'addr': addr,
                                         'die': die
                                     })
                         except KeyError:
                             pass

     return strings
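The hunk above is the SIM102 (collapsible-if) fix: the outer check on the DIE tag and the inner attribute checks are merged into one condition, and the body is dedented one level. A compact runnable sketch of the same transformation (tag and attribute values are illustrative):

def wants_die(tag, attributes):
    # Was:
    #     if tag == 'DW_TAG_variable':
    #         if 'DW_AT_location' in attributes:
    #             return True
    # Collapsed into a single condition:
    return tag == 'DW_TAG_variable' and 'DW_AT_location' in attributes

print(wants_die('DW_TAG_variable', {'DW_AT_location': 0x1000}))  # True
print(wants_die('DW_TAG_member', {}))                            # False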
@@ -408,9 +397,8 @@ def is_printable(b):
 def extract_strings_in_one_section(section, str_mappings):
     """Extract NULL-terminated strings in one ELF section"""
     data = section['data']
-    idx = 0
     start = None
-    for x in data:
+    for idx, x in enumerate(data):
         if is_printable(chr(x)):
             # Printable character, potential part of string
             if start is None:
@@ -449,7 +437,6 @@ def extract_strings_in_one_section(section, str_mappings):
         else:
             # Non-printable byte, remove start location
             start = None
-        idx += 1

     return str_mappings
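These two hunks together are the SIM113 fix: the manually maintained idx counter (initialised before the loop and incremented at the end of each iteration) is replaced by enumerate(). A short runnable sketch over a byte string (the data is illustrative):

data = b'log\x00msg\x00'

# Was:  idx = 0 ... for x in data: ... idx += 1
for idx, x in enumerate(data):
    if x == 0:
        print(f'NUL terminator at offset {idx}')   # offsets 3 and 7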
@@ -508,7 +495,7 @@ def main():
     elif args.verbose:
         logger.setLevel(logging.INFO)

-    elffile = open(args.elffile, "rb")
+    elffile = open(args.elffile, "rb")  # noqa: SIM115
     if not elffile:
         logger.error("ERROR: Cannot open ELF file: %s, exiting...", args.elffile)
         sys.exit(1)
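SIM115 normally asks for open() to be used as a context manager so the handle is closed even on error paths; because this script keeps the ELF file open across most of main() and closes it explicitly at the end, the commit silences the rule inline with "# noqa: SIM115" instead of restructuring. For reference, a minimal sketch of the form the rule prefers (reading this very file, purely illustrative):

with open(__file__, 'rb') as f:   # closed automatically when the block exits
    magic = f.read(4)
print(len(magic))                 # 4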
@@ -529,8 +516,8 @@ def main():

     if args.build_header:
         with open(args.build_header) as f:
-            for l in f:
-                match = re.match(r'\s*#define\s+BUILD_VERSION\s+(.*)', l)
+            for line in f:
+                match = re.match(r'\s*#define\s+BUILD_VERSION\s+(.*)', line)
                 if match:
                     database.set_build_id(match.group(1))
                     break
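The E741 fix above renames the ambiguous single-letter loop variable l (easily confused with 1 or I) to line; behaviour is unchanged. A standalone sketch using the same regex on illustrative input:

import re

header = '#define BUILD_VERSION v3.7.0-rc1\n#define OTHER 1\n'
for line in header.splitlines():   # was: for l in f:
    match = re.match(r'\s*#define\s+BUILD_VERSION\s+(.*)', line)
    if match:
        print(match.group(1))      # v3.7.0-rc1
        break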
@@ -570,17 +557,15 @@ def main():
     extract_logging_subsys_information(elf, database, string_mappings)

     # Write database file
-    if args.json:
-        if not LogDatabase.write_json_database(args.json, database):
+    if args.json and not LogDatabase.write_json_database(args.json, database):
         logger.error("ERROR: Cannot open database file for write: %s, exiting...",
                      args.json)
         sys.exit(1)

-    if args.syst:
-        if not LogDatabase.write_syst_database(args.syst, database):
+    if args.syst and not LogDatabase.write_syst_database(args.syst, database):
         logger.error("ERROR: Cannot open database file for write: %s, exiting...",
                      args.syst)
         sys.exit(1)

     elffile.close()