scripts: dts: Remove deprecated extract_dts_includes.py script

We now use EDTS and gen_defines.py to generate DTS defines.  We
deprecated the old script and defines several releases ago, so let's now
remove them.

Signed-off-by: Kumar Gala <kumar.gala@linaro.org>
This commit is contained in:
Kumar Gala 2019-12-09 08:46:20 -06:00 committed by Anas Nashif
commit c8c35f76ab
13 changed files with 1 additions and 2423 deletions

View file

@ -8,7 +8,7 @@ file(MAKE_DIRECTORY ${PROJECT_BINARY_DIR}/include/generated)
# encoded in DTS.
#
# Here we call on dtc, the gcc preprocessor, and
# scripts/dts/extract_dts_includes.py to generate this header file at
# scripts/dts/gen_defines.py to generate this header file at
# CMake configure-time.
#
# See ~/zephyr/doc/dts
@ -198,29 +198,6 @@ if(SUPPORTS_DTS)
message(FATAL_ERROR "new extractor failed with return code: ${ret}")
endif()
#
# Run extract_dts_includes.py (the older DT/binding parser) to generate some
# legacy identifiers (via --deprecated-only). This will go away later.
#
set(CMD_EXTRACT_DTS_INCLUDES ${PYTHON_EXECUTABLE} ${ZEPHYR_BASE}/scripts/dts/extract_dts_includes.py
--deprecated-only
--dts ${BOARD}.dts_compiled
--yaml ${DTS_ROOT_BINDINGS}
--include ${GENERATED_DTS_BOARD_UNFIXED_H}.deprecated
--old-alias-names
)
execute_process(
COMMAND ${CMD_EXTRACT_DTS_INCLUDES}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
RESULT_VARIABLE ret
)
if(NOT "${ret}" STREQUAL "0")
message(FATAL_ERROR "command failed with return code: ${ret}")
endif()
else()
file(WRITE ${GENERATED_DTS_BOARD_UNFIXED_H} "/* WARNING. THIS FILE IS AUTO-GENERATED. DO NOT MODIFY! */")
file(WRITE ${GENERATED_DTS_BOARD_UNFIXED_H}.deprecated "/* WARNING. THIS FILE IS AUTO-GENERATED. DO NOT MODIFY! */")
endif(SUPPORTS_DTS)

View file

@ -13,8 +13,6 @@
#include <generated_dts_board_unfixed.h>
#include <generated_dts_board_unfixed.h.deprecated>
/* The following definitions fixup the generated include */
#include <generated_dts_board_fixups.h>

View file

@ -1,309 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (c) 2017 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
# vim: ai:ts=4:sw=4
import sys
import pprint
import re
def read_until(line, fd, end):
    # Gather text from 'fd', starting with 'line', until the token 'end'
    # appears. Returns every line seen; the final element is whatever
    # followed 'end' on the line where it was found.
    collected = [line]
    while True:
        pos = line.find(end)
        if pos >= 0:
            collected.append(line[pos + len(end):])
            return collected
        line = clean_line(fd.readline(), fd)
        collected.append(line)
def remove_comment(line, fd):
    # Strip C-style (/* ... */) and C++-style (// ...) comments from 'line',
    # pulling further lines from 'fd' when a block comment spans lines.
    # The surviving pieces are rejoined with a single space.
    pieces = []
    while True:
        start = line.find('/*')
        if start >= 0:
            pieces.append(line[:start])
            line = read_until(line[start:], fd, '*/')[-1]
            continue
        start = line.find('//')
        pieces.append(line if start < 0 else line[:start])
        return ' '.join(pieces)
def clean_line(line, fd):
    # Normalize one raw DTS line: drop comments, then surrounding whitespace.
    without_comments = remove_comment(line, fd)
    return without_comments.strip()
def parse_node_name(line):
    # Split a DTS node header such as 'alt: label: name@addr {' into parts.
    # Returns (label, name, addr-as-string, addr-as-int, alt-label); the
    # address fields are None when there is no unit address.
    line = line[:-1]                    # drop the trailing '{'
    addr = None
    if '@' in line:
        line, addr = line.split('@')
    label = alt_label = None
    if ':' in line:
        fields = line.split(':')
        if len(fields) == 3:
            alt_label, label, name = fields
        else:
            label, name = fields
    else:
        name = line
    if addr is None:
        return label, name.strip(), None, None, None
    if alt_label is None:
        return label, name.strip(), addr, int(addr, 16), None
    return label, name.strip(), addr, int(addr, 16), alt_label
def parse_values_internal(value, start, end, separator):
    # Collect the substrings bracketed by 'start'/'end' inside 'value' and
    # parse each one. A space separator means each group is itself split
    # into words. A single group is returned unwrapped.
    groups = []
    in_group = False
    chars = []
    for ch in value:
        if not in_group:
            if ch == start:
                in_group = True
                chars = []
        elif ch == end:
            in_group = False
            groups.append(''.join(chars))
            chars = []
        else:
            chars.append(ch)
    if separator == ' ':
        groups = [g.split() for g in groups]
    if len(groups) == 1:
        return parse_value(groups[0])
    return [parse_value(g) for g in groups]
def parse_values(value, start, end, separator):
    # Parse delimited values; a list made only of single non-alphabetic
    # characters is handed to bytearray() as in the original script.
    result = parse_values_internal(value, start, end, separator)
    is_char_list = isinstance(result, list) and all(
        isinstance(v, str) and len(v) == 1 and not v.isalpha()
        for v in result)
    if is_char_list:
        return bytearray(result)
    return result
def parse_value(value):
    # Convert a raw DTS property value string into a Python object:
    # cell lists (<...>), string lists ("..."), byte arrays ([...]),
    # phandle references (&...), and hex/octal numbers. Anything that
    # still looks like a word is kept as a string.
    if value == '':
        return value
    if isinstance(value, list):
        parsed = [parse_value(v) for v in value]
        return parsed[0] if len(parsed) == 1 else parsed
    first = value[0]
    if first == '<':
        return parse_values(value, '<', '>', ' ')
    if first == '"':
        return parse_values(value, '"', '"', ',')
    if first == '[':
        return list(bytes.fromhex(value[1:value.find(']')]))
    if first == '&':
        return {'ref': value[1:]}
    if first.isdigit():
        if value.startswith("0x"):
            return int(value, 16)
        if first == '0':
            return int(value, 8)
        # Match alpha numeric values
        if re.match(r"\w", value):
            return value
        return int(value, 10)
    return value
def parse_property(property, fd):
    # Parse one property statement. 'key = value;' yields (key, parsed
    # value); a bare 'key;' is a boolean flag and yields (key, True).
    # A missing terminating semicolon is a syntax error.
    if '=' not in property:
        property = property.strip()
        if not property.endswith(';'):
            raise SyntaxError("parse_property: missing semicolon: %s" % property)
        return property[:-1].strip(), True
    key, value = property.split('=', 1)
    value = ' '.join(read_until(value, fd, ';')).strip()
    if not value.endswith(';'):
        raise SyntaxError("parse_property: missing semicolon: %s" % value)
    return key.strip(), parse_value(value[:-1])
def build_node_name(name, addr):
    # Reconstruct a node name, appending the unit address when present.
    # Integer addresses are rendered in lowercase hex.
    if addr is None:
        return name
    if isinstance(addr, int):
        return '{}@{:x}'.format(name, addr)
    return '{}@{}'.format(name, addr.strip())
def parse_node(line, fd):
    # Parse a 'node { ... };' block (header line already consumed into
    # 'line') into a dict with label/addr/props/children.
    label, name, addr, numeric_addr, alt_label = parse_node_name(line)
    node = {
        'label': label,
        # NOTE(review): this stores the 'type' builtin, not a type string —
        # looks like a latent bug in the legacy script; preserved as-is.
        'type': type,
        'addr': numeric_addr,
        'children': {},
        'props': {},
        'name': build_node_name(name, addr)
    }
    if alt_label:
        node['alt_name'] = alt_label
    while True:
        raw = fd.readline()
        if not raw:
            raise SyntaxError("parse_node: Missing } while parsing node")
        stripped = clean_line(raw, fd)
        if not stripped:
            continue
        if stripped == "};":
            return node
        if stripped.endswith('{'):
            child = parse_node(stripped, fd)
            node['children'][child['name']] = child
        else:
            key, value = parse_property(stripped, fd)
            node['props'][key] = value
def parse_file(fd, ignore_dts_version=False):
    # Parse a whole DTS stream into a dict of top-level nodes, handling
    # /include/, /dts-v1/;, and /memreserve/ directives along the way.
    nodes = {}
    has_v1_tag = False
    while True:
        raw = fd.readline()
        if not raw:
            return nodes
        line = clean_line(raw, fd)
        if not line:
            continue
        if line.startswith('/include/ '):
            _, filename = line.split()
            # [1:-1] strips the quotes around the include path.
            with open(filename.strip()[1:-1], encoding="utf-8") as new_fd:
                nodes.update(parse_file(new_fd, True))
        elif line == '/dts-v1/;':
            has_v1_tag = True
        elif line.startswith('/memreserve/ ') and line.endswith(';'):
            _, start, end = line.split()
            start = int(start, 16)
            end = int(end[:-1], 16)
            label = "reserved_memory_0x%x_0x%x" % (start, end)
            nodes[label] = {
                'type': 'memory',
                'reg': [start, end],
                'label': label,
                'addr': start,
                'name': '<memreserve>'
            }
        elif line.endswith('{'):
            if not has_v1_tag and not ignore_dts_version:
                raise SyntaxError("parse_file: Missing /dts-v1/ tag")
            node = parse_node(line, fd)
            nodes[node['name']] = node
        else:
            raise SyntaxError("parse_file: Couldn't understand the line: %s" % line)
def dump_refs(name, value, indent=0):
    # Emit Graphviz edge statements for every phandle reference found in
    # 'value' (a single {'ref': ...} dict or a possibly nested list).
    pad = ' ' * indent
    edges = []
    if isinstance(value, dict) and 'ref' in value:
        edges.append('%s"%s" -> "%s";' % (pad, name, value['ref']))
    elif isinstance(value, list):
        for item in value:
            edges.extend(dump_refs(name, item, indent))
    return edges
def dump_all_refs(name, props, indent=0):
    # Collect reference edges from every property value of one node.
    edges = []
    for v in props.values():
        edges += dump_refs(name, v, indent)
    return edges
def next_subgraph(count=[0]):
    # The mutable default argument persists across calls, so this acts as
    # a process-wide counter of emitted subgraph names.
    count[0] = count[0] + 1
    return 'subgraph cluster_{}'.format(count[0])
def get_dot_node_name(node):
    # Drop a leading '&' so references and definitions share one dot name.
    name = node['name']
    return name if name[0] != '&' else name[1:]
def dump_to_dot(nodes, indent=0, start_string='digraph devicetree', name=None):
    # Print the node tree as a Graphviz graph (nested clusters for nodes
    # with children). Returns the collected edge list so recursive calls
    # can hoist all edges up to the top-level digraph.
    pad = ' ' * indent
    print("%s%s {" % (pad, start_string))
    if name is not None:
        print("%slabel = \"%s\";" % (pad, name))
        print("%s\"%s\";" % (pad, name))
    ref_list = []
    for node in nodes.values():
        if node['children']:
            ref_list.extend(
                dump_to_dot(node['children'], indent + 1, next_subgraph(),
                            get_dot_node_name(node)))
        else:
            print("%s\"%s\";" % (pad, get_dot_node_name(node)))
    for node in nodes.values():
        ref_list.extend(
            dump_all_refs(get_dot_node_name(node), node['props'], indent))
    # Only the outermost (digraph) level actually prints the edges.
    if start_string.startswith("digraph"):
        print("%s%s" % (pad, '\n'.join(ref_list)))
    print("%s}" % pad)
    return ref_list
def main(args):
    # CLI entry point: pretty-print the parsed DTS, or emit Graphviz when
    # --dot is passed. Returns a shell-style exit status.
    if len(args) == 1:
        print('Usage: %s filename.dts' % args[0])
        return 1
    if '--dot' in args:
        args.remove('--dot')
        formatter = dump_to_dot
    else:
        formatter = lambda nodes: pprint.pprint(nodes, indent=2)
    with open(args[1], encoding="utf-8") as fd:
        formatter(parse_file(fd))
    return 0

if __name__ == '__main__':
    sys.exit(main(sys.argv))

View file

@ -1,7 +0,0 @@
#
# Copyright (c) 2017 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# Empty to allow all modules to be imported

View file

@ -1,224 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from extract.globals import *
from extract.directive import DTDirective
##
# @brief Manage clocks related directives.
#
# Handles:
# - clocks
# directives.
#
# NOTE(review): the diff rendering has stripped all leading indentation from
# this class, so the original nesting cannot be recovered from this view.
# Lines below are kept byte-for-byte; only review comments are added.
class DTClocks(DTDirective):
# Walks a 'clocks' phandle-array, resolving each referenced clock provider
# and emitting the legacy DT_..._CLOCK... #defines for the consumer node.
def _extract_consumer(self, node_path, clocks, def_label):
clock_consumer_label = 'DT_' + node_label(node_path)
clock_index = 0
clock_cell_index = 0
nr_clock_cells = 0
clock_provider_node_path = ''
clock_provider = {}
for cell in clocks:
if clock_cell_index == 0:
if cell not in phandles:
raise Exception(
("Could not find the clock provider node {} for clocks"
" = {} in clock consumer node {}. Did you activate"
" the clock node?. Last clock provider: {}.")
.format(str(cell), str(clocks), node_path,
str(clock_provider)))
clock_provider_node_path = phandles[cell]
clock_provider = reduced[clock_provider_node_path]
clock_provider_bindings = get_binding(
clock_provider_node_path)
nr_clock_cells = int(clock_provider['props'].get(
'#clock-cells', 0))
clock_cells_string = clock_provider_bindings.get(
'cell_string', 'CLOCK')
if "clock-cells" in clock_provider_bindings:
clock_cells_names = clock_provider_bindings["clock-cells"]
elif "#cells" in clock_provider_bindings:
clock_cells_names = clock_provider_bindings["#cells"]
else:
clock_cells_names = ["ID", "CELL1", "CELL2", "CELL3"]
clock_cells = []
else:
clock_cells.append(cell)
clock_cell_index += 1
if clock_cell_index > nr_clock_cells or nr_clock_cells == 0:
# clock consumer device - clocks info
#####################################
prop_def = {}
prop_alias = {}
# Legacy clocks definitions by extract_cells
for i, cell in enumerate(clock_cells):
if i >= len(clock_cells_names):
clock_cell_name = 'CELL{}'.format(i)
else:
clock_cell_name = clock_cells_names[i]
if clock_cells_string == clock_cell_name:
clock_label = self.get_label_string([
clock_consumer_label, clock_cells_string,
str(clock_index)])
add_compat_alias(node_path,
self.get_label_string(["",
clock_cells_string, str(clock_index)]),
clock_label, prop_alias)
else:
clock_label = self.get_label_string([
clock_consumer_label, clock_cells_string,
clock_cell_name, str(clock_index)])
add_compat_alias(node_path,
self.get_label_string(["",
clock_cells_string, clock_cell_name,
str(clock_index)]),
clock_label, prop_alias)
prop_def[clock_label] = str(cell)
# Single-clock consumers get an index-less define as well.
if clock_index == 0 and \
len(clocks) == (len(clock_cells) + 1):
index = ''
else:
index = str(clock_index)
if node_path in aliases:
if clock_cells_string == clock_cell_name:
add_prop_aliases(
node_path,
lambda alias:
self.get_label_string([
alias,
clock_cells_string,
index]),
clock_label,
prop_alias)
else:
add_prop_aliases(
node_path,
lambda alias:
self.get_label_string([
alias,
clock_cells_string,
clock_cell_name,
index]),
clock_label,
prop_alias)
# alias
if i < nr_clock_cells:
# clocks info for first clock
clock_alias_label = self.get_label_string([
clock_consumer_label, clock_cells_string,
clock_cell_name])
prop_alias[clock_alias_label] = clock_label
add_compat_alias(node_path,
self.get_label_string(["",
clock_cells_string, clock_cell_name]),
clock_label, prop_alias)
# Legacy clocks definitions by extract_controller
clock_provider_label_str = clock_provider['props'].get('label',
None)
if clock_provider_label_str is not None:
clock_cell_name = 'CLOCK_CONTROLLER'
if clock_index == 0 and \
len(clocks) == (len(clock_cells) + 1):
index = ''
else:
index = str(clock_index)
clock_label = self.get_label_string([clock_consumer_label,
clock_cell_name,
index])
add_compat_alias(node_path,
self.get_label_string(["", clock_cell_name, index]),
clock_label, prop_alias)
prop_def[clock_label] = '"' + clock_provider_label_str + '"'
if node_path in aliases:
add_prop_aliases(
node_path,
lambda alias:
self.get_label_string([
alias,
clock_cell_name,
index]),
clock_label,
prop_alias)
# If the provided clock has a fixed rate, extract its frequency
# as a macro generated for the clock consumer.
if clock_provider['props']['compatible'] == 'fixed-clock':
clock_prop_name = 'clock-frequency'
clock_prop_label = 'CLOCKS_CLOCK_FREQUENCY'
if clock_index == 0 and \
len(clocks) == (len(clock_cells) + 1):
index = ''
else:
index = str(clock_index)
clock_frequency_label = \
self.get_label_string([clock_consumer_label,
clock_prop_label,
index])
prop_def[clock_frequency_label] = \
clock_provider['props'][clock_prop_name]
add_compat_alias(
node_path,
self.get_label_string([clock_prop_label, index]),
clock_frequency_label,
prop_alias)
if node_path in aliases:
add_prop_aliases(
node_path,
lambda alias:
self.get_label_string([
alias,
clock_prop_label,
index]),
clock_frequency_label,
prop_alias)
insert_defs(node_path, prop_def, prop_alias)
# Reset the per-clock cell counter and move to the next clock.
clock_cell_index = 0
clock_index += 1
##
# @brief Extract clocks related directives
#
# @param node_path Path to node owning the clockxxx definition.
# @param prop clockxxx property name
# @param def_label Define label string of node owning the directive.
#
def extract(self, node_path, prop, def_label):
properties = reduced[node_path]['props'][prop]
prop_list = []
if not isinstance(properties, list):
prop_list.append(properties)
else:
prop_list = list(properties)
if prop == 'clocks':
# indicator for clock consumers
self._extract_consumer(node_path, prop_list, def_label)
else:
raise Exception(
"DTClocks.extract called with unexpected directive ({})."
.format(prop))
##
# @brief Management information for clocks.
clocks = DTClocks()

View file

@ -1,73 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from extract.globals import *
from extract.directive import DTDirective
##
# @brief Manage compatible directives.
#
# Handles:
# - compatible
#
class DTCompatible(DTDirective):
    ##
    # @brief Extract compatible
    #
    # @param node_path Path to node owning the compatible definition.
    # @param prop compatible property name
    # @param def_label Define label string of node owning the compatible
    #        definition.
    #
    def extract(self, node_path, prop, def_label):
        # compatible definition
        binding = get_binding(node_path)
        compatible = reduced[node_path]['props'][prop]
        if not isinstance(compatible, list):
            compatible = [compatible, ]

        for comp in compatible:
            # Generate #define DT_COMPAT_<COMPAT> 1
            insert_defs(node_path,
                        {'DT_COMPAT_' + str_to_label(comp): '1'},
                        {})

            # Generate #define for BUS a "sensor" might be on, for example
            # #define DT_ST_LPS22HB_PRESS_BUS_SPI 1
            if 'parent' in binding:
                compat_def = ('DT_' + str_to_label(comp) + '_BUS_' +
                              binding['parent']['bus'].upper())
                insert_defs(node_path, {compat_def: '1'}, {})

        # Generate defines of the form:
        # #define DT_<COMPAT>_<INSTANCE ID> 1
        for compat, instance_id in reduced[node_path]['instance_id'].items():
            compat_instance = ('DT_' + str_to_label(compat) + '_' +
                               str(instance_id))
            insert_defs(node_path, {compat_instance: '1'}, {})
            deprecated_main.append(compat_instance)

            # Generate defines of the form:
            # #define DT_<COMPAT>_<INSTANCE ID>_BUS_<BUS> 1
            if 'parent' in binding:
                bus = binding['parent']['bus']
                insert_defs(node_path,
                            {compat_instance + '_BUS_' + bus.upper(): '1'},
                            {})
                deprecated_main.append(compat_instance + '_BUS_' + bus.upper())

##
# @brief Management information for compatible.
compatible = DTCompatible()

View file

@ -1,107 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from extract.globals import *
from extract.directive import DTDirective
##
# @brief Manage directives in a default way.
#
class DTDefault(DTDirective):
    ##
    # @brief Map an enum property value to its index (list form) or its
    #        mapped value (dict form); returns {} when not an enum.
    @staticmethod
    def _extract_enum(node_path, prop, prop_values, label):
        cell_yaml = get_binding(node_path)['properties'][prop]
        if 'enum' in cell_yaml:
            enum = cell_yaml['enum']
            if prop_values in enum:
                if isinstance(enum, list):
                    value = enum.index(prop_values)
                if isinstance(enum, dict):
                    value = enum[prop_values]
                return {label + "_ENUM": value}
            print("ERROR")
        return {}

    ##
    # @brief Extract directives in a default way
    #
    # @param node_path Path to node owning the clockxxx definition.
    # @param prop property name
    # @param prop_type property type (string, boolean, etc)
    # @param def_label Define label string of node owning the directive.
    #
    def extract(self, node_path, prop, prop_type, def_label):
        prop_def = {}
        prop_alias = {}

        if prop_type == 'boolean':
            # Presence of the property is the value.
            prop_values = 1 if prop in reduced[node_path]['props'] else 0
        else:
            prop_values = reduced[node_path]['props'][prop]

        if prop_type in {"string-array", "array", "uint8-array"}:
            if not isinstance(prop_values, list):
                prop_values = [prop_values]

        if prop_type == "uint8-array":
            prop_name = str_to_label(prop)
            label = def_label + '_' + prop_name
            prop_value = ('{ ' +
                          ', '.join("0x%02x" % b for b in prop_values) +
                          ' }')
            prop_def[label] = prop_value
            add_compat_alias(node_path, prop_name, label, prop_alias)
        elif isinstance(prop_values, list):
            # One define per element, suffixed with its index.
            for i, prop_value in enumerate(prop_values):
                prop_name = str_to_label(prop)
                label = def_label + '_' + prop_name
                if isinstance(prop_value, str):
                    prop_value = "\"" + prop_value + "\""
                prop_def[label + '_' + str(i)] = prop_value
                add_compat_alias(node_path,
                                 prop_name + '_' + str(i),
                                 label + '_' + str(i),
                                 prop_alias)
        else:
            prop_name = str_to_label(prop)
            label = def_label + '_' + prop_name
            if prop_values == 'parent-label':
                prop_values = find_parent_prop(node_path, 'label')
            prop_def = self._extract_enum(node_path, prop, prop_values, label)
            if prop_def:
                add_compat_alias(node_path, prop_name + "_ENUM",
                                 label + "_ENUM", prop_alias)
            if isinstance(prop_values, str):
                prop_values = "\"" + prop_values + "\""
            prop_def[label] = prop_values
            add_compat_alias(node_path, prop_name, label, prop_alias)

            # generate defs for node aliases
            if node_path in aliases:
                add_prop_aliases(
                    node_path,
                    lambda alias: str_to_label(alias) + '_' + prop_name,
                    label,
                    prop_alias)

        insert_defs(node_path, prop_def, prop_alias)

##
# @brief Management information for directives handled by default.
default = DTDefault()

View file

@ -1,40 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from .globals import *
##
# @brief Base class for device tree directives
#
class DTDirective(object):
    ##
    # @brief Get a label string for a list of label sub-strings.
    #
    # Label sub-strings are concatenated by '_'.
    #
    # @param label List of label sub-strings
    # @return label string
    #
    @staticmethod
    def get_label_string(label):
        # Empty or whitespace-only sub-strings are dropped before joining.
        parts = [x.strip() for x in label if x.strip()]
        return str_to_label('_'.join(parts))

    ##
    # @brief Extract directive information.
    #
    # @param node_path Path to node issuing the directive.
    # @param prop Directive property name
    # @param def_label Define label string of node owning the directive.
    #
    def extract(self, node_path, prop, def_label):
        # Default implementation: nothing to extract; subclasses override.
        pass

View file

@ -1,224 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from extract.globals import *
from extract.directive import DTDirective
from extract.default import default
##
# @brief Manage flash directives.
#
# NOTE(review): the diff rendering has stripped all leading indentation from
# this class, so the original nesting cannot be recovered from this view.
# Lines below are kept byte-for-byte; only review comments are added/fixed.
class DTFlash(DTDirective):
def __init__(self):
# Running counter used to assign index-based flash area IDs.
self._area_id = 0
def extract_partition(self, node_path):
self._add_partition_index_entries(node_path)
self._add_partition_label_entries(node_path)
def _add_partition_index_entries(self, node_path):
# Adds DT_FLASH_AREA_<index>_... entries, to the '# DT_FLASH_AREA'
# section
prop_def = {}
prop_alias = {}
node = reduced[node_path]
# Index-based partition ID
area_id = self._area_id
self._area_id += 1
# Extract a per partition dev name. Something like
#
# #define DT_FLASH_AREA_1_DEV "FLASH_CTRL"
prop_def["DT_FLASH_AREA_{}_DEV".format(area_id)] = \
'"' + reduced[controller_path(node_path)]['props']['label'] + '"'
partition_label = str_to_label(node['props']['label'])
prop_def["DT_FLASH_AREA_{}_LABEL".format(area_id)] = partition_label
deprecated_main.append("DT_FLASH_AREA_{}_LABEL".format(area_id))
prop_def["DT_FLASH_AREA_{}_ID".format(partition_label)] = area_id
reg = node['props']['reg']
for i in range(len(reg)//2):
prop_def["DT_FLASH_AREA_{}_OFFSET_{}".format(area_id, i)] = reg[2*i]
prop_def["DT_FLASH_AREA_{}_SIZE_{}".format(area_id, i)] = reg[2*i + 1]
# Number of flash areas defined
prop_def["DT_FLASH_AREA_NUM"] = self._area_id
# Alias sector 0
prop_alias[
"DT_FLASH_AREA_{}_OFFSET".format(area_id)
] = "DT_FLASH_AREA_{}_OFFSET_0".format(area_id)
prop_alias[
"DT_FLASH_AREA_{}_SIZE".format(area_id)
] = "DT_FLASH_AREA_{}_SIZE_0".format(area_id)
insert_defs("DT_FLASH_AREA", prop_def, prop_alias)
@staticmethod
def _add_partition_label_entries(node_path):
# Adds DT_FLASH_AREA_<label>_... entries, to the '# partition@...'
# section
prop_def = {}
prop_alias = {}
node = reduced[node_path]
partition_label = str_to_label(node['props']['label'])
label = "DT_FLASH_AREA_{}_LABEL".format(partition_label)
deprecated_main.append(label)
prop_def[label] = '"' + node['props']['label'] + '"'
add_legacy_alias(prop_alias, label)
label = "DT_FLASH_AREA_{}_READ_ONLY".format(partition_label)
prop_def[label] = 1 if 'read-only' in node['props'] else 0
add_legacy_alias(prop_alias, label)
reg = node['props']['reg']
for i in range(len(reg)//2):
label = "DT_FLASH_AREA_{}_OFFSET_{}".format(partition_label, i)
prop_def[label] = reg[2*i]
add_legacy_alias(prop_alias, label)
label = "DT_FLASH_AREA_{}_SIZE_{}".format(partition_label, i)
prop_def[label] = reg[2*i + 1]
add_legacy_alias(prop_alias, label)
# Alias sector 0
label = "DT_FLASH_AREA_{}_OFFSET".format(partition_label)
prop_alias[label] = "DT_FLASH_AREA_{}_OFFSET_0".format(partition_label)
add_legacy_alias(prop_alias, label)
label = "DT_FLASH_AREA_{}_SIZE".format(partition_label)
prop_alias[label] = "DT_FLASH_AREA_{}_SIZE_0".format(partition_label)
add_legacy_alias(prop_alias, label)
insert_defs(node_path, prop_def, prop_alias)
@staticmethod
def extract_flash():
node_path = chosen.get('zephyr,flash')
if not node_path:
# Add addr/size 0 for systems with no flash controller. This is
# what they already do in the Kconfig options anyway.
insert_defs('dummy-flash',
{'DT_FLASH_BASE_ADDRESS': 0, 'DT_FLASH_SIZE': 0},
{})
return
flash_node = reduced[node_path]
orig_node_addr = node_path
nr_address_cells, nr_size_cells = get_addr_size_cells(node_path)
# if the nr_size_cells is 0, assume a SPI flash, need to look at parent
# for addr/size info, and the second reg property (assume first is mmio
# register for the controller itself)
is_spi_flash = nr_size_cells == 0
if is_spi_flash:
node_path = get_parent_path(node_path)
nr_address_cells, nr_size_cells = get_addr_size_cells(node_path)
reg = reduced[node_path]['props']['reg']
if not isinstance(reg, list):
reg = [reg]
props = list(reg)
num_reg_elem = len(props)/(nr_address_cells + nr_size_cells)
# if we found a spi flash, but don't have mmio direct access support
# which we determine by the spi controller node only having one reg element
# (ie for the controller itself and no region for the MMIO flash access)
if num_reg_elem == 1 and is_spi_flash:
node_path = orig_node_addr
else:
# We assume the last reg property is the one we want
while props:
addr = 0
size = 0
for x in range(nr_address_cells):
addr += props.pop(0) << (32 * (nr_address_cells - x - 1))
for x in range(nr_size_cells):
size += props.pop(0) << (32 * (nr_size_cells - x - 1))
addr += translate_addr(addr, node_path, nr_address_cells,
nr_size_cells)
insert_defs(node_path,
{'DT_FLASH_BASE_ADDRESS': hex(addr),
'DT_FLASH_SIZE': size//1024},
{})
for prop in 'write-block-size', 'erase-block-size':
if prop in flash_node['props']:
default.extract(node_path, prop, None, 'DT_FLASH')
# Add a non-DT prefix alias for compatibility
prop_alias = {}
label_post = '_' + str_to_label(prop)
prop_alias['FLASH' + label_post] = 'DT_FLASH' + label_post
insert_defs(node_path, {}, prop_alias)
@staticmethod
def extract_code_partition():
node_path = chosen.get('zephyr,code-partition')
if not node_path:
# Fall back on zephyr,flash if zephyr,code-partition isn't set.
# node_path will be 'dummy-flash' if neither is set.
node_path = chosen.get('zephyr,flash', 'dummy-flash')
node = reduced.get(node_path)
if node and node is not reduced.get(chosen.get('zephyr,flash')):
# only compute the load offset if the code partition
# is not the same as the flash base address
load_offset = node['props']['reg'][0]
load_size = node['props']['reg'][1]
else:
load_offset = 0
load_size = 0
insert_defs(node_path,
{'DT_CODE_PARTITION_OFFSET': load_offset,
'DT_CODE_PARTITION_SIZE': load_size},
{})
def controller_path(partition_path):
    # Returns the DT path to the flash controller for the
    # partition at 'partition_path'.
    #
    # For now assume node_path is something like
    # /flash-controller@4001E000/flash@0/partitions/partition@fc000. First, we
    # go up two levels to get the flash and check its compat.
    #
    # The flash controller might be the flash itself (for cases like NOR
    # flashes). For the case of 'soc-nv-flash', we assume it's the parent of
    # the flash node.
    flash_path = '/' + '/'.join(partition_path.split('/')[1:-2])
    if get_compat(flash_path) == "soc-nv-flash":
        return '/' + '/'.join(partition_path.split('/')[1:-3])
    return flash_path
def add_legacy_alias(prop_alias, label):
    # Register a non-DT_-prefixed alias for 'label' in 'prop_alias'.
    #
    # Bug fix: the original used label.lstrip('DT_'), but str.lstrip()
    # strips a *character set* (any leading 'D', 'T' or '_'), not a prefix —
    # a label such as 'DT_TEST' would be mangled to 'EST'. It only worked by
    # accident because all current labels start with 'DT_FLASH_AREA'.
    # Strip the literal 'DT_' prefix instead.
    if label.startswith('DT_'):
        prop_alias[label[3:]] = label
    else:
        prop_alias[label] = label
##
# @brief Management information for flash.
flash = DTFlash()

View file

@ -1,591 +0,0 @@
#
# Copyright (c) 2017 Linaro
# Copyright (c) 2017 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
import sys
from collections import defaultdict

# Shared mutable state for the legacy extractor modules.
phandles = {}                     # phandle value -> node path
aliases = defaultdict(list)       # node path -> list of alias names
chosen = {}                       # /chosen properties
reduced = {}                      # flattened node path -> node dict
defs = {}                         # node path -> generated #defines
bindings = {}
bus_bindings = {}
binding_compats = []
deprecated = []
deprecated_main = []
old_alias_names = False

# Chosen nodes whose register blocks get a fixed define prefix.
regs_config = {
    'zephyr,sram' : 'DT_SRAM',
    'zephyr,ccm' : 'DT_CCM',
    'zephyr,dtcm' : 'DT_DTCM'
}

# Chosen nodes exported as device-name defines.
name_config = {
    'zephyr,console' : 'DT_UART_CONSOLE_ON_DEV_NAME',
    'zephyr,shell-uart' : 'DT_UART_SHELL_ON_DEV_NAME',
    'zephyr,bt-uart' : 'DT_BT_UART_ON_DEV_NAME',
    'zephyr,bt-c2h-uart' : 'DT_BT_C2H_UART_ON_DEV_NAME',
    'zephyr,uart-pipe' : 'DT_UART_PIPE_ON_DEV_NAME',
    'zephyr,bt-mon-uart' : 'DT_BT_MONITOR_ON_DEV_NAME',
    'zephyr,uart-mcumgr' : 'DT_UART_MCUMGR_ON_DEV_NAME'
}
def str_to_label(s):
    # Map a DTS string to a C identifier fragment: the separators
    # ',', '-', '@', '/', '.' become '_', '+' becomes 'PLUS', and the
    # result is uppercased.
    for ch in (',', '-', '@', '/', '.'):
        s = s.replace(ch, '_')
    return s.replace('+', 'PLUS').upper()
def all_compats(node):
    # Returns a set() of all 'compatible' strings that appear at or below
    # 'node', skipping disabled nodes.
    if node['props'].get('status') == 'disabled':
        return set()
    result = set()
    if 'compatible' in node['props']:
        val = node['props']['compatible']
        if isinstance(val, list):
            result.update(val)
        else:
            result.add(val)
    for child in node['children'].values():
        result |= all_compats(child)
    return result
def create_aliases(root):
    # Populate the global 'aliases' map from the /aliases node, keyed by
    # the aliased node path.
    alias_node = root['children'].get('aliases')
    if alias_node is None:
        return
    for alias_name, target_path in alias_node['props'].items():
        aliases[target_path].append(alias_name)
def get_compat(node_path):
    # Returns the value of the 'compatible' property for the node at
    # 'node_path', falling back to the node's parent, and the first entry
    # when the value is a list. Returns None when neither node nor parent
    # has a 'compatible' property.
    parent = get_parent_path(node_path)
    compat = (reduced[node_path]['props'].get('compatible')
              or reduced[parent]['props'].get('compatible'))
    return compat[0] if isinstance(compat, list) else compat
def create_chosen(root):
    # Merge the /chosen node's properties into the global 'chosen' map.
    chosen_node = root['children'].get('chosen')
    if chosen_node is not None:
        chosen.update(chosen_node['props'])
def create_phandles(root, name):
    # Walk the tree recording phandle -> node path into the global
    # 'phandles' map, skipping disabled subtrees.
    if root['props'].get('status') == 'disabled':
        return
    if 'phandle' in root['props']:
        phandles[root['props']['phandle']] = name
    if name != '/':
        name += '/'
    for child_name, child in root['children'].items():
        create_phandles(child, name + child_name)
def insert_defs(node_path, new_defs, new_aliases):
    # Merge 'new_defs'/'new_aliases' into the global 'defs' table under
    # 'node_path'. Any DT_COMPAT_* define redirects the whole batch to the
    # shared 'compatibles' bucket. Aliases that duplicate a real define
    # (new or existing) are dropped.
    for key in new_defs:
        if key.startswith('DT_COMPAT_'):
            node_path = 'compatibles'
    for k in [k for k in new_aliases if k in new_defs]:
        del new_aliases[k]
    if node_path in defs:
        for k in [k for k in new_aliases if k in defs[node_path]]:
            del new_aliases[k]
        defs[node_path]['aliases'].update(new_aliases)
        defs[node_path].update(new_defs)
    else:
        new_defs['aliases'] = new_aliases
        defs[node_path] = new_defs
# Dictionary where all keys default to 0. Used by create_reduced().
last_used_id = defaultdict(int)

def create_reduced(node, path):
    # Flatten the tree into the global 'reduced' path -> node map, dropping
    # disabled nodes, assigning per-compat instance IDs, and flattening
    # nested cell lists.
    if node['props'].get('status') == 'disabled':
        return
    entry = node.copy()
    entry.pop('children', None)
    reduced[path] = entry
    # Assign an instance ID for each compat
    compat = node['props'].get('compatible')
    if compat:
        if not isinstance(compat, list):
            compat = [compat]
        entry['instance_id'] = {}
        for comp in compat:
            entry['instance_id'][comp] = last_used_id[comp]
            last_used_id[comp] += 1
    # Flatten 'prop = <1 2>, <3 4>' (which turns into nested lists) to
    # 'prop = <1 2 3 4>' — in place, so the original node sees it too.
    for val in node['props'].values():
        if isinstance(val, list) and isinstance(val[0], list):
            val[:] = [item for sub in val for item in sub]
    if node['children']:
        if path != '/':
            path += '/'
        for child_name, child in sorted(node['children'].items()):
            create_reduced(child, path + child_name)
def node_label(node_path):
    # Build the DT_* label stem for a node from its compat plus either its
    # (possibly ranges-translated) unit address or its node name.
    def_label = str_to_label(get_compat(node_path))
    if '@' not in node_path:
        return def_label + '_' + str_to_label(node_path.split('/')[-1])
    # See if we have a number we can convert and translate through ranges.
    try:
        unit_addr = int(node_path.split('@')[-1], 16)
        (nr_addr_cells, nr_size_cells) = get_addr_size_cells(node_path)
        unit_addr += translate_addr(unit_addr, node_path,
                                    nr_addr_cells, nr_size_cells)
        unit_addr = "%x" % unit_addr
    except Exception:
        unit_addr = node_path.split('@')[-1]
    return def_label + '_' + str_to_label(unit_addr)
def get_parent_path(node_path):
    # Parent of a DT path: '/foo/bar' -> '/foo', '/foo' -> '/', '/' -> None.
    if node_path == '/':
        return None
    parent = '/'.join(node_path.split('/')[:-1])
    return parent if parent else '/'
def find_parent_prop(node_path, prop):
    # Look up 'prop' on the node's parent; raise when it is missing.
    parent_props = reduced[get_parent_path(node_path)]['props']
    if prop not in parent_props:
        raise Exception("Parent of node " + node_path +
                        " has no " + prop + " property")
    return parent_props[prop]
# Get the #{address,size}-cells for a given node
def get_addr_size_cells(node_path):
    # Per the DT spec, a missing #address-cells defaults to 2 and a
    # missing #size-cells defaults to 1 (both read from the parent).
    parent_props = reduced[get_parent_path(node_path)]['props']
    return (parent_props.get('#address-cells', 2),
            parent_props.get('#size-cells', 1))
def translate_addr(addr, node_path, nr_addr_cells, nr_size_cells):
    # Walk 'ranges' properties up the tree to translate 'addr' (an address in
    # the bus address space of 'node_path') towards the root address space.
    # Returns the cumulative OFFSET to add to 'addr' — not the translated
    # address itself. Returns 0 when no translation applies.
    parent_path = get_parent_path(node_path)
    ranges = reduced[parent_path]['props'].get('ranges')
    if not ranges:
        # No 'ranges' on the parent: nothing to translate at this level
        return 0
    if isinstance(ranges, list):
        ranges = ranges.copy() # Modified in-place below
    else:
        # Empty value ('ranges;'), meaning the parent and child address spaces
        # are the same
        ranges = []
    nr_p_addr_cells, nr_p_size_cells = get_addr_size_cells(parent_path)
    range_offset = 0
    while ranges:
        # Each entry is (child-addr, parent-addr, length), each field spanning
        # the respective number of 32-bit cells, most-significant cell first
        child_bus_addr = 0
        parent_bus_addr = 0
        range_len = 0
        for x in range(nr_addr_cells):
            val = ranges.pop(0) << (32 * (nr_addr_cells - x - 1))
            child_bus_addr += val
        for x in range(nr_p_addr_cells):
            val = ranges.pop(0) << (32 * (nr_p_addr_cells - x - 1))
            parent_bus_addr += val
        for x in range(nr_size_cells):
            range_len += ranges.pop(0) << (32 * (nr_size_cells - x - 1))
        # if we are outside of the range we don't need to translate
        if child_bus_addr <= addr <= (child_bus_addr + range_len):
            range_offset = parent_bus_addr - child_bus_addr
            break
    # The parent bus address may itself need translating; recurse upwards
    parent_range_offset = translate_addr(addr + range_offset,
                                         parent_path, nr_p_addr_cells,
                                         nr_p_size_cells)
    range_offset += parent_range_offset
    return range_offset
def enable_old_alias_names(enable):
    # Record (module-wide) whether legacy alias labels without compat
    # information should also be generated; consulted by add_prop_aliases().
    global old_alias_names
    old_alias_names = enable
def add_compat_alias(node_path, label_postfix, label, prop_aliases, deprecate=False):
    # For every compatible the node matches, register two aliases for
    # 'label' in 'prop_aliases': the legacy DT_<COMPAT>_<N>_<postfix> form
    # (always marked deprecated) and the DT_INST_<N>_<COMPAT>_<postfix>
    # form (marked deprecated only when 'deprecate' is set).
    if 'instance_id' not in reduced[node_path]:
        return
    for compat, inst in reduced[node_path]['instance_id'].items():
        legacy = 'DT_' + str_to_label(compat) + '_' + str(inst) + '_' + label_postfix
        deprecated.append(legacy)
        prop_aliases[legacy] = label
        inst_form = "DT_INST_{}_{}_{}".format(inst, str_to_label(compat),
                                              label_postfix)
        prop_aliases[inst_form] = label
        if deprecate:
            deprecated.append(inst_form)
def add_prop_aliases(node_path,
                     alias_label_function, prop_label, prop_aliases, deprecate=False):
    # Register aliases for 'prop_label' derived from every DTS /aliases entry
    # pointing at 'node_path':
    #   DT_ALIAS_<X>     - generic alias form
    #   DT_<COMPAT>_<X>  - compat-qualified alias form
    #   <X>              - legacy form, only when old_alias_names is enabled
    # 'alias_label_function' maps the raw alias name to the <X> label text.
    compat_prefix = 'DT_' + str_to_label(get_compat(node_path)) + '_'
    for alias in aliases[node_path]:
        base = alias_label_function(alias)
        for candidate in ('DT_' + 'ALIAS_' + base, compat_prefix + base):
            if candidate != prop_label:
                prop_aliases[candidate] = prop_label
                if deprecate:
                    deprecated.append(candidate)
        if old_alias_names and base != prop_label:
            # Legacy labels are caught by the non-DT_ check at output time
            prop_aliases[base] = prop_label
def get_binding(node_path):
    # Return the YAML binding for 'node_path', or None.
    #
    # Lookup order: (1) for compat-less child nodes, a 'sub-node'/
    # 'child-binding' entry inherited from the parent (or a doubly-nested
    # 'child-binding' from the grandparent); (2) a bus-specific binding keyed
    # by the parent's child bus; (3) the plain compat -> binding table.
    compat = reduced[node_path]['props'].get('compatible')
    if isinstance(compat, list):
        # Only the first (most specific) compatible is considered
        compat = compat[0]
    # Support two levels of recursive 'child-binding:'. The new scripts support
    # any number of levels, but it gets a bit tricky to implement here, because
    # nodes don't store their bindings.
    parent_path = get_parent_path(node_path)
    pparent_path = get_parent_path(parent_path)
    parent_compat = get_compat(parent_path)
    pparent_compat = get_compat(pparent_path) if pparent_path else None
    if parent_compat in bindings or pparent_compat in bindings:
        if compat is None:
            # The node doesn't get a binding from 'compatible'. See if it gets
            # one via 'sub-node' or 'child-binding'.
            parent_binding = bindings.get(parent_compat)
            if parent_binding:
                for sub_key in 'sub-node', 'child-binding':
                    if sub_key in parent_binding:
                        return parent_binding[sub_key]
            # Look for 'child-binding: child-binding: ...' in grandparent node
            pparent_binding = bindings.get(pparent_compat)
            if pparent_binding and 'child-binding' in pparent_binding:
                pp_child_binding = pparent_binding['child-binding']
                if 'child-binding' in pp_child_binding:
                    return pp_child_binding['child-binding']
        # look for a bus-specific binding ('child-bus' is the new-style key,
        # 'child: bus:' the old-style one)
        parent_binding = bindings.get(parent_compat)
        if parent_binding:
            if 'child-bus' in parent_binding:
                bus = parent_binding['child-bus']
                return bus_bindings[bus][compat]
            if 'child' in parent_binding and 'bus' in parent_binding['child']:
                bus = parent_binding['child']['bus']
                return bus_bindings[bus][compat]
    # No bus-specific binding found, look in the main dict.
    if compat:
        return bindings[compat]
    return None
def get_binding_compats():
    # Accessor for the module-global list of compatibles with loaded bindings.
    return binding_compats
def build_cell_array(prop_array):
    # Split a flat phandle-and-specifier list (e.g. the value of 'pwms' or
    # '*-gpios') into one sub-list per entry: [phandle, cell0, cell1, ...].
    #
    # The number of specifier cells per entry is read from the controller
    # node's '#<foo>-cells' property, found via the phandle. Entries whose
    # phandle is 0 or -1 (placeholder '<0>') yield an empty list.
    #
    # prop_array: flat list of ints (a single int is accepted, see below)
    # Returns: list of per-entry lists.
    index = 0
    ret_array = []
    if isinstance(prop_array, int):
        # Work around old code generating an integer for e.g.
        # 'pwms = <&foo>'
        prop_array = [prop_array]
    while index < len(prop_array):
        handle = prop_array[index]
        if handle in {0, -1}:
            ret_array.append([])
            index += 1
        else:
            # get controller node (referenced via phandle)
            cell_parent = phandles[handle]
            num_cells = None
            for prop in reduced[cell_parent]['props']:
                if prop[0] == '#' and '-cells' in prop:
                    num_cells = reduced[cell_parent]['props'][prop]
                    break
            if num_cells is None:
                # Previously a controller without any '#*-cells' property
                # silently reused the cell count from the preceding entry
                # (or raised NameError on the first one). Fail loudly with
                # a diagnosable message instead.
                raise Exception("Controller node " + str(cell_parent) +
                                " has no '#<specifier>-cells' property")
            ret_array.append(prop_array[index:index+num_cells+1])
            index += num_cells + 1
    return ret_array
def child_to_parent_unmap(cell_parent, gpio_index):
    # This function returns a (gpio-controller, pin number) tuple from
    # cell_parent (identified as a 'nexus node', ie: has a 'gpio-map'
    # property) and gpio_index.
    # Note: Nexus nodes and gpio-map property are described in the
    # upcoming (presumably v0.3) Device Tree specification, chapter
    # 'Nexus nodes and Specifier Mapping'.
    # Returns (None, None) when gpio_index matches no map entry.
    # First, retrieve gpio-map as a list
    gpio_map = reduced[cell_parent]['props']['gpio-map']
    # Before parsing, we need to know 'gpio-map' row size
    # gpio-map raws are encoded as follows:
    # [child specifier][gpio controller phandle][parent specifier]
    # child specifier field length is connector property #gpio-cells
    child_specifier_size = reduced[cell_parent]['props']['#gpio-cells']
    # parent specifier field length is parent property #gpio-cells
    # Assumption 1: We assume parent #gpio-cells is constant across
    # the map, so we take the value of the first occurrence and apply
    # to the whole map.
    parent = phandles[gpio_map[child_specifier_size]]
    parent_specifier_size = reduced[parent]['props']['#gpio-cells']
    array_cell_size = child_specifier_size + 1 + parent_specifier_size
    # Now that the length of each entry in 'gpio-map' is known,
    # look for a match with gpio_index
    for i in range(0, len(gpio_map), array_cell_size):
        entry = gpio_map[i:i+array_cell_size]
        # NOTE(review): only the FIRST child-specifier cell is compared
        # against gpio_index; remaining child cells (e.g. flags) are ignored
        if entry[0] == gpio_index:
            parent_controller_phandle = entry[child_specifier_size]
            # Assumption 2: We assume optional properties 'gpio-map-mask'
            # and 'gpio-map-pass-thru' are not specified.
            # So, for now, only the pin number (first value of the parent
            # specifier field) should be returned.
            parent_pin_number = entry[child_specifier_size+1]
            # Return gpio_controller and specifier pin
            return phandles[parent_controller_phandle], parent_pin_number
    # gpio_index did not match any entry in the gpio-map
    return None, None
def extract_controller(node_path, prop, prop_values, index,
                       def_label, generic, handle_single=False,
                       deprecate=False):
    # Emit '<label>_<GENERIC>_CONTROLLER' #defines naming the controller
    # ('label' property value, quoted) for each entry of a phandle-array
    # property such as 'pwms' or '*-gpios'.
    #
    # index: entry to extract when handle_single is True (else all entries)
    # generic: stem used to build the _CONTROLLER define name
    # deprecate: mark the generated defines/aliases as deprecated
    prop_def = {}
    prop_alias = {}
    prop_array = build_cell_array(prop_values)
    if handle_single:
        prop_array = [prop_array[index]]
    for i, elem in enumerate(prop_array):
        num_cells = len(elem)
        # if the entry is empty, skip
        if num_cells == 0:
            continue
        cell_parent = phandles[elem[0]]
        if 'gpio-map' in reduced[cell_parent]['props']:
            # Parent is a gpio 'nexus node' (ie has gpio-map).
            # Controller should be found in the map, using elem[1] as index.
            # Pin attribues (number, flag) will not be used in this function
            cell_parent, _ = child_to_parent_unmap(cell_parent, elem[1])
            if cell_parent is None:
                raise Exception("No parent matching child specifier")
        l_cell = reduced[cell_parent]['props'].get('label')
        if l_cell is None:
            # Controllers without a 'label' property produce no define
            continue
        l_base = [def_label]
        # Check is defined should be indexed (_0, _1)
        if handle_single or i == 0 and len(prop_array) == 1:
            # 0 or 1 element in prop_values
            l_idx = []
        else:
            l_idx = [str(i)]
        l_cellname = str_to_label(generic + '_' + 'controller')
        label = l_base + [l_cellname] + l_idx
        add_compat_alias(node_path, '_'.join(label[1:]), '_'.join(label), prop_alias, deprecate)
        prop_def['_'.join(label)] = "\"" + l_cell + "\""
        #generate defs also if node is referenced as an alias in dts
        if node_path in aliases:
            add_prop_aliases(
                node_path,
                lambda alias: '_'.join([str_to_label(alias)] + label[1:]),
                '_'.join(label),
                prop_alias, deprecate)
    insert_defs(node_path, prop_def, prop_alias)
    if deprecate:
        # deprecated_main marks these names for __DEPRECATED_MACRO output
        deprecated_main.extend(list(prop_def.keys()))
def extract_cells(node_path, prop, prop_values, names, index,
                  def_label, generic, handle_single=False,
                  deprecate=False):
    # Emit per-cell #defines (e.g. '<label>_<GENERIC>_<CELLNAME>') for each
    # entry of a phandle-array property, using the controller binding's
    # cell-name list to name each specifier cell.
    #
    # names: '*-names' list; consumed destructively (pop) one per entry —
    #        callers must pass a copy they can afford to lose
    # index: entry to extract when handle_single is True (else all entries)
    # deprecate: mark the generated defines/aliases as deprecated
    prop_array = build_cell_array(prop_values)
    if handle_single:
        prop_array = [prop_array[index]]
    for i, elem in enumerate(prop_array):
        num_cells = len(elem)
        # if the entry is empty, skip
        if num_cells == 0:
            continue
        cell_parent = phandles[elem[0]]
        if 'gpio-map' in reduced[cell_parent]['props']:
            # Parent is a gpio connector ie 'nexus node', ie has gpio-map).
            # Controller and pin number should be found in the connector map,
            # using elem[1] as index.
            # Parent pin flag is not used, so child flag(s) value (elem[2:])
            # is kept as is.
            cell_parent, elem[1] = child_to_parent_unmap(cell_parent, elem[1])
            if cell_parent is None:
                raise Exception("No parent matching child specifier")
        try:
            cell_yaml = get_binding(cell_parent)
        except:
            raise Exception(
                "Could not find yaml description for " +
                reduced[cell_parent]['name'])
        try:
            name = names.pop(0).upper()
        except Exception:
            # names may be None or exhausted; fall back to unnamed defines
            name = ''
        # Get number of cells per element of current property
        # NOTE(review): no 'break' here — the LAST matching '#*-cells' prop
        # wins, and cell_yaml_names persists if none match; presumably every
        # controller has exactly one such property — confirm
        for props in reduced[cell_parent]['props']:
            if props[0] == '#' and '-cells' in props:
                if props[1:] in cell_yaml:
                    cell_yaml_names = props[1:]  # #foo-cells -> foo-cells
                else:
                    cell_yaml_names = '#cells'
        l_cell = [str_to_label(str(generic))]
        l_base = [def_label]
        # Check if #define should be indexed (_0, _1, ...)
        if handle_single or i == 0 and len(prop_array) == 1:
            # Less than 2 elements in prop_values
            # Indexing is not needed
            l_idx = []
        else:
            l_idx = [str(i)]
        prop_def = {}
        prop_alias = {}
        # Generate label for each field of the property element
        for j in range(num_cells-1):
            l_cellname = [str(cell_yaml[cell_yaml_names][j]).upper()]
            if l_cell == l_cellname:
                # Avoid 'FOO_FOO' when the cell is named like the property
                label = l_base + l_cell + l_idx
            else:
                label = l_base + l_cell + l_cellname + l_idx
            label_name = l_base + [name] + l_cellname
            add_compat_alias(node_path, '_'.join(label[1:]), '_'.join(label), prop_alias, deprecate)
            prop_def['_'.join(label)] = elem[j+1]
            if name:
                prop_alias['_'.join(label_name)] = '_'.join(label)
            # generate defs for node aliases
            if node_path in aliases:
                add_prop_aliases(
                    node_path,
                    lambda alias: '_'.join([str_to_label(alias)] + label[1:]),
                    '_'.join(label),
                    prop_alias, deprecate)
        insert_defs(node_path, prop_def, prop_alias)
        if deprecate:
            deprecated_main.extend(list(prop_def.keys()))
def err(msg):
    # General error reporting helper: prints "error: <msg>" to stderr and
    # terminates the process with exit status 1. (sys.exit() is just a
    # SystemExit raise, so raising it directly is equivalent.)
    raise SystemExit("error: " + msg)

View file

@ -1,122 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from extract.globals import *
from extract.directive import DTDirective
##
# @brief Manage interrupts directives.
#
class DTInterrupts(DTDirective):
    ##
    # @brief Extract interrupts
    #
    # Emits one define per interrupt cell, named '<label>_IRQ_<idx>[_<cell>]',
    # plus name-based, compat-based and /aliases-based alias defines.
    #
    # @param node_path Path to node owning the
    #                  interrupts definition.
    # @param prop compatible property name
    # @param names list of '*-names' for the interrupts (consumed in-place)
    # @param def_label Define label string of node owning the
    #                  compatible definition.
    #
    def extract(self, node_path, prop, names, def_label):
        if prop == "interrupts-extended":
            # interrupts-extended is not supported by this legacy extractor
            return
        vals = reduced[node_path]['props'][prop]
        if not isinstance(vals, list):
            vals = [vals]
        irq_parent = parent_irq_node(node_path)
        if not irq_parent:
            err(node_path + " has no interrupt-parent")
        l_base = [def_label]
        index = 0
        # Consume '#interrupt-cells' values per interrupt until exhausted
        while vals:
            prop_def = {}
            prop_alias = {}
            l_idx = [str(index)]
            if names:
                name = [str_to_label(names.pop(0))]
            else:
                name = []
            cell_yaml = get_binding(irq_parent)
            l_cell_prefix = ['IRQ']
            for i in range(reduced[irq_parent]['props']['#interrupt-cells']):
                # Cell names come from the controller binding; old bindings
                # use '#cells', newer ones 'interrupt-cells'
                if "interrupt-cells" in cell_yaml:
                    cell_yaml_name = "interrupt-cells"
                else:
                    cell_yaml_name = "#cells"
                l_cell_name = [cell_yaml[cell_yaml_name][i].upper()]
                if l_cell_name == l_cell_prefix:
                    # Avoid 'IRQ_IRQ' in the generated name
                    l_cell_name = []
                full_name = '_'.join(l_base + l_cell_prefix + l_idx + l_cell_name)
                prop_def[full_name] = vals.pop(0)
                add_compat_alias(node_path,
                        '_'.join(l_cell_prefix + l_idx + l_cell_name),
                        full_name, prop_alias)
                if name:
                    alias_list = l_base + l_cell_prefix + name + l_cell_name
                    prop_alias['_'.join(alias_list)] = full_name
                    add_compat_alias(node_path,
                            '_'.join(l_cell_prefix + name + l_cell_name),
                            full_name, prop_alias)
                if node_path in aliases:
                    add_prop_aliases(
                        node_path,
                        lambda alias:
                            '_'.join([str_to_label(alias)] +
                                     l_cell_prefix + l_idx + l_cell_name),
                        full_name,
                        prop_alias)
                    if name:
                        add_prop_aliases(
                            node_path,
                            lambda alias:
                                '_'.join([str_to_label(alias)] +
                                         l_cell_prefix + name + l_cell_name),
                            full_name,
                            prop_alias)
                    else:
                        # Unnamed alias form is emitted but marked deprecated
                        add_prop_aliases(
                            node_path,
                            lambda alias:
                                '_'.join([str_to_label(alias)] +
                                         l_cell_prefix + name + l_cell_name),
                            full_name,
                            prop_alias, True)
            index += 1
            insert_defs(node_path, prop_def, prop_alias)
def parent_irq_node(node_path):
    # Walk up from 'node_path' looking for the nearest ancestor (including
    # the node itself) carrying 'interrupt-parent'; return the controller
    # node that its phandle references, or None if no ancestor declares one.
    path = node_path
    while path:
        phandle = reduced[path]['props'].get('interrupt-parent')
        if phandle is not None:
            return phandles[phandle]
        path = get_parent_path(path)
    return None
##
# @brief Management information for interrupts.
#
# Module-level singleton used by extract_dts_includes.py's dispatch table.
interrupts = DTInterrupts()

View file

@ -1,128 +0,0 @@
#
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
from copy import deepcopy
from extract.globals import *
from extract.directive import DTDirective
##
# @brief Manage reg directive.
#
class DTReg(DTDirective):
    ##
    # @brief Extract reg directive info
    #
    # Emits '<label>_BASE_ADDRESS[_n]' and '<label>_SIZE[_n]' defines for
    # each (address, size) pair in 'reg', translating addresses through
    # 'ranges'. For SPI slaves it also emits chip-select GPIO defines.
    #
    # @param node_path Path to node owning the
    #                  reg definition.
    # @param names (unused)
    # @param def_label Define label string of node owning the
    #                  compatible definition.
    # @param div divisor applied to sizes (e.g. 1024 for K units)
    #
    def extract(self, node_path, names, def_label, div):
        binding = get_binding(node_path)
        reg = reduced[node_path]['props']['reg']
        if not isinstance(reg, list):
            reg = [reg]
        (nr_address_cells, nr_size_cells) = get_addr_size_cells(node_path)
        if 'parent' in binding:
            bus = binding['parent']['bus']
            if bus == 'spi':
                # SPI slave: reg[0] is the chip-select index into the bus
                # master's 'cs-gpios'; emit both the deprecated singular
                # ("cs-gpio") and current plural ("cs-gpios") define forms
                cs_gpios = None
                try:
                    cs_gpios = deepcopy(find_parent_prop(node_path, 'cs-gpios'))
                except Exception:
                    # Bus master without cs-gpios: nothing to emit
                    pass
                if cs_gpios:
                    extract_controller(node_path, "cs-gpios", cs_gpios, reg[0], def_label, "cs-gpio", True, True)
                    extract_controller(node_path, "cs-gpios", cs_gpios, reg[0], def_label, "cs-gpios", True)
                    extract_cells(node_path, "cs-gpios", cs_gpios, None, reg[0], def_label, "cs-gpio", True, True)
                    extract_cells(node_path, "cs-gpios", cs_gpios, None, reg[0], def_label, "cs-gpios", True)
        # generate defines
        l_base = [def_label]
        l_addr = [str_to_label("BASE_ADDRESS")]
        l_size = ["SIZE"]
        index = 0
        props = list(reg)
        # Consume one (address, size) pair per loop iteration
        while props:
            prop_def = {}
            prop_alias = {}
            addr = 0
            size = 0
            # Check is defined should be indexed (_0, _1)
            if index == 0 and len(props) <= (nr_address_cells + nr_size_cells):
                # 1 element (len 2) or no element (len 0) in props
                l_idx = []
            else:
                l_idx = [str(index)]
            try:
                name = [names.pop(0).upper()]
            except Exception:
                name = []
            # Assemble multi-cell values, most-significant cell first
            for x in range(nr_address_cells):
                addr += props.pop(0) << (32 * (nr_address_cells - x - 1))
            for x in range(nr_size_cells):
                size += props.pop(0) << (32 * (nr_size_cells - x - 1))
            addr += translate_addr(addr, node_path,
                                   nr_address_cells, nr_size_cells)
            l_addr_fqn = '_'.join(l_base + l_addr + l_idx)
            l_size_fqn = '_'.join(l_base + l_size + l_idx)
            if nr_address_cells:
                prop_def[l_addr_fqn] = hex(addr)
                add_compat_alias(node_path, '_'.join(l_addr + l_idx), l_addr_fqn, prop_alias)
            if nr_size_cells:
                prop_def[l_size_fqn] = int(size / div)
                add_compat_alias(node_path, '_'.join(l_size + l_idx), l_size_fqn, prop_alias)
            if name:
                if nr_address_cells:
                    prop_alias['_'.join(l_base + name + l_addr)] = l_addr_fqn
                    add_compat_alias(node_path, '_'.join(name + l_addr), l_addr_fqn, prop_alias)
                if nr_size_cells:
                    prop_alias['_'.join(l_base + name + l_size)] = l_size_fqn
                    add_compat_alias(node_path, '_'.join(name + l_size), l_size_fqn, prop_alias)
            # generate defs for node aliases
            if node_path in aliases:
                add_prop_aliases(
                    node_path,
                    lambda alias:
                        '_'.join([str_to_label(alias)] + l_addr + l_idx),
                    l_addr_fqn,
                    prop_alias)
                if nr_size_cells:
                    add_prop_aliases(
                        node_path,
                        lambda alias:
                            '_'.join([str_to_label(alias)] + l_size + l_idx),
                        l_size_fqn,
                        prop_alias)
            insert_defs(node_path, prop_def, prop_alias)
            # increment index for definition creation
            index += 1
##
# @brief Management information for registers.
#
# Module-level singleton used by extract_dts_includes.py's dispatch table.
reg = DTReg()

View file

@ -1,572 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (c) 2017, Linaro Limited
# Copyright (c) 2018, Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE: This file is part of the old device tree scripts, which will be removed
# later. They are kept to generate some legacy #defines via the
# --deprecated-only flag.
#
# The new scripts are gen_defines.py, edtlib.py, and dtlib.py.
# vim: ai:ts=4:sw=4
import os, fnmatch
import re
import argparse
from collections import defaultdict
import yaml
try:
# Use the C LibYAML parser if available, rather than the Python parser.
# It's much faster.
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
from devicetree import parse_file
from extract.globals import *
import extract.globals
from extract.clocks import clocks
from extract.compatible import compatible
from extract.interrupts import interrupts
from extract.reg import reg
from extract.flash import flash
from extract.default import default
def extract_bus_name(node_path, def_label):
    # Emit the '<def_label>_BUS_NAME' #define for a node sitting on a bus.
    # Its value is the parent (bus master) node's 'label' property, quoted.
    bus_label = def_label + '_BUS_NAME'
    alias_map = {}
    add_compat_alias(node_path, 'BUS_NAME', bus_label, alias_map)
    if node_path in aliases:
        # Mirror the define under every /aliases name pointing at the node
        add_prop_aliases(
            node_path,
            lambda alias: str_to_label(alias) + '_BUS_NAME',
            bus_label,
            alias_map)
    parent_label = find_parent_prop(node_path, 'label')
    insert_defs(node_path, {bus_label: '"' + parent_label + '"'}, alias_map)
def extract_string_prop(node_path, key, label):
    # Define 'label' as the quoted value of string property 'key' on
    # 'node_path'. Creates the node's defs entry on first use; every entry
    # always carries the special 'aliases' key to spare existence checks
    # elsewhere.
    node_defs = defs.setdefault(node_path, {'aliases': {}})
    node_defs[label] = '"' + reduced[node_path]['props'][key] + '"'
def generate_prop_defines(node_path, prop):
    # Generates #defines (and .conf file values) from the prop
    # named 'prop' on the device tree node at 'node_path', dispatching to
    # the per-directive extractors (reg/interrupts/compatible/clocks/...).
    binding = get_binding(node_path)
    if 'parent-bus' in binding or \
       'parent' in binding and 'bus' in binding['parent']:
        # If the binding specifies a parent for the node, then include the
        # parent in the #define's generated for the properties
        parent_path = get_parent_path(node_path)
        def_label = 'DT_' + node_label(parent_path) + '_' \
                          + node_label(node_path)
    else:
        def_label = 'DT_' + node_label(node_path)
    names = prop_names(reduced[node_path], prop)
    if prop == 'reg':
        reg.extract(node_path, names, def_label, 1)
    elif prop in {'interrupts', 'interrupts-extended'}:
        interrupts.extract(node_path, prop, names, def_label)
    elif prop == 'compatible':
        compatible.extract(node_path, prop, def_label)
    elif 'clocks' in prop:
        clocks.extract(node_path, prop, def_label)
    elif 'pwms' in prop or '-gpios' in prop or prop == "gpios":
        prop_values = reduced[node_path]['props'][prop]
        generic = prop[:-1]  # Drop the 's' from the prop
        # Each extractor runs twice on purpose: once for the deprecated
        # singular-name form, once for the current plural-name form.
        # Deprecated the non-'S' form
        extract_controller(node_path, prop, prop_values, 0,
                           def_label, generic, deprecate=True)
        extract_controller(node_path, prop, prop_values, 0,
                           def_label, prop)
        # Deprecated the non-'S' form
        extract_cells(node_path, prop, prop_values,
                      names, 0, def_label, generic, deprecate=True)
        extract_cells(node_path, prop, prop_values,
                      names, 0, def_label, prop)
    else:
        # Anything else is a plain typed property handled generically
        default.extract(node_path, prop,
                        binding['properties'][prop]['type'],
                        def_label)
def generate_node_defines(node_path):
    # Generates #defines (and .conf file values) from the device
    # tree node at 'node_path'. Nodes without a known binding compat are
    # skipped entirely; flash partitions get dedicated one-shot handling.
    if get_compat(node_path) not in get_binding_compats():
        return
    # We extract a few different #defines for a flash partition, so it's easier
    # to handle it in one step
    if 'partition@' in node_path:
        flash.extract_partition(node_path)
        return
    if get_binding(node_path) is None:
        return
    generate_bus_defines(node_path)
    props = get_binding(node_path).get('properties')
    if not props:
        return
    # Generate per-property ('foo = <1 2 3>', etc.) #defines
    for yaml_prop, yaml_val in props.items():
        # Skip '#*-cells' style keys and '*-map' nexus tables
        if yaml_prop.startswith("#") or yaml_prop.endswith("-map"):
            continue
        match = False
        # Handle each property individually, this ends up handling common
        # patterns for things like reg, interrupts, etc that we don't need
        # any special case handling at a node level
        # (binding property names act as regexes against DTS property names)
        for prop in reduced[node_path]['props']:
            if re.fullmatch(yaml_prop, prop):
                match = True
                generate_prop_defines(node_path, prop)
        # Handle the case that we have a boolean property, but its not
        # in the dts
        if not match and yaml_val['type'] == 'boolean':
            generate_prop_defines(node_path, yaml_prop)
def generate_bus_defines(node_path):
    # Generates any node-level #defines related to
    #
    # parent:
    #     bus: ...
    #
    # Validates that the parent node's binding declares itself as master of
    # the same bus, then emits the *_BUS_NAME define.
    binding = get_binding(node_path)
    if not ('parent-bus' in binding or
            'parent' in binding and 'bus' in binding['parent']):
        # Node is not attached to a bus; nothing to do
        return
    parent_path = get_parent_path(node_path)
    # Check that parent has matching child bus value
    try:
        parent_binding = get_binding(parent_path)
        if 'child-bus' in parent_binding:
            parent_bus = parent_binding['child-bus']
        else:
            parent_bus = parent_binding['child']['bus']
    except (KeyError, TypeError):
        raise Exception("{0} defines parent {1} as bus master, but {1} is not "
                        "configured as bus master in binding"
                        .format(node_path, parent_path))
    if 'parent-bus' in binding:
        bus = binding['parent-bus']
    else:
        bus = binding['parent']['bus']
    if parent_bus != bus:
        raise Exception("{0} defines parent {1} as {2} bus master, but {1} is "
                        "configured as {3} bus master"
                        .format(node_path, parent_path, bus, parent_bus))
    # Generate *_BUS_NAME #define
    extract_bus_name(
        node_path,
        'DT_' + node_label(parent_path) + '_' + node_label(node_path))
def prop_names(node, prop_name):
    # Return the '*-names' list paired with property 'prop_name' on 'node'
    # (e.g. 'interrupt-names' for 'interrupts', 'reg-names' for 'reg').
    # The singular form ('interrupts' -> 'interrupt-names') is tried first,
    # then the literal form ('reg' -> 'reg-names'). The result is always a
    # fresh list, safe for callers to consume destructively.
    props = node['props']
    names = props.get(prop_name[:-1] + '-names', []) or \
            props.get(prop_name + '-names', [])
    if isinstance(names, list):
        return list(names)
    # Single name stored as a bare scalar
    return [names]
def merge_properties(parent, fname, to_dict, from_dict):
    # Recursive dict merge implementing !include semantics: every entry of
    # 'from_dict' is copied into 'to_dict'; keys whose values are dicts on
    # BOTH sides merge recursively, anything else is overwritten.
    # 'parent' is the key currently being descended into and 'fname' the
    # top-level .yaml file (kept for parity with call sites).
    for key, value in from_dict.items():
        existing = to_dict.get(key)
        if isinstance(existing, dict) and isinstance(value, dict):
            merge_properties(key, fname, existing, value)
        else:
            to_dict[key] = value
def merge_included_bindings(fname, node):
    # Resolve 'include:' (new style) and 'inherits:' (old style) entries in
    # a binding, layering the binding's own properties on top of everything
    # it pulls in. Returns the fully merged YAML node.
    res = node
    if "include" in node:
        included = node.pop("include")
        # Both 'include: foo.yaml' and 'include: [a.yaml, b.yaml]' are valid
        fnames = [included] if isinstance(included, str) else included
        for included_fname in fnames:
            merged = merge_included_bindings(fname,
                                             load_binding_file(included_fname))
            merge_properties(None, fname, merged, res)
            res = merged
    if 'inherits' in node:
        for inherited in node.pop('inherits'):
            merged = merge_included_bindings(fname, inherited)
            merge_properties(None, fname, merged, res)
            res = merged
    return res
def define_str(name, value, value_tabs, is_deprecated=False):
line = "#define " + name
if is_deprecated:
line += " __DEPRECATED_MACRO "
return line + (value_tabs - len(line)//8)*'\t' + str(value) + '\n'
def write_conf(f):
    # Write the key=value .conf output: one commented section per node, with
    # all DT_-prefixed defines followed by their DT_-prefixed aliases.
    for node in sorted(defs):
        f.write('# ' + node.split('/')[-1] + '\n')
        for prop in sorted(defs[node]):
            # 'aliases' is the special per-node alias table, not a define
            if prop != 'aliases' and prop.startswith("DT_"):
                f.write('%s=%s\n' % (prop, defs[node][prop]))
        for alias in sorted(defs[node]['aliases']):
            alias_target = defs[node]['aliases'][alias]
            if alias_target not in defs[node]:
                # Alias points at another alias; follow one more level to
                # reach the real define
                alias_target = defs[node]['aliases'][alias_target]
            if alias.startswith("DT_"):
                f.write('%s=%s\n' % (alias, defs[node].get(alias_target)))
        f.write('\n')
def write_header(f, deprecated_only):
    # Write the generated C header: a fixed preamble, then per-node sections
    # of #define lines (values tab-aligned per section). Defines listed in
    # 'deprecated'/'deprecated_main' or lacking the DT_ prefix are tagged
    # __DEPRECATED_MACRO; with deprecated_only=True only those are emitted.
    f.write('''\
/**********************************************
 * Generated include file
 * DO NOT MODIFY
 */
#ifndef GENERATED_DTS_BOARD_UNFIXED_H
#define GENERATED_DTS_BOARD_UNFIXED_H
''')
    # Longest key in 'dct'; used to compute the per-section tab alignment
    def max_dict_key(dct):
        return max(len(key) for key in dct)
    for node in sorted(defs):
        f.write('/* ' + node.split('/')[-1] + ' */\n')
        maxlen = max_dict_key(defs[node])
        if defs[node]['aliases']:
            maxlen = max(maxlen, max_dict_key(defs[node]['aliases']))
        maxlen += len('#define ')
        value_tabs = (maxlen + 8)//8  # Tabstop index for value
        if 8*value_tabs - maxlen <= 2:
            # Add some minimum room between the macro name and the value
            value_tabs += 1
        for prop in sorted(defs[node]):
            if prop != 'aliases':
                deprecated_warn = False
                if prop in deprecated_main:
                    deprecated_warn = True
                if not prop.startswith('DT_'):
                    deprecated_warn = True
                if deprecated_only and not deprecated_warn:
                    continue
                f.write(define_str(prop, defs[node][prop], value_tabs, deprecated_warn))
        for alias in sorted(defs[node]['aliases']):
            alias_target = defs[node]['aliases'][alias]
            deprecated_warn = False
            # Mark any non-DT_ prefixed define as deprecated except
            # for now we special case LED, SW, and *PWM_LED*
            if not alias.startswith('DT_'):
                deprecated_warn = True
            if alias in deprecated:
                deprecated_warn = True
            if deprecated_only and not deprecated_warn:
                continue
            f.write(define_str(alias, alias_target, value_tabs, deprecated_warn))
        f.write('\n')
    f.write('#endif\n')
def load_bindings(root, binding_dirs):
    # Load every binding (.yaml) under 'binding_dirs' whose compatible
    # matches a compatible mentioned in the DTS, resolving !include and
    # inheritance, then publish the results into extract.globals
    # (bindings, bus_bindings, binding_compats).
    find_binding_files(binding_dirs)
    dts_compats = all_compats(root)
    compat_to_binding = {}
    # Maps buses to dictionaries that map compats to YAML nodes
    bus_to_binding = defaultdict(dict)
    compats = []
    # Add '!include foo.yaml' handling
    Loader.add_constructor('!include', yaml_include)
    # Code below is adapated from edtlib.py
    # Searches for any 'compatible' string mentioned in the devicetree
    # files, with a regex
    dt_compats_search = re.compile(
        "|".join(re.escape(compat) for compat in dts_compats)
    ).search
    for file in binding_files:
        with open(file, encoding="utf-8") as f:
            contents = f.read()
        # Cheap pre-filter: skip files that mention no DTS compatible at all
        if not dt_compats_search(contents):
            continue
        binding = yaml.load(contents, Loader=Loader)
        binding_compats = _binding_compats(binding)
        if not binding_compats:
            continue
        # Re-parse with !include/inherits resolution only for relevant files
        with open(file, 'r', encoding='utf-8') as yf:
            binding = merge_included_bindings(file,
                                              yaml.load(yf, Loader=Loader))
        for compat in binding_compats:
            if compat not in compats:
                compats.append(compat)
            if 'parent-bus' in binding:
                bus_to_binding[binding['parent-bus']][compat] = binding
            if 'parent' in binding:
                bus_to_binding[binding['parent']['bus']][compat] = binding
            compat_to_binding[compat] = binding
    if not compat_to_binding:
        raise Exception("No bindings found in '{}'".format(binding_dirs))
    extract.globals.bindings = compat_to_binding
    extract.globals.bus_bindings = bus_to_binding
    extract.globals.binding_compats = compats
def _binding_compats(binding):
# Adapated from edtlib.py
def new_style_compats():
if binding is None or "compatible" not in binding:
return []
val = binding["compatible"]
if isinstance(val, str):
return [val]
return val
def old_style_compat():
try:
return binding["properties"]["compatible"]["constraint"]
except Exception:
return None
new_compats = new_style_compats()
old_compat = old_style_compat()
if old_compat:
return [old_compat]
return new_compats
def find_binding_files(binding_dirs):
    # Populate the module-global 'binding_files' list with the path of every
    # .yaml file found recursively under the given directories.
    global binding_files
    binding_files = []
    for binding_dir in binding_dirs:
        for root, _, filenames in os.walk(binding_dir):
            binding_files.extend(
                os.path.join(root, name)
                for name in fnmatch.filter(filenames, '*.yaml'))
def yaml_include(loader, node):
    # Implements the '!include' YAML tag. Returns a list holding the
    # top-level structure of each included file, even when only one file is
    # given. Unsupported node types abort via yaml_inc_error() (which raises).
    if isinstance(node, yaml.ScalarNode):
        # '!include foo.yaml'
        fnames = [loader.construct_scalar(node)]
    elif isinstance(node, yaml.SequenceNode):
        # '!include [foo.yaml, bar.yaml]'
        fnames = loader.construct_sequence(node)
    else:
        yaml_inc_error("Error: unrecognised node type in !include statement")
    return [load_binding_file(fname) for fname in fnames]
def load_binding_file(fname):
    # yaml_include() helper: '!include' names just a basename, so locate the
    # unique matching path among the discovered binding files and parse it.
    # Zero or multiple candidates are reported via yaml_inc_error() (raises).
    wanted = os.path.basename(fname)
    filepaths = [path for path in binding_files
                 if os.path.basename(path) == wanted]
    if not filepaths:
        yaml_inc_error("Error: unknown file name '{}' in !include statement"
                       .format(fname))
    if len(filepaths) > 1:
        yaml_inc_error("Error: multiple candidates for file name '{}' in "
                       "!include statement: {}".format(fname, filepaths))
    with open(filepaths[0], 'r', encoding='utf-8') as f:
        return yaml.load(f, Loader=Loader)
def yaml_inc_error(msg):
    # Helper for reporting errors in the !include implementation. Always
    # raises yaml.constructor.ConstructorError, so it never returns.
    raise yaml.constructor.ConstructorError(None, None, msg)
def generate_defines():
    # Generates #defines (and .conf file values) from DTS: walks every
    # reduced node, then handles the /chosen-driven register and label
    # defines plus the flash-specific outputs.
    # sorted() otherwise Python < 3.6 randomizes the order of the flash
    # partition table
    for node_path in sorted(reduced.keys()):
        generate_node_defines(node_path)
    if not defs:
        raise Exception("No information parsed from dts file.")
    # regs_config maps /chosen keys to define labels; sizes are emitted in K
    # (div=1024)
    for k, v in regs_config.items():
        if k in chosen:
            reg.extract(chosen[k], None, v, 1024)
    # name_config maps /chosen keys to label-string defines
    for k, v in name_config.items():
        if k in chosen:
            extract_string_prop(chosen[k], "label", v)
    flash.extract_flash()
    flash.extract_code_partition()
def parse_arguments():
    # Build and run the command-line parser. -d/--dts and -y/--yaml are
    # mandatory; the remaining flags select which outputs get generated and
    # whether legacy alias names are included.
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("-d", "--dts", required=True, help="DTS file")
    parser.add_argument("-y", "--yaml", nargs='+', required=True,
                        help="YAML file directories, we allow multiple")
    parser.add_argument("-i", "--include",
                        help="Generate include file for the build system")
    parser.add_argument("-k", "--keyvalue",
                        help="Generate config file for the build system")
    parser.add_argument("--old-alias-names", action='store_true',
                        help="Generate aliases also in the old way, without "
                             "compatibility information in their labels")
    parser.add_argument("--deprecated-only", action='store_true',
                        help="Generate only the deprecated defines")
    return parser.parse_args()
def main():
    # Entry point: parse the DTS, build the global lookup tables, load
    # matching bindings, generate all defines, and write the requested
    # .conf / header outputs.
    args = parse_arguments()
    enable_old_alias_names(args.old_alias_names)
    # Parse DTS and fetch the root node
    with open(args.dts, 'r', encoding='utf-8') as f:
        root = parse_file(f)['/']
    # Create some global data structures from the parsed DTS
    create_reduced(root, '/')
    create_phandles(root, '/')
    create_aliases(root)
    create_chosen(root)
    # Re-sort instance_id by reg addr
    #
    # Note: this is a short term fix and should be removed when
    # generate defines for instance with a prefix like 'DT_INST'
    #
    # Build a dict of dicts, first level is index by compat
    # second level is index by reg addr
    compat_reg_dict = defaultdict(dict)
    for node in reduced.values():
        instance = node.get('instance_id')
        if instance and node['addr'] is not None:
            for compat in instance:
                reg = node['addr']
                compat_reg_dict[compat][reg] = node
    # Walk the reg addr in sorted order to re-index 'instance_id'
    for compat in compat_reg_dict:
        # only update if we have more than one instance
        if len(compat_reg_dict[compat]) > 1:
            for idx, reg_addr in enumerate(sorted(compat_reg_dict[compat])):
                compat_reg_dict[compat][reg_addr]['instance_id'][compat] = idx
    # Load any bindings (.yaml files) that match 'compatible' values from the
    # DTS
    load_bindings(root, args.yaml)
    # Generate keys and values for the configuration file and the header file
    generate_defines()
    # Write the configuration file and the header file
    if args.keyvalue is not None:
        with open(args.keyvalue, 'w', encoding='utf-8') as f:
            write_conf(f)
    if args.include is not None:
        with open(args.include, 'w', encoding='utf-8') as f:
            write_header(f, args.deprecated_only)
# Run only when executed as a script, not when imported
if __name__ == '__main__':
    main()