scripts: extract_dts_includes.py: Adhere to PEP8

This patch PEP8-ifies the extract_dts_includes.py script

Signed-off-by: Andy Gross <andy.gross@linaro.org>
Authored by Andy Gross on 2017-06-15 13:15:23 -04:00; committed by Kumar Gala
commit 70e54f9ef7
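
The diff below applies a handful of recurring PEP8 fixes: the unused pprint import and a few dead assignments are dropped, trailing semicolons are removed (E703), comparisons against None become identity checks (E711), "not x in y" becomes "x not in y" (E713), top-level definitions get two blank lines of separation (E302), and long statements are wrapped to stay under 79 columns (E501). A minimal standalone sketch of a few of these patterns, using hypothetical values rather than lines taken from the script:

import argparse

compat = "example,compatible-string"

# E711: compare against None with an identity check, not "!= None".
if compat is not None:
    # E703: no trailing semicolon on statements.
    label = compat.replace("-", "_").replace(",", "_")

# E501: keep lines under 79 columns, here by binding the long formatter
# class to a short name first (the same approach the patch takes in
# parse_arguments()).
rdh = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(description="sketch", formatter_class=rdh)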


diff --git a/scripts/extract_dts_includes.py b/scripts/extract_dts_includes.py
--- a/scripts/extract_dts_includes.py
+++ b/scripts/extract_dts_includes.py
@@ -7,7 +7,6 @@ from os import listdir
 import os
 import re
 import yaml
-import pprint
 import argparse
 from devicetree import parse_file
@@ -19,12 +18,14 @@ aliases = {}
 chosen = {}
 reduced = {}
 
+
 def convert_string_to_label(s):
     # Transmute ,- to _
-    s = s.replace("-", "_");
-    s = s.replace(",", "_");
+    s = s.replace("-", "_")
+    s = s.replace(",", "_")
     return s
 
+
 def get_all_compatibles(d, name, comp_dict):
     if 'props' in d:
         compat = d['props'].get('compatible')
@@ -33,7 +34,7 @@ def get_all_compatibles(d, name, comp_dict):
     if enabled == "disabled":
         return comp_dict
 
-    if compat != None:
+    if compat is not None:
         comp_dict[name] = compat
 
     if name != '/':
@@ -46,6 +47,7 @@ def get_all_compatibles(d, name, comp_dict):
     return comp_dict
 
+
 def get_aliases(root):
     if 'children' in root:
         if 'aliases' in root['children']:
@@ -54,6 +56,7 @@ def get_aliases(root):
     return
 
+
 def get_compat(node):
     compat = None
@@ -66,6 +69,7 @@ def get_compat(node):
     return compat
 
+
 def get_chosen(root):
     if 'children' in root:
@@ -75,6 +79,7 @@ def get_chosen(root):
     return
 
+
 def get_phandles(root, name, handles):
     if 'props' in root:
@@ -84,7 +89,7 @@ def get_phandles(root, name, handles):
     if enabled == "disabled":
         return
 
-    if handle != None:
+    if handle is not None:
         phandles[handle] = name
 
     if name != '/':
@@ -97,6 +102,7 @@ def get_phandles(root, name, handles):
     return
 
+
 class Loader(yaml.Loader):
     def __init__(self, stream):
         self._root = os.path.realpath(stream.name)
@@ -135,6 +141,7 @@ class Loader(yaml.Loader):
         with open(filepath, 'r') as f:
             return yaml.load(f, Loader)
 
+
 def insert_defs(node_address, defs, new_defs, new_aliases):
     if node_address in defs:
         if 'aliases' in defs[node_address]:
@@ -149,6 +156,7 @@ def insert_defs(node_address, defs, new_defs, new_aliases):
     return
 
+
 def find_node_by_path(nodes, path):
     d = nodes
     for k in path[1:].split('/'):
@@ -156,6 +164,7 @@ def find_node_by_path(nodes, path):
     return d
 
+
 def compress_nodes(nodes, path):
     if 'props' in nodes:
         status = nodes['props'].get('status')
@@ -174,16 +183,19 @@ def compress_nodes(nodes, path):
     return
 
+
 def find_parent_irq_node(node_address):
     address = ''
 
     for comp in node_address.split('/')[1:]:
         address += '/' + comp
         if 'interrupt-parent' in reduced[address]['props']:
-            interrupt_parent = reduced[address]['props'].get('interrupt-parent')
+            interrupt_parent = reduced[address]['props'].get(
+                'interrupt-parent')
 
     return reduced[phandles[interrupt_parent]]
 
+
 def extract_interrupts(node_address, yaml, y_key, names, defs, def_label):
     node = reduced[node_address]
@@ -214,7 +226,8 @@ def extract_interrupts(node_address, yaml, y_key, names, defs, def_label):
             cell_parent = irq_parent
 
         cell_yaml = yaml[get_compat(cell_parent)]
-        l_cell_prefix = [yaml[get_compat(irq_parent)].get('cell_string', []).upper()]
+        l_cell_prefix = [yaml[get_compat(irq_parent)].get(
+            'cell_string', []).upper()]
 
         for i in range(cell_parent['props']['#interrupt-cells']):
             l_cell_name = [cell_yaml['#cells'][i].upper()]
@@ -231,8 +244,8 @@ def extract_interrupts(node_address, yaml, y_key, names, defs, def_label):
     return
 
+
 def extract_reg_prop(node_address, names, defs, def_label, div, post_label):
-    node = reduced[node_address]
     props = list(reduced[node_address]['props']['reg'])
@@ -241,7 +254,8 @@ def extract_reg_prop(node_address, names, defs, def_label, div, post_label):
     address = ''
     for comp in node_address.split('/')[1:]:
         address += '/' + comp
-        address_cells = reduced[address]['props'].get('#address-cells', address_cells)
+        address_cells = reduced[address]['props'].get(
+            '#address-cells', address_cells)
         size_cells = reduced[address]['props'].get('#size-cells', size_cells)
 
     if post_label is None:
@@ -288,7 +302,9 @@ def extract_reg_prop(node_address, names, defs, def_label, div, post_label):
     return
 
-def extract_cells(node_address, yaml, y_key, names, index, prefix, defs, def_label):
+
+def extract_cells(node_address, yaml, y_key, names, index, prefix, defs,
+                  def_label):
     try:
         props = list(reduced[node_address]['props'].get(y_key))
     except:
@@ -299,7 +315,8 @@ def extract_cells(node_address, yaml, y_key, names, index, prefix, defs, def_lab
     try:
         cell_yaml = yaml[get_compat(cell_parent)]
     except:
-        raise Exception("Could not find yaml description for " + cell_parent['name'])
+        raise Exception(
+            "Could not find yaml description for " + cell_parent['name'])
 
     try:
         name = names.pop(0).upper()
@@ -334,11 +351,14 @@ def extract_cells(node_address, yaml, y_key, names, index, prefix, defs, def_lab
     # recurse if we have anything left
     if len(props):
-        extract_cells(node_address, yaml, y_key, names, index + 1, prefix, defs, def_label)
+        extract_cells(node_address, yaml, y_key, names,
+                      index + 1, prefix, defs, def_label)
 
     return
 
-def extract_pinctrl(node_address, yaml, pinconf, names, index, defs, def_label):
+
+def extract_pinctrl(node_address, yaml, pinconf, names, index, defs,
+                    def_label):
     prop_list = []
     if not isinstance(pinconf, list):
@@ -347,38 +367,41 @@ def extract_pinctrl(node_address, yaml, pinconf, names, index, defs, def_label):
         prop_list = list(pinconf)
 
     def_prefix = def_label.split('_')
-    target_node = node_address
 
     prop_def = {}
     for p in prop_list:
         pin_node_address = phandles[p]
-        pin_entry = reduced[pin_node_address]
         parent_address = '/'.join(pin_node_address.split('/')[:-1])
         pin_parent = reduced[parent_address]
         cell_yaml = yaml[get_compat(pin_parent)]
         cell_prefix = cell_yaml.get('cell_string', None)
         post_fix = []
 
-        if cell_prefix != None:
+        if cell_prefix is not None:
             post_fix.append(cell_prefix)
 
         for subnode in reduced.keys():
             if pin_node_address in subnode and pin_node_address != subnode:
                 # found a subnode underneath the pinmux handle
-                node_label = subnode.split('/')[-2:]
                 pin_label = def_prefix + post_fix + subnode.split('/')[-2:]
 
                 for i, pin in enumerate(reduced[subnode]['props']['pins']):
-                    key_label = list(pin_label) + [cell_yaml['#cells'][0]] + [str(i)]
-                    func_label = key_label[:-2] + [cell_yaml['#cells'][1]] + [str(i)]
-                    key_label = convert_string_to_label('_'.join(key_label)).upper()
-                    func_label = convert_string_to_label('_'.join(func_label)).upper()
+                    key_label = list(pin_label) + \
+                        [cell_yaml['#cells'][0]] + [str(i)]
+                    func_label = key_label[:-2] + \
+                        [cell_yaml['#cells'][1]] + [str(i)]
+                    key_label = convert_string_to_label(
+                        '_'.join(key_label)).upper()
+                    func_label = convert_string_to_label(
+                        '_'.join(func_label)).upper()
 
                     prop_def[key_label] = pin
-                    prop_def[func_label] = reduced[subnode]['props']['function']
+                    prop_def[func_label] = \
+                        reduced[subnode]['props']['function']
 
     insert_defs(node_address, defs, prop_def, {})
 
+
 def extract_single(node_address, yaml, prop, key, prefix, defs, def_label):
     prop_def = {}
@@ -404,9 +427,9 @@ def extract_single(node_address, yaml, prop, key, prefix, defs, def_label):
     return
 
-def extract_property(node_compat, yaml, node_address, y_key, y_val, names, prefix, defs, label_override):
-    node = reduced[node_address]
+def extract_property(node_compat, yaml, node_address, y_key, y_val, names,
+                     prefix, defs, label_override):
 
     if 'base_label' in yaml[node_compat]:
         def_label = yaml[node_compat].get('base_label')
@@ -418,12 +441,14 @@ def extract_property(node_compat, yaml, node_address, y_key, y_val, names, prefi
         def_label += '_' + label_override
 
     if y_key == 'reg':
-        extract_reg_prop(node_address, names, defs, def_label, 1, y_val.get('label', None))
+        extract_reg_prop(node_address, names, defs, def_label,
+                         1, y_val.get('label', None))
     elif y_key == 'interrupts' or y_key == 'interupts-extended':
         extract_interrupts(node_address, yaml, y_key, names, defs, def_label)
     elif 'pinctrl-' in y_key:
         p_index = int(y_key.split('-')[1])
-        extract_pinctrl(node_address, yaml, reduced[node_address]['props'][y_key],
+        extract_pinctrl(node_address, yaml,
+                        reduced[node_address]['props'][y_key],
                         names[p_index], p_index, defs, def_label)
     elif 'clocks' in y_key:
         extract_cells(node_address, yaml, y_key,
@@ -435,13 +460,14 @@ def extract_property(node_compat, yaml, node_address, y_key, y_val, names, prefi
     return
 
+
 def extract_node_include_info(reduced, root_node_address, sub_node_address,
                               yaml, defs, structs, y_sub):
     node = reduced[sub_node_address]
     node_compat = get_compat(reduced[root_node_address])
     label_override = None
 
-    if not node_compat in yaml.keys():
+    if node_compat not in yaml.keys():
         return {}, {}
 
     if y_sub is None:
@@ -453,7 +479,8 @@ def extract_node_include_info(reduced, root_node_address, sub_node_address,
     for yp in y_node['properties']:
         if yp.get('label') is not None:
             if node['props'].get('label') is not None:
-                label_override = convert_string_to_label(node['props']['label']).upper()
+                label_override = convert_string_to_label(
+                    node['props']['label']).upper()
             break
 
     # check to see if we need to process the properties
@@ -462,17 +489,13 @@ def extract_node_include_info(reduced, root_node_address, sub_node_address,
             if 'properties' in v:
                 for c in reduced:
                     if root_node_address + '/' in c:
-                        extract_node_include_info(reduced, root_node_address, c, yaml, defs, structs, v)
+                        extract_node_include_info(
+                            reduced, root_node_address, c, yaml, defs, structs,
+                            v)
 
             if 'generation' in v:
-                if v['generation'] == 'define':
-                    label = v.get('define_string')
-                    storage = defs
-                else:
-                    label = v.get('structures_string')
-                    storage = structs
                 prefix = []
-                if v.get('use-name-prefix') != None:
+                if v.get('use-name-prefix') is not None:
                     prefix = [convert_string_to_label(k.upper())]
 
                 for c in node['props'].keys():
@@ -491,10 +514,13 @@ def extract_node_include_info(reduced, root_node_address, sub_node_address,
                         if not isinstance(names, list):
                             names = [names]
 
-                        extract_property(node_compat, yaml, sub_node_address, c, v, names, prefix, defs, label_override)
+                        extract_property(
+                            node_compat, yaml, sub_node_address, c, v, names,
+                            prefix, defs, label_override)
 
     return
 
+
 def yaml_collapse(yaml_list):
     collapsed = dict(yaml_list)
@@ -533,8 +559,8 @@ def print_key_value(k, v, tabstop):
     return
 
+
 def generate_keyvalue_file(defs, args):
-    compatible = reduced['/']['props']['compatible'][0]
     node_keys = sorted(defs.keys())
     for node in node_keys:
@@ -552,17 +578,18 @@ def generate_keyvalue_file(defs, args):
         sys.stdout.write("\n")
 
+
 def generate_include_file(defs, args):
     compatible = reduced['/']['props']['compatible'][0]
 
     sys.stdout.write("/**************************************************\n")
     sys.stdout.write(" * Generated include file for " + compatible)
     sys.stdout.write("\n")
-    sys.stdout.write(" * DO NOT MODIFY\n");
+    sys.stdout.write(" * DO NOT MODIFY\n")
     sys.stdout.write(" */\n")
     sys.stdout.write("\n")
-    sys.stdout.write("#ifndef _DEVICE_TREE_BOARD_H" + "\n");
-    sys.stdout.write("#define _DEVICE_TREE_BOARD_H" + "\n");
+    sys.stdout.write("#ifndef _DEVICE_TREE_BOARD_H" + "\n")
+    sys.stdout.write("#define _DEVICE_TREE_BOARD_H" + "\n")
     sys.stdout.write("\n")
 
     node_keys = sorted(defs.keys())
@@ -592,17 +619,21 @@ def generate_include_file(defs, args):
     if args.fixup and os.path.exists(args.fixup):
         sys.stdout.write("\n")
-        sys.stdout.write("/* Following definitions fixup the generated include */\n")
+        sys.stdout.write(
+            "/* Following definitions fixup the generated include */\n")
         try:
             with open(args.fixup, "r") as fd:
                 for line in fd.readlines():
                     sys.stdout.write(line)
                 sys.stdout.write("\n")
         except:
-            raise Exception("Input file " + os.path.abspath(args.fixup) + " does not exist.")
+            raise Exception(
+                "Input file " + os.path.abspath(args.fixup) +
+                " does not exist.")
 
     sys.stdout.write("#endif\n")
 
+
 def lookup_defs(defs, node, key):
     if node not in defs:
         return None
@@ -615,17 +646,18 @@ def lookup_defs(defs, node, key):
 
 def parse_arguments():
-    parser = argparse.ArgumentParser(description = __doc__,
-                                     formatter_class = argparse.RawDescriptionHelpFormatter)
+    rdh = argparse.RawDescriptionHelpFormatter
+    parser = argparse.ArgumentParser(description=__doc__, formatter_class=rdh)
     parser.add_argument("-d", "--dts", help="DTS file")
     parser.add_argument("-y", "--yaml", help="YAML file")
    parser.add_argument("-f", "--fixup", help="Fixup file")
     parser.add_argument("-k", "--keyvalue", action="store_true",
-                        help="Generate file to be included by the build system")
+                        help="Generate include file for the build system")
 
     return parser.parse_args()
 
+
 def main():
     args = parse_arguments()
     if not args.dts or not args.yaml:
@@ -636,7 +668,8 @@ def main():
         with open(args.dts, "r") as fd:
             d = parse_file(fd)
     except:
-        raise Exception("Input file " + os.path.abspath(args.dts) + " does not exist.")
+        raise Exception(
+            "Input file " + os.path.abspath(args.dts) + " does not exist.")
 
     # compress list to nodes w/ paths, add interrupt parent
     compress_nodes(d['/'], '/')
@@ -667,7 +700,8 @@ def main():
     if zephyrbase is not None:
         for filename in listdir(zephyrbase + '/dts/common/yaml'):
             if re.match('.*\.yaml\Z', filename):
-                yaml_files.append(os.path.realpath(zephyrbase + '/dts/common/yaml/' + filename))
+                yaml_files.append(os.path.realpath(
+                    zephyrbase + '/dts/common/yaml/' + filename))
 
     yaml_list = {}
     file_load_list = set()
@@ -677,7 +711,7 @@ def main():
                 c = line.split(':')[1].strip()
                 c = c.strip('"')
                 if c in s:
-                    if not file in file_load_list:
+                    if file not in file_load_list:
                         file_load_list.add(file)
                         with open(file, 'r') as yf:
                             yaml_list[c] = yaml.load(yf, Loader)
@@ -692,21 +726,25 @@ def main():
     structs = {}
     for k, v in reduced.items():
         node_compat = get_compat(v)
-        if node_compat != None and node_compat in yaml_list:
-            extract_node_include_info(reduced, k, k, yaml_list, defs, structs, None)
+        if node_compat is not None and node_compat in yaml_list:
+            extract_node_include_info(
+                reduced, k, k, yaml_list, defs, structs, None)
 
     if defs == {}:
         raise Exception("No information parsed from dts file.")
 
     if 'zephyr,flash' in chosen:
-        extract_reg_prop(chosen['zephyr,flash'], None, defs, "CONFIG_FLASH", 1024, None)
+        extract_reg_prop(chosen['zephyr,flash'], None,
+                         defs, "CONFIG_FLASH", 1024, None)
     else:
-        # We will add address and size of 0 for systems with no flash controller
+        # We will add address/size of 0 for systems with no flash controller
        # This is what they already do in the Kconfig options anyway
-        defs['dummy-flash'] = { 'CONFIG_FLASH_BASE_ADDRESS': 0, 'CONFIG_FLASH_SIZE': 0 }
+        defs['dummy-flash'] = {'CONFIG_FLASH_BASE_ADDRESS': 0,
+                               'CONFIG_FLASH_SIZE': 0}
 
     if 'zephyr,sram' in chosen:
-        extract_reg_prop(chosen['zephyr,sram'], None, defs, "CONFIG_SRAM", 1024, None)
+        extract_reg_prop(chosen['zephyr,sram'], None,
+                         defs, "CONFIG_SRAM", 1024, None)
 
     # only compute the load offset if a code partition exists and it is not the
     # same as the flash base address
@@ -715,8 +753,6 @@ def main():
        'zephyr,flash' in chosen and \
       reduced[chosen['zephyr,flash']] is not \
       reduced[chosen['zephyr,code-partition']]:
-        flash_base = lookup_defs(defs, chosen['zephyr,flash'],
-                                 'CONFIG_FLASH_BASE_ADDRESS')
         part_defs = {}
         extract_reg_prop(chosen['zephyr,code-partition'], None, part_defs,
                          "PARTITION", 1, 'offset')
@@ -738,5 +774,6 @@ def main():
     else:
         generate_include_file(defs, args)
 
+
 if __name__ == '__main__':
     main()