scripts: python: cleanup script and fix PEP8 issues

Ran scripts through flake8 and fixed issues.

Signed-off-by: Anas Nashif <anas.nashif@intel.com>
This commit is contained in:
Anas Nashif 2017-12-12 08:19:25 -05:00 committed by Anas Nashif
commit 7256553955
14 changed files with 346 additions and 228 deletions

View file

@ -59,7 +59,7 @@ for line in fileinput.input():
last_sec = None
continue
if last_sec != None:
if last_sec is not None:
dv = vma - last_vma
dl = lma - last_lma
if dv != dl:

View file

@ -13,25 +13,30 @@ import codecs
import gzip
import io
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-f", "--file", required=True, help="Input file")
parser.add_argument("-g", "--gzip", action="store_true",
help="Compress the file using gzip before output")
help="Compress the file using gzip before output")
args = parser.parse_args()
def get_nice_string(list_or_iterator):
return ", ".join( "0x" + str(x) for x in list_or_iterator)
return ", ".join("0x" + str(x) for x in list_or_iterator)
def make_hex(chunk):
hexdata = codecs.encode(chunk, 'hex').decode("utf-8")
hexlist = map(''.join, zip(*[iter(hexdata)]*2))
hexlist = map(''.join, zip(*[iter(hexdata)] * 2))
print(get_nice_string(hexlist) + ',')
def main():
parse_args()
@ -45,5 +50,6 @@ def main():
for chunk in iter(lambda: fp.read(8), b''):
make_hex(chunk)
if __name__ == "__main__":
main()

View file

@ -46,6 +46,7 @@ config_regex = \
b"(?P<comment>(^\s*#.*\n)+)" \
b"(?P<regex>(^[^#].*\n)+)"
def config_import_file(filename):
"""
Imports regular expressions from any file *.conf in the given path,
@ -53,7 +54,7 @@ def config_import_file(filename):
"""
try:
with open(filename, "rb") as f:
mm = mmap.mmap(f.fileno(), 0, access = mmap.ACCESS_READ)
mm = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
# That regex basically selects any block of
# lines that is not a comment block. The
# finditer() finds all the blocks and selects
@ -81,6 +82,7 @@ def config_import_file(filename):
logging.error("E: %s: can't load config file: %s" % (filename, e))
raise
def config_import_path(path):
"""
Imports regular expressions from any file *.conf in the given path
@ -95,8 +97,10 @@ def config_import_path(path):
continue
config_import_file(filename)
except Exception as e:
raise Exception("E: %s: can't load config files: %s %s" % (path,
e, traceback.format_exc()))
raise Exception(
"E: %s: can't load config files: %s %s" %
(path, e, traceback.format_exc()))
def config_import(paths):
"""
@ -108,7 +112,7 @@ def config_import(paths):
_paths = []
# Go over the list, flush it if the user gave an empty path ("")
for path in paths:
if path == "" or path == None:
if path == "" or path is None:
logging.debug("flushing current config list: %s", _paths)
_paths = []
else:
@ -117,28 +121,29 @@ def config_import(paths):
for path in _paths:
config_import_path(path)
arg_parser = argparse.ArgumentParser(
description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
arg_parser.add_argument("-v", "--verbosity", action = "count", default = 0,
help = "increase verbosity")
arg_parser.add_argument("-q", "--quiet", action = "count", default = 0,
help = "decrease verbosity")
arg_parser.add_argument("-e", "--errors", action = "store", default = None,
help = "file where to store errors")
arg_parser.add_argument("-w", "--warnings", action = "store", default = None,
help = "file where to store warnings")
arg_parser.add_argument("-c", "--config-dir", action = "append", nargs = "?",
default = [ ".known-issues/" ],
help = "configuration directory (multiple can be "
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
arg_parser.add_argument("-v", "--verbosity", action="count", default=0,
help="increase verbosity")
arg_parser.add_argument("-q", "--quiet", action="count", default=0,
help="decrease verbosity")
arg_parser.add_argument("-e", "--errors", action="store", default=None,
help="file where to store errors")
arg_parser.add_argument("-w", "--warnings", action="store", default=None,
help="file where to store warnings")
arg_parser.add_argument("-c", "--config-dir", action="append", nargs="?",
default=[".known-issues/"],
help="configuration directory (multiple can be "
"given; if none given, clears the current list) "
"%(default)s")
arg_parser.add_argument("FILENAMEs", nargs = "+",
help = "files to filter")
arg_parser.add_argument("FILENAMEs", nargs="+",
help="files to filter")
args = arg_parser.parse_args()
logging.basicConfig(level = 40 - 10 * (args.verbosity - args.quiet),
format = "%(levelname)s: %(message)s")
logging.basicConfig(level=40 - 10 * (args.verbosity - args.quiet),
format="%(levelname)s: %(message)s")
path = ".known-issues/"
logging.debug("Reading configuration from directory `%s`", path)
@ -155,16 +160,19 @@ if args.errors:
else:
errors = None
def report_error(data):
sys.stdout.write(data)
if errors:
errors.write(data)
def report_warning(data):
sys.stderr.write(data)
if warnings:
warnings.write(data)
if args.warnings:
warnings = open(args.warnings, "w")
else:
@ -174,19 +182,22 @@ if args.errors:
else:
errors = None
def report_error(data):
sys.stdout.write(data.decode('utf-8'))
if errors:
errors.write(data)
def report_warning(data):
sys.stderr.write(data)
if warnings:
warnings.write(data)
for filename in args.FILENAMEs:
if os.stat(filename).st_size == 0:
continue # skip empty log files
continue # skip empty log files
try:
with open(filename, "r+b") as f:
logging.info("%s: filtering", filename)
@ -198,14 +209,17 @@ for filename in args.FILENAMEs:
filename, origin, ex.pattern)
for m in re.finditer(ex.pattern, mm, re.MULTILINE):
logging.info("%s: %s-%s: match from from %s %s",
filename, m.start(), m.end(), origin, flags)
filename, m.start(), m.end(), origin, flags)
if 'warning' in flags:
exclude_ranges.append((m.start(), m.end(), True))
else:
exclude_ranges.append((m.start(), m.end(), False))
exclude_ranges = sorted(exclude_ranges, key=lambda r: r[0])
logging.warning("%s: ranges excluded: %s", filename, exclude_ranges)
logging.warning(
"%s: ranges excluded: %s",
filename,
exclude_ranges)
# Decide what to do with what has been filtered; warnings
# go to stderr and warnings file, errors to stdout, what
@ -216,18 +230,18 @@ for filename in args.FILENAMEs:
if b > offset:
# We have something not caught by a filter, an error
logging.info("%s: error range (%d, %d), from %d %dB",
filename, offset, b, offset, b - offset)
filename, offset, b, offset, b - offset)
report_error(mm.read(b - offset))
mm.seek(b)
if warning == True: # A warning, print it
mm.seek(b)
logging.info("%s: warning range (%d, %d), from %d %dB",
filename, b, e, offset, e - b)
filename, b, e, offset, e - b)
report_warning(mm.read(e - b))
else: # Exclude, ignore it
d = b - offset
logging.info("%s: exclude range (%d, %d), from %d %dB",
filename, b, e, offset, d)
filename, b, e, offset, d)
offset = e
mm.seek(offset)
if len(mm) != offset:

View file

@ -10,12 +10,11 @@
import os
import re
from optparse import OptionParser
import sys
import argparse
import subprocess
import json
import operator
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
@ -50,20 +49,23 @@ parser.add_option("-n", "--nm", type="string", dest="bin_nm",
# Return a dict containing symbol_name: path/to/file/where/it/originates
# for all symbols from the .elf file. Optionally strips the path according
# to the passed sub-path
def load_symbols_and_paths(bin_nm, elf_file, path_to_strip = None):
def load_symbols_and_paths(bin_nm, elf_file, path_to_strip=None):
symbols_paths = {}
nm_out = subprocess.check_output([bin_nm, elf_file, "-S", "-l", "--size-sort", "--radix=d"])
nm_out = subprocess.check_output(
[bin_nm, elf_file, "-S", "-l", "--size-sort", "--radix=d"])
for line in nm_out.decode('utf8').split('\n'):
fields = line.replace('\t', ' ').split(' ')
# Get rid of trailing empty field
if len(fields) == 1 and fields[0] == '':
continue
assert len(fields)>=4
if len(fields)<5:
assert len(fields) >= 4
if len(fields) < 5:
path = ":/" + fields[3]
else:
path = fields[4].split(':')[0]
if path_to_strip != None:
if path_to_strip is not None:
if path_to_strip in path:
path = path.replace(path_to_strip, "") + '/' + fields[3]
else:
@ -71,53 +73,64 @@ def load_symbols_and_paths(bin_nm, elf_file, path_to_strip = None):
symbols_paths[fields[3]] = path
return symbols_paths
def get_section_size(f, section_name):
decimal_size = 0
re_res = re.search(r"(.*] "+section_name+".*)", f, re.MULTILINE)
if re_res != None :
re_res = re.search(r"(.*] " + section_name + ".*)", f, re.MULTILINE)
if re_res is not None:
# Replace multiple spaces with one space
# Skip first characters to avoid having 1 extra random space
res = ' '.join(re_res.group(1).split())[5:]
decimal_size = int(res.split()[4], 16)
return decimal_size
def get_footprint_from_bin_and_statfile(bin_file, stat_file, total_flash, total_ram):
"""Compute flash and RAM memory footprint from a .bin and.stat file"""
def get_footprint_from_bin_and_statfile(
bin_file, stat_file, total_flash, total_ram):
"""Compute flash and RAM memory footprint from a .bin and .stat file"""
f = open(stat_file).read()
# Get kctext + text + ctors + rodata + kcrodata segment size
total_used_flash = os.path.getsize(bin_file)
#getting used ram on target
total_used_ram = (get_section_size(f, "noinit") + get_section_size(f, "bss")
+ get_section_size(f, "initlevel") + get_section_size(f, "datas") + get_section_size(f, ".data")
+ get_section_size(f, ".heap") + get_section_size(f, ".stack") + get_section_size(f, ".bss")
+ get_section_size(f, ".panic_section"))
# getting used ram on target
total_used_ram = (get_section_size(f, "noinit") +
get_section_size(f, "bss") +
get_section_size(f, "initlevel") +
get_section_size(f, "datas") +
get_section_size(f, ".data") +
get_section_size(f, ".heap") +
get_section_size(f, ".stack") +
get_section_size(f, ".bss") +
get_section_size(f, ".panic_section"))
total_percent_ram = 0
total_percent_flash = 0
if total_ram > 0:
total_percent_ram = float(total_used_ram) / total_ram * 100
if total_flash >0:
if total_flash > 0:
total_percent_flash = float(total_used_flash) / total_flash * 100
res = { "total_flash": total_used_flash,
"percent_flash": total_percent_flash,
"total_ram": total_used_ram,
"percent_ram": total_percent_ram}
res = {"total_flash": total_used_flash,
"percent_flash": total_percent_flash,
"total_ram": total_used_ram,
"percent_ram": total_percent_ram}
return res
def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source_dir, features_json):
def generate_target_memory_section(
bin_objdump, bin_nm, out, kernel_name, source_dir, features_json):
features_path_data = None
try:
features_path_data = json.loads(open(features_json, 'r').read())
except:
except BaseException:
pass
bin_file_abs = os.path.join(out, kernel_name+'.bin')
elf_file_abs = os.path.join(out, kernel_name+'.elf')
bin_file_abs = os.path.join(out, kernel_name + '.bin')
elf_file_abs = os.path.join(out, kernel_name + '.elf')
# First deal with size on flash. These are the symbols flagged as LOAD in objdump output
# First deal with size on flash. These are the symbols flagged as LOAD in
# objdump output
size_out = subprocess.check_output([bin_objdump, "-hw", elf_file_abs])
loaded_section_total = 0
loaded_section_names = []
@ -127,9 +140,11 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
ram_section_names_sizes = {}
for line in size_out.decode('utf8').split('\n'):
if "LOAD" in line:
loaded_section_total = loaded_section_total + int(line.split()[2], 16)
loaded_section_total = loaded_section_total + \
int(line.split()[2], 16)
loaded_section_names.append(line.split()[1])
loaded_section_names_sizes[line.split()[1]] = int(line.split()[2], 16)
loaded_section_names_sizes[line.split()[1]] = int(
line.split()[2], 16)
if "ALLOC" in line and "READONLY" not in line and "rodata" not in line and "CODE" not in line:
ram_section_total = ram_section_total + int(line.split()[2], 16)
ram_section_names.append(line.split()[1])
@ -147,7 +162,7 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
splitted_path = path.split('/')
cur = None
for p in splitted_path:
if cur == None:
if cur is None:
cur = p
else:
cur = cur + '/' + p
@ -176,7 +191,6 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
siblings = _siblings_for_node(tree, node)
return max([tree[e] for e in siblings])
# Extract the list of symbols a second time but this time using the objdump tool
# which provides more info than nm
symbols_out = subprocess.check_output([bin_objdump, "-tw", elf_file_abs])
@ -213,21 +227,24 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
def _check_all_symbols(symbols_struct, features_list):
out = ""
sorted_nodes = sorted(symbols_struct.items(), key=operator.itemgetter(0))
sorted_nodes = sorted(symbols_struct.items(),
key=operator.itemgetter(0))
named_symbol_filter = re.compile('.*\.[a-zA-Z]+/.*')
out_symbols_filter = re.compile('^:/')
for symbpath in sorted_nodes:
matched = 0
# The files and folders (not matching regex) are discarded
# like: folder folder/file.ext
is_symbol=named_symbol_filter.match(symbpath[0])
is_generated=out_symbols_filter.match(symbpath[0])
if is_symbol == None and is_generated == None:
is_symbol = named_symbol_filter.match(symbpath[0])
is_generated = out_symbols_filter.match(symbpath[0])
if is_symbol is None and is_generated is None:
continue
# The symbols inside a file are kept: folder/file.ext/symbol
# and unrecognized paths too (":/")
for feature in features_list:
matched = matched + _does_symbol_matches_feature(symbpath[0], symbpath[1], feature)
matched = matched + \
_does_symbol_matches_feature(
symbpath[0], symbpath[1], feature)
if matched is 0:
out += "UNCATEGORIZED: %s %d<br/>" % (symbpath[0], symbpath[1])
return out
@ -239,7 +256,8 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
# filter out if the include-filter is not in the symbol string
if inc_path not in symbol:
continue
# if the symbol match the include-filter, check against exclude-filter
# if the symbol match the include-filter, check against
# exclude-filter
is_excluded = 0
for exc_path in feature["excludes"]:
if exc_path in symbol:
@ -256,15 +274,13 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
matched = matched + child_matched
return matched
# Create a simplified tree keeping only the most important contributors
# This is used for the pie diagram summary
min_parent_size = bin_size/25
min_sibling_size = bin_size/35
min_parent_size = bin_size / 25
min_sibling_size = bin_size / 35
tmp = {}
for e in data_nodes:
if _parent_for_node(e) == None:
if _parent_for_node(e) is None:
continue
if data_nodes[_parent_for_node(e)] < min_parent_size:
continue
@ -290,10 +306,9 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
else:
filtered_data_nodes[e] = tmp[e]
def _parent_level_3_at_most(node):
e = _parent_for_node(node)
while e.count('/')>2:
while e.count('/') > 2:
e = _parent_for_node(e)
return e
@ -303,14 +318,15 @@ def generate_target_memory_section(bin_objdump, bin_nm, out, kernel_name, source
def print_tree(data, total, depth):
base = os.environ['ZEPHYR_BASE']
totp = 0
print('{:92s} {:10s} {:8s}'.format(bcolors.FAIL + "Path", "Size", "%" + bcolors.ENDC))
print('{:92s} {:10s} {:8s}'.format(
bcolors.FAIL + "Path", "Size", "%" + bcolors.ENDC))
print("'='*110i")
for i in sorted(data):
p = i.split("/")
if depth and len(p) > depth:
continue
percent = 100 * float(data[i])/float(total)
percent = 100 * float(data[i]) / float(total)
percent_c = percent
if len(p) < 2:
totp += percent
@ -320,11 +336,13 @@ def print_tree(data, total, depth):
s = bcolors.WARNING + p[-1] + bcolors.ENDC
else:
s = bcolors.OKBLUE + p[-1] + bcolors.ENDC
print('{:80s} {:20d} {:8.2f}%'.format(" "*(len(p)-1) + s, data[i], percent_c ))
print('{:80s} {:20d} {:8.2f}%'.format(
" " * (len(p) - 1) + s, data[i], percent_c))
else:
print('{:80s} {:20d} {:8.2f}%'.format(bcolors.OKBLUE + i + bcolors.ENDC, data[i], percent_c ))
print('{:80s} {:20d} {:8.2f}%'.format(
bcolors.OKBLUE + i + bcolors.ENDC, data[i], percent_c))
print('='*110)
print('=' * 110)
print('{:92d}'.format(total))
return totp
@ -332,14 +350,18 @@ def print_tree(data, total, depth):
binary = os.path.join(options.outdir, options.binary + ".elf")
if options.outdir and os.path.exists(binary):
fp = get_footprint_from_bin_and_statfile("%s/%s.bin" %(options.outdir, options.binary),
"%s/%s.stat" %(options.outdir,options.binary), 0, 0 )
fp = get_footprint_from_bin_and_statfile(
"%s/%s.bin" % (options.outdir, options.binary),
"%s/%s.stat" % (options.outdir, options.binary),
0, 0)
base = os.environ['ZEPHYR_BASE']
ram, data = generate_target_memory_section(options.bin_objdump, options.bin_nm, options.outdir, options.binary, base + '/', None)
ram, data = generate_target_memory_section(
options.bin_objdump, options.bin_nm, options.outdir, options.binary,
base + '/', None)
if options.rom:
print_tree(data, fp['total_flash'], options.depth)
if options.ram:
print_tree(ram, fp['total_ram'], options.depth)
else:
print("%s does not exist." %(binary))
print("%s does not exist." % (binary))

View file

@ -17,11 +17,13 @@ if LooseVersion(elftools.__version__) < LooseVersion('0.24'):
sys.stderr.write("pyelftools is out of date, need version 0.24 or later\n")
sys.exit(1)
def debug(text):
if not args.verbose:
return
sys.stdout.write(os.path.basename(sys.argv[0]) + ": " + text + "\n")
def error(text):
sys.stderr.write(os.path.basename(sys.argv[0]) + ": " + text + "\n")
sys.exit(1)
@ -29,13 +31,15 @@ def error(text):
gdt_pd_fmt = "<HIH"
FLAGS_GRAN = 1 << 7 # page granularity
FLAGS_GRAN = 1 << 7 # page granularity
ACCESS_EX = 1 << 3 # executable
ACCESS_DC = 1 << 2 # direction/conforming
ACCESS_RW = 1 << 1 # read or write permission
# 6 byte pseudo descriptor, but we're going to actually use this as the
# zero descriptor and return 8 bytes
def create_gdt_pseudo_desc(addr, size):
debug("create pseudo decriptor: %x %x" % (addr, size))
# ...and take back one byte for the Intel god whose Ark this is...
@ -57,12 +61,13 @@ def chop_base_limit(base, limit):
gdt_ent_fmt = "<HHBBBB"
def create_code_data_entry(base, limit, dpl, flags, access):
debug("create code or data entry: %x %x %x %x %x" %
(base, limit, dpl, flags, access))
(base, limit, dpl, flags, access))
base_lo, base_mid, base_hi, limit_lo, limit_hi = chop_base_limit(base,
limit)
limit)
# This is a valid descriptor
present = 1
@ -86,19 +91,18 @@ def create_code_data_entry(base, limit, dpl, flags, access):
def create_tss_entry(base, limit, dpl):
debug("create TSS entry: %x %x %x" % (base, limit, dpl));
debug("create TSS entry: %x %x %x" % (base, limit, dpl))
present = 1
base_lo, base_mid, base_hi, limit_lo, limit_hi, = chop_base_limit(base,
limit)
limit)
type_code = 0x9 # non-busy 32-bit TSS descriptor
type_code = 0x9 # non-busy 32-bit TSS descriptor
gran = 0
flags = (gran << 7) | limit_hi
type_byte = ((present << 7) | (dpl << 5) | type_code)
return struct.pack(gdt_ent_fmt, limit_lo, base_lo, base_mid,
type_byte, flags, base_hi)
@ -114,15 +118,16 @@ def get_symbols(obj):
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-k", "--kernel", required=True,
help="Zephyr kernel image")
help="Zephyr kernel image")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print extra debugging information")
help="Print extra debugging information")
parser.add_argument("-o", "--output-gdt", required=True,
help="output GDT binary")
help="output GDT binary")
args = parser.parse_args()
@ -153,11 +158,11 @@ def main():
# Selector 0x08: code descriptor
fp.write(create_code_data_entry(0, 0xFFFFF, 0,
FLAGS_GRAN, ACCESS_EX | ACCESS_RW))
FLAGS_GRAN, ACCESS_EX | ACCESS_RW))
# Selector 0x10: data descriptor
fp.write(create_code_data_entry(0, 0xFFFFF, 0,
FLAGS_GRAN, ACCESS_RW))
FLAGS_GRAN, ACCESS_RW))
if num_entries >= 5:
main_tss = syms["_main_tss"]
@ -172,13 +177,12 @@ def main():
if num_entries == 7:
# Selector 0x28: code descriptor, dpl = 3
fp.write(create_code_data_entry(0, 0xFFFFF, 3,
FLAGS_GRAN, ACCESS_EX | ACCESS_RW))
FLAGS_GRAN, ACCESS_EX | ACCESS_RW))
# Selector 0x30: data descriptor, dpl = 3
fp.write(create_code_data_entry(0, 0xFFFFF, 3,
FLAGS_GRAN, ACCESS_RW))
FLAGS_GRAN, ACCESS_RW))
if __name__ == "__main__":
main()

View file

@ -23,34 +23,38 @@ KERNEL_CODE_SEG = 0x08
# These exception vectors push an error code onto the stack.
ERR_CODE_VECTORS = [8, 10, 11, 12, 13, 14, 17]
def debug(text):
if not args.verbose:
return
sys.stdout.write(os.path.basename(sys.argv[0]) + ": " + text + "\n")
def error(text):
sys.stderr.write(os.path.basename(sys.argv[0]) + ": " + text + "\n")
sys.exit(1)
# See Section 6.11 of the Intel Architecture Software Developer's Manual
gate_desc_format = "<HHBBH"
def create_irq_gate(handler, dpl):
present = 1
gate_type = 0xE # 32-bit interrupt gate
gate_type = 0xE # 32-bit interrupt gate
type_attr = gate_type | (dpl << 5) | (present << 7)
offset_hi = handler >> 16
offset_lo = handler & 0xFFFF
data = struct.pack(gate_desc_format, offset_lo, KERNEL_CODE_SEG, 0,
type_attr, offset_hi)
type_attr, offset_hi)
return data
def create_task_gate(tss, dpl):
present = 1
gate_type = 0x5 # 32-bit task gate
gate_type = 0x5 # 32-bit task gate
type_attr = gate_type | (dpl << 5) | (present << 7)
data = struct.pack(gate_desc_format, 0, tss, 0, type_attr, 0)
@ -73,13 +77,16 @@ def create_idt_binary(idt_config, filename):
fp.write(data)
map_fmt = "<B"
def create_irq_vec_map_binary(irq_vec_map, filename):
with open(filename, "wb") as fp:
for i in irq_vec_map:
fp.write(struct.pack(map_fmt, i))
def priority_range(prio):
# Priority levels are represented as groups of 16 vectors within the IDT
base = 32 + (prio * 16)
@ -93,7 +100,7 @@ def update_irq_vec_map(irq_vec_map, irq, vector, max_irq):
if irq >= max_irq:
error("irq %d specified, but CONFIG_MAX_IRQ_LINES is %d" %
(irq, max_irq))
(irq, max_irq))
# This table will never have values less than 32 since those are for
# exceptions; 0 means unconfigured
@ -117,9 +124,9 @@ def setup_idt(spur_code, spur_nocode, intlist, max_vec, max_irq):
if vec >= max_vec:
error("Vector %d specified, but size of IDT is only %d vectors" %
(vec, max_vec))
(vec, max_vec))
if vectors[vec] != None:
if vectors[vec] is not None:
error("Multiple assignments for vector %d" % vec)
vectors[vec] = (handler, tss, dpl)
@ -133,7 +140,7 @@ def setup_idt(spur_code, spur_nocode, intlist, max_vec, max_irq):
for vi in priority_range(prio):
if vi >= max_vec:
break
if vectors[vi] == None:
if vectors[vi] is None:
vec = vi
break
@ -145,7 +152,7 @@ def setup_idt(spur_code, spur_nocode, intlist, max_vec, max_irq):
# Pass 3: fill in unused vectors with spurious handler at dpl=0
for i in range(max_vec):
if vectors[i] != None:
if vectors[i] is not None:
continue
if i in ERR_CODE_VECTORS:
@ -157,6 +164,7 @@ def setup_idt(spur_code, spur_nocode, intlist, max_vec, max_irq):
return vectors, irq_vec_map
def get_symbols(obj):
for section in obj.iter_sections():
if isinstance(section, SymbolTableSection):
@ -185,6 +193,7 @@ intlist_header_fmt = "<IIi"
intlist_entry_fmt = "<Iiiiii"
def get_intlist(elf):
intdata = elf.get_section_by_name("intList").data()
@ -199,7 +208,7 @@ def get_intlist(elf):
debug("spurious handler (no code) : %s" % hex(header[1]))
intlist = [i for i in
struct.iter_unpack(intlist_entry_fmt, intdata)]
struct.iter_unpack(intlist_entry_fmt, intdata)]
debug("Configured interrupt routing")
debug("handler irq pri vec dpl")
@ -218,17 +227,18 @@ def get_intlist(elf):
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-m", "--vector-map", required=True,
help="Output file mapping IRQ lines to IDT vectors")
help="Output file mapping IRQ lines to IDT vectors")
parser.add_argument("-o", "--output-idt", required=True,
help="Output file containing IDT binary")
help="Output file containing IDT binary")
parser.add_argument("-k", "--kernel", required=True,
help="Zephyr kernel image")
help="Zephyr kernel image")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print extra debugging information")
help="Print extra debugging information")
args = parser.parse_args()
@ -250,6 +260,6 @@ def main():
create_idt_binary(vectors, args.output_idt)
create_irq_vec_map_binary(irq_vec_map, args.vector_map)
if __name__ == "__main__":
main()

View file

@ -21,44 +21,46 @@ if LooseVersion(elftools.__version__) < LooseVersion('0.24'):
sys.exit(1)
kobjects = [
"k_alert",
"k_msgq",
"k_mutex",
"k_pipe",
"k_sem",
"k_stack",
"k_thread",
"k_timer",
"_k_thread_stack_element",
"device"
]
"k_alert",
"k_msgq",
"k_mutex",
"k_pipe",
"k_sem",
"k_stack",
"k_thread",
"k_timer",
"_k_thread_stack_element",
"device"
]
subsystems = [
"adc_driver_api",
"aio_cmp_driver_api",
"counter_driver_api",
"crypto_driver_api",
"flash_driver_api",
"gpio_driver_api",
"i2c_driver_api",
"i2s_driver_api",
"ipm_driver_api",
"pinmux_driver_api",
"pwm_driver_api",
"entropy_driver_api",
"rtc_driver_api",
"sensor_driver_api",
"spi_driver_api",
"uart_driver_api",
]
"adc_driver_api",
"aio_cmp_driver_api",
"counter_driver_api",
"crypto_driver_api",
"flash_driver_api",
"gpio_driver_api",
"i2c_driver_api",
"i2s_driver_api",
"ipm_driver_api",
"pinmux_driver_api",
"pwm_driver_api",
"entropy_driver_api",
"rtc_driver_api",
"sensor_driver_api",
"spi_driver_api",
"uart_driver_api",
]
def subsystem_to_enum(subsys):
return "K_OBJ_DRIVER_" + subsys[:-11].upper()
def kobject_to_enum(ko):
return "K_OBJ_" + ko[2:].upper()
DW_OP_addr = 0x3
DW_OP_fbreg = 0x91
STACK_TYPE = "_k_thread_stack_element"
@ -71,15 +73,18 @@ type_env = {}
scr = os.path.basename(sys.argv[0])
def debug(text):
if not args.verbose:
return
sys.stdout.write(scr + ": " + text + "\n")
def error(text):
sys.stderr.write("%s ERROR: %s\n" % (scr, text))
sys.exit(1)
def debug_die(die, text):
fn, ln = get_filename_lineno(die)
@ -89,6 +94,7 @@ def debug_die(die, text):
# --- type classes ----
class KobjectInstance:
def __init__(self, type_obj, addr):
global thread_counter
@ -183,8 +189,8 @@ class AggregateTypeMember:
self.member_offset = member_offset
def __repr__(self):
return "<member %s, type %d, offset %d>" % (self.member_name,
self.member_type, self.member_offset)
return "<member %s, type %d, offset %d>" % (
self.member_name, self.member_type, self.member_offset)
def has_kobject(self):
if self.member_type not in type_env:
@ -272,6 +278,7 @@ def die_get_byte_size(die):
return die.attributes["DW_AT_byte_size"].value
def analyze_die_struct(die):
name = die_get_name(die) or "<anon>"
offset = die.offset
@ -296,7 +303,7 @@ def analyze_die_struct(die):
member_offset = child.attributes["DW_AT_data_member_location"].value
cname = die_get_name(child) or "<anon>"
m = AggregateTypeMember(child.offset, cname, child_type,
member_offset)
member_offset)
at.add_member(m)
return
@ -341,7 +348,7 @@ def addr_deref(elf, addr):
data = section.data()
offset = addr - start
return struct.unpack("<I" if args.little_endian else ">I",
data[offset:offset+4])[0]
data[offset:offset + 4])[0]
return 0
@ -366,7 +373,7 @@ def get_filename_lineno(die):
def find_kobjects(elf, syms):
if not elf.has_dwarf_info():
sys.stderr.write("ELF file has no DWARF information\n");
sys.stderr.write("ELF file has no DWARF information\n")
sys.exit(1)
kram_start = syms["__kernel_ram_start"]
@ -436,13 +443,18 @@ def find_kobjects(elf, syms):
continue
else:
if "DW_AT_location" not in die.attributes:
debug_die(die, "No location information for object '%s'; possibly stack allocated"
% name)
debug_die(
die,
"No location information for object '%s'; possibly stack allocated" %
name)
continue
loc = die.attributes["DW_AT_location"]
if loc.form != "DW_FORM_exprloc":
debug_die(die, "kernel object '%s' unexpected location format" % name)
debug_die(
die,
"kernel object '%s' unexpected location format" %
name)
continue
opcode = loc.value[0]
@ -452,8 +464,9 @@ def find_kobjects(elf, syms):
if opcode == DW_OP_fbreg:
debug_die(die, "kernel object '%s' found on stack" % name)
else:
debug_die(die, "kernel object '%s' unexpected exprloc opcode %s"
% (name, hex(opcode)))
debug_die(
die, "kernel object '%s' unexpected exprloc opcode %s" %
(name, hex(opcode)))
continue
addr = (loc.value[1] | (loc.value[2] << 8) | (loc.value[3] << 16) |
@ -467,7 +480,7 @@ def find_kobjects(elf, syms):
and (addr < krom_start or addr >= krom_end)):
debug_die(die, "object '%s' found in invalid location %s" %
(name, hex(addr)));
(name, hex(addr)))
continue
type_obj = type_env[type_offset]
@ -475,7 +488,7 @@ def find_kobjects(elf, syms):
all_objs.update(objs)
debug("symbol '%s' at %s contains %d object(s)" % (name, hex(addr),
len(objs)))
len(objs)))
# Step 4: objs is a dictionary mapping variable memory addresses to their
# associated type objects. Now that we have seen all variables and can
@ -525,7 +538,8 @@ struct _k_object;
# Different versions of gperf have different prototypes for the lookup function,
# best to implement the wrapper here. The pointer value itself is turned into
# a string, we told gperf to expect binary strings that are not NULL-terminated.
# a string, we told gperf to expect binary strings that are not
# NULL-terminated.
footer = """%%
struct _k_object *_k_object_find(void *obj)
{
@ -561,8 +575,11 @@ def write_gperf_table(fp, objs, static_begin, static_end):
val = "\\x%02x" % byte
fp.write(val)
fp.write("\",{},%s,%s,%d\n" % (obj_type,
"K_OBJ_FLAG_INITIALIZED" if initialized else "0", ko.data))
fp.write(
"\",{},%s,%s,%d\n" %
(obj_type,
"K_OBJ_FLAG_INITIALIZED" if initialized else "0",
ko.data))
fp.write(footer)
@ -579,15 +596,17 @@ def get_symbols(obj):
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-k", "--kernel", required=True,
help="Input zephyr ELF binary")
parser.add_argument("-o", "--output", required=True,
help="Output list of kernel object addresses for gperf use")
help="Input zephyr ELF binary")
parser.add_argument(
"-o", "--output", required=True,
help="Output list of kernel object addresses for gperf use")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print extra debugging information")
help="Print extra debugging information")
args = parser.parse_args()
@ -604,13 +623,13 @@ def main():
if thread_counter > max_threads:
sys.stderr.write("Too many thread objects (%d)\n" % thread_counter)
sys.stderr.write("Increase CONFIG_MAX_THREAD_BYTES to %d\n",
-(-thread_counter // 8));
-(-thread_counter // 8))
sys.exit(1)
with open(args.output, "w") as fp:
write_gperf_table(fp, objs, syms["_static_kernel_objects_begin"],
syms["_static_kernel_objects_end"])
syms["_static_kernel_objects_end"])
if __name__ == "__main__":
main()

View file

@ -217,7 +217,7 @@ class PageMode_4kb:
# thus the new PDE start address is a multiple of that
# number
extra_pde_start_address = (extra_pde *
(self.size_addressed_per_pde))
(self.size_addressed_per_pde))
# the start address of and extra pde will always be 0
# and the end address is calculated with the new pde's start
@ -577,10 +577,10 @@ class PageMode_PAE:
user_mode = perm_for_pte & PAGE_ENTRY_USER_SUPERVISOR
pwt = PAGE_ENTRY_PWT
pcd = PAGE_ENTRY_PCD
a = PAGE_ENTRY_ALLOC
d = PAGE_ENTRY_DIRTY
a = PAGE_ENTRY_ALLOC
d = PAGE_ENTRY_DIRTY
pat = PAGE_ENTRY_PAT
g = PAGE_ENTRY_GLOBAL
g = PAGE_ENTRY_GLOBAL
# This points to the actual memory in the HW
# totally 20 bits to rep the phy address
@ -684,15 +684,15 @@ class PageMode_PAE:
# each page directory entry has a addr range of
# (1024 *4096) thus the new PDE start address is a
# multiple of that number
extra_pde_start_address = (extra_pde *
(self.size_addressed_per_pde))
extra_pde_start_address = (
extra_pde * (self.size_addressed_per_pde))
# the start address of and extra pde will always be 0
# and the end address is calculated with the new
# pde's start address and the overflow_size
extra_pte_valid_addr_end = (
self.get_pte_number(extra_pde_start_address +
overflow_size - 1))
self.get_pte_number(extra_pde_start_address +
overflow_size - 1))
# if the overflow_size couldn't be fit inside this new
# pde then need another pde and so we now need to limit
@ -820,7 +820,6 @@ class PageMode_PAE:
binary_value)
output_offset += struct.calcsize(self.write_page_entry_bin)
# To populate the binary file the module struct needs a buffer of the
# excat size This returns the size needed for the given set of page tables.
def set_binary_file_size(self):
@ -1096,8 +1095,9 @@ def check_bits(val, bits):
def parse_args():
global args
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-e", "--big-endian", action="store_true",
help="Target encodes data in big-endian format"
@ -1107,8 +1107,9 @@ def parse_args():
help="Input file from which MMU regions are read.")
parser.add_argument("-k", "--kernel",
help="Zephyr kernel image")
parser.add_argument("-o", "--output",
help="Output file into which the page tables are written.")
parser.add_argument(
"-o", "--output",
help="Output file into which the page tables are written.")
parser.add_argument("-v", "--verbose", action="count", default=0,
help="Print debugging information. Multiple "
"invocations increase verbosity")
@ -1135,11 +1136,15 @@ def format_string(input_str):
return output_str
# format for 32bit hex value
def hex_32(input_value):
output_value = "{0:#0{1}x}".format(input_value, 10)
return output_value
# format for 20bit hex value
def hex_20(input_value):
output_value = "{0:#0{1}x}".format(input_value, 7)
return output_value
@ -1161,6 +1166,8 @@ def verbose_output(page_mode):
page_mode.print_all_page_table_info()
# build sym table
def get_symbols(obj):
for section in obj.iter_sections():
if isinstance(section, SymbolTableSection):

View file

@ -10,6 +10,7 @@ from elftools.elf.sections import SymbolTableSection
import argparse
import sys
def get_symbol_table(obj):
for section in obj.iter_sections():
if isinstance(section, SymbolTableSection):
@ -17,6 +18,7 @@ def get_symbol_table(obj):
raise LookupError("Could not find symbol table")
def gen_offset_header(input_name, input_file, output_file):
include_guard = "__GEN_OFFSETS_H__"
output_file.write("""/* THIS FILE IS AUTO GENERATED. PLEASE DO NOT EDIT.
@ -41,18 +43,29 @@ def gen_offset_header(input_name, input_file, output_file):
if sym.entry['st_info']['bind'] != 'STB_GLOBAL':
continue
output_file.write("#define %s 0x%x\n" % (sym.name, sym.entry['st_value']))
output_file.write(
"#define %s 0x%x\n" %
(sym.name, sym.entry['st_value']))
output_file.write("\n#endif /* %s */\n" % include_guard)
return 0
if __name__ == '__main__':
parser = argparse.ArgumentParser(
formatter_class = argparse.RawDescriptionHelpFormatter)
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-i", "--input", required=True, help="Input object file")
parser.add_argument("-o", "--output", required=True, help="Output header file")
parser.add_argument(
"-i",
"--input",
required=True,
help="Input object file")
parser.add_argument(
"-o",
"--output",
required=True,
help="Output header file")
args = parser.parse_args()

View file

@ -7,11 +7,13 @@
import sys
from enum import Enum
class Retval(Enum):
VOID = 0
U32 = 1
U64 = 2
def gen_macro(ret, argc):
if ret == Retval.VOID:
suffix = "_VOID"
@ -27,12 +29,13 @@ def gen_macro(ret, argc):
sys.stdout.write(", t%d, p%d" % (i, i))
sys.stdout.write(")")
def gen_fn(ret, argc, name, extern=False):
sys.stdout.write("\t%s %s %s(" %
(("extern" if extern else "static inline"),
("ret" if ret != Retval.VOID else "void"), name))
(("extern" if extern else "static inline"),
("ret" if ret != Retval.VOID else "void"), name))
if argc == 0:
sys.stdout.write("void");
sys.stdout.write("void")
else:
for i in range(argc):
sys.stdout.write("t%d p%d" % (i, i))
@ -40,8 +43,10 @@ def gen_fn(ret, argc, name, extern=False):
sys.stdout.write(", ")
sys.stdout.write(")")
def tabs(count):
sys.stdout.write("\t" * count);
sys.stdout.write("\t" * count)
def gen_make_syscall(ret, argc, tabcount):
tabs(tabcount)
@ -64,18 +69,20 @@ def gen_make_syscall(ret, argc, tabcount):
# dropped, and the hndlr_ref section is itself dropped from the binary
# from gc-sections; these references will not consume space.
sys.stdout.write("static _GENERIC_SECTION(hndlr_ref) __used void *href = (void *)&_handler_##name; \\\n")
sys.stdout.write(
"static _GENERIC_SECTION(hndlr_ref) __used void *href = (void *)&_handler_##name; \\\n")
tabs(tabcount)
if (ret != Retval.VOID):
sys.stdout.write("return (ret)")
if (argc <= 6 and ret != Retval.U64):
sys.stdout.write("_arch")
sys.stdout.write("_syscall%s_invoke%d(" %
(("_ret64" if ret == Retval.U64 else ""), argc))
(("_ret64" if ret == Retval.U64 else ""), argc))
for i in range(argc):
sys.stdout.write("(u32_t)p%d, " % (i))
sys.stdout.write("id); \\\n")
def gen_call_impl(ret, argc):
if (ret != Retval.VOID):
sys.stdout.write("return ")
@ -86,9 +93,11 @@ def gen_call_impl(ret, argc):
sys.stdout.write(", ")
sys.stdout.write("); \\\n")
def newline():
sys.stdout.write(" \\\n")
def gen_defines_inner(ret, argc, kernel_only=False, user_only=False):
sys.stdout.write("#define ")
gen_macro(ret, argc)
@ -99,7 +108,7 @@ def gen_defines_inner(ret, argc, kernel_only=False, user_only=False):
sys.stdout.write(";")
newline()
gen_fn(ret, argc, "name");
gen_fn(ret, argc, "name")
newline()
sys.stdout.write("\t{")
newline()
@ -120,7 +129,7 @@ def gen_defines_inner(ret, argc, kernel_only=False, user_only=False):
# Prevent memory access issues if the implementation function gets
# inlined
sys.stdout.write("\t\t\tcompiler_barrier();");
sys.stdout.write("\t\t\tcompiler_barrier();")
newline()
sys.stdout.write("\t\t\t")
@ -137,11 +146,13 @@ def gen_defines(argc, kernel_only=False, user_only=False):
gen_defines_inner(Retval.U64, argc, kernel_only, user_only)
sys.stdout.write("/* Auto-generated by gen_syscall_header.py, do not edit! */\n\n")
sys.stdout.write(
"/* Auto-generated by gen_syscall_header.py, do not edit! */\n\n")
sys.stdout.write("#ifndef GEN_SYSCALL_H\n#define GEN_SYSCALL_H\n\n")
for i in range(11):
sys.stdout.write("#if !defined(CONFIG_USERSPACE) || defined(__ZEPHYR_SUPERVISOR__)\n")
sys.stdout.write(
"#if !defined(CONFIG_USERSPACE) || defined(__ZEPHYR_SUPERVISOR__)\n")
gen_defines(i, kernel_only=True)
sys.stdout.write("#elif defined(__ZEPHYR_USER__)\n")
gen_defines(i, user_only=True)
@ -150,8 +161,3 @@ for i in range(11):
sys.stdout.write("#endif /* mixed kernel/user macros */\n\n")
sys.stdout.write("#endif /* GEN_SYSCALL_H */\n")

View file

@ -83,15 +83,16 @@ u32_t %s(u32_t arg1, u32_t arg2, u32_t arg3,
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-i", "--json-file", required=True,
help="Read syscall information from json file")
help="Read syscall information from json file")
parser.add_argument("-d", "--syscall-dispatch", required=True,
help="output C system call dispatch table file")
help="output C system call dispatch table file")
parser.add_argument("-o", "--base-output", required=True,
help="Base output directory for syscall macro headers")
help="Base output directory for syscall macro headers")
args = parser.parse_args()
@ -137,6 +138,6 @@ def main():
with open(out_fn, "w") as fp:
fp.write(header)
if __name__ == "__main__":
main()

View file

@ -20,17 +20,20 @@ __syscall\s+ # __syscall attribute, must be first
typename_regex = re.compile(r'(.*?)([A-Za-z0-9_]+)$')
class SyscallParseException(Exception):
pass
def typename_split(item):
if "[" in item:
raise SyscallParseException("Please pass arrays to syscalls as pointers, unable to process '%s'"
% item)
raise SyscallParseException(
"Please pass arrays to syscalls as pointers, unable to process '%s'" %
item)
if "(" in item:
raise SyscallParseException("Please use typedefs for function pointers")
raise SyscallParseException(
"Please use typedefs for function pointers")
mo = typename_regex.match(item)
if not mo:
@ -115,17 +118,21 @@ def analyze_headers(base_path):
return ret
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-i", "--include", required=True,
help="Base include directory")
parser.add_argument("-j", "--json-file", required=True,
help="Write system call prototype information as json to file")
help="Base include directory")
parser.add_argument(
"-j", "--json-file", required=True,
help="Write system call prototype information as json to file")
args = parser.parse_args()
def main():
parse_args()
@ -152,5 +159,6 @@ def main():
with open(path, 'w') as fp:
fp.write(new)
if __name__ == "__main__":
main()

View file

@ -25,6 +25,7 @@ the generated code so that we work with pointers directly and not strings.
This saves a considerable amount of space.
"""
def debug(text):
if not args.verbose:
return
@ -37,7 +38,12 @@ def error(text):
def warn(text):
sys.stdout.write(os.path.basename(sys.argv[0]) + " WARNING: " + text + "\n")
sys.stdout.write(
os.path.basename(
sys.argv[0]) +
" WARNING: " +
text +
"\n")
def reformat_str(match_obj):
@ -54,14 +60,14 @@ def reformat_str(match_obj):
break
if addr_str[i] == "\\":
if addr_str[i+1].isdigit():
if addr_str[i + 1].isdigit():
# Octal escape sequence
val_str = addr_str[i+1:i+4]
val_str = addr_str[i + 1:i + 4]
addr_vals[ctr] = int(val_str, 8)
i += 4
else:
# Char value that had to be escaped by C string rules
addr_vals[ctr] = ord(addr_str[i+1])
addr_vals[ctr] = ord(addr_str[i + 1])
i += 2
else:
@ -72,6 +78,7 @@ def reformat_str(match_obj):
return "(char *)0x%02x%02x%02x%02x" % tuple(addr_vals)
def process_line(line, fp):
if line.startswith("#"):
fp.write(line)
@ -90,7 +97,7 @@ def process_line(line, fp):
v_hi = LooseVersion("3.1")
if (v < v_lo or v > v_hi):
warn("gperf %s is not tested, versions %s through %s supported" %
(v, v_lo, v_hi))
(v, v_lo, v_hi))
# Replace length lookups with constant len of 4 since we're always
# looking at pointers
@ -110,7 +117,7 @@ def process_line(line, fp):
# Hashing the address of the string
line = re.sub(r"hash [(]str, len[)]",
r"hash((const char *)&str, len)", line)
r"hash((const char *)&str, len)", line)
# Just compare pointers directly instead of using memcmp
if re.search("if [(][*]str", line):
@ -123,18 +130,20 @@ def process_line(line, fp):
fp.write(line)
def parse_args():
global args
parser = argparse.ArgumentParser(description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-i", "--input", required=True,
help="Input C file from gperf")
help="Input C file from gperf")
parser.add_argument("-o", "--output", required=True,
help="Output C file with processing done")
help="Output C file with processing done")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print extra debugging information")
help="Print extra debugging information")
args = parser.parse_args()

View file

@ -10,4 +10,3 @@ fd.seek(0x12)
# Write 0x03 which is EM_386 to e_machine
fd.write(b'\x03')
fd.close()