doc: _extensions: apply ruff lint rules

This makes all Python scripts in doc/_extensions compliant with the current Ruff rules.

Signed-off-by: Benjamin Cabé <benjamin@zephyrproject.org>

parent e4539aa9c9
commit cf005feb9d

12 changed files with 156 additions and 233 deletions
Ruff per-file-ignores configuration:

@@ -43,78 +43,6 @@
 "./boards/microchip/mec172xevb_assy6906/support/mec172x_remote_flasher.py" = [
     "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
 ]
-"./doc/_extensions/zephyr/api_overview.py" = [
-    "E501",    # https://docs.astral.sh/ruff/rules/line-too-long
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/application.py" = [
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "SIM102",  # https://docs.astral.sh/ruff/rules/collapsible-if
-    "UP032",   # https://docs.astral.sh/ruff/rules/f-string
-]
-"./doc/_extensions/zephyr/domain/__init__.py" = [
-    "B023",    # https://docs.astral.sh/ruff/rules/function-uses-loop-variable
-    "B026",    # https://docs.astral.sh/ruff/rules/star-arg-unpacking-after-keyword-arg
-    "E402",    # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file
-    "E501",    # https://docs.astral.sh/ruff/rules/line-too-long
-    "F401",    # https://docs.astral.sh/ruff/rules/unused-import
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/doxybridge.py" = [
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/doxyrunner.py" = [
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "SIM115",  # https://docs.astral.sh/ruff/rules/open-file-with-context-handler
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP007",   # https://docs.astral.sh/ruff/rules/non-pep604-annotation
-    "UP024",   # https://docs.astral.sh/ruff/rules/os-error-alias
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/doxytooltip/__init__.py" = [
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/external_content.py" = [
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP007",   # https://docs.astral.sh/ruff/rules/non-pep604-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/gh_utils.py" = [
-    "E402",    # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP007",   # https://docs.astral.sh/ruff/rules/non-pep604-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/kconfig/__init__.py" = [
-    "E402",    # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file
-    "SIM112",  # https://docs.astral.sh/ruff/rules/uncapitalized-environment-variables
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP007",   # https://docs.astral.sh/ruff/rules/non-pep604-annotation
-    "UP028",   # https://docs.astral.sh/ruff/rules/yield-in-for-loop
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
-"./doc/_extensions/zephyr/link-roles.py" = [
-    "B006",    # https://docs.astral.sh/ruff/rules/mutable-argument-default
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "SIM102",  # https://docs.astral.sh/ruff/rules/collapsible-if
-    "UP010",   # https://docs.astral.sh/ruff/rules/unnecessary-future-import
-]
-"./doc/_extensions/zephyr/manifest_projects_table.py" = [
-    "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
-    "SIM114",  # https://docs.astral.sh/ruff/rules/if-with-same-arms
-    "UP006",   # https://docs.astral.sh/ruff/rules/non-pep585-annotation
-    "UP035",   # https://docs.astral.sh/ruff/rules/deprecated-import
-]
 "./doc/_scripts/gen_boards_catalog.py" = [
     "E401",    # https://docs.astral.sh/ruff/rules/multiple-imports-on-one-line
     "I001",    # https://docs.astral.sh/ruff/rules/unsorted-imports
doc/_extensions/zephyr/api_overview.py:

@@ -1,14 +1,14 @@
 # Copyright (c) 2023 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0

-import doxmlparser
+from pathlib import Path
+from typing import Any
+
+import doxmlparser
 from docutils import nodes
 from doxmlparser.compound import DoxCompoundKind
-from pathlib import Path
+
 from sphinx.application import Sphinx
 from sphinx.util.docutils import SphinxDirective
-from typing import Any, Dict


 class ApiOverview(SphinxDirective):

@@ -56,7 +56,9 @@ def visit_group(app, group, all_groups, rows, indent=0):
         if since:
             since_url = nodes.inline()
-            reference = nodes.reference(text=f"v{since.strip()}.0", refuri=f"{github_uri}/v{since.strip()}.0")
+            reference = nodes.reference(
+                text=f"v{since.strip()}.0", refuri=f"{github_uri}/v{since.strip()}.0"
+            )
             reference.attributes["internal"] = True
             since_url += reference
         else:

@@ -161,7 +163,7 @@ def sync_contents(app: Sphinx) -> None:
     app.builder.env.api_overview_table = generate_table(app, toplevel, groups)


-def setup(app) -> Dict[str, Any]:
+def setup(app) -> dict[str, Any]:
     app.add_config_value("api_overview_doxygen_xml_dir", "html/doxygen/xml", "env")
     app.add_config_value("api_overview_doxygen_base_url", "../../doxygen/html", "env")
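The UP006/UP035 changes above swap typing.Dict for the built-in dict generic from PEP 585, which can be used directly in annotations on Python 3.9+. A minimal sketch of the pattern, using a hypothetical setup-style function rather than the real extension code:

from typing import Any

# Before (UP006/UP035): from typing import Dict ... def setup(app) -> Dict[str, Any]:
# After: the built-in type is generic on Python 3.9+, so typing.Dict is unnecessary.
def setup(app) -> dict[str, Any]:
    return {"version": "0.1.0", "parallel_read_safe": True}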
doc/_extensions/zephyr/application.py:

@@ -4,11 +4,10 @@

 '''Sphinx extensions related to managing Zephyr applications.'''

-from docutils import nodes
-from docutils.parsers.rst import Directive
-from docutils.parsers.rst import directives
 from pathlib import Path

+from docutils import nodes
+from docutils.parsers.rst import Directive, directives

 ZEPHYR_BASE = Path(__file__).parents[3]

@@ -79,8 +78,7 @@ class ZephyrAppCommandsDirective(Directive):
         flash_args = self.options.get('flash-args', None)

         if tool not in self.TOOLS:
-            raise self.error('Unknown tool {}; choose from: {}'.format(
-                tool, self.TOOLS))
+            raise self.error(f'Unknown tool {tool}; choose from: {self.TOOLS}')

         if app and zephyr_app:
             raise self.error('Both app and zephyr-app options were given.')

@@ -92,20 +90,17 @@ class ZephyrAppCommandsDirective(Directive):
             raise self.error('build-dir-fmt is only supported for the west build tool.')

         if generator not in self.GENERATORS:
-            raise self.error('Unknown generator {}; choose from: {}'.format(
-                generator, self.GENERATORS))
+            raise self.error(f'Unknown generator {generator}; choose from: {self.GENERATORS}')

         if host_os not in self.HOST_OS:
-            raise self.error('Unknown host-os {}; choose from: {}'.format(
-                host_os, self.HOST_OS))
+            raise self.error(f'Unknown host-os {host_os}; choose from: {self.HOST_OS}')

         if compact and skip_config:
             raise self.error('Both compact and maybe-skip-config options were given.')

-        if zephyr_app:
-            # as folks might use "<...>" notation to indicate a variable portion of the path, we
-            # deliberately don't check for the validity of such paths.
-            if not any([x in zephyr_app for x in ["<", ">"]]):
-                app_path = ZEPHYR_BASE / zephyr_app
-                if not app_path.is_dir():
-                    raise self.error(
+        # as folks might use "<...>" notation to indicate a variable portion of the path, we
+        # deliberately don't check for the validity of such paths.
+        if zephyr_app and not any([x in zephyr_app for x in ["<", ">"]]):
+            app_path = ZEPHYR_BASE / zephyr_app
+            if not app_path.is_dir():
+                raise self.error(
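SIM102 asks for nested if statements with no code between them to be collapsed into a single condition, which is what happens to the zephyr_app path check above. A standalone sketch of the same refactor (function name and return values are illustrative only, not taken from the directive):

def is_checkable_path(zephyr_app):
    # Before (SIM102):
    #     if zephyr_app:
    #         if not any(x in zephyr_app for x in ["<", ">"]):
    #             return True
    #     return False
    # After: the two conditions merge into one, removing a nesting level.
    if zephyr_app and not any(x in zephyr_app for x in ["<", ">"]):
        return True
    return False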
@@ -168,7 +163,7 @@ class ZephyrAppCommandsDirective(Directive):
         if tool_comment:
             paragraph = nodes.paragraph()
             paragraph += nodes.Text(tool_comment.format(
-                'CMake and {}'.format(generator)))
+                f'CMake and {generator}'))
             content.append(paragraph)
             content.append(self._lit_block(c))
         else:

@@ -208,30 +203,30 @@ class ZephyrAppCommandsDirective(Directive):
         # west always defaults to ninja
         gen_arg = ' -G\'Unix Makefiles\'' if generator == 'make' else ''
         cmake_args = gen_arg + self._cmake_args(**kwargs)
-        cmake_args = ' --{}'.format(cmake_args) if cmake_args != '' else ''
+        cmake_args = f' --{cmake_args}' if cmake_args != '' else ''
         build_args = "".join(f" -o {b}" for b in build_args) if build_args else ""
-        west_args = ' {}'.format(west_args) if west_args else ''
-        flash_args = ' {}'.format(flash_args) if flash_args else ''
+        west_args = f' {west_args}' if west_args else ''
+        flash_args = f' {flash_args}' if flash_args else ''
         snippet_args = ''.join(f' -S {s}' for s in snippets) if snippets else ''
         shield_args = ''.join(f' --shield {s}' for s in shield) if shield else ''
         # ignore zephyr_app since west needs to run within
         # the installation. Instead rely on relative path.
-        src = ' {}'.format(app) if app and not cd_into else ''
+        src = f' {app}' if app and not cd_into else ''

         if build_dir_fmt is None:
-            dst = ' -d {}'.format(build_dir) if build_dir != 'build' else ''
+            dst = f' -d {build_dir}' if build_dir != 'build' else ''
             build_dst = dst
         else:
             app_name = app.split('/')[-1]
             build_dir_formatted = build_dir_fmt.format(app=app_name, board=board, source_dir=app)
-            dst = ' -d {}'.format(build_dir_formatted)
+            dst = f' -d {build_dir_formatted}'
             build_dst = ''

         if in_tree and not compact:
             content.append(in_tree)

         if cd_into and app:
-            content.append('cd {}'.format(app))
+            content.append(f'cd {app}')

         # We always have to run west build.
         #

@@ -252,21 +247,21 @@ class ZephyrAppCommandsDirective(Directive):
         # etc. commands can use the signed file which must be created
         # in this step.
         if 'sign' in goals:
-            content.append('west sign{}'.format(dst))
+            content.append(f'west sign{dst}')

         for goal in goals:
             if goal in {'build', 'sign'}:
                 continue
             elif goal == 'flash':
-                content.append('west flash{}{}'.format(flash_args, dst))
+                content.append(f'west flash{flash_args}{dst}')
             elif goal == 'debug':
-                content.append('west debug{}'.format(dst))
+                content.append(f'west debug{dst}')
             elif goal == 'debugserver':
-                content.append('west debugserver{}'.format(dst))
+                content.append(f'west debugserver{dst}')
             elif goal == 'attach':
-                content.append('west attach{}'.format(dst))
+                content.append(f'west attach{dst}')
             else:
-                content.append('west build -t {}{}'.format(goal, dst))
+                content.append(f'west build -t {goal}{dst}')

         return content

@@ -274,14 +269,15 @@ class ZephyrAppCommandsDirective(Directive):
     def _mkdir(mkdir, build_dir, host_os, skip_config):
         content = []
         if skip_config:
-            content.append("# If you already made a build directory ({}) and ran cmake, just 'cd {}' instead.".format(build_dir, build_dir))  # noqa: E501
+            content.append(f"# If you already made a build directory ({build_dir}) and ran cmake, "
+                           f"just 'cd {build_dir}' instead.")
         if host_os == 'all':
-            content.append('mkdir {} && cd {}'.format(build_dir, build_dir))
+            content.append(f'mkdir {build_dir} && cd {build_dir}')
         if host_os == "unix":
-            content.append('{} {} && cd {}'.format(mkdir, build_dir, build_dir))
+            content.append(f'{mkdir} {build_dir} && cd {build_dir}')
         elif host_os == "win":
             build_dir = build_dir.replace('/', '\\')
-            content.append('mkdir {} & cd {}'.format(build_dir, build_dir))
+            content.append(f'mkdir {build_dir} & cd {build_dir}')
         return content

     @staticmethod

@@ -289,11 +285,11 @@ class ZephyrAppCommandsDirective(Directive):
         board = kwargs['board']
         conf = kwargs['conf']
         gen_args = kwargs['gen_args']
-        board_arg = ' -DBOARD={}'.format(board) if board else ''
-        conf_arg = ' -DCONF_FILE={}'.format(conf) if conf else ''
-        gen_args = ' {}'.format(gen_args) if gen_args else ''
+        board_arg = f' -DBOARD={board}' if board else ''
+        conf_arg = f' -DCONF_FILE={conf}' if conf else ''
+        gen_args = f' {gen_args}' if gen_args else ''

-        return '{}{}{}'.format(board_arg, conf_arg, gen_args)
+        return f'{board_arg}{conf_arg}{gen_args}'

     def _cd_into(self, mkdir, **kwargs):
         app = kwargs['app']

@@ -319,13 +315,13 @@ class ZephyrAppCommandsDirective(Directive):
             if os_comment:
                 content.append(os_comment.format('Linux/macOS'))
             if app:
-                content.append('cd {}'.format(app))
+                content.append(f'cd {app}')
         elif host == "win":
             if os_comment:
                 content.append(os_comment.format('Windows'))
             if app:
                 backslashified = app.replace('/', '\\')
-                content.append('cd {}'.format(backslashified))
+                content.append(f'cd {backslashified}')
         if mkdir:
             content.extend(self._mkdir(mkdir, build_dir, host, skip_config))
         if not compact:

@@ -359,39 +355,36 @@ class ZephyrAppCommandsDirective(Directive):
             cmake_build_dir = ''
             tool_build_dir = ''
         else:
-            source_dir = ' {}'.format(app) if app else ' .'
-            cmake_build_dir = ' -B{}'.format(build_dir)
-            tool_build_dir = ' -C{}'.format(build_dir)
+            source_dir = f' {app}' if app else ' .'
+            cmake_build_dir = f' -B{build_dir}'
+            tool_build_dir = f' -C{build_dir}'

         # Now generate the actual cmake and make/ninja commands
         gen_arg = ' -GNinja' if generator == 'ninja' else ''
-        build_args = ' {}'.format(build_args) if build_args else ''
+        build_args = f' {build_args}' if build_args else ''
         snippet_args = ' -DSNIPPET="{}"'.format(';'.join(snippets)) if snippets else ''
         shield_args = ' -DSHIELD="{}"'.format(';'.join(shield)) if shield else ''
         cmake_args = self._cmake_args(**kwargs)

         if not compact:
             if not cd_into and skip_config:
-                content.append("# If you already ran cmake with -B{}, you " \
-                               "can skip this step and run {} directly.".
-                               format(build_dir, generator))  # noqa: E501
+                content.append(f'# If you already ran cmake with -B{build_dir}, you '
+                               f'can skip this step and run {generator} directly.')
             else:
-                content.append('# Use cmake to configure a {}-based build' \
-                               'system:'.format(generator.capitalize()))  # noqa: E501
+                content.append(f'# Use cmake to configure a {generator.capitalize()}-based build'
+                               'system:')

-        content.append('cmake{}{}{}{}{}{}'.format(cmake_build_dir, gen_arg,
-                       cmake_args, snippet_args, shield_args, source_dir))
+        content.append(f'cmake{cmake_build_dir}{gen_arg}{cmake_args}{snippet_args}{shield_args}{source_dir}')
         if not compact:
             content.extend(['',
                             '# Now run the build tool on the generated build system:'])

         if 'build' in goals:
-            content.append('{}{}{}'.format(generator, tool_build_dir,
-                           build_args))
+            content.append(f'{generator}{tool_build_dir}{build_args}')
         for goal in goals:
             if goal == 'build':
                 continue
-            content.append('{}{} {}'.format(generator, tool_build_dir, goal))
+            content.append(f'{generator}{tool_build_dir} {goal}')

         return content
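Most of the churn in this file is UP032: str.format() calls rewritten as equivalent f-strings. The transformation on a hypothetical command string (values are placeholders, not taken from the directive):

goal = "flash"
build_dir = "build"

cmd_old = 'west build -t {}{}'.format(goal, f' -d {build_dir}')  # before (UP032)
cmd_new = f'west build -t {goal} -d {build_dir}'                 # after

assert cmd_old == cmd_new == 'west build -t flash -d build'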
doc/_extensions/zephyr/domain/__init__.py:

@@ -26,15 +26,17 @@ Roles
 """

+import json
 import sys
+from collections.abc import Iterator
 from os import path
 from pathlib import Path
-from typing import Any, Dict, Iterator, List, Tuple, Final
+from typing import Any

+from anytree import ChildResolverError, Node, PreOrderIter, Resolver, search
 from docutils import nodes
 from docutils.parsers.rst import directives
 from docutils.statemachine import StringList

 from sphinx import addnodes
 from sphinx.application import Sphinx
 from sphinx.domains import Domain, ObjType

@@ -51,20 +53,15 @@ from sphinx.util.template import SphinxRenderer
 from zephyr.doxybridge import DoxygenGroupDirective
 from zephyr.gh_utils import gh_link_get_url

-
-import json
-
-from anytree import Node, Resolver, ChildResolverError, PreOrderIter, search
-
-
 __version__ = "0.2.0"

-ZEPHYR_BASE = Path(__file__).parents[4]
-
-sys.path.insert(0, str(ZEPHYR_BASE / "scripts/dts/python-devicetree/src"))
+sys.path.insert(0, str(Path(__file__).parents[4] / "scripts/dts/python-devicetree/src"))
 sys.path.insert(0, str(Path(__file__).parents[3] / "_scripts"))

 from gen_boards_catalog import get_catalog

+ZEPHYR_BASE = Path(__file__).parents[4]
 TEMPLATES_DIR = Path(__file__).parent / "templates"
 RESOURCES_DIR = Path(__file__).parent / "static"
@@ -295,10 +292,10 @@ class CodeSampleCategoriesTocPatching(SphinxPostTransform):
             reference = nodes.reference(
                 "",
                 "",
+                *[nodes.Text(tree.category["name"])],
                 internal=True,
                 refuri=docname,
                 anchorname="",
-                *[nodes.Text(tree.category["name"])],
                 classes=["category-link"],
             )
             compact_paragraph += reference

@@ -328,10 +325,10 @@ class CodeSampleCategoriesTocPatching(SphinxPostTransform):
             sample_xref = nodes.reference(
                 "",
                 "",
+                *[nodes.Text(code_sample["name"])],
                 internal=True,
                 refuri=code_sample["docname"],
                 anchorname="",
-                *[nodes.Text(code_sample["name"])],
                 classes=["code-sample-link"],
             )
             sample_xref["reftitle"] = code_sample["description"].astext()

@@ -414,7 +411,8 @@ class ProcessCodeSampleListingNode(SphinxPostTransform):
             "",
             """
             <div class="cs-search-bar">
-              <input type="text" class="cs-search-input" placeholder="Filter code samples..." onkeyup="filterSamples(this)">
+              <input type="text" class="cs-search-input"
+                     placeholder="Filter code samples..." onkeyup="filterSamples(this)">
               <i class="fa fa-search"></i>
             </div>
             """,

@@ -432,7 +430,8 @@ class ProcessCodeSampleListingNode(SphinxPostTransform):
         category_node = search.find(
             code_samples_categories_tree,
-            lambda node: hasattr(node, "category") and node.category["id"] == category,
+            lambda node, category=category: hasattr(node, "category")
+            and node.category["id"] == category,
         )
         self.output_sample_categories_sections(category_node, container)
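The lambda node, category=category: ... rewrite above is the usual fix for B023: a closure created inside a loop captures the loop variable by reference, so every callback would otherwise see only the last value of category. Binding the value as a default argument freezes it per iteration. A self-contained illustration with placeholder category names:

categories = ["net", "kernel", "drivers"]

late_bound = [lambda: c for c in categories]        # B023: all three see the final value
early_bound = [lambda c=c: c for c in categories]   # fix: each lambda keeps its own value

assert [f() for f in late_bound] == ["drivers", "drivers", "drivers"]
assert [f() for f in early_bound] == ["net", "kernel", "drivers"]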
@@ -721,13 +720,13 @@ class ZephyrDomain(Domain):
         "board": BoardDirective,
     }

-    object_types: Dict[str, ObjType] = {
+    object_types: dict[str, ObjType] = {
         "code-sample": ObjType("code sample", "code-sample"),
         "code-sample-category": ObjType("code sample category", "code-sample-category"),
         "board": ObjType("board", "board"),
     }

-    initial_data: Dict[str, Any] = {
+    initial_data: dict[str, Any] = {
         "code-samples": {},  # id -> code sample data
         "code-samples-categories": {},  # id -> code sample category data
         "code-samples-categories-tree": Node("samples"),

@@ -754,11 +753,12 @@ class ZephyrDomain(Domain):
         self.data["has_code_sample_listing"].pop(docname, None)
         self.data["has_board_catalog"].pop(docname, None)

-    def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
+    def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
         self.data["code-samples"].update(otherdata["code-samples"])
         self.data["code-samples-categories"].update(otherdata["code-samples-categories"])

-        # self.data["boards"] contains all the boards right from builder-inited time, but it still # potentially needs merging since a board's docname property is set by BoardDirective to
+        # self.data["boards"] contains all the boards right from builder-inited time, but it still
+        # potentially needs merging since a board's docname property is set by BoardDirective to
         # indicate the board is documented in a specific document.
         for board_name, board in otherdata["boards"].items():
             if "docname" in board:

@@ -819,7 +819,7 @@ class ZephyrDomain(Domain):
         )

     # used by Sphinx Immaterial theme
-    def get_object_synopses(self) -> Iterator[Tuple[Tuple[str, str], str]]:
+    def get_object_synopses(self) -> Iterator[tuple[tuple[str, str], str]]:
         for _, code_sample in self.data["code-samples"].items():
             yield (
                 (code_sample["docname"], code_sample["id"]),

@@ -899,7 +899,7 @@ class ZephyrDomain(Domain):
 class CustomDoxygenGroupDirective(DoxygenGroupDirective):
     """Monkey patch for Breathe's DoxygenGroupDirective."""

-    def run(self) -> List[Node]:
+    def run(self) -> list[Node]:
         nodes = super().run()

         if self.config.zephyr_breathe_insert_related_samples:
doc/_extensions/zephyr/doxybridge.py:

@@ -4,22 +4,19 @@ Copyright (c) 2024 The Linux Foundation
 SPDX-License-Identifier: Apache-2.0
 """

-import os
-from typing import Any, Dict
-
 import concurrent.futures
+import os
+from typing import Any

+import doxmlparser
 from docutils import nodes
+from doxmlparser.compound import DoxCompoundKind, DoxMemberKind
 from sphinx import addnodes
 from sphinx.application import Sphinx
+from sphinx.domains.c import CXRefRole
 from sphinx.transforms.post_transforms import SphinxPostTransform
 from sphinx.util import logging
 from sphinx.util.docutils import SphinxDirective
-from sphinx.domains.c import CXRefRole
-
-import doxmlparser
-from doxmlparser.compound import DoxCompoundKind, DoxMemberKind

 logger = logging.getLogger(__name__)

@@ -154,7 +151,7 @@ def parse_sections(compounddef):
     return cache


-def parse_compound(inDirName, baseName) -> Dict:
+def parse_compound(inDirName, baseName) -> dict:
     rootObj = doxmlparser.compound.parse(inDirName + "/" + baseName + ".xml", True)
     cache = {}
     group_titles = {}

@@ -218,7 +215,7 @@ def doxygen_parse(app: Sphinx) -> None:
     parse_index(app, str(app.config.doxybridge_dir / "xml"))


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.add_config_value("doxybridge_dir", None, "env")

     app.add_directive("doxygengroup", DoxygenGroupDirective)
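The I001 fixes that appear in nearly every hunk of this commit reorder imports into Ruff's isort-style layout: standard-library imports first, third-party packages in a separate block, each block alphabetized with plain imports and from-imports sorted together. The shape of the result, shown with names from this file:

# standard library, one alphabetized block
import concurrent.futures
import os
from typing import Any

# third-party packages, separated by a blank line
import doxmlparser
from sphinx.util import logging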
doc/_extensions/zephyr/doxyrunner.py:

@@ -43,19 +43,18 @@ Configuration options
 import filecmp
 import hashlib
-from pathlib import Path
 import re
 import shlex
 import shutil
-from subprocess import Popen, PIPE, STDOUT
 import tempfile
-from typing import List, Dict, Optional, Any
+from pathlib import Path
+from subprocess import PIPE, STDOUT, Popen
+from typing import Any

 from sphinx.application import Sphinx
 from sphinx.environment import BuildEnvironment
 from sphinx.util import logging


 __version__ = "0.1.0"

@@ -77,7 +76,7 @@ def hash_file(file: Path) -> str:
     return sha256.hexdigest()


-def get_doxygen_option(doxyfile: str, option: str) -> List[str]:
+def get_doxygen_option(doxyfile: str, option: str) -> list[str]:
     """Obtain the value of a Doxygen option.

     Args:

@@ -133,9 +132,9 @@ def process_doxyfile(
     outdir: Path,
     silent: bool,
     fmt: bool = False,
-    fmt_pattern: Optional[str] = None,
-    fmt_vars: Optional[Dict[str, str]] = None,
-    outdir_var: Optional[str] = None,
+    fmt_pattern: str | None = None,
+    fmt_vars: dict[str, str] | None = None,
+    outdir_var: str | None = None,
 ) -> str:
     """Process Doxyfile.

@@ -270,11 +269,11 @@ def run_doxygen(doxygen: str, doxyfile: str, silent: bool = False) -> None:
         silent: If Doxygen output should be logged or not.
     """

-    f_doxyfile = tempfile.NamedTemporaryFile("w", delete=False)
-    f_doxyfile.write(doxyfile)
-    f_doxyfile.close()
+    with tempfile.NamedTemporaryFile("w", delete=False) as f_doxyfile:
+        f_doxyfile.write(doxyfile)
+        f_doxyfile_name = f_doxyfile.name

-    p = Popen([doxygen, f_doxyfile.name], stdout=PIPE, stderr=STDOUT, encoding="utf-8")
+    p = Popen([doxygen, f_doxyfile_name], stdout=PIPE, stderr=STDOUT, encoding="utf-8")
     while True:
         line = p.stdout.readline()  # type: ignore
         if line:

@@ -282,10 +281,10 @@ def run_doxygen(doxygen: str, doxyfile: str, silent: bool = False) -> None:
         if p.poll() is not None:
             break

-    Path(f_doxyfile.name).unlink()
+    Path(f_doxyfile_name).unlink()

     if p.returncode:
-        raise IOError(f"Doxygen process returned non-zero ({p.returncode})")
+        raise OSError(f"Doxygen process returned non-zero ({p.returncode})")


 def sync_doxygen(doxyfile: str, new: Path, prev: Path) -> None:

@@ -380,7 +379,7 @@ def doxygen_build(app: Sphinx) -> None:
     shutil.rmtree(tmp_outdir)


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.add_config_value("doxyrunner_doxygen", "doxygen", "env")
     app.add_config_value("doxyrunner_doxyfile", None, "env")
     app.add_config_value("doxyrunner_outdir", None, "env")
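Two of the rules fixed above are worth spelling out: SIM115 wants file objects managed by a with block, and UP024 folds the legacy IOError alias into OSError. The hunk keeps delete=False and records the name so the temporary Doxyfile can still be handed to the Doxygen subprocess after the handle is closed. A reduced sketch of that pattern; the contents and error message are placeholders:

import tempfile
from pathlib import Path

def run_with_scratch_file(contents: str) -> None:
    # SIM115: the "with" block guarantees the handle is closed;
    # delete=False keeps the file on disk for a separate consumer.
    with tempfile.NamedTemporaryFile("w", delete=False) as f:
        f.write(contents)
        scratch_name = f.name

    try:
        print(Path(scratch_name).read_text())
    except FileNotFoundError:
        # UP024: IOError is just an alias of OSError since Python 3.3.
        raise OSError("scratch file disappeared") from None
    finally:
        Path(scratch_name).unlink(missing_ok=True)

run_with_scratch_file("GENERATE_XML = YES\n")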
doc/_extensions/zephyr/doxytooltip/__init__.py:

@@ -10,8 +10,7 @@ to enable tooltips for C domain links.
 """

 from pathlib import Path
-from typing import Any, Dict
+from typing import Any

 from sphinx.application import Sphinx
 from sphinx.util import logging

@@ -20,7 +19,7 @@ logger = logging.getLogger(__name__)
 RESOURCES_DIR = Path(__file__).parent / "static"


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.config.html_static_path.append(RESOURCES_DIR.as_posix())

     app.add_js_file("tippy/popper.min.js")
doc/_extensions/zephyr/external_content.py:

@@ -32,15 +32,14 @@ Configuration options
 import filecmp
 import os
-from pathlib import Path
 import re
 import shutil
 import tempfile
-from typing import Dict, Any, List, Optional
+from pathlib import Path
+from typing import Any

 from sphinx.application import Sphinx


 __version__ = "0.1.0"

@@ -51,9 +50,9 @@ DEFAULT_DIRECTIVES = ("figure", "image", "include", "literalinclude")
 def adjust_includes(
     fname: Path,
     basepath: Path,
-    directives: List[str],
+    directives: list[str],
     encoding: str,
-    dstpath: Optional[Path] = None,
+    dstpath: Path | None = None,
 ) -> None:
     """Adjust included content paths.

@@ -162,7 +161,7 @@ def sync_contents(app: Sphinx) -> None:
             file.unlink()


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.add_config_value("external_content_contents", [], "env")
     app.add_config_value("external_content_directives", DEFAULT_DIRECTIVES, "env")
     app.add_config_value("external_content_keep", [], "")
doc/_extensions/zephyr/gh_utils.py:

@@ -33,26 +33,25 @@ Configuration options
     for, e.g., auto-generated pages not in Git.
 """

-from functools import partial
 import os
 import re
 import subprocess
 import sys
 from datetime import datetime
+from functools import partial
 from pathlib import Path
 from textwrap import dedent
-from typing import Final, Optional, Tuple
+from typing import Final
 from urllib.parse import quote

 from sphinx.application import Sphinx
 from sphinx.util.i18n import format_date

-ZEPHYR_BASE : Final[str] = Path(__file__).parents[3]
-SCRIPTS : Final[str] = ZEPHYR_BASE / "scripts"
-sys.path.insert(0, str(SCRIPTS))
+sys.path.insert(0, str(Path(__file__).parents[3] / "scripts"))

 from get_maintainer import Maintainers

+ZEPHYR_BASE : Final[str] = Path(__file__).parents[3]
 MAINTAINERS : Final[Maintainers] = Maintainers(filename=f"{ZEPHYR_BASE}/MAINTAINERS.yml")

@@ -90,7 +89,7 @@ def get_page_prefix(app: Sphinx, pagename: str) -> str:
     return found_prefix


-def gh_link_get_url(app: Sphinx, pagename: str, mode: str = "blob") -> Optional[str]:
+def gh_link_get_url(app: Sphinx, pagename: str, mode: str = "blob") -> str | None:
     """Obtain GitHub URL for the given page.

     Args:

@@ -117,7 +116,7 @@ def gh_link_get_url(app: Sphinx, pagename: str, mode: str = "blob") -> Optional[
     )


-def gh_link_get_open_issue_url(app: Sphinx, pagename: str, sha1: str) -> Optional[str]:
+def gh_link_get_open_issue_url(app: Sphinx, pagename: str, sha1: str) -> str | None:
     """Link to open a new Github issue regarding "pagename" with title, body, and
     labels already pre-filled with useful information.

@@ -164,7 +163,7 @@ def gh_link_get_open_issue_url(app: Sphinx, pagename: str, sha1: str) -> Optiona
     return f"{app.config.gh_link_base_url}/issues/new?title={title}&labels={labels}&body={body}"


-def git_info_filter(app: Sphinx, pagename) -> Optional[Tuple[str, str]]:
+def git_info_filter(app: Sphinx, pagename) -> tuple[str, str] | None:
     """Return a tuple with the date and SHA1 of the last commit made to a page.

     Arguments:
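UP007 replaces typing.Optional and typing.Union with PEP 604 union syntax, as in the three return annotations above. A minimal before/after on a simplified stand-in for git_info_filter; the returned values are placeholders:

# Before (UP007): from typing import Optional, Tuple
#     def git_info(pagename: str) -> Optional[Tuple[str, str]]: ...
# After: built-in tuple plus the "|" union (Python 3.10+, or older versions
# with "from __future__ import annotations").
def git_info(pagename: str) -> tuple[str, str] | None:
    if not pagename:
        return None
    return ("2025-01-01", "cf005feb9d")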
doc/_extensions/zephyr/kconfig/__init__.py:

@@ -33,10 +33,11 @@ import json
 import os
 import re
 import sys
+from collections.abc import Iterable
 from itertools import chain
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from typing import Any, Dict, Iterable, List, Optional, Tuple
+from typing import Any

 from docutils import nodes
 from sphinx.addnodes import pending_xref

@@ -53,22 +54,19 @@ from sphinx.util.nodes import make_refnode
 __version__ = "0.1.0"


-RESOURCES_DIR = Path(__file__).parent / "static"
-ZEPHYR_BASE = Path(__file__).parents[4]
-
-SCRIPTS = ZEPHYR_BASE / "scripts"
-sys.path.insert(0, str(SCRIPTS))
-
-KCONFIGLIB = SCRIPTS / "kconfig"
-sys.path.insert(0, str(KCONFIGLIB))
+sys.path.insert(0, str(Path(__file__).parents[4] / "scripts"))
+sys.path.insert(0, str(Path(__file__).parents[4] / "scripts/kconfig"))

 import kconfiglib
 import list_boards
 import list_hardware
 import zephyr_module

+RESOURCES_DIR = Path(__file__).parent / "static"
+ZEPHYR_BASE = Path(__file__).parents[4]

-def kconfig_load(app: Sphinx) -> Tuple[kconfiglib.Kconfig, Dict[str, str]]:
+
+def kconfig_load(app: Sphinx) -> tuple[kconfiglib.Kconfig, dict[str, str]]:
     """Load Kconfig"""
     with TemporaryDirectory() as td:
         modules = zephyr_module.parse_modules(ZEPHYR_BASE)

@@ -131,7 +129,7 @@ def kconfig_load(app: Sphinx) -> tuple[kconfiglib.Kconfig, dict[str, str]]:
     # base environment
     os.environ["ZEPHYR_BASE"] = str(ZEPHYR_BASE)
-    os.environ["srctree"] = str(ZEPHYR_BASE)
+    os.environ["srctree"] = str(ZEPHYR_BASE)  # noqa: SIM112
     os.environ["KCONFIG_DOC_MODE"] = "1"
     os.environ["KCONFIG_BINARY_DIR"] = td

@@ -232,13 +230,12 @@ class KconfigDomain(Domain):
     object_types = {"option": ObjType("option", "option")}
     roles = {"option": XRefRole()}
     directives = {"search": KconfigSearch}
-    initial_data: Dict[str, Any] = {"options": set()}
+    initial_data: dict[str, Any] = {"options": set()}

-    def get_objects(self) -> Iterable[Tuple[str, str, str, str, str, int]]:
-        for obj in self.data["options"]:
-            yield obj
+    def get_objects(self) -> Iterable[tuple[str, str, str, str, str, int]]:
+        yield from self.data["options"]

-    def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
+    def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
         self.data["options"].update(otherdata["options"])

     def resolve_xref(

@@ -250,7 +247,7 @@ class KconfigDomain(Domain):
         target: str,
         node: pending_xref,
         contnode: nodes.Element,
-    ) -> Optional[nodes.Element]:
+    ) -> nodes.Element | None:
         match = [
             (docname, anchor)
             for name, _, _, docname, anchor, _ in self.get_objects()

@@ -443,8 +440,8 @@ def kconfig_install(
     app: Sphinx,
     pagename: str,
     templatename: str,
-    context: Dict,
-    doctree: Optional[nodes.Node],
+    context: dict,
+    doctree: nodes.Node | None,
 ) -> None:
     """Install the Kconfig library files on pages that require it."""
     if (
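UP028 rewrites a for loop whose body only yields the loop variable as a yield from delegation, which is what happens to KconfigDomain.get_objects() above. Both forms produce the same items; the option tuples below are placeholders:

options = {("CONFIG_FOO", "doc1"), ("CONFIG_BAR", "doc2")}

def get_objects_before():
    for obj in options:   # UP028: the loop adds nothing over delegation
        yield obj

def get_objects_after():
    yield from options

assert set(get_objects_before()) == set(get_objects_after()) == options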
doc/_extensions/zephyr/link-roles.py:

@@ -4,14 +4,14 @@
 # based on http://protips.readthedocs.io/link-roles.html

-from __future__ import print_function
-from __future__ import unicode_literals
 import re
 import subprocess
-from docutils import nodes
+from collections.abc import Sequence
 from pathlib import Path
+from typing import Any, Final

+from docutils import nodes
 from sphinx.util import logging
-from typing import Final

 ZEPHYR_BASE: Final[str] = Path(__file__).parents[3]

@@ -58,8 +58,17 @@ def setup(app):
 def modulelink(default_module=None, format="blob"):
-    def role(name, rawtext, text, lineno, inliner, options={}, content=[]):
-        # Set default values
+    def role(
+        name: str,
+        rawtext: str,
+        text: str,
+        lineno: int,
+        inliner,
+        options: dict[str, Any] | None = None,
+        content: Sequence[str] = (),
+    ):
+        if options is None:
+            options = {}
         module = default_module
         rev = get_github_rev()
         config = inliner.document.settings.env.app.config

@@ -110,8 +119,7 @@ def modulelink(default_module=None, format="blob"):
             if not any(
                 p.match(glob)
                 for glob in config.link_roles_manifest_project_broken_links_ignore_globs
-            ):
-                if not Path(ZEPHYR_BASE, link).exists():
-                    logger.warning(
-                        f"{link} not found in {config.link_roles_manifest_project} {trace}"
-                    )
+            ) and not Path(ZEPHYR_BASE, link).exists():
+                logger.warning(
+                    f"{link} not found in {config.link_roles_manifest_project} {trace}"
+                )
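B006 flags the mutable defaults options={} and content=[] in the old role() signature: default values are evaluated once, at function definition time, and then shared by every call. The new signature above uses None and an empty tuple, normalizing inside the body. A compact demonstration of why this matters, with made-up helper names:

def append_bad(item, bucket=[]):      # B006: one list shared across calls
    bucket.append(item)
    return bucket

def append_good(item, bucket=None):   # fix: fresh list per call
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

assert append_bad("a") == ["a"]
assert append_bad("b") == ["a", "b"]    # state leaks from the previous call
assert append_good("a") == ["a"]
assert append_good("b") == ["b"]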
doc/_extensions/zephyr/manifest_projects_table.py:

@@ -27,7 +27,7 @@ SPDX-License-Identifier: Apache-2.0
 """

 import re
-from typing import Any, Dict, List
+from typing import Any

 from docutils import nodes
 from docutils.parsers.rst import directives

@@ -35,7 +35,6 @@ from sphinx.application import Sphinx
 from sphinx.util.docutils import SphinxDirective
 from west.manifest import Manifest

-
 __version__ = "0.1.0"

@@ -67,7 +66,7 @@ class ManifestProjectsTable(SphinxDirective):
         return f"{base_url}/releases/tag/{rev}"

-    def run(self) -> List[nodes.Element]:
+    def run(self) -> list[nodes.Element]:
         active_filter = self.options.get("filter", None)

         manifest = Manifest.from_file(self.env.config.manifest_projects_table_manifest)

@@ -75,11 +74,14 @@ class ManifestProjectsTable(SphinxDirective):
         for project in manifest.projects:
             if project.name == "manifest":
                 continue
-            if active_filter == 'active' and manifest.is_active(project):
-                projects.append(project)
-            elif active_filter == 'inactive' and not manifest.is_active(project):
-                projects.append(project)
-            elif active_filter == 'all' or active_filter is None:
+            if (
+                active_filter == "active"
+                and manifest.is_active(project)
+                or active_filter == "inactive"
+                and not manifest.is_active(project)
+                or active_filter == "all"
+                or active_filter is None
+            ):
                 projects.append(project)

         # build table

@@ -129,7 +131,7 @@ class ManifestProjectsTable(SphinxDirective):
         return [table]


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.add_config_value("manifest_projects_table_manifest", None, "env")

     directives.register_directive("manifest-projects-table", ManifestProjectsTable)
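SIM114 merges if/elif branches whose bodies are identical, which is why the three projects.append(project) branches above collapse into one condition. Since and binds tighter than or, the merged expression keeps the original pairing without extra parentheses. The same logic in isolation, on plain boolean inputs:

def keep(active_filter, is_active):
    return bool(
        active_filter == "active" and is_active
        or active_filter == "inactive" and not is_active
        or active_filter == "all"
        or active_filter is None
    )

assert keep("active", True) and not keep("active", False)
assert keep("inactive", False) and not keep("inactive", True)
assert keep("all", False) and keep(None, True)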