mirror of
https://github.com/torvalds/linux.git
synced 2026-01-25 15:03:52 +08:00
Merge tag 'docs/v6.17-1' of git://git.kernel.org/pub/scm/linux/kernel/git/mchehab/linux-docs
Mauro Carvalho Chehab says: ==================== add a generic yaml parser integrated with Netlink specs generation - An YAML parser Sphinx plugin, integrated with Netlink YAML doc parser. The patch content is identical to my v10 submission: https://lore.kernel.org/cover.1753718185.git.mchehab+huawei@kernel.org * tag 'docs/v6.17-1' of git://git.kernel.org/pub/scm/linux/kernel/git/mchehab/linux-docs: sphinx: parser_yaml.py: fix line numbers information docs: parser_yaml.py: fix backward compatibility with old docutils docs: parser_yaml.py: add support for line numbers from the parser tools: netlink_yml_parser.py: add line numbers to parsed data MAINTAINERS: add netlink_yml_parser.py to linux-doc docs: netlink: remove obsolete .gitignore from unused directory tools: ynl_gen_rst.py: drop support for generating index files docs: uapi: netlink: update netlink specs link docs: use parser_yaml extension to handle Netlink specs docs: sphinx: add a parser for yaml files for Netlink specs tools: ynl_gen_rst.py: cleanup coding style docs: netlink: index.rst: add a netlink index file tools: ynl_gen_rst.py: Split library from command line tool docs: netlink: netlink-raw.rst: use :ref: instead of :doc: ==================== Link: https://patch.msgid.link/20250812113329.356c93c2@foz.lan Signed-off-by: Jakub Kicinski <kuba@kernel.org>
This commit is contained in:
@@ -104,22 +104,6 @@ quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(BUILDDIR)/$3/$4)
|
||||
cp $(if $(patsubst /%,,$(DOCS_CSS)),$(abspath $(srctree)/$(DOCS_CSS)),$(DOCS_CSS)) $(BUILDDIR)/$3/_static/; \
|
||||
fi
|
||||
|
||||
YNL_INDEX:=$(srctree)/Documentation/networking/netlink_spec/index.rst
|
||||
YNL_RST_DIR:=$(srctree)/Documentation/networking/netlink_spec
|
||||
YNL_YAML_DIR:=$(srctree)/Documentation/netlink/specs
|
||||
YNL_TOOL:=$(srctree)/tools/net/ynl/pyynl/ynl_gen_rst.py
|
||||
|
||||
YNL_RST_FILES_TMP := $(patsubst %.yaml,%.rst,$(wildcard $(YNL_YAML_DIR)/*.yaml))
|
||||
YNL_RST_FILES := $(patsubst $(YNL_YAML_DIR)%,$(YNL_RST_DIR)%, $(YNL_RST_FILES_TMP))
|
||||
|
||||
$(YNL_INDEX): $(YNL_RST_FILES)
|
||||
$(Q)$(YNL_TOOL) -o $@ -x
|
||||
|
||||
$(YNL_RST_DIR)/%.rst: $(YNL_YAML_DIR)/%.yaml $(YNL_TOOL)
|
||||
$(Q)$(YNL_TOOL) -i $< -o $@
|
||||
|
||||
htmldocs texinfodocs latexdocs epubdocs xmldocs: $(YNL_INDEX)
|
||||
|
||||
htmldocs:
|
||||
@$(srctree)/scripts/sphinx-pre-install --version-check
|
||||
@+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,html,$(var),,$(var)))
|
||||
@@ -186,7 +170,6 @@ refcheckdocs:
|
||||
$(Q)cd $(srctree);scripts/documentation-file-ref-check
|
||||
|
||||
cleandocs:
|
||||
$(Q)rm -f $(YNL_INDEX) $(YNL_RST_FILES)
|
||||
$(Q)rm -rf $(BUILDDIR)
|
||||
$(Q)$(MAKE) BUILDDIR=$(abspath $(BUILDDIR)) $(build)=Documentation/userspace-api/media clean
|
||||
|
||||
|
||||
@@ -42,6 +42,15 @@ exclude_patterns = []
|
||||
dyn_include_patterns = []
|
||||
dyn_exclude_patterns = ["output"]
|
||||
|
||||
# Currently, only netlink/specs has a parser for yaml.
|
||||
# Prefer using include patterns if available, as it is faster
|
||||
if has_include_patterns:
|
||||
dyn_include_patterns.append("netlink/specs/*.yaml")
|
||||
else:
|
||||
dyn_exclude_patterns.append("netlink/*.yaml")
|
||||
dyn_exclude_patterns.append("devicetree/bindings/**.yaml")
|
||||
dyn_exclude_patterns.append("core-api/kho/bindings/**.yaml")
|
||||
|
||||
# Properly handle include/exclude patterns
|
||||
# ----------------------------------------
|
||||
|
||||
@@ -102,12 +111,12 @@ extensions = [
|
||||
"kernel_include",
|
||||
"kfigure",
|
||||
"maintainers_include",
|
||||
"parser_yaml",
|
||||
"rstFlatTable",
|
||||
"sphinx.ext.autosectionlabel",
|
||||
"sphinx.ext.ifconfig",
|
||||
"translations",
|
||||
]
|
||||
|
||||
# Since Sphinx version 3, the C function parser is more pedantic with regards
|
||||
# to type checking. Due to that, having macros at c:function cause problems.
|
||||
# Those needed to be escaped by using c_id_attributes[] array
|
||||
@@ -204,10 +213,11 @@ else:
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["sphinx/templates"]
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
# source_suffix = ['.rst', '.md']
|
||||
source_suffix = '.rst'
|
||||
# The suffixes of source filenames that will be automatically parsed
|
||||
source_suffix = {
|
||||
".rst": "restructuredtext",
|
||||
".yaml": "yaml",
|
||||
}
|
||||
|
||||
# The encoding of source files.
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
13
Documentation/netlink/specs/index.rst
Normal file
13
Documentation/netlink/specs/index.rst
Normal file
@@ -0,0 +1,13 @@
|
||||
.. SPDX-License-Identifier: GPL-2.0
|
||||
|
||||
.. _specs:
|
||||
|
||||
=============================
|
||||
Netlink Family Specifications
|
||||
=============================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:glob:
|
||||
|
||||
*
|
||||
@@ -57,7 +57,7 @@ Contents:
|
||||
filter
|
||||
generic-hdlc
|
||||
generic_netlink
|
||||
netlink_spec/index
|
||||
../netlink/specs/index
|
||||
gen_stats
|
||||
gtp
|
||||
ila
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
*.rst
|
||||
@@ -1,4 +0,0 @@
|
||||
SPDX-License-Identifier: GPL-2.0
|
||||
|
||||
This file is populated during the build of the documentation (htmldocs) by the
|
||||
tools/net/ynl/pyynl/ynl_gen_rst.py script.
|
||||
123
Documentation/sphinx/parser_yaml.py
Executable file
123
Documentation/sphinx/parser_yaml.py
Executable file
@@ -0,0 +1,123 @@
|
||||
# SPDX-License-Identifier: GPL-2.0
|
||||
# Copyright 2025 Mauro Carvalho Chehab <mchehab+huawei@kernel.org>
|
||||
|
||||
"""
|
||||
Sphinx extension for processing YAML files
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pprint import pformat
|
||||
|
||||
from docutils import statemachine
|
||||
from docutils.parsers.rst import Parser as RSTParser
|
||||
from docutils.parsers.rst import states
|
||||
from docutils.statemachine import ViewList
|
||||
|
||||
from sphinx.util import logging
|
||||
from sphinx.parsers import Parser
|
||||
|
||||
srctree = os.path.abspath(os.environ["srctree"])
|
||||
sys.path.insert(0, os.path.join(srctree, "tools/net/ynl/pyynl/lib"))
|
||||
|
||||
from doc_generator import YnlDocGenerator # pylint: disable=C0413
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class YamlParser(Parser):
    """
    Kernel parser for YAML files.

    This is a simple sphinx.Parser to handle yaml files inside the
    Kernel tree that will be part of the built documentation.

    The actual YAML-to-ReST conversion is not contained here: the code
    was written in a way that parsing yaml for different subsystems
    can be done from a single dispatcher.

    All it takes to parse new kinds of YAML files is an import line:

        from some_parser_code import NewYamlGenerator

    in this module. Then add an instance of the parser with:

        new_parser = NewYamlGenerator()

    and add a logic inside parse() to handle it based on the path,
    like this:

        if "/foo" in fname:
            msg = self.new_parser.parse_yaml_file(fname)
    """

    # File extensions this parser registers for with Sphinx.
    supported = ('yaml', )

    # Converter that turns a netlink spec YAML file into ReST text.
    netlink_parser = YnlDocGenerator()

    # Matches the ".. LINENO <n>" markers emitted by the YAML converter.
    re_lineno = re.compile(r"\.\. LINENO ([0-9]+)$")

    tab_width = 8

    def rst_parse(self, inputstring, document, msg):
        """
        Receives a ReST content that was previously converted by the
        YAML parser, adding it to the document tree.

        The ".. LINENO <n>" markers embedded in *msg* are consumed here:
        they are stripped from the output, but update the source line
        reported for the lines that follow, so Sphinx diagnostics point
        at the original YAML file rather than the generated ReST.
        """

        self.setup_parse(inputstring, document)

        result = ViewList()

        self.statemachine = states.RSTStateMachine(state_classes=states.state_classes,
                                                   initial_state='Body',
                                                   debug=document.reporter.debug_flag)

        try:
            # Parse message with RSTParser
            lineoffset = 0

            lines = statemachine.string2lines(msg, self.tab_width,
                                              convert_whitespace=True)

            for line in lines:
                match = self.re_lineno.match(line)
                if match:
                    lineoffset = int(match.group(1))
                    continue

                result.append(line, document.current_source, lineoffset)

            self.statemachine.run(result, document)

        except Exception as e:
            # Report instead of raising, so one bad YAML file does not
            # abort the whole documentation build.
            document.reporter.error("YAML parsing error: %s" % pformat(e))

        self.finish_parse()

    # Overrides docutils.parsers.Parser. See sphinx.parsers.RSTParser
    def parse(self, inputstring, document):
        """Check if a YAML is meant to be parsed."""

        fname = document.current_source

        # Handle netlink yaml specs
        if "/netlink/specs/" in fname:
            msg = self.netlink_parser.parse_yaml_file(fname)
            self.rst_parse(inputstring, document, msg)

        # All other yaml files are ignored
|
||||
|
||||
def setup(app):
    """Setup function for the Sphinx extension."""

    # Register the YAML parser and let Sphinx pick up .yaml sources
    app.add_source_parser(YamlParser)
    app.add_source_suffix('.yaml', 'yaml')

    metadata = {
        'version': '1.0',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
    return metadata
|
||||
@@ -18,4 +18,4 @@ Netlink documentation for users.
|
||||
|
||||
See also:
|
||||
- :ref:`Documentation/core-api/netlink.rst <kernel_netlink>`
|
||||
- :ref:`Documentation/networking/netlink_spec/index.rst <specs>`
|
||||
- :ref:`Documentation/netlink/specs/index.rst <specs>`
|
||||
|
||||
@@ -62,8 +62,8 @@ Sub-messages
|
||||
------------
|
||||
|
||||
Several raw netlink families such as
|
||||
:doc:`rt-link<../../networking/netlink_spec/rt-link>` and
|
||||
:doc:`tc<../../networking/netlink_spec/tc>` use attribute nesting as an
|
||||
:ref:`rt-link<netlink-rt-link>` and
|
||||
:ref:`tc<netlink-tc>` use attribute nesting as an
|
||||
abstraction to carry module specific information.
|
||||
|
||||
Conceptually it looks as follows::
|
||||
@@ -162,7 +162,7 @@ then this is an error.
|
||||
Nested struct definitions
|
||||
-------------------------
|
||||
|
||||
Many raw netlink families such as :doc:`tc<../../networking/netlink_spec/tc>`
|
||||
Many raw netlink families such as :ref:`tc<netlink-tc>`
|
||||
make use of nested struct definitions. The ``netlink-raw`` schema makes it
|
||||
possible to embed a struct within a struct definition using the ``struct``
|
||||
property. For example, the following struct definition embeds the
|
||||
|
||||
@@ -15,7 +15,7 @@ kernel headers directly.
|
||||
Internally kernel uses the YAML specs to generate:
|
||||
|
||||
- the C uAPI header
|
||||
- documentation of the protocol as a ReST file - see :ref:`Documentation/networking/netlink_spec/index.rst <specs>`
|
||||
- documentation of the protocol as a ReST file - see :ref:`Documentation/netlink/specs/index.rst <specs>`
|
||||
- policy tables for input attribute validation
|
||||
- operation tables
|
||||
|
||||
|
||||
@@ -7307,6 +7307,7 @@ F: scripts/get_abi.py
|
||||
F: scripts/kernel-doc*
|
||||
F: scripts/lib/abi/*
|
||||
F: scripts/lib/kdoc/*
|
||||
F: tools/net/ynl/pyynl/lib/doc_generator.py
|
||||
F: scripts/sphinx-pre-install
|
||||
X: Documentation/ABI/
|
||||
X: Documentation/admin-guide/media/
|
||||
|
||||
@@ -4,6 +4,8 @@ from .nlspec import SpecAttr, SpecAttrSet, SpecEnumEntry, SpecEnumSet, \
|
||||
SpecFamily, SpecOperation, SpecSubMessage, SpecSubMessageFormat
|
||||
from .ynl import YnlFamily, Netlink, NlError
|
||||
|
||||
from .doc_generator import YnlDocGenerator
|
||||
|
||||
__all__ = ["SpecAttr", "SpecAttrSet", "SpecEnumEntry", "SpecEnumSet",
|
||||
"SpecFamily", "SpecOperation", "SpecSubMessage", "SpecSubMessageFormat",
|
||||
"YnlFamily", "Netlink", "NlError"]
|
||||
|
||||
398
tools/net/ynl/pyynl/lib/doc_generator.py
Normal file
398
tools/net/ynl/pyynl/lib/doc_generator.py
Normal file
@@ -0,0 +1,398 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
|
||||
"""
|
||||
Class to auto generate the documentation for Netlink specifications.
|
||||
|
||||
:copyright: Copyright (C) 2023 Breno Leitao <leitao@debian.org>
|
||||
:license: GPL Version 2, June 1991 see linux/COPYING for details.
|
||||
|
||||
This class performs extensive parsing to the Linux kernel's netlink YAML
|
||||
spec files, in an effort to avoid needing to heavily mark up the original
|
||||
YAML file.
|
||||
|
||||
This code is split in two classes:
|
||||
1) RST formatters: Use to convert a string to a RST output
|
||||
2) YAML Netlink (YNL) doc generator: Generate docs from YAML data
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List
|
||||
import yaml
|
||||
|
||||
LINE_STR = '__lineno__'
|
||||
|
||||
class NumberedSafeLoader(yaml.SafeLoader):  # pylint: disable=R0901
    """SafeLoader subclass that records the source line of each mapping."""

    def construct_mapping(self, node, *args, **kwargs):
        """Build the mapping as usual, then stash where it started."""
        data = super().construct_mapping(node, *args, **kwargs)
        # node.start_mark.line is the (0-based) line of the mapping start
        data[LINE_STR] = node.start_mark.line

        return data
|
||||
|
||||
class RstFormatters:
    """Helpers that emit small fragments of ReST markup."""

    # Number of spaces added per indentation level
    SPACE_PER_LEVEL = 4

    @staticmethod
    def headroom(level: int) -> str:
        """Return the indentation string for the given nesting level"""
        return " " * (level * RstFormatters.SPACE_PER_LEVEL)

    @staticmethod
    def bold(text: str) -> str:
        """Wrap text in ReST bold markup"""
        return f"**{text}**"

    @staticmethod
    def inline(text: str) -> str:
        """Wrap text in ReST inline-literal markup"""
        return f"``{text}``"

    @staticmethod
    def sanitize(text: str) -> str:
        """Collapse newlines and trim surrounding whitespace"""
        # Useful for fields whose value spans several YAML lines
        return str(text).replace("\n", " ").strip()

    def rst_fields(self, key: str, value: str, level: int = 0) -> str:
        """Emit one ReST field-list entry"""
        return self.headroom(level) + f":{key}: {value}"

    def rst_definition(self, key: str, value: Any, level: int = 0) -> str:
        """Emit one ReST definition-list entry"""
        term = self.headroom(level) + key
        body = self.headroom(level + 1) + str(value)
        return term + "\n" + body

    def rst_paragraph(self, paragraph: str, level: int = 0) -> str:
        """Emit an indented paragraph"""
        return self.headroom(level) + paragraph

    def rst_bullet(self, item: str, level: int = 0) -> str:
        """Emit one bullet-list item"""
        return self.headroom(level) + f"- {item}"

    @staticmethod
    def rst_subsection(title: str) -> str:
        """Emit a sub-section heading (underlined with '-')"""
        return f"{title}\n" + "-" * len(title)

    @staticmethod
    def rst_subsubsection(title: str) -> str:
        """Emit a sub-sub-section heading (underlined with '~')"""
        return f"{title}\n" + "~" * len(title)

    @staticmethod
    def rst_section(namespace: str, prefix: str, title: str) -> str:
        """Emit a labeled section heading (underlined with '=')"""
        return f".. _{namespace}-{prefix}-{title}:\n\n{title}\n" + "=" * len(title)

    @staticmethod
    def rst_subtitle(title: str) -> str:
        """Emit a subtitle (over- and underlined with '-')"""
        bar = "-" * len(title)
        return f"\n{bar}\n{title}\n{bar}\n\n"

    @staticmethod
    def rst_title(title: str) -> str:
        """Emit a document title (over- and underlined with '=')"""
        bar = "=" * len(title)
        return f"{bar}\n{title}\n{bar}\n\n"

    def rst_list_inline(self, list_: List[str], level: int = 0) -> str:
        """Emit a bracketed, comma-separated list of inline literals"""
        items = ", ".join(self.inline(i) for i in list_)
        return self.headroom(level) + "[" + items + "]"

    @staticmethod
    def rst_ref(namespace: str, prefix: str, name: str) -> str:
        """Emit a cross-reference, normalizing the prefix first"""
        # Several spec keys share one label namespace in the output
        mappings = {'enum': 'definition',
                    'fixed-header': 'definition',
                    'nested-attributes': 'attribute-set',
                    'struct': 'definition'}
        prefix = mappings.get(prefix, prefix)
        return f":ref:`{namespace}-{prefix}-{name}`"

    def rst_header(self) -> str:
        """The boilerplate header shared by all auto generated RST files"""
        parts = [
            self.rst_paragraph(".. SPDX-License-Identifier: GPL-2.0"),
            self.rst_paragraph(".. NOTE: This document was auto-generated.\n\n"),
        ]
        return "\n".join(parts)

    @staticmethod
    def rst_toctree(maxdepth: int = 2) -> str:
        """Emit a toctree directive"""
        parts = [".. toctree::",
                 f" :maxdepth: {maxdepth}\n\n"]
        return "\n".join(parts)

    @staticmethod
    def rst_label(title: str) -> str:
        """Emit an anchor label"""
        return f".. _{title}:\n\n"

    @staticmethod
    def rst_lineno(lineno: int) -> str:
        """Emit a LINENO marker, consumed by the Sphinx yaml parser"""
        return f".. LINENO {lineno}\n"
|
||||
|
||||
class YnlDocGenerator:
    """YAML Netlink specs Parser.

    Walks the data parsed from a netlink YAML spec and renders it as a
    ReST document.  The ``LINE_STR`` keys injected by NumberedSafeLoader
    are turned into ``.. LINENO <n>`` markers, so the Sphinx yaml parser
    can map diagnostics back to the original YAML file.
    """

    fmt = RstFormatters()

    def parse_mcast_group(self, mcast_group: List[Dict[str, Any]]) -> str:
        """Parse 'multicast' group list and return a formatted string"""
        lines = []
        for group in mcast_group:
            lines.append(self.fmt.rst_bullet(group["name"]))

        return "\n".join(lines)

    def parse_do(self, do_dict: Dict[str, Any], level: int = 0) -> str:
        """Parse 'do' section and return a formatted string"""
        lines = []
        if LINE_STR in do_dict:
            lines.append(self.fmt.rst_lineno(do_dict[LINE_STR]))

        for key in do_dict.keys():
            if key == LINE_STR:
                continue
            lines.append(self.fmt.rst_paragraph(self.fmt.bold(key), level + 1))
            # 'request'/'reply' carry attribute lists; anything else is text
            if key in ['request', 'reply']:
                lines.append(self.parse_do_attributes(do_dict[key], level + 1) + "\n")
            else:
                lines.append(self.fmt.headroom(level + 2) + do_dict[key] + "\n")

        return "\n".join(lines)

    def parse_do_attributes(self, attrs: Dict[str, Any], level: int = 0) -> str:
        """Parse 'attributes' section"""
        if "attributes" not in attrs:
            return ""
        lines = [self.fmt.rst_fields("attributes",
                                     self.fmt.rst_list_inline(attrs["attributes"]),
                                     level + 1)]

        return "\n".join(lines)

    def parse_operations(self, operations: List[Dict[str, Any]], namespace: str) -> str:
        """Parse operations block"""
        # Fields rendered explicitly below, not as generic field entries
        preprocessed = ["name", "doc", "title", "do", "dump", "flags"]
        # Fields rendered as cross-references
        linkable = ["fixed-header", "attribute-set"]
        lines = []

        for operation in operations:
            if LINE_STR in operation:
                lines.append(self.fmt.rst_lineno(operation[LINE_STR]))

            lines.append(self.fmt.rst_section(namespace, 'operation',
                                              operation["name"]))
            lines.append(self.fmt.rst_paragraph(operation["doc"]) + "\n")

            for key in operation.keys():
                if key == LINE_STR:
                    continue

                if key in preprocessed:
                    # Skip the special fields
                    continue
                value = operation[key]
                if key in linkable:
                    value = self.fmt.rst_ref(namespace, key, value)
                lines.append(self.fmt.rst_fields(key, value, 0))
            if 'flags' in operation:
                lines.append(self.fmt.rst_fields('flags',
                                                 self.fmt.rst_list_inline(operation['flags'])))

            if "do" in operation:
                lines.append(self.fmt.rst_paragraph(":do:", 0))
                lines.append(self.parse_do(operation["do"], 0))
            if "dump" in operation:
                lines.append(self.fmt.rst_paragraph(":dump:", 0))
                lines.append(self.parse_do(operation["dump"], 0))

            # New line after fields
            lines.append("\n")

        return "\n".join(lines)

    def parse_entries(self, entries: List[Dict[str, Any]], level: int) -> str:
        """Parse a list of entries"""
        ignored = ["pad"]
        lines = []
        for entry in entries:
            if isinstance(entry, dict):
                # entries could be a list or a dictionary
                field_name = entry.get("name", "")
                if field_name in ignored:
                    continue
                type_ = entry.get("type")
                if type_:
                    field_name += f" ({self.fmt.inline(type_)})"
                lines.append(
                    self.fmt.rst_fields(field_name,
                                        self.fmt.sanitize(entry.get("doc", "")),
                                        level)
                )
            elif isinstance(entry, list):
                lines.append(self.fmt.rst_list_inline(entry, level))
            else:
                lines.append(self.fmt.rst_bullet(self.fmt.inline(self.fmt.sanitize(entry)),
                                                 level))

        lines.append("\n")
        return "\n".join(lines)

    # NOTE: annotation fixed from Dict[...] — callers pass the list of
    # definition dicts and this method iterates it as such.
    def parse_definitions(self, defs: List[Dict[str, Any]], namespace: str) -> str:
        """Parse definitions section"""
        preprocessed = ["name", "entries", "members"]
        ignored = ["render-max"]  # This is not printed
        lines = []

        for definition in defs:
            if LINE_STR in definition:
                lines.append(self.fmt.rst_lineno(definition[LINE_STR]))

            lines.append(self.fmt.rst_section(namespace, 'definition', definition["name"]))
            for k in definition.keys():
                if k == LINE_STR:
                    continue
                if k in preprocessed + ignored:
                    continue
                lines.append(self.fmt.rst_fields(k, self.fmt.sanitize(definition[k]), 0))

            # Field list needs to finish with a new line
            lines.append("\n")
            if "entries" in definition:
                lines.append(self.fmt.rst_paragraph(":entries:", 0))
                lines.append(self.parse_entries(definition["entries"], 1))
            if "members" in definition:
                lines.append(self.fmt.rst_paragraph(":members:", 0))
                lines.append(self.parse_entries(definition["members"], 1))

        return "\n".join(lines)

    def parse_attr_sets(self, entries: List[Dict[str, Any]], namespace: str) -> str:
        """Parse attribute from attribute-set"""
        preprocessed = ["name", "type"]
        linkable = ["enum", "nested-attributes", "struct", "sub-message"]
        ignored = ["checks"]
        lines = []

        for entry in entries:
            lines.append(self.fmt.rst_section(namespace, 'attribute-set',
                                              entry["name"]))
            for attr in entry["attributes"]:
                if LINE_STR in attr:
                    lines.append(self.fmt.rst_lineno(attr[LINE_STR]))

                type_ = attr.get("type")
                attr_line = attr["name"]
                if type_:
                    # Add the attribute type in the same line
                    attr_line += f" ({self.fmt.inline(type_)})"

                lines.append(self.fmt.rst_subsubsection(attr_line))

                for k in attr.keys():
                    if k == LINE_STR:
                        continue
                    if k in preprocessed + ignored:
                        continue
                    if k in linkable:
                        value = self.fmt.rst_ref(namespace, k, attr[k])
                    else:
                        value = self.fmt.sanitize(attr[k])
                    lines.append(self.fmt.rst_fields(k, value, 0))
                lines.append("\n")

        return "\n".join(lines)

    def parse_sub_messages(self, entries: List[Dict[str, Any]], namespace: str) -> str:
        """Parse sub-message definitions"""
        lines = []

        for entry in entries:
            lines.append(self.fmt.rst_section(namespace, 'sub-message',
                                              entry["name"]))
            for fmt in entry["formats"]:
                value = fmt["value"]

                lines.append(self.fmt.rst_bullet(self.fmt.bold(value)))
                for attr in ['fixed-header', 'attribute-set']:
                    if attr in fmt:
                        lines.append(self.fmt.rst_fields(attr,
                                                         self.fmt.rst_ref(namespace,
                                                                          attr,
                                                                          fmt[attr]),
                                                         1))
            lines.append("\n")

        return "\n".join(lines)

    def parse_yaml(self, obj: Dict[str, Any]) -> str:
        """Format the whole YAML into a RST string"""
        lines = []

        # Main header
        # Use the LINE_STR constant, not a hard-coded '__lineno__' literal,
        # to stay in sync with NumberedSafeLoader
        lineno = obj.get(LINE_STR, 0)
        lines.append(self.fmt.rst_lineno(lineno))

        family = obj['name']

        lines.append(self.fmt.rst_header())
        lines.append(self.fmt.rst_label("netlink-" + family))

        title = f"Family ``{family}`` netlink specification"
        lines.append(self.fmt.rst_title(title))
        lines.append(self.fmt.rst_paragraph(".. contents:: :depth: 3\n"))

        if "doc" in obj:
            lines.append(self.fmt.rst_subtitle("Summary"))
            lines.append(self.fmt.rst_paragraph(obj["doc"], 0))

        # Operations
        if "operations" in obj:
            lines.append(self.fmt.rst_subtitle("Operations"))
            lines.append(self.parse_operations(obj["operations"]["list"],
                                               family))

        # Multicast groups
        if "mcast-groups" in obj:
            lines.append(self.fmt.rst_subtitle("Multicast groups"))
            lines.append(self.parse_mcast_group(obj["mcast-groups"]["list"]))

        # Definitions
        if "definitions" in obj:
            lines.append(self.fmt.rst_subtitle("Definitions"))
            lines.append(self.parse_definitions(obj["definitions"], family))

        # Attributes set
        if "attribute-sets" in obj:
            lines.append(self.fmt.rst_subtitle("Attribute sets"))
            lines.append(self.parse_attr_sets(obj["attribute-sets"], family))

        # Sub-messages
        if "sub-messages" in obj:
            lines.append(self.fmt.rst_subtitle("Sub-messages"))
            lines.append(self.parse_sub_messages(obj["sub-messages"], family))

        return "\n".join(lines)

    # Main functions
    # ==============

    def parse_yaml_file(self, filename: str) -> str:
        """Transform the YAML specified by filename into an RST-formatted string"""
        with open(filename, "r", encoding="utf-8") as spec_file:
            numbered_yaml = yaml.load(spec_file, Loader=NumberedSafeLoader)
            content = self.parse_yaml(numbered_yaml)

        return content
|
||||
@@ -10,353 +10,17 @@
|
||||
|
||||
This script performs extensive parsing to the Linux kernel's netlink YAML
|
||||
spec files, in an effort to avoid needing to heavily mark up the original
|
||||
YAML file.
|
||||
|
||||
This code is split in three big parts:
|
||||
1) RST formatters: Use to convert a string to a RST output
|
||||
2) Parser helpers: Functions to parse the YAML data structure
|
||||
3) Main function and small helpers
|
||||
YAML file. It uses the library code from scripts/lib.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List
|
||||
import os.path
|
||||
import pathlib
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import yaml
|
||||
|
||||
|
||||
SPACE_PER_LEVEL = 4
|
||||
|
||||
|
||||
# RST Formatters
|
||||
# ==============
|
||||
def headroom(level: int) -> str:
|
||||
"""Return space to format"""
|
||||
return " " * (level * SPACE_PER_LEVEL)
|
||||
|
||||
|
||||
def bold(text: str) -> str:
|
||||
"""Format bold text"""
|
||||
return f"**{text}**"
|
||||
|
||||
|
||||
def inline(text: str) -> str:
|
||||
"""Format inline text"""
|
||||
return f"``{text}``"
|
||||
|
||||
|
||||
def sanitize(text: str) -> str:
|
||||
"""Remove newlines and multiple spaces"""
|
||||
# This is useful for some fields that are spread across multiple lines
|
||||
return str(text).replace("\n", " ").strip()
|
||||
|
||||
|
||||
def rst_fields(key: str, value: str, level: int = 0) -> str:
|
||||
"""Return a RST formatted field"""
|
||||
return headroom(level) + f":{key}: {value}"
|
||||
|
||||
|
||||
def rst_definition(key: str, value: Any, level: int = 0) -> str:
|
||||
"""Format a single rst definition"""
|
||||
return headroom(level) + key + "\n" + headroom(level + 1) + str(value)
|
||||
|
||||
|
||||
def rst_paragraph(paragraph: str, level: int = 0) -> str:
|
||||
"""Return a formatted paragraph"""
|
||||
return headroom(level) + paragraph
|
||||
|
||||
|
||||
def rst_bullet(item: str, level: int = 0) -> str:
|
||||
"""Return a formatted a bullet"""
|
||||
return headroom(level) + f"- {item}"
|
||||
|
||||
|
||||
def rst_subsection(title: str) -> str:
|
||||
"""Add a sub-section to the document"""
|
||||
return f"{title}\n" + "-" * len(title)
|
||||
|
||||
|
||||
def rst_subsubsection(title: str) -> str:
|
||||
"""Add a sub-sub-section to the document"""
|
||||
return f"{title}\n" + "~" * len(title)
|
||||
|
||||
|
||||
def rst_section(namespace: str, prefix: str, title: str) -> str:
|
||||
"""Add a section to the document"""
|
||||
return f".. _{namespace}-{prefix}-{title}:\n\n{title}\n" + "=" * len(title)
|
||||
|
||||
|
||||
def rst_subtitle(title: str) -> str:
|
||||
"""Add a subtitle to the document"""
|
||||
return "\n" + "-" * len(title) + f"\n{title}\n" + "-" * len(title) + "\n\n"
|
||||
|
||||
|
||||
def rst_title(title: str) -> str:
|
||||
"""Add a title to the document"""
|
||||
return "=" * len(title) + f"\n{title}\n" + "=" * len(title) + "\n\n"
|
||||
|
||||
|
||||
def rst_list_inline(list_: List[str], level: int = 0) -> str:
|
||||
"""Format a list using inlines"""
|
||||
return headroom(level) + "[" + ", ".join(inline(i) for i in list_) + "]"
|
||||
|
||||
|
||||
def rst_ref(namespace: str, prefix: str, name: str) -> str:
|
||||
"""Add a hyperlink to the document"""
|
||||
mappings = {'enum': 'definition',
|
||||
'fixed-header': 'definition',
|
||||
'nested-attributes': 'attribute-set',
|
||||
'struct': 'definition'}
|
||||
if prefix in mappings:
|
||||
prefix = mappings[prefix]
|
||||
return f":ref:`{namespace}-{prefix}-{name}`"
|
||||
|
||||
|
||||
def rst_header() -> str:
|
||||
"""The headers for all the auto generated RST files"""
|
||||
lines = []
|
||||
|
||||
lines.append(rst_paragraph(".. SPDX-License-Identifier: GPL-2.0"))
|
||||
lines.append(rst_paragraph(".. NOTE: This document was auto-generated.\n\n"))
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def rst_toctree(maxdepth: int = 2) -> str:
|
||||
"""Generate a toctree RST primitive"""
|
||||
lines = []
|
||||
|
||||
lines.append(".. toctree::")
|
||||
lines.append(f" :maxdepth: {maxdepth}\n\n")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def rst_label(title: str) -> str:
|
||||
"""Return a formatted label"""
|
||||
return f".. _{title}:\n\n"
|
||||
|
||||
|
||||
# Parsers
|
||||
# =======
|
||||
|
||||
|
||||
def parse_mcast_group(mcast_group: List[Dict[str, Any]]) -> str:
|
||||
"""Parse 'multicast' group list and return a formatted string"""
|
||||
lines = []
|
||||
for group in mcast_group:
|
||||
lines.append(rst_bullet(group["name"]))
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_do(do_dict: Dict[str, Any], level: int = 0) -> str:
|
||||
"""Parse 'do' section and return a formatted string"""
|
||||
lines = []
|
||||
for key in do_dict.keys():
|
||||
lines.append(rst_paragraph(bold(key), level + 1))
|
||||
if key in ['request', 'reply']:
|
||||
lines.append(parse_do_attributes(do_dict[key], level + 1) + "\n")
|
||||
else:
|
||||
lines.append(headroom(level + 2) + do_dict[key] + "\n")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_do_attributes(attrs: Dict[str, Any], level: int = 0) -> str:
|
||||
"""Parse 'attributes' section"""
|
||||
if "attributes" not in attrs:
|
||||
return ""
|
||||
lines = [rst_fields("attributes", rst_list_inline(attrs["attributes"]), level + 1)]
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_operations(operations: List[Dict[str, Any]], namespace: str) -> str:
    """Parse operations block.

    Each operation becomes a section with its doc paragraph, a field list of
    the remaining keys, and expanded 'do'/'dump' sub-sections.
    """
    # Keys rendered specially below, so skipped in the generic field loop.
    special = ("name", "doc", "title", "do", "dump", "flags")
    # Keys whose values become cross-references instead of plain text.
    as_ref = ("fixed-header", "attribute-set")
    out = []

    for op in operations:
        out.append(rst_section(namespace, 'operation', op["name"]))
        out.append(rst_paragraph(op["doc"]) + "\n")

        for field in op.keys():
            if field in special:
                continue
            rendered = op[field]
            if field in as_ref:
                rendered = rst_ref(namespace, field, rendered)
            out.append(rst_fields(field, rendered, 0))

        if 'flags' in op:
            out.append(rst_fields('flags', rst_list_inline(op['flags'])))

        # 'do' and 'dump' share the same inner layout.
        for verb in ("do", "dump"):
            if verb in op:
                out.append(rst_paragraph(f":{verb}:", 0))
                out.append(parse_do(op[verb], 0))

        # New line after fields
        out.append("\n")

    return "\n".join(out)
|
||||
|
||||
|
||||
def parse_entries(entries: List[Dict[str, Any]], level: int) -> str:
    """Parse a list of entries.

    An entry may be a dict (named field), a list (rendered inline) or a
    scalar (rendered as a bullet). Padding fields are dropped.
    """
    out = []
    for item in entries:
        if isinstance(item, dict):
            # entries could be a list or a dictionary
            label = item.get("name", "")
            if label == "pad":
                continue
            entry_type = item.get("type")
            if entry_type:
                label += f" ({inline(entry_type)})"
            out.append(rst_fields(label, sanitize(item.get("doc", "")), level))
        elif isinstance(item, list):
            out.append(rst_list_inline(item, level))
        else:
            out.append(rst_bullet(inline(sanitize(item)), level))

    out.append("\n")
    return "\n".join(out)
|
||||
|
||||
|
||||
def parse_definitions(defs: Dict[str, Any], namespace: str) -> str:
    """Parse definitions section."""
    # 'name', 'entries', 'members' are rendered specially; 'render-max' is
    # machine-only metadata and is not printed at all.
    skipped = ("name", "entries", "members", "render-max")
    out = []

    for definition in defs:
        out.append(rst_section(namespace, 'definition', definition["name"]))
        for field in definition.keys():
            if field in skipped:
                continue
            out.append(rst_fields(field, sanitize(definition[field]), 0))

        # Field list needs to finish with a new line
        out.append("\n")
        for sub in ("entries", "members"):
            if sub in definition:
                out.append(rst_paragraph(f":{sub}:", 0))
                out.append(parse_entries(definition[sub], 1))

    return "\n".join(out)
|
||||
|
||||
|
||||
def parse_attr_sets(entries: List[Dict[str, Any]], namespace: str) -> str:
    """Parse attribute from attribute-set."""
    # 'name'/'type' are folded into the heading; 'checks' is not printed.
    skipped = ("name", "type", "checks")
    # Keys whose values become cross-references instead of plain text.
    as_ref = ("enum", "nested-attributes", "struct", "sub-message")
    out = []

    for attr_set in entries:
        out.append(rst_section(namespace, 'attribute-set', attr_set["name"]))
        for attr in attr_set["attributes"]:
            heading = attr["name"]
            attr_type = attr.get("type")
            if attr_type:
                # Add the attribute type in the same line
                heading += f" ({inline(attr_type)})"

            out.append(rst_subsubsection(heading))

            for field in attr.keys():
                if field in skipped:
                    continue
                if field in as_ref:
                    rendered = rst_ref(namespace, field, attr[field])
                else:
                    rendered = sanitize(attr[field])
                out.append(rst_fields(field, rendered, 0))
            out.append("\n")

    return "\n".join(out)
|
||||
|
||||
|
||||
def parse_sub_messages(entries: List[Dict[str, Any]], namespace: str) -> str:
    """Parse sub-message definitions."""
    out = []

    for sub_msg in entries:
        out.append(rst_section(namespace, 'sub-message', sub_msg["name"]))
        for fmt in sub_msg["formats"]:
            out.append(rst_bullet(bold(fmt["value"])))
            # Only these two format keys are linkable; render them as refs.
            for key in ('fixed-header', 'attribute-set'):
                if key in fmt:
                    ref = rst_ref(namespace, key, fmt[key])
                    out.append(rst_fields(key, ref, 1))
            out.append("\n")

    return "\n".join(out)
|
||||
|
||||
|
||||
def parse_yaml(obj: Dict[str, Any]) -> str:
    """Format the whole YAML into a RST string."""
    family = obj['name']

    # Main header
    out = [rst_header()]
    out.append(rst_title(f"Family ``{family}`` netlink specification"))
    out.append(rst_paragraph(".. contents:: :depth: 3\n"))

    # Optional top-level sections, rendered in this fixed output order.
    sections = [
        ("doc", "Summary",
         lambda v: rst_paragraph(v, 0)),
        ("operations", "Operations",
         lambda v: parse_operations(v["list"], family)),
        ("mcast-groups", "Multicast groups",
         lambda v: parse_mcast_group(v["list"])),
        ("definitions", "Definitions",
         lambda v: parse_definitions(v, family)),
        ("attribute-sets", "Attribute sets",
         lambda v: parse_attr_sets(v, family)),
        ("sub-messages", "Sub-messages",
         lambda v: parse_sub_messages(v, family)),
    ]
    for key, title, render in sections:
        if key in obj:
            out.append(rst_subtitle(title))
            out.append(render(obj[key]))

    return "\n".join(out)
|
||||
|
||||
|
||||
# Main functions
|
||||
# ==============
|
||||
|
||||
sys.path.append(pathlib.Path(__file__).resolve().parent.as_posix())
|
||||
from lib import YnlDocGenerator # pylint: disable=C0413
|
||||
|
||||
def parse_arguments() -> argparse.Namespace:
|
||||
"""Parse arguments from user"""
|
||||
@@ -367,9 +31,6 @@ def parse_arguments() -> argparse.Namespace:
|
||||
|
||||
# Index and input are mutually exclusive
|
||||
group = parser.add_mutually_exclusive_group()
|
||||
group.add_argument(
|
||||
"-x", "--index", action="store_true", help="Generate the index page"
|
||||
)
|
||||
group.add_argument("-i", "--input", help="YAML file name")
|
||||
|
||||
args = parser.parse_args()
|
||||
@@ -391,15 +52,6 @@ def parse_arguments() -> argparse.Namespace:
|
||||
return args
|
||||
|
||||
|
||||
def parse_yaml_file(filename: str) -> str:
    """Transform the YAML specified by filename into an RST-formatted string."""
    with open(filename, "r", encoding="utf-8") as spec_file:
        spec = yaml.safe_load(spec_file)
    # The file is fully loaded above; rendering needs no open handle.
    return parse_yaml(spec)
|
||||
|
||||
|
||||
def write_to_rstfile(content: str, filename: str) -> None:
|
||||
"""Write the generated content into an RST file"""
|
||||
logging.debug("Saving RST file to %s", filename)
|
||||
@@ -408,35 +60,17 @@ def write_to_rstfile(content: str, filename: str) -> None:
|
||||
rst_file.write(content)
|
||||
|
||||
|
||||
def generate_main_index_rst(output: str) -> None:
    """Generate the `networking_spec/index` content and write to the file."""
    parts = [
        rst_header(),
        rst_label("specs"),
        rst_title("Netlink Family Specifications"),
        rst_toctree(1),
    ]

    index_dir = os.path.dirname(output)
    logging.debug("Looking for .rst files in %s", index_dir)
    # Every generated per-family page becomes a toctree entry; the index
    # itself and non-RST files are skipped.
    for name in sorted(os.listdir(index_dir)):
        if name.endswith(".rst") and name != "index.rst":
            parts.append(f" {name.replace('.rst', '')}\n")

    logging.debug("Writing an index file at %s", output)
    write_to_rstfile("".join(parts), output)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Main function that reads the YAML files and generates the RST files"""
|
||||
|
||||
args = parse_arguments()
|
||||
|
||||
parser = YnlDocGenerator()
|
||||
|
||||
if args.input:
|
||||
logging.debug("Parsing %s", args.input)
|
||||
try:
|
||||
content = parse_yaml_file(os.path.join(args.input))
|
||||
content = parser.parse_yaml_file(os.path.join(args.input))
|
||||
except Exception as exception:
|
||||
logging.warning("Failed to parse %s.", args.input)
|
||||
logging.warning(exception)
|
||||
@@ -444,10 +78,6 @@ def main() -> None:
|
||||
|
||||
write_to_rstfile(content, args.output)
|
||||
|
||||
if args.index:
|
||||
# Generate the index RST file
|
||||
generate_main_index_rst(args.output)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
Reference in New Issue
Block a user