Adding upstream version 1:10.0.2+ds.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
This commit is contained in:
parent
bf2768bd0f
commit
ea34ddeea6
37998 changed files with 9510514 additions and 0 deletions
230
docs/sphinx/compat.py
Normal file
230
docs/sphinx/compat.py
Normal file
|
@ -0,0 +1,230 @@
|
|||
"""
|
||||
Sphinx cross-version compatibility goop
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
Optional,
|
||||
Type,
|
||||
)
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node, Text
|
||||
from docutils.statemachine import StringList
|
||||
|
||||
import sphinx
|
||||
from sphinx import addnodes, util
|
||||
from sphinx.directives import ObjectDescription
|
||||
from sphinx.environment import BuildEnvironment
|
||||
from sphinx.roles import XRefRole
|
||||
from sphinx.util import docfields
|
||||
from sphinx.util.docutils import (
|
||||
ReferenceRole,
|
||||
SphinxDirective,
|
||||
switch_source_input,
|
||||
)
|
||||
from sphinx.util.typing import TextlikeNode
|
||||
|
||||
|
||||
# Sphinx < 4.1.0 does not route Field.make_xref through the domain's
# XRefRole; when True we patch in _compat_make_xref (defined below).
MAKE_XREF_WORKAROUND = sphinx.version_info[:3] < (4, 1, 0)


# Node factories that differ between Sphinx major versions; annotated
# here so both branches below type-check against one signature.
SpaceNode: Callable[[str], Node]
KeywordNode: Callable[[str, str], Node]

if sphinx.version_info[:3] >= (4, 0, 0):
    # Sphinx 4.x+ grew dedicated signature-node classes.
    SpaceNode = addnodes.desc_sig_space
    KeywordNode = addnodes.desc_sig_keyword
else:
    # Older Sphinx: fall back to plain text / generic annotation nodes.
    SpaceNode = Text
    KeywordNode = addnodes.desc_annotation
|
||||
|
||||
|
||||
def nested_parse_with_titles(
    directive: SphinxDirective, content_node: Element
) -> None:
    """
    This helper preserves error parsing context across sphinx versions.

    :param directive: The directive whose content should be parsed.
    :param content_node: Node that receives the parsed children.
    """

    # necessary so that the child nodes get the right source/line set
    content_node.document = directive.state.document

    try:
        # Modern sphinx (6.2.0+) supports proper offsetting for
        # nested parse error context management
        util.nodes.nested_parse_with_titles(
            directive.state,
            directive.content,
            content_node,
            content_offset=directive.content_offset,
        )
    except TypeError:
        # No content_offset argument. Fall back to SSI method.
        # (switch_source_input rewrites source/line info instead.)
        with switch_source_input(directive.state, directive.content):
            util.nodes.nested_parse_with_titles(
                directive.state, directive.content, content_node
            )
|
||||
|
||||
|
||||
# ###########################################
|
||||
# xref compatibility hacks for Sphinx < 4.1 #
|
||||
# ###########################################
|
||||
|
||||
# When we require >= Sphinx 4.1, the following function and the
|
||||
# subsequent 3 compatibility classes can be removed. Anywhere in
|
||||
# qapi_domain that uses one of these Compat* types can be switched to
|
||||
# using the garden-variety lib-provided classes with no trickery.
|
||||
|
||||
|
||||
def _compat_make_xref( # pylint: disable=unused-argument
    self: sphinx.util.docfields.Field,
    rolename: str,
    domain: str,
    target: str,
    innernode: Type[TextlikeNode] = addnodes.literal_emphasis,
    contnode: Optional[Node] = None,
    env: Optional[BuildEnvironment] = None,
    # inliner/location are accepted (and ignored — hence the pragma above),
    # presumably to mirror the modern make_xref signature; confirm on bump.
    inliner: Any = None,
    location: Any = None,
) -> Node:
    """
    Compatibility workaround for Sphinx versions prior to 4.1.0.

    Older sphinx versions do not use the domain's XRefRole for parsing
    and formatting cross-references, so we need to perform this magick
    ourselves to avoid needing to write the parser/formatter in two
    separate places.

    This workaround isn't brick-for-brick compatible with modern Sphinx
    versions, because we do not have access to the parent directive's
    state during this parsing like we do in more modern versions.

    It's no worse than what pre-Sphinx 4.1.0 does, so... oh well!
    """

    # Yes, this function is gross. Pre-4.1 support is a miracle.
    # pylint: disable=too-many-locals

    assert env
    # Note: Sphinx's own code ignores the type warning here, too.
    if not rolename:
        return contnode or innernode(target, target)  # type: ignore[call-arg]

    # Get the role instance, but don't *execute it* - we lack the
    # correct state to do so. Instead, we'll just use its public
    # methods to do our reference formatting, and emulate the rest.
    role = env.get_domain(domain).roles[rolename]
    assert isinstance(role, XRefRole)

    # XRefRole features not supported by this compatibility shim;
    # these were not supported in Sphinx 3.x either, so nothing of
    # value is really lost.
    assert not target.startswith("!")
    assert not re.match(ReferenceRole.explicit_title_re, target)
    assert not role.lowercase
    assert not role.fix_parens

    # Code below based mostly on sphinx.roles.XRefRole; run() and
    # create_xref_node()
    options = {
        "refdoc": env.docname,
        "refdomain": domain,
        "reftype": rolename,
        "refexplicit": False,
        "refwarn": role.warn_dangling,
    }
    refnode = role.nodeclass(target, **options)
    # process_link may rewrite both the display title and the target.
    title, target = role.process_link(env, refnode, False, target, target)
    refnode["reftarget"] = target
    classes = ["xref", domain, f"{domain}-{rolename}"]
    refnode += role.innernodeclass(target, title, classes=classes)

    # This is the very gross part of the hack. Normally,
    # result_nodes takes a document object to which we would pass
    # self.inliner.document. Prior to Sphinx 4.1, we don't *have* an
    # inliner to pass, so we have nothing to pass here. However, the
    # actual implementation of role.result_nodes in this case
    # doesn't actually use that argument, so this winds up being
    # ... fine. Rest easy at night knowing this code only runs under
    # old versions of Sphinx, so at least it won't change in the
    # future on us and lead to surprising new failures.
    # Gross, I know.
    result_nodes, _messages = role.result_nodes(
        None,  # type: ignore
        env,
        refnode,
        is_ref=True,
    )
    return nodes.inline(target, "", *result_nodes)
|
||||
|
||||
|
||||
class CompatField(docfields.Field):
    """``docfields.Field`` with the pre-4.1 xref workaround patched in."""

    if MAKE_XREF_WORKAROUND:
        make_xref = _compat_make_xref
|
||||
|
||||
|
||||
class CompatGroupedField(docfields.GroupedField):
    """``docfields.GroupedField`` with the pre-4.1 xref workaround patched in."""

    if MAKE_XREF_WORKAROUND:
        make_xref = _compat_make_xref
|
||||
|
||||
|
||||
class CompatTypedField(docfields.TypedField):
    """``docfields.TypedField`` with the pre-4.1 xref workaround patched in."""

    if MAKE_XREF_WORKAROUND:
        make_xref = _compat_make_xref
|
||||
|
||||
|
||||
# ################################################################
|
||||
# Nested parsing error location fix for Sphinx 5.3.0 < x < 6.2.0 #
|
||||
# ################################################################
|
||||
|
||||
# When we require Sphinx 4.x, the TYPE_CHECKING hack where we avoid
|
||||
# subscripting ObjectDescription at runtime can be removed in favor of
|
||||
# just always subscripting the class.
|
||||
|
||||
# When we require Sphinx > 6.2.0, the rest of this compatibility hack
|
||||
# can be dropped and QAPIObject can just inherit directly from
|
||||
# ObjectDescription[Signature].
|
||||
|
||||
# Only the affected Sphinx range needs the manual nested-parse workaround
# implemented by ParserFix below.
SOURCE_LOCATION_FIX = (5, 3, 0) <= sphinx.version_info[:3] < (6, 2, 0)

# Alias for the signature type handled by ObjectDescription subclasses.
Signature = str


# Subscripting ObjectDescription at runtime breaks on old Sphinx, so only
# do it for the type checker.
if TYPE_CHECKING:
    _BaseClass = ObjectDescription[Signature]
else:
    _BaseClass = ObjectDescription
|
||||
|
||||
|
||||
class ParserFix(_BaseClass):
    """
    ObjectDescription mixin that fixes nested-parse source locations on
    Sphinx 5.3.0 <= x < 6.2.0 by parsing the directive content itself.
    """

    # Saved directive state, restored in transform_content().
    _temp_content: StringList
    _temp_offset: int
    _temp_node: Optional[addnodes.desc_content]

    def before_content(self) -> None:
        # Work around a sphinx bug and parse the content ourselves.
        self._temp_content = self.content
        self._temp_offset = self.content_offset
        self._temp_node = None

        if SOURCE_LOCATION_FIX:
            self._temp_node = addnodes.desc_content()
            self.state.nested_parse(
                self.content, self.content_offset, self._temp_node
            )
            # Sphinx will try to parse the content block itself,
            # Give it nothingness to parse instead.
            self.content = StringList()
            self.content_offset = 0

    def transform_content(self, content_node: addnodes.desc_content) -> None:
        # Sphinx workaround: Inject our parsed content and restore state.
        if self._temp_node:
            content_node += self._temp_node.children
            self.content = self._temp_content
            self.content_offset = self._temp_offset
|
166
docs/sphinx/dbusdoc.py
Normal file
166
docs/sphinx/dbusdoc.py
Normal file
|
@ -0,0 +1,166 @@
|
|||
# D-Bus XML documentation extension
|
||||
#
|
||||
# Copyright (C) 2021, Red Hat Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
#
|
||||
# Author: Marc-André Lureau <marcandre.lureau@redhat.com>
|
||||
"""dbus-doc is a Sphinx extension that provides documentation from D-Bus XML."""
|
||||
|
||||
import os
|
||||
import re
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
import sphinx
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node
|
||||
from docutils.parsers.rst import Directive, directives
|
||||
from docutils.parsers.rst.states import RSTState
|
||||
from docutils.statemachine import StringList, ViewList
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.errors import ExtensionError
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.docstrings import prepare_docstring
|
||||
from sphinx.util.docutils import SphinxDirective, switch_source_input
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
|
||||
import dbusdomain
|
||||
from dbusparser import parse_dbus_xml
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
__version__ = "1.0"
|
||||
|
||||
|
||||
class DBusDoc:
    """
    Accumulates generated reST lines describing parsed D-Bus interfaces.

    Each ``add_*`` method renders one element of the XML model into
    ``dbus:*`` directives appended to ``self.result`` (a StringList whose
    entries carry ``self._dbusfile`` as their source for error reporting).
    """

    def __init__(self, sphinx_directive, dbusfile):
        self._cur_doc = None
        self._sphinx_directive = sphinx_directive
        self._dbusfile = dbusfile
        self._top_node = nodes.section()
        self.result = StringList()
        # Current reST indentation prefix; grows by 3 spaces per nesting
        # level (and is trimmed by 3 when the level is closed).
        self.indent = ""

    def add_line(self, line: str, *lineno: int) -> None:
        """Append one line of generated reST to the output."""
        if line.strip():  # not a blank line
            self.result.append(self.indent + line, self._dbusfile, *lineno)
        else:
            self.result.append("", self._dbusfile, *lineno)

    def add_method(self, method):
        """Emit a ``dbus:method`` directive (args, returns, docstring)."""
        self.add_line(f".. dbus:method:: {method.name}")
        self.add_line("")
        self.indent += "   "
        for arg in method.in_args:
            self.add_line(f":arg {arg.signature} {arg.name}: {arg.doc_string}")
        for arg in method.out_args:
            self.add_line(f":ret {arg.signature} {arg.name}: {arg.doc_string}")
        self.add_line("")
        for line in prepare_docstring("\n" + method.doc_string):
            self.add_line(line)
        self.indent = self.indent[:-3]

    def add_signal(self, signal):
        """Emit a ``dbus:signal`` directive (args, docstring)."""
        self.add_line(f".. dbus:signal:: {signal.name}")
        self.add_line("")
        self.indent += "   "
        for arg in signal.args:
            self.add_line(f":arg {arg.signature} {arg.name}: {arg.doc_string}")
        self.add_line("")
        for line in prepare_docstring("\n" + signal.doc_string):
            self.add_line(line)
        self.indent = self.indent[:-3]

    def add_property(self, prop):
        """Emit a ``dbus:property`` directive (type, access, docstring)."""
        self.add_line(f".. dbus:property:: {prop.name}")
        self.indent += "   "
        self.add_line(f":type: {prop.signature}")
        access = {"read": "readonly", "write": "writeonly", "readwrite": "readwrite"}[
            prop.access
        ]
        self.add_line(f":{access}:")
        if prop.emits_changed_signal:
            # Plain string: this was an f-string with no interpolation.
            self.add_line(":emits-changed: yes")
        self.add_line("")
        for line in prepare_docstring("\n" + prop.doc_string):
            self.add_line(line)
        self.indent = self.indent[:-3]

    def add_interface(self, iface):
        """Emit a ``dbus:interface`` directive plus all of its members."""
        self.add_line(f".. dbus:interface:: {iface.name}")
        self.add_line("")
        self.indent += "   "
        for line in prepare_docstring("\n" + iface.doc_string):
            self.add_line(line)
        for method in iface.methods:
            self.add_method(method)
        for sig in iface.signals:
            self.add_signal(sig)
        for prop in iface.properties:
            self.add_property(prop)
        self.indent = self.indent[:-3]
|
||||
|
||||
|
||||
def parse_generated_content(state: RSTState, content: StringList) -> List[Node]:
    """Parse generated reST lines into docutils nodes and return them."""
    with switch_source_input(state, content):
        # A throwaway container; only its parsed children are returned.
        container = nodes.paragraph()
        container.document = state.document
        state.nested_parse(content, 0, container)
        return container.children
|
||||
|
||||
|
||||
class DBusDocDirective(SphinxDirective):
    """Extract documentation from the specified D-Bus XML file"""

    has_content = True
    required_arguments = 1  # path to the XML file, relative to the srctree
    optional_arguments = 0
    final_argument_whitespace = True

    def run(self):
        reporter = self.state.document.reporter

        try:
            source, lineno = reporter.get_source_and_line(self.lineno)  # type: ignore
        except AttributeError:
            # Some reporters lack get_source_and_line; debug info only.
            source, lineno = (None, None)

        logger.debug("[dbusdoc] %s:%s: input:\n%s", source, lineno, self.block_text)

        env = self.state.document.settings.env
        # NOTE(review): setup() below registers "dbusdoc_srctree", but this
        # reads "qapidoc_srctree" (presumably registered by the qapidoc
        # extension) — confirm which config value is intended.
        dbusfile = env.config.qapidoc_srctree + "/" + self.arguments[0]
        with open(dbusfile, "rb") as f:
            xml_data = f.read()
        xml = parse_dbus_xml(xml_data)
        # Render every interface found in the XML into reST lines.
        doc = DBusDoc(self, dbusfile)
        for iface in xml:
            doc.add_interface(iface)

        result = parse_generated_content(self.state, doc.result)
        return result
|
||||
|
||||
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
    """Register dbus-doc directive with Sphinx"""
    app.add_config_value("dbusdoc_srctree", None, "env")
    app.add_directive("dbus-doc", DBusDocDirective)
    # The directive emits dbus:* markup, so the domain must be present too.
    dbusdomain.setup(app)

    return {
        "version": __version__,
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
|
410
docs/sphinx/dbusdomain.py
Normal file
410
docs/sphinx/dbusdomain.py
Normal file
|
@ -0,0 +1,410 @@
|
|||
# D-Bus sphinx domain extension
|
||||
#
|
||||
# Copyright (C) 2021, Red Hat Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
#
|
||||
# Author: Marc-André Lureau <marcandre.lureau@redhat.com>
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
NamedTuple,
|
||||
Optional,
|
||||
Tuple,
|
||||
cast,
|
||||
)
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node
|
||||
from docutils.parsers.rst import directives
|
||||
from sphinx import addnodes
|
||||
from sphinx.addnodes import desc_signature, pending_xref
|
||||
from sphinx.directives import ObjectDescription
|
||||
from sphinx.domains import Domain, Index, IndexEntry, ObjType
|
||||
from sphinx.locale import _
|
||||
from sphinx.roles import XRefRole
|
||||
from sphinx.util import nodes as node_utils
|
||||
from sphinx.util.docfields import Field, TypedField
|
||||
from sphinx.util.typing import OptionSpec
|
||||
|
||||
|
||||
class DBusDescription(ObjectDescription[str]):
    """Base class for DBus objects"""

    option_spec: OptionSpec = ObjectDescription.option_spec.copy()
    option_spec.update(
        {
            "deprecated": directives.flag,
        }
    )

    def get_index_text(self, modname: str, name: str) -> str:
        """Return the text for the index entry of the object."""
        raise NotImplementedError("must be implemented in subclasses")

    def add_target_and_index(
        self, name: str, sig: str, signode: desc_signature
    ) -> None:
        """Register *name* as a link target and add it to the index."""
        # Members are registered under "<interface>.<name>"; ifacename is
        # None when we are outside an interface body (e.g. the interface
        # directive itself).
        ifacename = self.env.ref_context.get("dbus:interface")
        node_id = name
        if ifacename:
            node_id = f"{ifacename}.{node_id}"

        signode["names"].append(name)
        signode["ids"].append(node_id)

        if "noindexentry" not in self.options:
            indextext = self.get_index_text(ifacename, name)
            if indextext:
                self.indexnode["entries"].append(
                    ("single", indextext, node_id, "", None)
                )

        # Record the object in the domain data for xref resolution.
        domain = cast(DBusDomain, self.env.get_domain("dbus"))
        domain.note_object(name, self.objtype, node_id, location=signode)
|
||||
|
||||
|
||||
class DBusInterface(DBusDescription):
    """
    Implementation of ``dbus:interface``.
    """

    def get_index_text(self, ifacename: str, name: str) -> str:
        return ifacename

    def before_content(self) -> None:
        # Make the interface name available to nested member directives.
        self.env.ref_context["dbus:interface"] = self.arguments[0]

    def after_content(self) -> None:
        self.env.ref_context.pop("dbus:interface")

    def handle_signature(self, sig: str, signode: desc_signature) -> str:
        signode += addnodes.desc_annotation("interface ", "interface ")
        signode += addnodes.desc_name(sig, sig)
        return sig

    def run(self) -> List[Node]:
        """Wrap the rendered description in a titled section."""
        # super().run() returns [indexnode, desc-node]; re-parent the
        # desc-node under a new section with its own title.
        _, node = super().run()
        name = self.arguments[0]
        section = nodes.section(ids=[name + "-section"])
        section += nodes.title(name, "%s interface" % name)
        section += node
        return [self.indexnode, section]
|
||||
|
||||
|
||||
class DBusMember(DBusDescription):
    """Common base for interface members (methods, signals, properties)."""

    # Overridden to True by DBusSignal; selects signal-style rendering.
    signal = False
|
||||
|
||||
|
||||
class DBusMethod(DBusMember):
    """
    Implementation of ``dbus:method``.
    """

    option_spec: OptionSpec = DBusMember.option_spec.copy()
    option_spec.update(
        {
            "noreply": directives.flag,
        }
    )

    doc_field_types: List[Field] = [
        TypedField(
            "arg",
            label=_("Arguments"),
            names=("arg",),
            rolename="arg",
            typerolename=None,
            typenames=("argtype", "type"),
        ),
        TypedField(
            "ret",
            label=_("Returns"),
            names=("ret",),
            rolename="ret",
            typerolename=None,
            typenames=("rettype", "type"),
        ),
    ]

    def get_index_text(self, ifacename: str, name: str) -> str:
        return _("%s() (%s method)") % (name, ifacename)

    def handle_signature(self, sig: str, signode: desc_signature) -> str:
        """Build the signature line, harvesting arg/ret fields from content."""
        params = addnodes.desc_parameterlist()
        returns = addnodes.desc_parameterlist()

        # Pre-parse the directive content just to scrape the ":arg"/":ret"
        # fields, so the parameter lists can appear on the signature line.
        contentnode = addnodes.desc_content()
        self.state.nested_parse(self.content, self.content_offset, contentnode)
        for child in contentnode:
            if isinstance(child, nodes.field_list):
                for field in child:
                    # Field name looks like "arg <sig> <name>" or
                    # "ret <sig> <name>".
                    ty, sg, name = field[0].astext().split(None, 2)
                    param = addnodes.desc_parameter()
                    param += addnodes.desc_sig_keyword_type(sg, sg)
                    param += addnodes.desc_sig_space()
                    param += addnodes.desc_sig_name(name, name)
                    if ty == "arg":
                        params += param
                    elif ty == "ret":
                        returns += param

        anno = "signal " if self.signal else "method "
        signode += addnodes.desc_annotation(anno, anno)
        signode += addnodes.desc_name(sig, sig)
        signode += params
        # Signals and :noreply: methods have no return arrow.
        if not self.signal and "noreply" not in self.options:
            ret = addnodes.desc_returns()
            ret += returns
            signode += ret

        return sig
|
||||
|
||||
|
||||
class DBusSignal(DBusMethod):
    """
    Implementation of ``dbus:signal``.
    """

    # Signals only carry arguments; there is no "ret" field.
    doc_field_types: List[Field] = [
        TypedField(
            "arg",
            label=_("Arguments"),
            names=("arg",),
            rolename="arg",
            typerolename=None,
            typenames=("argtype", "type"),
        ),
    ]
    signal = True

    def get_index_text(self, ifacename: str, name: str) -> str:
        return _("%s() (%s signal)") % (name, ifacename)
|
||||
|
||||
|
||||
class DBusProperty(DBusMember):
    """
    Implementation of ``dbus:property``.
    """

    option_spec: OptionSpec = DBusMember.option_spec.copy()
    option_spec.update(
        {
            "type": directives.unchanged,
            "readonly": directives.flag,
            "writeonly": directives.flag,
            "readwrite": directives.flag,
            "emits-changed": directives.unchanged,
        }
    )

    # Properties are described by options, not doc fields.
    doc_field_types: List[Field] = []

    def get_index_text(self, ifacename: str, name: str) -> str:
        return _("%s (%s property)") % (name, ifacename)

    def transform_content(self, contentnode: addnodes.desc_content) -> None:
        """Prepend a field list summarizing access and change signalling."""
        fieldlist = nodes.field_list()
        access = None
        # Last matching flag wins if several are (incorrectly) given.
        if "readonly" in self.options:
            access = _("read-only")
        if "writeonly" in self.options:
            access = _("write-only")
        if "readwrite" in self.options:
            access = _("read & write")
        if access:
            content = nodes.Text(access)
            fieldname = nodes.field_name("", _("Access"))
            fieldbody = nodes.field_body("", nodes.paragraph("", "", content))
            field = nodes.field("", fieldname, fieldbody)
            fieldlist += field
        emits = self.options.get("emits-changed", None)
        if emits:
            content = nodes.Text(emits)
            fieldname = nodes.field_name("", _("Emits Changed"))
            fieldbody = nodes.field_body("", nodes.paragraph("", "", content))
            field = nodes.field("", fieldname, fieldbody)
            fieldlist += field
        if len(fieldlist) > 0:
            contentnode.insert(0, fieldlist)

    def handle_signature(self, sig: str, signode: desc_signature) -> str:
        """Render "property <name>:<type>" as the signature line."""
        contentnode = addnodes.desc_content()
        self.state.nested_parse(self.content, self.content_offset, contentnode)
        # NOTE(review): ty is None when :type: is omitted and is passed to
        # desc_sig_keyword_type unchecked — confirm the option is
        # effectively mandatory for this directive.
        ty = self.options.get("type")

        signode += addnodes.desc_annotation("property ", "property ")
        signode += addnodes.desc_name(sig, sig)
        signode += addnodes.desc_sig_punctuation("", ":")
        signode += addnodes.desc_sig_keyword_type(ty, ty)
        return sig

    def run(self) -> List[Node]:
        # Present the directive under the generic member name before
        # delegating — presumably to influence objtype; verify intent.
        self.name = "dbus:member"
        return super().run()
|
||||
|
||||
|
||||
class DBusXRef(XRefRole):
    """Cross-reference role shared by iface/meth/sig/prop references."""

    def process_link(self, env, refnode, has_explicit_title, title, target):
        # Remember the enclosing interface (if any) for resolution time.
        refnode["dbus:interface"] = env.ref_context.get("dbus:interface")
        if not has_explicit_title:
            title = title.lstrip(".")  # only has a meaning for the target
            target = target.lstrip("~")  # only has a meaning for the title
            # if the first character is a tilde, don't display the module/class
            # parts of the contents
            if title[0:1] == "~":
                title = title[1:]
                dot = title.rfind(".")
                if dot != -1:
                    title = title[dot + 1 :]
        # if the first character is a dot, search more specific namespaces first
        # else search builtins first
        if target[0:1] == ".":
            target = target[1:]
            refnode["refspecific"] = True
        return title, target
|
||||
|
||||
|
||||
class DBusIndex(Index):
    """
    Index subclass to provide a D-Bus interfaces index.
    """

    name = "dbusindex"
    localname = _("D-Bus Interfaces Index")
    shortname = _("dbus")

    def generate(
        self, docnames: Optional[Iterable[str]] = None  # was Iterable[str] = None
    ) -> Tuple[List[Tuple[str, List[IndexEntry]]], bool]:
        """Build (first-letter -> entries) content for the interfaces index.

        :param docnames: restrict to these documents when given.
        :return: (sorted content, collapse flag) per the Index API.
        """
        content: Dict[str, List[IndexEntry]] = {}
        # list of prefixes to ignore; longest first so the most specific
        # prefix is stripped.
        ignores: List[str] = self.domain.env.config["dbus_index_common_prefix"]
        ignores = sorted(ignores, key=len, reverse=True)

        ifaces = sorted(
            [
                x
                for x in self.domain.data["objects"].items()
                if x[1].objtype == "interface"
            ],
            key=lambda x: x[0].lower(),
        )
        # Named _objtype (not "_") so the gettext alias imported at module
        # level is not shadowed.
        for name, (docname, node_id, _objtype) in ifaces:
            if docnames and docname not in docnames:
                continue

            for ignore in ignores:
                if name.startswith(ignore):
                    name = name[len(ignore) :]
                    stripped = ignore
                    break
            else:
                stripped = ""

            entries = content.setdefault(name[0].lower(), [])
            entries.append(IndexEntry(stripped + name, 0, docname, node_id, "", "", ""))

        # sort by first letter
        sorted_content = sorted(content.items())

        return sorted_content, False
|
||||
|
||||
|
||||
class ObjectEntry(NamedTuple):
    """Location/type record for one named D-Bus object in the domain data."""

    docname: str  # document in which the object is described
    node_id: str  # anchor id within that document
    objtype: str  # "interface", "method", "signal" or "property"
|
||||
|
||||
|
||||
class DBusDomain(Domain):
    """
    Implementation of the D-Bus domain.
    """

    name = "dbus"
    label = "D-Bus"
    object_types: Dict[str, ObjType] = {
        "interface": ObjType(_("interface"), "iface", "obj"),
        "method": ObjType(_("method"), "meth", "obj"),
        "signal": ObjType(_("signal"), "sig", "obj"),
        "property": ObjType(_("property"), "attr", "_prop", "obj"),
    }
    directives = {
        "interface": DBusInterface,
        "method": DBusMethod,
        "signal": DBusSignal,
        "property": DBusProperty,
    }
    roles = {
        "iface": DBusXRef(),
        "meth": DBusXRef(),
        "sig": DBusXRef(),
        "prop": DBusXRef(),
    }
    initial_data: Dict[str, Dict[str, Tuple[Any]]] = {
        "objects": {},  # fullname -> ObjectEntry
    }
    indices = [
        DBusIndex,
    ]

    @property
    def objects(self) -> Dict[str, ObjectEntry]:
        return self.data.setdefault("objects", {})  # fullname -> ObjectEntry

    def note_object(
        self, name: str, objtype: str, node_id: str, location: Any = None
    ) -> None:
        """Record an object for later cross-reference resolution.

        ``location`` is accepted for API symmetry but currently unused.
        """
        self.objects[name] = ObjectEntry(self.env.docname, node_id, objtype)

    def clear_doc(self, docname: str) -> None:
        # Drop every object that was defined in the removed document.
        for fullname, obj in list(self.objects.items()):
            if obj.docname == docname:
                del self.objects[fullname]

    # Annotation fixed: this returns an ObjectEntry (or None), not a tuple.
    def find_obj(self, typ: str, name: str) -> Optional[ObjectEntry]:
        # skip parens
        if name[-2:] == "()":
            name = name[:-2]
        if typ in ("meth", "sig", "prop"):
            try:
                # ifacename is currently unused; only the member's own
                # (unqualified) name keys the lookup.
                ifacename, name = name.rsplit(".", 1)
            except ValueError:
                pass
        return self.objects.get(name)

    def resolve_xref(
        self,
        env: "BuildEnvironment",
        fromdocname: str,
        builder: "Builder",
        typ: str,
        target: str,
        node: pending_xref,
        contnode: Element,
    ) -> Optional[Element]:
        """Resolve the pending_xref *node* with the given *typ* and *target*."""
        objdef = self.find_obj(typ, target)
        if objdef:
            return node_utils.make_refnode(
                builder, fromdocname, objdef.docname, objdef.node_id, contnode
            )
        # Implicitly returns None when the target is unknown.

    def get_objects(self) -> Iterator[Tuple[str, str, str, str, str, int]]:
        # (name, dispname, type, docname, anchor, priority)
        for refname, obj in self.objects.items():
            yield (refname, refname, obj.objtype, obj.docname, obj.node_id, 1)

    def merge_domaindata(self, docnames, otherdata):
        # Merge objects from a parallel-read worker for the given docs.
        for name, obj in otherdata['objects'].items():
            if obj.docname in docnames:
                self.data['objects'][name] = obj
|
||||
|
||||
def setup(app):
    """Register the D-Bus domain and its config value with Sphinx."""
    app.add_domain(DBusDomain)
    app.add_config_value("dbus_index_common_prefix", [], "env")
|
373
docs/sphinx/dbusparser.py
Normal file
373
docs/sphinx/dbusparser.py
Normal file
|
@ -0,0 +1,373 @@
|
|||
# Based from "GDBus - GLib D-Bus Library":
|
||||
#
|
||||
# Copyright (C) 2008-2011 Red Hat, Inc.
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General
|
||||
# Public License along with this library; if not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# Author: David Zeuthen <davidz@redhat.com>
|
||||
|
||||
import xml.parsers.expat
|
||||
|
||||
|
||||
class Annotation:
    """In-memory model of a D-Bus ``<annotation>`` element."""

    def __init__(self, key, value):
        self.key = key
        self.value = value
        # Metadata defaults; filled in after construction.
        self.since = ""
        self.annotations = []
|
||||
|
||||
|
||||
class Arg:
    """In-memory model of a D-Bus ``<arg>`` element."""

    def __init__(self, name, signature):
        self.name = name
        self.signature = signature
        # Documentation / metadata defaults; filled in after construction.
        self.doc_string = ""
        self.since = ""
        self.annotations = []
|
||||
|
||||
|
||||
class Method:
    """In-memory model of a D-Bus ``<method>`` element."""

    def __init__(self, name, h_type_implies_unix_fd=True):
        self.name = name
        self.h_type_implies_unix_fd = h_type_implies_unix_fd
        # Argument lists, populated by the XML parser.
        self.in_args = []
        self.out_args = []
        self.annotations = []
        # Documentation / metadata defaults.
        self.doc_string = ""
        self.since = ""
        self.deprecated = False
        self.unix_fd = False
|
||||
|
||||
|
||||
class Signal:
    """In-memory model of a D-Bus ``<signal>`` element."""

    def __init__(self, name):
        self.name = name
        # Arguments, populated by the XML parser.
        self.args = []
        self.annotations = []
        # Documentation / metadata defaults.
        self.doc_string = ""
        self.since = ""
        self.deprecated = False
|
||||
|
||||
|
||||
class Property:
    """In-memory model of a D-Bus ``<property>`` element."""

    def __init__(self, name, signature, access):
        self.name = name
        self.signature = signature
        self.access = access
        self.annotations = []
        # Synthesized value argument; it shares this property's
        # annotation list.
        self.arg = Arg("value", self.signature)
        self.arg.annotations = self.annotations
        # Derive readability/writability from the access mode; anything
        # other than read/write/readwrite is rejected.
        self.readable = self.access in ("read", "readwrite")
        self.writable = self.access in ("write", "readwrite")
        if not (self.readable or self.writable):
            raise ValueError('Invalid access type "{}"'.format(self.access))
        # Documentation / metadata defaults.
        self.doc_string = ""
        self.since = ""
        self.deprecated = False
        self.emits_changed_signal = True
|
||||
|
||||
|
||||
class Interface:
    """In-memory model of a D-Bus ``<interface>`` element."""

    def __init__(self, name):
        self.name = name
        # Members, populated by the XML parser.
        self.methods = []
        self.signals = []
        self.properties = []
        self.annotations = []
        # Documentation / metadata defaults.
        self.doc_string = ""
        self.doc_string_brief = ""
        self.since = ""
        self.deprecated = False
|
||||
|
||||
|
||||
class DBusXMLParser:
    """Expat-driven parser for D-Bus introspection XML.

    Builds a list of Interface objects in ``parsed_interfaces``,
    attaching documentation harvested from gtk-doc style XML comments
    (see handle_comment) to the elements that follow them.
    """

    # Parser states.  The string values double as the XML element names
    # that trigger a transition into each state.
    STATE_TOP = "top"
    STATE_NODE = "node"
    STATE_INTERFACE = "interface"
    STATE_METHOD = "method"
    STATE_SIGNAL = "signal"
    STATE_PROPERTY = "property"
    STATE_ARG = "arg"
    STATE_ANNOTATION = "annotation"
    STATE_IGNORED = "ignored"

    def __init__(self, xml_data, h_type_implies_unix_fd=True):
        # Wire the expat callbacks to our handlers; parsing happens
        # synchronously in the self._parser.Parse() call below.
        self._parser = xml.parsers.expat.ParserCreate()
        self._parser.CommentHandler = self.handle_comment
        self._parser.CharacterDataHandler = self.handle_char_data
        self._parser.StartElementHandler = self.handle_start_element
        self._parser.EndElementHandler = self.handle_end_element

        # Output: list of Interface objects.
        self.parsed_interfaces = []
        self._cur_object = None

        # Element-nesting state machine.  The stacks are pushed on
        # element start and popped on element end.
        self.state = DBusXMLParser.STATE_TOP
        self.state_stack = []
        # NOTE: _cur_object is (redundantly) reset again here; harmless.
        self._cur_object = None
        self._cur_object_stack = []

        # Name announced by the most recent doc comment, used to match
        # documentation to the next element with the same name.
        self.doc_comment_last_symbol = ""

        # Whether an 'h' type code means a unix fd is passed (per the
        # h_type_implies_unix_fd flag handed to Method()).
        self._h_type_implies_unix_fd = h_type_implies_unix_fd

        self._parser.Parse(xml_data)

    # States for the line scanner in handle_comment().
    COMMENT_STATE_BEGIN = "begin"
    COMMENT_STATE_PARAMS = "params"
    COMMENT_STATE_BODY = "body"
    COMMENT_STATE_SKIP = "skip"

    def handle_comment(self, data):
        """Scan an XML comment for gtk-doc style documentation.

        A documentation comment has the shape::

            SymbolName:
            @param1: parameter docs
            @param2: parameter docs

            Free-form body text...

        On success, records the symbol name, the per-parameter docs and
        the body in doc_comment_last_symbol / doc_comment_params /
        doc_comment_body so the following element handler can pick
        them up.  Comments without a "Symbol:" header are ignored.
        """
        comment_state = DBusXMLParser.COMMENT_STATE_BEGIN
        lines = data.split("\n")
        symbol = ""
        body = ""
        in_para = False
        params = {}
        for line in lines:
            orig_line = line
            line = line.lstrip()
            if comment_state == DBusXMLParser.COMMENT_STATE_BEGIN:
                # Looking for the "Symbol:" header line.
                if len(line) > 0:
                    colon_index = line.find(": ")
                    if colon_index == -1:
                        if line.endswith(":"):
                            # Bare "Symbol:" header.
                            symbol = line[0 : len(line) - 1]
                            comment_state = DBusXMLParser.COMMENT_STATE_PARAMS
                        else:
                            # Not a doc comment; skip the rest of it.
                            comment_state = DBusXMLParser.COMMENT_STATE_SKIP
                    else:
                        # "Symbol: first line of body" form.
                        symbol = line[0:colon_index]
                        rest_of_line = line[colon_index + 2 :].strip()
                        if len(rest_of_line) > 0:
                            body += rest_of_line + "\n"
                        comment_state = DBusXMLParser.COMMENT_STATE_PARAMS
            elif comment_state == DBusXMLParser.COMMENT_STATE_PARAMS:
                # Collecting "@name: docs" parameter lines.
                if line.startswith("@"):
                    colon_index = line.find(": ")
                    if colon_index == -1:
                        # "@..." without ": " -- fall through to body text.
                        comment_state = DBusXMLParser.COMMENT_STATE_BODY
                        if not in_para:
                            in_para = True
                        body += orig_line + "\n"
                    else:
                        param = line[1:colon_index]
                        docs = line[colon_index + 2 :]
                        params[param] = docs
                else:
                    # First non-@ line ends the parameter block.
                    comment_state = DBusXMLParser.COMMENT_STATE_BODY
                    if len(line) > 0:
                        if not in_para:
                            in_para = True
                        body += orig_line + "\n"
            elif comment_state == DBusXMLParser.COMMENT_STATE_BODY:
                # Accumulate body text; blank lines end a paragraph.
                if len(line) > 0:
                    if not in_para:
                        in_para = True
                    body += orig_line + "\n"
                else:
                    if in_para:
                        body += "\n"
                    in_para = False
        if in_para:
            body += "\n"

        if symbol != "":
            # Stash results for the element handler that follows.
            self.doc_comment_last_symbol = symbol
            self.doc_comment_params = params
            self.doc_comment_body = body

    def handle_char_data(self, data):
        """Character data between tags is not used; ignore it."""
        # print 'char_data=%s'%data
        pass

    def handle_start_element(self, name, attrs):
        """Expat start-element callback.

        Transitions the state machine according to the current state and
        the element name, creates the matching model object (Interface,
        Method, Signal, Property, Arg, Annotation), and attaches any
        documentation recorded by the preceding doc comment.
        Unknown elements push the IGNORED state so their subtree is
        skipped without error.
        """
        old_state = self.state
        old_cur_object = self._cur_object
        if self.state == DBusXMLParser.STATE_IGNORED:
            # Everything below an ignored element stays ignored.
            self.state = DBusXMLParser.STATE_IGNORED
        elif self.state == DBusXMLParser.STATE_TOP:
            if name == DBusXMLParser.STATE_NODE:
                self.state = DBusXMLParser.STATE_NODE
            else:
                self.state = DBusXMLParser.STATE_IGNORED
        elif self.state == DBusXMLParser.STATE_NODE:
            if name == DBusXMLParser.STATE_INTERFACE:
                self.state = DBusXMLParser.STATE_INTERFACE
                iface = Interface(attrs["name"])
                self._cur_object = iface
                self.parsed_interfaces.append(iface)
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

            # assign docs, if any
            if "name" in attrs and self.doc_comment_last_symbol == attrs["name"]:
                self._cur_object.doc_string = self.doc_comment_body
                if "short_description" in self.doc_comment_params:
                    short_description = self.doc_comment_params["short_description"]
                    self._cur_object.doc_string_brief = short_description
                if "since" in self.doc_comment_params:
                    self._cur_object.since = self.doc_comment_params["since"].strip()

        elif self.state == DBusXMLParser.STATE_INTERFACE:
            if name == DBusXMLParser.STATE_METHOD:
                self.state = DBusXMLParser.STATE_METHOD
                method = Method(
                    attrs["name"], h_type_implies_unix_fd=self._h_type_implies_unix_fd
                )
                self._cur_object.methods.append(method)
                self._cur_object = method
            elif name == DBusXMLParser.STATE_SIGNAL:
                self.state = DBusXMLParser.STATE_SIGNAL
                signal = Signal(attrs["name"])
                self._cur_object.signals.append(signal)
                self._cur_object = signal
            elif name == DBusXMLParser.STATE_PROPERTY:
                self.state = DBusXMLParser.STATE_PROPERTY
                prop = Property(attrs["name"], attrs["type"], attrs["access"])
                self._cur_object.properties.append(prop)
                self._cur_object = prop
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

            # assign docs, if any
            if "name" in attrs and self.doc_comment_last_symbol == attrs["name"]:
                self._cur_object.doc_string = self.doc_comment_body
                if "since" in self.doc_comment_params:
                    self._cur_object.since = self.doc_comment_params["since"].strip()

        elif self.state == DBusXMLParser.STATE_METHOD:
            if name == DBusXMLParser.STATE_ARG:
                self.state = DBusXMLParser.STATE_ARG
                arg_name = None
                if "name" in attrs:
                    arg_name = attrs["name"]
                arg = Arg(arg_name, attrs["type"])
                # Method args default to direction "in" per the D-Bus spec.
                direction = attrs.get("direction", "in")
                if direction == "in":
                    self._cur_object.in_args.append(arg)
                elif direction == "out":
                    self._cur_object.out_args.append(arg)
                else:
                    raise ValueError('Invalid direction "{}"'.format(direction))
                self._cur_object = arg
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

            # assign docs, if any: arg docs come from the @param lines
            # of the enclosing method's doc comment.
            if self.doc_comment_last_symbol == old_cur_object.name:
                if "name" in attrs and attrs["name"] in self.doc_comment_params:
                    doc_string = self.doc_comment_params[attrs["name"]]
                    if doc_string is not None:
                        self._cur_object.doc_string = doc_string
                        if "since" in self.doc_comment_params:
                            self._cur_object.since = self.doc_comment_params[
                                "since"
                            ].strip()

        elif self.state == DBusXMLParser.STATE_SIGNAL:
            if name == DBusXMLParser.STATE_ARG:
                self.state = DBusXMLParser.STATE_ARG
                arg_name = None
                if "name" in attrs:
                    arg_name = attrs["name"]
                arg = Arg(arg_name, attrs["type"])
                self._cur_object.args.append(arg)
                self._cur_object = arg
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

            # assign docs, if any (same @param mechanism as for methods)
            if self.doc_comment_last_symbol == old_cur_object.name:
                if "name" in attrs and attrs["name"] in self.doc_comment_params:
                    doc_string = self.doc_comment_params[attrs["name"]]
                    if doc_string is not None:
                        self._cur_object.doc_string = doc_string
                        if "since" in self.doc_comment_params:
                            self._cur_object.since = self.doc_comment_params[
                                "since"
                            ].strip()

        elif self.state == DBusXMLParser.STATE_PROPERTY:
            if name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

        elif self.state == DBusXMLParser.STATE_ARG:
            if name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

        elif self.state == DBusXMLParser.STATE_ANNOTATION:
            # Annotations may themselves carry nested annotations.
            if name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = Annotation(attrs["name"], attrs["value"])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED

        else:
            raise ValueError(
                'Unhandled state "{}" while entering element with name "{}"'.format(
                    self.state, name
                )
            )

        self.state_stack.append(old_state)
        self._cur_object_stack.append(old_cur_object)

    def handle_end_element(self, name):
        """Expat end-element callback: pop back to the enclosing state."""
        self.state = self.state_stack.pop()
        self._cur_object = self._cur_object_stack.pop()
||||
|
||||
|
||||
def parse_dbus_xml(xml_data):
    """Parse D-Bus introspection XML and return the list of Interface objects."""
    return DBusXMLParser(xml_data, True).parsed_interfaces
|
69
docs/sphinx/depfile.py
Normal file
69
docs/sphinx/depfile.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
# coding=utf-8
|
||||
#
|
||||
# QEMU depfile generation extension
|
||||
#
|
||||
# Copyright (c) 2020 Red Hat, Inc.
|
||||
#
|
||||
# This work is licensed under the terms of the GNU GPLv2 or later.
|
||||
# See the COPYING file in the top-level directory.
|
||||
|
||||
"""depfile is a Sphinx extension that writes a dependency file for
|
||||
an external build system"""
|
||||
|
||||
import os
|
||||
import sphinx
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
def get_infiles(env):
    """Yield every input file the documentation build depends on.

    ``env`` is the Sphinx build environment.  The yielded paths cover:
    all source documents and their recorded dependencies, every loaded
    Python module (so touching an extension rebuilds the docs), static
    and template files, and the kernel-doc script.
    """
    for x in env.found_docs:
        yield str(env.doc2path(x))
        # Dependencies recorded via note_dependency() are stored
        # relative to the source dir.
        yield from ((os.path.join(env.srcdir, dep)
                     for dep in env.dependencies[x]))
    for mod in sys.modules.values():
        # Namespace packages have __file__ set to None; skip those.
        if hasattr(mod, '__file__'):
            if mod.__file__:
                yield mod.__file__
    # this is perhaps going to include unused files:
    for static_path in env.config.html_static_path + env.config.templates_path:
        for path in Path(static_path).rglob('*'):
            yield str(path)

    # also include kdoc script
    # NOTE(review): assumes kerneldoc_bin is a list whose second element
    # is the script path -- confirm against conf.py.
    yield str(env.config.kerneldoc_bin[1])
||||
|
||||
|
||||
def write_depfile(app, exception):
    """'build-finished' hook: emit a Make-style dependency file.

    Does nothing if the build failed (``exception`` set) or if the
    ``depfile`` config value is unset.  Writes one rule listing every
    input file, plus an empty rule per input so deleted files do not
    break incremental builds.
    """
    if exception:
        return

    env = app.env
    if not env.config.depfile:
        return

    # Using a directory as the output file does not work great because
    # its timestamp does not necessarily change when the contents change.
    # So create a timestamp file.
    if env.config.depfile_stamp:
        # Create (or truncate) the stamp file; the handle itself is
        # unused, so don't bind it to a name.
        with open(env.config.depfile_stamp, 'w'):
            pass

    with open(env.config.depfile, 'w') as f:
        print((env.config.depfile_stamp or app.outdir) + ": \\", file=f)
        print(*get_infiles(env), file=f)
        # Empty per-file rules keep make happy when an input disappears.
        for x in get_infiles(env):
            print(x + ":", file=f)
||||
|
||||
|
||||
def setup(app):
    """Sphinx entry point: register depfile config values and hook."""
    app.add_config_value('depfile', None, 'env')
    app.add_config_value('depfile_stamp', None, 'env')
    app.connect('build-finished', write_depfile)

    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
|
30
docs/sphinx/fakedbusdoc.py
Normal file
30
docs/sphinx/fakedbusdoc.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
# D-Bus XML documentation extension, compatibility gunk for <sphinx4
|
||||
#
|
||||
# Copyright (C) 2021, Red Hat Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
#
|
||||
# Author: Marc-André Lureau <marcandre.lureau@redhat.com>
|
||||
"""dbus-doc is a Sphinx extension that provides documentation from D-Bus XML."""
|
||||
|
||||
from docutils.parsers.rst import Directive
|
||||
from sphinx.application import Sphinx
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
class FakeDBusDocDirective(Directive):
    """Stand-in for dbus-doc on Sphinx versions without real support.

    Accepts the same argument and content as the real directive but
    emits nothing.
    """

    has_content = True
    required_arguments = 1

    def run(self):
        # Swallow the directive entirely: no output nodes.
        return []
|
||||
|
||||
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
    """Register a fake dbus-doc directive with Sphinx"""
    app.add_directive("dbus-doc", FakeDBusDocDirective)

    return {
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
|
195
docs/sphinx/hxtool.py
Normal file
195
docs/sphinx/hxtool.py
Normal file
|
@ -0,0 +1,195 @@
|
|||
# coding=utf-8
|
||||
#
|
||||
# QEMU hxtool .hx file parsing extension
|
||||
#
|
||||
# Copyright (c) 2020 Linaro
|
||||
#
|
||||
# This work is licensed under the terms of the GNU GPLv2 or later.
|
||||
# See the COPYING file in the top-level directory.
|
||||
"""hxtool is a Sphinx extension that implements the hxtool-doc directive"""
|
||||
|
||||
# The purpose of this extension is to read fragments of rST
|
||||
# from .hx files, and insert them all into the current document.
|
||||
# The rST fragments are delimited by SRST/ERST lines.
|
||||
# The conf.py file must set the hxtool_srctree config value to
|
||||
# the root of the QEMU source tree.
|
||||
# Each hxtool-doc:: directive takes one argument which is the
|
||||
# path of the .hx file to process, relative to the source tree.
|
||||
|
||||
import os
|
||||
import re
|
||||
from enum import Enum
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.statemachine import ViewList
|
||||
from docutils.parsers.rst import directives, Directive
|
||||
from sphinx.errors import ExtensionError
|
||||
from sphinx.util.docutils import switch_source_input
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
import sphinx
|
||||
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
# We parse hx files with a state machine which may be in one of two
|
||||
# states: reading the C code fragment, or inside a rST fragment.
|
||||
class HxState(Enum):
    """State of the .hx scanner: inside C text or inside an rST fragment."""

    CTEXT = 1
    RST = 2
||||
|
||||
def serror(file, lnum, errtext):
    """Raise an exception giving a user-friendly syntax error message"""
    message = '%s line %d: syntax error: %s' % (file, lnum, errtext)
    raise ExtensionError(message)
|
||||
|
||||
def parse_directive(line):
    """Return first word of line, if any"""
    first_word, *_ = re.split(r'\W', line)
    return first_word
|
||||
|
||||
def parse_defheading(file, lnum, line):
    """Handle a DEFHEADING directive"""
    # The input should be "DEFHEADING(some string)", though note that
    # the 'some string' could be the empty string. If the string is
    # empty we ignore the directive -- these are used only to add
    # blank lines in the plain-text content of the --help output.
    #
    # Return the heading text. We strip out any trailing ':' for
    # consistency with other headings in the rST documentation.
    match = re.match(r'DEFHEADING\((.*?):?\)', line)
    if not match:
        serror(file, lnum, "Invalid DEFHEADING line")
    return match.group(1)
|
||||
|
||||
def parse_archheading(file, lnum, line):
    """Handle an ARCHHEADING directive"""
    # The input should be "ARCHHEADING(some string, other arg)",
    # though note that the 'some string' could be the empty string.
    # As with DEFHEADING, empty string ARCHHEADINGs will be ignored.
    #
    # Return the heading text. We strip out any trailing ':' for
    # consistency with other headings in the rST documentation.
    match = re.match(r'ARCHHEADING\((.*?):?,.*\)', line)
    if not match:
        serror(file, lnum, "Invalid ARCHHEADING line")
    return match.group(1)
|
||||
|
||||
def parse_srst(file, lnum, line):
    """Handle an SRST directive"""
    # The input should be either "SRST", or "SRST(label)".
    match = re.match(r'SRST(\((.*?)\))?', line)
    if not match:
        serror(file, lnum, "Invalid SRST line")
    return match.group(2)
|
||||
|
||||
class HxtoolDocDirective(Directive):
    """Extract rST fragments from the specified .hx file"""

    # One mandatory argument: the .hx file path relative to
    # hxtool_srctree.  The original spelled this "required_argument"
    # (a typo for docutils' "required_arguments"), which silently left
    # the default of zero in place, so a missing argument surfaced as
    # an IndexError in run() instead of a proper directive error.
    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        'hxfile': directives.unchanged_required
    }
    has_content = False

    def run(self):
        """Parse the .hx file and return the generated docutils nodes."""
        env = self.state.document.settings.env
        hxfile = env.config.hxtool_srctree + '/' + self.arguments[0]

        # Tell sphinx of the dependency
        env.note_dependency(os.path.abspath(hxfile))

        state = HxState.CTEXT
        # We build up lines of rST in this ViewList, which we will
        # later put into a 'section' node.
        rstlist = ViewList()
        current_node = None
        node_list = []

        with open(hxfile) as f:
            lines = (l.rstrip() for l in f)
            for lnum, line in enumerate(lines, 1):
                directive = parse_directive(line)

                if directive == 'HXCOMM':
                    # Comment line in the .hx file; ignore.
                    pass
                elif directive == 'SRST':
                    if state == HxState.RST:
                        serror(hxfile, lnum, 'expected ERST, found SRST')
                    else:
                        state = HxState.RST
                        label = parse_srst(hxfile, lnum, line)
                        if label:
                            rstlist.append("", hxfile, lnum - 1)
                            # Build label as _DOCNAME-HXNAME-LABEL
                            hx = os.path.splitext(os.path.basename(hxfile))[0]
                            refline = ".. _" + env.docname + "-" + hx + \
                                "-" + label + ":"
                            rstlist.append(refline, hxfile, lnum - 1)
                elif directive == 'ERST':
                    if state == HxState.CTEXT:
                        serror(hxfile, lnum, 'expected SRST, found ERST')
                    else:
                        state = HxState.CTEXT
                elif directive == 'DEFHEADING' or directive == 'ARCHHEADING':
                    if directive == 'DEFHEADING':
                        heading = parse_defheading(hxfile, lnum, line)
                    else:
                        heading = parse_archheading(hxfile, lnum, line)
                    if heading == "":
                        continue
                    # Put the accumulated rST into the previous node,
                    # and then start a fresh section with this heading.
                    if len(rstlist) > 0:
                        if current_node is None:
                            # We had some rST fragments before the first
                            # DEFHEADING. We don't have a section to put
                            # these in, so rather than magicing up a section,
                            # make it a syntax error.
                            serror(hxfile, lnum,
                                   'first DEFHEADING must precede all rST text')
                        self.do_parse(rstlist, current_node)
                        rstlist = ViewList()
                    if current_node is not None:
                        node_list.append(current_node)
                    section_id = 'hxtool-%d' % env.new_serialno('hxtool')
                    current_node = nodes.section(ids=[section_id])
                    current_node += nodes.title(heading, heading)
                else:
                    # Not a directive: put in output if we are in rST fragment
                    if state == HxState.RST:
                        # Sphinx counts its lines from 0
                        rstlist.append(line, hxfile, lnum - 1)

        if current_node is None:
            # We don't have multiple sections, so just parse the rst
            # fragments into a dummy node so we can return the children.
            current_node = nodes.section()
            self.do_parse(rstlist, current_node)
            return current_node.children
        else:
            # Put the remaining accumulated rST into the last section, and
            # return all the sections.
            if len(rstlist) > 0:
                self.do_parse(rstlist, current_node)
            node_list.append(current_node)
            return node_list

    # This is from kerneldoc.py -- it works around an API change in
    # Sphinx between 1.6 and 1.7. Unlike kerneldoc.py, we use
    # sphinx.util.nodes.nested_parse_with_titles() rather than the
    # plain self.state.nested_parse(), and so we can drop the saving
    # of title_styles and section_level that kerneldoc.py does,
    # because nested_parse_with_titles() does that for us.
    def do_parse(self, result, node):
        """Parse accumulated rST lines into *node* with correct source info."""
        with switch_source_input(self.state, result):
            nested_parse_with_titles(self.state, result, node)
|
||||
|
||||
|
||||
def setup(app):
    """ Register hxtool-doc directive with Sphinx"""
    app.add_config_value('hxtool_srctree', None, 'env')
    app.add_directive('hxtool-doc', HxtoolDocDirective)

    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
|
168
docs/sphinx/kerneldoc.py
Normal file
168
docs/sphinx/kerneldoc.py
Normal file
|
@ -0,0 +1,168 @@
|
|||
# coding=utf-8
|
||||
#
|
||||
# Copyright © 2016 Intel Corporation
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
#
|
||||
# Authors:
|
||||
# Jani Nikula <jani.nikula@intel.com>
|
||||
#
|
||||
# Please make sure this works on both python2 and python3.
|
||||
#
|
||||
|
||||
import codecs
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import re
|
||||
import glob
|
||||
|
||||
from docutils import nodes, statemachine
|
||||
from docutils.statemachine import ViewList
|
||||
from docutils.parsers.rst import directives, Directive
|
||||
|
||||
import sphinx
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.docutils import switch_source_input
|
||||
|
||||
|
||||
__version__ = '1.0'
|
||||
logger = logging.getLogger('kerneldoc')
|
||||
|
||||
|
||||
class KernelDocDirective(Directive):
    """Extract kernel-doc comments from the specified file"""

    # One mandatory argument: the source file, relative to
    # kerneldoc_srctree.  The original spelled this "required_argument"
    # (a typo for docutils' "required_arguments"), which silently left
    # the default of zero in place, so a missing argument surfaced as
    # an IndexError in run() instead of a proper directive error.
    required_arguments = 1
    optional_arguments = 4
    option_spec = {
        'doc': directives.unchanged_required,
        'functions': directives.unchanged,
        'export': directives.unchanged,
        'internal': directives.unchanged,
    }
    has_content = False

    def run(self):
        """Invoke the kernel-doc script and parse its rST output."""
        env = self.state.document.settings.env
        cmd = env.config.kerneldoc_bin + ['-rst', '-enable-lineno']

        # Pass the version string to kernel-doc, as it needs to use a different
        # dialect, depending what the C domain supports for each specific
        # Sphinx versions
        cmd += ['-sphinx-version', sphinx.__version__]

        # Pass through the warnings-as-errors flag
        if env.config.kerneldoc_werror:
            cmd += ['-Werror']

        filename = env.config.kerneldoc_srctree + '/' + self.arguments[0]
        export_file_patterns = []

        # Tell sphinx of the dependency
        env.note_dependency(os.path.abspath(filename))

        tab_width = self.options.get('tab-width', self.state.document.settings.tab_width)

        # FIXME: make this nicer and more robust against errors
        if 'export' in self.options:
            cmd += ['-export']
            export_file_patterns = str(self.options.get('export')).split()
        elif 'internal' in self.options:
            cmd += ['-internal']
            export_file_patterns = str(self.options.get('internal')).split()
        elif 'doc' in self.options:
            cmd += ['-function', str(self.options.get('doc'))]
        elif 'functions' in self.options:
            functions = self.options.get('functions').split()
            if functions:
                for f in functions:
                    cmd += ['-function', f]
            else:
                cmd += ['-no-doc-sections']

        for pattern in export_file_patterns:
            for f in glob.glob(env.config.kerneldoc_srctree + '/' + pattern):
                env.note_dependency(os.path.abspath(f))
                cmd += ['-export-file', f]

        cmd += [filename]

        try:
            logger.verbose('calling kernel-doc \'%s\'' % (" ".join(cmd)))

            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()

            out, err = codecs.decode(out, 'utf-8'), codecs.decode(err, 'utf-8')

            if p.returncode != 0:
                sys.stderr.write(err)

                logger.warning(
                    'kernel-doc \'%s\' failed with return code %d' %
                    (" ".join(cmd), p.returncode)
                )
                return [nodes.error(None, nodes.paragraph(text = "kernel-doc missing"))]
            elif env.config.kerneldoc_verbosity > 0:
                # Surface kernel-doc warnings even on success.
                sys.stderr.write(err)

            lines = statemachine.string2lines(out, tab_width, convert_whitespace=True)
            result = ViewList()

            lineoffset = 0
            # kernel-doc emits "#define LINENO n" markers so the rST we
            # generate can be attributed to the right source lines.
            line_regex = re.compile("^#define LINENO ([0-9]+)$")
            for line in lines:
                match = line_regex.search(line)
                if match:
                    # sphinx counts lines from 0
                    lineoffset = int(match.group(1)) - 1
                    # we must eat our comments since the upset the markup
                else:
                    result.append(line, filename, lineoffset)
                    lineoffset += 1

            node = nodes.section()
            self.do_parse(result, node)

            return node.children

        except Exception as e:  # pylint: disable=W0703
            logger.warning('kernel-doc \'%s\' processing failed with: %s' %
                           (" ".join(cmd), str(e)))
            return [nodes.error(None, nodes.paragraph(text = "kernel-doc missing"))]

    def do_parse(self, result, node):
        """Parse the generated rST lines into *node*."""
        with switch_source_input(self.state, result):
            self.state.nested_parse(result, 0, node, match_titles=1)
||||
|
||||
|
||||
def setup(app):
    """Sphinx entry point: register kernel-doc config values and directive."""
    app.add_config_value('kerneldoc_bin', None, 'env')
    app.add_config_value('kerneldoc_srctree', None, 'env')
    app.add_config_value('kerneldoc_verbosity', 1, 'env')
    app.add_config_value('kerneldoc_werror', 0, 'env')

    app.add_directive('kernel-doc', KernelDocDirective)

    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
|
1052
docs/sphinx/qapi_domain.py
Normal file
1052
docs/sphinx/qapi_domain.py
Normal file
File diff suppressed because it is too large
Load diff
733
docs/sphinx/qapidoc.py
Normal file
733
docs/sphinx/qapidoc.py
Normal file
|
@ -0,0 +1,733 @@
|
|||
# coding=utf-8
|
||||
#
|
||||
# QEMU qapidoc QAPI file parsing extension
|
||||
#
|
||||
# Copyright (c) 2024-2025 Red Hat
|
||||
# Copyright (c) 2020 Linaro
|
||||
#
|
||||
# This work is licensed under the terms of the GNU GPLv2 or later.
|
||||
# See the COPYING file in the top-level directory.
|
||||
|
||||
"""
|
||||
qapidoc is a Sphinx extension that implements the qapi-doc directive
|
||||
|
||||
The purpose of this extension is to read the documentation comments
|
||||
in QAPI schema files, and insert them all into the current document.
|
||||
|
||||
It implements one new rST directive, "qapi-doc::".
|
||||
Each qapi-doc:: directive takes one argument, which is the
|
||||
pathname of the schema file to process, relative to the source tree.
|
||||
|
||||
The docs/conf.py file must set the qapidoc_srctree config value to
|
||||
the root of the QEMU source tree.
|
||||
|
||||
The Sphinx documentation on writing extensions is at:
|
||||
https://www.sphinx-doc.org/en/master/development/index.html
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
__version__ = "2.0"
|
||||
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst import directives
|
||||
from docutils.statemachine import StringList
|
||||
from qapi.error import QAPIError
|
||||
from qapi.parser import QAPIDoc
|
||||
from qapi.schema import (
|
||||
QAPISchema,
|
||||
QAPISchemaArrayType,
|
||||
QAPISchemaCommand,
|
||||
QAPISchemaDefinition,
|
||||
QAPISchemaEnumMember,
|
||||
QAPISchemaEvent,
|
||||
QAPISchemaFeature,
|
||||
QAPISchemaMember,
|
||||
QAPISchemaObjectType,
|
||||
QAPISchemaObjectTypeMember,
|
||||
QAPISchemaType,
|
||||
QAPISchemaVisitor,
|
||||
)
|
||||
from qapi.source import QAPISourceInfo
|
||||
|
||||
from qapidoc_legacy import QAPISchemaGenRSTVisitor # type: ignore
|
||||
from sphinx import addnodes
|
||||
from sphinx.directives.code import CodeBlock
|
||||
from sphinx.errors import ExtensionError
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.docutils import SphinxDirective, switch_source_input
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import (
|
||||
Any,
|
||||
Generator,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Union,
|
||||
)
|
||||
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.util.typing import ExtensionMetadata
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Transmogrifier:
|
||||
# pylint: disable=too-many-public-methods
|
||||
|
||||
# Field names used for different entity types:
|
||||
field_types = {
|
||||
"enum": "value",
|
||||
"struct": "memb",
|
||||
"union": "memb",
|
||||
"event": "memb",
|
||||
"command": "arg",
|
||||
"alternate": "alt",
|
||||
}
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._curr_ent: Optional[QAPISchemaDefinition] = None
|
||||
self._result = StringList()
|
||||
self.indent = 0
|
||||
|
||||
@property
|
||||
def result(self) -> StringList:
|
||||
return self._result
|
||||
|
||||
@property
|
||||
def entity(self) -> QAPISchemaDefinition:
|
||||
assert self._curr_ent is not None
|
||||
return self._curr_ent
|
||||
|
||||
@property
|
||||
def member_field_type(self) -> str:
|
||||
return self.field_types[self.entity.meta]
|
||||
|
||||
# General-purpose rST generation functions
|
||||
|
||||
def get_indent(self) -> str:
|
||||
return " " * self.indent
|
||||
|
||||
@contextmanager
|
||||
def indented(self) -> Generator[None]:
|
||||
self.indent += 1
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.indent -= 1
|
||||
|
||||
def add_line_raw(self, line: str, source: str, *lineno: int) -> None:
|
||||
"""Append one line of generated reST to the output."""
|
||||
|
||||
# NB: Sphinx uses zero-indexed lines; subtract one.
|
||||
lineno = tuple((n - 1 for n in lineno))
|
||||
|
||||
if line.strip():
|
||||
# not a blank line
|
||||
self._result.append(
|
||||
self.get_indent() + line.rstrip("\n"), source, *lineno
|
||||
)
|
||||
else:
|
||||
self._result.append("", source, *lineno)
|
||||
|
||||
def add_line(self, content: str, info: QAPISourceInfo) -> None:
|
||||
# NB: We *require* an info object; this works out OK because we
|
||||
# don't document built-in objects that don't have
|
||||
# one. Everything else should.
|
||||
self.add_line_raw(content, info.fname, info.line)
|
||||
|
||||
def add_lines(
|
||||
self,
|
||||
content: str,
|
||||
info: QAPISourceInfo,
|
||||
) -> None:
|
||||
lines = content.splitlines(True)
|
||||
for i, line in enumerate(lines):
|
||||
self.add_line_raw(line, info.fname, info.line + i)
|
||||
|
||||
def ensure_blank_line(self) -> None:
|
||||
# Empty document -- no blank line required.
|
||||
if not self._result:
|
||||
return
|
||||
|
||||
# Last line isn't blank, add one.
|
||||
if self._result[-1].strip(): # pylint: disable=no-member
|
||||
fname, line = self._result.info(-1)
|
||||
assert isinstance(line, int)
|
||||
# New blank line is credited to one-after the current last line.
|
||||
# +2: correct for zero/one index, then increment by one.
|
||||
self.add_line_raw("", fname, line + 2)
|
||||
|
||||
def add_field(
|
||||
self,
|
||||
kind: str,
|
||||
name: str,
|
||||
body: str,
|
||||
info: QAPISourceInfo,
|
||||
typ: Optional[str] = None,
|
||||
) -> None:
|
||||
if typ:
|
||||
text = f":{kind} {typ} {name}: {body}"
|
||||
else:
|
||||
text = f":{kind} {name}: {body}"
|
||||
self.add_lines(text, info)
|
||||
|
||||
    def format_type(
        self, ent: Union[QAPISchemaDefinition | QAPISchemaMember]
    ) -> Optional[str]:
        """Return the rST type string for *ent*, or None when untyped.

        Enum members and features carry no type.  Object members use
        their member type, with a "?" suffix when optional; array types
        render as "[element]".
        """
        if isinstance(ent, (QAPISchemaEnumMember, QAPISchemaFeature)):
            return None

        qapi_type = ent
        optional = False
        if isinstance(ent, QAPISchemaObjectTypeMember):
            qapi_type = ent.type
            optional = ent.optional

        if isinstance(qapi_type, QAPISchemaArrayType):
            ret = f"[{qapi_type.element_type.doc_type()}]"
        else:
            assert isinstance(qapi_type, QAPISchemaType)
            tmp = qapi_type.doc_type()
            assert tmp
            ret = tmp
        if optional:
            ret += "?"

        return ret
|
||||
|
||||
def generate_field(
|
||||
self,
|
||||
kind: str,
|
||||
member: QAPISchemaMember,
|
||||
body: str,
|
||||
info: QAPISourceInfo,
|
||||
) -> None:
|
||||
typ = self.format_type(member)
|
||||
self.add_field(kind, member.name, body, info, typ)
|
||||
|
||||
# Transmogrification helpers
|
||||
|
||||
def visit_paragraph(self, section: QAPIDoc.Section) -> None:
|
||||
# Squelch empty paragraphs.
|
||||
if not section.text:
|
||||
return
|
||||
|
||||
self.ensure_blank_line()
|
||||
self.add_lines(section.text, section.info)
|
||||
self.ensure_blank_line()
|
||||
|
||||
    def visit_member(self, section: QAPIDoc.ArgSection) -> None:
        """Emit the documentation field for one member argument."""
        # FIXME: ifcond for members
        # TODO: features for members (documented at entity-level,
        # but sometimes defined per-member. Should we add such
        # information to member descriptions when we can?)
        assert section.member
        self.generate_field(
            self.member_field_type,
            section.member,
            # TODO drop fallbacks when undocumented members are outlawed
            section.text if section.text else "Not documented",
            section.info,
        )
|
||||
|
||||
    def visit_feature(self, section: QAPIDoc.ArgSection) -> None:
        """Emit the documentation field for one feature."""
        # FIXME - ifcond for features is not handled at all yet!
        # Proposal: decorate the right-hand column with some graphical
        # element to indicate conditional availability?
        assert section.text  # Guaranteed by parser.py
        assert section.member

        self.generate_field("feat", section.member, section.text, section.info)
|
||||
|
||||
    def visit_returns(self, section: QAPIDoc.Section) -> None:
        """Emit a command's Returns section as a ":return:" field."""
        assert isinstance(self.entity, QAPISchemaCommand)
        rtype = self.entity.ret_type
        # q_empty can produce None, but we won't be documenting anything
        # without an explicit return statement in the doc block, and we
        # should not have any such explicit statements when there is no
        # return value.
        assert rtype

        typ = self.format_type(rtype)
        assert typ
        assert section.text
        self.add_field("return", typ, section.text, section.info)
|
||||
|
||||
    def visit_errors(self, section: QAPIDoc.Section) -> None:
        """Emit an Errors section as an ":error:" field entry."""
        # FIXME: the formatting for errors may be inconsistent and may
        # or may not require different newline placement to ensure
        # proper rendering as a nested list.
        self.add_lines(f":error:\n{section.text}", section.info)
|
||||
|
||||
    def preamble(self, ent: QAPISchemaDefinition) -> None:
        """
        Generate option lines for QAPI entity directives.

        Emits :since:, :ifcond: and special-feature options (such as
        :deprecated: / :unstable:) for the directive, then a blank line
        separating the options from the body.
        """
        if ent.doc and ent.doc.since:
            assert ent.doc.since.kind == QAPIDoc.Kind.SINCE
            # Generated from the entity's docblock; info location is exact.
            self.add_line(f":since: {ent.doc.since.text}", ent.doc.since.info)

        if ent.ifcond.is_present():
            doc = ent.ifcond.docgen()
            assert ent.info
            # Generated from entity definition; info location is approximate.
            self.add_line(f":ifcond: {doc}", ent.info)

        # Hoist special features such as :deprecated: and :unstable:
        # into the options block for the entity. If, in the future, new
        # special features are added, qapi-domain will chirp about
        # unrecognized options and fail until they are handled in
        # qapi-domain.
        for feat in ent.features:
            if feat.is_special():
                # FIXME: handle ifcond if present. How to display that
                # information is TBD.
                # Generated from entity def; info location is approximate.
                assert feat.info
                self.add_line(f":{feat.name}:", feat.info)

        self.ensure_blank_line()
|
||||
|
||||
    def _insert_member_pointer(self, ent: QAPISchemaDefinition) -> None:
        """Emit "The members of ..." cross-reference entries.

        Commands/events with a non-implicit argument type, and objects
        with a base type, get a pointer to that type's members; union
        branches additionally get one pointer per non-empty variant.
        The "q_dummy" name/type marks these synthetic entries.
        """

        def _get_target(
            ent: QAPISchemaDefinition,
        ) -> Optional[QAPISchemaDefinition]:
            # The single type whose members are inherited wholesale, if any.
            if isinstance(ent, (QAPISchemaCommand, QAPISchemaEvent)):
                return ent.arg_type
            if isinstance(ent, QAPISchemaObjectType):
                return ent.base
            return None

        target = _get_target(ent)
        if target is not None and not target.is_implicit():
            assert ent.info
            self.add_field(
                self.member_field_type,
                "q_dummy",
                f"The members of :qapi:type:`{target.name}`.",
                ent.info,
                "q_dummy",
            )

        if isinstance(ent, QAPISchemaObjectType) and ent.branches is not None:
            for variant in ent.branches.variants:
                if variant.type.name == "q_empty":
                    continue
                assert ent.info
                self.add_field(
                    self.member_field_type,
                    "q_dummy",
                    f" When ``{ent.branches.tag_member.name}`` is "
                    f"``{variant.name}``: "
                    f"The members of :qapi:type:`{variant.type.name}`.",
                    ent.info,
                    "q_dummy",
                )
|
||||
|
||||
    def visit_sections(self, ent: QAPISchemaDefinition) -> None:
        """Render all doc sections of *ent*, dispatching on section kind.

        Also decides where to interleave the synthetic "The members
        of ..." entries produced by _insert_member_pointer().
        """
        sections = ent.doc.all_sections if ent.doc else []

        # Determine the index location at which we should generate
        # documentation for "The members of ..." pointers. This should
        # go at the end of the members section(s) if any. Note that
        # index 0 is assumed to be a plain intro section, even if it is
        # empty; and that a members section if present will always
        # immediately follow the opening PLAIN section.
        gen_index = 1
        if len(sections) > 1:
            while sections[gen_index].kind == QAPIDoc.Kind.MEMBER:
                gen_index += 1
                if gen_index >= len(sections):
                    break

        # Add sections in source order:
        for i, section in enumerate(sections):
            # @var is translated to ``var``:
            section.text = re.sub(r"@([\w-]+)", r"``\1``", section.text)

            if section.kind == QAPIDoc.Kind.PLAIN:
                self.visit_paragraph(section)
            elif section.kind == QAPIDoc.Kind.MEMBER:
                assert isinstance(section, QAPIDoc.ArgSection)
                self.visit_member(section)
            elif section.kind == QAPIDoc.Kind.FEATURE:
                assert isinstance(section, QAPIDoc.ArgSection)
                self.visit_feature(section)
            elif section.kind in (QAPIDoc.Kind.SINCE, QAPIDoc.Kind.TODO):
                # Since is handled in preamble, TODO is skipped intentionally.
                pass
            elif section.kind == QAPIDoc.Kind.RETURNS:
                self.visit_returns(section)
            elif section.kind == QAPIDoc.Kind.ERRORS:
                self.visit_errors(section)
            else:
                assert False

            # Generate "The members of ..." entries if necessary:
            if i == gen_index - 1:
                self._insert_member_pointer(ent)

        self.ensure_blank_line()
|
||||
|
||||
# Transmogrification core methods
|
||||
|
||||
def visit_module(self, path: str) -> None:
|
||||
name = Path(path).stem
|
||||
# module directives are credited to the first line of a module file.
|
||||
self.add_line_raw(f".. qapi:module:: {name}", path, 1)
|
||||
self.ensure_blank_line()
|
||||
|
||||
def visit_freeform(self, doc: QAPIDoc) -> None:
|
||||
# TODO: Once the old qapidoc transformer is deprecated, freeform
|
||||
# sections can be updated to pure rST, and this transformed removed.
|
||||
#
|
||||
# For now, translate our micro-format into rST. Code adapted
|
||||
# from Peter Maydell's freeform().
|
||||
|
||||
assert len(doc.all_sections) == 1, doc.all_sections
|
||||
body = doc.all_sections[0]
|
||||
text = body.text
|
||||
info = doc.info
|
||||
|
||||
if re.match(r"=+ ", text):
|
||||
# Section/subsection heading (if present, will always be the
|
||||
# first line of the block)
|
||||
(heading, _, text) = text.partition("\n")
|
||||
(leader, _, heading) = heading.partition(" ")
|
||||
# Implicit +1 for heading in the containing .rst doc
|
||||
level = len(leader) + 1
|
||||
|
||||
# https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#sections
|
||||
markers = ' #*=_^"'
|
||||
overline = level <= 2
|
||||
marker = markers[level]
|
||||
|
||||
self.ensure_blank_line()
|
||||
# This credits all 2 or 3 lines to the single source line.
|
||||
if overline:
|
||||
self.add_line(marker * len(heading), info)
|
||||
self.add_line(heading, info)
|
||||
self.add_line(marker * len(heading), info)
|
||||
self.ensure_blank_line()
|
||||
|
||||
# Eat blank line(s) and advance info
|
||||
trimmed = text.lstrip("\n")
|
||||
text = trimmed
|
||||
info = info.next_line(len(text) - len(trimmed) + 1)
|
||||
|
||||
self.add_lines(text, info)
|
||||
self.ensure_blank_line()
|
||||
|
||||
    def visit_entity(self, ent: QAPISchemaDefinition) -> None:
        """Generate the rST directive block for one QAPI entity.

        Sets self._curr_ent for the duration so helpers (e.g. the
        entity property) can see the entity being documented.
        """
        assert ent.info

        try:
            self._curr_ent = ent

            # Squish structs and unions together into an "object" directive.
            meta = ent.meta
            if meta in ("struct", "union"):
                meta = "object"

            # This line gets credited to the start of the /definition/.
            self.add_line(f".. qapi:{meta}:: {ent.name}", ent.info)
            with self.indented():
                self.preamble(ent)
                self.visit_sections(ent)
        finally:
            self._curr_ent = None
|
||||
|
||||
    def set_namespace(self, namespace: str, source: str, lineno: int) -> None:
        """Emit a qapi:namespace directive credited to the caller's location."""
        # NOTE(review): lineno is offset by +1 here -- presumably to
        # convert the caller's 0-based line to 1-based before
        # add_line_raw subtracts one again; confirm against
        # get_source_info() semantics.
        self.add_line_raw(
            f".. qapi:namespace:: {namespace}", source, lineno + 1
        )
        self.ensure_blank_line()
|
||||
|
||||
|
||||
class QAPISchemaGenDepVisitor(QAPISchemaVisitor):
    """Schema visitor that registers Sphinx dependencies per module.

    For every module visited, note_dependency() tells Sphinx that the
    generated documentation output depends on that module's input
    schema file, so schema edits trigger a rebuild.
    """

    def __init__(self, env: Any, qapidir: str) -> None:
        self._env = env
        self._qapidir = qapidir

    def visit_module(self, name: str) -> None:
        # The builtin pseudo-module has no backing schema file.
        if name != "./builtin":
            qapifile = "/".join((self._qapidir, name))
            self._env.note_dependency(os.path.abspath(qapifile))
        super().visit_module(name)
|
||||
|
||||
|
||||
class NestedDirective(SphinxDirective):
    """Directive base class providing title-tolerant nested rST parsing."""

    def run(self) -> Sequence[nodes.Node]:
        # Subclasses must provide their own run().
        raise NotImplementedError

    def do_parse(self, rstlist: StringList, node: nodes.Node) -> None:
        """
        Parse rST source lines and add them to the specified node

        Take the list of rST source lines rstlist, parse them as
        rST, and add the resulting docutils nodes as children of node.
        The nodes are parsed in a way that allows them to include
        subheadings (titles) without confusing the rendering of
        anything else.
        """
        with switch_source_input(self.state, rstlist):
            nested_parse_with_titles(self.state, rstlist, node)
|
||||
|
||||
|
||||
class QAPIDocDirective(NestedDirective):
    """Extract documentation from the specified QAPI .json file"""

    # Fix: docutils spells this attribute "required_arguments"; the
    # original "required_argument" was an inert typo, so a missing
    # argument previously crashed with IndexError in run() instead of
    # producing a proper directive error.
    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        "qapifile": directives.unchanged_required,
        "namespace": directives.unchanged,
        "transmogrify": directives.flag,
    }
    has_content = False

    def new_serialno(self) -> str:
        """Return a unique new ID string suitable for use as a node's ID"""
        env = self.state.document.settings.env
        return "qapidoc-%d" % env.new_serialno("qapidoc")

    def transmogrify(self, schema: QAPISchema) -> nodes.Element:
        """Render *schema* to docutils nodes via the rST transmogrifier.

        Generates intermediate rST with Transmogrifier, then runs a
        nested parse over it.  With -vv or DEBUG set, the intermediate
        rST is also written to a ".ir" file for inspection.
        """
        logger.info("Transmogrifying QAPI to rST ...")
        vis = Transmogrifier()
        modules = set()

        if "namespace" in self.options:
            vis.set_namespace(
                self.options["namespace"], *self.get_source_info()
            )

        for doc in schema.docs:
            module_source = doc.info.fname
            # Open a module directive the first time we see each file.
            if module_source not in modules:
                vis.visit_module(module_source)
                modules.add(module_source)

            if doc.symbol:
                ent = schema.lookup_entity(doc.symbol)
                assert isinstance(ent, QAPISchemaDefinition)
                vis.visit_entity(ent)
            else:
                vis.visit_freeform(doc)

        logger.info("Transmogrification complete.")

        contentnode = nodes.section()
        content = vis.result

        logger.info("Transmogrifier running nested parse ...")
        # Cleanup: the original guarded this on a constant
        # "titles_allowed = True" and built an unused scratch node;
        # both branches of dead code are removed here.  The parse has
        # always run title-tolerant, into contentnode.
        with switch_source_input(self.state, content):
            nested_parse_with_titles(self.state, content, contentnode)
        logger.info("Transmogrifier's nested parse completed.")

        if self.env.app.verbosity >= 2 or os.environ.get("DEBUG"):
            argname = "_".join(Path(self.arguments[0]).parts)
            name = Path(argname).stem + ".ir"
            self.write_intermediate(content, name)

        sys.stdout.flush()
        return contentnode

    def write_intermediate(self, content: StringList, filename: str) -> None:
        """Write generated rST to *filename* for debugging.

        Each output line is prefixed with the source file and line it
        was credited to, so the IR can be correlated with the schema.
        """
        logger.info(
            "writing intermediate rST for '%s' to '%s'",
            self.arguments[0],
            filename,
        )

        srctree = Path(self.env.app.config.qapidoc_srctree).resolve()
        outlines = []
        lcol_width = 0

        for i, line in enumerate(content):
            src, lineno = content.info(i)
            srcpath = Path(src).resolve()
            srcpath = srcpath.relative_to(srctree)

            lcol = f"{srcpath}:{lineno:04d}"
            lcol_width = max(lcol_width, len(lcol))
            outlines.append((lcol, line))

        with open(filename, "w", encoding="UTF-8") as outfile:
            for lcol, rcol in outlines:
                outfile.write(lcol.rjust(lcol_width))
                outfile.write(" |")
                if rcol:
                    outfile.write(f" {rcol}")
                outfile.write("\n")

    def legacy(self, schema: QAPISchema) -> nodes.Element:
        """Render *schema* with the legacy docutils-node generator."""
        vis = QAPISchemaGenRSTVisitor(self)
        vis.visit_begin(schema)
        for doc in schema.docs:
            if doc.symbol:
                vis.symbol(doc, schema.lookup_entity(doc.symbol))
            else:
                vis.freeform(doc)
        return vis.get_document_node()  # type: ignore

    def run(self) -> Sequence[nodes.Node]:
        """Parse the schema and return the generated document nodes."""
        env = self.state.document.settings.env
        qapifile = env.config.qapidoc_srctree + "/" + self.arguments[0]
        qapidir = os.path.dirname(qapifile)
        transmogrify = "transmogrify" in self.options

        try:
            schema = QAPISchema(qapifile)

            # First tell Sphinx about all the schema files that the
            # output documentation depends on (including 'qapifile' itself)
            schema.visit(QAPISchemaGenDepVisitor(env, qapidir))
        except QAPIError as err:
            # Launder QAPI parse errors into Sphinx extension errors
            # so they are displayed nicely to the user
            raise ExtensionError(str(err)) from err

        if transmogrify:
            contentnode = self.transmogrify(schema)
        else:
            contentnode = self.legacy(schema)

        return contentnode.children
|
||||
|
||||
|
||||
class QMPExample(CodeBlock, NestedDirective):
    """
    Custom admonition for QMP code examples.

    When the :annotated: option is present, the body of this directive
    is parsed as normal rST, but with any '::' code blocks set to use
    the QMP lexer. Code blocks must be explicitly written by the user,
    but this allows for intermingling explanatory paragraphs with
    arbitrary rST syntax and code blocks for more involved examples.

    When :annotated: is absent, the directive body is treated as a
    simple standalone QMP code block literal.
    """

    # Fix: docutils spells this "required_arguments"; the original
    # "required_argument" was an inert typo'd attribute.  (The value
    # matches the docutils default, so behavior is unchanged.)
    required_arguments = 0
    optional_arguments = 0
    has_content = True
    option_spec = {
        "annotated": directives.flag,
        "title": directives.unchanged,
    }

    def _highlightlang(self) -> addnodes.highlightlang:
        """Return the current highlightlang setting for the document"""
        node = None
        doc = self.state.document

        if hasattr(doc, "findall"):
            # docutils >= 0.18.1
            # Iterate to the end: the *last* highlightlang node is the
            # one in effect at this point in the document.
            for node in doc.findall(addnodes.highlightlang):
                pass
        else:
            for elem in doc.traverse():
                if isinstance(elem, addnodes.highlightlang):
                    node = elem

        if node:
            return node

        # No explicit directive found, use defaults
        node = addnodes.highlightlang(
            lang=self.env.config.highlight_language,
            force=False,
            # Yes, Sphinx uses this value to effectively disable line
            # numbers and not 0 or None or -1 or something. ¯\_(ツ)_/¯
            linenothreshold=sys.maxsize,
        )
        return node

    def admonition_wrap(self, *content: nodes.Node) -> List[nodes.Node]:
        """Wrap *content* in an "Example:" admonition node."""
        title = "Example:"
        if "title" in self.options:
            title = f"{title} {self.options['title']}"

        admon = nodes.admonition(
            "",
            nodes.title("", title),
            *content,
            classes=["admonition", "admonition-example"],
        )
        return [admon]

    def run_annotated(self) -> List[nodes.Node]:
        """Parse the body as rST, with '::' blocks lexed as QMP."""
        lang_node = self._highlightlang()

        content_node: nodes.Element = nodes.section()

        # Configure QMP highlighting for "::" blocks, if needed
        if lang_node["lang"] != "QMP":
            content_node += addnodes.highlightlang(
                lang="QMP",
                force=False,  # "True" ignores lexing errors
                linenothreshold=lang_node["linenothreshold"],
            )

        self.do_parse(self.content, content_node)

        # Restore prior language highlighting, if needed
        if lang_node["lang"] != "QMP":
            content_node += addnodes.highlightlang(**lang_node.attributes)

        return content_node.children

    def run(self) -> List[nodes.Node]:
        """Render the example, annotated or as a plain QMP literal block."""
        annotated = "annotated" in self.options

        if annotated:
            content_nodes = self.run_annotated()
        else:
            self.arguments = ["QMP"]
            content_nodes = super().run()

        return self.admonition_wrap(*content_nodes)
|
||||
|
||||
|
||||
def setup(app: Sphinx) -> ExtensionMetadata:
    """Register qapi-doc directive with Sphinx"""
    app.setup_extension("qapi_domain")
    app.add_config_value("qapidoc_srctree", None, "env")
    app.add_directive("qapi-doc", QAPIDocDirective)
    app.add_directive("qmp-example", QMPExample)

    return dict(
        version=__version__,
        parallel_read_safe=True,
        parallel_write_safe=True,
    )
|
440
docs/sphinx/qapidoc_legacy.py
Normal file
440
docs/sphinx/qapidoc_legacy.py
Normal file
|
@ -0,0 +1,440 @@
|
|||
# coding=utf-8
|
||||
# type: ignore
|
||||
#
|
||||
# QEMU qapidoc QAPI file parsing extension
|
||||
#
|
||||
# Copyright (c) 2020 Linaro
|
||||
#
|
||||
# This work is licensed under the terms of the GNU GPLv2 or later.
|
||||
# See the COPYING file in the top-level directory.
|
||||
|
||||
"""
|
||||
qapidoc is a Sphinx extension that implements the qapi-doc directive
|
||||
|
||||
The purpose of this extension is to read the documentation comments
|
||||
in QAPI schema files, and insert them all into the current document.
|
||||
|
||||
It implements one new rST directive, "qapi-doc::".
|
||||
Each qapi-doc:: directive takes one argument, which is the
|
||||
pathname of the schema file to process, relative to the source tree.
|
||||
|
||||
The docs/conf.py file must set the qapidoc_srctree config value to
|
||||
the root of the QEMU source tree.
|
||||
|
||||
The Sphinx documentation on writing extensions is at:
|
||||
https://www.sphinx-doc.org/en/master/development/index.html
|
||||
"""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.statemachine import ViewList
|
||||
from qapi.error import QAPISemError
|
||||
from qapi.gen import QAPISchemaVisitor
|
||||
from qapi.parser import QAPIDoc
|
||||
|
||||
|
||||
def dedent(text: str) -> str:
    """Adjust indentation to make description text parse as a paragraph."""

    parts = text.splitlines(True)
    if re.match(r"\s+", parts[0]):
        # First line is indented; description started on the line after
        # the name, so the whole block shares the indent -- strip it
        # uniformly.
        return textwrap.dedent(text)

    # Description started on the same line as the name; only lines 2+
    # carry the extra indent.
    return parts[0] + textwrap.dedent("".join(parts[1:]))
|
||||
|
||||
|
||||
class QAPISchemaGenRSTVisitor(QAPISchemaVisitor):
|
||||
"""A QAPI schema visitor which generates docutils/Sphinx nodes
|
||||
|
||||
This class builds up a tree of docutils/Sphinx nodes corresponding
|
||||
to documentation for the various QAPI objects. To use it, first
|
||||
create a QAPISchemaGenRSTVisitor object, and call its
|
||||
visit_begin() method. Then you can call one of the two methods
|
||||
'freeform' (to add documentation for a freeform documentation
|
||||
chunk) or 'symbol' (to add documentation for a QAPI symbol). These
|
||||
will cause the visitor to build up the tree of document
|
||||
nodes. Once you've added all the documentation via 'freeform' and
|
||||
'symbol' method calls, you can call 'get_document_node' to get
|
||||
the final list of document nodes (in a form suitable for returning
|
||||
from a Sphinx directive's 'run' method).
|
||||
"""
|
||||
    def __init__(self, sphinx_directive):
        """Create a visitor bound to the invoking Sphinx directive."""
        # Doc block of the symbol currently being visited (set by symbol()).
        self._cur_doc = None
        # Needed to mint unique section IDs via new_serialno().
        self._sphinx_directive = sphinx_directive
        # Root of the generated docutils node tree.
        self._top_node = nodes.section()
        # Stack of open heading sections; entry [n-1] is heading level n.
        self._active_headings = [self._top_node]
|
||||
|
||||
    def _make_dlitem(self, term, defn):
        """Return a dlitem node with the specified term and definition.

        term should be a list of Text and literal nodes.
        defn should be one of:
        - a string, which will be handed to _parse_text_into_node
        - a list of Text and literal nodes, which will be put into
          a paragraph node
        - None/empty, in which case the item has no definition body
        """
        dlitem = nodes.definition_list_item()
        dlterm = nodes.term('', '', *term)
        dlitem += dlterm
        if defn:
            dldef = nodes.definition()
            if isinstance(defn, list):
                dldef += nodes.paragraph('', '', *defn)
            else:
                # A plain string: parse it as rST.
                self._parse_text_into_node(defn, dldef)
            dlitem += dldef
        return dlitem
|
||||
|
||||
def _make_section(self, title):
|
||||
"""Return a section node with optional title"""
|
||||
section = nodes.section(ids=[self._sphinx_directive.new_serialno()])
|
||||
if title:
|
||||
section += nodes.title(title, title)
|
||||
return section
|
||||
|
||||
    def _nodes_for_ifcond(self, ifcond, with_if=True):
        """Return list of Text, literal nodes for the ifcond

        Return a list which gives text like ' (If: condition)'.
        If with_if is False, we don't return the "(If: " and ")".
        Returns [] when the condition renders to nothing.
        """

        doc = ifcond.docgen()
        if not doc:
            return []
        doc = nodes.literal('', doc)
        if not with_if:
            return [doc]

        nodelist = [nodes.Text(' ('), nodes.strong('', 'If: ')]
        nodelist.append(doc)
        nodelist.append(nodes.Text(')'))
        return nodelist
|
||||
|
||||
    def _nodes_for_one_member(self, member):
        """Return list of Text, literal nodes for this member

        Return a list of doctree nodes which give text like
        'name: type (optional) (If: ...)' suitable for use as the
        'term' part of a definition list item.
        """
        term = [nodes.literal('', member.name)]
        # Some types (e.g. implicit ones) have no documentable type name.
        if member.type.doc_type():
            term.append(nodes.Text(': '))
            term.append(nodes.literal('', member.type.doc_type()))
        if member.optional:
            term.append(nodes.Text(' (optional)'))
        if member.ifcond.is_present():
            term.extend(self._nodes_for_ifcond(member.ifcond))
        return term
|
||||
|
||||
    def _nodes_for_variant_when(self, branches, variant):
        """Return list of Text, literal nodes for variant 'when' clause

        Return a list of doctree nodes which give text like
        'when tagname is variant (If: ...)' suitable for use in
        the 'branches' part of a definition list.
        """
        term = [nodes.Text(' when '),
                nodes.literal('', branches.tag_member.name),
                nodes.Text(' is '),
                nodes.literal('', '"%s"' % variant.name)]
        if variant.ifcond.is_present():
            term.extend(self._nodes_for_ifcond(variant.ifcond))
        return term
|
||||
|
||||
    def _nodes_for_members(self, doc, what, base=None, branches=None):
        """Return list of doctree nodes for the table of members

        :param doc: doc block whose args become definition-list items.
        :param what: section title ("Members", "Arguments", ...).
        :param base: optional base type; adds a "The members of ..." item.
        :param branches: optional union branches; adds one
            "The members of ... when ..." item per non-empty variant.
        """
        dlnode = nodes.definition_list()
        for section in doc.args.values():
            term = self._nodes_for_one_member(section.member)
            # TODO drop fallbacks when undocumented members are outlawed
            if section.text:
                defn = dedent(section.text)
            else:
                defn = [nodes.Text('Not documented')]

            dlnode += self._make_dlitem(term, defn)

        if base:
            dlnode += self._make_dlitem([nodes.Text('The members of '),
                                         nodes.literal('', base.doc_type())],
                                        None)

        if branches:
            for v in branches.variants:
                if v.type.name == 'q_empty':
                    continue
                assert not v.type.is_implicit()
                term = [nodes.Text('The members of '),
                        nodes.literal('', v.type.doc_type())]
                term.extend(self._nodes_for_variant_when(branches, v))
                dlnode += self._make_dlitem(term, None)

        # Nothing at all to document: emit no section.
        if not dlnode.children:
            return []

        section = self._make_section(what)
        section += dlnode
        return [section]
|
||||
|
||||
    def _nodes_for_enum_values(self, doc):
        """Return list of doctree nodes for the table of enum values

        Returns [] when the enum has no documented values at all.
        """
        seen_item = False
        dlnode = nodes.definition_list()
        for section in doc.args.values():
            termtext = [nodes.literal('', section.member.name)]
            if section.member.ifcond.is_present():
                termtext.extend(self._nodes_for_ifcond(section.member.ifcond))
            # TODO drop fallbacks when undocumented members are outlawed
            if section.text:
                defn = dedent(section.text)
            else:
                defn = [nodes.Text('Not documented')]

            dlnode += self._make_dlitem(termtext, defn)
            seen_item = True

        if not seen_item:
            return []

        section = self._make_section('Values')
        section += dlnode
        return [section]
|
||||
|
||||
    def _nodes_for_arguments(self, doc, arg_type):
        """Return list of doctree nodes for the arguments section

        A non-implicit (named) argument type is documented as a single
        "The members of <type>" reference; an implicit one is expanded
        member-by-member via _nodes_for_members().
        """
        if arg_type and not arg_type.is_implicit():
            # Named argument type: the doc block carries no per-member docs.
            assert not doc.args
            section = self._make_section('Arguments')
            dlnode = nodes.definition_list()
            dlnode += self._make_dlitem(
                [nodes.Text('The members of '),
                 nodes.literal('', arg_type.name)],
                None)
            section += dlnode
            return [section]

        return self._nodes_for_members(doc, 'Arguments')
|
||||
|
||||
    def _nodes_for_features(self, doc):
        """Return list of doctree nodes for the table of features

        Returns [] when there are no documented features.
        """
        seen_item = False
        dlnode = nodes.definition_list()
        for section in doc.features.values():
            # NOTE(review): unlike members/values, there is no
            # "Not documented" fallback here -- this assumes feature
            # text is always present; confirm against parser.py.
            dlnode += self._make_dlitem(
                [nodes.literal('', section.member.name)], dedent(section.text))
            seen_item = True

        if not seen_item:
            return []

        section = self._make_section('Features')
        section += dlnode
        return [section]
|
||||
|
||||
    def _nodes_for_sections(self, doc):
        """Return list of doctree nodes for additional sections

        TODO sections are hidden; PLAIN sections are appended without a
        heading; all other kinds get a titled section of their own.
        """
        nodelist = []
        for section in doc.sections:
            if section.kind == QAPIDoc.Kind.TODO:
                # Hide TODO: sections
                continue

            if section.kind == QAPIDoc.Kind.PLAIN:
                # Sphinx cannot handle sectionless titles;
                # Instead, just append the results to the prior section.
                container = nodes.container()
                self._parse_text_into_node(section.text, container)
                nodelist += container.children
                continue

            # Section title derived from the kind name, e.g. "Returns".
            snode = self._make_section(section.kind.name.title())
            self._parse_text_into_node(dedent(section.text), snode)
            nodelist.append(snode)
        return nodelist
|
||||
|
||||
def _nodes_for_if_section(self, ifcond):
|
||||
"""Return list of doctree nodes for the "If" section"""
|
||||
nodelist = []
|
||||
if ifcond.is_present():
|
||||
snode = self._make_section('If')
|
||||
snode += nodes.paragraph(
|
||||
'', '', *self._nodes_for_ifcond(ifcond, with_if=False)
|
||||
)
|
||||
nodelist.append(snode)
|
||||
return nodelist
|
||||
|
||||
    def _add_doc(self, typ, sections):
        """Add documentation for a command/object/enum...

        We assume we're documenting the thing defined in self._cur_doc.
        typ is the type of thing being added ("Command", "Object", etc)

        sections is a list of nodes for sections to add to the definition.
        """

        doc = self._cur_doc
        snode = nodes.section(ids=[self._sphinx_directive.new_serialno()])
        # Title is "symbol (Type)", with the symbol in literal markup.
        snode += nodes.title('', '', *[nodes.literal(doc.symbol, doc.symbol),
                                       nodes.Text(' (' + typ + ')')])
        self._parse_text_into_node(doc.body.text, snode)
        for s in sections:
            if s is not None:
                snode += s
        self._add_node_to_current_heading(snode)
|
||||
|
||||
    def visit_enum_type(self, name, info, ifcond, features, members, prefix):
        """Build the doc nodes for an enum type definition."""
        doc = self._cur_doc
        self._add_doc('Enum',
                      self._nodes_for_enum_values(doc)
                      + self._nodes_for_features(doc)
                      + self._nodes_for_sections(doc)
                      + self._nodes_for_if_section(ifcond))
|
||||
|
||||
    def visit_object_type(self, name, info, ifcond, features,
                          base, members, branches):
        """Build the doc nodes for a struct/union type definition."""
        doc = self._cur_doc
        # An implicit base has no name to reference; drop it.
        if base and base.is_implicit():
            base = None
        self._add_doc('Object',
                      self._nodes_for_members(doc, 'Members', base, branches)
                      + self._nodes_for_features(doc)
                      + self._nodes_for_sections(doc)
                      + self._nodes_for_if_section(ifcond))
|
||||
|
||||
    def visit_alternate_type(self, name, info, ifcond, features,
                             alternatives):
        """Build the doc nodes for an alternate type definition."""
        doc = self._cur_doc
        self._add_doc('Alternate',
                      self._nodes_for_members(doc, 'Members')
                      + self._nodes_for_features(doc)
                      + self._nodes_for_sections(doc)
                      + self._nodes_for_if_section(ifcond))
|
||||
|
||||
    def visit_command(self, name, info, ifcond, features, arg_type,
                      ret_type, gen, success_response, boxed, allow_oob,
                      allow_preconfig, coroutine):
        """Build the doc nodes for a command definition."""
        doc = self._cur_doc
        self._add_doc('Command',
                      self._nodes_for_arguments(doc, arg_type)
                      + self._nodes_for_features(doc)
                      + self._nodes_for_sections(doc)
                      + self._nodes_for_if_section(ifcond))
|
||||
|
||||
def visit_event(self, name, info, ifcond, features, arg_type, boxed):
    """Add documentation nodes for a QMP event definition."""
    doc = self._cur_doc
    parts = []
    parts.extend(self._nodes_for_arguments(doc, arg_type))
    parts.extend(self._nodes_for_features(doc))
    parts.extend(self._nodes_for_sections(doc))
    parts.extend(self._nodes_for_if_section(ifcond))
    self._add_doc('Event', parts)
|
||||
|
||||
def symbol(self, doc, entity):
    """Add documentation for one symbol to the document tree.

    Main entry point for documenting a symbol (a 'command',
    'object', 'event', etc.).  Dispatches through entity.visit(),
    which calls back into the appropriate visit_* method for the
    entity's kind.
    """
    # The visit_* callbacks read the current doc block via _cur_doc.
    self._cur_doc = doc
    entity.visit(self)
    self._cur_doc = None
|
||||
|
||||
def _start_new_heading(self, heading, level):
    """Start a new section at the specified heading level.

    Creates a section titled 'heading' and attaches it as a child
    of the active heading one level up.  Subsequent document chunks
    (commands, freeform doc, etc.) will be placed under this new
    section.  Returns the new section node.
    """
    # A level-N heading is only valid directly under a level-(N-1)
    # heading that is currently active.
    if level > len(self._active_headings):
        raise QAPISemError(self._cur_doc.info,
                           'Level %d subheading found outside a '
                           'level %d heading'
                           % (level, level - 1))
    section = self._make_section(heading)
    self._active_headings[level - 1] += section
    # Drop any deeper headings, then make this one the innermost.
    self._active_headings = self._active_headings[:level]
    self._active_headings.append(section)
    return section
|
||||
|
||||
def _add_node_to_current_heading(self, node):
|
||||
"""Add the node to whatever the current active heading is"""
|
||||
self._active_headings[-1] += node
|
||||
|
||||
def freeform(self, doc):
    """Add a piece of 'freeform' documentation to the document tree.

    A freeform chunk is not tied to a particular symbol (for
    instance, an introduction).  If it starts with a line of the
    form '= Heading text', that line is a section or subsection
    heading, with the level given by the number of '=' signs.
    """
    # Per QAPIDoc, free-form blocks carry only a body section.
    assert not doc.sections
    assert not doc.args
    assert not doc.features
    self._cur_doc = doc

    text = doc.body.text
    if not re.match(r'=+ ', text):
        node = nodes.container()
    else:
        # A heading, if present, is always the block's first line.
        heading, _, text = text.partition('\n')
        leader, _, heading = heading.partition(' ')
        node = self._start_new_heading(heading, len(leader))
        if not text:
            return

    self._parse_text_into_node(text, node)
    self._cur_doc = None
|
||||
|
||||
def _parse_text_into_node(self, doctext, node):
    """Parse a chunk of QAPI-doc-format text into *node*.

    The doc comment can contain most inline rST markup, including
    bulleted and enumerated lists.  As an extra permitted piece of
    markup, @var is turned into ``var``.
    """
    # Rewrite "@var" into the rST literal form "``var``".
    doctext = re.sub(r'@([\w-]+)', r'``\1``', doctext)

    info = self._cur_doc.info
    rstlist = ViewList()
    # Every line is attributed to the first line of the doc comment
    # rather than its true location; being more precise would need
    # QAPIDoc to track line numbers within all its sub-parts, plus
    # counting lines here.
    for line in doctext.splitlines():
        rstlist.append(line, info.fname, info.line)
    # Append a trailing blank line: some rST syntax errors get
    # reported against the line after the last one with text, and
    # without a ViewList entry for it Sphinx 1.6's AutodocReporter
    # misattributes the source/line (pointing at the top-level .rst
    # instead of the offending .json).  The blank line does not
    # affect rendered output.
    rstlist.append("", info.fname, info.line)
    self._sphinx_directive.do_parse(rstlist, node)
|
||||
|
||||
def get_document_node(self):
    """Return the root docutils node that makes up the document."""
    return self._top_node
|
48
docs/sphinx/qmp_lexer.py
Normal file
48
docs/sphinx/qmp_lexer.py
Normal file
|
@ -0,0 +1,48 @@
|
|||
# QEMU Monitor Protocol Lexer Extension
|
||||
#
|
||||
# Copyright (C) 2019, Red Hat Inc.
|
||||
#
|
||||
# Authors:
|
||||
# Eduardo Habkost <ehabkost@redhat.com>
|
||||
# John Snow <jsnow@redhat.com>
|
||||
#
|
||||
# This work is licensed under the terms of the GNU GPLv2 or later.
|
||||
# See the COPYING file in the top-level directory.
|
||||
"""qmp_lexer is a Sphinx extension that provides a QMP lexer for code blocks."""
|
||||
|
||||
from pygments.lexer import RegexLexer, DelegatingLexer
|
||||
from pygments.lexers.data import JsonLexer
|
||||
from pygments import token
|
||||
from sphinx import errors
|
||||
|
||||
class QMPExampleMarkersLexer(RegexLexer):
    """
    Lexer for the QMP-example annotations that are not JSON.

    Adds support for the directionality flow markers ('-> ' and
    '<- ') and for elision indicators ('...').
    """
    tokens = {
        'root': [
            # Directionality flow markers.
            (r'-> ', token.Generic.Prompt),
            (r'<- ', token.Generic.Prompt),
            # Elision: bare "..." or "... some text ...".
            (r'\.{3}( .* \.{3})?', token.Comment.Multiline),
        ]
    }
|
||||
|
||||
class QMPExampleLexer(DelegatingLexer):
    """Lexer for annotated QMP examples: JSON plus QMP markers."""

    def __init__(self, **options):
        # Root lexer handles the JSON; the markers lexer picks up
        # what the JSON lexer reports as token.Error.
        super(QMPExampleLexer, self).__init__(
            JsonLexer, QMPExampleMarkersLexer, token.Error, **options
        )
|
||||
|
||||
def setup(sphinx):
    """Sphinx extension entry point: register the QMP lexer."""
    try:
        sphinx.require_sphinx('2.1')
        sphinx.add_lexer('QMP', QMPExampleLexer)
    except errors.VersionRequirementError:
        # Sphinx older than 2.1 expects a lexer *instance*, not a
        # lexer class.
        sphinx.add_lexer('QMP', QMPExampleLexer())

    return {
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
|
Loading…
Add table
Add a link
Reference in a new issue