author     Daniel Baumann <daniel.baumann@progress-linux.org>    2024-06-05 16:20:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>    2024-06-05 16:20:58 +0000
commit     5bb0bb4be543fd5eca41673696a62ed80d493591 (patch)
tree       ad2c464f140e86c7f178a6276d7ea4a93e3e6c92 /sphinx/ext
parent     Adding upstream version 7.2.6. (diff)
Adding upstream version 7.3.7. (upstream/7.3.7, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sphinx/ext')
-rw-r--r--  sphinx/ext/apidoc.py                     |  20
-rw-r--r--  sphinx/ext/autodoc/__init__.py           | 176
-rw-r--r--  sphinx/ext/autodoc/directive.py          |  13
-rw-r--r--  sphinx/ext/autodoc/importer.py           | 138
-rw-r--r--  sphinx/ext/autodoc/mock.py               |  12
-rw-r--r--  sphinx/ext/autodoc/preserve_defaults.py  |   7
-rw-r--r--  sphinx/ext/autodoc/type_comment.py       |   3
-rw-r--r--  sphinx/ext/autodoc/typehints.py          |   9
-rw-r--r--  sphinx/ext/autosectionlabel.py           |   5
-rw-r--r--  sphinx/ext/autosummary/__init__.py       |  38
-rw-r--r--  sphinx/ext/autosummary/generate.py       |  18
-rw-r--r--  sphinx/ext/coverage.py                   |  46
-rw-r--r--  sphinx/ext/doctest.py                    |  71
-rw-r--r--  sphinx/ext/duration.py                   |   1
-rw-r--r--  sphinx/ext/extlinks.py                   |   7
-rw-r--r--  sphinx/ext/githubpages.py                |   5
-rw-r--r--  sphinx/ext/graphviz.py                   |  17
-rw-r--r--  sphinx/ext/ifconfig.py                   |  10
-rw-r--r--  sphinx/ext/imgconverter.py               |  11
-rw-r--r--  sphinx/ext/imgmath.py                    |  17
-rw-r--r--  sphinx/ext/inheritance_diagram.py        |  62
-rw-r--r--  sphinx/ext/intersphinx.py                | 163
-rw-r--r--  sphinx/ext/linkcode.py                   |   5
-rw-r--r--  sphinx/ext/mathjax.py                    |   3
-rw-r--r--  sphinx/ext/napoleon/__init__.py          |  19
-rw-r--r--  sphinx/ext/napoleon/docstring.py         |  66
-rw-r--r--  sphinx/ext/todo.py                       |  17
-rw-r--r--  sphinx/ext/viewcode.py                   |  23
28 files changed, 620 insertions, 362 deletions
diff --git a/sphinx/ext/apidoc.py b/sphinx/ext/apidoc.py
index 42df848..b2e2291 100644
--- a/sphinx/ext/apidoc.py
+++ b/sphinx/ext/apidoc.py
@@ -32,7 +32,7 @@ from sphinx.util.osutil import FileAvoidWrite, ensuredir
from sphinx.util.template import ReSTRenderer
if TYPE_CHECKING:
- from collections.abc import Generator, Sequence
+ from collections.abc import Iterator, Sequence
logger = logging.getLogger(__name__)
@@ -47,7 +47,7 @@ else:
'show-inheritance',
]
-PY_SUFFIXES = ('.py', '.pyx') + tuple(EXTENSION_SUFFIXES)
+PY_SUFFIXES = ('.py', '.pyx', *tuple(EXTENSION_SUFFIXES))
template_dir = path.join(package_dir, 'templates', 'apidoc')
@@ -68,7 +68,7 @@ def module_join(*modnames: str | None) -> str:
def is_packagedir(dirname: str | None = None, files: list[str] | None = None) -> bool:
"""Check given *files* contains __init__ file."""
- if files is None and dirname is None:
+ if files is dirname is None:
return False
if files is None:
@@ -168,7 +168,7 @@ def create_modules_toc_file(modules: list[str], opts: Any, name: str = 'modules'
"""Create the module's index."""
modules.sort()
prev_module = ''
- for module in modules[:]:
+ for module in modules.copy():
# look if the module is a subpackage and, if yes, ignore it
if module.startswith(prev_module + '.'):
modules.remove(module)
@@ -209,14 +209,12 @@ def is_skipped_module(filename: str, opts: Any, _excludes: Sequence[re.Pattern[s
if not path.exists(filename):
# skip if the file doesn't exist
return True
- if path.basename(filename).startswith('_') and not opts.includeprivate:
- # skip if the module has a "private" name
- return True
- return False
+ # skip if the module has a "private" name
+ return path.basename(filename).startswith('_') and not opts.includeprivate
def walk(rootpath: str, excludes: Sequence[re.Pattern[str]], opts: Any,
- ) -> Generator[tuple[str, list[str], list[str]], None, None]:
+ ) -> Iterator[tuple[str, list[str], list[str]]]:
"""Walk through the directory and list files and subdirectories up."""
followlinks = getattr(opts, 'followlinks', False)
includeprivate = getattr(opts, 'includeprivate', False)
@@ -268,14 +266,14 @@ def recurse_tree(rootpath: str, excludes: Sequence[re.Pattern[str]], opts: Any,
is_pkg = is_packagedir(None, files)
is_namespace = not is_pkg and implicit_namespaces
if is_pkg:
- for f in files[:]:
+ for f in files.copy():
if is_initpy(f):
files.remove(f)
files.insert(0, f)
elif root != rootpath:
# only accept non-package at toplevel unless using implicit namespaces
if not implicit_namespaces:
- del subs[:]
+ subs.clear()
continue
if is_pkg or is_namespace:
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 8d68f72..45e4cad 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -7,11 +7,13 @@ for those who like elaborate docstrings.
from __future__ import annotations
+import functools
+import operator
import re
import sys
import warnings
from inspect import Parameter, Signature
-from typing import TYPE_CHECKING, Any, Callable, TypeVar
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar
from docutils.statemachine import StringList
@@ -31,7 +33,13 @@ from sphinx.util.inspect import (
safe_getattr,
stringify_signature,
)
-from sphinx.util.typing import OptionSpec, get_type_hints, restify, stringify_annotation
+from sphinx.util.typing import (
+ ExtensionMetadata,
+ OptionSpec,
+ get_type_hints,
+ restify,
+ stringify_annotation,
+)
if TYPE_CHECKING:
from collections.abc import Iterator, Sequence
@@ -221,7 +229,7 @@ def between(
return
deleted = 0
delete = not exclude
- orig_lines = lines[:]
+ orig_lines = lines.copy()
for i, line in enumerate(orig_lines):
if delete:
lines.pop(i - deleted)
@@ -243,6 +251,7 @@ def between(
# But we define this class here to keep compatibility (see #4538)
class Options(dict):
"""A dict/attribute hybrid that returns None on nonexisting keys."""
+
def copy(self) -> Options:
return Options(super().copy())
@@ -275,7 +284,7 @@ class ObjectMember:
self.skipped = skipped
self.class_ = class_
- def __getitem__(self, index):
+ def __getitem__(self, index: int) -> Any:
warnings.warn('The tuple interface of ObjectMember is deprecated. '
'Use (obj.__name__, obj.object) instead.',
RemovedInSphinx80Warning, stacklevel=2)
@@ -297,6 +306,7 @@ class Documenter:
in fact, it will be used to parse an auto directive's options that matches
the Documenter.
"""
+
#: name by which the directive is called (auto...) and the default
#: generated directive name
objtype = 'object'
@@ -309,7 +319,7 @@ class Documenter:
#: true if the generated content may contain titles
titles_allowed = True
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'no-index': bool_option,
'noindex': bool_option,
}
@@ -319,8 +329,9 @@ class Documenter:
return autodoc_attrgetter(self.env.app, obj, name, *defargs)
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
"""Called to see if a member can be documented by this Documenter."""
msg = 'must be implemented in subclasses'
raise NotImplementedError(msg)
@@ -450,9 +461,7 @@ class Documenter:
subject = inspect.unpartial(self.object)
modname = self.get_attr(subject, '__module__', None)
- if modname and modname != self.modname:
- return False
- return True
+ return not modname or modname == self.modname
def format_args(self, **kwargs: Any) -> str:
"""Format the argument signature of *self.object*.
@@ -923,7 +932,7 @@ class Documenter:
except PycodeError:
pass
- docstrings: list[str] = sum(self.get_doc() or [], [])
+ docstrings: list[str] = functools.reduce(operator.iadd, self.get_doc() or [], [])
if ismock(self.object) and not docstrings:
logger.warning(__('A mocked object is detected: %r'),
self.name, type='autodoc')
@@ -966,11 +975,12 @@ class ModuleDocumenter(Documenter):
"""
Specialized Documenter subclass for modules.
"""
+
objtype = 'module'
content_indent = ''
_extra_indent = ' '
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'members': members_option, 'undoc-members': bool_option,
'no-index': bool_option, 'inherited-members': inherited_members_option,
'show-inheritance': bool_option, 'synopsis': identity,
@@ -997,8 +1007,9 @@ class ModuleDocumenter(Documenter):
self.add_line(line, src[0], src[1])
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
# don't document submodules automatically
return False
@@ -1127,13 +1138,14 @@ class ModuleLevelDocumenter(Documenter):
Specialized Documenter subclass for objects on module level (functions,
classes, data/constants).
"""
+
def resolve_name(self, modname: str | None, parents: Any, path: str, base: str,
) -> tuple[str | None, list[str]]:
if modname is not None:
- return modname, parents + [base]
+ return modname, [*parents, base]
if path:
modname = path.rstrip('.')
- return modname, parents + [base]
+ return modname, [*parents, base]
# if documenting a toplevel object without explicit module,
# it can be contained in another auto directive ...
@@ -1142,7 +1154,7 @@ class ModuleLevelDocumenter(Documenter):
if not modname:
modname = self.env.ref_context.get('py:module')
# ... else, it stays None, which means invalid
- return modname, parents + [base]
+ return modname, [*parents, base]
class ClassLevelDocumenter(Documenter):
@@ -1150,10 +1162,11 @@ class ClassLevelDocumenter(Documenter):
Specialized Documenter subclass for objects on class level (methods,
attributes).
"""
+
def resolve_name(self, modname: str | None, parents: Any, path: str, base: str,
) -> tuple[str | None, list[str]]:
if modname is not None:
- return modname, parents + [base]
+ return modname, [*parents, base]
if path:
mod_cls = path.rstrip('.')
@@ -1177,7 +1190,7 @@ class ClassLevelDocumenter(Documenter):
if not modname:
modname = self.env.ref_context.get('py:module')
# ... else, it stays None, which means invalid
- return modname, parents + [base]
+ return modname, [*parents, base]
class DocstringSignatureMixin:
@@ -1185,6 +1198,7 @@ class DocstringSignatureMixin:
Mixin for FunctionDocumenter and MethodDocumenter to provide the
feature of reading the signature from the docstring.
"""
+
_new_docstrings: list[list[str]] | None = None
_signatures: list[str] = []
@@ -1256,7 +1270,7 @@ class DocstringSignatureMixin:
self.args, self.retann = result
sig = super().format_signature(**kwargs) # type: ignore[misc]
if self._signatures:
- return "\n".join([sig] + self._signatures)
+ return "\n".join((sig, *self._signatures))
else:
return sig
@@ -1266,6 +1280,7 @@ class DocstringStripSignatureMixin(DocstringSignatureMixin):
Mixin for AttributeDocumenter to provide the
feature of stripping any function signature from the docstring.
"""
+
def format_signature(self, **kwargs: Any) -> str:
if (
self.args is None
@@ -1286,12 +1301,14 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
"""
Specialized Documenter subclass for functions.
"""
+
objtype = 'function'
member_order = 30
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
# supports functions, builtins and bound methods exported at the module level
return (inspect.isfunction(member) or inspect.isbuiltin(member) or
(inspect.isroutine(member) and isinstance(parent, ModuleDocumenter)))
@@ -1393,7 +1410,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
if len(sig.parameters) == 0:
return None
- def dummy():
+ def dummy(): # NoQA: ANN202
pass
params = list(sig.parameters.values())
@@ -1414,6 +1431,7 @@ class DecoratorDocumenter(FunctionDocumenter):
"""
Specialized Documenter subclass for decorator functions.
"""
+
objtype = 'decorator'
# must be lower than FunctionDocumenter
@@ -1445,9 +1463,10 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
"""
Specialized Documenter subclass for classes.
"""
+
objtype = 'class'
member_order = 20
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'members': members_option, 'undoc-members': bool_option,
'no-index': bool_option, 'inherited-members': inherited_members_option,
'show-inheritance': bool_option, 'member-order': member_order_option,
@@ -1481,8 +1500,9 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
merge_members_option(self.options)
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
return isinstance(member, type) or (
isattr and (inspect.isNewType(member) or isinstance(member, TypeVar)))
@@ -1509,7 +1529,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
return None, None, None
def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
- """ Get the `attr` function or method from `obj`, if it is user-defined. """
+ """Get the `attr` function or method from `obj`, if it is user-defined."""
if inspect.is_builtin_class_method(obj, attr):
return None
attr = self.get_attr(obj, attr, None)
@@ -1657,7 +1677,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
try:
analyzer = ModuleAnalyzer.for_module(cls.__module__)
analyzer.analyze()
- qualname = '.'.join([cls.__qualname__, self._signature_method_name])
+ qualname = f'{cls.__qualname__}.{self._signature_method_name}'
if qualname in analyzer.overloads:
return analyzer.overloads.get(qualname, [])
elif qualname in analyzer.tagorder:
@@ -1678,7 +1698,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
__qualname__ = None
if __modname__ and __qualname__:
- return '.'.join([__modname__, __qualname__])
+ return f'{__modname__}.{__qualname__}'
else:
return None
@@ -1904,6 +1924,7 @@ class ExceptionDocumenter(ClassDocumenter):
"""
Specialized ClassDocumenter subclass for exceptions.
"""
+
objtype = 'exception'
member_order = 10
@@ -1911,8 +1932,9 @@ class ExceptionDocumenter(ClassDocumenter):
priority = ClassDocumenter.priority + 5
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
try:
return isinstance(member, type) and issubclass(member, BaseException)
except TypeError as exc:
@@ -2016,16 +2038,18 @@ class DataDocumenter(GenericAliasMixin,
"""
Specialized Documenter subclass for data items.
"""
+
objtype = 'data'
member_order = 40
priority = -10
- option_spec: OptionSpec = dict(ModuleLevelDocumenter.option_spec)
+ option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
option_spec["annotation"] = annotation_option
option_spec["no-value"] = bool_option
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
return isinstance(parent, ModuleDocumenter) and isattr
def update_annotations(self, parent: Any) -> None:
@@ -2054,7 +2078,8 @@ class DataDocumenter(GenericAliasMixin,
return True
else:
doc = self.get_doc() or []
- docstring, metadata = separate_metadata('\n'.join(sum(doc, [])))
+ docstring, metadata = separate_metadata(
+ '\n'.join(functools.reduce(operator.iadd, doc, [])))
if 'hide-value' in metadata:
return True
@@ -2135,14 +2160,16 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
"""
Specialized Documenter subclass for methods (normal, static and class).
"""
+
objtype = 'method'
directivetype = 'method'
member_order = 50
priority = 1 # must be more than FunctionDocumenter
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
return inspect.isroutine(member) and not isinstance(parent, ModuleDocumenter)
def import_object(self, raiseerror: bool = False) -> bool:
@@ -2169,7 +2196,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
kwargs.setdefault('unqualified_typehints', True)
try:
- if self.object == object.__init__ and self.parent != object:
+ if self.object == object.__init__ and self.parent != object: # NoQA: E721
# Classes not having own __init__() method are shown as no arguments.
#
# Note: The signature of object.__init__() is (self, /, *args, **kwargs).
@@ -2206,7 +2233,8 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
self.add_line(' :abstractmethod:', sourcename)
if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
self.add_line(' :async:', sourcename)
- if inspect.isclassmethod(obj):
+ if (inspect.isclassmethod(obj) or
+ inspect.is_singledispatch_method(obj) and inspect.isclassmethod(obj.func)):
self.add_line(' :classmethod:', sourcename)
if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
self.add_line(' :staticmethod:', sourcename)
@@ -2238,6 +2266,8 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
if typ is object:
pass # default implementation. skipped.
else:
+ if inspect.isclassmethod(func):
+ func = func.__func__
dispatchmeth = self.annotate_to_first_argument(func, typ)
if dispatchmeth:
documenter = MethodDocumenter(self.directive, '')
@@ -2292,7 +2322,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
if len(sig.parameters) == 1:
return None
- def dummy():
+ def dummy(): # NoQA: ANN202
pass
params = list(sig.parameters.values())
@@ -2408,8 +2438,8 @@ class SlotsMixin(DataDocumenterMixinBase):
if self.object is SLOTSATTR:
try:
parent___slots__ = inspect.getslots(self.parent)
- if parent___slots__ and parent___slots__.get(self.objpath[-1]):
- docstring = prepare_docstring(parent___slots__[self.objpath[-1]])
+ if parent___slots__ and (docstring := parent___slots__.get(self.objpath[-1])):
+ docstring = prepare_docstring(docstring)
return [docstring]
else:
return []
@@ -2440,9 +2470,7 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
# An instance variable defined in __init__().
if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
return True
- if self.is_runtime_instance_attribute_not_commented(parent):
- return True
- return False
+ return self.is_runtime_instance_attribute_not_commented(parent)
def is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
"""Check the subject is an attribute defined in __init__() without comment."""
@@ -2454,7 +2482,7 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
analyzer = ModuleAnalyzer.for_module(module)
analyzer.analyze()
if qualname and self.objpath:
- key = '.'.join([qualname, self.objpath[-1]])
+ key = f'{qualname}.{self.objpath[-1]}'
if key in analyzer.tagorder:
return True
except (AttributeError, PycodeError):
@@ -2464,7 +2492,8 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
def import_object(self, raiseerror: bool = False) -> bool:
"""Check the existence of runtime instance attribute after failing to import the
- attribute."""
+ attribute.
+ """
try:
return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
@@ -2517,7 +2546,8 @@ class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
def import_object(self, raiseerror: bool = False) -> bool:
"""Check the exisitence of uninitialized instance attribute when failed to import
- the attribute."""
+ the attribute.
+ """
try:
return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
@@ -2556,9 +2586,10 @@ class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore[misc]
"""
Specialized Documenter subclass for attributes.
"""
+
objtype = 'attribute'
member_order = 60
- option_spec: OptionSpec = dict(ModuleLevelDocumenter.option_spec)
+ option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
option_spec["annotation"] = annotation_option
option_spec["no-value"] = bool_option
@@ -2571,15 +2602,14 @@ class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore[misc]
return inspect.isfunction(obj) or inspect.isbuiltin(obj) or inspect.ismethod(obj)
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
if isinstance(parent, ModuleDocumenter):
return False
if inspect.isattributedescriptor(member):
return True
- if not inspect.isroutine(member) and not isinstance(member, type):
- return True
- return False
+ return not inspect.isroutine(member) and not isinstance(member, type)
def document_members(self, all_members: bool = False) -> None:
pass
@@ -2625,7 +2655,8 @@ class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore[misc]
else:
doc = self.get_doc()
if doc:
- docstring, metadata = separate_metadata('\n'.join(sum(doc, [])))
+ docstring, metadata = separate_metadata(
+ '\n'.join(functools.reduce(operator.iadd, doc, [])))
if 'hide-value' in metadata:
return True
@@ -2711,6 +2742,7 @@ class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
"""
Specialized Documenter subclass for properties.
"""
+
objtype = 'property'
member_order = 60
@@ -2718,8 +2750,9 @@ class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
priority = AttributeDocumenter.priority + 1
@classmethod
- def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any,
- ) -> bool:
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any,
+ ) -> bool:
if isinstance(parent, ClassDocumenter):
if inspect.isproperty(member):
return True
@@ -2732,7 +2765,8 @@ class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
def import_object(self, raiseerror: bool = False) -> bool:
"""Check the exisitence of uninitialized instance attribute when failed to import
- the attribute."""
+ the attribute.
+ """
ret = super().import_object(raiseerror)
if ret and not inspect.isproperty(self.object):
__dict__ = safe_getattr(self.parent, '__dict__', {})
@@ -2793,7 +2827,7 @@ class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
except ValueError:
pass
- def _get_property_getter(self):
+ def _get_property_getter(self) -> Callable | None:
if safe_getattr(self.object, 'fget', None): # property
return self.object.fget
if safe_getattr(self.object, 'func', None): # cached_property
@@ -2810,7 +2844,7 @@ def autodoc_attrgetter(app: Sphinx, obj: Any, name: str, *defargs: Any) -> Any:
return safe_getattr(obj, name, *defargs)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_autodocumenter(ModuleDocumenter)
app.add_autodocumenter(ClassDocumenter)
app.add_autodocumenter(ExceptionDocumenter)
@@ -2821,22 +2855,22 @@ def setup(app: Sphinx) -> dict[str, Any]:
app.add_autodocumenter(AttributeDocumenter)
app.add_autodocumenter(PropertyDocumenter)
- app.add_config_value('autoclass_content', 'class', True, ENUM('both', 'class', 'init'))
- app.add_config_value('autodoc_member_order', 'alphabetical', True,
+ app.add_config_value('autoclass_content', 'class', 'env', ENUM('both', 'class', 'init'))
+ app.add_config_value('autodoc_member_order', 'alphabetical', 'env',
ENUM('alphabetical', 'bysource', 'groupwise'))
- app.add_config_value('autodoc_class_signature', 'mixed', True, ENUM('mixed', 'separated'))
- app.add_config_value('autodoc_default_options', {}, True)
- app.add_config_value('autodoc_docstring_signature', True, True)
- app.add_config_value('autodoc_mock_imports', [], True)
- app.add_config_value('autodoc_typehints', "signature", True,
+ app.add_config_value('autodoc_class_signature', 'mixed', 'env', ENUM('mixed', 'separated'))
+ app.add_config_value('autodoc_default_options', {}, 'env')
+ app.add_config_value('autodoc_docstring_signature', True, 'env')
+ app.add_config_value('autodoc_mock_imports', [], 'env')
+ app.add_config_value('autodoc_typehints', "signature", 'env',
ENUM("signature", "description", "none", "both"))
- app.add_config_value('autodoc_typehints_description_target', 'all', True,
+ app.add_config_value('autodoc_typehints_description_target', 'all', 'env',
ENUM('all', 'documented', 'documented_params'))
- app.add_config_value('autodoc_type_aliases', {}, True)
+ app.add_config_value('autodoc_type_aliases', {}, 'env')
app.add_config_value('autodoc_typehints_format', "short", 'env',
ENUM("fully-qualified", "short"))
- app.add_config_value('autodoc_warningiserror', True, True)
- app.add_config_value('autodoc_inherit_docstrings', True, True)
+ app.add_config_value('autodoc_warningiserror', True, 'env')
+ app.add_config_value('autodoc_inherit_docstrings', True, 'env')
app.add_event('autodoc-before-process-signature')
app.add_event('autodoc-process-docstring')
app.add_event('autodoc-process-signature')
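The setup() hunks above replace the legacy boolean rebuild flag with the equivalent 'env' string in every add_config_value() call. A minimal sketch of the modern form for a hypothetical extension option (the option name and default are illustrative, not part of this patch):

from sphinx.application import Sphinx

def setup(app: Sphinx) -> dict:
    # 'env' asks Sphinx to rebuild the environment when the value changes,
    # which is what the old ``True`` rebuild flag meant.
    app.add_config_value('my_extension_option', default=False, rebuild='env')
    return {'version': '1.0', 'parallel_read_safe': True}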
diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py
index 64cbc9b..130e347 100644
--- a/sphinx/ext/autodoc/directive.py
+++ b/sphinx/ext/autodoc/directive.py
@@ -59,20 +59,20 @@ class DocumenterBridge:
def process_documenter_options(documenter: type[Documenter], config: Config, options: dict,
) -> Options:
"""Recognize options of Documenter from user input."""
+ default_options = config.autodoc_default_options
for name in AUTODOC_DEFAULT_OPTIONS:
if name not in documenter.option_spec:
continue
negated = options.pop('no-' + name, True) is None
- if name in config.autodoc_default_options and not negated:
- if name in options and isinstance(config.autodoc_default_options[name], str):
+ if name in default_options and not negated:
+ if name in options and isinstance(default_options[name], str):
# take value from options if present or extend it
# with autodoc_default_options if necessary
if name in AUTODOC_EXTENDABLE_OPTIONS:
if options[name] is not None and options[name].startswith('+'):
- options[name] = ','.join([config.autodoc_default_options[name],
- options[name][1:]])
+ options[name] = f'{default_options[name]},{options[name][1:]}'
else:
- options[name] = config.autodoc_default_options[name]
+ options[name] = default_options[name]
elif options.get(name) is not None:
# remove '+' from option argument if there's nothing to merge it with
@@ -104,6 +104,7 @@ class AutodocDirective(SphinxDirective):
It invokes a Documenter upon running. After the processing, it parses and returns
the content generated by Documenter.
"""
+
option_spec = DummyOptionSpec()
has_content = True
required_arguments = 1
@@ -114,7 +115,7 @@ class AutodocDirective(SphinxDirective):
reporter = self.state.document.reporter
try:
- source, lineno = reporter.get_source_and_line( # type: ignore[attr-defined]
+ source, lineno = reporter.get_source_and_line(
self.lineno)
except AttributeError:
source, lineno = (None, None)
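The process_documenter_options() hunk above reads config.autodoc_default_options once and builds the merged value with an f-string when a directive option starts with '+'. A rough standalone sketch of that merge rule, under the assumption that only string-valued extendable options are involved (the merge_option helper and the sample values are hypothetical):

def merge_option(name, default_options, options, extendable_options):
    # A directive value prefixed with '+' extends the configured default for
    # extendable options; any other directive value simply replaces it.
    default = default_options.get(name)
    given = options.get(name)
    if given is None:
        return default
    if name in extendable_options and isinstance(default, str) and given.startswith('+'):
        return f'{default},{given[1:]}'
    return given.removeprefix('+')

# e.g. autodoc_default_options = {'members': 'meth_a'} plus ``:members: +meth_b``
print(merge_option('members', {'members': 'meth_a'}, {'members': '+meth_b'}, {'members'}))
# -> meth_a,meth_b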
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index 84bfee5..784fa71 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -8,7 +8,8 @@ import os
import sys
import traceback
import typing
-from typing import TYPE_CHECKING, Any, Callable, NamedTuple
+from enum import Enum
+from typing import TYPE_CHECKING, NamedTuple
from sphinx.ext.autodoc.mock import ismock, undecorate
from sphinx.pycode import ModuleAnalyzer, PycodeError
@@ -20,16 +21,91 @@ from sphinx.util.inspect import (
isclass,
isenumclass,
safe_getattr,
+ unwrap_all,
)
if TYPE_CHECKING:
+ from collections.abc import Callable, Iterator, Mapping
from types import ModuleType
+ from typing import Any
from sphinx.ext.autodoc import ObjectMember
logger = logging.getLogger(__name__)
+def _filter_enum_dict(
+ enum_class: type[Enum],
+ attrgetter: Callable[[Any, str, Any], Any],
+ enum_class_dict: Mapping[str, object],
+) -> Iterator[tuple[str, type, Any]]:
+ """Find the attributes to document of an enumeration class.
+
+ The output consists of triplets ``(attribute name, defining class, value)``
+ where the attribute name can appear more than once during the iteration
+ but with different defining class. The order of occurrence is guided by
+ the MRO of *enum_class*.
+ """
+ # attributes that were found on a mixin type or the data type
+ candidate_in_mro: set[str] = set()
+ # sunder names that were picked up (and thereby allowed to be redefined)
+ # see: https://docs.python.org/3/howto/enum.html#supported-dunder-names
+ sunder_names = {'_name_', '_value_', '_missing_', '_order_', '_generate_next_value_'}
+ # attributes that can be picked up on a mixin type or the enum's data type
+ public_names = {'name', 'value', *object.__dict__, *sunder_names}
+ # names that are ignored by default
+ ignore_names = Enum.__dict__.keys() - public_names
+
+ def is_native_api(obj: object, name: str) -> bool:
+ """Check whether *obj* is the same as ``Enum.__dict__[name]``."""
+ return unwrap_all(obj) is unwrap_all(Enum.__dict__[name])
+
+ def should_ignore(name: str, value: Any) -> bool:
+ if name in sunder_names:
+ return is_native_api(value, name)
+ return name in ignore_names
+
+ sentinel = object()
+
+ def query(name: str, defining_class: type) -> tuple[str, type, Any] | None:
+ value = attrgetter(enum_class, name, sentinel)
+ if value is not sentinel:
+ return (name, defining_class, value)
+ return None
+
+ # attributes defined on a parent type, possibly shadowed later by
+ # the attributes defined directly inside the enumeration class
+ for parent in enum_class.__mro__:
+ if parent in {enum_class, Enum, object}:
+ continue
+
+ parent_dict = attrgetter(parent, '__dict__', {})
+ for name, value in parent_dict.items():
+ if should_ignore(name, value):
+ continue
+
+ candidate_in_mro.add(name)
+ if (item := query(name, parent)) is not None:
+ yield item
+
+ # exclude members coming from the native Enum unless
+ # they were redefined on a mixin type or the data type
+ excluded_members = Enum.__dict__.keys() - candidate_in_mro
+ yield from filter(None, (query(name, enum_class) for name in enum_class_dict
+ if name not in excluded_members))
+
+ # check if allowed members from ``Enum`` were redefined at the enum level
+ special_names = sunder_names | public_names
+ special_names &= enum_class_dict.keys()
+ special_names &= Enum.__dict__.keys()
+ for name in special_names:
+ if (
+ not is_native_api(enum_class_dict[name], name)
+ and (item := query(name, enum_class)) is not None
+ ):
+ yield item
+
+
def mangle(subject: Any, name: str) -> str:
"""Mangle the given name."""
try:
@@ -61,9 +137,7 @@ def unmangle(subject: Any, name: str) -> str | None:
def import_module(modname: str, warningiserror: bool = False) -> Any:
- """
- Call importlib.import_module(modname), convert exceptions to ImportError
- """
+ """Call importlib.import_module(modname), convert exceptions to ImportError."""
try:
with logging.skip_warningiserror(not warningiserror):
return importlib.import_module(modname)
@@ -97,7 +171,7 @@ def import_object(modname: str, objpath: list[str], objtype: str = '',
try:
module = None
exc_on_importing = None
- objpath = list(objpath)
+ objpath = objpath.copy()
while module is None:
try:
original_module_names = frozenset(sys.modules)
@@ -194,15 +268,11 @@ def get_object_members(
# enum members
if isenumclass(subject):
- for name, value in subject.__members__.items():
- if name not in members:
- members[name] = Attribute(name, True, value)
-
- superclass = subject.__mro__[1]
- for name in obj_dict:
- if name not in superclass.__dict__:
- value = safe_getattr(subject, name)
- members[name] = Attribute(name, True, value)
+ for name, defining_class, value in _filter_enum_dict(subject, attrgetter, obj_dict):
+ # the order of occurrence of *name* matches the subject's MRO,
+ # allowing inherited attributes to be shadowed correctly
+ if unmangled := unmangle(defining_class, name):
+ members[unmangled] = Attribute(unmangled, defining_class is subject, value)
# members in __slots__
try:
@@ -220,18 +290,18 @@ def get_object_members(
try:
value = attrgetter(subject, name)
directly_defined = name in obj_dict
- name = unmangle(subject, name)
- if name and name not in members:
- members[name] = Attribute(name, directly_defined, value)
+ unmangled = unmangle(subject, name)
+ if unmangled and unmangled not in members:
+ members[unmangled] = Attribute(unmangled, directly_defined, value)
except AttributeError:
continue
# annotation only member (ex. attr: int)
- for i, cls in enumerate(getmro(subject)):
+ for cls in getmro(subject):
for name in getannotations(cls):
- name = unmangle(cls, name)
- if name and name not in members:
- members[name] = Attribute(name, i == 0, INSTANCEATTR)
+ unmangled = unmangle(cls, name)
+ if unmangled and unmangled not in members:
+ members[unmangled] = Attribute(unmangled, cls is subject, INSTANCEATTR)
if analyzer:
# append instance attributes (cf. self.attr1) if analyzer knows
@@ -255,15 +325,11 @@ def get_class_members(subject: Any, objpath: Any, attrgetter: Callable,
# enum members
if isenumclass(subject):
- for name, value in subject.__members__.items():
- if name not in members:
- members[name] = ObjectMember(name, value, class_=subject)
-
- superclass = subject.__mro__[1]
- for name in obj_dict:
- if name not in superclass.__dict__:
- value = safe_getattr(subject, name)
- members[name] = ObjectMember(name, value, class_=subject)
+ for name, defining_class, value in _filter_enum_dict(subject, attrgetter, obj_dict):
+ # the order of occurrence of *name* matches the subject's MRO,
+ # allowing inherited attributes to be shadowed correctly
+ if unmangled := unmangle(defining_class, name):
+ members[unmangled] = ObjectMember(unmangled, value, class_=defining_class)
# members in __slots__
try:
@@ -308,15 +374,15 @@ def get_class_members(subject: Any, objpath: Any, attrgetter: Callable,
# annotation only member (ex. attr: int)
for name in getannotations(cls):
- name = unmangle(cls, name)
- if name and name not in members:
- if analyzer and (qualname, name) in analyzer.attr_docs:
- docstring = '\n'.join(analyzer.attr_docs[qualname, name])
+ unmangled = unmangle(cls, name)
+ if unmangled and unmangled not in members:
+ if analyzer and (qualname, unmangled) in analyzer.attr_docs:
+ docstring = '\n'.join(analyzer.attr_docs[qualname, unmangled])
else:
docstring = None
- members[name] = ObjectMember(name, INSTANCEATTR, class_=cls,
- docstring=docstring)
+ members[unmangled] = ObjectMember(unmangled, INSTANCEATTR, class_=cls,
+ docstring=docstring)
# append or complete instance attributes (cf. self.attr1) if analyzer knows
if analyzer:
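The new _filter_enum_dict() helper above replaces the old __members__-based loops in get_object_members() and get_class_members(): it walks the enum's MRO and yields (attribute name, defining class, value) triplets so that members and mixin attributes are attributed to the class that defines them. A rough standalone sketch of that idea, not the helper itself (the Describable and Color classes are made up for illustration):

import enum

class Describable:
    def describe(self):
        return f'{self.name} -> {self.value}'

class Color(Describable, enum.Enum):
    RED = 1
    BLUE = 2

def enum_triplets(enum_class):
    # Yield (name, defining class, value) for documentable attributes,
    # parent classes first, then attributes defined on the enum itself.
    seen = set()
    for parent in enum_class.__mro__[1:]:
        if parent in (enum.Enum, object):
            continue
        for name in vars(parent):
            if not name.startswith('_'):
                seen.add(name)
                yield name, parent, getattr(enum_class, name)
    for name in vars(enum_class):
        if not name.startswith('_') and name not in seen:
            yield name, enum_class, getattr(enum_class, name)

for name, cls, value in enum_triplets(Color):
    print(f'{name!r} defined on {cls.__name__}: {value!r}')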
diff --git a/sphinx/ext/autodoc/mock.py b/sphinx/ext/autodoc/mock.py
index 7034977..c2ab0fe 100644
--- a/sphinx/ext/autodoc/mock.py
+++ b/sphinx/ext/autodoc/mock.py
@@ -14,7 +14,7 @@ from sphinx.util import logging
from sphinx.util.inspect import isboundmethod, safe_getattr
if TYPE_CHECKING:
- from collections.abc import Generator, Iterator, Sequence
+ from collections.abc import Iterator, Sequence
logger = logging.getLogger(__name__)
@@ -80,6 +80,7 @@ def _make_subclass(name: str, module: str, superclass: Any = _MockObject,
class _MockModule(ModuleType):
"""Used by autodoc_mock_imports."""
+
__file__ = os.devnull
__sphinx_mock__ = True
@@ -97,6 +98,7 @@ class _MockModule(ModuleType):
class MockLoader(Loader):
"""A loader for mocking."""
+
def __init__(self, finder: MockFinder) -> None:
super().__init__()
self.finder = finder
@@ -135,12 +137,12 @@ class MockFinder(MetaPathFinder):
@contextlib.contextmanager
-def mock(modnames: list[str]) -> Generator[None, None, None]:
+def mock(modnames: list[str]) -> Iterator[None]:
"""Insert mock modules during context::
- with mock(['target.module.name']):
- # mock modules are enabled here
- ...
+ with mock(['target.module.name']):
+ # mock modules are enabled here
+ ...
"""
try:
finder = MockFinder(modnames)
diff --git a/sphinx/ext/autodoc/preserve_defaults.py b/sphinx/ext/autodoc/preserve_defaults.py
index 5f957ce..b0b3243 100644
--- a/sphinx/ext/autodoc/preserve_defaults.py
+++ b/sphinx/ext/autodoc/preserve_defaults.py
@@ -22,6 +22,7 @@ if TYPE_CHECKING:
from typing import Any
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
_LAMBDA_NAME = (lambda: None).__name__
@@ -96,7 +97,7 @@ def _get_arguments(obj: Any, /) -> ast.arguments | None:
return _get_arguments_inner(subject)
-def _is_lambda(x, /):
+def _is_lambda(x: Any, /) -> bool:
return isinstance(x, types.LambdaType) and x.__name__ == _LAMBDA_NAME
@@ -189,8 +190,8 @@ def update_defvalue(app: Sphinx, obj: Any, bound_method: bool) -> None:
logger.warning(__("Failed to parse a default argument value for %r: %s"), obj, exc)
-def setup(app: Sphinx) -> dict[str, Any]:
- app.add_config_value('autodoc_preserve_defaults', False, True)
+def setup(app: Sphinx) -> ExtensionMetadata:
+ app.add_config_value('autodoc_preserve_defaults', False, 'env')
app.connect('autodoc-before-process-signature', update_defvalue)
return {
diff --git a/sphinx/ext/autodoc/type_comment.py b/sphinx/ext/autodoc/type_comment.py
index e2c9ae2..e0a5a63 100644
--- a/sphinx/ext/autodoc/type_comment.py
+++ b/sphinx/ext/autodoc/type_comment.py
@@ -15,6 +15,7 @@ if TYPE_CHECKING:
from collections.abc import Sequence
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
@@ -134,7 +135,7 @@ def update_annotations_using_type_comments(app: Sphinx, obj: Any, bound_method:
logger.warning(__("Failed to parse type_comment for %r: %s"), obj, exc)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.connect('autodoc-before-process-signature', update_annotations_using_type_comments)
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/autodoc/typehints.py b/sphinx/ext/autodoc/typehints.py
index 79906fb..df0c468 100644
--- a/sphinx/ext/autodoc/typehints.py
+++ b/sphinx/ext/autodoc/typehints.py
@@ -11,16 +11,17 @@ from docutils import nodes
import sphinx
from sphinx import addnodes
from sphinx.util import inspect
-from sphinx.util.typing import stringify_annotation
+from sphinx.util.typing import ExtensionMetadata, stringify_annotation
if TYPE_CHECKING:
from docutils.nodes import Element
from sphinx.application import Sphinx
+ from sphinx.ext.autodoc import Options
def record_typehints(app: Sphinx, objtype: str, name: str, obj: Any,
- options: dict, args: str, retann: str) -> None:
+ options: Options, args: str, retann: str) -> None:
"""Record type hints to env object."""
if app.config.autodoc_typehints_format == 'short':
mode = 'smart'
@@ -50,7 +51,7 @@ def merge_typehints(app: Sphinx, domain: str, objtype: str, contentnode: Element
try:
signature = cast(addnodes.desc_signature, contentnode.parent[0])
if signature['module']:
- fullname = '.'.join([signature['module'], signature['fullname']])
+ fullname = f'{signature["module"]}.{signature["fullname"]}'
else:
fullname = signature['fullname']
except KeyError:
@@ -208,7 +209,7 @@ def augment_descriptions_with_types(
node += field
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.connect('autodoc-process-signature', record_typehints)
app.connect('object-description-transform', merge_typehints)
diff --git a/sphinx/ext/autosectionlabel.py b/sphinx/ext/autosectionlabel.py
index d423fcc..c1eb46b 100644
--- a/sphinx/ext/autosectionlabel.py
+++ b/sphinx/ext/autosectionlabel.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, cast
from docutils import nodes
@@ -16,6 +16,7 @@ if TYPE_CHECKING:
from docutils.nodes import Node
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
@@ -57,7 +58,7 @@ def register_sections_as_label(app: Sphinx, document: Node) -> None:
domain.labels[name] = docname, labelid, sectname
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value('autosectionlabel_prefix_document', False, 'env')
app.add_config_value('autosectionlabel_maxdepth', None, 'env')
app.connect('doctree-read', register_sections_as_label)
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index edb8f0d..7057f43 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -48,7 +48,9 @@ This can be used as the default role to make links 'smart'.
from __future__ import annotations
+import functools
import inspect
+import operator
import os
import posixpath
import re
@@ -56,7 +58,7 @@ import sys
from inspect import Parameter
from os import path
from types import ModuleType
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any, ClassVar, cast
from docutils import nodes
from docutils.parsers.rst import directives
@@ -93,7 +95,7 @@ if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.extension import Extension
- from sphinx.util.typing import OptionSpec
+ from sphinx.util.typing import ExtensionMetadata, OptionSpec
from sphinx.writers.html import HTML5Translator
logger = logging.getLogger(__name__)
@@ -162,7 +164,7 @@ class FakeDirective(DocumenterBridge):
settings = Struct(tab_width=8)
document = Struct(settings=settings)
app = FakeApplication()
- app.config.add('autodoc_class_signature', 'mixed', True, None)
+ app.config.add('autodoc_class_signature', 'mixed', 'env', ())
env = BuildEnvironment(app) # type: ignore[arg-type]
state = Struct(document=document)
super().__init__(env, None, Options(), 0, state)
@@ -216,7 +218,7 @@ class Autosummary(SphinxDirective):
optional_arguments = 0
final_argument_whitespace = False
has_content = True
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'caption': directives.unchanged_required,
'toctree': directives.unchanged,
'nosignatures': directives.flag,
@@ -284,9 +286,9 @@ class Autosummary(SphinxDirective):
return import_ivar_by_name(name, prefixes)
except ImportError as exc2:
if exc2.__cause__:
- errors: list[BaseException] = exc.exceptions + [exc2.__cause__]
+ errors: list[BaseException] = [*exc.exceptions, exc2.__cause__]
else:
- errors = exc.exceptions + [exc2]
+ errors = [*exc.exceptions, exc2]
raise ImportExceptionGroup(exc.args[0], errors) from None
@@ -591,7 +593,7 @@ def limited_join(sep: str, items: list[str], max_chars: int = 30,
else:
break
- return sep.join(list(items[:n_items]) + [overflow_marker])
+ return sep.join([*list(items[:n_items]), overflow_marker])
# -- Importing items -----------------------------------------------------------
@@ -603,7 +605,7 @@ class ImportExceptionGroup(Exception):
It contains an error messages and a list of exceptions as its arguments.
"""
- def __init__(self, message: str | None, exceptions: Sequence[BaseException]):
+ def __init__(self, message: str | None, exceptions: Sequence[BaseException]) -> None:
super().__init__(message)
self.exceptions = list(exceptions)
@@ -640,7 +642,7 @@ def import_by_name(
for prefix in prefixes:
try:
if prefix:
- prefixed_name = '.'.join([prefix, name])
+ prefixed_name = f'{prefix}.{name}'
else:
prefixed_name = name
obj, parent, modname = _import_by_name(prefixed_name, grouped_exception=True)
@@ -651,7 +653,8 @@ def import_by_name(
tried.append(prefixed_name)
errors.append(exc)
- exceptions: list[BaseException] = sum((e.exceptions for e in errors), [])
+ exceptions: list[BaseException] = functools.reduce(
+ operator.iadd, (e.exceptions for e in errors), [])
raise ImportExceptionGroup('no module named %s' % ' or '.join(tried), exceptions)
@@ -742,6 +745,7 @@ class AutoLink(SphinxRole):
Expands to ':obj:`text`' if `text` is an object that can be imported;
otherwise expands to '*text*'.
"""
+
def run(self) -> tuple[list[Node], list[system_message]]:
pyobj_role = self.env.get_domain('py').role('obj')
assert pyobj_role is not None
@@ -766,7 +770,7 @@ class AutoLink(SphinxRole):
def get_rst_suffix(app: Sphinx) -> str | None:
def get_supported_format(suffix: str) -> tuple[str, ...]:
- parser_class = app.registry.get_source_parsers().get(suffix)
+ parser_class = app.registry.get_source_parsers().get(suffix.removeprefix('.'))
if parser_class is None:
return ('restructuredtext',)
return parser_class.supported
@@ -803,7 +807,7 @@ def process_generate_options(app: Sphinx) -> None:
suffix = get_rst_suffix(app)
if suffix is None:
- logger.warning(__('autosummary generats .rst files internally. '
+ logger.warning(__('autosummary generates .rst files internally. '
'But your source_suffix does not contain .rst. Skipped.'))
return
@@ -817,7 +821,7 @@ def process_generate_options(app: Sphinx) -> None:
encoding=app.config.source_encoding)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
# I need autodoc
app.setup_extension('sphinx.ext.autodoc')
app.add_node(autosummary_toc,
@@ -835,13 +839,13 @@ def setup(app: Sphinx) -> dict[str, Any]:
app.add_directive('autosummary', Autosummary)
app.add_role('autolink', AutoLink())
app.connect('builder-inited', process_generate_options)
- app.add_config_value('autosummary_context', {}, True)
+ app.add_config_value('autosummary_context', {}, 'env')
app.add_config_value('autosummary_filename_map', {}, 'html')
- app.add_config_value('autosummary_generate', True, True, [bool, list])
- app.add_config_value('autosummary_generate_overwrite', True, False)
+ app.add_config_value('autosummary_generate', True, 'env', {bool, list})
+ app.add_config_value('autosummary_generate_overwrite', True, '')
app.add_config_value('autosummary_mock_imports',
lambda config: config.autodoc_mock_imports, 'env')
- app.add_config_value('autosummary_imported_members', [], False, [bool])
+ app.add_config_value('autosummary_imported_members', [], '', bool)
app.add_config_value('autosummary_ignore_module_all', True, 'env', bool)
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index 06814f9..83497f9 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -71,10 +71,9 @@ class DummyApplication:
self._warncount = 0
self.warningiserror = False
- self.config.add('autosummary_context', {}, True, None)
- self.config.add('autosummary_filename_map', {}, True, None)
+ self.config.add('autosummary_context', {}, 'env', ())
+ self.config.add('autosummary_filename_map', {}, 'env', ())
self.config.add('autosummary_ignore_module_all', True, 'env', bool)
- self.config.init_values()
def emit_firstresult(self, *args: Any) -> None:
pass
@@ -134,7 +133,8 @@ class AutosummaryRenderer:
if app.translator:
self.env.add_extension("jinja2.ext.i18n")
- self.env.install_gettext_translations(app.translator)
+ # ``install_gettext_translations`` is injected by the ``jinja2.ext.i18n`` extension
+ self.env.install_gettext_translations(app.translator) # type: ignore[attr-defined]
def render(self, template_name: str, context: dict) -> str:
"""Render a template file."""
@@ -249,8 +249,8 @@ class ModuleScanner:
def members_of(obj: Any, conf: Config) -> Sequence[str]:
"""Get the members of ``obj``, possibly ignoring the ``__all__`` module attribute
- Follows the ``conf.autosummary_ignore_module_all`` setting."""
-
+ Follows the ``conf.autosummary_ignore_module_all`` setting.
+ """
if conf.autosummary_ignore_module_all:
return dir(obj)
else:
@@ -331,7 +331,7 @@ def generate_autosummary_content(name: str, obj: Any, parent: Any,
if doc.objtype in ('method', 'attribute', 'property'):
ns['class'] = qualname.rsplit(".", 1)[0]
- if doc.objtype in ('class',):
+ if doc.objtype == 'class':
shortname = qualname
else:
shortname = qualname.rsplit(".", 1)[-1]
@@ -509,9 +509,9 @@ def generate_autosummary_docs(sources: list[str],
qualname = name.replace(modname + ".", "")
except ImportError as exc2:
if exc2.__cause__:
- exceptions: list[BaseException] = exc.exceptions + [exc2.__cause__]
+ exceptions: list[BaseException] = [*exc.exceptions, exc2.__cause__]
else:
- exceptions = exc.exceptions + [exc2]
+ exceptions = [*exc.exceptions, exc2]
errors = list({f"* {type(e).__name__}: {e}" for e in exceptions})
logger.warning(__('[autosummary] failed to import %s.\nPossible hints:\n%s'),
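The members_of() docstring reflowed above describes either trusting dir(obj) or deferring to the module's __all__, depending on autosummary_ignore_module_all. A small sketch of that behaviour (the demo module and its members are made up, not part of the patch):

import types

demo = types.ModuleType('demo')
demo.public = lambda: None
demo.hidden = lambda: None
demo.__all__ = ['public']

def members_of(obj, ignore_module_all):
    # With the setting enabled every dir() entry is a candidate;
    # otherwise __all__ wins whenever the module defines it.
    if ignore_module_all:
        return dir(obj)
    return getattr(obj, '__all__', dir(obj))

print('hidden' in members_of(demo, ignore_module_all=True))   # True
print(members_of(demo, ignore_module_all=False))              # ['public']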
diff --git a/sphinx/ext/coverage.py b/sphinx/ext/coverage.py
index e3d9745..cfe0936 100644
--- a/sphinx/ext/coverage.py
+++ b/sphinx/ext/coverage.py
@@ -19,13 +19,14 @@ import sphinx
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import red # type: ignore[attr-defined]
+from sphinx.util.console import red
from sphinx.util.inspect import safe_getattr
if TYPE_CHECKING:
from collections.abc import Iterator
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
@@ -69,6 +70,7 @@ class CoverageBuilder(Builder):
"""
Evaluates coverage of code in the documentation.
"""
+
name = 'coverage'
epilog = __('Testing of coverage in the sources finished, look at the '
'results in %(outdir)s' + path.sep + 'python.txt.')
@@ -270,7 +272,7 @@ class CoverageBuilder(Builder):
self.py_documented[mod_name] = documented_objects
def _write_py_statistics(self, op: TextIO) -> None:
- """ Outputs the table of ``op``."""
+ """Outputs the table of ``op``."""
all_modules = set(self.py_documented.keys()).union(
set(self.py_undocumented.keys()))
all_objects: set[str] = set()
@@ -290,11 +292,15 @@ class CoverageBuilder(Builder):
value = 100.0
table.append([module, '%.2f%%' % value, '%d' % len(self.py_undocumented[module])])
- table.append([
- 'TOTAL',
- f'{100 * len(all_documented_objects) / len(all_objects):.2f}%',
- f'{len(all_objects) - len(all_documented_objects)}',
- ])
+
+ if all_objects:
+ table.append([
+ 'TOTAL',
+ f'{100 * len(all_documented_objects) / len(all_objects):.2f}%',
+ f'{len(all_objects) - len(all_documented_objects)}',
+ ])
+ else:
+ table.append(['TOTAL', '100', '0'])
for line in _write_table(table):
op.write(f'{line}\n')
@@ -383,18 +389,18 @@ class CoverageBuilder(Builder):
self.py_undocumented, self.py_documented), dumpfile)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_builder(CoverageBuilder)
- app.add_config_value('coverage_ignore_modules', [], False)
- app.add_config_value('coverage_ignore_functions', [], False)
- app.add_config_value('coverage_ignore_classes', [], False)
- app.add_config_value('coverage_ignore_pyobjects', [], False)
- app.add_config_value('coverage_c_path', [], False)
- app.add_config_value('coverage_c_regexes', {}, False)
- app.add_config_value('coverage_ignore_c_items', {}, False)
- app.add_config_value('coverage_write_headline', True, False)
- app.add_config_value('coverage_statistics_to_report', True, False, (bool,))
- app.add_config_value('coverage_statistics_to_stdout', True, False, (bool,))
- app.add_config_value('coverage_skip_undoc_in_source', False, False)
- app.add_config_value('coverage_show_missing_items', False, False)
+ app.add_config_value('coverage_ignore_modules', [], '')
+ app.add_config_value('coverage_ignore_functions', [], '')
+ app.add_config_value('coverage_ignore_classes', [], '')
+ app.add_config_value('coverage_ignore_pyobjects', [], '')
+ app.add_config_value('coverage_c_path', [], '')
+ app.add_config_value('coverage_c_regexes', {}, '')
+ app.add_config_value('coverage_ignore_c_items', {}, '')
+ app.add_config_value('coverage_write_headline', True, '')
+ app.add_config_value('coverage_statistics_to_report', True, '', bool)
+ app.add_config_value('coverage_statistics_to_stdout', True, '', bool)
+ app.add_config_value('coverage_skip_undoc_in_source', False, '')
+ app.add_config_value('coverage_show_missing_items', False, '')
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index c55ef2f..e6ba274 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -11,7 +11,7 @@ import sys
import time
from io import StringIO
from os import path
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, ClassVar
from docutils import nodes
from docutils.parsers.rst import directives
@@ -22,7 +22,7 @@ import sphinx
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import bold # type: ignore[attr-defined]
+from sphinx.util.console import bold
from sphinx.util.docutils import SphinxDirective
from sphinx.util.osutil import relpath
@@ -32,7 +32,7 @@ if TYPE_CHECKING:
from docutils.nodes import Element, Node, TextElement
from sphinx.application import Sphinx
- from sphinx.util.typing import OptionSpec
+ from sphinx.util.typing import ExtensionMetadata, OptionSpec
logger = logging.getLogger(__name__)
@@ -125,7 +125,7 @@ class TestDirective(SphinxDirective):
if self.name == 'doctest' and 'pyversion' in self.options:
try:
spec = self.options['pyversion']
- python_version = '.'.join([str(v) for v in sys.version_info[:3]])
+ python_version = '.'.join(map(str, sys.version_info[:3]))
if not is_allowed_version(spec, python_version):
flag = doctest.OPTIONFLAGS_BY_NAME['SKIP']
node['options'][flag] = True # Skip the test
@@ -143,19 +143,19 @@ class TestDirective(SphinxDirective):
class TestsetupDirective(TestDirective):
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'skipif': directives.unchanged_required,
}
class TestcleanupDirective(TestDirective):
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'skipif': directives.unchanged_required,
}
class DoctestDirective(TestDirective):
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'hide': directives.flag,
'no-trim-doctest-flags': directives.flag,
'options': directives.unchanged,
@@ -166,7 +166,7 @@ class DoctestDirective(TestDirective):
class TestcodeDirective(TestDirective):
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'hide': directives.flag,
'no-trim-doctest-flags': directives.flag,
'pyversion': directives.unchanged_required,
@@ -176,7 +176,7 @@ class TestcodeDirective(TestDirective):
class TestoutputDirective(TestDirective):
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'hide': directives.flag,
'no-trim-doctest-flags': directives.flag,
'options': directives.unchanged,
@@ -276,6 +276,7 @@ class DocTestBuilder(Builder):
"""
Runs test snippets in the documentation.
"""
+
name = 'doctest'
epilog = __('Testing of doctests in the sources finished, look at the '
'results in %(outdir)s/output.txt.')
@@ -310,6 +311,12 @@ class DocTestBuilder(Builder):
'==================================%s\n') %
(date, '=' * len(date)))
+ def __del__(self) -> None:
+ # free resources upon destruction (the file handler might not be
+ # closed if the builder is never used)
+ if hasattr(self, 'outfile'):
+ self.outfile.close()
+
def _out(self, text: str) -> None:
logger.info(text, nonl=True)
self.outfile.write(text)
@@ -361,16 +368,16 @@ Doctest summary
def get_filename_for_node(self, node: Node, docname: str) -> str:
"""Try to get the file which actually contains the doctest, not the
- filename of the document it's included in."""
+ filename of the document it's included in.
+ """
try:
- filename = relpath(node.source, self.env.srcdir)\
- .rsplit(':docstring of ', maxsplit=1)[0]
+ filename = relpath(node.source, self.env.srcdir).rsplit(':docstring of ', maxsplit=1)[0] # type: ignore[arg-type] # noqa: E501
except Exception:
filename = self.env.doc2path(docname, False)
return filename
@staticmethod
- def get_line_number(node: Node) -> int:
+ def get_line_number(node: Node) -> int | None:
"""Get the real line number or admit we don't know."""
# TODO: Work out how to store or calculate real (file-relative)
# line numbers for doctest blocks in docstrings.
@@ -379,7 +386,7 @@ Doctest summary
# not the file. This is correct where it is set, in
# `docutils.nodes.Node.setup_child`, but Sphinx should report
# relative to the file, not the docstring.
- return None # type: ignore[return-value]
+ return None
if node.line is not None:
# TODO: find the root cause of this off by one error.
return node.line - 1
@@ -393,7 +400,7 @@ Doctest summary
context: dict[str, Any] = {}
if self.config.doctest_global_setup:
exec(self.config.doctest_global_setup, context) # NoQA: S102
- should_skip = eval(condition, context) # NoQA: PGH001
+ should_skip = eval(condition, context) # NoQA: S307
if self.config.doctest_global_cleanup:
exec(self.config.doctest_global_cleanup, context) # NoQA: S102
return should_skip
@@ -420,21 +427,21 @@ Doctest summary
def condition(node: Node) -> bool:
return isinstance(node, (nodes.literal_block, nodes.comment)) \
and 'testnodetype' in node
- for node in doctree.findall(condition): # type: Element
- if self.skipped(node):
+ for node in doctree.findall(condition):
+ if self.skipped(node): # type: ignore[arg-type]
continue
- source = node['test'] if 'test' in node else node.astext()
+ source = node['test'] if 'test' in node else node.astext() # type: ignore[index, operator]
filename = self.get_filename_for_node(node, docname)
line_number = self.get_line_number(node)
if not source:
logger.warning(__('no code/output in %s block at %s:%s'),
- node.get('testnodetype', 'doctest'),
+ node.get('testnodetype', 'doctest'), # type: ignore[attr-defined]
filename, line_number)
- code = TestCode(source, type=node.get('testnodetype', 'doctest'),
- filename=filename, lineno=line_number,
- options=node.get('options'))
- node_groups = node.get('groups', ['default'])
+ code = TestCode(source, type=node.get('testnodetype', 'doctest'), # type: ignore[attr-defined]
+ filename=filename, lineno=line_number, # type: ignore[arg-type]
+ options=node.get('options')) # type: ignore[attr-defined]
+ node_groups = node.get('groups', ['default']) # type: ignore[attr-defined]
if '*' in node_groups:
add_to_all_groups.append(code)
continue
@@ -501,9 +508,7 @@ Doctest summary
old_f = runner.failures
self.type = 'exec' # the snippet may contain multiple statements
runner.run(sim_doctest, out=self._warn_out, clear_globs=False)
- if runner.failures > old_f:
- return False
- return True
+ return runner.failures <= old_f
# run the setup code
if not run_setup_cleanup(self.setup_runner, group.setup, 'setup'):
@@ -555,7 +560,7 @@ Doctest summary
run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup')
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_directive('testsetup', TestsetupDirective)
app.add_directive('testcleanup', TestcleanupDirective)
app.add_directive('doctest', DoctestDirective)
@@ -563,13 +568,13 @@ def setup(app: Sphinx) -> dict[str, Any]:
app.add_directive('testoutput', TestoutputDirective)
app.add_builder(DocTestBuilder)
# this config value adds to sys.path
- app.add_config_value('doctest_show_successes', True, False, (bool,))
- app.add_config_value('doctest_path', [], False)
- app.add_config_value('doctest_test_doctest_blocks', 'default', False)
- app.add_config_value('doctest_global_setup', '', False)
- app.add_config_value('doctest_global_cleanup', '', False)
+ app.add_config_value('doctest_show_successes', True, '', bool)
+ app.add_config_value('doctest_path', [], '')
+ app.add_config_value('doctest_test_doctest_blocks', 'default', '')
+ app.add_config_value('doctest_global_setup', '', '')
+ app.add_config_value('doctest_global_cleanup', '', '')
app.add_config_value(
'doctest_default_flags',
doctest.DONT_ACCEPT_TRUE_FOR_1 | doctest.ELLIPSIS | doctest.IGNORE_EXCEPTION_DETAIL,
- False)
+ '')
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
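The skipped() helper above runs doctest_global_setup with exec and then evaluates each :skipif: expression with eval in that same namespace, and setup() now registers the doctest config values with string rebuild markers. A minimal conf.py sketch (hypothetical project, limited to values visible in this hunk):

    # conf.py -- hypothetical project configuration for sphinx.ext.doctest
    extensions = ['sphinx.ext.doctest']
    # executed before each test group; :skipif: expressions are evaluated in this namespace
    doctest_global_setup = 'import sys'
    doctest_global_cleanup = ''
    # whether successful doctests are reported (True is the default registered above)
    doctest_show_successes = True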
diff --git a/sphinx/ext/duration.py b/sphinx/ext/duration.py
index 26e197f..2243579 100644
--- a/sphinx/ext/duration.py
+++ b/sphinx/ext/duration.py
@@ -22,6 +22,7 @@ logger = logging.getLogger(__name__)
class DurationDomain(Domain):
"""A domain for durations of Sphinx processing."""
+
name = 'duration'
@property
diff --git a/sphinx/ext/extlinks.py b/sphinx/ext/extlinks.py
index 173df4d..a880278 100644
--- a/sphinx/ext/extlinks.py
+++ b/sphinx/ext/extlinks.py
@@ -37,7 +37,7 @@ if TYPE_CHECKING:
from docutils.parsers.rst.states import Inliner
from sphinx.application import Sphinx
- from sphinx.util.typing import RoleFunction
+ from sphinx.util.typing import ExtensionMetadata, RoleFunction
logger = logging.getLogger(__name__)
@@ -96,7 +96,8 @@ def make_link_role(name: str, base_url: str, caption: str) -> RoleFunction:
# Remark: It is an implementation detail that we use Python's %-formatting.
# So far we only expose ``%s`` and require quoting of ``%`` using ``%%``.
def role(typ: str, rawtext: str, text: str, lineno: int,
- inliner: Inliner, options: dict | None = None, content: Sequence[str] = (),
+ inliner: Inliner, options: dict[str, Any] | None = None,
+ content: Sequence[str] = (),
) -> tuple[list[Node], list[system_message]]:
text = utils.unescape(text)
has_explicit_title, title, part = split_explicit_title(text)
@@ -116,7 +117,7 @@ def setup_link_roles(app: Sphinx) -> None:
app.add_role(name, make_link_role(name, base_url, caption))
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value('extlinks', {}, 'env')
app.add_config_value('extlinks_detect_hardcoded_links', False, 'env')
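The role above substitutes the target with Python %-formatting, so a literal % in an extlinks base URL has to be written as %%. A minimal conf.py sketch (hypothetical project) for the two config values registered here:

    # conf.py -- hypothetical project configuration for sphinx.ext.extlinks
    extensions = ['sphinx.ext.extlinks']
    extlinks = {
        # :issue:`1042` -> https://github.com/sphinx-doc/sphinx/issues/1042, captioned "issue 1042"
        'issue': ('https://github.com/sphinx-doc/sphinx/issues/%s', 'issue %s'),
    }
    # warn about hard-coded URLs that could have used one of the roles above
    extlinks_detect_hardcoded_links = True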
diff --git a/sphinx/ext/githubpages.py b/sphinx/ext/githubpages.py
index c9be928..aac4797 100644
--- a/sphinx/ext/githubpages.py
+++ b/sphinx/ext/githubpages.py
@@ -5,13 +5,14 @@ from __future__ import annotations
import contextlib
import os
import urllib.parse
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
import sphinx
if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
+ from sphinx.util.typing import ExtensionMetadata
def _get_domain_from_url(url: str) -> str:
@@ -52,6 +53,6 @@ def create_nojekyll_and_cname(app: Sphinx, env: BuildEnvironment) -> None:
os.unlink(cname_path)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.connect('env-updated', create_nojekyll_and_cname)
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py
index 528bf30..9e6ce11 100644
--- a/sphinx/ext/graphviz.py
+++ b/sphinx/ext/graphviz.py
@@ -11,7 +11,7 @@ from hashlib import sha1
from itertools import chain
from os import path
from subprocess import CalledProcessError
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, ClassVar
from urllib.parse import urlsplit, urlunsplit
from docutils import nodes
@@ -31,7 +31,7 @@ if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.config import Config
- from sphinx.util.typing import OptionSpec
+ from sphinx.util.typing import ExtensionMetadata, OptionSpec
from sphinx.writers.html import HTML5Translator
from sphinx.writers.latex import LaTeXTranslator
from sphinx.writers.manpage import ManualPageTranslator
@@ -47,6 +47,7 @@ class GraphvizError(SphinxError):
class ClickableMapDefinition:
"""A manipulator for clickable map file of graphviz."""
+
maptag_re = re.compile('<map id="(.*?)"')
href_re = re.compile('href=".*?"')
@@ -81,7 +82,7 @@ class ClickableMapDefinition:
If it does not exist, this only returns an empty string.
"""
if self.clickable:
- return '\n'.join([self.content[0]] + self.clickable + [self.content[-1]])
+ return '\n'.join((self.content[0], *self.clickable, self.content[-1]))
else:
return ''
@@ -111,11 +112,12 @@ class Graphviz(SphinxDirective):
"""
Directive to insert arbitrary dot markup.
"""
+
has_content = True
required_arguments = 0
optional_arguments = 1
final_argument_whitespace = False
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'alt': directives.unchanged,
'align': align_spec,
'caption': directives.unchanged,
@@ -179,11 +181,12 @@ class GraphvizSimple(SphinxDirective):
"""
Directive to insert arbitrary dot markup.
"""
+
has_content = True
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'alt': directives.unchanged,
'align': align_spec,
'caption': directives.unchanged,
@@ -330,7 +333,7 @@ def render_dot_html(self: HTML5Translator, node: graphviz, code: str, options: d
logger.warning(__('dot code %r: %s'), code, exc)
raise nodes.SkipNode from exc
- classes = [imgcls, 'graphviz'] + node.get('classes', [])
+ classes = [imgcls, 'graphviz', *node.get('classes', [])]
imgcls = ' '.join(filter(None, classes))
if fname is None:
@@ -449,7 +452,7 @@ def on_config_inited(_app: Sphinx, config: Config) -> None:
config.html_static_path.append(css_path)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_node(graphviz,
html=(html_visit_graphviz, None),
latex=(latex_visit_graphviz, None),
diff --git a/sphinx/ext/ifconfig.py b/sphinx/ext/ifconfig.py
index 1d2b197..398d669 100644
--- a/sphinx/ext/ifconfig.py
+++ b/sphinx/ext/ifconfig.py
@@ -16,7 +16,7 @@ namespace of the project configuration (that is, all variables from
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, ClassVar
from docutils import nodes
@@ -28,7 +28,7 @@ if TYPE_CHECKING:
from docutils.nodes import Node
from sphinx.application import Sphinx
- from sphinx.util.typing import OptionSpec
+ from sphinx.util.typing import ExtensionMetadata, OptionSpec
class ifconfig(nodes.Element):
@@ -41,7 +41,7 @@ class IfConfig(SphinxDirective):
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
- option_spec: OptionSpec = {}
+ option_spec: ClassVar[OptionSpec] = {}
def run(self) -> list[Node]:
node = ifconfig()
@@ -58,7 +58,7 @@ def process_ifconfig_nodes(app: Sphinx, doctree: nodes.document, docname: str) -
ns['builder'] = app.builder.name
for node in list(doctree.findall(ifconfig)):
try:
- res = eval(node['expr'], ns) # NoQA: PGH001
+ res = eval(node['expr'], ns) # NoQA: S307
except Exception as err:
# handle exceptions in a clean fashion
from traceback import format_exception_only
@@ -74,7 +74,7 @@ def process_ifconfig_nodes(app: Sphinx, doctree: nodes.document, docname: str) -
node.replace_self(node.children)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_node(ifconfig)
app.add_directive('ifconfig', IfConfig)
app.connect('doctree-resolved', process_ifconfig_nodes)
diff --git a/sphinx/ext/imgconverter.py b/sphinx/ext/imgconverter.py
index 071a2cf..5a9fa30 100644
--- a/sphinx/ext/imgconverter.py
+++ b/sphinx/ext/imgconverter.py
@@ -5,7 +5,7 @@ from __future__ import annotations
import subprocess
import sys
from subprocess import CalledProcessError
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
import sphinx
from sphinx.errors import ExtensionError
@@ -15,6 +15,7 @@ from sphinx.util import logging
if TYPE_CHECKING:
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
@@ -56,9 +57,9 @@ class ImagemagickConverter(ImageConverter):
# (or first page) of image (ex. Animation GIF, PDF)
_from += '[0]'
- args = ([self.config.image_converter] +
- self.config.image_converter_args +
- [_from, _to])
+ args = ([
+ self.config.image_converter, *self.config.image_converter_args, _from, _to,
+ ])
logger.debug('Invoking %r ...', args)
subprocess.run(args, capture_output=True, check=True)
return True
@@ -73,7 +74,7 @@ class ImagemagickConverter(ImageConverter):
(exc.stderr, exc.stdout)) from exc
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_post_transform(ImagemagickConverter)
if sys.platform == 'win32':
# On Windows, we use ImageMagick v7 by default to avoid the trouble for
diff --git a/sphinx/ext/imgmath.py b/sphinx/ext/imgmath.py
index a5f49d9..c640bc7 100644
--- a/sphinx/ext/imgmath.py
+++ b/sphinx/ext/imgmath.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+__all__ = ()
+
import base64
import contextlib
import re
@@ -11,7 +13,7 @@ import tempfile
from hashlib import sha1
from os import path
from subprocess import CalledProcessError
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
from docutils import nodes
@@ -33,14 +35,13 @@ if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.config import Config
+ from sphinx.util.typing import ExtensionMetadata
from sphinx.writers.html import HTML5Translator
logger = logging.getLogger(__name__)
templates_path = path.join(package_dir, 'templates', 'imgmath')
-__all__ = ()
-
class MathExtError(SphinxError):
category = 'Math extension error'
@@ -70,7 +71,7 @@ def read_svg_depth(filename: str) -> int | None:
"""Read the depth from comment at last line of SVG file
"""
with open(filename, encoding="utf-8") as f:
- for line in f: # noqa: B007
+ for line in f: # NoQA: B007
pass
# Only last line is checked
matched = depthsvgcomment_re.match(line)
@@ -140,7 +141,7 @@ def compile_math(latex: str, builder: Builder) -> str:
# --output-directory option, so we have to manually chdir to the
# temp dir to run it.
command = [builder.config.imgmath_latex]
- if imgmath_latex_name not in ['tectonic']:
+ if imgmath_latex_name != 'tectonic':
command.append('--interaction=nonstopmode')
# add custom args from the config file
command.extend(builder.config.imgmath_latex_args)
@@ -149,7 +150,7 @@ def compile_math(latex: str, builder: Builder) -> str:
try:
subprocess.run(command, capture_output=True, cwd=tempdir, check=True,
encoding='ascii')
- if imgmath_latex_name in ['xelatex', 'tectonic']:
+ if imgmath_latex_name in {'xelatex', 'tectonic'}:
return path.join(tempdir, 'math.xdv')
else:
return path.join(tempdir, 'math.dvi')
@@ -384,7 +385,7 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non
raise nodes.SkipNode
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_html_math_renderer('imgmath',
(html_visit_math, None),
(html_visit_displaymath, None))
@@ -402,6 +403,6 @@ def setup(app: Sphinx) -> dict[str, Any]:
app.add_config_value('imgmath_latex_preamble', '', 'html')
app.add_config_value('imgmath_add_tooltips', True, 'html')
app.add_config_value('imgmath_font_size', 12, 'html')
- app.add_config_value('imgmath_embed', False, 'html', [bool])
+ app.add_config_value('imgmath_embed', False, 'html', bool)
app.connect('build-finished', clean_up_files)
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py
index 3a015a2..b9e5137 100644
--- a/sphinx/ext/inheritance_diagram.py
+++ b/sphinx/ext/inheritance_diagram.py
@@ -37,7 +37,7 @@ import re
from collections.abc import Iterable, Sequence
from importlib import import_module
from os import path
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any, ClassVar, cast
from docutils import nodes
from docutils.parsers.rst import directives
@@ -58,7 +58,7 @@ if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
- from sphinx.util.typing import OptionSpec
+ from sphinx.util.typing import ExtensionMetadata, OptionSpec
from sphinx.writers.html import HTML5Translator
from sphinx.writers.latex import LaTeXTranslator
from sphinx.writers.texinfo import TexinfoTranslator
@@ -123,11 +123,8 @@ def import_classes(name: str, currmodule: str) -> Any:
return [target]
elif inspect.ismodule(target):
# If imported object is a module, return classes defined on it
- classes = []
- for cls in target.__dict__.values():
- if inspect.isclass(cls) and cls.__module__ == target.__name__:
- classes.append(cls)
- return classes
+ return [cls for cls in target.__dict__.values()
+ if inspect.isclass(cls) and cls.__module__ == target.__name__]
raise InheritanceException('%r specified for inheritance diagram is '
'not a class or module' % name)
@@ -142,6 +139,7 @@ class InheritanceGraph:
from all the way to the root "object", and then is able to generate a
graphviz dot graph from them.
"""
+
def __init__(self, class_names: list[str], currmodule: str, show_builtins: bool = False,
private_bases: bool = False, parts: int = 0,
aliases: dict[str, str] | None = None, top_classes: Sequence[Any] = (),
@@ -272,10 +270,10 @@ class InheritanceGraph:
}
def _format_node_attrs(self, attrs: dict[str, Any]) -> str:
- return ','.join(['%s=%s' % x for x in sorted(attrs.items())])
+ return ','.join(f'{k}={v}' for k, v in sorted(attrs.items()))
def _format_graph_attrs(self, attrs: dict[str, Any]) -> str:
- return ''.join(['%s=%s;\n' % x for x in sorted(attrs.items())])
+ return ''.join(f'{k}={v};\n' for k, v in sorted(attrs.items()))
def generate_dot(self, name: str, urls: dict[str, str] | None = None,
env: BuildEnvironment | None = None,
@@ -309,34 +307,35 @@ class InheritanceGraph:
n_attrs.update(env.config.inheritance_node_attrs)
e_attrs.update(env.config.inheritance_edge_attrs)
- res: list[str] = []
- res.append('digraph %s {\n' % name)
- res.append(self._format_graph_attrs(g_attrs))
+ res: list[str] = [
+ f'digraph {name} {{\n',
+ self._format_graph_attrs(g_attrs),
+ ]
for name, fullname, bases, tooltip in sorted(self.class_info):
# Write the node
this_node_attrs = n_attrs.copy()
if fullname in urls:
- this_node_attrs['URL'] = '"%s"' % urls[fullname]
- this_node_attrs['target'] = '"_top"'
+ this_node_attrs["URL"] = '"%s"' % urls[fullname]
+ this_node_attrs["target"] = '"_top"'
if tooltip:
- this_node_attrs['tooltip'] = tooltip
- res.append(' "%s" [%s];\n' %
- (name, self._format_node_attrs(this_node_attrs)))
+ this_node_attrs["tooltip"] = tooltip
+ res.append(' "%s" [%s];\n' % (name, self._format_node_attrs(this_node_attrs)))
# Write the edges
- for base_name in bases:
- res.append(' "%s" -> "%s" [%s];\n' %
- (base_name, name,
- self._format_node_attrs(e_attrs)))
- res.append('}\n')
- return ''.join(res)
+ res.extend(
+ ' "%s" -> "%s" [%s];\n' % (base_name, name, self._format_node_attrs(e_attrs))
+ for base_name in bases
+ )
+ res.append("}\n")
+ return "".join(res)
class inheritance_diagram(graphviz):
"""
A docutils node to use as a placeholder for the inheritance diagram.
"""
+
pass
@@ -344,11 +343,12 @@ class InheritanceDiagram(SphinxDirective):
"""
Run when the inheritance_diagram directive is first encountered.
"""
+
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'parts': int,
'private-bases': directives.flag,
'caption': directives.unchanged,
@@ -378,7 +378,7 @@ class InheritanceDiagram(SphinxDirective):
aliases=self.config.inheritance_alias,
top_classes=node['top-classes'])
except InheritanceException as err:
- return [node.document.reporter.warning(err, line=self.lineno)]
+ return [node.document.reporter.warning(err, line=self.lineno)] # type: ignore[union-attr]
# Create xref nodes for each target of the graph's image map and
# add them to the doc tree so that Sphinx can resolve the
@@ -386,7 +386,7 @@ class InheritanceDiagram(SphinxDirective):
# removed from the doctree after we're done with them.
for name in graph.get_all_class_names():
refnodes, x = class_role( # type: ignore[call-arg,misc]
- 'class', ':class:`%s`' % name, name, 0, self.state) # type: ignore[arg-type]
+ 'class', ':class:`%s`' % name, name, 0, self.state)
node.extend(refnodes)
# Store the graph object so we can use it to generate the
# dot file later
@@ -477,7 +477,7 @@ def skip(self: nodes.NodeVisitor, node: inheritance_diagram) -> None:
raise nodes.SkipNode
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.setup_extension('sphinx.ext.graphviz')
app.add_node(
inheritance_diagram,
@@ -487,8 +487,8 @@ def setup(app: Sphinx) -> dict[str, Any]:
man=(skip, None),
texinfo=(texinfo_visit_inheritance_diagram, None))
app.add_directive('inheritance-diagram', InheritanceDiagram)
- app.add_config_value('inheritance_graph_attrs', {}, False)
- app.add_config_value('inheritance_node_attrs', {}, False)
- app.add_config_value('inheritance_edge_attrs', {}, False)
- app.add_config_value('inheritance_alias', {}, False)
+ app.add_config_value('inheritance_graph_attrs', {}, '')
+ app.add_config_value('inheritance_node_attrs', {}, '')
+ app.add_config_value('inheritance_edge_attrs', {}, '')
+ app.add_config_value('inheritance_alias', {}, '')
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
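The directive builds an InheritanceGraph and serialises it through the rewritten generate_dot() above. A rough sketch of that internal API, assuming only the signatures shown in this hunk (urls is passed explicitly because its default handling is not visible here):

    from sphinx.ext.inheritance_diagram import InheritanceGraph

    # collect SphinxError (builtin bases are skipped by default) and emit Graphviz DOT source
    graph = InheritanceGraph(['sphinx.errors.SphinxError'], currmodule='', parts=0)
    print(graph.generate_dot('inheritance_graph', urls={}))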
diff --git a/sphinx/ext/intersphinx.py b/sphinx/ext/intersphinx.py
index 453bb6e..a8a2cf1 100644
--- a/sphinx/ext/intersphinx.py
+++ b/sphinx/ext/intersphinx.py
@@ -34,6 +34,7 @@ from docutils.utils import relative_path
import sphinx
from sphinx.addnodes import pending_xref
from sphinx.builders.html import INVENTORY_FILENAME
+from sphinx.deprecation import _deprecation_warning
from sphinx.errors import ExtensionError
from sphinx.locale import _, __
from sphinx.transforms.post_transforms import ReferencesResolver
@@ -53,7 +54,7 @@ if TYPE_CHECKING:
from sphinx.config import Config
from sphinx.domains import Domain
from sphinx.environment import BuildEnvironment
- from sphinx.util.typing import Inventory, InventoryItem, RoleFunction
+ from sphinx.util.typing import ExtensionMetadata, Inventory, InventoryItem, RoleFunction
InventoryCacheEntry = tuple[Union[str, None], int, Inventory]
@@ -245,7 +246,7 @@ def fetch_inventory_group(
for fail in failures:
logger.info(*fail)
else:
- issues = '\n'.join([f[0] % f[1:] for f in failures])
+ issues = '\n'.join(f[0] % f[1:] for f in failures)
logger.warning(__("failed to reach any of the inventories "
"with the following issues:") + "\n" + issues)
@@ -334,8 +335,10 @@ def _resolve_reference_in_domain_by_target(
if target in inventory[objtype]:
# Case sensitive match, use it
data = inventory[objtype][target]
- elif objtype == 'std:term':
- # Check for potential case insensitive matches for terms only
+ elif objtype in {'std:label', 'std:term'}:
+ # Some types require case insensitive matches:
+ # * 'term': https://github.com/sphinx-doc/sphinx/issues/9291
+ # * 'label': https://github.com/sphinx-doc/sphinx/issues/12008
target_lower = target.lower()
insensitive_matches = list(filter(lambda k: k.lower() == target_lower,
inventory[objtype].keys()))
@@ -479,7 +482,6 @@ def resolve_reference_detect_inventory(env: BuildEnvironment,
to form ``inv_name:newtarget``. If ``inv_name`` is a named inventory, then resolution
is tried in that inventory with the new target.
"""
-
# ordinary direct lookup, use data as is
res = resolve_reference_any_inventory(env, True, node, contnode)
if res is not None:
@@ -501,7 +503,6 @@ def resolve_reference_detect_inventory(env: BuildEnvironment,
def missing_reference(app: Sphinx, env: BuildEnvironment, node: pending_xref,
contnode: TextElement) -> nodes.reference | None:
"""Attempt to resolve a missing reference via intersphinx references."""
-
return resolve_reference_detect_inventory(env, node, contnode)
@@ -533,17 +534,90 @@ class IntersphinxRole(SphinxRole):
assert self.name == self.orig_name.lower()
inventory, name_suffix = self.get_inventory_and_name_suffix(self.orig_name)
if inventory and not inventory_exists(self.env, inventory):
- logger.warning(__('inventory for external cross-reference not found: %s'),
- inventory, location=(self.env.docname, self.lineno))
+ self._emit_warning(
+ __('inventory for external cross-reference not found: %r'), inventory
+ )
return [], []
- role_name = self.get_role_name(name_suffix)
+ domain_name, role_name = self._get_domain_role(name_suffix)
+
if role_name is None:
- logger.warning(__('role for external cross-reference not found: %s'), name_suffix,
- location=(self.env.docname, self.lineno))
+ self._emit_warning(
+ __('invalid external cross-reference suffix: %r'), name_suffix
+ )
return [], []
- result, messages = self.invoke_role(role_name)
+ # attempt to find a matching role function
+ role_func: RoleFunction | None
+
+ if domain_name is not None:
+ # the user specified a domain, so we only check that
+ if (domain := self.env.domains.get(domain_name)) is None:
+ self._emit_warning(
+ __('domain for external cross-reference not found: %r'), domain_name
+ )
+ return [], []
+ if (role_func := domain.roles.get(role_name)) is None:
+ msg = 'role for external cross-reference not found in domain %r: %r'
+ if (
+ object_types := domain.object_types.get(role_name)
+ ) is not None and object_types.roles:
+ self._emit_warning(
+ __(f'{msg} (perhaps you meant one of: %s)'),
+ domain_name,
+ role_name,
+ self._concat_strings(object_types.roles),
+ )
+ else:
+ self._emit_warning(__(msg), domain_name, role_name)
+ return [], []
+
+ else:
+ # the user did not specify a domain,
+ # so we check first the default (if available) then standard domains
+ domains: list[Domain] = []
+ if default_domain := self.env.temp_data.get('default_domain'):
+ domains.append(default_domain)
+ if (
+ std_domain := self.env.domains.get('std')
+ ) is not None and std_domain not in domains:
+ domains.append(std_domain)
+
+ role_func = None
+ for domain in domains:
+ if (role_func := domain.roles.get(role_name)) is not None:
+ domain_name = domain.name
+ break
+
+ if role_func is None or domain_name is None:
+ domains_str = self._concat_strings(d.name for d in domains)
+ msg = 'role for external cross-reference not found in domains %s: %r'
+ possible_roles: set[str] = set()
+ for d in domains:
+ if o := d.object_types.get(role_name):
+ possible_roles.update(f'{d.name}:{r}' for r in o.roles)
+ if possible_roles:
+ msg = f'{msg} (perhaps you meant one of: %s)'
+ self._emit_warning(
+ __(msg),
+ domains_str,
+ role_name,
+ self._concat_strings(possible_roles),
+ )
+ else:
+ self._emit_warning(__(msg), domains_str, role_name)
+ return [], []
+
+ result, messages = role_func(
+ f'{domain_name}:{role_name}',
+ self.rawtext,
+ self.text,
+ self.lineno,
+ self.inliner,
+ self.options,
+ self.content,
+ )
+
for node in result:
if isinstance(node, pending_xref):
node['intersphinx'] = True
@@ -552,13 +626,17 @@ class IntersphinxRole(SphinxRole):
return result, messages
def get_inventory_and_name_suffix(self, name: str) -> tuple[str | None, str]:
+ """Extract an inventory name (if any) and ``domain+name`` suffix from a role *name*.
+ and the domain+name suffix.
+
+ The role name is expected to be of one of the following forms:
+
+ - ``external+inv:name`` -- explicit inventory and name, any domain.
+ - ``external+inv:domain:name`` -- explicit inventory, domain and name.
+ - ``external:name`` -- any inventory and domain, explicit name.
+ - ``external:domain:name`` -- any inventory, explicit domain and name.
+ """
assert name.startswith('external'), name
- # either we have an explicit inventory name, i.e,
- # :external+inv:role: or
- # :external+inv:domain:role:
- # or we look in all inventories, i.e.,
- # :external:role: or
- # :external:domain:role:
suffix = name[9:]
if name[8] == '+':
inv_name, suffix = suffix.split(':', 1)
@@ -569,7 +647,39 @@ class IntersphinxRole(SphinxRole):
msg = f'Malformed :external: role name: {name}'
raise ValueError(msg)
+ def _get_domain_role(self, name: str) -> tuple[str | None, str | None]:
+ """Convert the *name* string into a domain and a role name.
+
+ - If *name* contains no ``:``, return ``(None, name)``.
+ - If *name* contains a single ``:``, the domain/role is split on this.
+ - If *name* contains multiple ``:``, return ``(None, None)``.
+ """
+ names = name.split(':')
+ if len(names) == 1:
+ return None, names[0]
+ elif len(names) == 2:
+ return names[0], names[1]
+ else:
+ return None, None
+
+ def _emit_warning(self, msg: str, /, *args: Any) -> None:
+ logger.warning(
+ msg,
+ *args,
+ type='intersphinx',
+ subtype='external',
+ location=(self.env.docname, self.lineno),
+ )
+
+ def _concat_strings(self, strings: Iterable[str]) -> str:
+ return ', '.join(f'{s!r}' for s in sorted(strings))
+
+ # deprecated methods
+
def get_role_name(self, name: str) -> tuple[str, str] | None:
+ _deprecation_warning(
+ __name__, f'{self.__class__.__name__}.get_role_name', '', remove=(9, 0)
+ )
names = name.split(':')
if len(names) == 1:
# role
@@ -591,6 +701,9 @@ class IntersphinxRole(SphinxRole):
return None
def is_existent_role(self, domain_name: str, role_name: str) -> bool:
+ _deprecation_warning(
+ __name__, f'{self.__class__.__name__}.is_existent_role', '', remove=(9, 0)
+ )
try:
domain = self.env.get_domain(domain_name)
return role_name in domain.roles
@@ -598,6 +711,10 @@ class IntersphinxRole(SphinxRole):
return False
def invoke_role(self, role: tuple[str, str]) -> tuple[list[Node], list[system_message]]:
+ """Invoke the role described by a ``(domain, role name)`` pair."""
+ _deprecation_warning(
+ __name__, f'{self.__class__.__name__}.invoke_role', '', remove=(9, 0)
+ )
domain = self.env.get_domain(role[0])
if domain:
role_func = domain.role(role[1])
@@ -681,11 +798,11 @@ def normalize_intersphinx_mapping(app: Sphinx, config: Config) -> None:
config.intersphinx_mapping.pop(key)
-def setup(app: Sphinx) -> dict[str, Any]:
- app.add_config_value('intersphinx_mapping', {}, True)
- app.add_config_value('intersphinx_cache_limit', 5, False)
- app.add_config_value('intersphinx_timeout', None, False)
- app.add_config_value('intersphinx_disabled_reftypes', ['std:doc'], True)
+def setup(app: Sphinx) -> ExtensionMetadata:
+ app.add_config_value('intersphinx_mapping', {}, 'env')
+ app.add_config_value('intersphinx_cache_limit', 5, '')
+ app.add_config_value('intersphinx_timeout', None, '')
+ app.add_config_value('intersphinx_disabled_reftypes', ['std:doc'], 'env')
app.connect('config-inited', normalize_intersphinx_mapping, priority=800)
app.connect('builder-inited', load_mappings)
app.connect('source-read', install_dispatcher)
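The role-name forms documented in get_inventory_and_name_suffix() above resolve only against inventories configured in intersphinx_mapping. A minimal conf.py sketch (hypothetical project; 'python' is the inventory name referenced by the external+python form):

    # conf.py -- hypothetical project configuration for sphinx.ext.intersphinx
    extensions = ['sphinx.ext.intersphinx']
    intersphinx_mapping = {
        'python': ('https://docs.python.org/3', None),
    }

With that mapping, :external+python:py:class:`zipfile.ZipFile` looks up the named inventory only, while :external:py:class:`zipfile.ZipFile` searches every configured inventory; unknown domains or roles now go through the _emit_warning() paths added above, which list candidate roles where possible.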
diff --git a/sphinx/ext/linkcode.py b/sphinx/ext/linkcode.py
index ee10406..93118cd 100644
--- a/sphinx/ext/linkcode.py
+++ b/sphinx/ext/linkcode.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
from docutils import nodes
@@ -15,6 +15,7 @@ if TYPE_CHECKING:
from docutils.nodes import Node
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
class LinkcodeError(SphinxError):
@@ -71,7 +72,7 @@ def doctree_read(app: Sphinx, doctree: Node) -> None:
signode += onlynode
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.connect('doctree-read', doctree_read)
app.add_config_value('linkcode_resolve', None, '')
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/mathjax.py b/sphinx/ext/mathjax.py
index 41d18b9..24109ee 100644
--- a/sphinx/ext/mathjax.py
+++ b/sphinx/ext/mathjax.py
@@ -21,6 +21,7 @@ from sphinx.util.math import get_node_equation_number
if TYPE_CHECKING:
from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
from sphinx.writers.html import HTML5Translator
# more information for mathjax secure url is here:
@@ -109,7 +110,7 @@ def install_mathjax(app: Sphinx, pagename: str, templatename: str, context: dict
builder.add_js_file(app.config.mathjax_path, **options)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_html_math_renderer('mathjax',
(html_visit_math, None),
(html_visit_displaymath, None))
diff --git a/sphinx/ext/napoleon/__init__.py b/sphinx/ext/napoleon/__init__.py
index 61aa3d8..581f3ea 100644
--- a/sphinx/ext/napoleon/__init__.py
+++ b/sphinx/ext/napoleon/__init__.py
@@ -2,13 +2,17 @@
from __future__ import annotations
-from typing import Any
+from typing import TYPE_CHECKING, Any
import sphinx
from sphinx.application import Sphinx
from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
from sphinx.util import inspect
+if TYPE_CHECKING:
+ from sphinx.config import _ConfigRebuild
+ from sphinx.util.typing import ExtensionMetadata
+
class Config:
"""Sphinx napoleon extension settings in `conf.py`.
@@ -261,8 +265,9 @@ class Config:
Use the type annotations of class attributes that are documented in the docstring
but do not have a type in the docstring.
- """
- _config_values = {
+ """ # NoQA: D301
+
+ _config_values: dict[str, tuple[Any, _ConfigRebuild]] = {
'napoleon_google_docstring': (True, 'env'),
'napoleon_numpy_docstring': (True, 'env'),
'napoleon_include_init_with_doc': (False, 'env'),
@@ -288,7 +293,7 @@ class Config:
setattr(self, name, value)
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
@@ -326,7 +331,7 @@ def setup(app: Sphinx) -> dict[str, Any]:
def _patch_python_domain() -> None:
- from sphinx.domains.python import PyObject, PyTypedField
+ from sphinx.domains.python._object import PyObject, PyTypedField
from sphinx.locale import _
for doc_field in PyObject.doc_field_types:
if doc_field.name == 'parameter':
@@ -335,7 +340,7 @@ def _patch_python_domain() -> None:
PyObject.doc_field_types.append(
PyTypedField('keyword', label=_('Keyword Arguments'),
names=('keyword', 'kwarg', 'kwparam'),
- typerolename='obj', typenames=('paramtype', 'kwtype'),
+ typerolename='class', typenames=('paramtype', 'kwtype'),
can_collapse=True))
@@ -386,7 +391,7 @@ def _process_docstring(app: Sphinx, what: str, name: str, obj: Any,
docstring = GoogleDocstring(result_lines, app.config, app, what, name,
obj, options)
result_lines = docstring.lines()
- lines[:] = result_lines[:]
+ lines[:] = result_lines.copy()
def _skip_member(app: Sphinx, what: str, name: str, obj: Any,
diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py
index 2ffde39..2ce3b2d 100644
--- a/sphinx/ext/napoleon/docstring.py
+++ b/sphinx/ext/napoleon/docstring.py
@@ -7,6 +7,7 @@ import contextlib
import inspect
import re
from functools import partial
+from itertools import starmap
from typing import TYPE_CHECKING, Any, Callable
from sphinx.locale import _, __
@@ -14,6 +15,8 @@ from sphinx.util import logging
from sphinx.util.typing import get_type_hints, stringify_annotation
if TYPE_CHECKING:
+ from collections.abc import Iterator
+
from sphinx.application import Sphinx
from sphinx.config import Config as SphinxConfig
@@ -145,7 +148,7 @@ class GoogleDocstring:
"""
_name_rgx = re.compile(r"^\s*((?::(?P<role>\S+):)?`(?P<name>~?[a-zA-Z0-9_.-]+)`|"
- r" (?P<name2>~?[a-zA-Z0-9_.-]+))\s*", re.X)
+ r" (?P<name2>~?[a-zA-Z0-9_.-]+))\s*", re.VERBOSE)
def __init__(
self,
@@ -304,19 +307,18 @@ class GoogleDocstring:
_type = _convert_type_spec(_type, self._config.napoleon_type_aliases or {})
indent = self._get_indent(line) + 1
- _descs = [_desc] + self._dedent(self._consume_indented_block(indent))
+ _descs = [_desc, *self._dedent(self._consume_indented_block(indent))]
_descs = self.__class__(_descs, self._config).lines()
return _name, _type, _descs
def _consume_fields(self, parse_type: bool = True, prefer_type: bool = False,
multiple: bool = False) -> list[tuple[str, str, list[str]]]:
self._consume_empty()
- fields = []
+ fields: list[tuple[str, str, list[str]]] = []
while not self._is_section_break():
_name, _type, _desc = self._consume_field(parse_type, prefer_type)
if multiple and _name:
- for name in _name.split(","):
- fields.append((name.strip(), _type, _desc))
+ fields.extend((name.strip(), _type, _desc) for name in _name.split(","))
elif _name or _type or _desc:
fields.append((_name, _type, _desc))
return fields
@@ -327,7 +329,7 @@ class GoogleDocstring:
if not colon or not _desc:
_type, _desc = _desc, _type
_desc += colon
- _descs = [_desc] + self._dedent(self._consume_to_end())
+ _descs = [_desc, *self._dedent(self._consume_to_end())]
_descs = self.__class__(_descs, self._config).lines()
return _type, _descs
@@ -399,15 +401,15 @@ class GoogleDocstring:
def _fix_field_desc(self, desc: list[str]) -> list[str]:
if self._is_list(desc):
- desc = [''] + desc
+ desc = ['', *desc]
elif desc[0].endswith('::'):
desc_block = desc[1:]
indent = self._get_indent(desc[0])
block_indent = self._get_initial_indent(desc_block)
if block_indent > indent:
- desc = [''] + desc
+ desc = ['', *desc]
else:
- desc = ['', desc[0]] + self._indent(desc_block, 4)
+ desc = ['', desc[0], *self._indent(desc_block, 4)]
return desc
def _format_admonition(self, admonition: str, lines: list[str]) -> list[str]:
@@ -416,7 +418,7 @@ class GoogleDocstring:
return [f'.. {admonition}:: {lines[0].strip()}', '']
elif lines:
lines = self._indent(self._dedent(lines), 3)
- return ['.. %s::' % admonition, ''] + lines + ['']
+ return ['.. %s::' % admonition, '', *lines, '']
else:
return ['.. %s::' % admonition, '']
@@ -453,7 +455,7 @@ class GoogleDocstring:
if _type:
lines.append(f':{type_role} {_name}: {_type}')
- return lines + ['']
+ return [*lines, '']
def _format_field(self, _name: str, _type: str, _desc: list[str]) -> list[str]:
_desc = self._strip_empty(_desc)
@@ -480,7 +482,7 @@ class GoogleDocstring:
if _desc[0]:
return [field + _desc[0]] + _desc[1:]
else:
- return [field] + _desc
+ return [field, *_desc]
else:
return [field]
@@ -537,7 +539,7 @@ class GoogleDocstring:
return [(' ' * n) + line for line in lines]
def _is_indented(self, line: str, indent: int = 1) -> bool:
- for i, s in enumerate(line): # noqa: SIM110
+ for i, s in enumerate(line): # NoQA: SIM110
if i >= indent:
return True
elif not s.isspace():
@@ -623,7 +625,7 @@ class GoogleDocstring:
self._is_in_section = True
self._section_indent = self._get_current_indent()
if _directive_regex.match(section):
- lines = [section] + self._consume_to_next_section()
+ lines = [section, *self._consume_to_next_section()]
else:
lines = self._sections[section.lower()](section)
finally:
@@ -711,7 +713,7 @@ class GoogleDocstring:
else:
header = '.. rubric:: %s' % section
if lines:
- return [header, ''] + lines + ['']
+ return [header, '', *lines, '']
else:
return [header, '']
@@ -733,7 +735,7 @@ class GoogleDocstring:
if 'no-index' in self._opt or 'noindex' in self._opt:
lines.append(' :no-index:')
if _desc:
- lines.extend([''] + self._indent(_desc, 3))
+ lines.extend(['', *self._indent(_desc, 3)])
lines.append('')
return lines
@@ -888,7 +890,7 @@ def _recombine_set_tokens(tokens: list[str]) -> list[str]:
token_queue = collections.deque(tokens)
keywords = ("optional", "default")
- def takewhile_set(tokens):
+ def takewhile_set(tokens: collections.deque[str]) -> Iterator[str]:
open_braces = 0
previous_token = None
while True:
@@ -924,7 +926,7 @@ def _recombine_set_tokens(tokens: list[str]) -> list[str]:
if open_braces == 0:
break
- def combine_set(tokens):
+ def combine_set(tokens: collections.deque[str]) -> Iterator[str]:
while True:
try:
token = tokens.popleft()
@@ -941,7 +943,7 @@ def _recombine_set_tokens(tokens: list[str]) -> list[str]:
def _tokenize_type_spec(spec: str) -> list[str]:
- def postprocess(item):
+ def postprocess(item: str) -> list[str]:
if _default_regex.match(item):
default = item[:7]
# can't be separated by anything other than a single space
@@ -962,7 +964,7 @@ def _tokenize_type_spec(spec: str) -> list[str]:
def _token_type(token: str, location: str | None = None) -> str:
- def is_numeric(token):
+ def is_numeric(token: str) -> bool:
try:
# use complex to make sure every numeric value is detected as literal
complex(token)
@@ -1026,7 +1028,7 @@ def _convert_numpy_type_spec(
if translations is None:
translations = {}
- def convert_obj(obj, translations, default_translation):
+ def convert_obj(obj: str, translations: dict[str, str], default_translation: str) -> str:
translation = translations.get(obj, obj)
# use :class: (the default) only if obj is not a standard singleton
@@ -1155,6 +1157,7 @@ class NumpyDocstring(GoogleDocstring):
The lines of the docstring in a list.
"""
+
def __init__(
self,
docstring: str | list[str],
@@ -1180,13 +1183,13 @@ class NumpyDocstring(GoogleDocstring):
elif filepath is None:
filepath = ""
- return ":".join([filepath, "docstring of %s" % name])
+ return f"{filepath}:docstring of {name}"
def _escape_args_and_kwargs(self, name: str) -> str:
func = super()._escape_args_and_kwargs
if ", " in name:
- return ", ".join(func(param) for param in name.split(", "))
+ return ", ".join(map(func, name.split(", ")))
else:
return func(name)
@@ -1233,7 +1236,7 @@ class NumpyDocstring(GoogleDocstring):
line1, line2 = self._lines.get(0), self._lines.get(1)
return (not self._lines or
self._is_section_header() or
- ['', ''] == [line1, line2] or
+ (line1 == line2 == '') or
(self._is_in_section and
line1 and
not self._is_indented(line1, self._section_indent)))
@@ -1269,7 +1272,7 @@ class NumpyDocstring(GoogleDocstring):
func_name1, func_name2, :meth:`func_name`, func_name3
"""
- items = []
+ items: list[tuple[str, list[str], str | None]] = []
def parse_item_name(text: str) -> tuple[str, str | None]:
"""Match ':role:`name`' or 'name'"""
@@ -1286,10 +1289,12 @@ class NumpyDocstring(GoogleDocstring):
if not name:
return
name, role = parse_item_name(name)
- items.append((name, list(rest), role))
- del rest[:]
+ items.append((name, rest.copy(), role))
+ rest.clear()
- def translate(func, description, role):
+ def translate(
+ func: str, description: list[str], role: str | None,
+ ) -> tuple[str, list[str], str | None]:
translations = self._config.napoleon_type_aliases
if role is not None or not translations:
return func, description, role
@@ -1336,10 +1341,7 @@ class NumpyDocstring(GoogleDocstring):
return []
# apply type aliases
- items = [
- translate(func, description, role)
- for func, description, role in items
- ]
+ items = list(starmap(translate, items))
lines: list[str] = []
last_had_desc = True
diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py
index e540e7e..1962328 100644
--- a/sphinx/ext/todo.py
+++ b/sphinx/ext/todo.py
@@ -7,7 +7,9 @@ with a backlink to the original location.
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, cast
+import functools
+import operator
+from typing import TYPE_CHECKING, Any, ClassVar, cast
from docutils import nodes
from docutils.parsers.rst import directives
@@ -26,7 +28,7 @@ if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
- from sphinx.util.typing import OptionSpec
+ from sphinx.util.typing import ExtensionMetadata, OptionSpec
from sphinx.writers.html import HTML5Translator
from sphinx.writers.latex import LaTeXTranslator
@@ -51,7 +53,7 @@ class Todo(BaseAdmonition, SphinxDirective):
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
- option_spec: OptionSpec = {
+ option_spec: ClassVar[OptionSpec] = {
'class': directives.class_option,
'name': directives.unchanged,
}
@@ -85,7 +87,7 @@ class TodoDomain(Domain):
def clear_doc(self, docname: str) -> None:
self.todos.pop(docname, None)
- def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
+ def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
for docname in docnames:
self.todos[docname] = otherdata['todos'][docname]
@@ -110,7 +112,7 @@ class TodoList(SphinxDirective):
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
- option_spec: OptionSpec = {}
+ option_spec: ClassVar[OptionSpec] = {}
def run(self) -> list[Node]:
# Simply insert an empty todolist node which will be replaced later
@@ -129,7 +131,8 @@ class TodoListProcessor:
self.process(doctree, docname)
def process(self, doctree: nodes.document, docname: str) -> None:
- todos: list[todo_node] = sum(self.domain.todos.values(), [])
+ todos: list[todo_node] = functools.reduce(
+ operator.iadd, self.domain.todos.values(), [])
for node in list(doctree.findall(todolist)):
if not self.config.todo_include_todos:
node.parent.remove(node)
@@ -221,7 +224,7 @@ def latex_depart_todo_node(self: LaTeXTranslator, node: todo_node) -> None:
self.body.append('\\end{sphinxadmonition}\n')
-def setup(app: Sphinx) -> dict[str, Any]:
+def setup(app: Sphinx) -> ExtensionMetadata:
app.add_event('todo-defined')
app.add_config_value('todo_include_todos', False, 'html')
app.add_config_value('todo_link_only', False, 'html')
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index c5fcda5..39a08b6 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import operator
import posixpath
import traceback
from importlib import import_module
@@ -22,11 +23,12 @@ from sphinx.util.display import status_iterator
from sphinx.util.nodes import make_refnode
if TYPE_CHECKING:
- from collections.abc import Generator, Iterable
+ from collections.abc import Iterable, Iterator
from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.environment import BuildEnvironment
+ from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
@@ -79,9 +81,7 @@ def is_supported_builder(builder: Builder) -> bool:
return False
if builder.name == 'singlehtml':
return False
- if builder.name.startswith('epub') and not builder.config.viewcode_enable_epub:
- return False
- return True
+ return not (builder.name.startswith('epub') and not builder.config.viewcode_enable_epub)
def doctree_read(app: Sphinx, doctree: Node) -> None:
@@ -185,6 +185,7 @@ def env_purge_doc(app: Sphinx, env: BuildEnvironment, docname: str) -> None:
class ViewcodeAnchorTransform(SphinxPostTransform):
"""Convert or remove viewcode_anchor nodes depends on builder."""
+
default_priority = 100
def run(self, **kwargs: Any) -> None:
@@ -239,7 +240,7 @@ def should_generate_module_page(app: Sphinx, modname: str) -> bool:
return True
-def collect_pages(app: Sphinx) -> Generator[tuple[str, dict[str, Any], str], None, None]:
+def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
env = app.builder.env
if not hasattr(env, '_viewcode_modules'):
return
@@ -254,7 +255,7 @@ def collect_pages(app: Sphinx) -> Generator[tuple[str, dict[str, Any], str], Non
sorted(env._viewcode_modules.items()),
__('highlighting module code... '), "blue",
len(env._viewcode_modules),
- app.verbosity, lambda x: x[0]):
+ app.verbosity, operator.itemgetter(0)):
if not entry:
continue
if not should_generate_module_page(app, modname):
@@ -340,11 +341,11 @@ def collect_pages(app: Sphinx) -> Generator[tuple[str, dict[str, Any], str], Non
yield (posixpath.join(OUTPUT_DIRNAME, 'index'), context, 'page.html')
-def setup(app: Sphinx) -> dict[str, Any]:
- app.add_config_value('viewcode_import', None, False)
- app.add_config_value('viewcode_enable_epub', False, False)
- app.add_config_value('viewcode_follow_imported_members', True, False)
- app.add_config_value('viewcode_line_numbers', False, 'env', (bool,))
+def setup(app: Sphinx) -> ExtensionMetadata:
+ app.add_config_value('viewcode_import', None, '')
+ app.add_config_value('viewcode_enable_epub', False, '')
+ app.add_config_value('viewcode_follow_imported_members', True, '')
+ app.add_config_value('viewcode_line_numbers', False, 'env', bool)
app.connect('doctree-read', doctree_read)
app.connect('env-merge-info', env_merge_info)
app.connect('env-purge-doc', env_purge_doc)